- New Media Foundation grabber
- JsonAPI available grabber fix
- Commented JSON config removed
Paulchen Panther
2020-12-18 17:38:21 +01:00
parent a42aae44d1
commit c672ae6075
30 changed files with 2097 additions and 666 deletions

include/grabber/MFGrabber.h Normal file

@@ -0,0 +1,138 @@
#pragma once

// COM includes
#include <Guiddef.h>

// Qt includes
#include <QObject>
#include <QRectF>
#include <QMap>
#include <QMultiMap>

// utils includes
#include <utils/PixelFormat.h>
#include <utils/Components.h>
#include <hyperion/Grabber.h>

// decoder thread includes
#include <grabber/MFThread.h>

// TurboJPEG decoder
#ifdef HAVE_TURBO_JPEG
#include <turbojpeg.h>
#endif

/// Forward class declaration
class SourceReaderCB;

/// Forward struct declaration
struct IMFSourceReader;

///
/// Media Foundation capture class
///
class MFGrabber : public Grabber
{
	Q_OBJECT
	friend class SourceReaderCB;

public:
	struct DevicePropertiesItem
	{
		int x, y, fps, fps_a, fps_b;
		PixelFormat pf;
		GUID guid;
	};

	struct DeviceProperties
	{
		QString name = QString();
		QMultiMap<QString, int> inputs = QMultiMap<QString, int>();
		QStringList displayResolutions = QStringList();
		QStringList framerates = QStringList();
		QList<DevicePropertiesItem> valid = QList<DevicePropertiesItem>();
	};

	MFGrabber(const QString & device, const unsigned width, const unsigned height, const unsigned fps, const unsigned input, int pixelDecimation);
	~MFGrabber() override;

	void receive_image(const void *frameImageBuffer, int size, QString message);
	QRectF getSignalDetectionOffset() const { return QRectF(_x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max); }
	bool getSignalDetectionEnabled() const { return _signalDetectionEnabled; }
	bool getCecDetectionEnabled() const { return _cecDetectionEnabled; }
	QStringList getV4L2devices() const override;
	QString getV4L2deviceName(const QString& devicePath) const override { return devicePath; }
	QMultiMap<QString, int> getV4L2deviceInputs(const QString& devicePath) const override { return _deviceProperties.value(devicePath).inputs; }
	QStringList getResolutions(const QString& devicePath) const override { return _deviceProperties.value(devicePath).displayResolutions; }
	QStringList getFramerates(const QString& devicePath) const override { return _deviceProperties.value(devicePath).framerates; }
	QStringList getV4L2EncodingFormats(const QString& devicePath) const override;
	void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold) override;
	void setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax) override;
	void setSignalDetectionEnable(bool enable) override;
	void setPixelDecimation(int pixelDecimation) override;
	void setCecDetectionEnable(bool enable) override;
	void setDeviceVideoStandard(QString device, VideoStandard videoStandard) override;
	bool setInput(int input) override;
	bool setWidthHeight(int width, int height) override;
	bool setFramerate(int fps) override;
	void setFpsSoftwareDecimation(int decimation);
	void setEncoding(QString enc);
	void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue);

public slots:
	bool start();
	void stop();
	void newThreadFrame(unsigned int _workerIndex, const Image<ColorRgb>& image, unsigned int sourceCount);

signals:
	void newFrame(const Image<ColorRgb> & image);

private:
	struct buffer
	{
		void *start;
		size_t length;
	};

	bool init();
	void uninit();
	bool init_device(QString device, DevicePropertiesItem props);
	void uninit_device();
	void enumVideoCaptureDevices();
	void start_capturing();
	bool process_image(const void *frameImageBuffer, int size);
	void checkSignalDetectionEnabled(Image<ColorRgb> image);

	QString _deviceName;
	QMap<QString, MFGrabber::DeviceProperties> _deviceProperties;
	std::vector<buffer> _buffers;
	HRESULT _hr;
	SourceReaderCB* _sourceReaderCB;
	PixelFormat _pixelFormat;
	int _pixelDecimation,
		_lineLength,
		_frameByteSize,
		_noSignalCounterThreshold,
		_noSignalCounter,
		_fpsSoftwareDecimation,
		_brightness,
		_contrast,
		_saturation,
		_hue;
	volatile unsigned int _currentFrame;
	ColorRgb _noSignalThresholdColor;
	bool _signalDetectionEnabled,
		_cecDetectionEnabled,
		_noSignalDetected,
		_initialized;
	double _x_frac_min,
		_y_frac_min,
		_x_frac_max,
		_y_frac_max;
	MFThreadManager _threadManager;
	IMFSourceReader* _sourceReader;

#ifdef HAVE_TURBO_JPEG
	int _subsamp;
#endif
};

include/grabber/MFThread.h Normal file

@@ -0,0 +1,138 @@
#pragma once

// Qt includes
#include <QThread>
#include <QSemaphore>

// util includes
#include <utils/PixelFormat.h>
#include <utils/ImageResampler.h>

// TurboJPEG decoder
#ifdef HAVE_TURBO_JPEG
#include <QImage>
#include <QColor>
#include <turbojpeg.h>
#endif

// Forward class declaration
class MFThreadManager;

/// Encoder thread for USB devices
class MFThread : public QThread
{
	Q_OBJECT
	friend class MFThreadManager;

public:
	MFThread();
	~MFThread();

	void setup(
		unsigned int threadIndex, PixelFormat pixelFormat, uint8_t* sharedData,
		int size, int width, int height, int lineLength,
		int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
		VideoMode videoMode, int currentFrame, int pixelDecimation);
	void run();
	bool isBusy();
	void noBusy();

signals:
	void newFrame(unsigned int threadIndex, const Image<ColorRgb>& data, unsigned int sourceCount);

private:
	void processImageMjpeg();

#ifdef HAVE_TURBO_JPEG
	tjhandle _decompress;
	int _scalingFactorsCount = 0;
	tjscalingfactor* _scalingFactors = nullptr;
#endif

	static volatile bool _isActive;
	volatile bool _isBusy;
	QSemaphore _semaphore;
	unsigned int _workerIndex;
	PixelFormat _pixelFormat;
	uint8_t* _localData;
	int _localDataSize;
	int _size;
	int _width;
	int _height;
	int _lineLength;
	int _subsamp;
	unsigned _cropLeft;
	unsigned _cropTop;
	unsigned _cropBottom;
	unsigned _cropRight;
	int _currentFrame;
	int _pixelDecimation;
	ImageResampler _imageResampler;
};
class MFThreadManager : public QObject
{
	Q_OBJECT

public:
	MFThreadManager() : _threads(nullptr)
	{
		// Derive the worker pool size from the number of logical cores:
		// 2-3 cores -> 2 workers, 4-5 cores -> 3 workers, 6 or more -> 4 workers, minimum 1.
		int select = QThread::idealThreadCount();
		if (select >= 2 && select <= 3)
			select = 2;
		else if (select > 3 && select <= 5)
			select = 3;
		else if (select > 5)
			select = 4;

		_maxThreads = qMax(select, 1);
	}

	~MFThreadManager()
	{
		if (_threads != nullptr)
		{
			for (unsigned i = 0; i < _maxThreads; i++)
				if (_threads[i] != nullptr)
				{
					_threads[i]->deleteLater();
					_threads[i] = nullptr;
				}

			delete[] _threads;
			_threads = nullptr;
		}
	}

	void initThreads()
	{
		if (_maxThreads >= 1)
		{
			_threads = new MFThread*[_maxThreads];
			for (unsigned i = 0; i < _maxThreads; i++)
				_threads[i] = new MFThread();
		}
	}

	void start() { MFThread::_isActive = true; }
	bool isActive() { return MFThread::_isActive; }

	void stop()
	{
		MFThread::_isActive = false;

		if (_threads != nullptr)
		{
			for (unsigned i = 0; i < _maxThreads; i++)
				if (_threads[i] != nullptr)
				{
					_threads[i]->quit();
					_threads[i]->wait();
				}
		}
	}

	unsigned int _maxThreads;
	MFThread** _threads;
};
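
For context, a minimal dispatch sketch (an assumption about intended use, not code from this commit): after initThreads() and start(), a grabber would hand a captured frame to the first non-busy worker and receive the decoded image via the newFrame signal. The frame parameters and the lambda receiver below are placeholders.

#include <QObject>
#include <grabber/MFThread.h>

void dispatchFrame(MFThreadManager& threadManager, uint8_t* frameData, int frameSize,
                   int width, int height, int lineLength)
{
	for (unsigned i = 0; i < threadManager._maxThreads; ++i)
	{
		MFThread* worker = threadManager._threads[i];
		if (worker != nullptr && !worker->isBusy())
		{
			// In real code the connection would be made once, not per frame.
			QObject::connect(worker, &MFThread::newFrame,
				[](unsigned int /*index*/, const Image<ColorRgb>& /*image*/, unsigned int /*count*/)
				{
					// forward the decoded frame, e.g. hand it on to the grabber/wrapper
				});

			worker->setup(i, PixelFormat::MJPEG, frameData, frameSize, width, height, lineLength,
			              0 /*subsamp*/, 0, 0, 0, 0 /*crop*/, VideoMode::VIDEO_2D,
			              0 /*currentFrame*/, 8 /*pixelDecimation*/);
			worker->start(); // QThread::start() invokes MFThread::run() on the worker thread
			break;
		}
	}
}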


@@ -0,0 +1,46 @@
#pragma once

#include <hyperion/GrabberWrapper.h>
#include <grabber/MFGrabber.h>

class MFWrapper : public GrabberWrapper
{
	Q_OBJECT

public:
	MFWrapper(const QString & device, const unsigned grabWidth, const unsigned grabHeight, const unsigned fps, const unsigned input, int pixelDecimation);
	~MFWrapper() override;

	bool getSignalDetectionEnable() const;
	bool getCecDetectionEnable() const;

public slots:
	bool start() override;
	void stop() override;

	void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold);
	void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) override;
	void setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax);
	void setSignalDetectionEnable(bool enable);
	void setCecDetectionEnable(bool enable);
	void setDeviceVideoStandard(const QString& device, VideoStandard videoStandard);
	void handleSettingsUpdate(settings::type type, const QJsonDocument& config) override;

	///
	/// @brief set software decimation (v4l2)
	///
	void setFpsSoftwareDecimation(int decimation);
	void setEncoding(QString enc);
	void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue);

private slots:
	void newFrame(const Image<ColorRgb> & image);
	void action() override;

private:
	/// The Media Foundation grabber
	MFGrabber _grabber;
};
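
And a minimal construction sketch for the wrapper (again an assumption, not code from this commit): the device name, resolution and decimation values are placeholders, and in Hyperion the daemon normally owns and drives this wrapper rather than a standalone main().

#include <QCoreApplication>
#include <grabber/MFWrapper.h>

int main(int argc, char* argv[])
{
	QCoreApplication app(argc, argv);

	// device, grabWidth, grabHeight, fps, input, pixelDecimation (placeholder values)
	MFWrapper grabber("auto", 640, 480, 25, 0, 8);
	grabber.setEncoding("mjpeg");

	if (!grabber.start())
		return 1;

	return app.exec();
}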


@@ -51,15 +51,7 @@ public:
		QStringList framerates = QStringList();
	};
-	V4L2Grabber(const QString & device,
-			const unsigned width,
-			const unsigned height,
-			const unsigned fps,
-			const unsigned input,
-			VideoStandard videoStandard,
-			PixelFormat pixelFormat,
-			int pixelDecimation
-	);
+	V4L2Grabber(const QString & device, const unsigned width, const unsigned height, const unsigned fps, const unsigned input, VideoStandard videoStandard, PixelFormat pixelFormat, int pixelDecimation);
	~V4L2Grabber() override;
	QRectF getSignalDetectionOffset() const


@@ -15,7 +15,7 @@ public:
				const unsigned input,
				VideoStandard videoStandard,
				PixelFormat pixelFormat,
-				int pixelDecimation );
+				int pixelDecimation);
	~V4L2Wrapper() override;
	bool getSignalDetectionEnable() const;


@@ -135,6 +135,13 @@ public:
	///
	virtual QMultiMap<QString, int> getV4L2deviceInputs(const QString& /*devicePath*/) const { return QMultiMap<QString, int>(); }

	///
	/// @brief Get a list of supported hardware encoding formats
	/// @param devicePath The device path
	/// @return List of hardware encoding formats on success else empty List
	///
	virtual QStringList getV4L2EncodingFormats(const QString& /*devicePath*/) const { return QStringList(); }

	///
	/// @brief Get a list of supported device resolutions
	/// @param devicePath The device path


@@ -18,9 +18,9 @@ class Grabber;
class GlobalSignals;
class QTimer;
-/// List of Hyperion instances that requested screen capt
-static QList<int> GRABBER_SYS_CLIENTS;
-static QList<int> GRABBER_V4L_CLIENTS;
+/// Map of Hyperion instances with grabber name that requested screen capture
+static QMap<int, QString> GRABBER_SYS_CLIENTS;
+static QMap<int, QString> GRABBER_V4L_CLIENTS;
///
/// This class will be inherted by FramebufferWrapper and others which contains the real capture interface
@@ -76,6 +76,13 @@ public:
	///
	virtual QMultiMap<QString, int> getV4L2deviceInputs(const QString& devicePath) const;

	///
	/// @brief Get a list of supported hardware encoding formats
	/// @param devicePath The device path
	/// @return List of hardware encoding formats on success else empty List
	///
	virtual QStringList getV4L2EncodingFormats(const QString& devicePath) const;

	///
	/// @brief Get a list of supported device resolutions
	/// @param devicePath The device path
@@ -92,9 +99,10 @@ public:
	///
	/// @brief Get active grabber name
-	/// @return Active grabber name
+	/// @param hyperionInd The instance index
+	/// @return Active grabbers
	///
-	virtual QString getActive() const;
+	virtual QStringList getActive(int inst) const;
	static QStringList availableGrabbers();


@@ -23,32 +23,32 @@ inline PixelFormat parsePixelFormat(const QString& pixelFormat)
	// convert to lower case
	QString format = pixelFormat.toLower();
-	if (format.compare("yuyv") )
+	if (format.compare("yuyv") == 0)
	{
		return PixelFormat::YUYV;
	}
-	else if (format.compare("uyvy") )
+	else if (format.compare("uyvy") == 0)
	{
		return PixelFormat::UYVY;
	}
-	else if (format.compare("bgr16") )
+	else if (format.compare("bgr16") == 0)
	{
		return PixelFormat::BGR16;
	}
-	else if (format.compare("bgr24") )
+	else if (format.compare("bgr24") == 0)
	{
		return PixelFormat::BGR24;
	}
-	else if (format.compare("rgb32") )
+	else if (format.compare("rgb32") == 0)
	{
		return PixelFormat::RGB32;
	}
-	else if (format.compare("bgr32") )
+	else if (format.compare("bgr32") == 0)
	{
		return PixelFormat::BGR32;
	}
#ifdef HAVE_JPEG_DECODER
-	else if (format.compare("mjpeg") )
+	else if (format.compare("mjpeg") == 0)
	{
		return PixelFormat::MJPEG;
	}
@@ -57,3 +57,41 @@ inline PixelFormat parsePixelFormat(const QString& pixelFormat)
	// return the default NO_CHANGE
	return PixelFormat::NO_CHANGE;
}

inline QString pixelFormatToString(const PixelFormat& pixelFormat)
{
	if (pixelFormat == PixelFormat::YUYV)
	{
		return "yuyv";
	}
	else if (pixelFormat == PixelFormat::UYVY)
	{
		return "uyvy";
	}
	else if (pixelFormat == PixelFormat::BGR16)
	{
		return "bgr16";
	}
	else if (pixelFormat == PixelFormat::BGR24)
	{
		return "bgr24";
	}
	else if (pixelFormat == PixelFormat::RGB32)
	{
		return "rgb32";
	}
	else if (pixelFormat == PixelFormat::BGR32)
	{
		return "bgr32";
	}
#ifdef HAVE_JPEG_DECODER
	else if (pixelFormat == PixelFormat::MJPEG)
	{
		return "mjpeg";
	}
#endif
	// return the default NO_CHANGE
	return "NO_CHANGE";
}
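
A small round-trip sketch for the two helpers above (assuming they live in utils/PixelFormat.h, as the includes earlier in this commit suggest); the expected values follow directly from the mappings shown in this hunk:

#include <cassert>
#include <utils/PixelFormat.h>

int main()
{
	// parsePixelFormat lower-cases its input, so the comparison is case-insensitive
	PixelFormat pf = parsePixelFormat("YUYV");
	assert(pf == PixelFormat::YUYV);
	assert(pixelFormatToString(pf) == "yuyv");

	// unknown strings fall through to the default
	assert(parsePixelFormat("unknown") == PixelFormat::NO_CHANGE);
	assert(pixelFormatToString(PixelFormat::NO_CHANGE) == "NO_CHANGE");

	return 0;
}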