Refactor MediaFoundation (Part 2)

Paulchen Panther 2021-01-31 13:49:31 +01:00
parent 07a6d5e5b1
commit 878d4fe0a1
9 changed files with 198 additions and 179 deletions

View File

@@ -53,7 +53,7 @@ SET ( DEFAULT_EXPERIMENTAL OFF )
SET ( DEFAULT_MF OFF )
IF ( ${CMAKE_SYSTEM} MATCHES "Linux" )
SET ( DEFAULT_V4L2 ON )
SET ( DEFAULT_V4L2 OFF ) # TODO: Reactivate when refactor of V4L2 grabber is finished.
SET ( DEFAULT_SPIDEV ON )
SET ( DEFAULT_TINKERFORGE ON )
SET ( DEFAULT_FB ON )

View File

@@ -65,20 +65,35 @@ $(document).ready(function () {
? enumTitelVals.push(v4l2_properties[i]['name'])
: enumTitelVals.push(v4l2_properties[i]['device']);
}
} else if (key == 'resolutions' || key == 'framerates' || key == 'encoding_format') {
} else if (key == 'device_inputs') {
for (var i = 0; i < v4l2_properties.length; i++) {
if (v4l2_properties[i]['device'] == device) {
for (var index = 0; index < v4l2_properties[i]['device_inputs'].length; index++) {
enumVals.push(v4l2_properties[i]['device_inputs'][index]['inputIndex'].toString());
enumTitelVals.push(v4l2_properties[i]['device_inputs'][index]['inputName']);
}
break;
}
}
} else if (key == 'encoding_format') {
for (var i = 0; i < v4l2_properties.length; i++) {
if (v4l2_properties[i]['device'] == device) {
enumVals = enumTitelVals = v4l2_properties[i][key];
break;
}
}
} else if (key == 'device_inputs') {
}
else if (key == 'resolutions') {
for (var i = 0; i < v4l2_properties.length; i++) {
if (v4l2_properties[i]['device'] == device) {
for (var index = 0; index < v4l2_properties[i]['inputs'].length; index++) {
enumVals.push(v4l2_properties[i]['inputs'][index]['inputIndex'].toString());
enumTitelVals.push(v4l2_properties[i]['inputs'][index]['inputName']);
}
enumVals = enumTitelVals = v4l2_properties[i][key];
break;
}
}
} else if (key == 'framerates') {
for (var i = 0; i < v4l2_properties.length; i++) {
if (v4l2_properties[i]['device'] == device) {
enumVals = enumTitelVals = v4l2_properties[i][key];
break;
}
}
@@ -88,7 +103,6 @@ $(document).ready(function () {
"type": schema[key].type,
"title": schema[key].title,
...(schema[key].custom ? {"enum": [].concat(["auto"], enumVals, ["custom"]),} : {"enum": [].concat(["auto"], enumVals),}),
// "enum": [].concat(["auto"], enumVals, ["custom"]),
"options":
{
"enum_titles": [].concat(["edt_conf_enum_automatic"], enumTitelVals, ["edt_conf_enum_custom"]),
@@ -362,9 +376,6 @@ $(document).ready(function () {
conf_editor_fg.on('ready', function () {
var availableGrabbers = window.serverInfo.grabbers.available;
console.log("conf_editor_fg.on->ready, availableGrabbers: ", availableGrabbers);
var fgOptions = conf_editor_fg.getEditor('root.framegrabber');
var orginalGrabberTypes = fgOptions.schema.properties.type.enum;
var orginalGrabberTitles = fgOptions.schema.properties.type.options.enum_titles;
@@ -383,8 +394,6 @@ $(document).ready(function () {
var activeGrabbers = window.serverInfo.grabbers.active.map(v => v.toLowerCase());
console.log("conf_editor_fg.on->ready, activeGrabbers: ", activeGrabbers);
// Select first active platform grabber
for (var i = 0; i < enumVals.length; i++) {
var grabberType = enumVals[i];
@@ -397,13 +406,8 @@
});
conf_editor_fg.on('change', function () {
var selectedType = conf_editor_fg.getEditor("root.framegrabber.type").getValue();
console.log("conf_editor_fg.on->change, selectedType: ", selectedType);
filerFgGrabberOptions(selectedType);
conf_editor_fg.validate().length || window.readOnlyMode ? $('#btn_submit_fg').attr('disabled', true) : $('#btn_submit_fg').attr('disabled', false);
});

View File

@@ -40,20 +40,16 @@ class MFGrabber : public Grabber
friend class SourceReaderCB;
public:
struct DevicePropertiesItem
{
int x, y,fps,fps_a,fps_b;
PixelFormat pf;
GUID guid;
};
struct DeviceProperties
{
QString name = QString();
QMultiMap<QString, int> inputs = QMultiMap<QString, int>();
QStringList displayResolutions = QStringList();
QStringList framerates = QStringList();
QList<DevicePropertiesItem> valid = QList<DevicePropertiesItem>();
QString symlink = QString();
int width = 0;
int height = 0;
int fps = 0;
int numerator = 0;
int denominator = 0;
PixelFormat pf = PixelFormat::NO_CHANGE;
GUID guid = GUID_NULL;
};
MFGrabber(const QString & device, const unsigned width, const unsigned height, const unsigned fps, int pixelDecimation, QString flipMode);
@@ -63,12 +59,11 @@ public:
QRectF getSignalDetectionOffset() const { return QRectF(_x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max); }
bool getSignalDetectionEnabled() const { return _signalDetectionEnabled; }
bool getCecDetectionEnabled() const { return _cecDetectionEnabled; }
QStringList getV4L2devices() const override;
QString getV4L2deviceName(const QString& devicePath) const override { return devicePath; }
QMultiMap<QString, int> getV4L2deviceInputs(const QString& devicePath) const override { return _deviceProperties.value(devicePath).inputs; }
QStringList getResolutions(const QString& devicePath) const override { return _deviceProperties.value(devicePath).displayResolutions; }
QStringList getFramerates(const QString& devicePath) const override { return _deviceProperties.value(devicePath).framerates; }
QStringList getV4L2EncodingFormats(const QString& devicePath) const override;
QStringList getDevices() const override;
QString getDeviceName(const QString& devicePath) const override { return devicePath; }
QStringList getAvailableEncodingFormats(const QString& devicePath, const int& /*deviceInput*/) const override;
QStringList getAvailableDeviceResolutions(const QString& devicePath, const int& /*deviceInput*/, const PixelFormat& encFormat) const override;
QStringList getAvailableDeviceFramerates(const QString& devicePath, const int& /*deviceInput*/, const PixelFormat& encFormat, const unsigned width, const unsigned height) const override;
void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold) override;
void setSignalDetectionOffset( double verticalMin, double horizontalMin, double verticalMax, double horizontalMax) override;
void setSignalDetectionEnable(bool enable) override;
@@ -95,7 +90,7 @@ signals:
private:
bool init();
void uninit();
HRESULT init_device(QString device, DevicePropertiesItem props);
HRESULT init_device(QString device, DeviceProperties props);
void uninit_device();
void enumVideoCaptureDevices();
void start_capturing();
@@ -103,7 +98,7 @@ private:
void checkSignalDetectionEnabled(Image<ColorRgb> image);
QString _currentDeviceName, _newDeviceName;
QMap<QString, MFGrabber::DeviceProperties> _deviceProperties;
QMap<QString, QList<DeviceProperties>> _deviceProperties;
HRESULT _hr;
SourceReaderCB* _sourceReaderCB;
PixelFormat _pixelFormat, _pixelFormatConfig;
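For illustration, a minimal sketch of the flattened device model introduced here: each native capture mode of a device becomes one DeviceProperties record, and _deviceProperties maps a device name to the list of its modes. All names and values below are made up; the GUID assignment assumes the Windows Media Foundation headers already used by this file.

MFGrabber::DeviceProperties mode;
mode.symlink     = "example-symbolic-link";   // hypothetical; real value comes from MF device enumeration
mode.width       = 1280;
mode.height      = 720;
mode.numerator   = 60;                        // frame rate stored as a ratio (60/1)
mode.denominator = 1;
mode.fps         = mode.numerator / mode.denominator;
mode.pf          = PixelFormat::YUYV;
mode.guid        = MFVideoFormat_YUY2;

// One device path now maps to a list of such mode records.
QMap<QString, QList<MFGrabber::DeviceProperties>> deviceProperties;
deviceProperties["Example UVC Camera"].append(mode);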

View File

@@ -15,8 +15,7 @@
///
/// @brief The Grabber class is responsible to apply image resizes (with or without ImageResampler)
/// Overwrite the videoMode with setVideoMode()
/// Overwrite setCropping()
class Grabber : public QObject
{
Q_OBJECT
@@ -127,45 +126,52 @@ public:
void setEnabled(bool enable);
///
/// @brief Get a list of all available V4L devices
/// @return List of all available V4L devices on success else empty List
/// @brief Get a list of all available devices
/// @return List of all available devices on success else empty List
///
virtual QStringList getV4L2devices() const { return QStringList(); }
virtual QStringList getDevices() const { return QStringList(); }
///
/// @brief Get the V4L device name
/// @brief Get the device name by path
/// @param devicePath The device path
/// @return The name of the V4L device on success else empty String
/// @return The name of the device on success else empty String
///
virtual QString getV4L2deviceName(const QString& /*devicePath*/) const { return QString(); }
virtual QString getDeviceName(const QString& /*devicePath*/) const { return QString(); }
///
/// @brief Get a name/index pair of supported device inputs
/// @param devicePath The device path
/// @return multi pair of name/index on success else empty pair
///
virtual QMultiMap<QString, int> getV4L2deviceInputs(const QString& /*devicePath*/) const { return QMultiMap<QString, int>(); }
virtual QMultiMap<QString, int> getDeviceInputs(const QString& /*devicePath*/) const { return {{ "", 0}}; }
///
/// @brief Get a list of supported hardware encoding formats
/// @brief Get a list of all available device encoding formats depending on the device input
/// @param devicePath The device path
/// @return List of hardware encoding formats on success else empty List
/// @param deviceInput The device input index
/// @return List of device encoding formats on success else empty List
///
virtual QStringList getV4L2EncodingFormats(const QString& /*devicePath*/) const { return QStringList(); }
virtual QStringList getAvailableEncodingFormats(const QString& /*devicePath*/, const int& /*deviceInput*/) const { return QStringList(); }
///
/// @brief Get a list of supported device resolutions
/// @brief Get a list of available device resolutions depending on the device input and encoding format
/// @param devicePath The device path
/// @param deviceInput The device input index
/// @param encFormat The device encoding format
/// @return List of resolutions on success else empty List
///
virtual QStringList getResolutions(const QString& /*devicePath*/) const { return QStringList(); }
virtual QStringList getAvailableDeviceResolutions(const QString& /*devicePath*/, const int& /*deviceInput*/, const PixelFormat& /*encFormat*/) const { return QStringList(); }
///
/// @brief Get a list of supported device framerates
/// @brief Get a list of available device framerates depending on the device input, encoding format and resolution
/// @param devicePath The device path
/// @param deviceInput The device input index
/// @param encFormat The device encoding format
/// @param width The device width
/// @param height The device height
/// @return List of framerates on success else empty List
///
virtual QStringList getFramerates(const QString& devicePath) const { return QStringList(); }
virtual QStringList getAvailableDeviceFramerates(const QString& /*devicePath*/, const int& /*deviceInput*/, const PixelFormat& /*encFormat*/, const unsigned /*width*/, const unsigned /*height*/) const { return QStringList(); }
protected:
ImageResampler _imageResampler;
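Taken together, the new virtuals form a cascading discovery chain: inputs depend on the device, encoding formats on the input, resolutions on input and format, and framerates on input, format and resolution. Below is a minimal sketch of walking that chain; dumpCapabilities and grabber are hypothetical names, the project's Grabber and PixelFormat headers are assumed to be included, and parsePixelFormat() is used the same way JsonAPI.cpp uses it.

#include <QMultiMap>
#include <QStringList>

void dumpCapabilities(const Grabber* grabber)
{
	for (const QString& devicePath : grabber->getDevices())
	{
		const QMultiMap<QString, int> inputs = grabber->getDeviceInputs(devicePath);
		for (auto input = inputs.constBegin(); input != inputs.constEnd(); ++input)
		{
			// Encoding formats depend on the selected input ...
			for (const QString& format : grabber->getAvailableEncodingFormats(devicePath, input.value()))
			{
				const PixelFormat encFormat = parsePixelFormat(format);

				// ... resolutions depend on input and encoding format ...
				for (const QString& resolution : grabber->getAvailableDeviceResolutions(devicePath, input.value(), encFormat))
				{
					const QStringList parts = resolution.split("x");

					// ... and framerates depend on input, encoding format and resolution.
					const QStringList framerates = grabber->getAvailableDeviceFramerates(
						devicePath, input.value(), encFormat, parts[0].toUInt(), parts[1].toUInt());
					Q_UNUSED(framerates);
				}
			}
		}
	}
}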

View File

@@ -12,6 +12,7 @@
#include <utils/Image.h>
#include <utils/ColorRgb.h>
#include <utils/VideoMode.h>
#include <utils/PixelFormat.h>
#include <utils/settings.h>
class Grabber;
@@ -57,45 +58,52 @@ public:
virtual bool isActive() const;
///
/// @brief Get a list of all available V4L devices
/// @return List of all available V4L devices on success else empty List
/// @brief Get a list of all available devices
/// @return List of all available devices on success else empty List
///
virtual QStringList getV4L2devices() const;
virtual QStringList getDevices() const;
///
/// @brief Get the V4L device name
/// @brief Get the device name by path
/// @param devicePath The device path
/// @return The name of the V4L device on success else empty String
/// @return The name of the device on success else empty String
///
virtual QString getV4L2deviceName(const QString& devicePath) const;
virtual QString getDeviceName(const QString& devicePath) const;
///
/// @brief Get a name/index pair of supported device inputs
/// @param devicePath The device path
/// @return multi pair of name/index on success else empty pair
///
virtual QMultiMap<QString, int> getV4L2deviceInputs(const QString& devicePath) const;
virtual QMultiMap<QString, int> getDeviceInputs(const QString& devicePath) const;
///
/// @brief Get a list of supported hardware encoding formats
/// @brief Get a list of all available device encoding formats depending on the device input
/// @param devicePath The device path
/// @return List of hardware encoding formats on success else empty List
/// @param deviceInput The device input index
/// @return List of device encoding formats on success else empty List
///
virtual QStringList getV4L2EncodingFormats(const QString& devicePath) const;
virtual QStringList getAvailableEncodingFormats(const QString& devicePath, const int& deviceInput) const;
///
/// @brief Get a list of supported device resolutions
/// @brief Get a list of available device resolutions depending on the device input and encoding format
/// @param devicePath The device path
/// @param deviceInput The device input index
/// @param encFormat The device encoding format
/// @return List of resolutions on success else empty List
///
virtual QStringList getResolutions(const QString& devicePath) const;
virtual QStringList getAvailableDeviceResolutions(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat) const;
///
/// @brief Get a list of supported device framerates
/// @brief Get a list of available device framerates depending on the device input, encoding format and resolution
/// @param devicePath The device path
/// @param deviceInput The device input index
/// @param encFormat The device encoding format
/// @param width The device width
/// @param height The device height
/// @return List of framerates on success else empty List
///
virtual QStringList getFramerates(const QString& devicePath) const;
virtual QStringList getAvailableDeviceFramerates(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat, const unsigned width, const unsigned height) const;
///
/// @brief Get active grabber name

View File

@@ -192,7 +192,7 @@ public slots:
bool clear(int priority, bool forceClearAll=false);
/// #############
// EFFECTENGINE
/// EFFECTENGINE
///
/// @brief Get a pointer to the effect engine
/// @return EffectEngine instance pointer

View File

@@ -491,52 +491,53 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject &message, const QString
#if defined(ENABLE_V4L2) || defined(ENABLE_MF)
QJsonArray availableV4L2devices;
for (const auto& devicePath : GrabberWrapper::getInstance()->getV4L2devices())
QJsonArray availableDevices;
for (const auto& devicePath : GrabberWrapper::getInstance()->getDevices())
{
QJsonObject device;
device["device"] = devicePath;
device["name"] = GrabberWrapper::getInstance()->getV4L2deviceName(devicePath);
device["name"] = GrabberWrapper::getInstance()->getDeviceName(devicePath);
QJsonArray availableInputs;
QMultiMap<QString, int> inputs = GrabberWrapper::getInstance()->getV4L2deviceInputs(devicePath);
QJsonObject availableInputs;
QMultiMap<QString, int> inputs = GrabberWrapper::getInstance()->getDeviceInputs(devicePath);
for (auto input = inputs.begin(); input != inputs.end(); input++)
{
QJsonObject availableInput;
availableInput["inputName"] = input.key();
availableInput["inputIndex"] = input.value();
availableInputs.append(availableInput);
}
device.insert("inputs", availableInputs);
QJsonObject availableEncodingFormats;
availableInputs["inputName"] = input.key();
availableInputs["inputIndex"] = input.value();
QJsonArray availableEncodingFormats;
QStringList encodingFormats = GrabberWrapper::getInstance()->getV4L2EncodingFormats(devicePath);
for (auto encodingFormat : encodingFormats)
{
availableEncodingFormats.append(encodingFormat);
}
device.insert("encoding_format", availableEncodingFormats);
QStringList encodingFormats = GrabberWrapper::getInstance()->getAvailableEncodingFormats(devicePath, input.value());
for (auto encodingFormat : encodingFormats)
{
QJsonArray formats;
QStringList resolutions = GrabberWrapper::getInstance()->getAvailableDeviceResolutions(devicePath, input.value(), parsePixelFormat(encodingFormat));
for (auto resolution : resolutions)
{
QJsonObject format;
format["resolution"] = resolution;
QJsonArray availableResolutions;
QStringList resolutions = GrabberWrapper::getInstance()->getResolutions(devicePath);
for (auto resolution : resolutions)
{
availableResolutions.append(resolution);
}
device.insert("resolutions", availableResolutions);
QJsonArray availableFramerates;
QStringList framerates = GrabberWrapper::getInstance()->getAvailableDeviceFramerates(devicePath, input.value(), parsePixelFormat(encodingFormat), resolution.split("x")[0].toInt(), resolution.split("x")[1].toInt());
for (auto framerate : framerates)
{
availableFramerates.append(framerate);
}
QJsonArray availableFramerates;
QStringList framerates = GrabberWrapper::getInstance()->getFramerates(devicePath);
for (auto framerate : framerates)
{
availableFramerates.append(framerate);
}
device.insert("framerates", availableFramerates);
format["framerates"] = availableFramerates;
formats.append(format);
}
availableV4L2devices.append(device);
availableEncodingFormats[encodingFormat] = formats;
}
availableInputs["encoding_formats"] = availableEncodingFormats;
}
device["device_inputs"] = availableInputs;
availableDevices.append(device);
}
grabbers["v4l2_properties"] = availableV4L2devices;
grabbers["v4l2_properties"] = availableDevices;
#endif
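The serverinfo payload changes shape accordingly: instead of flat per-device lists, each entry under v4l2_properties now nests encoding formats, resolutions and framerates beneath the device input. A sketch of one such entry follows; every value is illustrative (on Windows the default getDeviceInputs() yields the single { "", 0 } input seen in Grabber.h), and the parse step only demonstrates that the sketch is well-formed JSON.

#include <QByteArray>
#include <QJsonDocument>
#include <cstdio>

int main()
{
	// Hypothetical device entry as appended to grabbers["v4l2_properties"].
	const QByteArray exampleEntry = R"json(
	{
		"device": "Example UVC Camera",
		"name": "Example UVC Camera",
		"device_inputs": {
			"inputName": "",
			"inputIndex": 0,
			"encoding_formats": {
				"mjpeg": [
					{ "resolution": "1920x1080", "framerates": [ "30", "60" ] },
					{ "resolution": "1280x720",  "framerates": [ "30", "60" ] }
				],
				"yuyv": [
					{ "resolution": "640x480", "framerates": [ "30" ] }
				]
			}
		}
	}
	)json";

	QJsonParseError parseError;
	const QJsonDocument doc = QJsonDocument::fromJson(exampleEntry, &parseError);
	std::printf("well-formed: %s\n", parseError.error == QJsonParseError::NoError ? "yes" : "no");
	return 0;
}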

View File

@@ -91,26 +91,26 @@ bool MFGrabber::init()
return false;
}
MFGrabber::DeviceProperties dev = _deviceProperties[foundDevice];
QList<DeviceProperties> dev = _deviceProperties[foundDevice];
Debug(_log, "Searching for %s %d x %d @ %d fps (%s)", QSTRING_CSTR(foundDevice), _width, _height,_fps, QSTRING_CSTR(pixelFormatToString(_pixelFormat)));
for( int i = 0; i < dev.valid.count() && foundIndex < 0; ++i )
for( int i = 0; i < dev.count() && foundIndex < 0; ++i )
{
bool strict = false;
const auto& val = dev.valid[i];
const auto& val = dev[i];
if(bestGuess == -1 || (val.x <= bestGuessMinX && val.x >= 640 && val.fps <= bestGuessMinFPS && val.fps >= 10))
if(bestGuess == -1 || (val.width <= bestGuessMinX && val.width >= 640 && val.fps <= bestGuessMinFPS && val.fps >= 10))
{
bestGuess = i;
bestGuessMinFPS = val.fps;
bestGuessMinX = val.x;
bestGuessMinX = val.width;
}
if(_width && _height)
{
strict = true;
if(val.x != _width || val.y != _height)
if(val.width != _width || val.height != _height)
continue;
}
@@ -144,7 +144,7 @@ bool MFGrabber::init()
if(foundIndex>=0)
{
if(SUCCEEDED(init_device(foundDevice, dev.valid[foundIndex])))
if(SUCCEEDED(init_device(foundDevice, dev[foundIndex])))
_initialized = true;
}
else
@@ -164,22 +164,18 @@ void MFGrabber::uninit()
}
}
HRESULT MFGrabber::init_device(QString deviceName, DevicePropertiesItem props)
HRESULT MFGrabber::init_device(QString deviceName, DeviceProperties props)
{
PixelFormat pixelformat = GetPixelFormatForGuid(props.guid);
QString error, guid = _deviceProperties[deviceName].name;
int size = guid.length() + 1024;
wchar_t *name = new wchar_t[size];
memset(name, 0, size);
guid.toWCharArray(name);
QString error;
IMFMediaSource* device = nullptr;
IMFAttributes* deviceAttributes = nullptr, *sourceReaderAttributes = nullptr;
IAMVideoProcAmp *pProcAmp = nullptr;
IMFMediaType* type = nullptr;
HRESULT hr = S_OK;
Debug(_log, "Init %s, %d x %d @ %d fps (%s)", QSTRING_CSTR(deviceName), props.x, props.y, props.fps, QSTRING_CSTR(pixelFormatToString(pixelformat)));
DebugIf(verbose, _log, "Symbolic link: %s", QSTRING_CSTR(guid));
Debug(_log, "Init %s, %d x %d @ %d fps (%s)", QSTRING_CSTR(deviceName), props.width, props.height, props.fps, QSTRING_CSTR(pixelFormatToString(pixelformat)));
DebugIf(verbose, _log, "Symbolic link: %s", QSTRING_CSTR(props.symlink));
hr = MFCreateAttributes(&deviceAttributes, 2);
if(FAILED(hr))
@@ -195,7 +191,7 @@ HRESULT MFGrabber::init_device(QString deviceName, DevicePropertiesItem props)
goto done;
}
if(FAILED(deviceAttributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, (LPCWSTR)name)) && _sourceReaderCB)
if(FAILED(deviceAttributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, (LPCWSTR)props.symlink.utf16())) && _sourceReaderCB)
{
error = QString("IMFAttributes_SetString_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK (%1)").arg(hr);
goto done;
@@ -343,14 +339,14 @@ HRESULT MFGrabber::init_device(QString deviceName, DevicePropertiesItem props)
goto done;
}
hr = MFSetAttributeSize(type, MF_MT_FRAME_SIZE, props.x, props.y);
hr = MFSetAttributeSize(type, MF_MT_FRAME_SIZE, props.width, props.height);
if(FAILED(hr))
{
error = QString("Could not set stream parameter: SMFSetAttributeSize_MF_MT_FRAME_SIZE (%1)").arg(hr);
goto done;
}
hr = MFSetAttributeSize(type, MF_MT_FRAME_RATE, props.fps_a, props.fps_b);
hr = MFSetAttributeSize(type, MF_MT_FRAME_RATE, props.numerator, props.denominator);
if(FAILED(hr))
{
error = QString("Could not set stream parameter: MFSetAttributeSize_MF_MT_FRAME_RATE (%1)").arg(hr);
@@ -386,15 +382,14 @@ done:
else
{
_pixelFormat = props.pf;
_width = props.x;
_height = props.y;
_frameByteSize = props.x * props.y * 3;
_lineLength = props.x * 3;
_width = props.width;
_height = props.height;
_frameByteSize = _width * _height * 3;
_lineLength = _width * 3;
}
// Cleanup
SAFE_RELEASE(deviceAttributes);
delete[] name;
SAFE_RELEASE(device);
SAFE_RELEASE(pProcAmp);
SAFE_RELEASE(type);
@@ -438,11 +433,10 @@ void MFGrabber::enumVideoCaptureDevices()
{
if(SUCCEEDED(devices[i]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &symlink, &length)))
{
QList<DeviceProperties> devicePropertyList;
QString dev = QString::fromUtf16((const ushort*)name);
MFGrabber::DeviceProperties properties;
properties.name = QString::fromUtf16((const ushort*)symlink);
Debug(_log, "Found capture device: %s", QSTRING_CSTR(dev));
IMFMediaSource *pSource = nullptr;
if(SUCCEEDED(devices[i]->ActivateObject(IID_PPV_ARGS(&pSource))))
{
@@ -456,42 +450,27 @@ void MFGrabber::enumVideoCaptureDevices()
break;
GUID format;
UINT64 frame_size;
UINT64 frame_rate;
UINT32 width = 0, height = 0, numerator = 0, denominator = 0;
if( SUCCEEDED(pType->GetGUID(MF_MT_SUBTYPE, &format)) &&
SUCCEEDED(pType->GetUINT64(MF_MT_FRAME_SIZE, &frame_size)) &&
SUCCEEDED(pType->GetUINT64(MF_MT_FRAME_RATE, &frame_rate)) &&
frame_rate > 0)
SUCCEEDED(MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height)) &&
SUCCEEDED(MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &numerator, &denominator)))
{
PixelFormat pixelformat = GetPixelFormatForGuid(format);
DWORD w = frame_size >> 32;
DWORD h = (DWORD) frame_size;
DWORD fr1 = frame_rate >> 32;
DWORD fr2 = (DWORD) frame_rate;
if (pixelformat != PixelFormat::NO_CHANGE)
{
int framerate = fr1/fr2;
QString sFrame = QString::number(framerate).rightJustified(2,' ').trimmed();
QString displayResolutions = QString::number(w).rightJustified(4,' ').trimmed() +"x"+ QString::number(h).rightJustified(4,' ').trimmed();
DeviceProperties properties;
properties.symlink = QString::fromUtf16((const ushort*)symlink);
properties.width = width;
properties.height = height;
properties.fps = numerator / denominator;
properties.numerator = numerator;
properties.denominator = denominator;
properties.pf = pixelformat;
properties.guid = format;
devicePropertyList.append(properties);
if (!properties.displayResolutions.contains(displayResolutions))
properties.displayResolutions << displayResolutions;
if (!properties.framerates.contains(sFrame))
properties.framerates << sFrame;
DevicePropertiesItem di;
di.x = w;
di.y = h;
di.fps = framerate;
di.fps_a = fr1;
di.fps_b = fr2;
di.pf = pixelformat;
di.guid = format;
properties.valid.append(di);
DebugIf(verbose, _log, "%s %d x %d @ %d fps (%s)", QSTRING_CSTR(dev), di.x, di.y, di.fps, QSTRING_CSTR(pixelFormatToString(di.pf)));
DebugIf(verbose, _log, "%s %d x %d @ %d fps (%s)", QSTRING_CSTR(dev), properties.width, properties.height, properties.fps, QSTRING_CSTR(pixelFormatToString(properties.pf)));
}
}
@@ -502,9 +481,7 @@ void MFGrabber::enumVideoCaptureDevices()
pSource->Release();
}
properties.displayResolutions.sort();
properties.framerates.sort();
_deviceProperties.insert(dev, properties);
_deviceProperties.insert(dev, devicePropertyList);
}
CoTaskMemFree(symlink);
@@ -710,7 +687,7 @@ void MFGrabber::checkSignalDetectionEnabled(Image<ColorRgb> image)
emit newFrame(image);
}
QStringList MFGrabber::getV4L2devices() const
QStringList MFGrabber::getDevices() const
{
QStringList result = QStringList();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
@@ -719,13 +696,41 @@ QStringList MFGrabber::getV4L2devices() const
return result;
}
QStringList MFGrabber::getV4L2EncodingFormats(const QString& devicePath) const
QStringList MFGrabber::getAvailableEncodingFormats(const QString& devicePath, const int& /*device input not used on windows*/) const
{
QStringList result = QStringList();
for(int i = 0; i < _deviceProperties[devicePath].valid.count(); ++i )
if(!result.contains(pixelFormatToString(_deviceProperties[devicePath].valid[i].pf), Qt::CaseInsensitive))
result << pixelFormatToString(_deviceProperties[devicePath].valid[i].pf).toLower();
for(int i = 0; i < _deviceProperties[devicePath].count(); ++i )
if(!result.contains(pixelFormatToString(_deviceProperties[devicePath][i].pf), Qt::CaseInsensitive))
result << pixelFormatToString(_deviceProperties[devicePath][i].pf).toLower();
return result;
}
QStringList MFGrabber::getAvailableDeviceResolutions(const QString& devicePath, const int& /*device input not used on windows*/, const PixelFormat& encFormat) const
{
QStringList result = QStringList();
for(int i = 0; i < _deviceProperties[devicePath].count(); ++i )
{
QString displayResolutions = QString::number(_deviceProperties[devicePath][i].width) +"x"+ QString::number(_deviceProperties[devicePath][i].height);
if(!result.contains(displayResolutions, Qt::CaseInsensitive) && _deviceProperties[devicePath][i].pf == encFormat)
result << displayResolutions;
}
return result;
}
QStringList MFGrabber::getAvailableDeviceFramerates(const QString& devicePath, const int& /*device input not used on windows*/, const PixelFormat& encFormat, const unsigned width, const unsigned height) const
{
QStringList result = QStringList();
for(int i = 0; i < _deviceProperties[devicePath].count(); ++i )
{
QString fps = QString::number(_deviceProperties[devicePath][i].numerator / _deviceProperties[devicePath][i].denominator);
if(!result.contains(fps, Qt::CaseInsensitive) && _deviceProperties[devicePath][i].pf == encFormat && _deviceProperties[devicePath][i].width == width && _deviceProperties[devicePath][i].height == height)
result << fps;
}
return result;
}
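The enumeration above now relies on the MFGetAttributeSize/MFGetAttributeRatio helpers instead of splitting the packed 64-bit MF_MT_FRAME_SIZE and MF_MT_FRAME_RATE values by hand. A small standalone sketch of those helpers, assuming a Windows SDK build linked against mfplat and mfuuid; the media type and values here are synthetic, not read from a device.

#include <windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <cstdio>

int main()
{
	MFStartup(MF_VERSION, MFSTARTUP_FULL);

	IMFMediaType* type = nullptr;
	if (SUCCEEDED(MFCreateMediaType(&type)))
	{
		// Pack width/height and the frame-rate ratio into the two UINT64 attributes ...
		MFSetAttributeSize(type, MF_MT_FRAME_SIZE, 1920, 1080);
		MFSetAttributeRatio(type, MF_MT_FRAME_RATE, 60, 1);

		// ... and read them back the same way enumVideoCaptureDevices() does.
		UINT32 width = 0, height = 0, numerator = 0, denominator = 0;
		MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height);
		MFGetAttributeRatio(type, MF_MT_FRAME_RATE, &numerator, &denominator);

		std::printf("%u x %u @ %u fps\n", width, height, numerator / denominator);
		type->Release();
	}

	MFShutdown();
	return 0;
}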

View File

@@ -222,50 +222,50 @@ void GrabberWrapper::tryStart()
}
}
QStringList GrabberWrapper::getV4L2devices() const
QStringList GrabberWrapper::getDevices() const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2devices();
return _ggrabber->getDevices();
return QStringList();
}
QString GrabberWrapper::getV4L2deviceName(const QString& devicePath) const
QString GrabberWrapper::getDeviceName(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2deviceName(devicePath);
return _ggrabber->getDeviceName(devicePath);
return QString();
}
QMultiMap<QString, int> GrabberWrapper::getV4L2deviceInputs(const QString& devicePath) const
QMultiMap<QString, int> GrabberWrapper::getDeviceInputs(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2deviceInputs(devicePath);
return _ggrabber->getDeviceInputs(devicePath);
return QMultiMap<QString, int>();
return {{ "", 0}};
}
QStringList GrabberWrapper::getV4L2EncodingFormats(const QString& devicePath) const
QStringList GrabberWrapper::getAvailableEncodingFormats(const QString& devicePath, const int& deviceInput) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2EncodingFormats(devicePath);
return _ggrabber->getAvailableEncodingFormats(devicePath, deviceInput);
return QStringList();
}
QStringList GrabberWrapper::getResolutions(const QString& devicePath) const
QStringList GrabberWrapper::getAvailableDeviceResolutions(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getResolutions(devicePath);
return _ggrabber->getAvailableDeviceResolutions(devicePath, deviceInput, encFormat);
return QStringList();
}
QStringList GrabberWrapper::getFramerates(const QString& devicePath) const
QStringList GrabberWrapper::getAvailableDeviceFramerates(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat, const unsigned width, const unsigned height) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getFramerates(devicePath);
return _ggrabber->getAvailableDeviceFramerates(devicePath, deviceInput, encFormat, width, height);
return QStringList();
}