Mirror of https://github.com/hyperion-project/hyperion.ng.git, synced 2025-03-01 10:33:28 +00:00
Merge remote-tracking branch 'origin/grabberDiscovery' into mediafoundation
@@ -14,13 +14,13 @@ if (ENABLE_OSX)
	add_subdirectory(osx)
endif(ENABLE_OSX)

if (ENABLE_V4L2)
	add_subdirectory(v4l2)
endif (ENABLE_V4L2)
# if (ENABLE_V4L2)
# 	add_subdirectory(v4l2)
# endif (ENABLE_V4L2)

if (ENABLE_MF)
	add_subdirectory(mediafoundation)
endif (ENABLE_MF)
if (ENABLE_V4L2 OR ENABLE_MF)
	add_subdirectory(video)
endif ()

if (ENABLE_X11)
	add_subdirectory(x11)

@@ -1,12 +1,13 @@
|
||||
#include <windows.h>
|
||||
#include <grabber/DirectXGrabber.h>
|
||||
#include <QImage>
|
||||
|
||||
#pragma comment(lib, "d3d9.lib")
|
||||
#pragma comment(lib,"d3dx9.lib")
|
||||
|
||||
DirectXGrabber::DirectXGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display)
|
||||
: Grabber("DXGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
|
||||
, _pixelDecimation(pixelDecimation)
|
||||
, _display(unsigned(display))
|
||||
, _displayWidth(0)
|
||||
, _displayHeight(0)
|
||||
, _srcRect(0)
|
||||
@@ -43,6 +44,8 @@ bool DirectXGrabber::setupDisplay()
|
||||
|
||||
D3DDISPLAYMODE ddm;
|
||||
D3DPRESENT_PARAMETERS d3dpp;
|
||||
HMONITOR hMonitor = nullptr;
|
||||
MONITORINFO monitorInfo = { 0 };
|
||||
|
||||
if ((_d3d9 = Direct3DCreate9(D3D_SDK_VERSION)) == nullptr)
|
||||
{
|
||||
@@ -50,7 +53,17 @@ bool DirectXGrabber::setupDisplay()
|
||||
return false;
|
||||
}
|
||||
|
||||
if (FAILED(_d3d9->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &ddm)))
|
||||
SecureZeroMemory(&monitorInfo, sizeof(monitorInfo));
|
||||
monitorInfo.cbSize = sizeof(MONITORINFO);
|
||||
|
||||
hMonitor = _d3d9->GetAdapterMonitor(_display);
|
||||
if (hMonitor == nullptr || GetMonitorInfo(hMonitor, &monitorInfo) == FALSE)
|
||||
{
|
||||
Info(_log, "Specified display %d is not available. Primary display %d is used", _display, D3DADAPTER_DEFAULT);
|
||||
_display = D3DADAPTER_DEFAULT;
|
||||
}
|
||||
|
||||
if (FAILED(_d3d9->GetAdapterDisplayMode(_display, &ddm)))
|
||||
{
|
||||
Error(_log, "Failed to get current display mode");
|
||||
return false;
|
||||
@@ -69,7 +82,7 @@ bool DirectXGrabber::setupDisplay()
|
||||
d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
|
||||
d3dpp.FullScreen_RefreshRateInHz = D3DPRESENT_RATE_DEFAULT;
|
||||
|
||||
if (FAILED(_d3d9->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, nullptr, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &_device)))
|
||||
if (FAILED(_d3d9->CreateDevice(_display, D3DDEVTYPE_HAL, nullptr, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &_device)))
|
||||
{
|
||||
Error(_log, "CreateDevice failed");
|
||||
return false;
|
||||
@@ -147,12 +160,11 @@ int DirectXGrabber::grabFrame(Image<ColorRgb> & image)
|
||||
return 0;
|
||||
}
|
||||
|
||||
memcpy(image.memptr(), lockedRect.pBits, _width * _height * 3);
|
||||
for(int i=0 ; i < _height ; i++)
|
||||
memcpy((unsigned char*)image.memptr() + i * _width * 3, (unsigned char*)lockedRect.pBits + i * lockedRect.Pitch, _width * 3);
|
||||
|
||||
for (int idx = 0; idx < _width * _height; idx++)
|
||||
{
|
||||
const ColorRgb & color = image.memptr()[idx];
|
||||
image.memptr()[idx] = ColorRgb{color.blue, color.green, color.red};
|
||||
}
|
||||
image.memptr()[idx] = ColorRgb{image.memptr()[idx].blue, image.memptr()[idx].green, image.memptr()[idx].red};
|
||||
|
||||
if (FAILED(_surfaceDest->UnlockRect()))
|
||||
{
|
||||
@@ -179,3 +191,12 @@ void DirectXGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned
|
||||
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
|
||||
setupDisplay();
|
||||
}
|
||||
|
||||
void DirectXGrabber::setDisplayIndex(int index)
|
||||
{
|
||||
if(_display != unsigned(index))
|
||||
{
|
||||
_display = unsigned(index);
|
||||
setupDisplay();
|
||||
}
|
||||
}
|
||||
|
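The grabFrame() hunk above replaces the single flat memcpy with a per-row copy that honours the Direct3D surface pitch, followed by an in-place blue/red channel swap. A minimal standalone sketch of that pattern (the function and parameter names are illustrative, not part of the codebase):

#include <cstdint>
#include <cstring>
#include <utility>

// Copy a pitched BGR surface into a tightly packed buffer, then swap B and R in place.
// 'pitch' is the source bytes-per-row and may be larger than width * 3.
void copyPitchedBgrAsRgb(const uint8_t* src, int pitch, uint8_t* dst, int width, int height)
{
	for (int y = 0; y < height; ++y)
	{
		// destination rows are tightly packed: width * 3 bytes each
		std::memcpy(dst + y * width * 3, src + y * pitch, static_cast<size_t>(width) * 3);
	}

	for (int i = 0; i < width * height; ++i)
	{
		std::swap(dst[i * 3 + 0], dst[i * 3 + 2]); // BGR -> RGB
	}
}
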
@@ -1,16 +0,0 @@
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/mediafoundation)

FILE ( GLOB MF_SOURCES "${CURRENT_HEADER_DIR}/MF*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" )

add_library(mf-grabber ${MF_SOURCES} )

target_link_libraries(mf-grabber
	hyperion
	${QT_LIBRARIES}
)

if(TURBOJPEG_FOUND)
	target_link_libraries(mf-grabber ${TurboJPEG_LIBRARY})
endif(TURBOJPEG_FOUND)

@@ -1,170 +0,0 @@
|
||||
#include <QMetaType>
|
||||
|
||||
#include <grabber/MFWrapper.h>
|
||||
|
||||
// qt
|
||||
#include <QTimer>
|
||||
|
||||
MFWrapper::MFWrapper(const QString &device, unsigned grabWidth, unsigned grabHeight, unsigned fps, int pixelDecimation, QString flipMode)
|
||||
: GrabberWrapper("V4L2:MEDIA_FOUNDATION", &_grabber, grabWidth, grabHeight, 10)
|
||||
, _grabber(device, grabWidth, grabHeight, fps, pixelDecimation, flipMode)
|
||||
{
|
||||
_ggrabber = &_grabber;
|
||||
|
||||
// register the image type
|
||||
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
|
||||
|
||||
// Handle the image in the captured thread using a direct connection
|
||||
connect(&_grabber, &MFGrabber::newFrame, this, &MFWrapper::newFrame, Qt::DirectConnection);
|
||||
}
|
||||
|
||||
MFWrapper::~MFWrapper()
|
||||
{
|
||||
stop();
|
||||
}
|
||||
|
||||
bool MFWrapper::start()
|
||||
{
|
||||
return ( _grabber.start() && GrabberWrapper::start());
|
||||
}
|
||||
|
||||
void MFWrapper::stop()
|
||||
{
|
||||
_grabber.stop();
|
||||
GrabberWrapper::stop();
|
||||
}
|
||||
|
||||
void MFWrapper::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
|
||||
{
|
||||
_grabber.setSignalThreshold( redSignalThreshold, greenSignalThreshold, blueSignalThreshold, noSignalCounterThreshold);
|
||||
}
|
||||
|
||||
void MFWrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
|
||||
{
|
||||
_grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom);
|
||||
}
|
||||
|
||||
void MFWrapper::setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax)
|
||||
{
|
||||
_grabber.setSignalDetectionOffset(verticalMin, horizontalMin, verticalMax, horizontalMax);
|
||||
}
|
||||
|
||||
void MFWrapper::newFrame(const Image<ColorRgb> &image)
|
||||
{
|
||||
emit systemImage(_grabberName, image);
|
||||
}
|
||||
|
||||
void MFWrapper::action()
|
||||
{
|
||||
// dummy
|
||||
}
|
||||
|
||||
void MFWrapper::setSignalDetectionEnable(bool enable)
|
||||
{
|
||||
_grabber.setSignalDetectionEnable(enable);
|
||||
}
|
||||
|
||||
bool MFWrapper::getSignalDetectionEnable() const
|
||||
{
|
||||
return _grabber.getSignalDetectionEnabled();
|
||||
}
|
||||
|
||||
void MFWrapper::setCecDetectionEnable(bool enable)
|
||||
{
|
||||
_grabber.setCecDetectionEnable(enable);
|
||||
}
|
||||
|
||||
bool MFWrapper::getCecDetectionEnable() const
|
||||
{
|
||||
return _grabber.getCecDetectionEnabled();
|
||||
}
|
||||
|
||||
bool MFWrapper::setDevice(const QString& device)
|
||||
{
|
||||
return _grabber.setDevice(device);
|
||||
}
|
||||
|
||||
void MFWrapper::setFpsSoftwareDecimation(int decimation)
|
||||
{
|
||||
_grabber.setFpsSoftwareDecimation(decimation);
|
||||
}
|
||||
|
||||
bool MFWrapper::setEncoding(QString enc)
|
||||
{
|
||||
return _grabber.setEncoding(enc);
|
||||
}
|
||||
|
||||
bool MFWrapper::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue)
|
||||
{
|
||||
return _grabber.setBrightnessContrastSaturationHue(brightness, contrast, saturation, hue);
|
||||
}
|
||||
|
||||
void MFWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
|
||||
{
|
||||
if(type == settings::V4L2 && _grabberName.startsWith("V4L2"))
|
||||
{
|
||||
// extract settings
|
||||
const QJsonObject& obj = config.object();
|
||||
// reload state
|
||||
bool reload = false;
|
||||
|
||||
// device name, video standard
|
||||
if (_grabber.setDevice(obj["device"].toString("auto")))
|
||||
reload = true;
|
||||
|
||||
// device input
|
||||
_grabber.setInput(obj["input"].toInt(-1));
|
||||
|
||||
// device resolution
|
||||
if (_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0)))
|
||||
reload = true;
|
||||
|
||||
// device framerate
|
||||
if (_grabber.setFramerate(obj["fps"].toInt(15)))
|
||||
reload = true;
|
||||
|
||||
// image size decimation
|
||||
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
|
||||
|
||||
// flip mode
|
||||
_grabber.setFlipMode(obj["flip"].toString("NO_CHANGE"));
|
||||
|
||||
// image cropping
|
||||
_grabber.setCropping(
|
||||
obj["cropLeft"].toInt(0),
|
||||
obj["cropRight"].toInt(0),
|
||||
obj["cropTop"].toInt(0),
|
||||
obj["cropBottom"].toInt(0));
|
||||
|
||||
// Brightness, Contrast, Saturation, Hue
|
||||
if (_grabber.setBrightnessContrastSaturationHue(obj["hardware_brightness"].toInt(0), obj["hardware_contrast"].toInt(0), obj["hardware_saturation"].toInt(0), obj["hardware_hue"].toInt(0)))
|
||||
reload = true;
|
||||
|
||||
// CEC Standby
|
||||
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
|
||||
|
||||
// software frame skipping
|
||||
_grabber.setFpsSoftwareDecimation(obj["fpsSoftwareDecimation"].toInt(1));
|
||||
|
||||
// Signal detection
|
||||
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
|
||||
_grabber.setSignalDetectionOffset(
|
||||
obj["sDHOffsetMin"].toDouble(0.25),
|
||||
obj["sDVOffsetMin"].toDouble(0.25),
|
||||
obj["sDHOffsetMax"].toDouble(0.75),
|
||||
obj["sDVOffsetMax"].toDouble(0.75));
|
||||
_grabber.setSignalThreshold(
|
||||
obj["redSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["blueSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["noSignalCounterThreshold"].toInt(50) );
|
||||
|
||||
// Hardware encoding format
|
||||
if (_grabber.setEncoding(obj["encoding"].toString("NO_CHANGE")))
|
||||
reload = true;
|
||||
|
||||
// Reload the Grabber if any settings have been changed that require it
|
||||
if (reload)
|
||||
_grabber.reloadGrabber();
|
||||
}
|
||||
}
|
@@ -7,10 +7,18 @@
|
||||
#include <QGuiApplication>
|
||||
#include <QWidget>
|
||||
#include <QScreen>
|
||||
#include <QJsonObject>
|
||||
#include <QJsonArray>
|
||||
#include <QJsonDocument>
|
||||
|
||||
// Constants
|
||||
namespace {
|
||||
const bool verbose = false;
|
||||
} //End of constants
|
||||
|
||||
QtGrabber::QtGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display)
|
||||
: Grabber("QTGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
|
||||
, _display(unsigned(display))
|
||||
, _display(display)
|
||||
, _pixelDecimation(pixelDecimation)
|
||||
, _calculatedWidth(0)
|
||||
, _calculatedHeight(0)
|
||||
@@ -18,12 +26,12 @@ QtGrabber::QtGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, i
|
||||
, _src_y(0)
|
||||
, _src_x_max(0)
|
||||
, _src_y_max(0)
|
||||
, _isWayland(false)
|
||||
, _screen(nullptr)
|
||||
, _isVirtual(false)
|
||||
{
|
||||
_logger = Logger::getInstance("Qt");
|
||||
_useImageResampler = false;
|
||||
|
||||
// init
|
||||
setupDisplay();
|
||||
}
|
||||
|
||||
QtGrabber::~QtGrabber()
|
||||
@@ -36,51 +44,97 @@ void QtGrabber::freeResources()
|
||||
// Qt seems to hold the ownership of the QScreen pointers
|
||||
}
|
||||
|
||||
bool QtGrabber::open()
|
||||
{
|
||||
bool rc = false;
|
||||
|
||||
if (getenv("WAYLAND_DISPLAY") != nullptr)
|
||||
{
|
||||
_isWayland = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
rc = true;
|
||||
}
|
||||
return rc;
|
||||
}
|
||||
|
||||
bool QtGrabber::setupDisplay()
|
||||
{
|
||||
// cleanup last screen
|
||||
freeResources();
|
||||
|
||||
QScreen* primary = QGuiApplication::primaryScreen();
|
||||
QList<QScreen *> screens = QGuiApplication::screens();
|
||||
// inject main screen at 0, if not nullptr
|
||||
if(primary != nullptr)
|
||||
bool result = false;
|
||||
if ( ! open() )
|
||||
{
|
||||
screens.prepend(primary);
|
||||
// remove last main screen if twice in list
|
||||
if(screens.lastIndexOf(primary) > 0)
|
||||
screens.removeAt(screens.lastIndexOf(primary));
|
||||
if ( _isWayland )
|
||||
{
|
||||
Error(_log, "Grabber does not work under Wayland!");
|
||||
}
|
||||
}
|
||||
|
||||
if(screens.isEmpty())
|
||||
else
|
||||
{
|
||||
Error(_log, "No displays found to capture from!");
|
||||
return false;
|
||||
// cleanup last screen
|
||||
freeResources();
|
||||
_numberOfSDisplays = 0;
|
||||
|
||||
QScreen* primary = QGuiApplication::primaryScreen();
|
||||
QList<QScreen *> screens = QGuiApplication::screens();
|
||||
// inject main screen at 0, if not nullptr
|
||||
if(primary != nullptr)
|
||||
{
|
||||
screens.prepend(primary);
|
||||
// remove last main screen if twice in list
|
||||
if(screens.lastIndexOf(primary) > 0)
|
||||
{
|
||||
screens.removeAt(screens.lastIndexOf(primary));
|
||||
}
|
||||
}
|
||||
|
||||
if(screens.isEmpty())
|
||||
{
|
||||
Error(_log, "No displays found to capture from!");
|
||||
result = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
_numberOfSDisplays = screens.size();
|
||||
|
||||
Info(_log,"Available Displays:");
|
||||
int index = 0;
|
||||
for(auto * screen : qAsConst(screens))
|
||||
{
|
||||
const QRect geo = screen->geometry();
|
||||
Info(_log,"Display %d: Name:%s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth());
|
||||
++index;
|
||||
}
|
||||
|
||||
_isVirtual = false;
|
||||
// be sure the index is available
|
||||
if (_display > _numberOfSDisplays - 1 )
|
||||
{
|
||||
|
||||
if (screens.at(0)->size() != screens.at(0)->virtualSize())
|
||||
{
|
||||
Info(_log, "Using virtual display across all screens");
|
||||
_isVirtual = true;
|
||||
_display = 0;
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display);
|
||||
_display = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// init the requested display
|
||||
_screen = screens.at(_display);
|
||||
connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged);
|
||||
updateScreenDimensions(true);
|
||||
|
||||
Info(_log,"Initialized display %d", _display);
|
||||
result = true;
|
||||
}
|
||||
}
|
||||
|
||||
Info(_log,"Available Displays:");
|
||||
int index = 0;
|
||||
for(auto screen : screens)
|
||||
{
|
||||
const QRect geo = screen->geometry();
|
||||
Info(_log,"Display %d: Name:%s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth());
|
||||
index++;
|
||||
}
|
||||
|
||||
// be sure the index is available
|
||||
if(_display > unsigned(screens.size()-1))
|
||||
{
|
||||
Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display);
|
||||
_display = 0;
|
||||
}
|
||||
|
||||
// init the requested display
|
||||
_screen = screens.at(_display);
|
||||
connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged);
|
||||
updateScreenDimensions(true);
|
||||
|
||||
Info(_log,"Initialized display %d", _display);
|
||||
return true;
|
||||
return result;
|
||||
}
|
||||
|
||||
void QtGrabber::geometryChanged(const QRect &geo)
|
||||
@@ -91,30 +145,48 @@ void QtGrabber::geometryChanged(const QRect &geo)
|
||||
|
||||
int QtGrabber::grabFrame(Image<ColorRgb> & image)
|
||||
{
|
||||
if (!_enabled) return 0;
|
||||
if (!_enabled)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
if(_screen == nullptr)
|
||||
{
|
||||
// reinit, this will disable capture on failure
|
||||
setEnabled(setupDisplay());
|
||||
bool result = setupDisplay();
|
||||
setEnabled(result);
|
||||
return -1;
|
||||
}
|
||||
|
||||
QPixmap originalPixmap = _screen->grabWindow(0, _src_x, _src_y, _src_x_max, _src_y_max);
|
||||
QImage imageFrame = originalPixmap.toImage().scaled(_calculatedWidth, _calculatedHeight).convertToFormat( QImage::Format_RGB888);
|
||||
image.resize(_calculatedWidth, _calculatedHeight);
|
||||
image.resize(static_cast<uint>(_calculatedWidth), static_cast<uint>(_calculatedHeight));
|
||||
|
||||
for (int y = 0; y < imageFrame.height(); y++)
|
||||
memcpy((unsigned char*)image.memptr() + y * image.width() * 3, (unsigned char*)imageFrame.scanLine(y), imageFrame.width() * 3);
|
||||
{
|
||||
memcpy((unsigned char*)image.memptr() + y * image.width() * 3, static_cast<unsigned char*>(imageFrame.scanLine(y)), imageFrame.width() * 3);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int QtGrabber::updateScreenDimensions(bool force)
|
||||
{
|
||||
if(!_screen)
|
||||
if(_screen == nullptr)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
|
||||
const QRect& geo = _screen->geometry();
|
||||
QRect geo;
|
||||
|
||||
if (_isVirtual)
|
||||
{
|
||||
geo = _screen->virtualGeometry();
|
||||
}
|
||||
else
|
||||
{
|
||||
geo = _screen->geometry();
|
||||
}
|
||||
if (!force && _width == geo.width() && _height == geo.height())
|
||||
{
|
||||
// No update required
|
||||
@@ -125,7 +197,8 @@ int QtGrabber::updateScreenDimensions(bool force)
|
||||
_width = geo.width();
|
||||
_height = geo.height();
|
||||
|
||||
int width=0, height=0;
|
||||
int width=0;
|
||||
int height=0;
|
||||
|
||||
// Image scaling is performed by Qt
|
||||
width = (_width > (_cropLeft + _cropRight))
|
||||
@@ -177,11 +250,6 @@ void QtGrabber::setVideoMode(VideoMode mode)
|
||||
updateScreenDimensions(true);
|
||||
}
|
||||
|
||||
void QtGrabber::setPixelDecimation(int pixelDecimation)
|
||||
{
|
||||
_pixelDecimation = pixelDecimation;
|
||||
}
|
||||
|
||||
void QtGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
|
||||
{
|
||||
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
|
||||
@@ -190,9 +258,108 @@ void QtGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned crop
|
||||
|
||||
void QtGrabber::setDisplayIndex(int index)
|
||||
{
|
||||
if(_display != unsigned(index))
|
||||
if (_display != index)
|
||||
{
|
||||
_display = unsigned(index);
|
||||
if (index <= _numberOfSDisplays)
|
||||
{
|
||||
_display = index;
|
||||
}
|
||||
else {
|
||||
_display = 0;
|
||||
}
|
||||
setupDisplay();
|
||||
}
|
||||
}
|
||||
|
||||
QJsonObject QtGrabber::discover(const QJsonObject& params)
|
||||
{
|
||||
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
|
||||
|
||||
QJsonObject inputsDiscovered;
|
||||
if ( open() )
|
||||
{
|
||||
QList<QScreen*> screens = QGuiApplication::screens();
|
||||
|
||||
inputsDiscovered["device"] = "qt";
|
||||
inputsDiscovered["device_name"] = "QT";
|
||||
inputsDiscovered["type"] = "screen";
|
||||
|
||||
QJsonArray video_inputs;
|
||||
|
||||
if (!screens.isEmpty())
|
||||
{
|
||||
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
|
||||
|
||||
for (int i = 0; i < screens.size(); ++i)
|
||||
{
|
||||
QJsonObject in;
|
||||
|
||||
QString name = screens.at(i)->name();
|
||||
|
||||
int pos = name.lastIndexOf('\\');
|
||||
if (pos != -1)
|
||||
{
|
||||
name = name.right(name.length()-pos-1);
|
||||
}
|
||||
|
||||
in["name"] = name;
|
||||
in["inputIdx"] = i;
|
||||
|
||||
QJsonArray formats;
|
||||
QJsonObject format;
|
||||
|
||||
QJsonArray resolutionArray;
|
||||
|
||||
QJsonObject resolution;
|
||||
|
||||
resolution["width"] = screens.at(i)->size().width();
|
||||
resolution["height"] = screens.at(i)->size().height();
|
||||
resolution["fps"] = fps;
|
||||
|
||||
resolutionArray.append(resolution);
|
||||
|
||||
format["resolutions"] = resolutionArray;
|
||||
formats.append(format);
|
||||
|
||||
in["formats"] = formats;
|
||||
video_inputs.append(in);
|
||||
}
|
||||
|
||||
if (screens.at(0)->size() != screens.at(0)->virtualSize())
|
||||
{
|
||||
QJsonObject in;
|
||||
in["name"] = "All Displays";
|
||||
in["inputIdx"] = screens.size();
|
||||
in["virtual"] = true;
|
||||
|
||||
QJsonArray formats;
|
||||
QJsonObject format;
|
||||
|
||||
QJsonArray resolutionArray;
|
||||
|
||||
QJsonObject resolution;
|
||||
|
||||
resolution["width"] = screens.at(0)->virtualSize().width();
|
||||
resolution["height"] = screens.at(0)->virtualSize().height();
|
||||
resolution["fps"] = fps;
|
||||
|
||||
resolutionArray.append(resolution);
|
||||
|
||||
format["resolutions"] = resolutionArray;
|
||||
formats.append(format);
|
||||
|
||||
in["formats"] = formats;
|
||||
video_inputs.append(in);
|
||||
}
|
||||
inputsDiscovered["video_inputs"] = video_inputs;
|
||||
}
|
||||
else
|
||||
{
|
||||
DebugIf(verbose, _log, "No displays found to capture from!");
|
||||
}
|
||||
}
|
||||
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
|
||||
|
||||
return inputsDiscovered;
|
||||
|
||||
}
|
||||
|
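The QtGrabber changes above add a Wayland guard in open(), keep the primary screen at index 0 and fall back to display 0 (or the virtual desktop) when the requested index does not exist. A compact sketch of that selection logic, assuming a running Qt GUI application (the helper name is illustrative):

#include <QGuiApplication>
#include <QList>
#include <QScreen>
#include <cstdlib>

// Pick the QScreen to capture from, or return nullptr when grabbing is not possible.
QScreen* selectCaptureScreen(int requestedIndex)
{
	if (std::getenv("WAYLAND_DISPLAY") != nullptr)
	{
		return nullptr; // screen grabbing does not work under Wayland
	}

	QList<QScreen*> screens = QGuiApplication::screens();
	QScreen* primary = QGuiApplication::primaryScreen();
	if (primary != nullptr)
	{
		screens.removeAll(primary); // avoid listing the primary screen twice
		screens.prepend(primary);   // primary screen always becomes index 0
	}

	if (screens.isEmpty())
	{
		return nullptr; // no displays found to capture from
	}

	if (requestedIndex < 0 || requestedIndex >= screens.size())
	{
		requestedIndex = 0; // requested display not available, fall back to display 0
	}
	return screens.at(requestedIndex);
}
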
@@ -5,6 +5,11 @@ QtWrapper::QtWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, i
	, _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation, display)
{}

bool QtWrapper::open()
{
	return _grabber.open();
}

void QtWrapper::action()
{
	transferFrame(_grabber);

@@ -1,18 +0,0 @@
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/v4l2)

FILE ( GLOB V4L2_SOURCES "${CURRENT_HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" )

add_library(v4l2-grabber ${V4L2_SOURCES} )

target_link_libraries(v4l2-grabber
	hyperion
	${QT_LIBRARIES}
)

if(TURBOJPEG_FOUND)
	target_link_libraries(v4l2-grabber ${TurboJPEG_LIBRARY})
elseif (JPEG_FOUND)
	target_link_libraries(v4l2-grabber ${JPEG_LIBRARY})
endif(TURBOJPEG_FOUND)

@@ -1,156 +0,0 @@
|
||||
#include <QMetaType>
|
||||
|
||||
#include <grabber/V4L2Wrapper.h>
|
||||
|
||||
// qt
|
||||
#include <QTimer>
|
||||
|
||||
V4L2Wrapper::V4L2Wrapper(const QString &device,
|
||||
unsigned grabWidth,
|
||||
unsigned grabHeight,
|
||||
unsigned fps,
|
||||
unsigned input,
|
||||
VideoStandard videoStandard,
|
||||
PixelFormat pixelFormat,
|
||||
int pixelDecimation)
|
||||
: GrabberWrapper("V4L2:"+device, &_grabber, grabWidth, grabHeight, 10)
|
||||
, _grabber(device,
|
||||
grabWidth,
|
||||
grabHeight,
|
||||
fps,
|
||||
input,
|
||||
videoStandard,
|
||||
pixelFormat,
|
||||
pixelDecimation)
|
||||
{
|
||||
_ggrabber = &_grabber;
|
||||
|
||||
// register the image type
|
||||
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
|
||||
|
||||
// Handle the image in the captured thread using a direct connection
|
||||
connect(&_grabber, &V4L2Grabber::newFrame, this, &V4L2Wrapper::newFrame, Qt::DirectConnection);
|
||||
connect(&_grabber, &V4L2Grabber::readError, this, &V4L2Wrapper::readError, Qt::DirectConnection);
|
||||
}
|
||||
|
||||
V4L2Wrapper::~V4L2Wrapper()
|
||||
{
|
||||
stop();
|
||||
}
|
||||
|
||||
bool V4L2Wrapper::start()
|
||||
{
|
||||
return ( _grabber.start() && GrabberWrapper::start());
|
||||
}
|
||||
|
||||
void V4L2Wrapper::stop()
|
||||
{
|
||||
_grabber.stop();
|
||||
GrabberWrapper::stop();
|
||||
}
|
||||
|
||||
void V4L2Wrapper::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold)
|
||||
{
|
||||
_grabber.setSignalThreshold( redSignalThreshold, greenSignalThreshold, blueSignalThreshold, 50);
|
||||
}
|
||||
|
||||
void V4L2Wrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
|
||||
{
|
||||
_grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom);
|
||||
}
|
||||
|
||||
void V4L2Wrapper::setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax)
|
||||
{
|
||||
_grabber.setSignalDetectionOffset(verticalMin, horizontalMin, verticalMax, horizontalMax);
|
||||
}
|
||||
|
||||
void V4L2Wrapper::newFrame(const Image<ColorRgb> &image)
|
||||
{
|
||||
emit systemImage(_grabberName, image);
|
||||
}
|
||||
|
||||
void V4L2Wrapper::readError(const char* err)
|
||||
{
|
||||
Error(_log, "stop grabber, because reading device failed. (%s)", err);
|
||||
stop();
|
||||
}
|
||||
|
||||
void V4L2Wrapper::action()
|
||||
{
|
||||
// dummy as v4l get notifications from stream
|
||||
}
|
||||
|
||||
void V4L2Wrapper::setSignalDetectionEnable(bool enable)
|
||||
{
|
||||
_grabber.setSignalDetectionEnable(enable);
|
||||
}
|
||||
|
||||
bool V4L2Wrapper::getSignalDetectionEnable() const
|
||||
{
|
||||
return _grabber.getSignalDetectionEnabled();
|
||||
}
|
||||
|
||||
void V4L2Wrapper::setCecDetectionEnable(bool enable)
|
||||
{
|
||||
_grabber.setCecDetectionEnable(enable);
|
||||
}
|
||||
|
||||
bool V4L2Wrapper::getCecDetectionEnable() const
|
||||
{
|
||||
return _grabber.getCecDetectionEnabled();
|
||||
}
|
||||
|
||||
void V4L2Wrapper::setDeviceVideoStandard(const QString& device, VideoStandard videoStandard)
|
||||
{
|
||||
_grabber.setDeviceVideoStandard(device, videoStandard);
|
||||
}
|
||||
|
||||
void V4L2Wrapper::handleCecEvent(CECEvent event)
|
||||
{
|
||||
_grabber.handleCecEvent(event);
|
||||
}
|
||||
|
||||
void V4L2Wrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
|
||||
{
|
||||
if(type == settings::V4L2 && _grabberName.startsWith("V4L"))
|
||||
{
|
||||
// extract settings
|
||||
const QJsonObject& obj = config.object();
|
||||
|
||||
// pixel decimation for v4l
|
||||
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
|
||||
|
||||
// crop for v4l
|
||||
_grabber.setCropping(
|
||||
obj["cropLeft"].toInt(0),
|
||||
obj["cropRight"].toInt(0),
|
||||
obj["cropTop"].toInt(0),
|
||||
obj["cropBottom"].toInt(0));
|
||||
|
||||
// device input
|
||||
_grabber.setInput(obj["input"].toInt(0));
|
||||
|
||||
// device resolution
|
||||
_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0));
|
||||
|
||||
// device framerate
|
||||
_grabber.setFramerate(obj["fps"].toInt(15));
|
||||
|
||||
// CEC Standby
|
||||
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
|
||||
|
||||
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
|
||||
_grabber.setSignalDetectionOffset(
|
||||
obj["sDHOffsetMin"].toDouble(0.25),
|
||||
obj["sDVOffsetMin"].toDouble(0.25),
|
||||
obj["sDHOffsetMax"].toDouble(0.75),
|
||||
obj["sDVOffsetMax"].toDouble(0.75));
|
||||
_grabber.setSignalThreshold(
|
||||
obj["redSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["blueSignalThreshold"].toDouble(0.0)/100.0);
|
||||
_grabber.setDeviceVideoStandard(
|
||||
obj["device"].toString("auto"),
|
||||
parseVideoStandard(obj["standard"].toString("NO_CHANGE")));
|
||||
}
|
||||
}
|
libsrc/grabber/video/CMakeLists.txt (new file, 23 lines)
@@ -0,0 +1,23 @@
# Common cmake definition for external video grabber

# Define the wrapper/header/source locations and collect them
SET(WRAPPER_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video)
SET(HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
if (ENABLE_MF)
	project(mf-grabber)
	SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/mediafoundation)
	FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
elseif(ENABLE_V4L2)
	project(v4l2-grabber)
	SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/v4l2)
	FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
endif()

add_library(${PROJECT_NAME} ${SOURCES})
target_link_libraries(${PROJECT_NAME} hyperion ${QT_LIBRARIES})

if(TURBOJPEG_FOUND)
	target_link_libraries(${PROJECT_NAME} ${TurboJPEG_LIBRARY})
elseif (JPEG_FOUND)
	target_link_libraries(${PROJECT_NAME} ${JPEG_LIBRARY})
endif()

libsrc/grabber/video/VideoWrapper.cpp (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
#include <QMetaType>
|
||||
|
||||
#include <grabber/VideoWrapper.h>
|
||||
|
||||
// qt
|
||||
#include <QTimer>
|
||||
|
||||
VideoWrapper::VideoWrapper()
|
||||
#if defined(ENABLE_V4L2)
|
||||
: GrabberWrapper("V4L2", &_grabber, 0, 0, 10)
|
||||
#elif defined(ENABLE_MF)
|
||||
: GrabberWrapper("V4L2:MEDIA_FOUNDATION", &_grabber, 0, 0, 10)
|
||||
#endif
|
||||
, _grabber()
|
||||
{
|
||||
// register the image type
|
||||
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
|
||||
|
||||
// Handle the image in the captured thread (Media Foundation/V4L2) using a direct connection
|
||||
connect(&_grabber, SIGNAL(newFrame(const Image<ColorRgb>&)), this, SLOT(newFrame(const Image<ColorRgb>&)), Qt::DirectConnection);
|
||||
connect(&_grabber, SIGNAL(readError(const char*)), this, SLOT(readError(const char*)), Qt::DirectConnection);
|
||||
}
|
||||
|
||||
VideoWrapper::~VideoWrapper()
|
||||
{
|
||||
stop();
|
||||
}
|
||||
|
||||
bool VideoWrapper::start()
|
||||
{
|
||||
return (_grabber.prepare() && _grabber.start() && GrabberWrapper::start());
|
||||
}
|
||||
|
||||
void VideoWrapper::stop()
|
||||
{
|
||||
_grabber.stop();
|
||||
GrabberWrapper::stop();
|
||||
}
|
||||
|
||||
#if defined(ENABLE_CEC) && !defined(ENABLE_MF)
|
||||
|
||||
void VideoWrapper::handleCecEvent(CECEvent event)
|
||||
{
|
||||
_grabber.handleCecEvent(event);
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
|
||||
{
|
||||
if(type == settings::V4L2 && _grabberName.startsWith("V4L2"))
|
||||
{
|
||||
// extract settings
|
||||
const QJsonObject& obj = config.object();
|
||||
|
||||
// Device
|
||||
_grabber.setDevice(obj["device"].toString("auto"));
|
||||
|
||||
// Device input
|
||||
_grabber.setInput(obj["input"].toInt(0));
|
||||
|
||||
// Device resolution
|
||||
_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0));
|
||||
|
||||
// Device framerate
|
||||
_grabber.setFramerate(obj["fps"].toInt(15));
|
||||
|
||||
// Device encoding format
|
||||
_grabber.setEncoding(obj["encoding"].toString("NO_CHANGE"));
|
||||
|
||||
// Video standard
|
||||
_grabber.setVideoStandard(parseVideoStandard(obj["standard"].toString("NO_CHANGE")));
|
||||
|
||||
// Image size decimation
|
||||
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
|
||||
|
||||
// Flip mode
|
||||
_grabber.setFlipMode(parseFlipMode(obj["flip"].toString("NO_CHANGE")));
|
||||
|
||||
// Image cropping
|
||||
_grabber.setCropping(
|
||||
obj["cropLeft"].toInt(0),
|
||||
obj["cropRight"].toInt(0),
|
||||
obj["cropTop"].toInt(0),
|
||||
obj["cropBottom"].toInt(0));
|
||||
|
||||
// Brightness, Contrast, Saturation, Hue
|
||||
_grabber.setBrightnessContrastSaturationHue(
|
||||
obj["hardware_brightness"].toInt(0),
|
||||
obj["hardware_contrast"].toInt(0),
|
||||
obj["hardware_saturation"].toInt(0),
|
||||
obj["hardware_hue"].toInt(0));
|
||||
|
||||
#if defined(ENABLE_CEC) && defined(ENABLE_V4L2)
|
||||
// CEC Standby
|
||||
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
|
||||
#endif
|
||||
|
||||
// Software frame skipping
|
||||
_grabber.setFpsSoftwareDecimation(obj["fpsSoftwareDecimation"].toInt(1));
|
||||
|
||||
// Signal detection
|
||||
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
|
||||
_grabber.setSignalDetectionOffset(
|
||||
obj["sDHOffsetMin"].toDouble(0.25),
|
||||
obj["sDVOffsetMin"].toDouble(0.25),
|
||||
obj["sDHOffsetMax"].toDouble(0.75),
|
||||
obj["sDVOffsetMax"].toDouble(0.75));
|
||||
_grabber.setSignalThreshold(
|
||||
obj["redSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["blueSignalThreshold"].toDouble(0.0)/100.0,
|
||||
obj["noSignalCounterThreshold"].toInt(50));
|
||||
|
||||
// Reload the Grabber if any settings have been changed that require it
|
||||
_grabber.reload();
|
||||
}
|
||||
}
|
||||
|
||||
void VideoWrapper::newFrame(const Image<ColorRgb> &image)
|
||||
{
|
||||
emit systemImage(_grabberName, image);
|
||||
}
|
||||
|
||||
void VideoWrapper::readError(const char* err)
|
||||
{
|
||||
Error(_log, "Stop grabber, because reading device failed. (%s)", err);
|
||||
stop();
|
||||
}
|
||||
|
||||
void VideoWrapper::action()
|
||||
{
|
||||
// dummy as v4l get notifications from stream
|
||||
}
|
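VideoWrapper::handleSettingsUpdate() above pulls every grabber option out of the V4L2 settings document with a default value, scaling the percentage thresholds down to the 0.0-1.0 range the grabber expects. A small sketch of that parsing step (the struct and function are illustrative only; the JSON keys are the ones used above):

#include <QJsonDocument>
#include <QJsonObject>

struct SignalThresholds
{
	double red;
	double green;
	double blue;
	int    noSignalCounter;
};

// Read the signal-threshold settings with defaults; percentages become fractions.
SignalThresholds parseSignalThresholds(const QJsonDocument& config)
{
	const QJsonObject obj = config.object();
	SignalThresholds t;
	t.red             = obj["redSignalThreshold"].toDouble(0.0) / 100.0;
	t.green           = obj["greenSignalThreshold"].toDouble(0.0) / 100.0;
	t.blue            = obj["blueSignalThreshold"].toDouble(0.0) / 100.0;
	t.noSignalCounter = obj["noSignalCounterThreshold"].toInt(50);
	return t;
}
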
File diff suppressed because it is too large
@@ -44,9 +44,11 @@ public:
|
||||
, _grabber(grabber)
|
||||
, _bEOS(FALSE)
|
||||
, _hrStatus(S_OK)
|
||||
, _isBusy(false)
|
||||
, _transform(nullptr)
|
||||
, _pixelformat(PixelFormat::NO_CHANGE)
|
||||
{
|
||||
// Initialize critical section.
|
||||
InitializeCriticalSection(&_critsec);
|
||||
}
|
||||
|
||||
@@ -78,21 +80,29 @@ public:
|
||||
|
||||
// IMFSourceReaderCallback methods
|
||||
STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD /*dwStreamIndex*/,
|
||||
DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
|
||||
DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample* pSample)
|
||||
{
|
||||
EnterCriticalSection(&_critsec);
|
||||
_isBusy = true;
|
||||
|
||||
if(dwStreamFlags & MF_SOURCE_READERF_STREAMTICK)
|
||||
if (_grabber->_sourceReader == nullptr)
|
||||
{
|
||||
_isBusy = false;
|
||||
LeaveCriticalSection(&_critsec);
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
if (dwStreamFlags & MF_SOURCE_READERF_STREAMTICK)
|
||||
{
|
||||
Debug(_grabber->_log, "Skipping stream gap");
|
||||
LeaveCriticalSection(&_critsec);
|
||||
_grabber->_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
|
||||
_grabber->_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, nullptr, nullptr, nullptr, nullptr);
|
||||
return S_OK;
|
||||
}
|
||||
}
|
||||
|
||||
if (dwStreamFlags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED)
|
||||
{
|
||||
IMFMediaType *type = nullptr;
|
||||
IMFMediaType* type = nullptr;
|
||||
GUID format;
|
||||
_grabber->_sourceReader->GetNativeMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, MF_SOURCE_READER_CURRENT_TYPE_INDEX, &type);
|
||||
type->GetGUID(MF_MT_SUBTYPE, &format);
|
||||
@@ -103,7 +113,7 @@ public:
|
||||
|
||||
if (dwStreamFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
|
||||
{
|
||||
IMFMediaType *type = nullptr;
|
||||
IMFMediaType* type = nullptr;
|
||||
GUID format;
|
||||
_grabber->_sourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, &type);
|
||||
type->GetGUID(MF_MT_SUBTYPE, &format);
|
||||
@@ -119,7 +129,7 @@ public:
|
||||
{
|
||||
_hrStatus = hrStatus;
|
||||
_com_error error(_hrStatus);
|
||||
Error(_grabber->_log, "Source Reader error => %s", error.ErrorMessage());
|
||||
Error(_grabber->_log, "%s", error.ErrorMessage());
|
||||
goto done;
|
||||
}
|
||||
|
||||
@@ -129,7 +139,7 @@ public:
|
||||
goto done;
|
||||
}
|
||||
|
||||
if(_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::NO_CHANGE)
|
||||
if (_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::BGR24 && _pixelformat != PixelFormat::NO_CHANGE)
|
||||
pSample = TransformSample(_transform, pSample);
|
||||
|
||||
_hrStatus = pSample->ConvertToContiguousBuffer(&buffer);
|
||||
@@ -150,7 +160,7 @@ public:
|
||||
goto done;
|
||||
}
|
||||
|
||||
_grabber->receive_image(data,currentLength);
|
||||
_grabber->receive_image(data, currentLength);
|
||||
|
||||
_hrStatus = buffer->Unlock();
|
||||
if (FAILED(_hrStatus))
|
||||
@@ -165,6 +175,7 @@ public:
|
||||
if (MF_SOURCE_READERF_ENDOFSTREAM & dwStreamFlags)
|
||||
_bEOS = TRUE; // Reached the end of the stream.
|
||||
|
||||
_isBusy = false;
|
||||
LeaveCriticalSection(&_critsec);
|
||||
return _hrStatus;
|
||||
}
|
||||
@@ -172,11 +183,11 @@ public:
|
||||
HRESULT SourceReaderCB::InitializeVideoEncoder(IMFMediaType* type, PixelFormat format)
|
||||
{
|
||||
_pixelformat = format;
|
||||
if (format == PixelFormat::MJPEG || format == PixelFormat::NO_CHANGE)
|
||||
if (format == PixelFormat::MJPEG || format == PixelFormat::BGR24 || format == PixelFormat::NO_CHANGE)
|
||||
return S_OK;
|
||||
|
||||
// Variable declaration
|
||||
IMFMediaType *output = nullptr;
|
||||
IMFMediaType* output = nullptr;
|
||||
DWORD mftStatus = 0;
|
||||
QString error = "";
|
||||
|
||||
@@ -269,7 +280,16 @@ public:
|
||||
return _hrStatus;
|
||||
}
|
||||
|
||||
STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) { return S_OK; }
|
||||
BOOL SourceReaderCB::isBusy()
|
||||
{
|
||||
EnterCriticalSection(&_critsec);
|
||||
BOOL result = _isBusy;
|
||||
LeaveCriticalSection(&_critsec);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
STDMETHODIMP OnEvent(DWORD, IMFMediaEvent*) { return S_OK; }
|
||||
STDMETHODIMP OnFlush(DWORD) { return S_OK; }
|
||||
|
||||
private:
|
||||
@@ -282,13 +302,16 @@ private:
|
||||
}
|
||||
|
||||
SAFE_RELEASE(_transform);
|
||||
|
||||
// Delete critical section.
|
||||
DeleteCriticalSection(&_critsec);
|
||||
}
|
||||
|
||||
IMFSample* SourceReaderCB::TransformSample(IMFTransform* transform, IMFSample* in_sample)
|
||||
{
|
||||
IMFSample* result = nullptr;
|
||||
IMFMediaBuffer* out_buffer = nullptr;
|
||||
MFT_OUTPUT_DATA_BUFFER outputDataBuffer = {0};
|
||||
MFT_OUTPUT_DATA_BUFFER outputDataBuffer = { 0 };
|
||||
|
||||
// Process the input sample
|
||||
_hrStatus = transform->ProcessInput(0, in_sample, 0);
|
||||
@@ -371,4 +394,5 @@ private:
|
||||
HRESULT _hrStatus;
|
||||
IMFTransform* _transform;
|
||||
PixelFormat _pixelformat;
|
||||
std::atomic<bool> _isBusy;
|
||||
};
|
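The new SourceReaderCB::isBusy() above reads a flag only while holding the callback's Win32 critical section, so OnReadSample() and the grabber thread never race on it. The same pattern in isolation (class and member names are illustrative):

#include <windows.h>

class BusyGuard
{
public:
	BusyGuard()  { InitializeCriticalSection(&_critsec); }
	~BusyGuard() { DeleteCriticalSection(&_critsec); }

	void setBusy(BOOL busy)
	{
		EnterCriticalSection(&_critsec);
		_isBusy = busy;
		LeaveCriticalSection(&_critsec);
	}

	BOOL isBusy()
	{
		EnterCriticalSection(&_critsec);
		BOOL result = _isBusy; // read under the lock, mirroring the callback above
		LeaveCriticalSection(&_critsec);
		return result;
	}

private:
	CRITICAL_SECTION _critsec;
	BOOL _isBusy = FALSE;
};
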
@@ -1,19 +1,14 @@
|
||||
#include "grabber/MFThread.h"
|
||||
|
||||
volatile bool MFThread::_isActive = false;
|
||||
|
||||
MFThread::MFThread()
|
||||
: _isBusy(false)
|
||||
, _semaphore(1)
|
||||
, _localData(nullptr)
|
||||
: _localData(nullptr)
|
||||
, _scalingFactorsCount(0)
|
||||
, _scalingFactors(nullptr)
|
||||
, _transform(nullptr)
|
||||
, _decompress(nullptr)
|
||||
, _xform(nullptr)
|
||||
, _imageResampler()
|
||||
{
|
||||
}
|
||||
{}
|
||||
|
||||
MFThread::~MFThread()
|
||||
{
|
||||
@@ -28,12 +23,11 @@ MFThread::~MFThread()
|
||||
}
|
||||
|
||||
void MFThread::setup(
|
||||
unsigned int threadIndex, PixelFormat pixelFormat, uint8_t* sharedData,
|
||||
PixelFormat pixelFormat, uint8_t* sharedData,
|
||||
int size, int width, int height, int lineLength,
|
||||
int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
|
||||
VideoMode videoMode, FlipMode flipMode, int currentFrame, int pixelDecimation)
|
||||
VideoMode videoMode, FlipMode flipMode, int pixelDecimation)
|
||||
{
|
||||
_threadIndex = threadIndex;
|
||||
_lineLength = lineLength;
|
||||
_pixelFormat = pixelFormat;
|
||||
_size = (unsigned long) size;
|
||||
@@ -45,7 +39,6 @@ void MFThread::setup(
|
||||
_cropBottom = cropBottom;
|
||||
_cropRight = cropRight;
|
||||
_flipMode = flipMode;
|
||||
_currentFrame = currentFrame;
|
||||
_pixelDecimation = pixelDecimation;
|
||||
|
||||
_imageResampler.setVideoMode(videoMode);
|
||||
@@ -61,9 +54,10 @@ void MFThread::setup(
|
||||
memcpy(_localData, sharedData, size);
|
||||
}
|
||||
|
||||
void MFThread::run()
|
||||
void MFThread::process()
|
||||
{
|
||||
if (_isActive && _width > 0 && _height > 0)
|
||||
_busy = true;
|
||||
if (_width > 0 && _height > 0)
|
||||
{
|
||||
if (_pixelFormat == PixelFormat::MJPEG)
|
||||
{
|
||||
@@ -85,31 +79,10 @@ void MFThread::run()
|
||||
|
||||
Image<ColorRgb> image = Image<ColorRgb>();
|
||||
_imageResampler.processImage(_localData, _width, _height, _lineLength, PixelFormat::BGR24, image);
|
||||
emit newFrame(_threadIndex, image, _currentFrame);
|
||||
emit newFrame(image);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool MFThread::isBusy()
|
||||
{
|
||||
bool temp;
|
||||
_semaphore.acquire();
|
||||
if (_isBusy)
|
||||
temp = true;
|
||||
else
|
||||
{
|
||||
temp = false;
|
||||
_isBusy = true;
|
||||
}
|
||||
_semaphore.release();
|
||||
return temp;
|
||||
}
|
||||
|
||||
void MFThread::noBusy()
|
||||
{
|
||||
_semaphore.acquire();
|
||||
_isBusy = false;
|
||||
_semaphore.release();
|
||||
_busy = false;
|
||||
}
|
||||
|
||||
void MFThread::processImageMjpeg()
|
||||
@@ -176,7 +149,7 @@ void MFThread::processImageMjpeg()
|
||||
|
||||
// got image, process it
|
||||
if ( !(_cropLeft > 0 || _cropTop > 0 || _cropBottom > 0 || _cropRight > 0))
|
||||
emit newFrame(_threadIndex, srcImage, _currentFrame);
|
||||
emit newFrame(srcImage);
|
||||
else
|
||||
{
|
||||
// calculate the output size
|
||||
@@ -200,6 +173,6 @@ void MFThread::processImageMjpeg()
|
||||
}
|
||||
|
||||
// emit
|
||||
emit newFrame(_threadIndex, destImage, _currentFrame);
|
||||
emit newFrame(destImage);
|
||||
}
|
||||
}
|
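MFThread above drops the semaphore-protected isBusy()/noBusy() pair in favour of a single flag set at the start of process() and cleared at the end. A minimal sketch of that simplification, assuming the flag is shared between threads and therefore kept atomic (names are illustrative):

#include <atomic>

class ThreadBusyFlag
{
public:
	void setBusy()      { _busy.store(true); }   // entering process()
	void clearBusy()    { _busy.store(false); }  // leaving process()
	bool isBusy() const { return _busy.load(); }

private:
	std::atomic<bool> _busy{false};
};
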
@@ -27,37 +27,41 @@
|
||||
#define CLEAR(x) memset(&(x), 0, sizeof(x))
|
||||
|
||||
#ifndef V4L2_CAP_META_CAPTURE
|
||||
#define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility.
|
||||
#define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility.
|
||||
#endif
|
||||
|
||||
// Constants
|
||||
namespace { const bool verbose = false; }
|
||||
|
||||
static PixelFormat GetPixelFormat(const unsigned int format)
|
||||
{
|
||||
if (format == V4L2_PIX_FMT_RGB32) return PixelFormat::RGB32;
|
||||
if (format == V4L2_PIX_FMT_RGB24) return PixelFormat::BGR24;
|
||||
if (format == V4L2_PIX_FMT_YUYV) return PixelFormat::YUYV;
|
||||
if (format == V4L2_PIX_FMT_UYVY) return PixelFormat::UYVY;
|
||||
if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG;
|
||||
if (format == V4L2_PIX_FMT_NV12) return PixelFormat::NV12;
|
||||
if (format == V4L2_PIX_FMT_YUV420) return PixelFormat::I420;
|
||||
#ifdef HAVE_JPEG_DECODER
|
||||
if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG;
|
||||
#endif
|
||||
return PixelFormat::NO_CHANGE;
|
||||
};
|
||||
|
||||
V4L2Grabber::V4L2Grabber(const QString & device, unsigned width, unsigned height, unsigned fps, unsigned input, VideoStandard videoStandard, PixelFormat pixelFormat, int pixelDecimation)
|
||||
: Grabber("V4L2:"+device)
|
||||
, _deviceName()
|
||||
, _videoStandard(videoStandard)
|
||||
V4L2Grabber::V4L2Grabber()
|
||||
: Grabber("V4L2")
|
||||
, _currentDeviceName("none")
|
||||
, _newDeviceName("none")
|
||||
, _ioMethod(IO_METHOD_MMAP)
|
||||
, _fileDescriptor(-1)
|
||||
, _buffers()
|
||||
, _pixelFormat(pixelFormat)
|
||||
, _pixelDecimation(pixelDecimation)
|
||||
, _pixelFormat(PixelFormat::NO_CHANGE)
|
||||
, _pixelFormatConfig(PixelFormat::NO_CHANGE)
|
||||
, _lineLength(-1)
|
||||
, _frameByteSize(-1)
|
||||
, _noSignalCounterThreshold(40)
|
||||
, _noSignalThresholdColor(ColorRgb{0,0,0})
|
||||
, _signalDetectionEnabled(true)
|
||||
, _cecDetectionEnabled(true)
|
||||
, _cecStandbyActivated(false)
|
||||
, _signalDetectionEnabled(true)
|
||||
, _noSignalDetected(false)
|
||||
, _noSignalCounter(0)
|
||||
, _x_frac_min(0.25)
|
||||
@@ -66,17 +70,8 @@ V4L2Grabber::V4L2Grabber(const QString & device, unsigned width, unsigned height
|
||||
, _y_frac_max(0.75)
|
||||
, _streamNotifier(nullptr)
|
||||
, _initialized(false)
|
||||
, _deviceAutoDiscoverEnabled(false)
|
||||
, _reload(false)
|
||||
{
|
||||
setPixelDecimation(pixelDecimation);
|
||||
getV4Ldevices();
|
||||
|
||||
// init
|
||||
setInput(input);
|
||||
setWidthHeight(width, height);
|
||||
setFramerate(fps);
|
||||
setDeviceVideoStandard(device, videoStandard);
|
||||
Debug(_log,"Init pixel format: %i", static_cast<int>(_pixelFormat));
|
||||
}
|
||||
|
||||
V4L2Grabber::~V4L2Grabber()
|
||||
@@ -89,7 +84,7 @@ void V4L2Grabber::uninit()
|
||||
// stop if the grabber was not stopped
|
||||
if (_initialized)
|
||||
{
|
||||
Debug(_log,"uninit grabber: %s", QSTRING_CSTR(_deviceName));
|
||||
Debug(_log,"Uninit grabber: %s", QSTRING_CSTR(_newDeviceName));
|
||||
stop();
|
||||
}
|
||||
}
|
||||
@@ -98,66 +93,47 @@ bool V4L2Grabber::init()
|
||||
{
|
||||
if (!_initialized)
|
||||
{
|
||||
getV4Ldevices();
|
||||
QString v4lDevices_str;
|
||||
bool noDeviceName = _currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName.compare("auto", Qt::CaseInsensitive) == 0;
|
||||
|
||||
// show list only once
|
||||
if (!_deviceName.startsWith("/dev/"))
|
||||
// enumerate the video capture devices on the user's system
|
||||
enumVideoCaptureDevices();
|
||||
|
||||
if(noDeviceName)
|
||||
return false;
|
||||
|
||||
if(!_deviceProperties.contains(_currentDeviceName))
|
||||
{
|
||||
for (auto& dev: _v4lDevices)
|
||||
{
|
||||
v4lDevices_str += "\t"+ dev.first + "\t" + dev.second + "\n";
|
||||
}
|
||||
if (!v4lDevices_str.isEmpty())
|
||||
Info(_log, "available V4L2 devices:\n%s", QSTRING_CSTR(v4lDevices_str));
|
||||
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
|
||||
_currentDeviceName = "none";
|
||||
return false;
|
||||
}
|
||||
|
||||
if (_deviceName == "auto")
|
||||
bool valid = false;
|
||||
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
|
||||
if (i.key() == _currentDeviceName && valid == false)
|
||||
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
|
||||
if (y.key() == _input && valid == false)
|
||||
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
|
||||
if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height && valid == false)
|
||||
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
|
||||
if(*fps == _fps && valid == false)
|
||||
valid = true;
|
||||
|
||||
if (!valid)
|
||||
{
|
||||
_deviceAutoDiscoverEnabled = true;
|
||||
_deviceName = "unknown";
|
||||
Info( _log, "search for usable video devices" );
|
||||
for (auto& dev: _v4lDevices)
|
||||
{
|
||||
_deviceName = dev.first;
|
||||
if (init())
|
||||
{
|
||||
Info(_log, "found usable v4l2 device: %s (%s)",QSTRING_CSTR(dev.first), QSTRING_CSTR(dev.second));
|
||||
_deviceAutoDiscoverEnabled = false;
|
||||
return _initialized;
|
||||
}
|
||||
}
|
||||
Info(_log, "no usable device found");
|
||||
}
|
||||
else if (!_deviceName.startsWith("/dev/"))
|
||||
{
|
||||
for (auto& dev: _v4lDevices)
|
||||
{
|
||||
if (_deviceName.toLower() == dev.second.toLower())
|
||||
{
|
||||
_deviceName = dev.first;
|
||||
Info(_log, "found v4l2 device with configured name: %s (%s)", QSTRING_CSTR(dev.second), QSTRING_CSTR(dev.first) );
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Info(_log, "%s v4l device: %s", (_deviceAutoDiscoverEnabled? "test" : "configured"), QSTRING_CSTR(_deviceName));
|
||||
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
|
||||
_currentDeviceName = "none";
|
||||
return false;
|
||||
}
|
||||
|
||||
bool opened = false;
|
||||
try
|
||||
{
|
||||
// do not init with unknown device
|
||||
if (_deviceName != "unknown")
|
||||
if (open_device())
|
||||
{
|
||||
if (open_device())
|
||||
{
|
||||
opened = true;
|
||||
init_device(_videoStandard);
|
||||
_initialized = true;
|
||||
}
|
||||
opened = true;
|
||||
init_device(_videoStandard);
|
||||
_initialized = true;
|
||||
}
|
||||
}
|
||||
catch(std::exception& e)
|
||||
@@ -167,14 +143,15 @@ bool V4L2Grabber::init()
|
||||
uninit_device();
|
||||
close_device();
|
||||
}
|
||||
ErrorIf( !_deviceAutoDiscoverEnabled, _log, "V4l2 init failed (%s)", e.what());
|
||||
|
||||
Error(_log, "V4l2 init failed (%s)", e.what());
|
||||
}
|
||||
}
|
||||
|
||||
return _initialized;
|
||||
}
|
||||
|
||||
void V4L2Grabber::getV4Ldevices()
|
||||
void V4L2Grabber::enumVideoCaptureDevices()
|
||||
{
|
||||
QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags);
|
||||
_deviceProperties.clear();
|
||||
@@ -319,35 +296,12 @@ void V4L2Grabber::getV4Ldevices()
|
||||
properties.name = devName;
|
||||
devNameFile.close();
|
||||
}
|
||||
_v4lDevices.emplace("/dev/"+it.fileName(), devName);
|
||||
|
||||
_deviceProperties.insert("/dev/"+it.fileName(), properties);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void V4L2Grabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
|
||||
{
|
||||
_noSignalThresholdColor.red = uint8_t(255*redSignalThreshold);
|
||||
_noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold);
|
||||
_noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold);
|
||||
_noSignalCounterThreshold = qMax(1, noSignalCounterThreshold);
|
||||
|
||||
Info(_log, "Signal threshold set to: {%d, %d, %d}", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue );
|
||||
}
|
||||
|
||||
void V4L2Grabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax)
|
||||
{
|
||||
// rainbow 16 stripes 0.47 0.2 0.49 0.8
|
||||
// unicolor: 0.25 0.25 0.75 0.75
|
||||
|
||||
_x_frac_min = horizontalMin;
|
||||
_y_frac_min = verticalMin;
|
||||
_x_frac_max = horizontalMax;
|
||||
_y_frac_max = verticalMax;
|
||||
|
||||
Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max );
|
||||
}
|
||||
|
||||
bool V4L2Grabber::start()
|
||||
{
|
||||
try
|
||||
@@ -386,23 +340,23 @@ bool V4L2Grabber::open_device()
|
||||
{
|
||||
struct stat st;
|
||||
|
||||
if (-1 == stat(QSTRING_CSTR(_deviceName), &st))
|
||||
if (-1 == stat(QSTRING_CSTR(_currentDeviceName), &st))
|
||||
{
|
||||
throw_errno_exception("Cannot identify '" + _deviceName + "'");
|
||||
throw_errno_exception("Cannot identify '" + _currentDeviceName + "'");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!S_ISCHR(st.st_mode))
|
||||
{
|
||||
throw_exception("'" + _deviceName + "' is no device");
|
||||
throw_exception("'" + _currentDeviceName + "' is no device");
|
||||
return false;
|
||||
}
|
||||
|
||||
_fileDescriptor = open(QSTRING_CSTR(_deviceName), O_RDWR | O_NONBLOCK, 0);
|
||||
_fileDescriptor = open(QSTRING_CSTR(_currentDeviceName), O_RDWR | O_NONBLOCK, 0);
|
||||
|
||||
if (-1 == _fileDescriptor)
|
||||
{
|
||||
throw_errno_exception("Cannot open '" + _deviceName + "'");
|
||||
throw_errno_exception("Cannot open '" + _currentDeviceName + "'");
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -455,7 +409,7 @@ void V4L2Grabber::init_mmap()
|
||||
{
|
||||
if (EINVAL == errno)
|
||||
{
|
||||
throw_exception("'" + _deviceName + "' does not support memory mapping");
|
||||
throw_exception("'" + _currentDeviceName + "' does not support memory mapping");
|
||||
return;
|
||||
}
|
||||
else
|
||||
@@ -467,7 +421,7 @@ void V4L2Grabber::init_mmap()
|
||||
|
||||
if (req.count < 2)
|
||||
{
|
||||
throw_exception("Insufficient buffer memory on " + _deviceName);
|
||||
throw_exception("Insufficient buffer memory on " + _currentDeviceName);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -519,7 +473,7 @@ void V4L2Grabber::init_userp(unsigned int buffer_size)
|
||||
{
|
||||
if (EINVAL == errno)
|
||||
{
|
||||
throw_exception("'" + _deviceName + "' does not support user pointer");
|
||||
throw_exception("'" + _currentDeviceName + "' does not support user pointer");
|
||||
return;
|
||||
}
|
||||
else
|
||||
@@ -553,7 +507,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
{
|
||||
if (EINVAL == errno)
|
||||
{
|
||||
throw_exception("'" + _deviceName + "' is no V4L2 device");
|
||||
throw_exception("'" + _currentDeviceName + "' is no V4L2 device");
|
||||
return;
|
||||
}
|
||||
else
|
||||
@@ -565,7 +519,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
|
||||
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
|
||||
{
|
||||
throw_exception("'" + _deviceName + "' is no video capture device");
|
||||
throw_exception("'" + _currentDeviceName + "' is no video capture device");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -575,7 +529,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
{
|
||||
if (!(cap.capabilities & V4L2_CAP_READWRITE))
|
||||
{
|
||||
throw_exception("'" + _deviceName + "' does not support read i/o");
|
||||
throw_exception("'" + _currentDeviceName + "' does not support read i/o");
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -586,7 +540,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
{
|
||||
if (!(cap.capabilities & V4L2_CAP_STREAMING))
|
||||
{
|
||||
throw_exception("'" + _deviceName + "' does not support streaming i/o");
|
||||
throw_exception("'" + _currentDeviceName + "' does not support streaming i/o");
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -699,16 +653,28 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
// set the requested pixel format
|
||||
switch (_pixelFormat)
|
||||
{
|
||||
case PixelFormat::UYVY:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
|
||||
case PixelFormat::RGB32:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;
|
||||
break;
|
||||
|
||||
case PixelFormat::BGR24:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
|
||||
break;
|
||||
|
||||
case PixelFormat::YUYV:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
|
||||
break;
|
||||
|
||||
case PixelFormat::RGB32:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;
|
||||
case PixelFormat::UYVY:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
|
||||
break;
|
||||
|
||||
case PixelFormat::NV12:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
|
||||
break;
|
||||
|
||||
case PixelFormat::I420:
|
||||
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
|
||||
break;
|
||||
|
||||
#ifdef HAVE_JPEG_DECODER
|
||||
@@ -727,7 +693,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
}
|
||||
|
||||
// set custom resolution for width and height if they are not zero
|
||||
if(_width && _height)
|
||||
if(_width != 0 && _height != 0)
|
||||
{
|
||||
fmt.fmt.pix.width = _width;
|
||||
fmt.fmt.pix.height = _height;
|
||||
@@ -772,14 +738,23 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
// check pixel format and frame size
|
||||
switch (fmt.fmt.pix.pixelformat)
|
||||
{
|
||||
case V4L2_PIX_FMT_UYVY:
|
||||
case V4L2_PIX_FMT_RGB32:
|
||||
{
|
||||
_pixelFormat = PixelFormat::UYVY;
|
||||
_frameByteSize = _width * _height * 2;
|
||||
Debug(_log, "Pixel format=UYVY");
|
||||
_pixelFormat = PixelFormat::RGB32;
|
||||
_frameByteSize = _width * _height * 4;
|
||||
Debug(_log, "Pixel format=RGB32");
|
||||
}
|
||||
break;
|
||||
|
||||
case V4L2_PIX_FMT_RGB24:
|
||||
{
|
||||
_pixelFormat = PixelFormat::BGR24;
|
||||
_frameByteSize = _width * _height * 3;
|
||||
Debug(_log, "Pixel format=BGR24");
|
||||
}
|
||||
break;
|
||||
|
||||
|
||||
case V4L2_PIX_FMT_YUYV:
|
||||
{
|
||||
_pixelFormat = PixelFormat::YUYV;
|
||||
@@ -788,11 +763,27 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
}
|
||||
break;
|
||||
|
||||
case V4L2_PIX_FMT_RGB32:
|
||||
case V4L2_PIX_FMT_UYVY:
|
||||
{
|
||||
_pixelFormat = PixelFormat::RGB32;
|
||||
_frameByteSize = _width * _height * 4;
|
||||
Debug(_log, "Pixel format=RGB32");
|
||||
_pixelFormat = PixelFormat::UYVY;
|
||||
_frameByteSize = _width * _height * 2;
|
||||
Debug(_log, "Pixel format=UYVY");
|
||||
}
|
||||
break;
|
||||
|
||||
case V4L2_PIX_FMT_NV12:
|
||||
{
|
||||
_pixelFormat = PixelFormat::NV12;
|
||||
_frameByteSize = (_width * _height * 6) / 4;
|
||||
Debug(_log, "Pixel format=NV12");
|
||||
}
|
||||
break;
|
||||
|
||||
case V4L2_PIX_FMT_YUV420:
|
||||
{
|
||||
_pixelFormat = PixelFormat::I420;
|
||||
_frameByteSize = (_width * _height * 6) / 4;
|
||||
Debug(_log, "Pixel format=I420");
|
||||
}
|
||||
break;
|
||||
|
||||
@@ -807,9 +798,9 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
|
||||
|
||||
default:
|
||||
#ifdef HAVE_JPEG_DECODER
|
||||
throw_exception("Only pixel formats UYVY, YUYV, RGB32 and MJPEG are supported");
|
||||
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12, I420 and MJPEG are supported");
|
||||
#else
|
||||
throw_exception("Only pixel formats UYVY, YUYV, and RGB32 are supported");
|
||||
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12 and I420 are supported");
|
||||
#endif
|
||||
return;
|
||||
}
|
||||
@@ -992,7 +983,7 @@ int V4L2Grabber::read_frame()
{
throw_errno_exception("VIDIOC_DQBUF");
stop();
getV4Ldevices();
enumVideoCaptureDevices();
}
return 0;
}
@@ -1029,7 +1020,7 @@ int V4L2Grabber::read_frame()
{
throw_errno_exception("VIDIOC_DQBUF");
stop();
getV4Ldevices();
enumVideoCaptureDevices();
}
return 0;
}
@@ -1298,6 +1289,254 @@ int V4L2Grabber::xioctl(int fileDescriptor, int request, void *arg)
return r;
}

void V4L2Grabber::setDevice(const QString& device)
{
if (_currentDeviceName != device)
{
(_initialized)
? _newDeviceName = device
: _currentDeviceName = _newDeviceName = device;

_reload = true;
}
}

bool V4L2Grabber::setInput(int input)
{
if(Grabber::setInput(input))
{
_reload = true;
return true;
}

return false;
}

bool V4L2Grabber::setWidthHeight(int width, int height)
{
if(Grabber::setWidthHeight(width, height))
{
_reload = true;
return true;
}

return false;
}

void V4L2Grabber::setEncoding(QString enc)
{
if(_pixelFormatConfig != parsePixelFormat(enc))
{
_pixelFormatConfig = parsePixelFormat(enc);
if(_initialized)
{
Debug(_log,"Set hardware encoding to: %s", QSTRING_CSTR(enc.toUpper()));
_reload = true;
}
else
_pixelFormat = _pixelFormatConfig;
}
}

void V4L2Grabber::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue)
{
if(_initialized)
DebugIf(verbose, _log,"TODO: Set brightness to %i, contrast to %i, saturation to %i, hue to %i", brightness, contrast, saturation, hue);
}

void V4L2Grabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
{
_noSignalThresholdColor.red = uint8_t(255*redSignalThreshold);
_noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold);
_noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold);
_noSignalCounterThreshold = qMax(1, noSignalCounterThreshold);

if(_signalDetectionEnabled)
Info(_log, "Signal threshold set to: {%d, %d, %d}", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue );
}

void V4L2Grabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax)
{
// rainbow 16 stripes 0.47 0.2 0.49 0.8
// unicolor: 0.25 0.25 0.75 0.75

_x_frac_min = horizontalMin;
_y_frac_min = verticalMin;
_x_frac_max = horizontalMax;
_y_frac_max = verticalMax;

if(_signalDetectionEnabled)
Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max );
}

void V4L2Grabber::setSignalDetectionEnable(bool enable)
{
if (_signalDetectionEnabled != enable)
{
_signalDetectionEnabled = enable;
if(_initialized)
Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled");
}
}

void V4L2Grabber::setCecDetectionEnable(bool enable)
{
if (_cecDetectionEnabled != enable)
{
_cecDetectionEnabled = enable;
if(_initialized)
Info(_log, QString("CEC detection is now %1").arg(enable ? "enabled" : "disabled").toLocal8Bit());
}
}

bool V4L2Grabber::reload(bool force)
{
if (_streamNotifier != nullptr && _streamNotifier->isEnabled() && (_reload || force))
{
Info(_log,"Reloading V4L2 Grabber");
uninit();
_pixelFormat = _pixelFormatConfig;
_newDeviceName = _currentDeviceName;
_reload = false;
return start();
}

return false;
}

#if defined(ENABLE_CEC)

void V4L2Grabber::handleCecEvent(CECEvent event)
{
switch (event)
{
case CECEvent::On :
Debug(_log,"CEC on event received");
_cecStandbyActivated = false;
return;
case CECEvent::Off :
Debug(_log,"CEC off event received");
_cecStandbyActivated = true;
return;
default: break;
}
}

#endif

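The setters added above no longer restart the device themselves; they only flag _reload, and reload() performs a single uninit()/start() cycle once the stream notifier is active. A minimal sketch of that deferred-reload pattern (simplified, with names that only loosely mirror the members used here, not the actual class):

// Minimal sketch of the deferred-reload pattern, not the real V4L2Grabber.
struct ReloadSketch
{
	bool _initialized = false;
	bool _reload = false;

	void setWidthHeight(int /*width*/, int /*height*/) { _reload = true; } // just mark dirty
	void setEncoding(const char* /*enc*/)              { _reload = true; }

	bool reload(bool force)
	{
		if (_initialized && (_reload || force))
		{
			// uninit(); ... start();  -- one restart picks up all pending changes
			_reload = false;
			return true;
		}
		return false;
	}
};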
QJsonArray V4L2Grabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());

enumVideoCaptureDevices();

QJsonArray inputsDiscovered;
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
{
QJsonObject device, in;
QJsonArray video_inputs, formats;

device["device"] = it.key();
device["device_name"] = _deviceProperties.value(it.key()).name;
device["type"] = "v4l2";

QMultiMap<QString, int> inputs = QMultiMap<QString, int>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (!inputs.contains(y.value().inputName, y.key()))
inputs.insert(y.value().inputName, y.key());

for (auto input = inputs.begin(); input != inputs.end(); input++)
{
in["name"] = input.key();
in["inputIdx"] = input.value();

QJsonArray standards;
QList<VideoStandard> videoStandards = QList<VideoStandard>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto std = y.value().standards.begin(); std != y.value().standards.end(); std++)
if(!videoStandards.contains(*std))
videoStandards << *std;

for (auto standard : videoStandards)
standards.append(VideoStandard2String(standard));

if (!standards.isEmpty())
in["standards"] = standards;

QList<PixelFormat> encodingFormats = QList<PixelFormat>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if (!encodingFormats.contains(enc.key()))
encodingFormats << enc.key();

for (auto encodingFormat : encodingFormats)
{
QJsonObject format;
QJsonArray resolutionArray;

format["format"] = pixelFormatToString(encodingFormat);

QMultiMap<int, int> deviceResolutions = QMultiMap<int, int>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if (enc.key() == encodingFormat && !deviceResolutions.contains(enc.value().width, enc.value().height))
deviceResolutions.insert(enc.value().width, enc.value().height);

for (auto width_height = deviceResolutions.begin(); width_height != deviceResolutions.end(); width_height++)
{
QJsonObject resolution;
QJsonArray fps;

resolution["width"] = int(width_height.key());
resolution["height"] = int(width_height.value());

QIntList framerates = QIntList();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if(enc.key() == encodingFormat && enc.value().width == width_height.key() && enc.value().height == width_height.value())
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
if(!framerates.contains(*fps))
framerates << *fps;

for (auto framerate : framerates)
fps.append(framerate);

resolution["fps"] = fps;
resolutionArray.append(resolution);
}

format["resolutions"] = resolutionArray;
formats.append(format);
}
in["formats"] = formats;
video_inputs.append(in);

}

device["video_inputs"] = video_inputs;
inputsDiscovered.append(device);
}

_deviceProperties.clear();
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());

return inputsDiscovered;
}

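The nested loops in discover() above reduce to a device / input / format / resolution hierarchy. A hypothetical single entry could be assembled like this (illustrative values only; the helper is not part of this change, it just uses the same Qt JSON types and keys):

// Illustrative only: shape of one entry returned by discover(), with made-up values.
#include <QJsonArray>
#include <QJsonObject>

static QJsonObject exampleDiscoveryEntry()
{
	QJsonObject resolution;
	resolution["width"]  = 1280;
	resolution["height"] = 720;
	resolution["fps"]    = QJsonArray({ 25, 30, 60 });

	QJsonObject format;
	format["format"]      = "yuyv";
	format["resolutions"] = QJsonArray({ resolution });

	QJsonObject input;
	input["name"]      = "Camera 1";
	input["inputIdx"]  = 0;
	input["standards"] = QJsonArray({ "PAL" });
	input["formats"]   = QJsonArray({ format });

	QJsonObject device;
	device["device"]       = "/dev/video0";
	device["device_name"]  = "USB Video";
	device["type"]         = "v4l2";
	device["video_inputs"] = QJsonArray({ input });
	return device;
}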
void V4L2Grabber::enumFrameIntervals(QList<int> &framerates, int fileDescriptor, int pixelformat, int width, int height)
{
// collect available frame rates
@@ -1349,188 +1588,3 @@ void V4L2Grabber::enumFrameIntervals(QList<int> &framerates, int fileDescriptor,
framerates.append(streamparms.parm.capture.timeperframe.denominator / streamparms.parm.capture.timeperframe.numerator);
}
}

void V4L2Grabber::setSignalDetectionEnable(bool enable)
{
if (_signalDetectionEnabled != enable)
{
_signalDetectionEnabled = enable;
Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled");
}
}

void V4L2Grabber::setCecDetectionEnable(bool enable)
{
if (_cecDetectionEnabled != enable)
{
_cecDetectionEnabled = enable;
Info(_log, QString("CEC detection is now %1").arg(enable ? "enabled" : "disabled").toLocal8Bit());
}
}

void V4L2Grabber::setPixelDecimation(int pixelDecimation)
{
if (_pixelDecimation != pixelDecimation)
{
_pixelDecimation = pixelDecimation;
_imageResampler.setHorizontalPixelDecimation(pixelDecimation);
_imageResampler.setVerticalPixelDecimation(pixelDecimation);
}
}

void V4L2Grabber::setDeviceVideoStandard(QString device, VideoStandard videoStandard)
{
if (_deviceName != device || _videoStandard != videoStandard)
{
// extract input of device
QChar input = device.at(device.size() - 1);
_input = input.isNumber() ? input.digitValue() : -1;

bool started = _initialized;
uninit();
_deviceName = device;
_videoStandard = videoStandard;

if(started) start();
}
}

bool V4L2Grabber::setInput(int input)
{
if(Grabber::setInput(input))
{
bool started = _initialized;
uninit();
if(started) start();
return true;
}
return false;
}

bool V4L2Grabber::setWidthHeight(int width, int height)
{
if(Grabber::setWidthHeight(width,height))
{
bool started = _initialized;
uninit();
if(started) start();
return true;
}
return false;
}

bool V4L2Grabber::setFramerate(int fps)
{
if(Grabber::setFramerate(fps))
{
bool started = _initialized;
uninit();
if(started) start();
return true;
}
return false;
}

QStringList V4L2Grabber::getDevices() const
{
QStringList result = QStringList();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
result << it.key();

return result;
}

QString V4L2Grabber::getDeviceName(const QString& devicePath) const
{
return _deviceProperties.value(devicePath).name;
}

QMultiMap<QString, int> V4L2Grabber::getDeviceInputs(const QString& devicePath) const
{
QMultiMap<QString, int> result = QMultiMap<QString, int>();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (!result.contains(input.value().inputName, input.key()))
result.insert(input.value().inputName, input.key());

return result;
}

QList<VideoStandard> V4L2Grabber::getAvailableDeviceStandards(const QString& devicePath, const int& deviceInput) const
{
QList<VideoStandard> result =QList<VideoStandard>();

for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto standard = input.value().standards.begin(); standard != input.value().standards.end(); standard++)
if(!result.contains(*standard))
result << *standard;

return result;
}

QStringList V4L2Grabber::getAvailableEncodingFormats(const QString& devicePath, const int& deviceInput) const
{
QStringList result = QStringList();

for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto enc = input.value().encodingFormats.begin(); enc != input.value().encodingFormats.end(); enc++)
if (!result.contains(pixelFormatToString(enc.key()).toLower(), Qt::CaseInsensitive))
result << pixelFormatToString(enc.key()).toLower();

return result;
}

QMultiMap<int, int> V4L2Grabber::getAvailableDeviceResolutions(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat) const
{
QMultiMap<int, int> result = QMultiMap<int, int>();

for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto enc = input.value().encodingFormats.begin(); enc != input.value().encodingFormats.end(); enc++)
if (!result.contains(enc.value().width, enc.value().height))
result.insert(enc.value().width, enc.value().height);

return result;
}

QIntList V4L2Grabber::getAvailableDeviceFramerates(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat, const unsigned width, const unsigned height) const
{
QIntList result = QIntList();

for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto enc = input.value().encodingFormats.begin(); enc != input.value().encodingFormats.end(); enc++)
if(enc.key() == encFormat && enc.value().width == width && enc.value().height == height)
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
if(!result.contains(*fps))
result << *fps;

return result;
}

void V4L2Grabber::handleCecEvent(CECEvent event)
{
switch (event)
{
case CECEvent::On :
Debug(_log,"CEC on event received");
_cecStandbyActivated = false;
return;
case CECEvent::Off :
Debug(_log,"CEC off event received");
_cecStandbyActivated = true;
return;
default: break;
}
}
@@ -4,9 +4,15 @@
#include <xcb/randr.h>
#include <xcb/xcb_event.h>

// Constants
namespace {
const bool verbose = false;
} //End of constants

X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation)
: Grabber("X11GRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
, _x11Display(nullptr)
, _xImage(nullptr)
, _pixmap(None)
, _srcFormat(nullptr)
, _dstFormat(nullptr)
@@ -17,8 +23,15 @@ X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom,
, _calculatedHeight(0)
, _src_x(cropLeft)
, _src_y(cropTop)
, _XShmAvailable(false)
, _XRenderAvailable(false)
, _XRandRAvailable(false)
, _isWayland (false)
, _logger{}
, _image(0,0)
{
_logger = Logger::getInstance("X11");

_useImageResampler = false;
_imageResampler.setCropping(0, 0, 0, 0); // cropping is performed by XRender, XShmGetImage or XGetImage
memset(&_pictAttr, 0, sizeof(_pictAttr));
@@ -37,7 +50,10 @@ X11Grabber::~X11Grabber()
void X11Grabber::freeResources()
{
// Cleanup allocated resources of the X11 grab
XDestroyImage(_xImage);
if (_xImage != nullptr)
{
XDestroyImage(_xImage);
}
if (_XRandRAvailable)
{
qApp->removeNativeEventFilter(this);
@@ -100,39 +116,72 @@ void X11Grabber::setupResources()
_imageResampler.setHorizontalPixelDecimation(_pixelDecimation);
_imageResampler.setVerticalPixelDecimation(_pixelDecimation);
}

}

bool X11Grabber::Setup()
bool X11Grabber::open()
{
_x11Display = XOpenDisplay(NULL);
if (_x11Display == nullptr)
bool rc = false;

if (getenv("WAYLAND_DISPLAY") != nullptr)
{
Error(_log, "Unable to open display");
if (getenv("DISPLAY"))
_isWayland = true;
}
else
{
_x11Display = XOpenDisplay(nullptr);
if (_x11Display != nullptr)
{
Error(_log, "%s",getenv("DISPLAY"));
rc = true;
}
}
return rc;
}

bool X11Grabber::setupDisplay()
{
bool result = false;

if ( ! open() )
{
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
else
{
Error(_log, "DISPLAY environment variable not set");
if (getenv("DISPLAY") != nullptr)
{
Error(_log, "Unable to open display [%s]",getenv("DISPLAY"));
}
else
{
Error(_log, "DISPLAY environment variable not set");
}
}
return false;
}
else
{
_window = DefaultRootWindow(_x11Display);

_window = DefaultRootWindow(_x11Display);
int dummy, pixmaps_supported;

int dummy, pixmaps_supported;
_XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy);
_XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy);
_XShmAvailable = XShmQueryExtension(_x11Display);
XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported);
_XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap;

_XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy);
_XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy);
_XShmAvailable = XShmQueryExtension(_x11Display);
XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported);
_XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap;
Info(_log, QString("XRandR=[%1] XRender=[%2] XShm=[%3] XPixmap=[%4]")
.arg(_XRandRAvailable ? "available" : "unavailable")
.arg(_XRenderAvailable ? "available" : "unavailable")
.arg(_XShmAvailable ? "available" : "unavailable")
.arg(_XShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());

bool result = (updateScreenDimensions(true) >=0);
ErrorIf(!result, _log, "X11 Grabber start failed");
setEnabled(result);
result = (updateScreenDimensions(true) >=0);
ErrorIf(!result, _log, "X11 Grabber start failed");
setEnabled(result);
}
return result;
}

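The new open() above treats a set WAYLAND_DISPLAY as a hard stop, since X11 grabbing cannot work in a Wayland session, and only then attempts XOpenDisplay(). Reduced to the bare environment checks, the decision looks roughly like this (a sketch, not the full open() logic):

// Reduced sketch of the session checks used above.
#include <cstdlib>

static bool runningUnderWayland()
{
	return std::getenv("WAYLAND_DISPLAY") != nullptr; // Wayland session: skip the X11 grabber
}

static bool x11DisplayConfigured()
{
	return std::getenv("DISPLAY") != nullptr;         // otherwise XOpenDisplay() has a target to try
}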
@@ -235,7 +284,8 @@ int X11Grabber::updateScreenDimensions(bool force)
_width = _windowAttr.width;
_height = _windowAttr.height;

int width=0, height=0;
int width=0;
int height=0;

// Image scaling is performed by XRender when available, otherwise by ImageResampler
if (_XRenderAvailable)
@@ -301,19 +351,24 @@ void X11Grabber::setVideoMode(VideoMode mode)
updateScreenDimensions(true);
}

void X11Grabber::setPixelDecimation(int pixelDecimation)
bool X11Grabber::setPixelDecimation(int pixelDecimation)
{
if(_pixelDecimation != pixelDecimation)
if(Grabber::setPixelDecimation(pixelDecimation))
{
_pixelDecimation = pixelDecimation;
updateScreenDimensions(true);
return true;
}

return false;
}

void X11Grabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
if(_x11Display != nullptr) updateScreenDimensions(true); // segfault on init
if(_x11Display != nullptr)
{
updateScreenDimensions(true); // segfault on init
}
}

bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message, long int * /*result*/)
@@ -332,3 +387,78 @@ bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message,

return false;
}

QJsonObject X11Grabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());

QJsonObject inputsDiscovered;
if ( open() )
{
inputsDiscovered["device"] = "x11";
inputsDiscovered["device_name"] = "X11";
inputsDiscovered["type"] = "screen";

QJsonArray video_inputs;

if (_x11Display != nullptr)
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };

// Iterate through all X screens
for (int i = 0; i < XScreenCount(_x11Display); ++i)
{
_window = DefaultRootWindow(_x11Display);

const Status status = XGetWindowAttributes(_x11Display, _window, &_windowAttr);
if (status == 0)
{
Debug(_log, "Failed to obtain window attributes");
}
else
{
QJsonObject in;

QString displayName;
char* name;
if ( XFetchName(_x11Display, _window, &name) > 0 )
{
displayName = name;
}
else {
displayName = QString("Display:%1").arg(i);
}

in["name"] = displayName;
in["inputIdx"] = i;

QJsonArray formats;
QJsonArray resolutionArray;
QJsonObject format;
QJsonObject resolution;

resolution["width"] = _windowAttr.width;
resolution["height"] = _windowAttr.height;
resolution["fps"] = fps;

resolutionArray.append(resolution);

format["resolutions"] = resolutionArray;
formats.append(format);

in["formats"] = formats;
video_inputs.append(in);
}
}

if ( !video_inputs.isEmpty() )
{
inputsDiscovered["video_inputs"] = video_inputs;
}
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());

return inputsDiscovered;
}

@@ -19,7 +19,7 @@ void X11Wrapper::action()
if (! _init )
{
_init = true;
if ( ! _grabber.Setup() )
if ( ! _grabber.setupDisplay() )
{
stop();
}

@@ -22,7 +22,7 @@ void check_error(xcb_generic_error_t * error)
// Requests with void response type
template<class Request, class ...Args>
typename std::enable_if<std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value, void>::type
query(xcb_connection_t * connection, Args&& ...args)
static query(xcb_connection_t * connection, Args&& ...args)
{
auto cookie = Request::RequestFunction(connection, std::forward<Args>(args)...);

@@ -33,9 +33,8 @@ template<class Request, class ...Args>

// Requests with non-void response type
template<class Request, class ...Args>
typename std::enable_if<!std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value,
std::unique_ptr<typename Request::ResponseType, decltype(&free)>>::type
query(xcb_connection_t * connection, Args&& ...args)
typename std::enable_if<!std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value, std::unique_ptr<typename Request::ResponseType, decltype(&free)>>::type
static query(xcb_connection_t * connection, Args&& ...args)
{
auto cookie = Request::RequestFunction(connection, std::forward<Args>(args)...);

@@ -21,6 +21,14 @@ struct GetGeometry
static constexpr auto ReplyFunction = xcb_get_geometry_reply;
};

struct GetProperty
{
typedef xcb_get_property_reply_t ResponseType;

static constexpr auto RequestFunction = xcb_get_property;
static constexpr auto ReplyFunction = xcb_get_property_reply;
};

struct ShmQueryVersion
{
typedef xcb_shm_query_version_reply_t ResponseType;
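The new GetProperty descriptor plugs into the query<> helper shown in the previous hunk; XcbGrabber::discover() later in this commit uses it to read a root window's WM_NAME. A usage sketch along those lines (it assumes the query<> template and GetProperty struct from these headers are in scope; the helper function itself is illustrative):

// Usage sketch for the GetProperty descriptor via the query<> helper above.
#include <xcb/xcb.h>
#include <QString>

static QString rootWindowName(xcb_connection_t * connection, xcb_window_t root)
{
	// Mirrors the call made in XcbGrabber::discover() in this commit.
	auto property = query<GetProperty>(connection, 0, root, XCB_ATOM_WM_NAME, XCB_ATOM_STRING, 0, 0);
	if (property != nullptr && xcb_get_property_value_length(property.get()) > 0)
	{
		// The property value is not guaranteed to be NUL-terminated; bound the copy by its length.
		return QString::fromLatin1(static_cast<const char *>(xcb_get_property_value(property.get())),
		                           xcb_get_property_value_length(property.get()));
	}
	return QString();
}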
@@ -14,6 +14,11 @@

#include <memory>

// Constants
namespace {
const bool verbose = false;
} //End of constants

#define DOUBLE_TO_FIXED(d) ((xcb_render_fixed_t) ((d) * 65536))

XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation)
@@ -36,6 +41,7 @@ XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom,
, _XcbRandRAvailable{}
, _XcbShmAvailable{}
, _XcbShmPixmapAvailable{}
, _isWayland (false)
, _logger{}
, _shmData{}
, _XcbRandREventBase{-1}
@@ -181,48 +187,77 @@ void XcbGrabber::setupShm()
}
}

bool XcbGrabber::Setup()
bool XcbGrabber::open()
{
int screen_num;
_connection = xcb_connect(nullptr, &screen_num);
bool rc = false;

int ret = xcb_connection_has_error(_connection);
if (ret != 0)
if (getenv("WAYLAND_DISPLAY") != nullptr)
{
Error(_logger, "Cannot open display, error %d", ret);
return false;
_isWayland = true;
}

const xcb_setup_t * setup = xcb_get_setup(_connection);
_screen = getScreen(setup, screen_num);

if (!_screen)
else
{
Error(_log, "Unable to open display, screen %d does not exist", screen_num);
_connection = xcb_connect(nullptr, &_screen_num);

if (getenv("DISPLAY"))
Error(_log, "%s", getenv("DISPLAY"));
int ret = xcb_connection_has_error(_connection);
if (ret != 0)
{
Debug(_logger, "Cannot open display, error %d", ret);
}
else
Error(_log, "DISPLAY environment variable not set");

freeResources();
return false;
{
const xcb_setup_t * setup = xcb_get_setup(_connection);
_screen = getScreen(setup, _screen_num);
if ( _screen != nullptr)
{
rc = true;
}
}
}

setupRandr();
setupRender();
setupShm();
return rc;
}

Info(_log, QString("XcbRandR=[%1] XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]")
.arg(_XcbRandRAvailable ? "available" : "unavailable")
.arg(_XcbRenderAvailable ? "available" : "unavailable")
.arg(_XcbShmAvailable ? "available" : "unavailable")
.arg(_XcbShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());
bool XcbGrabber::setupDisplay()
{
bool result = false;

bool result = (updateScreenDimensions(true) >= 0);
ErrorIf(!result, _log, "XCB Grabber start failed");
setEnabled(result);
if ( ! open() )
{
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
else
{
if (getenv("DISPLAY") != nullptr)
{
Error(_log, "Unable to open display [%s], screen %d does not exist", getenv("DISPLAY"), _screen_num);
}
else
{
Error(_log, "DISPLAY environment variable not set");
}
freeResources();
}
}
else
{
setupRandr();
setupRender();
setupShm();

Info(_log, QString("XcbRandR=[%1] XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]")
.arg(_XcbRandRAvailable ? "available" : "unavailable")
.arg(_XcbRenderAvailable ? "available" : "unavailable")
.arg(_XcbShmAvailable ? "available" : "unavailable")
.arg(_XcbShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());

result = (updateScreenDimensions(true) >= 0);
ErrorIf(!result, _log, "XCB Grabber start failed");
setEnabled(result);
}
return result;
}

@@ -394,13 +429,15 @@ void XcbGrabber::setVideoMode(VideoMode mode)
updateScreenDimensions(true);
}

void XcbGrabber::setPixelDecimation(int pixelDecimation)
bool XcbGrabber::setPixelDecimation(int pixelDecimation)
{
if(_pixelDecimation != pixelDecimation)
if(Grabber::setPixelDecimation(pixelDecimation))
{
_pixelDecimation = pixelDecimation;
updateScreenDimensions(true);
return true;
}

return false;
}

void XcbGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
@@ -459,3 +496,89 @@ xcb_render_pictformat_t XcbGrabber::findFormatForVisual(xcb_visualid_t visual) c
}
return {};
}

QJsonObject XcbGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());

QJsonObject inputsDiscovered;
if ( open() )
{
inputsDiscovered["device"] = "xcb";
inputsDiscovered["device_name"] = "XCB";
inputsDiscovered["type"] = "screen";

QJsonArray video_inputs;

if (_connection != nullptr && _screen != nullptr )
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };

const xcb_setup_t * setup = xcb_get_setup(_connection);

xcb_screen_iterator_t it = xcb_setup_roots_iterator(setup);
xcb_screen_t * screen = nullptr;

int i = 0;
// Iterate through all X screens
for (; it.rem > 0; xcb_screen_next(&it))
{
screen = it.data;

auto geometry = query<GetGeometry>(_connection, screen->root);
if (geometry == nullptr)
{
Debug(_log, "Failed to obtain screen geometry for screen [%d]", i);
}
else
{
QJsonObject in;

QString displayName;
auto property = query<GetProperty>(_connection, 0, screen->root, XCB_ATOM_WM_NAME, XCB_ATOM_STRING, 0, 0);
if ( property != nullptr )
{
if ( xcb_get_property_value_length(property.get()) > 0 )
{
displayName = (char *) xcb_get_property_value(property.get());
}
}

if (displayName.isEmpty())
{
displayName = QString("Display:%1").arg(i);
}

in["name"] = displayName;
in["inputIdx"] = i;

QJsonArray formats;
QJsonArray resolutionArray;
QJsonObject format;
QJsonObject resolution;

resolution["width"] = geometry->width;
resolution["height"] = geometry->height;
resolution["fps"] = fps;

resolutionArray.append(resolution);

format["resolutions"] = resolutionArray;
formats.append(format);

in["formats"] = formats;
video_inputs.append(in);
}
++i;
}

if ( !video_inputs.isEmpty() )
{
inputsDiscovered["video_inputs"] = video_inputs;
}
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());

return inputsDiscovered;
}

@@ -19,7 +19,7 @@ void XcbWrapper::action()
if (! _init )
{
_init = true;
if ( ! _grabber.Setup() )
if ( ! _grabber.setupDisplay() )
{
stop();
}
