V4L2/MF changes

This commit is contained in:
Paulchen Panther
2021-05-16 15:47:25 +02:00
parent bf32557a8a
commit cfb34e6bfc
19 changed files with 426 additions and 537 deletions

View File

@@ -1,16 +1,28 @@
# Common cmake definition for external video grabber
# Add Turbo JPEG library
if (ENABLE_V4L2 OR ENABLE_MF)
find_package(TurboJPEG)
if (TURBOJPEG_FOUND)
add_definitions(-DHAVE_TURBO_JPEG)
message( STATUS "Using Turbo JPEG library: ${TurboJPEG_LIBRARY}")
include_directories(${TurboJPEG_INCLUDE_DIRS})
else ()
message( STATUS "Turbo JPEG library not found, MJPEG camera format won't work.")
endif ()
endif()
# Define the wrapper/header/source locations and collect them
SET(WRAPPER_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video)
SET(HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
if (ENABLE_MF)
project(mf-grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/mediafoundation)
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
elseif(ENABLE_V4L2)
project(v4l2-grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/v4l2)
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
endif()
add_library(${PROJECT_NAME} ${SOURCES})
@@ -18,6 +30,4 @@ target_link_libraries(${PROJECT_NAME} hyperion ${QT_LIBRARIES})
if(TURBOJPEG_FOUND)
target_link_libraries(${PROJECT_NAME} ${TurboJPEG_LIBRARY})
elseif (JPEG_FOUND)
target_link_libraries(${PROJECT_NAME} ${JPEG_LIBRARY})
endif()

View File

@@ -1,31 +1,39 @@
#include "grabber/MFThread.h"
#include "grabber/EncoderThread.h"
MFThread::MFThread()
EncoderThread::EncoderThread()
: _localData(nullptr)
, _scalingFactorsCount(0)
, _scalingFactors(nullptr)
, _imageResampler()
#ifdef HAVE_TURBO_JPEG
, _transform(nullptr)
, _decompress(nullptr)
, _scalingFactors(nullptr)
, _xform(nullptr)
, _imageResampler()
#endif
{}
MFThread::~MFThread()
EncoderThread::~EncoderThread()
{
#ifdef HAVE_TURBO_JPEG
if (_transform)
tjDestroy(_transform);
if (_decompress)
tjDestroy(_decompress);
#endif
if (_localData)
#ifdef HAVE_TURBO_JPEG
tjFree(_localData);
#else
delete[] _localData;
#endif
}
void MFThread::setup(
void EncoderThread::setup(
PixelFormat pixelFormat, uint8_t* sharedData,
int size, int width, int height, int lineLength,
int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
VideoMode videoMode, FlipMode flipMode, int pixelDecimation)
{
_lineLength = lineLength;
@@ -33,7 +41,6 @@ void MFThread::setup(
_size = (unsigned long) size;
_width = width;
_height = height;
_subsamp = subsamp;
_cropLeft = cropLeft;
_cropTop = cropTop;
_cropBottom = cropBottom;
@@ -47,23 +54,32 @@ void MFThread::setup(
_imageResampler.setHorizontalPixelDecimation(_pixelDecimation);
_imageResampler.setVerticalPixelDecimation(_pixelDecimation);
#ifdef HAVE_TURBO_JPEG
if (_localData)
tjFree(_localData);
_localData = (uint8_t*)tjAlloc(size + 1);
#else
delete[] _localData;
_localData = nullptr;
_localData = new uint8_t(size + 1);
#endif
memcpy(_localData, sharedData, size);
}
void MFThread::process()
void EncoderThread::process()
{
_busy = true;
if (_width > 0 && _height > 0)
{
#ifdef HAVE_TURBO_JPEG
if (_pixelFormat == PixelFormat::MJPEG)
{
processImageMjpeg();
}
else
#endif
{
if (_pixelFormat == PixelFormat::BGR24)
{
@@ -78,14 +94,27 @@ void MFThread::process()
}
Image<ColorRgb> image = Image<ColorRgb>();
_imageResampler.processImage(_localData, _width, _height, _lineLength, PixelFormat::BGR24, image);
_imageResampler.processImage(
_localData,
_width,
_height,
_lineLength,
#if defined(ENABLE_V4L2)
_pixelFormat,
#else
PixelFormat::BGR24,
#endif
image
);
emit newFrame(image);
}
}
_busy = false;
}
void MFThread::processImageMjpeg()
#ifdef HAVE_TURBO_JPEG
void EncoderThread::processImageMjpeg()
{
if (!_transform && _flipMode != FlipMode::NO_CHANGE)
{
@@ -111,7 +140,8 @@ void MFThread::processImageMjpeg()
_scalingFactors = tjGetScalingFactors(&_scalingFactorsCount);
}
if (tjDecompressHeader2(_decompress, _localData, _size, &_width, &_height, &_subsamp) != 0)
int subsamp = 0;
if (tjDecompressHeader2(_decompress, _localData, _size, &_width, &_height, &subsamp) != 0)
{
if (tjGetErrorCode(_decompress) == TJERR_FATAL)
return;
@@ -176,3 +206,4 @@ void MFThread::processImageMjpeg()
emit newFrame(destImage);
}
}
#endif

View File

@@ -2,7 +2,7 @@
#include <grabber/VideoWrapper.h>
// qt
// qt includes
#include <QTimer>
VideoWrapper::VideoWrapper()
@@ -53,13 +53,20 @@ void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument
// extract settings
const QJsonObject& obj = config.object();
// global grabber state
GLOBAL_GRABBER_V4L_ENABLE = obj["enable"].toBool(false);
// set global grabber state
setV4lGrabberState(obj["enable"].toBool(false));
if (GLOBAL_GRABBER_V4L_ENABLE)
if (getV4lGrabberState())
{
// Device
_grabber.setDevice(obj["device"].toString("auto"));
#if defined(ENABLE_MF)
// Device path
_grabber.setDevice(obj["device"].toString("none"));
#endif
#if defined(ENABLE_V4L2)
// Device path and name
_grabber.setDevice(obj["device"].toString("none"), obj["available_devices"].toString("none"));
#endif
// Device input
_grabber.setInput(obj["input"].toInt(0));
@@ -118,8 +125,10 @@ void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument
obj["noSignalCounterThreshold"].toInt(50));
// Reload the Grabber if any settings have been changed that require it
_grabber.reload();
_grabber.reload(getV4lGrabberState());
}
else
stop();
}
}

View File

@@ -62,7 +62,7 @@ bool MFGrabber::prepare()
_sourceReaderCB = new SourceReaderCB(this);
if (!_threadManager)
_threadManager = new MFThreadManager(this);
_threadManager = new EncoderThreadManager(this);
return (_sourceReaderCB != nullptr && _threadManager != nullptr);
}
@@ -76,7 +76,7 @@ bool MFGrabber::start()
{
if (init())
{
connect(_threadManager, &MFThreadManager::newFrame, this, &MFGrabber::newThreadFrame);
connect(_threadManager, &EncoderThreadManager::newFrame, this, &MFGrabber::newThreadFrame);
_threadManager->start();
DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount);
@@ -518,7 +518,7 @@ void MFGrabber::process_image(const void *frameImageBuffer, int size)
{
if (!_threadManager->_threads[i]->isBusy())
{
_threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)frameImageBuffer, size, _width, _height, _lineLength, _subsamp, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation);
_threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)frameImageBuffer, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation);
_threadManager->_threads[i]->process();
break;
}

View File

@@ -21,6 +21,7 @@
#include <QDirIterator>
#include <QFileInfo>
#include <QDebug>
#include "grabber/V4L2Grabber.h"
@@ -41,7 +42,7 @@ static PixelFormat GetPixelFormat(const unsigned int format)
if (format == V4L2_PIX_FMT_UYVY) return PixelFormat::UYVY;
if (format == V4L2_PIX_FMT_NV12) return PixelFormat::NV12;
if (format == V4L2_PIX_FMT_YUV420) return PixelFormat::I420;
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG;
#endif
return PixelFormat::NO_CHANGE;
@@ -49,14 +50,16 @@ static PixelFormat GetPixelFormat(const unsigned int format)
V4L2Grabber::V4L2Grabber()
: Grabber("V4L2")
, _currentDevicePath("none")
, _currentDeviceName("none")
, _newDeviceName("none")
, _threadManager(nullptr)
, _ioMethod(IO_METHOD_MMAP)
, _fileDescriptor(-1)
, _pixelFormat(PixelFormat::NO_CHANGE)
, _pixelFormatConfig(PixelFormat::NO_CHANGE)
, _lineLength(-1)
, _frameByteSize(-1)
, _currentFrame(0)
, _noSignalCounterThreshold(40)
, _noSignalThresholdColor(ColorRgb{0,0,0})
, _cecDetectionEnabled(true)
@@ -77,6 +80,18 @@ V4L2Grabber::V4L2Grabber()
V4L2Grabber::~V4L2Grabber()
{
uninit();
if (_threadManager)
delete _threadManager;
_threadManager = nullptr;
}
bool V4L2Grabber::prepare()
{
if (!_threadManager)
_threadManager = new EncoderThreadManager(this);
return (_threadManager != nullptr);
}
void V4L2Grabber::uninit()
@@ -84,7 +99,7 @@ void V4L2Grabber::uninit()
// stop if the grabber was not stopped
if (_initialized)
{
Debug(_log,"Uninit grabber: %s", QSTRING_CSTR(_newDeviceName));
Debug(_log,"Uninit grabber: %s (%s)", QSTRING_CSTR(_currentDeviceName), QSTRING_CSTR(_currentDevicePath));
stop();
}
}
@@ -93,39 +108,53 @@ bool V4L2Grabber::init()
{
if (!_initialized)
{
bool noDeviceName = _currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName.compare("auto", Qt::CaseInsensitive) == 0;
bool noDevicePath = _currentDevicePath.compare("none", Qt::CaseInsensitive) == 0 || _currentDevicePath.compare("auto", Qt::CaseInsensitive) == 0;
// enumerate the video capture devices on the user's system
enumVideoCaptureDevices();
if(noDeviceName)
if(noDevicePath)
return false;
if(!_deviceProperties.contains(_currentDeviceName))
if(!_deviceProperties.contains(_currentDevicePath))
{
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
_currentDeviceName = "none";
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDevicePath));
_currentDevicePath = "none";
return false;
}
bool valid = false;
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == _currentDeviceName && valid == false)
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == _input && valid == false)
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height && valid == false)
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
if(*fps == _fps && valid == false)
valid = true;
if (!valid)
else
{
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
_currentDeviceName = "none";
return false;
if (HyperionIManager::getInstance())
if (_currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName != _deviceProperties.value(_currentDevicePath).name)
return false;
Debug(_log, "Set device (path) to: %s (%s)", QSTRING_CSTR(_deviceProperties.value(_currentDevicePath).name), QSTRING_CSTR(_currentDevicePath));
}
// correct invalid parameters
QMap<int, DeviceProperties::InputProperties>::const_iterator inputIterator = _deviceProperties.value(_currentDevicePath).inputs.find(_input);
if (inputIterator == _deviceProperties.value(_currentDevicePath).inputs.end())
setInput(_deviceProperties.value(_currentDevicePath).inputs.firstKey());
QMultiMap<PixelFormat, DeviceProperties::InputProperties::EncodingProperties>::const_iterator encodingIterator = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.find(_pixelFormat);
if (encodingIterator == _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end())
setEncoding(pixelFormatToString(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.firstKey()));
bool validDimensions = false;
for (auto enc = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.begin(); enc != _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end(); enc++)
if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height)
{
validDimensions = true;
break;
}
if (!validDimensions)
setWidthHeight(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().width, _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().height);
QList<int> availableframerates = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates;
if (!availableframerates.isEmpty() && !availableframerates.contains(_fps))
setFramerate(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates.first());
bool opened = false;
try
{
@@ -151,163 +180,16 @@ bool V4L2Grabber::init()
return _initialized;
}
/// Scan /sys/class/video4linux/ for "video*" nodes and rebuild _deviceProperties,
/// keyed by device path ("/dev/videoN"). For each usable capture device this
/// records its inputs and, per input, the supported video standards, pixel
/// formats, frame sizes and frame rates, plus the human-readable device name
/// read from sysfs (<node>/name).
/// NOTE(review): throw_errno_exception is followed by `continue`, so it
/// presumably reports the error instead of aborting — confirm.
void V4L2Grabber::enumVideoCaptureDevices()
{
QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags);
_deviceProperties.clear();
while(it.hasNext())
{
//_v4lDevices
QString dev = it.next();
if (it.fileName().startsWith("video"))
{
QString devName = "/dev/" + it.fileName();
// Open non-blocking; a node we cannot open is skipped, not fatal.
int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0);
if (fd < 0)
{
throw_errno_exception("Cannot open '" + devName + "'");
continue;
}
struct v4l2_capability cap;
CLEAR(cap);
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
{
throw_errno_exception("'" + devName + "' is no V4L2 device");
close(fd);
continue;
}
if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture.
{
close(fd);
continue;
}
// get the current settings
struct v4l2_format fmt;
CLEAR(fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0)
{
close(fd);
continue;
}
V4L2Grabber::DeviceProperties properties;
// collect available device inputs (index & name)
struct v4l2_input input;
CLEAR(input);
input.index = 0;
while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0)
{
V4L2Grabber::DeviceProperties::InputProperties inputProperties;
inputProperties.inputName = QString((char*)input.name);
// Enumerate video standards
struct v4l2_standard standard;
CLEAR(standard);
standard.index = 0;
while (xioctl(fd, VIDIOC_ENUMSTD, &standard) >= 0)
{
// Only keep standards the current input actually supports (input.std mask).
if (standard.id & input.std)
{
if (standard.id == V4L2_STD_PAL)
inputProperties.standards.append(VideoStandard::PAL);
else if (standard.id == V4L2_STD_NTSC)
inputProperties.standards.append(VideoStandard::NTSC);
else if (standard.id == V4L2_STD_SECAM)
inputProperties.standards.append(VideoStandard::SECAM);
}
standard.index++;
}
// Enumerate pixel formats
struct v4l2_fmtdesc desc;
CLEAR(desc);
desc.index = 0;
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0)
{
// GetPixelFormat maps the fourcc to the internal enum; unsupported
// formats come back as NO_CHANGE and are ignored.
PixelFormat encodingFormat = GetPixelFormat(desc.pixelformat);
if (encodingFormat != PixelFormat::NO_CHANGE)
{
V4L2Grabber::DeviceProperties::InputProperties::EncodingProperties encodingProperties;
// Enumerate frame sizes and frame rates
struct v4l2_frmsizeenum frmsizeenum;
CLEAR(frmsizeenum);
frmsizeenum.index = 0;
frmsizeenum.pixel_format = desc.pixelformat;
while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0)
{
switch (frmsizeenum.type)
{
case V4L2_FRMSIZE_TYPE_DISCRETE:
{
encodingProperties.width = frmsizeenum.discrete.width;
encodingProperties.height = frmsizeenum.discrete.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height);
}
break;
case V4L2_FRMSIZE_TYPE_CONTINUOUS:
case V4L2_FRMSIZE_TYPE_STEPWISE: // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE
break;
}
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
frmsizeenum.index++;
}
// Failsafe: In case VIDIOC_ENUM_FRAMESIZES fails, insert current height, width and fps.
if (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1)
{
encodingProperties.width = fmt.fmt.pix.width;
encodingProperties.height = fmt.fmt.pix.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, encodingProperties.width, encodingProperties.height);
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
}
}
desc.index++;
}
properties.inputs.insert(input.index, inputProperties);
input.index++;
}
// If close() fails the device is not registered at all.
if (close(fd) < 0) continue;
// Read the friendly device name from sysfs (<node>/name), if present.
QFile devNameFile(dev+"/name");
if (devNameFile.exists())
{
devNameFile.open(QFile::ReadOnly);
devName = devNameFile.readLine();
devName = devName.trimmed();
properties.name = devName;
devNameFile.close();
}
_deviceProperties.insert("/dev/"+it.fileName(), properties);
}
}
}
bool V4L2Grabber::start()
{
try
{
if (init() && _streamNotifier != nullptr && !_streamNotifier->isEnabled())
{
connect(_threadManager, &EncoderThreadManager::newFrame, this, &V4L2Grabber::newThreadFrame);
_threadManager->start();
DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount);
_streamNotifier->setEnabled(true);
start_capturing();
Info(_log, "Started");
@@ -326,6 +208,8 @@ void V4L2Grabber::stop()
{
if (_streamNotifier != nullptr && _streamNotifier->isEnabled())
{
_threadManager->stop();
disconnect(_threadManager, nullptr, nullptr, nullptr);
stop_capturing();
_streamNotifier->setEnabled(false);
uninit_device();
@@ -340,23 +224,23 @@ bool V4L2Grabber::open_device()
{
struct stat st;
if (-1 == stat(QSTRING_CSTR(_currentDeviceName), &st))
if (-1 == stat(QSTRING_CSTR(_currentDevicePath), &st))
{
throw_errno_exception("Cannot identify '" + _currentDeviceName + "'");
throw_errno_exception("Cannot identify '" + _currentDevicePath + "'");
return false;
}
if (!S_ISCHR(st.st_mode))
{
throw_exception("'" + _currentDeviceName + "' is no device");
throw_exception("'" + _currentDevicePath + "' is no device");
return false;
}
_fileDescriptor = open(QSTRING_CSTR(_currentDeviceName), O_RDWR | O_NONBLOCK, 0);
_fileDescriptor = open(QSTRING_CSTR(_currentDevicePath), O_RDWR | O_NONBLOCK, 0);
if (-1 == _fileDescriptor)
{
throw_errno_exception("Cannot open '" + _currentDeviceName + "'");
throw_errno_exception("Cannot open '" + _currentDevicePath + "'");
return false;
}
@@ -409,7 +293,7 @@ void V4L2Grabber::init_mmap()
{
if (EINVAL == errno)
{
throw_exception("'" + _currentDeviceName + "' does not support memory mapping");
throw_exception("'" + _currentDevicePath + "' does not support memory mapping");
return;
}
else
@@ -421,7 +305,7 @@ void V4L2Grabber::init_mmap()
if (req.count < 2)
{
throw_exception("Insufficient buffer memory on " + _currentDeviceName);
throw_exception("Insufficient buffer memory on " + _currentDevicePath);
return;
}
@@ -473,7 +357,7 @@ void V4L2Grabber::init_userp(unsigned int buffer_size)
{
if (EINVAL == errno)
{
throw_exception("'" + _currentDeviceName + "' does not support user pointer");
throw_exception("'" + _currentDevicePath + "' does not support user pointer");
return;
}
else
@@ -507,7 +391,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (EINVAL == errno)
{
throw_exception("'" + _currentDeviceName + "' is no V4L2 device");
throw_exception("'" + _currentDevicePath + "' is no V4L2 device");
return;
}
else
@@ -519,7 +403,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
throw_exception("'" + _currentDeviceName + "' is no video capture device");
throw_exception("'" + _currentDevicePath + "' is no video capture device");
return;
}
@@ -529,7 +413,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (!(cap.capabilities & V4L2_CAP_READWRITE))
{
throw_exception("'" + _currentDeviceName + "' does not support read i/o");
throw_exception("'" + _currentDevicePath + "' does not support read i/o");
return;
}
}
@@ -540,14 +424,13 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (!(cap.capabilities & V4L2_CAP_STREAMING))
{
throw_exception("'" + _currentDeviceName + "' does not support streaming i/o");
throw_exception("'" + _currentDevicePath + "' does not support streaming i/o");
return;
}
}
break;
}
/* Select video input, video standard and tune here. */
struct v4l2_cropcap cropcap;
@@ -571,10 +454,6 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
}
}
}
else
{
/* Errors ignored. */
}
// set input if needed and supported
struct v4l2_input v4l2Input;
@@ -677,7 +556,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
break;
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
case PixelFormat::MJPEG:
{
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
@@ -787,7 +666,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
}
break;
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
case V4L2_PIX_FMT_MJPEG:
{
_pixelFormat = PixelFormat::MJPEG;
@@ -797,7 +676,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
#endif
default:
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12, I420 and MJPEG are supported");
#else
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12 and I420 are supported");
@@ -1036,7 +915,7 @@ int V4L2Grabber::read_frame()
rc = process_image((void *)buf.m.userptr, buf.bytesused);
if (-1 == xioctl(VIDIOC_QBUF, &buf))
if (!rc && -1 == xioctl(VIDIOC_QBUF, &buf))
{
throw_errno_exception("VIDIOC_QBUF");
return 0;
@@ -1056,8 +935,13 @@ int V4L2Grabber::read_frame()
bool V4L2Grabber::process_image(const void *p, int size)
{
// We do want a new frame...
#ifdef HAVE_JPEG_DECODER
int processFrameIndex = _currentFrame++, result = false;
// frame skipping
if ((processFrameIndex % (_fpsSoftwareDecimation + 1) != 0) && (_fpsSoftwareDecimation > 0))
return result;
#ifdef HAVE_TURBO_JPEG
if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG)
#else
if (size < _frameByteSize)
@@ -1065,151 +949,28 @@ bool V4L2Grabber::process_image(const void *p, int size)
{
Error(_log, "Frame too small: %d != %d", size, _frameByteSize);
}
else
else if (_threadManager != nullptr)
{
process_image(reinterpret_cast<const uint8_t *>(p), size);
return true;
for (int i = 0; i < _threadManager->_threadCount; i++)
{
if (!_threadManager->_threads[i]->isBusy())
{
_threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)p, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation);
_threadManager->_threads[i]->process();
result = true;
break;
}
}
}
return false;
return result;
}
void V4L2Grabber::process_image(const uint8_t * data, int size)
void V4L2Grabber::newThreadFrame(Image<ColorRgb> image)
{
if (_cecDetectionEnabled && _cecStandbyActivated)
return;
Image<ColorRgb> image(_width, _height);
/* ----------------------------------------------------------
* ----------- BEGIN of JPEG decoder related code -----------
* --------------------------------------------------------*/
#ifdef HAVE_JPEG_DECODER
if (_pixelFormat == PixelFormat::MJPEG)
{
#endif
#ifdef HAVE_JPEG
_decompress = new jpeg_decompress_struct;
_error = new errorManager;
_decompress->err = jpeg_std_error(&_error->pub);
_error->pub.error_exit = &errorHandler;
_error->pub.output_message = &outputHandler;
jpeg_create_decompress(_decompress);
if (setjmp(_error->setjmp_buffer))
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
jpeg_mem_src(_decompress, const_cast<uint8_t*>(data), size);
if (jpeg_read_header(_decompress, (bool) TRUE) != JPEG_HEADER_OK)
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
_decompress->scale_num = 1;
_decompress->scale_denom = 1;
_decompress->out_color_space = JCS_RGB;
_decompress->dct_method = JDCT_IFAST;
if (!jpeg_start_decompress(_decompress))
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
if (_decompress->out_color_components != 3)
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
QImage imageFrame = QImage(_decompress->output_width, _decompress->output_height, QImage::Format_RGB888);
int y = 0;
while (_decompress->output_scanline < _decompress->output_height)
{
uchar *row = imageFrame.scanLine(_decompress->output_scanline);
jpeg_read_scanlines(_decompress, &row, 1);
y++;
}
jpeg_finish_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
if (imageFrame.isNull() || _error->pub.num_warnings > 0)
return;
#endif
#ifdef HAVE_TURBO_JPEG
_decompress = tjInitDecompress();
if (_decompress == nullptr)
return;
if (tjDecompressHeader2(_decompress, const_cast<uint8_t*>(data), size, &_width, &_height, &_subsamp) != 0)
{
tjDestroy(_decompress);
return;
}
QImage imageFrame = QImage(_width, _height, QImage::Format_RGB888);
if (tjDecompress2(_decompress, const_cast<uint8_t*>(data), size, imageFrame.bits(), _width, 0, _height, TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE) != 0)
{
tjDestroy(_decompress);
return;
}
tjDestroy(_decompress);
if (imageFrame.isNull())
return;
#endif
#ifdef HAVE_JPEG_DECODER
QRect rect(_cropLeft, _cropTop, imageFrame.width() - _cropLeft - _cropRight, imageFrame.height() - _cropTop - _cropBottom);
imageFrame = imageFrame.copy(rect);
imageFrame = imageFrame.scaled(imageFrame.width() / _pixelDecimation, imageFrame.height() / _pixelDecimation,Qt::KeepAspectRatio);
if ((image.width() != unsigned(imageFrame.width())) || (image.height() != unsigned(imageFrame.height())))
image.resize(imageFrame.width(), imageFrame.height());
for (int y=0; y<imageFrame.height(); ++y)
for (int x=0; x<imageFrame.width(); ++x)
{
QColor inPixel(imageFrame.pixel(x,y));
ColorRgb & outPixel = image(x,y);
outPixel.red = inPixel.red();
outPixel.green = inPixel.green();
outPixel.blue = inPixel.blue();
}
}
else
#endif
/* ----------------------------------------------------------
* ------------ END of JPEG decoder related code ------------
* --------------------------------------------------------*/
_imageResampler.processImage(data, _width, _height, _lineLength, _pixelFormat, image);
if (_signalDetectionEnabled)
{
// check signal (only in center of the resulting image, because some grabbers have noise values along the borders)
@@ -1223,19 +984,12 @@ void V4L2Grabber::process_image(const uint8_t * data, int size)
unsigned xMax = image.width() * _x_frac_max;
unsigned yMax = image.height() * _y_frac_max;
for (unsigned x = xOffset; noSignal && x < xMax; ++x)
{
for (unsigned y = yOffset; noSignal && y < yMax; ++y)
{
noSignal &= (ColorRgb&)image(x, y) <= _noSignalThresholdColor;
}
}
if (noSignal)
{
++_noSignalCounter;
}
else
{
if (_noSignalCounter >= _noSignalCounterThreshold)
@@ -1258,9 +1012,7 @@ void V4L2Grabber::process_image(const uint8_t * data, int size)
}
}
else
{
emit newFrame(image);
}
}
int V4L2Grabber::xioctl(int request, void *arg)
@@ -1289,14 +1041,12 @@ int V4L2Grabber::xioctl(int fileDescriptor, int request, void *arg)
return r;
}
void V4L2Grabber::setDevice(const QString& device)
void V4L2Grabber::setDevice(const QString& devicePath, const QString& deviceName)
{
if (_currentDeviceName != device)
if (_currentDevicePath != devicePath || _currentDeviceName != deviceName)
{
(_initialized)
? _newDeviceName = device
: _currentDeviceName = _newDeviceName = device;
_currentDevicePath = devicePath;
_currentDeviceName = deviceName;
_reload = true;
}
}
@@ -1391,14 +1141,17 @@ void V4L2Grabber::setCecDetectionEnable(bool enable)
bool V4L2Grabber::reload(bool force)
{
if (_streamNotifier != nullptr && _streamNotifier->isEnabled() && (_reload || force))
if (_reload || force)
{
Info(_log,"Reloading V4L2 Grabber");
uninit();
_pixelFormat = _pixelFormatConfig;
_newDeviceName = _currentDeviceName;
if (_streamNotifier != nullptr && _streamNotifier->isEnabled())
{
Info(_log,"Reloading V4L2 Grabber");
uninit();
_pixelFormat = _pixelFormatConfig;
}
_reload = false;
return start();
return prepare() && start();
}
return false;
@@ -1537,6 +1290,157 @@ QJsonArray V4L2Grabber::discover(const QJsonObject& params)
return inputsDiscovered;
}
/// Scan /sys/class/video4linux/ for "video*" nodes and rebuild _deviceProperties,
/// keyed by device path ("/dev/videoN"). For each usable capture device this
/// records its inputs and, per input, the supported video standards, pixel
/// formats, frame sizes and frame rates, plus the human-readable device name
/// read from sysfs (<node>/name).
/// NOTE(review): throw_errno_exception is followed by `continue`, so it
/// presumably reports the error instead of aborting — confirm.
void V4L2Grabber::enumVideoCaptureDevices()
{
QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags);
_deviceProperties.clear();
while(it.hasNext())
{
//_v4lDevices
QString dev = it.next();
if (it.fileName().startsWith("video"))
{
QString devName = "/dev/" + it.fileName();
// Open non-blocking; a node we cannot open is skipped, not fatal.
int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0);
if (fd < 0)
{
throw_errno_exception("Cannot open '" + devName + "'");
continue;
}
struct v4l2_capability cap;
CLEAR(cap);
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
{
throw_errno_exception("'" + devName + "' is no V4L2 device");
close(fd);
continue;
}
if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture.
{
close(fd);
continue;
}
// get the current settings
struct v4l2_format fmt;
CLEAR(fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0)
{
close(fd);
continue;
}
V4L2Grabber::DeviceProperties properties;
// collect available device inputs (index & name)
struct v4l2_input input;
CLEAR(input);
input.index = 0;
while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0)
{
V4L2Grabber::DeviceProperties::InputProperties inputProperties;
inputProperties.inputName = QString((char*)input.name);
// Enumerate video standards
struct v4l2_standard standard;
CLEAR(standard);
standard.index = 0;
while (xioctl(fd, VIDIOC_ENUMSTD, &standard) >= 0)
{
// Only keep standards the current input actually supports (input.std mask).
if (standard.id & input.std)
{
if (standard.id == V4L2_STD_PAL)
inputProperties.standards.append(VideoStandard::PAL);
else if (standard.id == V4L2_STD_NTSC)
inputProperties.standards.append(VideoStandard::NTSC);
else if (standard.id == V4L2_STD_SECAM)
inputProperties.standards.append(VideoStandard::SECAM);
}
standard.index++;
}
// Enumerate pixel formats
struct v4l2_fmtdesc desc;
CLEAR(desc);
desc.index = 0;
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0)
{
// GetPixelFormat maps the fourcc to the internal enum; unsupported
// formats come back as NO_CHANGE and are ignored.
PixelFormat encodingFormat = GetPixelFormat(desc.pixelformat);
if (encodingFormat != PixelFormat::NO_CHANGE)
{
V4L2Grabber::DeviceProperties::InputProperties::EncodingProperties encodingProperties;
// Enumerate frame sizes and frame rates
struct v4l2_frmsizeenum frmsizeenum;
CLEAR(frmsizeenum);
frmsizeenum.index = 0;
frmsizeenum.pixel_format = desc.pixelformat;
while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0)
{
switch (frmsizeenum.type)
{
case V4L2_FRMSIZE_TYPE_DISCRETE:
{
encodingProperties.width = frmsizeenum.discrete.width;
encodingProperties.height = frmsizeenum.discrete.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height);
}
break;
case V4L2_FRMSIZE_TYPE_CONTINUOUS:
case V4L2_FRMSIZE_TYPE_STEPWISE: // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE
break;
}
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
frmsizeenum.index++;
}
// Failsafe: In case VIDIOC_ENUM_FRAMESIZES fails, insert current height, width and fps.
if (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1)
{
encodingProperties.width = fmt.fmt.pix.width;
encodingProperties.height = fmt.fmt.pix.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, encodingProperties.width, encodingProperties.height);
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
}
}
desc.index++;
}
properties.inputs.insert(input.index, inputProperties);
input.index++;
}
// If close() fails the device is not registered at all.
if (close(fd) < 0) continue;
// Read the friendly device name from sysfs (<node>/name), if present.
QFile devNameFile(dev+"/name");
if (devNameFile.exists())
{
devNameFile.open(QFile::ReadOnly);
devName = devNameFile.readLine();
devName = devName.trimmed();
properties.name = devName;
devNameFile.close();
}
_deviceProperties.insert("/dev/"+it.fileName(), properties);
}
}
}
void V4L2Grabber::enumFrameIntervals(QList<int> &framerates, int fileDescriptor, int pixelformat, int width, int height)
{
// collect available frame rates

View File

@@ -170,10 +170,10 @@ void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocume
// extract settings
const QJsonObject& obj = config.object();
// global grabber state
GLOBAL_GRABBER_SYS_ENABLE = obj["enable"].toBool(false);
// set global grabber state
setSysGrabberState(obj["enable"].toBool(false));
if (GLOBAL_GRABBER_SYS_ENABLE)
if (getSysGrabberState())
{
// width/height
_ggrabber->setWidthHeight(obj["width"].toInt(96), obj["height"].toInt(96));
@@ -197,6 +197,8 @@ void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocume
// eval new update time
updateTimer(1000/obj["fps"].toInt(10));
}
else
stop();
}
}
@@ -209,7 +211,7 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp
else
GRABBER_SYS_CLIENTS.remove(hyperionInd);
if(GRABBER_SYS_CLIENTS.empty() || !GLOBAL_GRABBER_SYS_ENABLE)
if(GRABBER_SYS_CLIENTS.empty() || !getSysGrabberState())
stop();
else
start();
@@ -221,7 +223,7 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp
else
GRABBER_V4L_CLIENTS.remove(hyperionInd);
if(GRABBER_V4L_CLIENTS.empty() || !GLOBAL_GRABBER_V4L_ENABLE)
if(GRABBER_V4L_CLIENTS.empty() || !getV4lGrabberState())
stop();
else
start();
@@ -231,6 +233,6 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp
void GrabberWrapper::tryStart()
{
// verify start condition
if(!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty() && GLOBAL_GRABBER_SYS_ENABLE)
if(!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty() && getSysGrabberState())
start();
}

View File

@@ -56,7 +56,7 @@ Hyperion::Hyperion(quint8 instance, bool readonlyMode)
, _hwLedCount()
, _ledGridSize(hyperion::getLedLayoutGridSize(getSetting(settings::LEDS).array()))
, _BGEffectHandler(nullptr)
,_captureCont(nullptr)
, _captureCont(nullptr)
, _ledBuffer(_ledString.leds().size(), ColorRgb::BLACK)
, _boblightServer(nullptr)
, _readOnlyMode(readonlyMode)

View File

@@ -156,7 +156,7 @@ void ImageResampler::processImage(const uint8_t * data, int width, int height, i
break;
}
break;
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
case PixelFormat::MJPEG:
break;
#endif