V4L2/MF grabbers: split device identity into path and name, move MJPEG decoding into EncoderThreadManager worker threads, and replace the HAVE_JPEG_DECODER guard with HAVE_TURBO_JPEG

This commit is contained in:
Paulchen Panther
2021-05-16 15:47:25 +02:00
parent bf32557a8a
commit cfb34e6bfc
19 changed files with 426 additions and 537 deletions

View File

@@ -21,6 +21,7 @@
#include <QDirIterator>
#include <QFileInfo>
#include <QDebug>
#include "grabber/V4L2Grabber.h"
@@ -41,7 +42,7 @@ static PixelFormat GetPixelFormat(const unsigned int format)
if (format == V4L2_PIX_FMT_UYVY) return PixelFormat::UYVY;
if (format == V4L2_PIX_FMT_NV12) return PixelFormat::NV12;
if (format == V4L2_PIX_FMT_YUV420) return PixelFormat::I420;
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG;
#endif
return PixelFormat::NO_CHANGE;
@@ -49,14 +50,16 @@ static PixelFormat GetPixelFormat(const unsigned int format)
V4L2Grabber::V4L2Grabber()
: Grabber("V4L2")
, _currentDevicePath("none")
, _currentDeviceName("none")
, _newDeviceName("none")
, _threadManager(nullptr)
, _ioMethod(IO_METHOD_MMAP)
, _fileDescriptor(-1)
, _pixelFormat(PixelFormat::NO_CHANGE)
, _pixelFormatConfig(PixelFormat::NO_CHANGE)
, _lineLength(-1)
, _frameByteSize(-1)
, _currentFrame(0)
, _noSignalCounterThreshold(40)
, _noSignalThresholdColor(ColorRgb{0,0,0})
, _cecDetectionEnabled(true)
@@ -77,6 +80,18 @@ V4L2Grabber::V4L2Grabber()
V4L2Grabber::~V4L2Grabber()
{
uninit();
if (_threadManager)
delete _threadManager;
_threadManager = nullptr;
}
bool V4L2Grabber::prepare()
{
if (!_threadManager)
_threadManager = new EncoderThreadManager(this);
return (_threadManager != nullptr);
}
void V4L2Grabber::uninit()
@@ -84,7 +99,7 @@ void V4L2Grabber::uninit()
// stop if the grabber was not stopped
if (_initialized)
{
Debug(_log,"Uninit grabber: %s", QSTRING_CSTR(_newDeviceName));
Debug(_log,"Uninit grabber: %s (%s)", QSTRING_CSTR(_currentDeviceName), QSTRING_CSTR(_currentDevicePath));
stop();
}
}
@@ -93,39 +108,53 @@ bool V4L2Grabber::init()
{
if (!_initialized)
{
bool noDeviceName = _currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName.compare("auto", Qt::CaseInsensitive) == 0;
bool noDevicePath = _currentDevicePath.compare("none", Qt::CaseInsensitive) == 0 || _currentDevicePath.compare("auto", Qt::CaseInsensitive) == 0;
// enumerate the video capture devices on the user's system
enumVideoCaptureDevices();
if(noDeviceName)
if(noDevicePath)
return false;
if(!_deviceProperties.contains(_currentDeviceName))
if(!_deviceProperties.contains(_currentDevicePath))
{
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
_currentDeviceName = "none";
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDevicePath));
_currentDevicePath = "none";
return false;
}
bool valid = false;
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == _currentDeviceName && valid == false)
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == _input && valid == false)
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height && valid == false)
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
if(*fps == _fps && valid == false)
valid = true;
if (!valid)
else
{
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
_currentDeviceName = "none";
return false;
if (HyperionIManager::getInstance())
if (_currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName != _deviceProperties.value(_currentDevicePath).name)
return false;
Debug(_log, "Set device (path) to: %s (%s)", QSTRING_CSTR(_deviceProperties.value(_currentDevicePath).name), QSTRING_CSTR(_currentDevicePath));
}
// correct invalid parameters
QMap<int, DeviceProperties::InputProperties>::const_iterator inputIterator = _deviceProperties.value(_currentDevicePath).inputs.find(_input);
if (inputIterator == _deviceProperties.value(_currentDevicePath).inputs.end())
setInput(_deviceProperties.value(_currentDevicePath).inputs.firstKey());
QMultiMap<PixelFormat, DeviceProperties::InputProperties::EncodingProperties>::const_iterator encodingIterator = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.find(_pixelFormat);
if (encodingIterator == _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end())
setEncoding(pixelFormatToString(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.firstKey()));
bool validDimensions = false;
for (auto enc = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.begin(); enc != _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end(); enc++)
if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height)
{
validDimensions = true;
break;
}
if (!validDimensions)
setWidthHeight(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().width, _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().height);
QList<int> availableframerates = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates;
if (!availableframerates.isEmpty() && !availableframerates.contains(_fps))
setFramerate(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates.first());
bool opened = false;
try
{
@@ -151,163 +180,16 @@ bool V4L2Grabber::init()
return _initialized;
}
// Enumerate all V4L2 video-capture devices listed under /sys/class/video4linux/
// and rebuild _deviceProperties, keyed by device path (e.g. "/dev/video0").
// For every device this records its inputs, the analog video standards,
// the decodable pixel formats, and per-format frame sizes and framerates.
// Devices that cannot be opened or queried are silently skipped.
void V4L2Grabber::enumVideoCaptureDevices()
{
QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags);
_deviceProperties.clear();
while(it.hasNext())
{
//_v4lDevices
QString dev = it.next();
if (it.fileName().startsWith("video"))
{
QString devName = "/dev/" + it.fileName();
// Non-blocking open so a busy or broken device cannot stall enumeration.
int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0);
if (fd < 0)
{
// NOTE(review): assumes throw_errno_exception only logs the error;
// if it actually throws, the 'continue' below is unreachable — confirm.
throw_errno_exception("Cannot open '" + devName + "'");
continue;
}
struct v4l2_capability cap;
CLEAR(cap);
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
{
throw_errno_exception("'" + devName + "' is no V4L2 device");
close(fd);
continue;
}
// Skip metadata-only nodes (e.g. the second /dev/video node uvcvideo creates per camera).
if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture.
{
close(fd);
continue;
}
// get the current settings (also used as fallback dimensions further below)
struct v4l2_format fmt;
CLEAR(fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0)
{
close(fd);
continue;
}
V4L2Grabber::DeviceProperties properties;
// collect available device inputs (index & name)
struct v4l2_input input;
CLEAR(input);
input.index = 0;
while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0)
{
V4L2Grabber::DeviceProperties::InputProperties inputProperties;
inputProperties.inputName = QString((char*)input.name);
// Enumerate video standards; only standards this input advertises (input.std) are kept.
struct v4l2_standard standard;
CLEAR(standard);
standard.index = 0;
while (xioctl(fd, VIDIOC_ENUMSTD, &standard) >= 0)
{
if (standard.id & input.std)
{
if (standard.id == V4L2_STD_PAL)
inputProperties.standards.append(VideoStandard::PAL);
else if (standard.id == V4L2_STD_NTSC)
inputProperties.standards.append(VideoStandard::NTSC);
else if (standard.id == V4L2_STD_SECAM)
inputProperties.standards.append(VideoStandard::SECAM);
}
standard.index++;
}
// Enumerate pixel formats; only formats the grabber can decode
// (GetPixelFormat() != NO_CHANGE) are recorded.
struct v4l2_fmtdesc desc;
CLEAR(desc);
desc.index = 0;
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0)
{
PixelFormat encodingFormat = GetPixelFormat(desc.pixelformat);
if (encodingFormat != PixelFormat::NO_CHANGE)
{
V4L2Grabber::DeviceProperties::InputProperties::EncodingProperties encodingProperties;
// Enumerate frame sizes and frame rates for this pixel format.
struct v4l2_frmsizeenum frmsizeenum;
CLEAR(frmsizeenum);
frmsizeenum.index = 0;
frmsizeenum.pixel_format = desc.pixelformat;
while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0)
{
switch (frmsizeenum.type)
{
case V4L2_FRMSIZE_TYPE_DISCRETE:
{
encodingProperties.width = frmsizeenum.discrete.width;
encodingProperties.height = frmsizeenum.discrete.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height);
}
break;
case V4L2_FRMSIZE_TYPE_CONTINUOUS:
case V4L2_FRMSIZE_TYPE_STEPWISE: // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE
break;
}
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
frmsizeenum.index++;
}
// Failsafe: In case VIDIOC_ENUM_FRAMESIZES fails, insert current height, width and fps.
// NOTE(review): after a successful enumeration the re-issued ioctl (index now past
// the end) also returns -1, so this fallback entry is appended to the QMultiMap
// every time — verify whether that duplicate is intended.
if (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1)
{
encodingProperties.width = fmt.fmt.pix.width;
encodingProperties.height = fmt.fmt.pix.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, encodingProperties.width, encodingProperties.height);
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
}
}
desc.index++;
}
properties.inputs.insert(input.index, inputProperties);
input.index++;
}
if (close(fd) < 0) continue;
// Read the human-readable device name from sysfs, if present.
// NOTE(review): the return value of open() is ignored; if open fails,
// readLine() yields an empty name — consider checking it.
QFile devNameFile(dev+"/name");
if (devNameFile.exists())
{
devNameFile.open(QFile::ReadOnly);
devName = devNameFile.readLine();
devName = devName.trimmed();
properties.name = devName;
devNameFile.close();
}
_deviceProperties.insert("/dev/"+it.fileName(), properties);
}
}
}
bool V4L2Grabber::start()
{
try
{
if (init() && _streamNotifier != nullptr && !_streamNotifier->isEnabled())
{
connect(_threadManager, &EncoderThreadManager::newFrame, this, &V4L2Grabber::newThreadFrame);
_threadManager->start();
DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount);
_streamNotifier->setEnabled(true);
start_capturing();
Info(_log, "Started");
@@ -326,6 +208,8 @@ void V4L2Grabber::stop()
{
if (_streamNotifier != nullptr && _streamNotifier->isEnabled())
{
_threadManager->stop();
disconnect(_threadManager, nullptr, nullptr, nullptr);
stop_capturing();
_streamNotifier->setEnabled(false);
uninit_device();
@@ -340,23 +224,23 @@ bool V4L2Grabber::open_device()
{
struct stat st;
if (-1 == stat(QSTRING_CSTR(_currentDeviceName), &st))
if (-1 == stat(QSTRING_CSTR(_currentDevicePath), &st))
{
throw_errno_exception("Cannot identify '" + _currentDeviceName + "'");
throw_errno_exception("Cannot identify '" + _currentDevicePath + "'");
return false;
}
if (!S_ISCHR(st.st_mode))
{
throw_exception("'" + _currentDeviceName + "' is no device");
throw_exception("'" + _currentDevicePath + "' is no device");
return false;
}
_fileDescriptor = open(QSTRING_CSTR(_currentDeviceName), O_RDWR | O_NONBLOCK, 0);
_fileDescriptor = open(QSTRING_CSTR(_currentDevicePath), O_RDWR | O_NONBLOCK, 0);
if (-1 == _fileDescriptor)
{
throw_errno_exception("Cannot open '" + _currentDeviceName + "'");
throw_errno_exception("Cannot open '" + _currentDevicePath + "'");
return false;
}
@@ -409,7 +293,7 @@ void V4L2Grabber::init_mmap()
{
if (EINVAL == errno)
{
throw_exception("'" + _currentDeviceName + "' does not support memory mapping");
throw_exception("'" + _currentDevicePath + "' does not support memory mapping");
return;
}
else
@@ -421,7 +305,7 @@ void V4L2Grabber::init_mmap()
if (req.count < 2)
{
throw_exception("Insufficient buffer memory on " + _currentDeviceName);
throw_exception("Insufficient buffer memory on " + _currentDevicePath);
return;
}
@@ -473,7 +357,7 @@ void V4L2Grabber::init_userp(unsigned int buffer_size)
{
if (EINVAL == errno)
{
throw_exception("'" + _currentDeviceName + "' does not support user pointer");
throw_exception("'" + _currentDevicePath + "' does not support user pointer");
return;
}
else
@@ -507,7 +391,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (EINVAL == errno)
{
throw_exception("'" + _currentDeviceName + "' is no V4L2 device");
throw_exception("'" + _currentDevicePath + "' is no V4L2 device");
return;
}
else
@@ -519,7 +403,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
throw_exception("'" + _currentDeviceName + "' is no video capture device");
throw_exception("'" + _currentDevicePath + "' is no video capture device");
return;
}
@@ -529,7 +413,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (!(cap.capabilities & V4L2_CAP_READWRITE))
{
throw_exception("'" + _currentDeviceName + "' does not support read i/o");
throw_exception("'" + _currentDevicePath + "' does not support read i/o");
return;
}
}
@@ -540,14 +424,13 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (!(cap.capabilities & V4L2_CAP_STREAMING))
{
throw_exception("'" + _currentDeviceName + "' does not support streaming i/o");
throw_exception("'" + _currentDevicePath + "' does not support streaming i/o");
return;
}
}
break;
}
/* Select video input, video standard and tune here. */
struct v4l2_cropcap cropcap;
@@ -571,10 +454,6 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
}
}
}
else
{
/* Errors ignored. */
}
// set input if needed and supported
struct v4l2_input v4l2Input;
@@ -677,7 +556,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
break;
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
case PixelFormat::MJPEG:
{
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
@@ -787,7 +666,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
}
break;
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
case V4L2_PIX_FMT_MJPEG:
{
_pixelFormat = PixelFormat::MJPEG;
@@ -797,7 +676,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
#endif
default:
#ifdef HAVE_JPEG_DECODER
#ifdef HAVE_TURBO_JPEG
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12, I420 and MJPEG are supported");
#else
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12 and I420 are supported");
@@ -1036,7 +915,7 @@ int V4L2Grabber::read_frame()
rc = process_image((void *)buf.m.userptr, buf.bytesused);
if (-1 == xioctl(VIDIOC_QBUF, &buf))
if (!rc && -1 == xioctl(VIDIOC_QBUF, &buf))
{
throw_errno_exception("VIDIOC_QBUF");
return 0;
@@ -1056,8 +935,13 @@ int V4L2Grabber::read_frame()
bool V4L2Grabber::process_image(const void *p, int size)
{
// We do want a new frame...
#ifdef HAVE_JPEG_DECODER
int processFrameIndex = _currentFrame++, result = false;
// frame skipping
if ((processFrameIndex % (_fpsSoftwareDecimation + 1) != 0) && (_fpsSoftwareDecimation > 0))
return result;
#ifdef HAVE_TURBO_JPEG
if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG)
#else
if (size < _frameByteSize)
@@ -1065,151 +949,28 @@ bool V4L2Grabber::process_image(const void *p, int size)
{
Error(_log, "Frame too small: %d != %d", size, _frameByteSize);
}
else
else if (_threadManager != nullptr)
{
process_image(reinterpret_cast<const uint8_t *>(p), size);
return true;
for (int i = 0; i < _threadManager->_threadCount; i++)
{
if (!_threadManager->_threads[i]->isBusy())
{
_threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)p, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation);
_threadManager->_threads[i]->process();
result = true;
break;
}
}
}
return false;
return result;
}
void V4L2Grabber::process_image(const uint8_t * data, int size)
void V4L2Grabber::newThreadFrame(Image<ColorRgb> image)
{
if (_cecDetectionEnabled && _cecStandbyActivated)
return;
Image<ColorRgb> image(_width, _height);
/* ----------------------------------------------------------
* ----------- BEGIN of JPEG decoder related code -----------
* --------------------------------------------------------*/
#ifdef HAVE_JPEG_DECODER
if (_pixelFormat == PixelFormat::MJPEG)
{
#endif
#ifdef HAVE_JPEG
_decompress = new jpeg_decompress_struct;
_error = new errorManager;
_decompress->err = jpeg_std_error(&_error->pub);
_error->pub.error_exit = &errorHandler;
_error->pub.output_message = &outputHandler;
jpeg_create_decompress(_decompress);
if (setjmp(_error->setjmp_buffer))
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
jpeg_mem_src(_decompress, const_cast<uint8_t*>(data), size);
if (jpeg_read_header(_decompress, (bool) TRUE) != JPEG_HEADER_OK)
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
_decompress->scale_num = 1;
_decompress->scale_denom = 1;
_decompress->out_color_space = JCS_RGB;
_decompress->dct_method = JDCT_IFAST;
if (!jpeg_start_decompress(_decompress))
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
if (_decompress->out_color_components != 3)
{
jpeg_abort_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
return;
}
QImage imageFrame = QImage(_decompress->output_width, _decompress->output_height, QImage::Format_RGB888);
int y = 0;
while (_decompress->output_scanline < _decompress->output_height)
{
uchar *row = imageFrame.scanLine(_decompress->output_scanline);
jpeg_read_scanlines(_decompress, &row, 1);
y++;
}
jpeg_finish_decompress(_decompress);
jpeg_destroy_decompress(_decompress);
delete _decompress;
delete _error;
if (imageFrame.isNull() || _error->pub.num_warnings > 0)
return;
#endif
#ifdef HAVE_TURBO_JPEG
_decompress = tjInitDecompress();
if (_decompress == nullptr)
return;
if (tjDecompressHeader2(_decompress, const_cast<uint8_t*>(data), size, &_width, &_height, &_subsamp) != 0)
{
tjDestroy(_decompress);
return;
}
QImage imageFrame = QImage(_width, _height, QImage::Format_RGB888);
if (tjDecompress2(_decompress, const_cast<uint8_t*>(data), size, imageFrame.bits(), _width, 0, _height, TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE) != 0)
{
tjDestroy(_decompress);
return;
}
tjDestroy(_decompress);
if (imageFrame.isNull())
return;
#endif
#ifdef HAVE_JPEG_DECODER
QRect rect(_cropLeft, _cropTop, imageFrame.width() - _cropLeft - _cropRight, imageFrame.height() - _cropTop - _cropBottom);
imageFrame = imageFrame.copy(rect);
imageFrame = imageFrame.scaled(imageFrame.width() / _pixelDecimation, imageFrame.height() / _pixelDecimation,Qt::KeepAspectRatio);
if ((image.width() != unsigned(imageFrame.width())) || (image.height() != unsigned(imageFrame.height())))
image.resize(imageFrame.width(), imageFrame.height());
for (int y=0; y<imageFrame.height(); ++y)
for (int x=0; x<imageFrame.width(); ++x)
{
QColor inPixel(imageFrame.pixel(x,y));
ColorRgb & outPixel = image(x,y);
outPixel.red = inPixel.red();
outPixel.green = inPixel.green();
outPixel.blue = inPixel.blue();
}
}
else
#endif
/* ----------------------------------------------------------
* ------------ END of JPEG decoder related code ------------
* --------------------------------------------------------*/
_imageResampler.processImage(data, _width, _height, _lineLength, _pixelFormat, image);
if (_signalDetectionEnabled)
{
// check signal (only in center of the resulting image, because some grabbers have noise values along the borders)
@@ -1223,19 +984,12 @@ void V4L2Grabber::process_image(const uint8_t * data, int size)
unsigned xMax = image.width() * _x_frac_max;
unsigned yMax = image.height() * _y_frac_max;
for (unsigned x = xOffset; noSignal && x < xMax; ++x)
{
for (unsigned y = yOffset; noSignal && y < yMax; ++y)
{
noSignal &= (ColorRgb&)image(x, y) <= _noSignalThresholdColor;
}
}
if (noSignal)
{
++_noSignalCounter;
}
else
{
if (_noSignalCounter >= _noSignalCounterThreshold)
@@ -1258,9 +1012,7 @@ void V4L2Grabber::process_image(const uint8_t * data, int size)
}
}
else
{
emit newFrame(image);
}
}
int V4L2Grabber::xioctl(int request, void *arg)
@@ -1289,14 +1041,12 @@ int V4L2Grabber::xioctl(int fileDescriptor, int request, void *arg)
return r;
}
void V4L2Grabber::setDevice(const QString& device)
void V4L2Grabber::setDevice(const QString& devicePath, const QString& deviceName)
{
if (_currentDeviceName != device)
if (_currentDevicePath != devicePath || _currentDeviceName != deviceName)
{
(_initialized)
? _newDeviceName = device
: _currentDeviceName = _newDeviceName = device;
_currentDevicePath = devicePath;
_currentDeviceName = deviceName;
_reload = true;
}
}
@@ -1391,14 +1141,17 @@ void V4L2Grabber::setCecDetectionEnable(bool enable)
bool V4L2Grabber::reload(bool force)
{
if (_streamNotifier != nullptr && _streamNotifier->isEnabled() && (_reload || force))
if (_reload || force)
{
Info(_log,"Reloading V4L2 Grabber");
uninit();
_pixelFormat = _pixelFormatConfig;
_newDeviceName = _currentDeviceName;
if (_streamNotifier != nullptr && _streamNotifier->isEnabled())
{
Info(_log,"Reloading V4L2 Grabber");
uninit();
_pixelFormat = _pixelFormatConfig;
}
_reload = false;
return start();
return prepare() && start();
}
return false;
@@ -1537,6 +1290,157 @@ QJsonArray V4L2Grabber::discover(const QJsonObject& params)
return inputsDiscovered;
}
// Enumerate all V4L2 video-capture devices listed under /sys/class/video4linux/
// and rebuild _deviceProperties, keyed by device path (e.g. "/dev/video0").
// For every device this records its inputs, the analog video standards,
// the decodable pixel formats, and per-format frame sizes and framerates.
// Devices that cannot be opened or queried are silently skipped.
void V4L2Grabber::enumVideoCaptureDevices()
{
QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags);
_deviceProperties.clear();
while(it.hasNext())
{
//_v4lDevices
QString dev = it.next();
if (it.fileName().startsWith("video"))
{
QString devName = "/dev/" + it.fileName();
// Non-blocking open so a busy or broken device cannot stall enumeration.
int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0);
if (fd < 0)
{
throw_errno_exception("Cannot open '" + devName + "'");
continue;
}
struct v4l2_capability cap;
CLEAR(cap);
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
{
throw_errno_exception("'" + devName + "' is no V4L2 device");
close(fd);
continue;
}
// Skip metadata-only nodes (e.g. the second /dev/video node uvcvideo creates per camera).
if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture.
{
close(fd);
continue;
}
// get the current settings (also used as fallback dimensions further below)
struct v4l2_format fmt;
CLEAR(fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0)
{
close(fd);
continue;
}
V4L2Grabber::DeviceProperties properties;
// collect available device inputs (index & name)
struct v4l2_input input;
CLEAR(input);
input.index = 0;
while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0)
{
V4L2Grabber::DeviceProperties::InputProperties inputProperties;
inputProperties.inputName = QString((char*)input.name);
// Enumerate video standards; only standards this input advertises (input.std) are kept.
struct v4l2_standard standard;
CLEAR(standard);
standard.index = 0;
while (xioctl(fd, VIDIOC_ENUMSTD, &standard) >= 0)
{
if (standard.id & input.std)
{
if (standard.id == V4L2_STD_PAL)
inputProperties.standards.append(VideoStandard::PAL);
else if (standard.id == V4L2_STD_NTSC)
inputProperties.standards.append(VideoStandard::NTSC);
else if (standard.id == V4L2_STD_SECAM)
inputProperties.standards.append(VideoStandard::SECAM);
}
standard.index++;
}
// Enumerate pixel formats; only formats the grabber can decode
// (GetPixelFormat() != NO_CHANGE) are recorded.
struct v4l2_fmtdesc desc;
CLEAR(desc);
desc.index = 0;
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0)
{
PixelFormat encodingFormat = GetPixelFormat(desc.pixelformat);
if (encodingFormat != PixelFormat::NO_CHANGE)
{
V4L2Grabber::DeviceProperties::InputProperties::EncodingProperties encodingProperties;
// Enumerate frame sizes and frame rates for this pixel format.
struct v4l2_frmsizeenum frmsizeenum;
CLEAR(frmsizeenum);
frmsizeenum.index = 0;
frmsizeenum.pixel_format = desc.pixelformat;
while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0)
{
switch (frmsizeenum.type)
{
case V4L2_FRMSIZE_TYPE_DISCRETE:
{
encodingProperties.width = frmsizeenum.discrete.width;
encodingProperties.height = frmsizeenum.discrete.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height);
}
break;
case V4L2_FRMSIZE_TYPE_CONTINUOUS:
case V4L2_FRMSIZE_TYPE_STEPWISE: // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE
break;
}
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
frmsizeenum.index++;
}
// Failsafe: if the driver does not support VIDIOC_ENUM_FRAMESIZES at all
// (the loop above never advanced, i.e. index is still 0), fall back to the
// device's currently configured width/height and fps.
// The index check prevents the previous behaviour of appending a duplicate
// fallback entry even after a successful enumeration, because re-issuing
// the ioctl past the last index also returns -1.
if (frmsizeenum.index == 0 && xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1)
{
encodingProperties.width = fmt.fmt.pix.width;
encodingProperties.height = fmt.fmt.pix.height;
enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, encodingProperties.width, encodingProperties.height);
inputProperties.encodingFormats.insert(encodingFormat, encodingProperties);
}
}
desc.index++;
}
properties.inputs.insert(input.index, inputProperties);
input.index++;
}
if (close(fd) < 0) continue;
// Read the human-readable device name from sysfs, if present.
// Checking open()'s return value directly replaces the racy exists()+open()
// pattern and avoids storing an empty name when the read fails.
QFile devNameFile(dev+"/name");
if (devNameFile.open(QFile::ReadOnly))
{
devName = devNameFile.readLine();
devName = devName.trimmed();
properties.name = devName;
devNameFile.close();
}
_deviceProperties.insert("/dev/"+it.fileName(), properties);
}
}
}
void V4L2Grabber::enumFrameIntervals(QList<int> &framerates, int fileDescriptor, int pixelformat, int width, int height)
{
// collect available frame rates