diff --git a/CMakeLists.txt b/CMakeLists.txt index 7cde982c..c10df978 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -400,31 +400,6 @@ find_package(libusb-1.0 REQUIRED) find_package(Threads REQUIRED) add_definitions(${QT_DEFINITIONS}) -# Add JPEG library -if (ENABLE_V4L2 OR ENABLE_MF) - # Turbo JPEG - find_package(TurboJPEG) - if (TURBOJPEG_FOUND) - add_definitions(-DHAVE_TURBO_JPEG) - message( STATUS "Using Turbo JPEG library: ${TurboJPEG_LIBRARY}") - include_directories(${TurboJPEG_INCLUDE_DIRS}) - elseif(ENABLE_V4L2) - # System JPEG - find_package(JPEG) - if (JPEG_FOUND) - add_definitions(-DHAVE_JPEG) - message( STATUS "Using system JPEG library: ${JPEG_LIBRARIES}") - include_directories(${JPEG_INCLUDE_DIR}) - else() - message( STATUS "JPEG library not found, MJPEG camera format won't work in V4L2 grabber.") - endif() - endif () - - if (TURBOJPEG_FOUND OR JPEG_FOUND) - add_definitions(-DHAVE_JPEG_DECODER) - endif() -endif() - if(APPLE) set(CMAKE_EXE_LINKER_FLAGS "-framework CoreGraphics") endif() diff --git a/CompileHowto.md b/CompileHowto.md index 978a65a0..4e474f8f 100644 --- a/CompileHowto.md +++ b/CompileHowto.md @@ -58,7 +58,7 @@ cd $HYPERION_HOME ```console sudo apt-get update -sudo apt-get install git cmake build-essential qtbase5-dev libqt5serialport5-dev libqt5sql5-sqlite libqt5svg5-dev libqt5x11extras5-dev libusb-1.0-0-dev python3-dev libcec-dev libxcb-image0-dev libxcb-util0-dev libxcb-shm0-dev libxcb-render0-dev libxcb-randr0-dev libxrandr-dev libxrender-dev libavahi-core-dev libavahi-compat-libdnssd-dev libjpeg-dev libturbojpeg0-dev libssl-dev zlib1g-dev +sudo apt-get install git cmake build-essential qtbase5-dev libqt5serialport5-dev libqt5sql5-sqlite libqt5svg5-dev libqt5x11extras5-dev libusb-1.0-0-dev python3-dev libcec-dev libxcb-image0-dev libxcb-util0-dev libxcb-shm0-dev libxcb-render0-dev libxcb-randr0-dev libxrandr-dev libxrender-dev libavahi-core-dev libavahi-compat-libdnssd-dev libturbojpeg0-dev libssl-dev zlib1g-dev ``` **on RPI you need the videocore IV headers** @@ -83,7 +83,7 @@ See [AUR](https://aur.archlinux.org/packages/?O=0&SeB=nd&K=hyperion&outdated=&SB The following dependencies are needed to build hyperion.ng on fedora. ```console sudo dnf -y groupinstall "Development Tools" -sudo dnf install python3-devel qt-devel qt5-qtbase-devel qt5-qtserialport-devel libjpeg-devel xrandr xcb-util-image-devel qt5-qtx11extras-devel turbojpeg-devel libusb-devel avahi-libs avahi-compat-libdns_sd-devel xcb-util-devel dbus-devel openssl-devel fedora-packager rpmdevtools gcc libcec-devel +sudo dnf install python3-devel qt-devel qt5-qtbase-devel qt5-qtserialport-devel xrandr xcb-util-image-devel qt5-qtx11extras-devel turbojpeg-devel libusb-devel avahi-libs avahi-compat-libdns_sd-devel xcb-util-devel dbus-devel openssl-devel fedora-packager rpmdevtools gcc libcec-devel ``` After installing the dependencies, you can continue with the compile instructions later on this page (the more detailed way..). 
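The hunks above drop the optional system-libjpeg fallback: MJPEG camera frames are now decoded exclusively through TurboJPEG, and the library detection moves from the top-level CMakeLists.txt into libsrc/grabber/video/CMakeLists.txt later in this diff. For orientation, the sketch below shows the kind of decode path that the HAVE_TURBO_JPEG define enables; the function and buffer names are illustrative and not taken from the patch.

```cpp
#ifdef HAVE_TURBO_JPEG
#include <turbojpeg.h>

#include <cstdio>
#include <vector>

// Decode a single MJPEG/JPEG frame into packed RGB. Names here are
// illustrative; in this patch the actual decoding happens in
// EncoderThread::processImageMjpeg().
static bool decodeMjpegFrame(const unsigned char* jpegData, unsigned long jpegSize,
                             std::vector<unsigned char>& rgbOut, int& width, int& height)
{
	tjhandle decompressor = tjInitDecompress();
	if (decompressor == nullptr)
		return false;

	int subsamp = 0;
	if (tjDecompressHeader2(decompressor, const_cast<unsigned char*>(jpegData), jpegSize,
	                        &width, &height, &subsamp) != 0)
	{
		std::fprintf(stderr, "tjDecompressHeader2: %s\n", tjGetErrorStr());
		tjDestroy(decompressor);
		return false;
	}

	rgbOut.resize(static_cast<size_t>(width) * static_cast<size_t>(height) * 3);

	const int rc = tjDecompress2(decompressor, const_cast<unsigned char*>(jpegData), jpegSize,
	                             rgbOut.data(), width, 0 /* pitch: width * 3 */, height,
	                             TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE);
	tjDestroy(decompressor);
	return rc == 0;
}
#endif // HAVE_TURBO_JPEG
```

tjDecompressHeader2() is also what EncoderThread::processImageMjpeg() uses further down in this diff to read the frame dimensions before selecting a scaling factor.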
diff --git a/bin/compile.sh b/bin/compile.sh old mode 100755 new mode 100644 index 7843d2b9..f8820dae --- a/bin/compile.sh +++ b/bin/compile.sh @@ -25,7 +25,6 @@ sudo apt-get install \ libavahi-core-dev \ libavahi-compat-libdnssd-dev \ libssl-dev \ - libjpeg-dev \ libqt5sql5-sqlite \ libqt5svg5-dev \ zlib1g-dev \ diff --git a/include/grabber/MFThread.h b/include/grabber/EncoderThread.h similarity index 65% rename from include/grabber/MFThread.h rename to include/grabber/EncoderThread.h index 108d328c..be88dd82 100644 --- a/include/grabber/MFThread.h +++ b/include/grabber/EncoderThread.h @@ -7,25 +7,26 @@ #include #include -// TurboJPEG decoder +// Determine the cmake options +#include + +// Turbo JPEG decoder #ifdef HAVE_TURBO_JPEG - #include - #include #include #endif /// Encoder thread for USB devices -class MFThread : public QObject +class EncoderThread : public QObject { Q_OBJECT public: - explicit MFThread(); - ~MFThread(); + explicit EncoderThread(); + ~EncoderThread(); void setup( PixelFormat pixelFormat, uint8_t* sharedData, int size, int width, int height, int lineLength, - int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, + unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, VideoMode videoMode, FlipMode flipMode, int pixelDecimation); void process(); @@ -37,15 +38,6 @@ signals: void newFrame(const Image& data); private: - void processImageMjpeg(); - -#ifdef HAVE_TURBO_JPEG - tjhandle _transform, - _decompress; - tjscalingfactor* _scalingFactors; - tjtransform* _xform; -#endif - PixelFormat _pixelFormat; uint8_t* _localData, *_flipBuffer; @@ -53,7 +45,6 @@ private: _width, _height, _lineLength, - _subsamp, _currentFrame, _pixelDecimation; unsigned long _size; @@ -63,6 +54,14 @@ private: _cropRight; FlipMode _flipMode; ImageResampler _imageResampler; + +#ifdef HAVE_TURBO_JPEG + tjhandle _transform, _decompress; + tjscalingfactor* _scalingFactors; + tjtransform* _xform; + + void processImageMjpeg(); +#endif }; template class Thread : public QThread @@ -83,36 +82,36 @@ public: wait(); } - MFThread* thread() const { return qobject_cast(_thread); } + EncoderThread* thread() const { return qobject_cast(_thread); } void setup( PixelFormat pixelFormat, uint8_t* sharedData, int size, int width, int height, int lineLength, - int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, + unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, VideoMode videoMode, FlipMode flipMode, int pixelDecimation) { - auto mfthread = qobject_cast(_thread); - if (mfthread != nullptr) - mfthread->setup(pixelFormat, sharedData, + auto encThread = qobject_cast(_thread); + if (encThread != nullptr) + encThread->setup(pixelFormat, sharedData, size, width, height, lineLength, - subsamp, cropLeft, cropTop, cropBottom, cropRight, + cropLeft, cropTop, cropBottom, cropRight, videoMode, flipMode, pixelDecimation); } bool isBusy() { - auto mfthread = qobject_cast(_thread); - if (mfthread != nullptr) - return mfthread->isBusy(); + auto encThread = qobject_cast(_thread); + if (encThread != nullptr) + return encThread->isBusy(); return true; } void process() { - auto mfthread = qobject_cast(_thread); - if (mfthread != nullptr) - mfthread->process(); + auto encThread = qobject_cast(_thread); + if (encThread != nullptr) + encThread->process(); } protected: @@ -123,24 +122,24 @@ protected: } }; -class MFThreadManager : public QObject +class EncoderThreadManager : public QObject { Q_OBJECT public: - 
explicit MFThreadManager(QObject *parent = nullptr) + explicit EncoderThreadManager(QObject *parent = nullptr) : QObject(parent) , _threadCount(qMax(QThread::idealThreadCount(), 1)) , _threads(nullptr) { - _threads = new Thread*[_threadCount]; + _threads = new Thread*[_threadCount]; for (int i = 0; i < _threadCount; i++) { - _threads[i] = new Thread(new MFThread, this); - _threads[i]->setObjectName("MFThread " + i); + _threads[i] = new Thread(new EncoderThread, this); + _threads[i]->setObjectName("Encoder " + i); } } - ~MFThreadManager() + ~EncoderThreadManager() { if (_threads != nullptr) { @@ -159,7 +158,7 @@ public: { if (_threads != nullptr) for (int i = 0; i < _threadCount; i++) - connect(_threads[i]->thread(), &MFThread::newFrame, this, &MFThreadManager::newFrame); + connect(_threads[i]->thread(), &EncoderThread::newFrame, this, &EncoderThreadManager::newFrame); } void stop() @@ -170,7 +169,7 @@ public: } int _threadCount; - Thread** _threads; + Thread** _threads; signals: void newFrame(const Image& data); diff --git a/include/grabber/MFGrabber.h b/include/grabber/MFGrabber.h index ebdedafc..68dc2851 100644 --- a/include/grabber/MFGrabber.h +++ b/include/grabber/MFGrabber.h @@ -21,12 +21,7 @@ #include // decoder thread includes -#include - -// TurboJPEG decoder -#ifdef HAVE_TURBO_JPEG - #include -#endif +#include /// Forward class declaration class SourceReaderCB; @@ -99,7 +94,7 @@ private: HRESULT _hr; IMFSourceReader* _sourceReader; SourceReaderCB* _sourceReaderCB; - MFThreadManager* _threadManager; + EncoderThreadManager* _threadManager; PixelFormat _pixelFormat, _pixelFormatConfig; int _lineLength, @@ -120,8 +115,4 @@ private: _y_frac_min, _x_frac_max, _y_frac_max; - -#ifdef HAVE_TURBO_JPEG - int _subsamp; -#endif }; diff --git a/include/grabber/V4L2Grabber.h b/include/grabber/V4L2Grabber.h index 56d310b8..cd05e801 100644 --- a/include/grabber/V4L2Grabber.h +++ b/include/grabber/V4L2Grabber.h @@ -17,29 +17,16 @@ #include #include -#include // Required to determine the cmake options +// decoder thread includes +#include + +// Determine the cmake options +#include #if defined(ENABLE_CEC) #include #endif -// general JPEG decoder includes -#ifdef HAVE_JPEG_DECODER - #include - #include -#endif - -// System JPEG decoder -#ifdef HAVE_JPEG - #include - #include -#endif - -// TurboJPEG decoder -#ifdef HAVE_TURBO_JPEG - #include -#endif - /// /// Capture class for V4L2 devices /// @@ -71,7 +58,7 @@ public: ~V4L2Grabber() override; int grabFrame(Image &); - void setDevice(const QString& device); + void setDevice(const QString& devicePath, const QString& deviceName); bool setInput(int input) override; bool setWidthHeight(int width, int height) override; void setEncoding(QString enc); @@ -94,9 +81,10 @@ public: QJsonArray discover(const QJsonObject& params); public slots: - bool prepare() { return true; } + bool prepare(); bool start(); void stop(); + void newThreadFrame(Image image); #if defined(ENABLE_CEC) void handleCecEvent(CECEvent event); @@ -110,7 +98,6 @@ private slots: int read_frame(); private: - void enumVideoCaptureDevices(); bool init(); void uninit(); bool open_device(); @@ -123,7 +110,6 @@ private: void start_capturing(); void stop_capturing(); bool process_image(const void *p, int size); - void process_image(const uint8_t *p, int size); int xioctl(int request, void *arg); int xioctl(int fileDescriptor, int request, void *arg); @@ -151,35 +137,9 @@ private: size_t length; }; -#ifdef HAVE_JPEG - struct errorManager - { - jpeg_error_mgr pub; - jmp_buf setjmp_buffer; - }; - - 
static void errorHandler(j_common_ptr cInfo) - { - errorManager* mgr = reinterpret_cast(cInfo->err); - longjmp(mgr->setjmp_buffer, 1); - } - - static void outputHandler(j_common_ptr cInfo) - { - // Suppress fprintf warnings. - } - - jpeg_decompress_struct* _decompress; - errorManager* _error; -#endif - -#ifdef HAVE_TURBO_JPEG - tjhandle _decompress = nullptr; - int _subsamp; -#endif - private: - QString _currentDeviceName, _newDeviceName; + QString _currentDevicePath, _currentDeviceName; + EncoderThreadManager* _threadManager; QMap _deviceProperties; io_method _ioMethod; @@ -187,10 +147,11 @@ private: std::vector _buffers; PixelFormat _pixelFormat, _pixelFormatConfig; - int _pixelDecimation; int _lineLength; int _frameByteSize; + QAtomicInt _currentFrame; + // signal detection int _noSignalCounterThreshold; ColorRgb _noSignalThresholdColor; @@ -206,5 +167,6 @@ private: bool _initialized, _reload; protected: + void enumVideoCaptureDevices(); void enumFrameIntervals(QList &framerates, int fileDescriptor, int pixelformat, int width, int height); }; diff --git a/include/hyperion/GrabberWrapper.h b/include/hyperion/GrabberWrapper.h index 9abe5daf..81679cc3 100644 --- a/include/hyperion/GrabberWrapper.h +++ b/include/hyperion/GrabberWrapper.h @@ -67,6 +67,11 @@ public: /// virtual QStringList getActive(int inst) const; + virtual bool getSysGrabberState(){ return GLOBAL_GRABBER_SYS_ENABLE; } + virtual void setSysGrabberState(bool sysGrabberState){ GLOBAL_GRABBER_SYS_ENABLE = sysGrabberState; } + virtual bool getV4lGrabberState(){ return GLOBAL_GRABBER_V4L_ENABLE; } + virtual void setV4lGrabberState(bool v4lGrabberState){ GLOBAL_GRABBER_V4L_ENABLE = v4lGrabberState; } + static QStringList availableGrabbers(); public: diff --git a/include/utils/PixelFormat.h b/include/utils/PixelFormat.h index c77836a0..b639faf5 100644 --- a/include/utils/PixelFormat.h +++ b/include/utils/PixelFormat.h @@ -14,7 +14,7 @@ enum class PixelFormat { BGR32, NV12, I420, -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG MJPEG, #endif NO_CHANGE @@ -57,7 +57,7 @@ inline PixelFormat parsePixelFormat(const QString& pixelFormat) { return PixelFormat::NV12; } -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG else if (format.compare("mjpeg") == 0) { return PixelFormat::MJPEG; @@ -103,7 +103,7 @@ inline QString pixelFormatToString(const PixelFormat& pixelFormat) { return "NV12"; } -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG else if (pixelFormat == PixelFormat::MJPEG) { return "MJPEG"; diff --git a/libsrc/grabber/video/CMakeLists.txt b/libsrc/grabber/video/CMakeLists.txt index 0343584e..43a0e580 100644 --- a/libsrc/grabber/video/CMakeLists.txt +++ b/libsrc/grabber/video/CMakeLists.txt @@ -1,16 +1,28 @@ # Common cmake definition for external video grabber +# Add Turbo JPEG library +if (ENABLE_V4L2 OR ENABLE_MF) + find_package(TurboJPEG) + if (TURBOJPEG_FOUND) + add_definitions(-DHAVE_TURBO_JPEG) + message( STATUS "Using Turbo JPEG library: ${TurboJPEG_LIBRARY}") + include_directories(${TurboJPEG_INCLUDE_DIRS}) + else () + message( STATUS "Turbo JPEG library not found, MJPEG camera format won't work.") + endif () +endif() + # Define the wrapper/header/source locations and collect them SET(WRAPPER_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video) SET(HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber) if (ENABLE_MF) project(mf-grabber) SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/mediafoundation) - FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${CURRENT_SOURCE_DIR}/*.h" 
"${CURRENT_SOURCE_DIR}/*.cpp") + FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp") elseif(ENABLE_V4L2) project(v4l2-grabber) SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/v4l2) - FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp") + FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.cpp") endif() add_library(${PROJECT_NAME} ${SOURCES}) @@ -18,6 +30,4 @@ target_link_libraries(${PROJECT_NAME} hyperion ${QT_LIBRARIES}) if(TURBOJPEG_FOUND) target_link_libraries(${PROJECT_NAME} ${TurboJPEG_LIBRARY}) -elseif (JPEG_FOUND) - target_link_libraries(${PROJECT_NAME} ${JPEG_LIBRARY}) endif() diff --git a/libsrc/grabber/video/mediafoundation/MFThread.cpp b/libsrc/grabber/video/EncoderThread.cpp similarity index 84% rename from libsrc/grabber/video/mediafoundation/MFThread.cpp rename to libsrc/grabber/video/EncoderThread.cpp index 24785935..ff2ec0b4 100644 --- a/libsrc/grabber/video/mediafoundation/MFThread.cpp +++ b/libsrc/grabber/video/EncoderThread.cpp @@ -1,31 +1,39 @@ -#include "grabber/MFThread.h" +#include "grabber/EncoderThread.h" -MFThread::MFThread() +EncoderThread::EncoderThread() : _localData(nullptr) , _scalingFactorsCount(0) - , _scalingFactors(nullptr) + , _imageResampler() +#ifdef HAVE_TURBO_JPEG , _transform(nullptr) , _decompress(nullptr) + , _scalingFactors(nullptr) , _xform(nullptr) - , _imageResampler() +#endif {} -MFThread::~MFThread() +EncoderThread::~EncoderThread() { +#ifdef HAVE_TURBO_JPEG if (_transform) tjDestroy(_transform); if (_decompress) tjDestroy(_decompress); +#endif if (_localData) +#ifdef HAVE_TURBO_JPEG tjFree(_localData); +#else + delete[] _localData; +#endif } -void MFThread::setup( +void EncoderThread::setup( PixelFormat pixelFormat, uint8_t* sharedData, int size, int width, int height, int lineLength, - int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, + unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, VideoMode videoMode, FlipMode flipMode, int pixelDecimation) { _lineLength = lineLength; @@ -33,7 +41,6 @@ void MFThread::setup( _size = (unsigned long) size; _width = width; _height = height; - _subsamp = subsamp; _cropLeft = cropLeft; _cropTop = cropTop; _cropBottom = cropBottom; @@ -47,23 +54,32 @@ void MFThread::setup( _imageResampler.setHorizontalPixelDecimation(_pixelDecimation); _imageResampler.setVerticalPixelDecimation(_pixelDecimation); +#ifdef HAVE_TURBO_JPEG if (_localData) tjFree(_localData); _localData = (uint8_t*)tjAlloc(size + 1); +#else + delete[] _localData; + _localData = nullptr; + _localData = new uint8_t(size + 1); +#endif + memcpy(_localData, sharedData, size); } -void MFThread::process() +void EncoderThread::process() { _busy = true; if (_width > 0 && _height > 0) { +#ifdef HAVE_TURBO_JPEG if (_pixelFormat == PixelFormat::MJPEG) { processImageMjpeg(); } else +#endif { if (_pixelFormat == PixelFormat::BGR24) { @@ -78,14 +94,27 @@ void MFThread::process() } Image image = Image(); - _imageResampler.processImage(_localData, _width, _height, _lineLength, PixelFormat::BGR24, image); + _imageResampler.processImage( + _localData, + _width, + _height, + _lineLength, +#if defined(ENABLE_V4L2) + _pixelFormat, +#else + PixelFormat::BGR24, +#endif + image + 
); + emit newFrame(image); } } _busy = false; } -void MFThread::processImageMjpeg() +#ifdef HAVE_TURBO_JPEG +void EncoderThread::processImageMjpeg() { if (!_transform && _flipMode != FlipMode::NO_CHANGE) { @@ -111,7 +140,8 @@ void MFThread::processImageMjpeg() _scalingFactors = tjGetScalingFactors(&_scalingFactorsCount); } - if (tjDecompressHeader2(_decompress, _localData, _size, &_width, &_height, &_subsamp) != 0) + int subsamp = 0; + if (tjDecompressHeader2(_decompress, _localData, _size, &_width, &_height, &subsamp) != 0) { if (tjGetErrorCode(_decompress) == TJERR_FATAL) return; @@ -176,3 +206,4 @@ void MFThread::processImageMjpeg() emit newFrame(destImage); } } +#endif diff --git a/libsrc/grabber/video/VideoWrapper.cpp b/libsrc/grabber/video/VideoWrapper.cpp index 26814b57..af473d7b 100644 --- a/libsrc/grabber/video/VideoWrapper.cpp +++ b/libsrc/grabber/video/VideoWrapper.cpp @@ -2,7 +2,7 @@ #include -// qt +// qt includes #include VideoWrapper::VideoWrapper() @@ -53,13 +53,20 @@ void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument // extract settings const QJsonObject& obj = config.object(); - // global grabber state - GLOBAL_GRABBER_V4L_ENABLE = obj["enable"].toBool(false); + // set global grabber state + setV4lGrabberState(obj["enable"].toBool(false)); - if (GLOBAL_GRABBER_V4L_ENABLE) + if (getV4lGrabberState()) { - // Device - _grabber.setDevice(obj["device"].toString("auto")); +#if defined(ENABLE_MF) + // Device path + _grabber.setDevice(obj["device"].toString("none")); +#endif + +#if defined(ENABLE_V4L2) + // Device path and name + _grabber.setDevice(obj["device"].toString("none"), obj["available_devices"].toString("none")); +#endif // Device input _grabber.setInput(obj["input"].toInt(0)); @@ -118,8 +125,10 @@ void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument obj["noSignalCounterThreshold"].toInt(50)); // Reload the Grabber if any settings have been changed that require it - _grabber.reload(); + _grabber.reload(getV4lGrabberState()); } + else + stop(); } } diff --git a/libsrc/grabber/video/mediafoundation/MFGrabber.cpp b/libsrc/grabber/video/mediafoundation/MFGrabber.cpp index b0e710ff..7df2a83e 100644 --- a/libsrc/grabber/video/mediafoundation/MFGrabber.cpp +++ b/libsrc/grabber/video/mediafoundation/MFGrabber.cpp @@ -62,7 +62,7 @@ bool MFGrabber::prepare() _sourceReaderCB = new SourceReaderCB(this); if (!_threadManager) - _threadManager = new MFThreadManager(this); + _threadManager = new EncoderThreadManager(this); return (_sourceReaderCB != nullptr && _threadManager != nullptr); } @@ -76,7 +76,7 @@ bool MFGrabber::start() { if (init()) { - connect(_threadManager, &MFThreadManager::newFrame, this, &MFGrabber::newThreadFrame); + connect(_threadManager, &EncoderThreadManager::newFrame, this, &MFGrabber::newThreadFrame); _threadManager->start(); DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount); @@ -518,7 +518,7 @@ void MFGrabber::process_image(const void *frameImageBuffer, int size) { if (!_threadManager->_threads[i]->isBusy()) { - _threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)frameImageBuffer, size, _width, _height, _lineLength, _subsamp, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation); + _threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)frameImageBuffer, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation); _threadManager->_threads[i]->process(); break; } diff --git 
a/libsrc/grabber/video/v4l2/V4L2Grabber.cpp b/libsrc/grabber/video/v4l2/V4L2Grabber.cpp index cfedd009..4bcfdaa1 100644 --- a/libsrc/grabber/video/v4l2/V4L2Grabber.cpp +++ b/libsrc/grabber/video/v4l2/V4L2Grabber.cpp @@ -21,6 +21,7 @@ #include #include +#include #include "grabber/V4L2Grabber.h" @@ -41,7 +42,7 @@ static PixelFormat GetPixelFormat(const unsigned int format) if (format == V4L2_PIX_FMT_UYVY) return PixelFormat::UYVY; if (format == V4L2_PIX_FMT_NV12) return PixelFormat::NV12; if (format == V4L2_PIX_FMT_YUV420) return PixelFormat::I420; -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG; #endif return PixelFormat::NO_CHANGE; @@ -49,14 +50,16 @@ static PixelFormat GetPixelFormat(const unsigned int format) V4L2Grabber::V4L2Grabber() : Grabber("V4L2") + , _currentDevicePath("none") , _currentDeviceName("none") - , _newDeviceName("none") + , _threadManager(nullptr) , _ioMethod(IO_METHOD_MMAP) , _fileDescriptor(-1) , _pixelFormat(PixelFormat::NO_CHANGE) , _pixelFormatConfig(PixelFormat::NO_CHANGE) , _lineLength(-1) , _frameByteSize(-1) + , _currentFrame(0) , _noSignalCounterThreshold(40) , _noSignalThresholdColor(ColorRgb{0,0,0}) , _cecDetectionEnabled(true) @@ -77,6 +80,18 @@ V4L2Grabber::V4L2Grabber() V4L2Grabber::~V4L2Grabber() { uninit(); + + if (_threadManager) + delete _threadManager; + _threadManager = nullptr; +} + +bool V4L2Grabber::prepare() +{ + if (!_threadManager) + _threadManager = new EncoderThreadManager(this); + + return (_threadManager != nullptr); } void V4L2Grabber::uninit() @@ -84,7 +99,7 @@ void V4L2Grabber::uninit() // stop if the grabber was not stopped if (_initialized) { - Debug(_log,"Uninit grabber: %s", QSTRING_CSTR(_newDeviceName)); + Debug(_log,"Uninit grabber: %s (%s)", QSTRING_CSTR(_currentDeviceName), QSTRING_CSTR(_currentDevicePath)); stop(); } } @@ -93,39 +108,53 @@ bool V4L2Grabber::init() { if (!_initialized) { - bool noDeviceName = _currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName.compare("auto", Qt::CaseInsensitive) == 0; + bool noDevicePath = _currentDevicePath.compare("none", Qt::CaseInsensitive) == 0 || _currentDevicePath.compare("auto", Qt::CaseInsensitive) == 0; // enumerate the video capture devices on the user's system enumVideoCaptureDevices(); - if(noDeviceName) + if(noDevicePath) return false; - if(!_deviceProperties.contains(_currentDeviceName)) + if(!_deviceProperties.contains(_currentDevicePath)) { - Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName)); - _currentDeviceName = "none"; + Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDevicePath)); + _currentDevicePath = "none"; return false; } - - bool valid = false; - for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i) - if (i.key() == _currentDeviceName && valid == false) - for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++) - if (y.key() == _input && valid == false) - for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++) - if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height && valid == false) - for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++) - if(*fps == _fps && valid == false) - valid = true; - - if (!valid) + else { - Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName)); - _currentDeviceName = "none"; - 
return false; + if (HyperionIManager::getInstance()) + if (_currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName != _deviceProperties.value(_currentDevicePath).name) + return false; + + Debug(_log, "Set device (path) to: %s (%s)", QSTRING_CSTR(_deviceProperties.value(_currentDevicePath).name), QSTRING_CSTR(_currentDevicePath)); } + // correct invalid parameters + QMap::const_iterator inputIterator = _deviceProperties.value(_currentDevicePath).inputs.find(_input); + if (inputIterator == _deviceProperties.value(_currentDevicePath).inputs.end()) + setInput(_deviceProperties.value(_currentDevicePath).inputs.firstKey()); + + QMultiMap::const_iterator encodingIterator = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.find(_pixelFormat); + if (encodingIterator == _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end()) + setEncoding(pixelFormatToString(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.firstKey())); + + bool validDimensions = false; + for (auto enc = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.begin(); enc != _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end(); enc++) + if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height) + { + validDimensions = true; + break; + } + + if (!validDimensions) + setWidthHeight(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().width, _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().height); + + QList availableframerates = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates; + if (!availableframerates.isEmpty() && !availableframerates.contains(_fps)) + setFramerate(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates.first()); + bool opened = false; try { @@ -151,163 +180,16 @@ bool V4L2Grabber::init() return _initialized; } -void V4L2Grabber::enumVideoCaptureDevices() -{ - QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags); - _deviceProperties.clear(); - while(it.hasNext()) - { - //_v4lDevices - QString dev = it.next(); - if (it.fileName().startsWith("video")) - { - QString devName = "/dev/" + it.fileName(); - int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0); - - if (fd < 0) - { - throw_errno_exception("Cannot open '" + devName + "'"); - continue; - } - - struct v4l2_capability cap; - CLEAR(cap); - - if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) - { - throw_errno_exception("'" + devName + "' is no V4L2 device"); - close(fd); - continue; - } - - if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture. 
- { - close(fd); - continue; - } - - // get the current settings - struct v4l2_format fmt; - CLEAR(fmt); - - fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0) - { - close(fd); - continue; - } - - V4L2Grabber::DeviceProperties properties; - - // collect available device inputs (index & name) - struct v4l2_input input; - CLEAR(input); - - input.index = 0; - while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0) - { - V4L2Grabber::DeviceProperties::InputProperties inputProperties; - inputProperties.inputName = QString((char*)input.name); - - // Enumerate video standards - struct v4l2_standard standard; - CLEAR(standard); - - standard.index = 0; - while (xioctl(fd, VIDIOC_ENUMSTD, &standard) >= 0) - { - if (standard.id & input.std) - { - if (standard.id == V4L2_STD_PAL) - inputProperties.standards.append(VideoStandard::PAL); - else if (standard.id == V4L2_STD_NTSC) - inputProperties.standards.append(VideoStandard::NTSC); - else if (standard.id == V4L2_STD_SECAM) - inputProperties.standards.append(VideoStandard::SECAM); - } - - standard.index++; - } - - // Enumerate pixel formats - struct v4l2_fmtdesc desc; - CLEAR(desc); - - desc.index = 0; - desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - while (xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) - { - PixelFormat encodingFormat = GetPixelFormat(desc.pixelformat); - if (encodingFormat != PixelFormat::NO_CHANGE) - { - V4L2Grabber::DeviceProperties::InputProperties::EncodingProperties encodingProperties; - - // Enumerate frame sizes and frame rates - struct v4l2_frmsizeenum frmsizeenum; - CLEAR(frmsizeenum); - - frmsizeenum.index = 0; - frmsizeenum.pixel_format = desc.pixelformat; - while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0) - { - switch (frmsizeenum.type) - { - case V4L2_FRMSIZE_TYPE_DISCRETE: - { - encodingProperties.width = frmsizeenum.discrete.width; - encodingProperties.height = frmsizeenum.discrete.height; - enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height); - } - break; - case V4L2_FRMSIZE_TYPE_CONTINUOUS: - case V4L2_FRMSIZE_TYPE_STEPWISE: // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE - break; - } - - inputProperties.encodingFormats.insert(encodingFormat, encodingProperties); - frmsizeenum.index++; - } - - // Failsafe: In case VIDIOC_ENUM_FRAMESIZES fails, insert current heigth, width and fps. 
- if (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1) - { - encodingProperties.width = fmt.fmt.pix.width; - encodingProperties.height = fmt.fmt.pix.height; - enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, encodingProperties.width, encodingProperties.height); - inputProperties.encodingFormats.insert(encodingFormat, encodingProperties); - } - } - - desc.index++; - } - - properties.inputs.insert(input.index, inputProperties); - input.index++; - } - - if (close(fd) < 0) continue; - - QFile devNameFile(dev+"/name"); - if (devNameFile.exists()) - { - devNameFile.open(QFile::ReadOnly); - devName = devNameFile.readLine(); - devName = devName.trimmed(); - properties.name = devName; - devNameFile.close(); - } - - _deviceProperties.insert("/dev/"+it.fileName(), properties); - } - } -} - bool V4L2Grabber::start() { try { if (init() && _streamNotifier != nullptr && !_streamNotifier->isEnabled()) { + connect(_threadManager, &EncoderThreadManager::newFrame, this, &V4L2Grabber::newThreadFrame); + _threadManager->start(); + DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount); + _streamNotifier->setEnabled(true); start_capturing(); Info(_log, "Started"); @@ -326,6 +208,8 @@ void V4L2Grabber::stop() { if (_streamNotifier != nullptr && _streamNotifier->isEnabled()) { + _threadManager->stop(); + disconnect(_threadManager, nullptr, nullptr, nullptr); stop_capturing(); _streamNotifier->setEnabled(false); uninit_device(); @@ -340,23 +224,23 @@ bool V4L2Grabber::open_device() { struct stat st; - if (-1 == stat(QSTRING_CSTR(_currentDeviceName), &st)) + if (-1 == stat(QSTRING_CSTR(_currentDevicePath), &st)) { - throw_errno_exception("Cannot identify '" + _currentDeviceName + "'"); + throw_errno_exception("Cannot identify '" + _currentDevicePath + "'"); return false; } if (!S_ISCHR(st.st_mode)) { - throw_exception("'" + _currentDeviceName + "' is no device"); + throw_exception("'" + _currentDevicePath + "' is no device"); return false; } - _fileDescriptor = open(QSTRING_CSTR(_currentDeviceName), O_RDWR | O_NONBLOCK, 0); + _fileDescriptor = open(QSTRING_CSTR(_currentDevicePath), O_RDWR | O_NONBLOCK, 0); if (-1 == _fileDescriptor) { - throw_errno_exception("Cannot open '" + _currentDeviceName + "'"); + throw_errno_exception("Cannot open '" + _currentDevicePath + "'"); return false; } @@ -409,7 +293,7 @@ void V4L2Grabber::init_mmap() { if (EINVAL == errno) { - throw_exception("'" + _currentDeviceName + "' does not support memory mapping"); + throw_exception("'" + _currentDevicePath + "' does not support memory mapping"); return; } else @@ -421,7 +305,7 @@ void V4L2Grabber::init_mmap() if (req.count < 2) { - throw_exception("Insufficient buffer memory on " + _currentDeviceName); + throw_exception("Insufficient buffer memory on " + _currentDevicePath); return; } @@ -473,7 +357,7 @@ void V4L2Grabber::init_userp(unsigned int buffer_size) { if (EINVAL == errno) { - throw_exception("'" + _currentDeviceName + "' does not support user pointer"); + throw_exception("'" + _currentDevicePath + "' does not support user pointer"); return; } else @@ -507,7 +391,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) { if (EINVAL == errno) { - throw_exception("'" + _currentDeviceName + "' is no V4L2 device"); + throw_exception("'" + _currentDevicePath + "' is no V4L2 device"); return; } else @@ -519,7 +403,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { - throw_exception("'" + _currentDeviceName + "' is 
no video capture device"); + throw_exception("'" + _currentDevicePath + "' is no video capture device"); return; } @@ -529,7 +413,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) { if (!(cap.capabilities & V4L2_CAP_READWRITE)) { - throw_exception("'" + _currentDeviceName + "' does not support read i/o"); + throw_exception("'" + _currentDevicePath + "' does not support read i/o"); return; } } @@ -540,14 +424,13 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) { if (!(cap.capabilities & V4L2_CAP_STREAMING)) { - throw_exception("'" + _currentDeviceName + "' does not support streaming i/o"); + throw_exception("'" + _currentDevicePath + "' does not support streaming i/o"); return; } } break; } - /* Select video input, video standard and tune here. */ struct v4l2_cropcap cropcap; @@ -571,10 +454,6 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) } } } - else - { - /* Errors ignored. */ - } // set input if needed and supported struct v4l2_input v4l2Input; @@ -677,7 +556,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; break; -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG case PixelFormat::MJPEG: { fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; @@ -787,7 +666,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) } break; -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG case V4L2_PIX_FMT_MJPEG: { _pixelFormat = PixelFormat::MJPEG; @@ -797,7 +676,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard) #endif default: -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12, I420 and MJPEG are supported"); #else throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12 and I420 are supported"); @@ -1036,7 +915,7 @@ int V4L2Grabber::read_frame() rc = process_image((void *)buf.m.userptr, buf.bytesused); - if (-1 == xioctl(VIDIOC_QBUF, &buf)) + if (!rc && -1 == xioctl(VIDIOC_QBUF, &buf)) { throw_errno_exception("VIDIOC_QBUF"); return 0; @@ -1056,8 +935,13 @@ int V4L2Grabber::read_frame() bool V4L2Grabber::process_image(const void *p, int size) { - // We do want a new frame... 
-#ifdef HAVE_JPEG_DECODER + int processFrameIndex = _currentFrame++, result = false; + + // frame skipping + if ((processFrameIndex % (_fpsSoftwareDecimation + 1) != 0) && (_fpsSoftwareDecimation > 0)) + return result; + +#ifdef HAVE_TURBO_JPEG if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG) #else if (size < _frameByteSize) @@ -1065,151 +949,28 @@ bool V4L2Grabber::process_image(const void *p, int size) { Error(_log, "Frame too small: %d != %d", size, _frameByteSize); } - else + else if (_threadManager != nullptr) { - process_image(reinterpret_cast(p), size); - return true; + for (int i = 0; i < _threadManager->_threadCount; i++) + { + if (!_threadManager->_threads[i]->isBusy()) + { + _threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)p, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation); + _threadManager->_threads[i]->process(); + result = true; + break; + } + } } - return false; + return result; } -void V4L2Grabber::process_image(const uint8_t * data, int size) +void V4L2Grabber::newThreadFrame(Image image) { if (_cecDetectionEnabled && _cecStandbyActivated) return; - Image image(_width, _height); - -/* ---------------------------------------------------------- - * ----------- BEGIN of JPEG decoder related code ----------- - * --------------------------------------------------------*/ - -#ifdef HAVE_JPEG_DECODER - if (_pixelFormat == PixelFormat::MJPEG) - { -#endif -#ifdef HAVE_JPEG - _decompress = new jpeg_decompress_struct; - _error = new errorManager; - - _decompress->err = jpeg_std_error(&_error->pub); - _error->pub.error_exit = &errorHandler; - _error->pub.output_message = &outputHandler; - - jpeg_create_decompress(_decompress); - - if (setjmp(_error->setjmp_buffer)) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - jpeg_mem_src(_decompress, const_cast(data), size); - - if (jpeg_read_header(_decompress, (bool) TRUE) != JPEG_HEADER_OK) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - _decompress->scale_num = 1; - _decompress->scale_denom = 1; - _decompress->out_color_space = JCS_RGB; - _decompress->dct_method = JDCT_IFAST; - - if (!jpeg_start_decompress(_decompress)) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - if (_decompress->out_color_components != 3) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - QImage imageFrame = QImage(_decompress->output_width, _decompress->output_height, QImage::Format_RGB888); - - int y = 0; - while (_decompress->output_scanline < _decompress->output_height) - { - uchar *row = imageFrame.scanLine(_decompress->output_scanline); - jpeg_read_scanlines(_decompress, &row, 1); - y++; - } - - jpeg_finish_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - - if (imageFrame.isNull() || _error->pub.num_warnings > 0) - return; -#endif -#ifdef HAVE_TURBO_JPEG - _decompress = tjInitDecompress(); - if (_decompress == nullptr) - return; - - if (tjDecompressHeader2(_decompress, const_cast(data), size, &_width, &_height, &_subsamp) != 0) - { - tjDestroy(_decompress); - return; - } - - QImage imageFrame = QImage(_width, _height, QImage::Format_RGB888); - if 
(tjDecompress2(_decompress, const_cast(data), size, imageFrame.bits(), _width, 0, _height, TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE) != 0) - { - tjDestroy(_decompress); - return; - } - - tjDestroy(_decompress); - - if (imageFrame.isNull()) - return; -#endif -#ifdef HAVE_JPEG_DECODER - QRect rect(_cropLeft, _cropTop, imageFrame.width() - _cropLeft - _cropRight, imageFrame.height() - _cropTop - _cropBottom); - imageFrame = imageFrame.copy(rect); - imageFrame = imageFrame.scaled(imageFrame.width() / _pixelDecimation, imageFrame.height() / _pixelDecimation,Qt::KeepAspectRatio); - - if ((image.width() != unsigned(imageFrame.width())) || (image.height() != unsigned(imageFrame.height()))) - image.resize(imageFrame.width(), imageFrame.height()); - - for (int y=0; y= _noSignalCounterThreshold) @@ -1258,9 +1012,7 @@ void V4L2Grabber::process_image(const uint8_t * data, int size) } } else - { emit newFrame(image); - } } int V4L2Grabber::xioctl(int request, void *arg) @@ -1289,14 +1041,12 @@ int V4L2Grabber::xioctl(int fileDescriptor, int request, void *arg) return r; } -void V4L2Grabber::setDevice(const QString& device) +void V4L2Grabber::setDevice(const QString& devicePath, const QString& deviceName) { - if (_currentDeviceName != device) + if (_currentDevicePath != devicePath || _currentDeviceName != deviceName) { - (_initialized) - ? _newDeviceName = device - : _currentDeviceName = _newDeviceName = device; - + _currentDevicePath = devicePath; + _currentDeviceName = deviceName; _reload = true; } } @@ -1391,14 +1141,17 @@ void V4L2Grabber::setCecDetectionEnable(bool enable) bool V4L2Grabber::reload(bool force) { - if (_streamNotifier != nullptr && _streamNotifier->isEnabled() && (_reload || force)) + if (_reload || force) { - Info(_log,"Reloading V4L2 Grabber"); - uninit(); - _pixelFormat = _pixelFormatConfig; - _newDeviceName = _currentDeviceName; + if (_streamNotifier != nullptr && _streamNotifier->isEnabled()) + { + Info(_log,"Reloading V4L2 Grabber"); + uninit(); + _pixelFormat = _pixelFormatConfig; + } + _reload = false; - return start(); + return prepare() && start(); } return false; @@ -1537,6 +1290,157 @@ QJsonArray V4L2Grabber::discover(const QJsonObject& params) return inputsDiscovered; } +void V4L2Grabber::enumVideoCaptureDevices() +{ + QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags); + _deviceProperties.clear(); + while(it.hasNext()) + { + //_v4lDevices + QString dev = it.next(); + if (it.fileName().startsWith("video")) + { + QString devName = "/dev/" + it.fileName(); + int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0); + + if (fd < 0) + { + throw_errno_exception("Cannot open '" + devName + "'"); + continue; + } + + struct v4l2_capability cap; + CLEAR(cap); + + if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) + { + throw_errno_exception("'" + devName + "' is no V4L2 device"); + close(fd); + continue; + } + + if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture. 
+ { + close(fd); + continue; + } + + // get the current settings + struct v4l2_format fmt; + CLEAR(fmt); + + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0) + { + close(fd); + continue; + } + + V4L2Grabber::DeviceProperties properties; + + // collect available device inputs (index & name) + struct v4l2_input input; + CLEAR(input); + + input.index = 0; + while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0) + { + V4L2Grabber::DeviceProperties::InputProperties inputProperties; + inputProperties.inputName = QString((char*)input.name); + + // Enumerate video standards + struct v4l2_standard standard; + CLEAR(standard); + + standard.index = 0; + while (xioctl(fd, VIDIOC_ENUMSTD, &standard) >= 0) + { + if (standard.id & input.std) + { + if (standard.id == V4L2_STD_PAL) + inputProperties.standards.append(VideoStandard::PAL); + else if (standard.id == V4L2_STD_NTSC) + inputProperties.standards.append(VideoStandard::NTSC); + else if (standard.id == V4L2_STD_SECAM) + inputProperties.standards.append(VideoStandard::SECAM); + } + + standard.index++; + } + + // Enumerate pixel formats + struct v4l2_fmtdesc desc; + CLEAR(desc); + + desc.index = 0; + desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + while (xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) + { + PixelFormat encodingFormat = GetPixelFormat(desc.pixelformat); + if (encodingFormat != PixelFormat::NO_CHANGE) + { + V4L2Grabber::DeviceProperties::InputProperties::EncodingProperties encodingProperties; + + // Enumerate frame sizes and frame rates + struct v4l2_frmsizeenum frmsizeenum; + CLEAR(frmsizeenum); + + frmsizeenum.index = 0; + frmsizeenum.pixel_format = desc.pixelformat; + while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0) + { + switch (frmsizeenum.type) + { + case V4L2_FRMSIZE_TYPE_DISCRETE: + { + encodingProperties.width = frmsizeenum.discrete.width; + encodingProperties.height = frmsizeenum.discrete.height; + enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height); + } + break; + case V4L2_FRMSIZE_TYPE_CONTINUOUS: + case V4L2_FRMSIZE_TYPE_STEPWISE: // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE + break; + } + + inputProperties.encodingFormats.insert(encodingFormat, encodingProperties); + frmsizeenum.index++; + } + + // Failsafe: In case VIDIOC_ENUM_FRAMESIZES fails, insert current heigth, width and fps. 
+ if (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1) + { + encodingProperties.width = fmt.fmt.pix.width; + encodingProperties.height = fmt.fmt.pix.height; + enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, encodingProperties.width, encodingProperties.height); + inputProperties.encodingFormats.insert(encodingFormat, encodingProperties); + } + } + + desc.index++; + } + + properties.inputs.insert(input.index, inputProperties); + input.index++; + } + + if (close(fd) < 0) continue; + + QFile devNameFile(dev+"/name"); + if (devNameFile.exists()) + { + devNameFile.open(QFile::ReadOnly); + devName = devNameFile.readLine(); + devName = devName.trimmed(); + properties.name = devName; + devNameFile.close(); + } + + _deviceProperties.insert("/dev/"+it.fileName(), properties); + } + } +} + void V4L2Grabber::enumFrameIntervals(QList &framerates, int fileDescriptor, int pixelformat, int width, int height) { // collect available frame rates diff --git a/libsrc/hyperion/GrabberWrapper.cpp b/libsrc/hyperion/GrabberWrapper.cpp index 8e9a912c..3019adaa 100644 --- a/libsrc/hyperion/GrabberWrapper.cpp +++ b/libsrc/hyperion/GrabberWrapper.cpp @@ -170,10 +170,10 @@ void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocume // extract settings const QJsonObject& obj = config.object(); - // global grabber state - GLOBAL_GRABBER_SYS_ENABLE = obj["enable"].toBool(false); + // set global grabber state + setSysGrabberState(obj["enable"].toBool(false)); - if (GLOBAL_GRABBER_SYS_ENABLE) + if (getSysGrabberState()) { // width/height _ggrabber->setWidthHeight(obj["width"].toInt(96), obj["height"].toInt(96)); @@ -197,6 +197,8 @@ void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocume // eval new update time updateTimer(1000/obj["fps"].toInt(10)); } + else + stop(); } } @@ -209,7 +211,7 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp else GRABBER_SYS_CLIENTS.remove(hyperionInd); - if(GRABBER_SYS_CLIENTS.empty() || !GLOBAL_GRABBER_SYS_ENABLE) + if(GRABBER_SYS_CLIENTS.empty() || !getSysGrabberState()) stop(); else start(); @@ -221,7 +223,7 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp else GRABBER_V4L_CLIENTS.remove(hyperionInd); - if(GRABBER_V4L_CLIENTS.empty() || !GLOBAL_GRABBER_V4L_ENABLE) + if(GRABBER_V4L_CLIENTS.empty() || !getV4lGrabberState()) stop(); else start(); @@ -231,6 +233,6 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp void GrabberWrapper::tryStart() { // verify start condition - if(!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty() && GLOBAL_GRABBER_SYS_ENABLE) + if(!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty() && getSysGrabberState()) start(); } diff --git a/libsrc/hyperion/Hyperion.cpp b/libsrc/hyperion/Hyperion.cpp index 149a4458..e20cca5c 100644 --- a/libsrc/hyperion/Hyperion.cpp +++ b/libsrc/hyperion/Hyperion.cpp @@ -56,7 +56,7 @@ Hyperion::Hyperion(quint8 instance, bool readonlyMode) , _hwLedCount() , _ledGridSize(hyperion::getLedLayoutGridSize(getSetting(settings::LEDS).array())) , _BGEffectHandler(nullptr) - ,_captureCont(nullptr) + , _captureCont(nullptr) , _ledBuffer(_ledString.leds().size(), ColorRgb::BLACK) , _boblightServer(nullptr) , _readOnlyMode(readonlyMode) diff --git a/libsrc/utils/ImageResampler.cpp b/libsrc/utils/ImageResampler.cpp index 6ab8364d..6452348b 100644 --- a/libsrc/utils/ImageResampler.cpp +++ b/libsrc/utils/ImageResampler.cpp @@ -156,7 +156,7 @@ void 
ImageResampler::processImage(const uint8_t * data, int width, int height, i break; } break; -#ifdef HAVE_JPEG_DECODER +#ifdef HAVE_TURBO_JPEG case PixelFormat::MJPEG: break; #endif diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index 2135ee58..d1fae2af 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -44,7 +44,6 @@ parts: - libxrender-dev - libavahi-core-dev - libavahi-compat-libdnssd-dev - - libjpeg-dev - libturbojpeg0-dev - libssl-dev - zlib1g-dev diff --git a/src/hyperion-v4l2/hyperion-v4l2.cpp b/src/hyperion-v4l2/hyperion-v4l2.cpp index 2d1a78ac..d1c5281c 100644 --- a/src/hyperion-v4l2/hyperion-v4l2.cpp +++ b/src/hyperion-v4l2/hyperion-v4l2.cpp @@ -56,9 +56,9 @@ int main(int argc, char** argv) IntOption & argInput = parser.add ('i', "input", "The device input [default: %1]", "0"); SwitchOption & argVideoStandard= parser.add>('v', "video-standard", "The used video standard. Valid values are PAL, NTSC, SECAM or no-change. [default: %1]", "no-change"); SwitchOption & argPixelFormat = parser.add> (0x0, "pixel-format", "The use pixel format. Valid values are YUYV, UYVY, RGB32, MJPEG or no-change. [default: %1]", "no-change"); - IntOption & argFps = parser.add ('f', "framerate", "Capture frame rate [default: %1]", "15", 1, 25); - IntOption & argWidth = parser.add (0x0, "width", "Width of the captured image [default: %1]", "160", 160); - IntOption & argHeight = parser.add (0x0, "height", "Height of the captured image [default: %1]", "160", 160); + IntOption & argFps = parser.add ('f', "framerate", "Capture frame rate [default: %1]", "25", 25); + IntOption & argWidth = parser.add ('w', "width", "Width of the captured image [default: %1]", "640", 640); + IntOption & argHeight = parser.add ('h', "height", "Height of the captured image [default: %1]", "480", 480); SwitchOption & argFlipMode = parser.add>(0x0, "flip-mode", "The used image flip mode. Valid values are HORIZONTAL, VERTICAL, BOTH or no-change. 
[default: %1]", "no-change"); IntOption & argCropWidth = parser.add (0x0, "crop-width", "Number of pixels to crop from the left and right sides of the picture before decimation [default: %1]", "0"); IntOption & argCropHeight = parser.add (0x0, "crop-height", "Number of pixels to crop from the top and the bottom of the picture before decimation [default: %1]", "0"); @@ -66,15 +66,14 @@ int main(int argc, char** argv) IntOption & argCropRight = parser.add (0x0, "crop-right", "Number of pixels to crop from the right of the picture before decimation (overrides --crop-width)"); IntOption & argCropTop = parser.add (0x0, "crop-top", "Number of pixels to crop from the top of the picture before decimation (overrides --crop-height)"); IntOption & argCropBottom = parser.add (0x0, "crop-bottom", "Number of pixels to crop from the bottom of the picture before decimation (overrides --crop-height)"); - IntOption & argSizeDecimation = parser.add ('s', "size-decimator", "Decimation factor for the output size [default=%1]", "6", 1); - BooleanOption & argScreenshot = parser.add(0x0, "screenshot", "Take a single screenshot, save it to file and quit"); + IntOption & argSizeDecimation = parser.add ('s', "size-decimator", "Decimation factor for the output size [default=%1]", "8", 1); + BooleanOption & argScreenshot = parser.add('S', "screenshot", "Take a single screenshot, save it to file and quit"); - BooleanOption & argSignalDetection = parser.add('s', "signal-detection-disabled", "disable signal detection"); - DoubleOption & argSignalThreshold = parser.add ('t', "signal-threshold", "The signal threshold for detecting the presence of a signal. Value should be between 0.0 and 1.0.", QString(), 0.0, 1.0); + BooleanOption & argSignalDetection = parser.add(0x0, "signal-detection-disabled", "disable signal detection"); + DoubleOption & argSignalThreshold = parser.add (0x0, "signal-threshold", "The signal threshold for detecting the presence of a signal. Value should be between 0.0 and 1.0.", QString(), 0.0, 1.0); DoubleOption & argRedSignalThreshold = parser.add (0x0, "red-threshold", "The red signal threshold. Value should be between 0.0 and 1.0. (overrides --signal-threshold)"); DoubleOption & argGreenSignalThreshold= parser.add (0x0, "green-threshold", "The green signal threshold. Value should be between 0.0 and 1.0. (overrides --signal-threshold)"); DoubleOption & argBlueSignalThreshold = parser.add (0x0, "blue-threshold", "The blue signal threshold. Value should be between 0.0 and 1.0. (overrides --signal-threshold)"); - DoubleOption & argSignalHorizontalMin = parser.add (0x0, "signal-horizontal-min", "area for signal detection - horizontal minimum offset value. Values between 0.0 and 1.0"); DoubleOption & argSignalVerticalMin = parser.add (0x0, "signal-vertical-min" , "area for signal detection - vertical minimum offset value. Values between 0.0 and 1.0"); DoubleOption & argSignalHorizontalMax = parser.add (0x0, "signal-horizontal-max", "area for signal detection - horizontal maximum offset value. 
Values between 0.0 and 1.0"); @@ -86,7 +85,7 @@ int main(int argc, char** argv) IntOption & argPriority = parser.add ('p', "priority", "Use the provided priority channel (suggested 100-199) [default: %1]", "150"); BooleanOption & argSkipReply = parser.add(0x0, "skip-reply", "Do not receive and check reply messages from Hyperion"); BooleanOption & argDebug = parser.add(0x0, "debug", "Enable debug logging"); - BooleanOption & argHelp = parser.add('h', "help", "Show this help message and exit"); + BooleanOption & argHelp = parser.add(0x0, "help", "Show this help message and exit"); argVideoStandard.addSwitch("pal", VideoStandard::PAL); argVideoStandard.addSwitch("ntsc", VideoStandard::NTSC); @@ -125,7 +124,7 @@ int main(int argc, char** argv) V4L2Grabber grabber; // set device - grabber.setDevice(argDevice.value(parser)); + grabber.setDevice(argDevice.value(parser), ""); // set input grabber.setInput(argInput.getInt(parser)); @@ -134,13 +133,16 @@ int main(int argc, char** argv) grabber.setWidthHeight(argWidth.getInt(parser), argHeight.getInt(parser)); // set fps - grabber.setFramerate(1000 / argFps.getInt(parser)); + if (parser.isSet(argFps)) + grabber.setFramerate(argFps.getInt(parser)); - // TODO set encoding format - // grabber.setEncoding(argPixelFormat.switchValue(parser)); + // set encoding format + if (parser.isSet(argPixelFormat)) + grabber.setEncoding(pixelFormatToString(argPixelFormat.switchValue(parser))); // set video standard - grabber.setVideoStandard(argVideoStandard.switchValue(parser)); + if (parser.isSet(argVideoStandard)) + grabber.setVideoStandard(argVideoStandard.switchValue(parser)); // set image size decimation grabber.setPixelDecimation(std::max(1, argSizeDecimation.getInt(parser))); @@ -243,7 +245,8 @@ int main(int argc, char** argv) QObject::connect(&grabber, SIGNAL(newFrame(const Image &)), &flatbuf, SLOT(setImage(Image))); // Start the capturing - grabber.start(); + if (grabber.prepare()) + grabber.start(); // Start the application app.exec(); diff --git a/src/hyperiond/hyperiond.cpp b/src/hyperiond/hyperiond.cpp index 44d636dd..6cedd509 100644 --- a/src/hyperiond/hyperiond.cpp +++ b/src/hyperiond/hyperiond.cpp @@ -493,8 +493,8 @@ void HyperionDaemon::handleSettingsUpdate(settings::type settingsType, const QJs createGrabberFramebuffer(grabberConfig); } #ifdef ENABLE_FB - _dispmanx->handleSettingsUpdate(settings::SYSTEMCAPTURE, getSetting(settings::SYSTEMCAPTURE)); - _dispmanx->tryStart(); + _fbGrabber->handleSettingsUpdate(settings::SYSTEMCAPTURE, getSetting(settings::SYSTEMCAPTURE)); + _fbGrabber->tryStart(); #endif } else if (type == "dispmanx")
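Taken together, the V4L2 changes mirror the existing Media Foundation path: process_image() no longer decodes inline, it applies software frame-rate decimation and hands the buffer to the first EncoderThread that is not busy, with decoded frames delivered back through newThreadFrame(). The standalone sketch below illustrates that dispatch pattern; FrameDispatcher and Worker are placeholder names, not types from this patch.

```cpp
#include <atomic>
#include <cstdint>
#include <vector>

struct Worker
{
	std::atomic<bool> busy{false};

	// Stand-in for EncoderThread::setup() + process(); a real worker copies
	// the buffer, decodes it on its own thread and clears `busy` when done.
	void encode(const uint8_t* /*data*/, int /*size*/) { busy = false; }
};

class FrameDispatcher
{
public:
	FrameDispatcher(int threadCount, int fpsSoftwareDecimation)
		: _workers(threadCount)
		, _fpsSoftwareDecimation(fpsSoftwareDecimation)
	{
	}

	// Returns true when the frame was handed to a worker, false when it was skipped or dropped.
	bool processFrame(const uint8_t* data, int size)
	{
		const int frameIndex = _currentFrame++;

		// Software decimation: with a factor of N, only every (N+1)-th frame is decoded.
		if (_fpsSoftwareDecimation > 0 && (frameIndex % (_fpsSoftwareDecimation + 1)) != 0)
			return false;

		for (Worker& worker : _workers)
		{
			bool expected = false;
			if (worker.busy.compare_exchange_strong(expected, true))
			{
				worker.encode(data, size);
				return true;
			}
		}

		return false; // every worker is busy: the frame is dropped
	}

private:
	std::vector<Worker> _workers;
	std::atomic<int> _currentFrame{0};
	int _fpsSoftwareDecimation;
};
```

In the patch itself the busy flag lives on EncoderThread, and setup() copies the frame into thread-local storage (tjAlloc/tjFree when TurboJPEG is available) before decoding starts.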