diff --git a/.ci/ci_build.sh b/.ci/ci_build.sh index 00538255..d74d4144 100755 --- a/.ci/ci_build.sh +++ b/.ci/ci_build.sh @@ -1,14 +1,8 @@ #!/bin/bash # detect CI -if [ "$SYSTEM_COLLECTIONID" != "" ]; then - # Azure Pipelines - echo "Azure detected" - CI_NAME="$(echo "$AGENT_OS" | tr '[:upper:]' '[:lower:]')" - CI_BUILD_DIR="$BUILD_SOURCESDIRECTORY" -elif [ "$HOME" != "" ]; then +if [ "$HOME" != "" ]; then # GitHub Actions - echo "Github Actions detected" CI_NAME="$(uname -s | tr '[:upper:]' '[:lower:]')" CI_BUILD_DIR="$GITHUB_WORKSPACE" else diff --git a/.ci/ci_install.sh b/.ci/ci_install.sh index 35b85104..86c6ffd4 100755 --- a/.ci/ci_install.sh +++ b/.ci/ci_install.sh @@ -1,11 +1,7 @@ #!/bin/bash # detect CI -if [ "$SYSTEM_COLLECTIONID" != "" ]; then - # Azure Pipelines - CI_NAME="$(echo "$AGENT_OS" | tr '[:upper:]' '[:lower:]')" - CI_BUILD_DIR="$BUILD_SOURCESDIRECTORY" -elif [ "$HOME" != "" ]; then +if [ "$HOME" != "" ]; then # GitHub Actions CI_NAME="$(uname -s | tr '[:upper:]' '[:lower:]')" CI_BUILD_DIR="$GITHUB_WORKSPACE" diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 8b221e12..52ab9063 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -159,10 +159,21 @@ jobs: path: C:\Users\runneradmin\AppData\Local\Temp\chocolatey key: ${{ runner.os }}-chocolatey - - name: Install Python, NSIS, OpenSSL + - name: "Remove Redistributable" + shell: cmd + run: | + MsiExec.exe /passive /X{F0C3E5D1-1ADE-321E-8167-68EF0DE699A5} + MsiExec.exe /passive /X{1D8E6291-B0D5-35EC-8441-6616F567A0F7} + + - name: Install Python, NSIS, OpenSSL, DirectX SDK shell: powershell run: | - choco install --no-progress python nsis openssl -y + choco install --no-progress python nsis openssl directx-sdk -y + + - name: Install libjpeg-turbo + run: | + Invoke-WebRequest https://netcologne.dl.sourceforge.net/project/libjpeg-turbo/2.0.6/libjpeg-turbo-2.0.6-vc64.exe -OutFile libjpeg-turbo.exe -UserAgent NativeHost + 
.\libjpeg-turbo /S - name: Set up x64 build architecture environment shell: cmd diff --git a/.github/workflows/push-master.yml b/.github/workflows/push-master.yml index 7a9bcc54..8596b2c9 100644 --- a/.github/workflows/push-master.yml +++ b/.github/workflows/push-master.yml @@ -122,10 +122,21 @@ jobs: path: C:\Users\runneradmin\AppData\Local\Temp\chocolatey key: ${{ runner.os }}-chocolatey - - name: Install Python, NSIS, OpenSSL + - name: "Remove Redistributable" + shell: cmd + run: | + MsiExec.exe /passive /X{F0C3E5D1-1ADE-321E-8167-68EF0DE699A5} + MsiExec.exe /passive /X{1D8E6291-B0D5-35EC-8441-6616F567A0F7} + + - name: Install Python, NSIS, OpenSSL, DirectX SDK shell: powershell run: | - choco install --no-progress python nsis openssl -y + choco install --no-progress python nsis openssl directx-sdk -y + + - name: Install libjpeg-turbo + run: | + Invoke-WebRequest https://netcologne.dl.sourceforge.net/project/libjpeg-turbo/2.0.6/libjpeg-turbo-2.0.6-vc64.exe -OutFile libjpeg-turbo.exe -UserAgent NativeHost + .\libjpeg-turbo /S - name: Set up x64 build architecture environment shell: cmd diff --git a/.gitignore b/.gitignore index 0713ddb3..40563443 100644 --- a/.gitignore +++ b/.gitignore @@ -27,5 +27,11 @@ libsrc/flatbufserver/hyperion_request_generated.h *.kdev* # Visual Studio 2015/2017/2019 cache/options directory -.vs/ +# Ignore +.vs/* CMakeSettings.json +# Allow +!.vs/launch.vs.json + +# LedDevice 'File' output +NULL diff --git a/.vs/launch.vs.json b/.vs/launch.vs.json new file mode 100644 index 00000000..37627ab1 --- /dev/null +++ b/.vs/launch.vs.json @@ -0,0 +1,17 @@ +{ + "version": "0.2.1", + "defaults": {}, + "configurations": [ + { + "type": "default", + "project": "CMakeLists.txt", + "projectTarget": "hyperiond.exe (bin\\hyperiond.exe)", + "name": "Run hyperion with debug option and external console", + "args": [ + "-d", + "-c" + ], + "externalConsole": true + } + ] +} diff --git a/.vscode/launch.json b/.vscode/launch.json index ad6f92cc..f1eb3533 
100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -27,12 +27,12 @@ "name": "(Windows) hyperiond", "type": "cppvsdbg", "request": "launch", - "program": "${workspaceFolder}/build/bin/Debug/hyperiond.exe", + "program": "${command:cmake.launchTargetDirectory}/hyperiond", "args": ["-d"], "stopAtEntry": false, "cwd": "${workspaceFolder}", "environment": [], - "externalConsole": false + "console": "internalConsole" } ] } diff --git a/CMakeLists.txt b/CMakeLists.txt index ecebcc11..f4a733cd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,10 +4,15 @@ message( STATUS "CMake Version: ${CMAKE_VERSION}" ) PROJECT(hyperion) -# Parse semantic version of version file +# Parse semantic version of version file and write version to config include (${CMAKE_CURRENT_SOURCE_DIR}/cmake/version.cmake) file (STRINGS "version" HYPERION_VERSION) SetVersionNumber(HYPERION ${HYPERION_VERSION}) +set(DEFAULT_JSON_CONFIG_FILE ${CMAKE_CURRENT_SOURCE_DIR}/config/hyperion.config.json.default) +file(READ ${DEFAULT_JSON_CONFIG_FILE} DEFAULT_JSON_CONFIG_VAR) +string(REPLACE "configVersionValue" ${HYPERION_VERSION} DEFAULT_JSON_CONFIG_VAR "${DEFAULT_JSON_CONFIG_VAR}") +string(REPLACE "previousVersionValue" ${HYPERION_VERSION} DEFAULT_JSON_CONFIG_VAR "${DEFAULT_JSON_CONFIG_VAR}") +file(WRITE ${CMAKE_BINARY_DIR}/config/hyperion.config.json.default "${DEFAULT_JSON_CONFIG_VAR}") # Instruct CMake to run moc automatically when needed. 
set(CMAKE_AUTOMOC ON) @@ -50,6 +55,7 @@ SET ( DEFAULT_USE_SYSTEM_PROTO_LIBS OFF ) SET ( DEFAULT_USE_SYSTEM_MBEDTLS_LIBS OFF ) SET ( DEFAULT_TESTS OFF ) SET ( DEFAULT_EXPERIMENTAL OFF ) +SET ( DEFAULT_MF OFF ) SET ( DEFAULT_DEPLOY_DEPENDENCIES ON ) IF ( ${CMAKE_SYSTEM} MATCHES "Linux" ) @@ -60,7 +66,8 @@ IF ( ${CMAKE_SYSTEM} MATCHES "Linux" ) SET ( DEFAULT_USB_HID ON ) SET ( DEFAULT_CEC ON ) ELSEIF ( WIN32 ) - SET ( DEFAULT_DX OFF ) + SET ( DEFAULT_DX ON ) + SET ( DEFAULT_MF ON ) ELSE() SET ( DEFAULT_V4L2 OFF ) SET ( DEFAULT_FB OFF ) @@ -121,9 +128,14 @@ elseif ( "${PLATFORM}" MATCHES "rpi" ) SET ( DEFAULT_DISPMANX ON ) SET ( DEFAULT_WS281XPWM ON ) elseif ( "${PLATFORM}" STREQUAL "amlogic" ) - SET ( DEFAULT_AMLOGIC ON ) + SET ( DEFAULT_AMLOGIC ON ) +elseif ( "${PLATFORM}" STREQUAL "amlogic-dev" ) + SET ( DEFAULT_AMLOGIC ON ) + SET ( DEFAULT_DISPMANX OFF ) + SET ( DEFAULT_QT OFF ) + SET ( DEFAULT_CEC OFF ) elseif ( "${PLATFORM}" STREQUAL "amlogic64" ) - SET ( DEFAULT_AMLOGIC ON ) + SET ( DEFAULT_AMLOGIC ON ) elseif ( "${PLATFORM}" MATCHES "x11" ) SET ( DEFAULT_X11 ON ) SET ( DEFAULT_XCB ON ) @@ -150,17 +162,18 @@ ADD_DEFINITIONS( ${PLATFORM_DEFINE} ) option(ENABLE_AMLOGIC "Enable the AMLOGIC video grabber" ${DEFAULT_AMLOGIC} ) message(STATUS "ENABLE_AMLOGIC = ${ENABLE_AMLOGIC}") -option(ENABLE_DISPMANX "Enable the RPi dispmanx grabber" ${DEFAULT_DISPMANX} ) -message(STATUS "ENABLE_DISPMANX = ${ENABLE_DISPMANX}") - if (ENABLE_AMLOGIC) SET(ENABLE_FB ON) else() option(ENABLE_FB "Enable the framebuffer grabber" ${DEFAULT_FB} ) endif() + message(STATUS "ENABLE_FB = ${ENABLE_FB}") -option(ENABLE_OSX "Enable the osx grabber" ${DEFAULT_OSX} ) +option(ENABLE_DISPMANX "Enable the RPi dispmanx grabber" ${DEFAULT_DISPMANX} ) +message(STATUS "ENABLE_DISPMANX = ${ENABLE_DISPMANX}") + +option(ENABLE_OSX "Enable the OSX grabber" ${DEFAULT_OSX} ) message(STATUS "ENABLE_OSX = ${ENABLE_OSX}") option(ENABLE_SPIDEV "Enable the SPIDEV device" ${DEFAULT_SPIDEV} ) @@ -172,6 +185,9 @@ 
message(STATUS "ENABLE_TINKERFORGE = ${ENABLE_TINKERFORGE}") option(ENABLE_V4L2 "Enable the V4L2 grabber" ${DEFAULT_V4L2}) message(STATUS "ENABLE_V4L2 = ${ENABLE_V4L2}") +option(ENABLE_MF "Enable the Media Foundation grabber" ${DEFAULT_MF}) +message(STATUS "ENABLE_MF = ${ENABLE_MF}") + option(ENABLE_WS281XPWM "Enable the WS281x-PWM device" ${DEFAULT_WS281XPWM} ) message(STATUS "ENABLE_WS281XPWM = ${ENABLE_WS281XPWM}") @@ -190,7 +206,7 @@ message(STATUS "ENABLE_X11 = ${ENABLE_X11}") option(ENABLE_XCB "Enable the XCB grabber" ${DEFAULT_XCB}) message(STATUS "ENABLE_XCB = ${ENABLE_XCB}") -option(ENABLE_QT "Enable the qt grabber" ${DEFAULT_QT}) +option(ENABLE_QT "Enable the Qt grabber" ${DEFAULT_QT}) message(STATUS "ENABLE_QT = ${ENABLE_QT}") option(ENABLE_DX "Enable the DirectX grabber" ${DEFAULT_DX}) @@ -216,10 +232,7 @@ SET ( PROTOBUF_INSTALL_LIB_DIR ${CMAKE_BINARY_DIR}/proto ) # check all json files FILE ( GLOB_RECURSE HYPERION_SCHEMAS RELATIVE ${CMAKE_SOURCE_DIR} ${CMAKE_SOURCE_DIR}/libsrc/*schema*.json ) -SET( JSON_FILES - config/hyperion.config.json.default - ${HYPERION_SCHEMAS} -) +SET( JSON_FILES ${CMAKE_BINARY_DIR}/config/hyperion.config.json.default ${HYPERION_SCHEMAS}) EXECUTE_PROCESS ( COMMAND ${PYTHON_EXECUTABLE} test/jsonchecks/checkjson.py ${JSON_FILES} @@ -243,7 +256,7 @@ ENDIF () # TODO on windows it can't resolve the path inside the file (Das System kann den angegebenen Pfad nicht finden: '\\schema\\schema-general.json') IF (NOT WIN32) EXECUTE_PROCESS ( - COMMAND python test/jsonchecks/checkschema.py config/hyperion.config.json.default libsrc/hyperion/hyperion.schema.json + COMMAND python test/jsonchecks/checkschema.py ${CMAKE_BINARY_DIR}/config/hyperion.config.json.default libsrc/hyperion/hyperion.schema.json WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} RESULT_VARIABLE CHECK_CONFIG_FAILED ) @@ -399,31 +412,6 @@ find_package(libusb-1.0 REQUIRED) find_package(Threads REQUIRED) add_definitions(${QT_DEFINITIONS}) -# Add JPEG library -if (ENABLE_V4L2) - # Turbo 
JPEG - find_package(TurboJPEG) - if (TURBOJPEG_FOUND) - add_definitions(-DHAVE_TURBO_JPEG) - message( STATUS "Using Turbo JPEG library: ${TurboJPEG_LIBRARY}") - include_directories(${TurboJPEG_INCLUDE_DIRS}) - else() - # System JPEG - find_package(JPEG) - if (JPEG_FOUND) - add_definitions(-DHAVE_JPEG) - message( STATUS "Using system JPEG library: ${JPEG_LIBRARIES}") - include_directories(${JPEG_INCLUDE_DIR}) - else() - message( STATUS "JPEG library not found, MJPEG camera format won't work in V4L2 grabber.") - endif() - endif (TURBOJPEG_FOUND) - - if (TURBOJPEG_FOUND OR JPEG_FOUND) - add_definitions(-DHAVE_JPEG_DECODER) - endif() -endif() - if(APPLE) set(CMAKE_EXE_LINKER_FLAGS "-framework CoreGraphics") endif() diff --git a/HyperionConfig.h.in b/HyperionConfig.h.in index 00537e71..4933ad00 100644 --- a/HyperionConfig.h.in +++ b/HyperionConfig.h.in @@ -1,48 +1,51 @@ // Generated config file -// Define to enable the dispmanx grabber +// Define to enable the DispmanX grabber #cmakedefine ENABLE_DISPMANX -// Define to enable the v4l2 grabber +// Define to enable the V4L2 grabber #cmakedefine ENABLE_V4L2 -// Define to enable the framebuffer grabber +// Define to enable the Media Foundation grabber +#cmakedefine ENABLE_MF + +// Define to enable the Framebuffer grabber #cmakedefine ENABLE_FB -// Define to enable the amlogic grabber +// Define to enable the AMLogic grabber #cmakedefine ENABLE_AMLOGIC -// Define to enable the osx grabber +// Define to enable the OSX grabber #cmakedefine ENABLE_OSX -// Define to enable the x11 grabber +// Define to enable the X11 grabber #cmakedefine ENABLE_X11 -// Define to enable the xcb grabber +// Define to enable the XCB grabber #cmakedefine ENABLE_XCB -// Define to enable the qt grabber +// Define to enable the Qt grabber #cmakedefine ENABLE_QT // Define to enable the DirectX grabber #cmakedefine ENABLE_DX -// Define to enable the spi-device +// Define to enable the SPI-Device #cmakedefine ENABLE_SPIDEV -// Define to enable the 
ws281x-pwm-via-dma-device using jgarff's library +// Define to enable the WS281x-PWM-via-DMA-device using jgarff's library #cmakedefine ENABLE_WS281XPWM -// Define to enable the tinkerforge device +// Define to enable the Tinkerforge device #cmakedefine ENABLE_TINKERFORGE -// Define to enable avahi +// Define to enable AVAHI #cmakedefine ENABLE_AVAHI -// Define to enable cec +// Define to enable CEC #cmakedefine ENABLE_CEC -// Define to enable the usb / hid devices +// Define to enable the USB / HID devices #cmakedefine ENABLE_USB_HID // Define to enable profiler for development purpose diff --git a/assets/webconfig/content/conf_grabber.html b/assets/webconfig/content/conf_grabber.html index 2865dc03..1b50ba3f 100644 --- a/assets/webconfig/content/conf_grabber.html +++ b/assets/webconfig/content/conf_grabber.html @@ -2,7 +2,7 @@
- +
diff --git a/assets/webconfig/content/conf_instcapture.html b/assets/webconfig/content/conf_instcapture.html new file mode 100644 index 00000000..73a8bf75 --- /dev/null +++ b/assets/webconfig/content/conf_instcapture.html @@ -0,0 +1,24 @@ + +
+
+
+ + +
+
+ +
+
+ +
+
+
+
+ + diff --git a/assets/webconfig/content/conf_leds.html b/assets/webconfig/content/conf_leds.html index 6236da28..5acbd9d0 100755 --- a/assets/webconfig/content/conf_leds.html +++ b/assets/webconfig/content/conf_leds.html @@ -6,17 +6,17 @@
-
-
- +
+
'; $('.instances').prepend(instances_html); + updateUiOnInstance(window.currentHyperionInstance); updateHyperionInstanceListing(); @@ -83,7 +92,7 @@ $(document).ready(function () { for (var idx = 0; idx < components.length; idx++) { if (components[idx].name != "ALL") { $("#general_comp_" + components[idx].name).bootstrapToggle(); - $("#general_comp_" + components[idx].name).bootstrapToggle(hyperion_enabled ? "enable" : "disable") + $("#general_comp_" + components[idx].name).bootstrapToggle(hyperion_enabled ? "enable" : "disable"); $("#general_comp_" + components[idx].name).change(e => { requestSetComponentState(e.currentTarget.id.split('_')[2], e.currentTarget.checked); }); @@ -92,6 +101,12 @@ $(document).ready(function () { } // add more info + + var screenGrabber = window.serverConfig.framegrabber.enable ? $.i18n('general_enabled') : $.i18n('general_disabled'); + $('#dash_screen_grabber').html(screenGrabber); + var videoGrabber = window.serverConfig.grabberV4L2.enable ? $.i18n('general_enabled') : $.i18n('general_disabled'); + $('#dash_video_grabber').html(videoGrabber); + var fbPort = window.serverConfig.flatbufServer.enable ? window.serverConfig.flatbufServer.port : $.i18n('general_disabled'); $('#dash_fbPort').html(fbPort); var pbPort = window.serverConfig.protoServer.enable ? 
window.serverConfig.protoServer.port : $.i18n('general_disabled'); diff --git a/assets/webconfig/js/content_general.js b/assets/webconfig/js/content_general.js index 233ee819..c8f68d5d 100644 --- a/assets/webconfig/js/content_general.js +++ b/assets/webconfig/js/content_general.js @@ -1,196 +1,182 @@ -$(document).ready( function() { - performTranslation(); +$(document).ready(function () { + performTranslation(); - var importedConf; - var confName; - var conf_editor = null; + var importedConf; + var confName; + var conf_editor = null; - $('#conf_cont').append(createOptPanel('fa-wrench', $.i18n("edt_conf_gen_heading_title"), 'editor_container', 'btn_submit', 'panel-system')); - if(window.showOptHelp) - { - $('#conf_cont').append(createHelpTable(window.schema.general.properties, $.i18n("edt_conf_gen_heading_title"))); - } - else - $('#conf_imp').appendTo('#conf_cont'); + $('#conf_cont').append(createOptPanel('fa-wrench', $.i18n("edt_conf_gen_heading_title"), 'editor_container', 'btn_submit', 'panel-system')); + if (window.showOptHelp) { + $('#conf_cont').append(createHelpTable(window.schema.general.properties, $.i18n("edt_conf_gen_heading_title"))); + } + else + $('#conf_imp').appendTo('#conf_cont'); - conf_editor = createJsonEditor('editor_container', { - general: window.schema.general - }, true, true); + conf_editor = createJsonEditor('editor_container', { + general: window.schema.general + }, true, true); - conf_editor.on('change',function() { - conf_editor.validate().length || window.readOnlyMode ? $('#btn_submit').attr('disabled', true) : $('#btn_submit').attr('disabled', false); - }); + conf_editor.on('change', function () { + conf_editor.validate().length || window.readOnlyMode ? 
$('#btn_submit').attr('disabled', true) : $('#btn_submit').attr('disabled', false); + }); - $('#btn_submit').off().on('click',function() { - requestWriteConfig(conf_editor.getValue()); - }); + $('#btn_submit').off().on('click', function () { + window.showOptHelp = conf_editor.getEditor("root.general.showOptHelp").getValue(); + requestWriteConfig(conf_editor.getValue()); + }); - // Instance handling - function handleInstanceRename(e) - { + // Instance handling + function handleInstanceRename(e) { - conf_editor.on('change',function() { - window.readOnlyMode ? $('#btn_cl_save').attr('disabled', true) : $('#btn_submit').attr('disabled', false); - window.readOnlyMode ? $('#btn_ma_save').attr('disabled', true) : $('#btn_submit').attr('disabled', false); - }); + conf_editor.on('change', function () { + window.readOnlyMode ? $('#btn_cl_save').attr('disabled', true) : $('#btn_submit').attr('disabled', false); + window.readOnlyMode ? $('#btn_ma_save').attr('disabled', true) : $('#btn_submit').attr('disabled', false); + }); - var inst = e.currentTarget.id.split("_")[1]; - showInfoDialog('renInst', $.i18n('conf_general_inst_renreq_t'), getInstanceNameByIndex(inst)); + var inst = e.currentTarget.id.split("_")[1]; + showInfoDialog('renInst', $.i18n('conf_general_inst_renreq_t'), getInstanceNameByIndex(inst)); - $("#id_btn_ok").off().on('click', function(){ - requestInstanceRename(inst, $('#renInst_name').val()) - }); + $("#id_btn_ok").off().on('click', function () { + requestInstanceRename(inst, $('#renInst_name').val()) + }); - $('#renInst_name').off().on('input',function(e) { - (e.currentTarget.value.length >= 5 && e.currentTarget.value != getInstanceNameByIndex(inst)) ? $('#id_btn_ok').attr('disabled', false) : $('#id_btn_ok').attr('disabled', true); - }); - } + $('#renInst_name').off().on('input', function (e) { + (e.currentTarget.value.length >= 5 && e.currentTarget.value != getInstanceNameByIndex(inst)) ? 
$('#id_btn_ok').attr('disabled', false) : $('#id_btn_ok').attr('disabled', true); + }); + } - function handleInstanceDelete(e) - { - var inst = e.currentTarget.id.split("_")[1]; - showInfoDialog('delInst',$.i18n('conf_general_inst_delreq_h'),$.i18n('conf_general_inst_delreq_t',getInstanceNameByIndex(inst))); - $("#id_btn_yes").off().on('click', function(){ - requestInstanceDelete(inst) - }); - } + function handleInstanceDelete(e) { + var inst = e.currentTarget.id.split("_")[1]; + showInfoDialog('delInst', $.i18n('conf_general_inst_delreq_h'), $.i18n('conf_general_inst_delreq_t', getInstanceNameByIndex(inst))); + $("#id_btn_yes").off().on('click', function () { + requestInstanceDelete(inst) + }); + } - function buildInstanceList() - { - var inst = serverInfo.instance - $('.itbody').html(""); - for(var key in inst) - { - var enable_style = inst[key].running ? "checked" : ""; - var renameBtn = ''; - var startBtn = "" - var delBtn = ""; - if(inst[key].instance > 0) - { - delBtn = ''; - startBtn = ''; + function buildInstanceList() { + var inst = serverInfo.instance + $('.itbody').html(""); + for (var key in inst) { + var enable_style = inst[key].running ? 
"checked" : ""; + var renameBtn = ''; + var startBtn = "" + var delBtn = ""; + if (inst[key].instance > 0) { + delBtn = ''; + startBtn = ''; - } - $('.itbody').append(createTableRow([inst[key].friendly_name, startBtn, renameBtn, delBtn], false, true)); - $('#instren_'+inst[key].instance).off().on('click', handleInstanceRename); + } + $('.itbody').append(createTableRow([inst[key].friendly_name, startBtn, renameBtn, delBtn], false, true)); + $('#instren_' + inst[key].instance).off().on('click', handleInstanceRename); - $('#inst_'+inst[key].instance).bootstrapToggle(); - $('#inst_'+inst[key].instance).change(e => { - requestInstanceStartStop(e.currentTarget.id.split('_').pop(), e.currentTarget.checked); - }); - $('#instdel_'+inst[key].instance).off().on('click', handleInstanceDelete); + $('#inst_' + inst[key].instance).bootstrapToggle(); + $('#inst_' + inst[key].instance).change(e => { + requestInstanceStartStop(e.currentTarget.id.split('_').pop(), e.currentTarget.checked); + }); + $('#instdel_' + inst[key].instance).off().on('click', handleInstanceDelete); - window.readOnlyMode ? $('#instren_'+inst[key].instance).attr('disabled', true) : $('#btn_submit').attr('disabled', false); - window.readOnlyMode ? $('#inst_'+inst[key].instance).attr('disabled', true) : $('#btn_submit').attr('disabled', false); - window.readOnlyMode ? $('#instdel_'+inst[key].instance).attr('disabled', true) : $('#btn_submit').attr('disabled', false); - } - } + window.readOnlyMode ? $('#instren_' + inst[key].instance).attr('disabled', true) : $('#btn_submit').attr('disabled', false); + window.readOnlyMode ? $('#inst_' + inst[key].instance).attr('disabled', true) : $('#btn_submit').attr('disabled', false); + window.readOnlyMode ? 
$('#instdel_' + inst[key].instance).attr('disabled', true) : $('#btn_submit').attr('disabled', false); + } + } - createTable('ithead', 'itbody', 'itable'); - $('.ithead').html(createTableRow([$.i18n('conf_general_inst_namehead'), "", $.i18n('conf_general_inst_actionhead'), ""], true, true)); - buildInstanceList(); + createTable('ithead', 'itbody', 'itable'); + $('.ithead').html(createTableRow([$.i18n('conf_general_inst_namehead'), "", $.i18n('conf_general_inst_actionhead'), ""], true, true)); + buildInstanceList(); - $('#inst_name').off().on('input',function(e) { - (e.currentTarget.value.length >= 5) && !window.readOnlyMode ? $('#btn_create_inst').attr('disabled', false) : $('#btn_create_inst').attr('disabled', true); - if(5-e.currentTarget.value.length >= 1 && 5-e.currentTarget.value.length <= 4) - $('#inst_chars_needed').html(5-e.currentTarget.value.length + " " + $.i18n('general_chars_needed')) - else - $('#inst_chars_needed').html("
") - }); + $('#inst_name').off().on('input', function (e) { + (e.currentTarget.value.length >= 5) && !window.readOnlyMode ? $('#btn_create_inst').attr('disabled', false) : $('#btn_create_inst').attr('disabled', true); + if (5 - e.currentTarget.value.length >= 1 && 5 - e.currentTarget.value.length <= 4) + $('#inst_chars_needed').html(5 - e.currentTarget.value.length + " " + $.i18n('general_chars_needed')) + else + $('#inst_chars_needed').html("
") + }); - $('#btn_create_inst').off().on('click',function(e) { - requestInstanceCreate($('#inst_name').val()); - $('#inst_name').val(""); - $('#btn_create_inst').attr('disabled', true) - }); + $('#btn_create_inst').off().on('click', function (e) { + requestInstanceCreate($('#inst_name').val()); + $('#inst_name').val(""); + $('#btn_create_inst').attr('disabled', true) + }); - $(hyperion).off("instance-updated").on("instance-updated", function(event) { - buildInstanceList() - }); + $(hyperion).off("instance-updated").on("instance-updated", function (event) { + buildInstanceList() + }); - //import - function dis_imp_btn(state) - { - state || window.readOnlyMode ? $('#btn_import_conf').attr('disabled', true) : $('#btn_import_conf').attr('disabled', false); - } + //import + function dis_imp_btn(state) { + state || window.readOnlyMode ? $('#btn_import_conf').attr('disabled', true) : $('#btn_import_conf').attr('disabled', false); + } - function readFile(evt) - { - var f = evt.target.files[0]; + function readFile(evt) { + var f = evt.target.files[0]; - if (f) - { - var r = new FileReader(); - r.onload = function(e) - { - var content = e.target.result.replace(/[^:]?\/\/.*/g, ''); //remove Comments + if (f) { + var r = new FileReader(); + r.onload = function (e) { + var content = e.target.result.replace(/[^:]?\/\/.*/g, ''); //remove Comments - //check file is json - var check = isJsonString(content); - if(check.length != 0) - { - showInfoDialog('error', "", $.i18n('infoDialog_import_jsonerror_text', f.name, JSON.stringify(check))); - dis_imp_btn(true); - } - else - { - content = JSON.parse(content); - //check for hyperion json - if(typeof content.leds === 'undefined' || typeof content.general === 'undefined') - { - showInfoDialog('error', "", $.i18n('infoDialog_import_hyperror_text', f.name)); - dis_imp_btn(true); - } - else - { - dis_imp_btn(false); - importedConf = content; - confName = f.name; - } - } - } - r.readAsText(f); - } - } + //check file is json + var check = 
isJsonString(content); + if (check.length != 0) { + showInfoDialog('error', "", $.i18n('infoDialog_import_jsonerror_text', f.name, JSON.stringify(check))); + dis_imp_btn(true); + } + else { + content = JSON.parse(content); + //check for hyperion json + if (typeof content.leds === 'undefined' || typeof content.general === 'undefined') { + showInfoDialog('error', "", $.i18n('infoDialog_import_hyperror_text', f.name)); + dis_imp_btn(true); + } + else { + dis_imp_btn(false); + importedConf = content; + confName = f.name; + } + } + } + r.readAsText(f); + } + } - $('#btn_import_conf').off().on('click', function(){ - showInfoDialog('import', $.i18n('infoDialog_import_confirm_title'), $.i18n('infoDialog_import_confirm_text', confName)); + $('#btn_import_conf').off().on('click', function () { + showInfoDialog('import', $.i18n('infoDialog_import_confirm_title'), $.i18n('infoDialog_import_confirm_text', confName)); - $('#id_btn_import').off().on('click', function(){ - requestWriteConfig(importedConf, true); - setTimeout(initRestart, 100); - }); - }); + $('#id_btn_import').off().on('click', function () { + requestWriteConfig(importedConf, true); + setTimeout(initRestart, 100); + }); + }); - $('#select_import_conf').off().on('change', function(e){ - if (window.File && window.FileReader && window.FileList && window.Blob) - readFile(e); - else - showInfoDialog('error', "", $.i18n('infoDialog_import_comperror_text')); - }); + $('#select_import_conf').off().on('change', function (e) { + if (window.File && window.FileReader && window.FileList && window.Blob) + readFile(e); + else + showInfoDialog('error', "", $.i18n('infoDialog_import_comperror_text')); + }); - //export - $('#btn_export_conf').off().on('click', function(){ - var name = window.serverConfig.general.name; + //export + $('#btn_export_conf').off().on('click', function () { + var name = window.serverConfig.general.name; - var d = new Date(); - var month = d.getMonth()+1; - var day = d.getDate(); + var d = new Date(); + 
var month = d.getMonth() + 1; + var day = d.getDate(); - var timestamp = d.getFullYear() + '.' + - (month<10 ? '0' : '') + month + '.' + - (day<10 ? '0' : '') + day; + var timestamp = d.getFullYear() + '.' + + (month < 10 ? '0' : '') + month + '.' + + (day < 10 ? '0' : '') + day; - download(JSON.stringify(window.serverConfig, null, "\t"), 'Hyperion-'+window.currentVersion+'-Backup ('+name+') '+timestamp+'.json', "application/json"); - }); + download(JSON.stringify(window.serverConfig, null, "\t"), 'Hyperion-' + window.currentVersion + '-Backup (' + name + ') ' + timestamp + '.json', "application/json"); + }); - //create introduction - if(window.showOptHelp) - { - createHint("intro", $.i18n('conf_general_intro'), "editor_container"); - createHint("intro", $.i18n('conf_general_tok_desc'), "tok_desc_cont"); - createHint("intro", $.i18n('conf_general_inst_desc'), "inst_desc_cont"); - } + //create introduction + if (window.showOptHelp) { + createHint("intro", $.i18n('conf_general_intro'), "editor_container"); + createHint("intro", $.i18n('conf_general_tok_desc'), "tok_desc_cont"); + createHint("intro", $.i18n('conf_general_inst_desc'), "inst_desc_cont"); + } - removeOverlay(); + removeOverlay(); }); diff --git a/assets/webconfig/js/content_grabber.js b/assets/webconfig/js/content_grabber.js old mode 100644 new mode 100755 index 246f32e9..36003c31 --- a/assets/webconfig/js/content_grabber.js +++ b/assets/webconfig/js/content_grabber.js @@ -1,377 +1,792 @@ $(document).ready(function () { performTranslation(); - var conf_editor_v4l2 = null; - var conf_editor_fg = null; - var conf_editor_instCapt = null; - var V4L2_AVAIL = window.serverInfo.grabbers.available.includes("v4l2"); - if (V4L2_AVAIL) { - // Dynamic v4l2 enum schema - var v4l2_dynamic_enum_schema = { - "available_devices": - { - "type": "string", - "title": "edt_conf_v4l2_device_title", - "propertyOrder": 1, - "required": true - }, - "device_inputs": - { - "type": "string", - "title": "edt_conf_v4l2_input_title", 
- "propertyOrder": 3, - "required": true - }, - "resolutions": - { - "type": "string", - "title": "edt_conf_v4l2_resolution_title", - "propertyOrder": 6, - "required": true - }, - "framerates": - { - "type": "string", - "title": "edt_conf_v4l2_framerate_title", - "propertyOrder": 9, - "required": true - } - }; + var conf_editor_video = null; + var conf_editor_screen = null; - // Build dynamic v4l2 enum schema parts - var buildSchemaPart = function (key, schema, device) { - if (schema[key]) { - var enumVals = []; - var enumTitelVals = []; - var v4l2_properties = JSON.parse(JSON.stringify(window.serverInfo.grabbers.v4l2_properties)); + // Screen-Grabber + $('#conf_cont').append(createRow('conf_cont_screen')); + $('#conf_cont_screen').append(createOptPanel('fa-camera', $.i18n("edt_conf_fg_heading_title"), 'editor_container_screengrabber', 'btn_submit_screengrabber', 'panel-system', 'screengrabberPanelId')); + if (window.showOptHelp) { + $('#conf_cont_screen').append(createHelpTable(window.schema.framegrabber.properties, $.i18n("edt_conf_fg_heading_title"), "screengrabberHelpPanelId")); + } - if (key === 'available_devices') { - for (var i = 0; i < v4l2_properties.length; i++) { - enumVals.push(v4l2_properties[i]['device']); + // Video-Grabber + $('#conf_cont').append(createRow('conf_cont_video')); + $('#conf_cont_video').append(createOptPanel('fa-camera', $.i18n("edt_conf_v4l2_heading_title"), 'editor_container_videograbber', 'btn_submit_videograbber', 'panel-system', 'videograbberPanelId')); - v4l2_properties[i].hasOwnProperty('name') - ? 
enumTitelVals.push(v4l2_properties[i]['name']) - : enumTitelVals.push(v4l2_properties[i]['device']); - } - } else if (key == 'resolutions' || key == 'framerates') { - for (var i = 0; i < v4l2_properties.length; i++) { - if (v4l2_properties[i]['device'] == device) { - enumVals = enumTitelVals = v4l2_properties[i][key]; - break; - } - } - } else if (key == 'device_inputs') { - for (var i = 0; i < v4l2_properties.length; i++) { - if (v4l2_properties[i]['device'] == device) { - for (var index = 0; index < v4l2_properties[i]['inputs'].length; index++) { - enumVals.push(v4l2_properties[i]['inputs'][index]['inputIndex'].toString()); - enumTitelVals.push(v4l2_properties[i]['inputs'][index]['inputName']); - } - break; - } - } - } - - window.schema.grabberV4L2.properties[key] = { - "type": schema[key].type, - "title": schema[key].title, - "enum": [].concat(["auto"], enumVals, ["custom"]), - "options": - { - "enum_titles": [].concat(["edt_conf_enum_automatic"], enumTitelVals, ["edt_conf_enum_custom"]), - }, - "propertyOrder": schema[key].propertyOrder, - "required": schema[key].required - }; - } - }; - - // Switch between visible states - function toggleOption(option, state) { - $('[data-schemapath="root.grabberV4L2.' + option + '"]').toggle(state); - if (state) ( - $('[data-schemapath="root.grabberV4L2.' 
+ option + '"]').addClass('col-md-12'), - $('label[for="root_grabberV4L2_' + option + '"]').css('left', '10px'), - $('[id="root_grabberV4L2_' + option + '"]').css('left', '10px') - ); - } - - // Watch all v4l2 dynamic fields - var setWatchers = function (schema) { - var path = 'root.grabberV4L2.'; - Object.keys(schema).forEach(function (key) { - conf_editor_v4l2.watch(path + key, function () { - var ed = conf_editor_v4l2.getEditor(path + key); - var val = ed.getValue(); - - if (key == 'available_devices') { - var V4L2properties = ['device_inputs', 'resolutions', 'framerates']; - if (val == 'custom') { - var grabberV4L2 = ed.parent; - V4L2properties.forEach(function (item) { - buildSchemaPart(item, v4l2_dynamic_enum_schema, 'none'); - grabberV4L2.original_schema.properties[item] = window.schema.grabberV4L2.properties[item]; - grabberV4L2.schema.properties[item] = window.schema.grabberV4L2.properties[item]; - conf_editor_v4l2.validator.schema.properties.grabberV4L2.properties[item] = window.schema.grabberV4L2.properties[item]; - - grabberV4L2.removeObjectProperty(item); - delete grabberV4L2.cached_editors[item]; - grabberV4L2.addObjectProperty(item); - - conf_editor_v4l2.getEditor(path + item).enable(); - }); - - conf_editor_v4l2.getEditor(path + 'standard').enable(); - toggleOption('device', true); - } else if (val == 'auto') { - V4L2properties.forEach(function (item) { - conf_editor_v4l2.getEditor(path + item).setValue('auto'); - conf_editor_v4l2.getEditor(path + item).disable(); - }); - - conf_editor_v4l2.getEditor(path + 'standard').setValue('auto'); - conf_editor_v4l2.getEditor(path + 'standard').disable(); - - (toggleOption('device', false), toggleOption('input', false), - toggleOption('width', false), toggleOption('height', false), - toggleOption('fps', false)); - } else { - var grabberV4L2 = ed.parent; - V4L2properties.forEach(function (item) { - buildSchemaPart(item, v4l2_dynamic_enum_schema, val); - grabberV4L2.original_schema.properties[item] = 
window.schema.grabberV4L2.properties[item]; - grabberV4L2.schema.properties[item] = window.schema.grabberV4L2.properties[item]; - conf_editor_v4l2.validator.schema.properties.grabberV4L2.properties[item] = window.schema.grabberV4L2.properties[item]; - - grabberV4L2.removeObjectProperty(item); - delete grabberV4L2.cached_editors[item]; - grabberV4L2.addObjectProperty(item); - - conf_editor_v4l2.getEditor(path + item).enable(); - }); - - conf_editor_v4l2.getEditor(path + 'standard').enable(); - toggleOption('device', false); - } - } - - if (key == 'resolutions') - val != 'custom' - ? (toggleOption('width', false), toggleOption('height', false)) - : (toggleOption('width', true), toggleOption('height', true)); - - if (key == 'framerates') - val != 'custom' - ? toggleOption('fps', false) - : toggleOption('fps', true); - - if (key == 'device_inputs') - val != 'custom' - ? toggleOption('input', false) - : toggleOption('input', true); - }); - }); - }; - - // Insert dynamic v4l2 enum schema parts - Object.keys(v4l2_dynamic_enum_schema).forEach(function (key) { - buildSchemaPart(key, v4l2_dynamic_enum_schema, window.serverConfig.grabberV4L2.device); - }); + if (storedAccess === 'expert') { + var conf_cont_video_footer = document.getElementById("editor_container_videograbber").nextElementSibling; + $(conf_cont_video_footer).prepend(' '); } if (window.showOptHelp) { - // Instance Capture - $('#conf_cont').append(createRow('conf_cont_instCapt')); - $('#conf_cont_instCapt').append(createOptPanel('fa-camera', $.i18n("edt_conf_instCapture_heading_title"), 'editor_container_instCapt', 'btn_submit_instCapt')); - $('#conf_cont_instCapt').append(createHelpTable(window.schema.instCapture.properties, $.i18n("edt_conf_instCapture_heading_title"))); - - // Framegrabber - $('#conf_cont').append(createRow('conf_cont_fg')); - $('#conf_cont_fg').append(createOptPanel('fa-camera', $.i18n("edt_conf_fg_heading_title"), 'editor_container_fg', 'btn_submit_fg')); - 
$('#conf_cont_fg').append(createHelpTable(window.schema.framegrabber.properties, $.i18n("edt_conf_fg_heading_title"))); - - // V4L2 - hide if not available - if (V4L2_AVAIL) { - $('#conf_cont').append(createRow('conf_cont_v4l')); - $('#conf_cont_v4l').append(createOptPanel('fa-camera', $.i18n("edt_conf_v4l2_heading_title"), 'editor_container_v4l2', 'btn_submit_v4l2')); - $('#conf_cont_v4l').append(createHelpTable(window.schema.grabberV4L2.properties, $.i18n("edt_conf_v4l2_heading_title"))); - } - } else { - $('#conf_cont').addClass('row'); - $('#conf_cont').append(createOptPanel('fa-camera', $.i18n("edt_conf_instCapture_heading_title"), 'editor_container_instCapt', 'btn_submit_instCapt')); - $('#conf_cont').append(createOptPanel('fa-camera', $.i18n("edt_conf_fg_heading_title"), 'editor_container_fg', 'btn_submit_fg')); - if (V4L2_AVAIL) { - $('#conf_cont').append(createOptPanel('fa-camera', $.i18n("edt_conf_v4l2_heading_title"), 'editor_container_v4l2', 'btn_submit_v4l2')); - } + $('#conf_cont_video').append(createHelpTable(window.schema.grabberV4L2.properties, $.i18n("edt_conf_v4l2_heading_title"), "videograbberHelpPanelId")); } - // Instance Capture - conf_editor_instCapt = createJsonEditor('editor_container_instCapt', { - instCapture: window.schema.instCapture - }, true, true); + JSONEditor.defaults.custom_validators.push(function (schema, value, path) { + var errors = []; - conf_editor_instCapt.on('change', function () { - conf_editor_instCapt.validate().length || window.readOnlyMode ? 
$('#btn_submit_instCapt').attr('disabled', true) : $('#btn_submit_instCapt').attr('disabled', false); + if (path === "root.grabberV4L2" || path === "root.framegrabber") { + var editor; + switch (path) { + case "root.framegrabber": + editor = conf_editor_screen; + break; + case "root.grabberV4L2": + editor = conf_editor_video; + break; + } + + if (value.cropLeft || value.cropRight) { + var width = editor.getEditor(path + ".width").getValue(); + if (value.cropLeft + value.cropRight > width) { + errors.push({ + path: path, + property: 'maximum', + message: $.i18n('edt_conf_v4l2_cropWidthValidation_error', width) + }); + } + } + + if (value.cropTop || value.cropBottom) { + var height = editor.getEditor(path + ".height").getValue(); + if (value.cropTop + value.cropBottom > height) { + errors.push({ + path: path, + property: 'maximum', + message: $.i18n('edt_conf_v4l2_cropHeightValidation_error', height) + }); + } + } + } + return errors; }); - $('#btn_submit_instCapt').off().on('click', function () { - requestWriteConfig(conf_editor_instCapt.getValue()); - }); + function updateCropForWidth(editor, path) { + var width = editor.getEditor(path + ".width").getValue(); + updateJsonEditorRange(editor, path, 'cropLeft', 0, width); + updateJsonEditorRange(editor, path, 'cropRight', 0, width); + } - // Framegrabber - conf_editor_fg = createJsonEditor('editor_container_fg', { + function updateCropForHeight(editor, path) { + var height = editor.getEditor(path + ".height").getValue(); + updateJsonEditorRange(editor, path, 'cropTop', 0, height); + updateJsonEditorRange(editor, path, 'cropBottom', 0, height); + } + + // Screen-Grabber + conf_editor_screen = createJsonEditor('editor_container_screengrabber', { framegrabber: window.schema.framegrabber }, true, true); - conf_editor_fg.on('change', function () { - //Remove Grabbers which are not supported - var grabbers = window.serverInfo.grabbers.available; + conf_editor_screen.on('ready', function () { + // Trigger 
conf_editor_screen.watch - 'root.framegrabber.enable' + var screenEnable = window.serverConfig.framegrabber.enable; + conf_editor_screen.getEditor("root.framegrabber.enable").setValue(screenEnable); + }); - var selector = "root_framegrabber_type"; - var options = $("#" + selector + " option"); + conf_editor_screen.on('change', function () { - for (var i = 0; i < options.length; i++) { - var type = options[i].value; - if (grabbers.indexOf(type) === -1) { - $("#" + selector + " option[value='" + type + "']").remove(); + if (!conf_editor_screen.validate().length) { + var deviceSelected = conf_editor_screen.getEditor("root.framegrabber.available_devices").getValue(); + switch (deviceSelected) { + case "SELECT": + showInputOptionsForKey(conf_editor_screen, "framegrabber", ["enable", "available_devices"], false); + break; + case "NONE": + showInputOptionsForKey(conf_editor_screen, "framegrabber", ["enable", "available_devices"], false); + break; + default: + window.readOnlyMode ? $('#btn_submit_screengrabber').attr('disabled', true) : $('#btn_submit_screengrabber').attr('disabled', false); + break; + } + } + else { + $('#btn_submit_screengrabber').attr('disabled', true); + } + }); + + conf_editor_screen.watch('root.framegrabber.enable', () => { + + var screenEnable = conf_editor_screen.getEditor("root.framegrabber.enable").getValue(); + if (screenEnable) { + showInputOptionsForKey(conf_editor_screen, "framegrabber", "enable", true); + if (window.showOptHelp) { + $('#screengrabberHelpPanelId').show(); + } + discoverInputSources("screen"); + } + else { + $('#btn_submit_screengrabber').attr('disabled', false); + showInputOptionsForKey(conf_editor_screen, "framegrabber", "enable", false); + $('#screengrabberHelpPanelId').hide(); + } + + }); + + conf_editor_screen.watch('root.framegrabber.available_devices', () => { + + var deviceSelected = conf_editor_screen.getEditor("root.framegrabber.available_devices").getValue(); + if (deviceSelected === "SELECT" || deviceSelected === 
"NONE" || deviceSelected === "") { + $('#btn_submit_screengrabber').attr('disabled', true); + showInputOptionsForKey(conf_editor_screen, "framegrabber", ["enable", "available_devices"], false); + } + else { + showInputOptionsForKey(conf_editor_screen, "framegrabber", ["enable", "available_devices"], true); + var addSchemaElements = {}; + var enumVals = []; + var enumTitelVals = []; + var enumDefaultVal = ""; + + var deviceProperties = getPropertiesOfDevice("screen", deviceSelected); + + //Update hidden input element + conf_editor_screen.getEditor("root.framegrabber.device").setValue(deviceProperties.device); + + var video_inputs = deviceProperties.video_inputs; + if (video_inputs.length <= 1) { + addSchemaElements.access = "expert"; + } + + for (const video_input of video_inputs) { + enumVals.push(video_input.inputIdx.toString()); + enumTitelVals.push(video_input.name); + } + + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredVideoInput = window.serverConfig.framegrabber.input.toString(); + if ($.inArray(configuredVideoInput, enumVals) != -1) { + enumDefaultVal = configuredVideoInput; + } + } + updateJsonEditorSelection(conf_editor_screen, 'root.framegrabber', + 'device_inputs', addSchemaElements, enumVals, enumTitelVals, enumDefaultVal, false); + } + + if (conf_editor_screen.validate().length && !window.readOnlyMode) { + $('#btn_submit_screengrabber').attr('disabled', false); + } + } + }); + + conf_editor_screen.watch('root.framegrabber.device_inputs', () => { + var deviceSelected = conf_editor_screen.getEditor("root.framegrabber.available_devices").getValue(); + var videoInputSelected = conf_editor_screen.getEditor("root.framegrabber.device_inputs").getValue(); + + //Update hidden input element + conf_editor_screen.getEditor("root.framegrabber.input").setValue(parseInt(videoInputSelected)); + + var addSchemaElements = {}; + var enumVals = []; + var enumTitelVals = []; + var enumDefaultVal = ""; + + var deviceProperties = 
getPropertiesOfDevice("screen", deviceSelected); + + var formats = deviceProperties.video_inputs[videoInputSelected].formats; + var formatIdx = 0; + + var resolutions = formats[formatIdx].resolutions; + if (resolutions.length <= 1) { + addSchemaElements.access = "advanced"; + } else { + resolutions.sort(compareTwoValues('width', 'height', 'asc')); + } + + for (var i = 0; i < resolutions.length; i++) { + enumVals.push(i.toString()); + var resolutionText = resolutions[i].width + "x" + resolutions[i].height; + enumTitelVals.push(resolutionText); + } + + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredResolutionText = window.serverConfig.framegrabber.width + "x" + window.serverConfig.framegrabber.height; + var idx = $.inArray(configuredResolutionText, enumTitelVals) + if (idx != -1) { + enumDefaultVal = idx.toString(); + } + } + + updateJsonEditorSelection(conf_editor_screen, 'root.framegrabber', + 'resolutions', addSchemaElements, enumVals, enumTitelVals, enumDefaultVal, false); + } + + if (conf_editor_screen.validate().length && !window.readOnlyMode) { + $('#btn_submit_screengrabber').attr('disabled', false); + } + }); + + conf_editor_screen.watch('root.framegrabber.resolutions', () => { + var deviceSelected = conf_editor_screen.getEditor("root.framegrabber.available_devices").getValue(); + var videoInputSelected = conf_editor_screen.getEditor("root.framegrabber.device_inputs").getValue(); + var resolutionSelected = conf_editor_screen.getEditor("root.framegrabber.resolutions").getValue(); + + var addSchemaElements = {}; + var enumVals = []; + var enumDefaultVal = ""; + + var deviceProperties = getPropertiesOfDevice("screen", deviceSelected); + + var formats = deviceProperties.video_inputs[videoInputSelected].formats; + var formatIdx = 0; + + //Update hidden resolution related elements + var width = parseInt(formats[formatIdx].resolutions[resolutionSelected].width); + 
conf_editor_screen.getEditor("root.framegrabber.width").setValue(width); + + var height = parseInt(formats[formatIdx].resolutions[resolutionSelected].height); + conf_editor_screen.getEditor("root.framegrabber.height").setValue(height); + + //Update crop range depending on selected resolution + updateCropForWidth(conf_editor_screen, "root.framegrabber"); + updateCropForHeight(conf_editor_screen, "root.framegrabber"); + + var fps = formats[formatIdx].resolutions[resolutionSelected].fps; + if (!fps) { + enumVals.push("NONE"); + addSchemaElements.options = { "hidden": true }; + } else { + fps.sort((a, b) => a - b); + for (var i = 0; i < fps.length; i++) { + enumVals.push(fps[i].toString()); } } - if (window.serverInfo.grabbers.active) - { - var activegrabber = window.serverInfo.grabbers.active.toLowerCase(); - $("#" + selector + " option[value='" + activegrabber + "']").attr('selected', 'selected'); - } + if (enumVals.length <= 1) { + addSchemaElements.access = "expert"; + } - var selectedType = $("#root_framegrabber_type").val(); - filerFgGrabberOptions(selectedType); + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredFps = window.serverConfig.framegrabber.fps.toString(); + if ($.inArray(configuredFps, enumVals) != -1) { + enumDefaultVal = configuredFps; + } + } + updateJsonEditorSelection(conf_editor_screen, 'root.framegrabber', + 'framerates', addSchemaElements, enumVals, [], enumDefaultVal, false); + } - conf_editor_fg.validate().length || window.readOnlyMode ?
$('#btn_submit_fg').attr('disabled', true) : $('#btn_submit_fg').attr('disabled', false); + if (conf_editor_screen.validate().length && !window.readOnlyMode) { + $('#btn_submit_screengrabber').attr('disabled', false); + } }); - $('#btn_submit_fg').off().on('click', function () { - requestWriteConfig(conf_editor_fg.getValue()); + conf_editor_screen.watch('root.framegrabber.framerates', () => { + //Update hidden fps element + var fps = 0; + var framerates = conf_editor_screen.getEditor("root.framegrabber.framerates").getValue(); + if (framerates !== "NONE") { + fps = parseInt(framerates); + } + conf_editor_screen.getEditor("root.framegrabber.fps").setValue(fps); }); - if (V4L2_AVAIL) { - conf_editor_v4l2 = createJsonEditor('editor_container_v4l2', { - grabberV4L2: window.schema.grabberV4L2 - }, true, true); - conf_editor_v4l2.on('change', function () { - conf_editor_v4l2.validate().length || window.readOnlyMode ? $('#btn_submit_v4l2').attr('disabled', true) : $('#btn_submit_v4l2').attr('disabled', false); - }); + $('#btn_submit_screengrabber').off().on('click', function () { + var saveOptions = conf_editor_screen.getValue(); - conf_editor_v4l2.on('ready', function () { - setWatchers(v4l2_dynamic_enum_schema); + var instCaptOptions = window.serverConfig.instCapture; + instCaptOptions.systemEnable = true; + saveOptions.instCapture = instCaptOptions; - if (window.serverConfig.grabberV4L2.available_devices == 'custom' && window.serverConfig.grabberV4L2.device != 'auto') - toggleOption('device', true); + requestWriteConfig(saveOptions); + }); - if (window.serverConfig.grabberV4L2.device == 'auto') - conf_editor_v4l2.getEditor('root.grabberV4L2.available_devices').setValue('auto'); + // External Input Sources (Video-Grabbers) - if (window.serverConfig.grabberV4L2.available_devices == 'auto') { - ['device_inputs', 'standard', 'resolutions', 'framerates'].forEach(function (item) { - conf_editor_v4l2.getEditor('root.grabberV4L2.' 
+ item).setValue('auto'); - conf_editor_v4l2.getEditor('root.grabberV4L2.' + item).disable(); - }); + var configuredDevice = ""; + var discoveredInputSources = {}; + var deviceProperties = {}; + + function updateDeviceProperties(deviceProperties, property, key) { + var properties = {}; + if (deviceProperties) { + if (deviceProperties.hasOwnProperty(property)) { + properties = deviceProperties[property]; + } + } + updateJsonEditorRange(conf_editor_video, "root.grabberV4L2", key, + properties.minValue, + properties.maxValue, + properties.current, + properties.step, + true); + + if (jQuery.isEmptyObject(properties)) { + showInputOptionForItem(conf_editor_video, "grabberV4L2", key, false); + } else { + showInputOptionForItem(conf_editor_video, "grabberV4L2", key, true); + } + } + + conf_editor_video = createJsonEditor('editor_container_videograbber', { + grabberV4L2: window.schema.grabberV4L2 + }, true, true); + + conf_editor_video.on('ready', function () { + // Trigger conf_editor_video.watch - 'root.grabberV4L2.enable' + var videoEnable = window.serverConfig.grabberV4L2.enable; + conf_editor_video.getEditor("root.grabberV4L2.enable").setValue(videoEnable); + }); + + conf_editor_video.on('change', function () { + + // Hide elements not supported by the backend + if (window.serverInfo.cec.enabled === false) { + showInputOptionForItem(conf_editor_video, "grabberV4L2", "cecDetection", false); + } + + // Validate the current editor's content + if (!conf_editor_video.validate().length) { + var deviceSelected = conf_editor_video.getEditor("root.grabberV4L2.available_devices").getValue(); + switch (deviceSelected) { + case "SELECT": + showInputOptionsForKey(conf_editor_video, "grabberV4L2", ["enable", "available_devices"], false); + break; + case "NONE": + showInputOptionsForKey(conf_editor_video, "grabberV4L2", ["enable", "available_devices"], false); + break; + default: + window.readOnlyMode ? 
$('#btn_submit_videograbber').attr('disabled', true) : $('#btn_submit_videograbber').attr('disabled', false); + break; + } + } + else { + $('#btn_submit_videograbber').attr('disabled', true); + } + }); + + conf_editor_video.watch('root.grabberV4L2.enable', () => { + + var videoEnable = conf_editor_video.getEditor("root.grabberV4L2.enable").getValue(); + if (videoEnable) { + showInputOptionsForKey(conf_editor_video, "grabberV4L2", "enable", true); + $('#btn_videograbber_set_defaults').show(); + if (window.showOptHelp) { + $('#videograbberHelpPanelId').show(); + } + discoverInputSources("video"); + } + else { + $('#btn_submit_videograbber').attr('disabled', false); + $('#btn_videograbber_set_defaults').hide(); + showInputOptionsForKey(conf_editor_video, "grabberV4L2", "enable", false); + $('#videograbberHelpPanelId').hide(); + } + }); + + conf_editor_video.watch('root.grabberV4L2.available_devices', () => { + var deviceSelected = conf_editor_video.getEditor("root.grabberV4L2.available_devices").getValue(); + if (deviceSelected === "SELECT" || deviceSelected === "NONE" || deviceSelected === "") { + $('#btn_submit_videograbber').attr('disabled', true); + showInputOptionsForKey(conf_editor_video, "grabberV4L2", ["enable", "available_devices"], false); + } + else { + showInputOptionsForKey(conf_editor_video, "grabberV4L2", ["enable", "available_devices"], true); + var addSchemaElements = {}; + var enumVals = []; + var enumTitelVals = []; + var enumDefaultVal = ""; + + var deviceProperties = getPropertiesOfDevice("video", deviceSelected); + + //Update hidden input element + conf_editor_video.getEditor("root.grabberV4L2.device").setValue(deviceProperties.device); + + if (deviceProperties.hasOwnProperty('default') && !jQuery.isEmptyObject(deviceProperties.default.properties)) { + $('#btn_videograbber_set_defaults').attr('disabled', false); + } else { + $('#btn_videograbber_set_defaults').attr('disabled', true); } - if (window.serverConfig.grabberV4L2.device_inputs == 
'custom' && window.serverConfig.grabberV4L2.device != 'auto') - toggleOption('input', true); + //If configured device is selected, use the saved values as current values + if (deviceSelected === configuredDevice) { + // Only if the device reported properties, use the configured values. In case no properties are presented, the device properties cannot be controlled. + if (deviceProperties.hasOwnProperty('properties') && !jQuery.isEmptyObject(deviceProperties.properties)) { + let properties = { + brightness: { current: window.serverConfig.grabberV4L2.hardware_brightness }, + contrast: { current: window.serverConfig.grabberV4L2.hardware_contrast }, + saturation: { current: window.serverConfig.grabberV4L2.hardware_saturation }, + hue: { current: window.serverConfig.grabberV4L2.hardware_hue } + } + deviceProperties.properties = properties; + } + } - if (window.serverConfig.grabberV4L2.resolutions == 'custom' && window.serverConfig.grabberV4L2.device != 'auto') - (toggleOption('width', true), toggleOption('height', true)); + updateDeviceProperties(deviceProperties.properties, "brightness", "hardware_brightness"); + updateDeviceProperties(deviceProperties.properties, "contrast", "hardware_contrast"); + updateDeviceProperties(deviceProperties.properties, "saturation", "hardware_saturation"); + updateDeviceProperties(deviceProperties.properties, "hue", "hardware_hue"); - if (window.serverConfig.grabberV4L2.framerates == 'custom' && window.serverConfig.grabberV4L2.device != 'auto') - toggleOption('fps', true); - }); + var video_inputs = deviceProperties.video_inputs; + if (video_inputs.length <= 1) { + addSchemaElements.access = "expert"; + } - $('#btn_submit_v4l2').off().on('click', function () { - var v4l2Options = conf_editor_v4l2.getValue(); + for (const video_input of video_inputs) { + enumVals.push(video_input.inputIdx.toString()); + enumTitelVals.push(video_input.name); + } - if (v4l2Options.grabberV4L2.available_devices != 'custom' && 
v4l2Options.grabberV4L2.available_devices != 'auto') - v4l2Options.grabberV4L2.device = v4l2Options.grabberV4L2.available_devices; + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredVideoInput = window.serverConfig.grabberV4L2.input.toString(); + if ($.inArray(configuredVideoInput, enumVals) != -1) { + enumDefaultVal = configuredVideoInput; + } + } - if (v4l2Options.grabberV4L2.available_devices == 'auto') - v4l2Options.grabberV4L2.device = 'auto'; + updateJsonEditorSelection(conf_editor_video, 'root.grabberV4L2', + 'device_inputs', addSchemaElements, enumVals, enumTitelVals, enumDefaultVal, false, false); + } - if (v4l2Options.grabberV4L2.device_inputs != 'custom' && v4l2Options.grabberV4L2.device_inputs != 'auto' && v4l2Options.grabberV4L2.available_devices != 'auto') - v4l2Options.grabberV4L2.input = parseInt(v4l2Options.grabberV4L2.device_inputs); + if (conf_editor_video.validate().length && !window.readOnlyMode) { + $('#btn_submit_videograbber').attr('disabled', false); + } + } + }); - if (v4l2Options.grabberV4L2.device_inputs == 'auto') - v4l2Options.grabberV4L2.input = -1; + conf_editor_video.watch('root.grabberV4L2.device_inputs', () => { + var deviceSelected = conf_editor_video.getEditor("root.grabberV4L2.available_devices").getValue(); + var videoInputSelected = conf_editor_video.getEditor("root.grabberV4L2.device_inputs").getValue(); - if (v4l2Options.grabberV4L2.resolutions != 'custom' && v4l2Options.grabberV4L2.resolutions != 'auto' && v4l2Options.grabberV4L2.available_devices != 'auto') - (v4l2Options.grabberV4L2.width = parseInt(v4l2Options.grabberV4L2.resolutions.split('x')[0]), - v4l2Options.grabberV4L2.height = parseInt(v4l2Options.grabberV4L2.resolutions.split('x')[1])); + var addSchemaElements = {}; + var enumVals = []; + var enumTitelVals = []; + var enumDefaultVal = ""; - if (v4l2Options.grabberV4L2.resolutions == 'auto') - (v4l2Options.grabberV4L2.width = 0, v4l2Options.grabberV4L2.height = 0); + var 
deviceProperties = getPropertiesOfDevice("video", deviceSelected); + var formats = deviceProperties.video_inputs[videoInputSelected].formats; - if (v4l2Options.grabberV4L2.framerates != 'custom' && v4l2Options.grabberV4L2.framerates != 'auto' && v4l2Options.grabberV4L2.available_devices != 'auto') - v4l2Options.grabberV4L2.fps = parseInt(v4l2Options.grabberV4L2.framerates); + addSchemaElements.access = "advanced"; - if (v4l2Options.grabberV4L2.framerates == 'auto') - v4l2Options.grabberV4L2.fps = 15; + for (var i = 0; i < formats.length; i++) { + if (formats[i].format) { + enumVals.push(formats[i].format); + enumTitelVals.push(formats[i].format.toUpperCase()); + } + else { + enumVals.push("NONE"); + } + } - requestWriteConfig(v4l2Options); - }); - } + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredEncoding = window.serverConfig.grabberV4L2.encoding; + if ($.inArray(configuredEncoding, enumVals) != -1) { + enumDefaultVal = configuredEncoding; + } + } + updateJsonEditorSelection(conf_editor_video, 'root.grabberV4L2', + 'encoding', addSchemaElements, enumVals, enumTitelVals, enumDefaultVal, false); + } + + var enumVals = []; + var enumDefaultVal = ""; + + var standards = deviceProperties.video_inputs[videoInputSelected].standards; + if (!standards) { + enumVals.push("NONE"); + addSchemaElements.options = { "hidden": true }; + } else { + enumVals = standards; + } + + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredStandard = window.serverConfig.grabberV4L2.standard; + if ($.inArray(configuredStandard, enumVals) != -1) { + enumDefaultVal = configuredStandard; + } + } + + updateJsonEditorSelection(conf_editor_video, 'root.grabberV4L2', + 'standard', addSchemaElements, enumVals, [], enumDefaultVal, false); + } + + if (conf_editor_video.validate().length && !window.readOnlyMode) { + $('#btn_submit_videograbber').attr('disabled', false); + } + }); + + 
conf_editor_video.watch('root.grabberV4L2.encoding', () => { + var deviceSelected = conf_editor_video.getEditor("root.grabberV4L2.available_devices").getValue(); + var videoInputSelected = conf_editor_video.getEditor("root.grabberV4L2.device_inputs").getValue(); + var formatSelected = conf_editor_video.getEditor("root.grabberV4L2.encoding").getValue(); + + //Update hidden input element + conf_editor_video.getEditor("root.grabberV4L2.input").setValue(parseInt(videoInputSelected)); + + var addSchemaElements = {}; + var enumVals = []; + var enumTitelVals = []; + var enumDefaultVal = ""; + + var deviceProperties = getPropertiesOfDevice("video", deviceSelected); + + var formats = deviceProperties.video_inputs[videoInputSelected].formats; + var formatIdx = 0; + if (formatSelected !== "NONE") { + formatIdx = formats.findIndex(x => x.format === formatSelected); + } + + var resolutions = formats[formatIdx].resolutions; + if (resolutions.length <= 1) { + addSchemaElements.access = "advanced"; + } else { + resolutions.sort(compareTwoValues('width', 'height', 'asc')); + } + + for (var i = 0; i < resolutions.length; i++) { + enumVals.push(i.toString()); + var resolutionText = resolutions[i].width + "x" + resolutions[i].height; + enumTitelVals.push(resolutionText); + } + + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredResolutionText = window.serverConfig.grabberV4L2.width + "x" + window.serverConfig.grabberV4L2.height; + var idx = $.inArray(configuredResolutionText, enumTitelVals) + if (idx != -1) { + enumDefaultVal = idx.toString(); + } + } + + updateJsonEditorSelection(conf_editor_video, 'root.grabberV4L2', + 'resolutions', addSchemaElements, enumVals, enumTitelVals, enumDefaultVal, false); + } + + if (conf_editor_video.validate().length && !window.readOnlyMode) { + $('#btn_submit_videograbber').attr('disabled', false); + } + }); + + conf_editor_video.watch('root.grabberV4L2.resolutions', () => { + var deviceSelected = 
conf_editor_video.getEditor("root.grabberV4L2.available_devices").getValue(); + var videoInputSelected = conf_editor_video.getEditor("root.grabberV4L2.device_inputs").getValue(); + var formatSelected = conf_editor_video.getEditor("root.grabberV4L2.encoding").getValue(); + var resolutionSelected = conf_editor_video.getEditor("root.grabberV4L2.resolutions").getValue(); + + var addSchemaElements = {}; + var enumVals = []; + var enumDefaultVal = ""; + + var deviceProperties = getPropertiesOfDevice("video", deviceSelected); + + var formats = deviceProperties.video_inputs[videoInputSelected].formats; + var formatIdx = 0; + if (formatSelected !== "NONE") { + formatIdx = formats.findIndex(x => x.format === formatSelected); + } + + //Update hidden resolution related elements + var width = parseInt(formats[formatIdx].resolutions[resolutionSelected].width); + conf_editor_video.getEditor("root.grabberV4L2.width").setValue(width); + + var height = parseInt(formats[formatIdx].resolutions[resolutionSelected].height); + conf_editor_video.getEditor("root.grabberV4L2.height").setValue(height); + + //Update crop range depending on selected resolution + updateCropForWidth(conf_editor_video, "root.grabberV4L2"); + updateCropForHeight(conf_editor_video, "root.grabberV4L2"); + + var fps = formats[formatIdx].resolutions[resolutionSelected].fps; + if (!fps) { + addSchemaElements.options = { "hidden": true }; + } else { + fps.sort((a, b) => a - b); + for (var i = 0; i < fps.length; i++) { + enumVals.push(fps[i].toString()); + } + } + + if (enumVals.length <= 1) { + addSchemaElements.access = "expert"; + } + + if (enumVals.length > 0) { + if (deviceSelected === configuredDevice) { + var configuredFps = window.serverConfig.grabberV4L2.fps.toString(); + if ($.inArray(configuredFps, enumVals) != -1) { + enumDefaultVal = configuredFps; + } + } + updateJsonEditorSelection(conf_editor_video, 'root.grabberV4L2', + 'framerates', addSchemaElements, enumVals, [], enumDefaultVal, false); + } + + if
(conf_editor_video.validate().length && !window.readOnlyMode) { + $('#btn_submit_videograbber').attr('disabled', false); + } + }); + + conf_editor_video.watch('root.grabberV4L2.framerates', () => { + //Update hidden fps element + var fps = 0; + var framerates = conf_editor_video.getEditor("root.grabberV4L2.framerates").getValue(); + if (framerates !== "NONE") { + fps = parseInt(framerates); + } + //Show Frameskipping only when more than 2 fps + if (fps > 2) { + showInputOptionForItem(conf_editor_video, "grabberV4L2", "fpsSoftwareDecimation", true); + } + else { + showInputOptionForItem(conf_editor_video, "grabberV4L2", "fpsSoftwareDecimation", false); + } + conf_editor_video.getEditor("root.grabberV4L2.fps").setValue(fps); + }); + + $('#btn_submit_videograbber').off().on('click', function () { + var saveOptions = conf_editor_video.getValue(); + + var instCaptOptions = window.serverConfig.instCapture; + instCaptOptions.v4lEnable = true; + saveOptions.instCapture = instCaptOptions; + + requestWriteConfig(saveOptions); + }); + + // ------------------------------------------------------------------ + + $('#btn_videograbber_set_defaults').off().on('click', function () { + var deviceSelected = conf_editor_video.getEditor("root.grabberV4L2.available_devices").getValue(); + var deviceProperties = getPropertiesOfDevice("video", deviceSelected); + + var defaultDeviceProperties = {}; + if (deviceProperties.hasOwnProperty('default')) { + if (deviceProperties.default.hasOwnProperty('properties')) { + defaultDeviceProperties = deviceProperties.default.properties; + if (defaultDeviceProperties.brightness) { + conf_editor_video.getEditor("root.grabberV4L2.hardware_brightness").setValue(defaultDeviceProperties.brightness); + } + if (defaultDeviceProperties.contrast) { + conf_editor_video.getEditor("root.grabberV4L2.hardware_contrast").setValue(defaultDeviceProperties.contrast); + } + if (defaultDeviceProperties.saturation) { + 
conf_editor_video.getEditor("root.grabberV4L2.hardware_saturation").setValue(defaultDeviceProperties.saturation); + } + if (defaultDeviceProperties.hue) { + conf_editor_video.getEditor("root.grabberV4L2.hardware_hue").setValue(defaultDeviceProperties.hue); + } + } + } + }); + + // ------------------------------------------------------------------ ////////////////////////////////////////////////// //create introduction if (window.showOptHelp) { - createHint("intro", $.i18n('conf_grabber_fg_intro'), "editor_container_fg"); - if (V4L2_AVAIL) { - createHint("intro", $.i18n('conf_grabber_v4l_intro'), "editor_container_v4l2"); - } + createHint("intro", $.i18n('conf_grabber_fg_intro'), "editor_container_screengrabber"); + createHint("intro", $.i18n('conf_grabber_v4l_intro'), "editor_container_videograbber"); } - function toggleFgOptions(el, state) { - for (var i = 0; i < el.length; i++) { - $('[data-schemapath*="root.framegrabber.' + el[i] + '"]').toggle(state); - } - } - - function filerFgGrabberOptions(type) { - //hide specific options for grabbers found - - var grabbers = window.serverInfo.grabbers.available; - if (grabbers.indexOf(type) > -1) { - toggleFgOptions(["width", "height", "pixelDecimation", "display"], true); - - switch (type) { - case "dispmanx": - toggleFgOptions(["pixelDecimation", "display"], false); - break; - case "x11": - case "xcb": - toggleFgOptions(["width", "height", "display"], false); - break; - case "framebuffer": - toggleFgOptions(["display"], false); - break; - case "amlogic": - toggleFgOptions(["pixelDecimation", "display"], false); - break; - case "qt": - break; - case "dx": - break; - case "osx": - break; - default: - } - } - }; - - $('#root_framegrabber_type').change(function () { - var selectedType = $("#root_framegrabber_type").val(); - filerFgGrabberOptions(selectedType); - }); - removeOverlay(); + + // build dynamic screen input enum + var updateScreenSourcesList = function (type, discoveryInfo) { + var enumVals = []; + var 
enumTitelVals = []; + var enumDefaultVal = ""; + var addSelect = false; + + if (jQuery.isEmptyObject(discoveryInfo)) { + enumVals.push("NONE"); + enumTitelVals.push($.i18n('edt_conf_grabber_discovered_none')); + } + else { + for (const device of discoveryInfo) { + enumVals.push(device.device_name); + } + conf_editor_screen.getEditor('root.framegrabber').enable(); + configuredDevice = window.serverConfig.framegrabber.available_devices; + + if ($.inArray(configuredDevice, enumVals) != -1) { + enumDefaultVal = configuredDevice; + } + else { + addSelect = true; + } + } + if (enumVals.length > 0) { + updateJsonEditorSelection(conf_editor_screen, 'root.framegrabber', + 'available_devices', {}, enumVals, enumTitelVals, enumDefaultVal, addSelect, false); + } + } + + // build dynamic video input enum + var updateVideoSourcesList = function (type, discoveryInfo) { + var enumVals = []; + var enumTitelVals = []; + var enumDefaultVal = ""; + var addSelect = false; + + if (jQuery.isEmptyObject(discoveryInfo)) { + enumVals.push("NONE"); + enumTitelVals.push($.i18n('edt_conf_grabber_discovered_none')); + } + else { + for (const device of discoveryInfo) { + enumVals.push(device.device_name); + } + conf_editor_video.getEditor('root.grabberV4L2').enable(); + configuredDevice = window.serverConfig.grabberV4L2.available_devices; + + if ($.inArray(configuredDevice, enumVals) != -1) { + enumDefaultVal = configuredDevice; + } + else { + addSelect = true; + } + } + + if (enumVals.length > 0) { + updateJsonEditorSelection(conf_editor_video, 'root.grabberV4L2', + 'available_devices', {}, enumVals, enumTitelVals, enumDefaultVal, addSelect, false); + } + } + + async function discoverInputSources(type, params) { + const result = await requestInputSourcesDiscovery(type, params); + + var discoveryResult; + if (result && !result.error) { + discoveryResult = result.info; + } + else { + discoveryResult = { + "video_sources": [] + } + } + + switch (type) { + case "screen": + 
discoveredInputSources.screen = discoveryResult.video_sources; + updateScreenSourcesList(type, discoveredInputSources.screen); + break; + case "video": + discoveredInputSources.video = discoveryResult.video_sources; + updateVideoSourcesList(type, discoveredInputSources.video); + break; + } + } + + function getPropertiesOfDevice(type, deviceName) { + deviceProperties = {}; + for (const deviceRecord of discoveredInputSources[type]) { + if (deviceRecord.device_name === deviceName) { + deviceProperties = deviceRecord; + break; + } + } + return deviceProperties; + } + }); diff --git a/assets/webconfig/js/content_index.js b/assets/webconfig/js/content_index.js index 124ca20c..26fd19d9 100755 --- a/assets/webconfig/js/content_index.js +++ b/assets/webconfig/js/content_index.js @@ -307,12 +307,17 @@ $(document).ready(function () { window.scrollTo(0, 0); }); - $(window).scroll(function(){ + $(window).scroll(function() { if ($(window).scrollTop() > 65) $("#navbar_brand_logo").css("display", "none"); else $("#navbar_brand_logo").css("display", ""); - }); + }); + + $('#side-menu li a, #side-menu li ul li a').click(function() { + $('#side-menu').find('.active').toggleClass('inactive'); // find all active classes and set inactive; + $(this).addClass('active'); + }); }); function suppressDefaultPwWarning() { @@ -349,7 +354,7 @@ $("#btn_darkmode").off().on("click", function (e) { }); // Menuitem toggle; -function SwitchToMenuItem(target) { +function SwitchToMenuItem(target, item) { document.getElementById(target).click(); // Get '; + grabber_config_info_html += '' + $.i18n('conf_grabber_inst_grabber_config_info') + ''; + grabber_config_info_html += ''; + grabber_config_info_html += ''; + $('#editor_container_instCapt').append(grabber_config_info_html); + + conf_editor_instCapt.on('ready', function () { + + if (!window.serverConfig.framegrabber.enable) { + conf_editor_instCapt.getEditor("root.instCapture.systemEnable").setValue(false); + 
conf_editor_instCapt.getEditor("root.instCapture.systemEnable").disable(); + } + else { + conf_editor_instCapt.getEditor("root.instCapture.systemEnable").setValue(window.serverConfig.instCapture.systemEnable); + } + + if (!window.serverConfig.grabberV4L2.enable) { + conf_editor_instCapt.getEditor("root.instCapture.v4lEnable").setValue(false); + conf_editor_instCapt.getEditor("root.instCapture.v4lEnable").disable(); + } + else { + conf_editor_instCapt.getEditor("root.instCapture.v4lEnable").setValue(window.serverConfig.instCapture.v4lEnable); + } + + }); + + conf_editor_instCapt.on('change', function () { + + if (!conf_editor_instCapt.validate().length) { + if (!window.serverConfig.framegrabber.enable && !window.serverConfig.grabberV4L2.enable) { + $('#btn_submit_instCapt').attr('disabled', true); + } else { + window.readOnlyMode ? $('#btn_submit_instCapt').attr('disabled', true) : $('#btn_submit_instCapt').attr('disabled', false); + } + } + else { + $('#btn_submit_instCapt').attr('disabled', true); + } + }); + + conf_editor_instCapt.watch('root.instCapture.systemEnable', () => { + + var screenEnable = conf_editor_instCapt.getEditor("root.instCapture.systemEnable").getValue(); + if (screenEnable) { + conf_editor_instCapt.getEditor("root.instCapture.systemGrabberDevice").setValue(window.serverConfig.framegrabber.available_devices); + conf_editor_instCapt.getEditor("root.instCapture.systemGrabberDevice").disable(); + showInputOptions("instCapture", ["systemGrabberDevice"], true); + showInputOptions("instCapture", ["systemPriority"], true); + + } else { + showInputOptions("instCapture", ["systemGrabberDevice"], false); + showInputOptions("instCapture", ["systemPriority"], false); + } + + }); + + conf_editor_instCapt.watch('root.instCapture.v4lEnable', () => { + var videoEnable = conf_editor_instCapt.getEditor("root.instCapture.v4lEnable").getValue(); + if (videoEnable) { + 
conf_editor_instCapt.getEditor("root.instCapture.v4lGrabberDevice").setValue(window.serverConfig.grabberV4L2.available_devices); + conf_editor_instCapt.getEditor("root.instCapture.v4lGrabberDevice").disable(); + showInputOptions("instCapture", ["v4lGrabberDevice"], true); + showInputOptions("instCapture", ["v4lPriority"], true); + } + else { + if (!window.serverConfig.grabberV4L2.enable) { + conf_editor_instCapt.getEditor("root.instCapture.v4lEnable").disable(); + } + showInputOptions("instCapture", ["v4lGrabberDevice"], false); + showInputOptions("instCapture", ["v4lPriority"], false); + } + }); + + $('#btn_submit_instCapt').off().on('click', function () { + requestWriteConfig(conf_editor_instCapt.getValue()); + }); + + removeOverlay(); +}); diff --git a/assets/webconfig/js/content_leds.js b/assets/webconfig/js/content_leds.js index 982c6a00..92a6a494 100755 --- a/assets/webconfig/js/content_leds.js +++ b/assets/webconfig/js/content_leds.js @@ -701,6 +701,8 @@ $(document).ready(function () { var hwLedCountDefault = 1; var colorOrderDefault = "rgb"; + $('#btn_test_controller').hide(); + switch (ledType) { case "cololight": case "wled": @@ -769,11 +771,38 @@ $(document).ready(function () { }); conf_editor.on('change', function () { - //Check, if device can be identified/tested and/or saved + // //Check, if device can be identified/tested and/or saved var canIdentify = false; var canSave = false; switch (ledType) { + + case "atmoorb": + case "fadecandy": + case "tinkerforge": + case "tpm2net": + case "udpe131": + case "udpartnet": + case "udph801": + case "udpraw": + var host = conf_editor.getEditor("root.specificOptions.host").getValue(); + if (host !== "") { + canSave = true; + } + break; + + case "philipshue": + var host = conf_editor.getEditor("root.specificOptions.host").getValue(); + var username = conf_editor.getEditor("root.specificOptions.username").getValue(); + if (host !== "" && username != "") { + var useEntertainmentAPI = 
conf_editor.getEditor("root.specificOptions.useEntertainmentAPI").getValue(); + var clientkey = conf_editor.getEditor("root.specificOptions.clientkey").getValue(); + if (!useEntertainmentAPI || clientkey !== "") { + canSave = true; + } + } + break; + case "cololight": case "wled": var hostList = conf_editor.getEditor("root.specificOptions.hostList").getValue(); @@ -797,48 +826,20 @@ $(document).ready(function () { } } break; - - case "adalight": - var output = conf_editor.getEditor("root.specificOptions.output").getValue(); - if (output !== "NONE" && output !== "SELECT" && output !== "") { - canIdentify = true; - } - case "atmo": - case "dmx": - case "karate": - case "sedu": - case "tpm2": - case "apa102": - case "apa104": - case "ws2801": - case "lpd6803": - case "lpd8806": - case "p9813": - case "sk6812spi": - case "sk6822spi": - case "sk9822": - case "ws2812spi": - case "piblaster": - var output = conf_editor.getEditor("root.specificOptions.output").getValue(); - if (output !== "NONE" && output !== "SELECT" && output !== "") { - canSave = true; - } - break; default: canIdentify = false; canSave = true; } - if (canIdentify) { - $("#btn_test_controller").removeClass('hidden'); - $('#btn_test_controller').attr('disabled', false); - } - else { - $('#btn_test_controller').attr('disabled', true); - } - - var hardwareLedCount = conf_editor.getEditor("root.generalOptions.hardwareLedCount").getValue(); - if (hardwareLedCount < 1) { + if (!conf_editor.validate().length) { + if (canIdentify) { + $("#btn_test_controller").show(); + $('#btn_test_controller').attr('disabled', false); + } else { + $('#btn_test_controller').hide(); + $('#btn_test_controller').attr('disabled', true); + } + } else { canSave = false; } @@ -886,9 +887,9 @@ $(document).ready(function () { conf_editor.getEditor(specOptPath + "host").setValue(val); break; } - } - showAllDeviceInputOptions("hostList", showOptions); + showAllDeviceInputOptions("hostList", showOptions); + } }); 
conf_editor.watch('root.specificOptions.host', () => { @@ -900,8 +901,10 @@ $(document).ready(function () { else { let params = {}; switch (ledType) { + case "cololight": params = { host: host }; + getProperties_device(ledType, host, params); break; case "nanoleaf": @@ -910,33 +913,70 @@ $(document).ready(function () { return; } params = { host: host, token: token }; + getProperties_device(ledType, host, params); break; case "wled": params = { host: host, filter: "info" }; + getProperties_device(ledType, host, params); break; default: } - - getProperties_device(ledType, host, params); } }); conf_editor.watch('root.specificOptions.output', () => { var output = conf_editor.getEditor("root.specificOptions.output").getValue(); if (output === "NONE" || output === "SELECT" || output === "") { + + $('#btn_submit_controller').attr('disabled', true); + $('#btn_test_controller').attr('disabled', true); + $('#btn_test_controller').hide(); + conf_editor.getEditor("root.generalOptions.hardwareLedCount").setValue(1); showAllDeviceInputOptions("output", false); } else { showAllDeviceInputOptions("output", true); let params = {}; + var canIdentify = false; switch (ledType) { + case "adalight": + canIdentify = true; + break; case "atmo": case "karate": params = { serialPort: output }; getProperties_device(ledType, output, params); break; + case "dmx": + case "sedu": + case "tpm2": + case "apa102": + case "apa104": + case "ws2801": + case "lpd6803": + case "lpd8806": + case "p9813": + case "sk6812spi": + case "sk6822spi": + case "sk9822": + case "ws2812spi": + case "piblaster": + default: + } + + if (!conf_editor.validate().length) { + if (canIdentify) { + $("#btn_test_controller").show(); + $('#btn_test_controller').attr('disabled', false); + } else { + $('#btn_test_controller').hide(); + $('#btn_test_controller').attr('disabled', true); + } + if (!window.readOnlyMode) { + $('#btn_submit_controller').attr('disabled', false); + } } } }); @@ -1229,6 +1269,8 @@ function 
saveLedConfig(genDefLayout = false) { break; } + + //Rewrite whole LED & Layout configuration, in case changes were done accross tabs and no default layout if (genDefLayout !== true) { result.ledConfig = getLedConfig(); @@ -1269,8 +1311,6 @@ var updateSelectList = function (ledType, discoveryInfo) { ledTypeGroup = "devRPiGPIO"; } - var specOpt = conf_editor.getEditor('root.specificOptions'); // get specificOptions of the editor - switch (ledTypeGroup) { case "devNET": key = "hostList"; @@ -1434,11 +1474,14 @@ var updateSelectList = function (ledType, discoveryInfo) { } if (enumVals.length > 0) { - updateJsonEditorSelection(specOpt, key, addSchemaElements, enumVals, enumTitelVals, enumDefaultVal, addSelect, addCustom); + updateJsonEditorSelection(conf_editor, 'root.specificOptions', key, addSchemaElements, enumVals, enumTitelVals, enumDefaultVal, addSelect, addCustom); } }; async function discover_device(ledType, params) { + + $('#btn_submit_controller').attr('disabled', true); + const result = await requestLedDeviceDiscovery(ledType, params); var discoveryResult; @@ -1479,6 +1522,7 @@ async function getProperties_device(ledType, key, params) { } else { $('#btn_submit_controller').attr('disabled', true); + $('#btn_test_controller').attr('disabled', true); } } } @@ -1544,8 +1588,7 @@ function updateElements(ledType, key) { if (ledProperties && ledProperties.ledCount) { if (ledProperties.ledCount.length > 0) { var configuredLedCount = window.serverConfig.device.hardwareLedCount; - var generalOpt = conf_editor.getEditor('root.generalOptions'); - updateJsonEditorSelection(generalOpt, "hardwareLedCount", {}, ledProperties.ledCount, [], configuredLedCount); + updateJsonEditorSelection(conf_editor, 'root.generalOptions', "hardwareLedCount", {}, ledProperties.ledCount, [], configuredLedCount); } } break; diff --git a/assets/webconfig/js/content_logging.js b/assets/webconfig/js/content_logging.js index 389f5066..6acd7677 100644 --- a/assets/webconfig/js/content_logging.js 
+++ b/assets/webconfig/js/content_logging.js @@ -25,7 +25,7 @@ $(document).ready(function () { $('#btn_submit').off().on('click', function () { var displayedLogLevel = conf_editor.getEditor("root.logger.level").getValue(); - var newLogLevel = {logger:{}}; + var newLogLevel = { logger: {} }; newLogLevel.logger.level = displayedLogLevel; requestWriteConfig(newLogLevel); @@ -103,7 +103,7 @@ $(document).ready(function () { } }); - $('#log_footer').append(''); + $('#log_footer').append(''); $('#btn_clipboard').off().on('click', function () { const temp = document.createElement('textarea'); @@ -153,7 +153,7 @@ $(document).ready(function () { $(window.hyperion).on("cmd-logging-update", function (event) { - var messages = (event.response.result.messages); + var messages = (event.response.result.messages); if (messages.length != 0) { if (!createdCont) { diff --git a/assets/webconfig/js/content_remote.js b/assets/webconfig/js/content_remote.js index c5d58f25..fcd94955 100644 --- a/assets/webconfig/js/content_remote.js +++ b/assets/webconfig/js/content_remote.js @@ -7,7 +7,7 @@ $(document).ready(function () { var oldEffects = []; var cpcolor = '#B500FF'; var mappingList = window.serverSchema.properties.color.properties.imageToLedMappingType.enum; - var duration = 0; + var duration = ENDLESS; var rgb = { r: 255, g: 0, b: 0 }; var lastImgData = ""; var lastFileName = ""; @@ -201,7 +201,9 @@ $(document).ready(function () { }); for (const comp of components) { - if (comp.name === "ALL") + if (comp.name === "ALL" || (comp.name === "FORWARDER" && window.currentHyperionInstance != 0) || + (comp.name === "GRABBER" && !window.serverConfig.framegrabber.enable) || + (comp.name === "V4L" && !window.serverConfig.grabberV4L2.enable)) continue; const enable_style = (comp.enabled ? 
"checked" : ""); diff --git a/assets/webconfig/js/hyperion.js b/assets/webconfig/js/hyperion.js index d3a00498..9402e383 100644 --- a/assets/webconfig/js/hyperion.js +++ b/assets/webconfig/js/hyperion.js @@ -33,37 +33,39 @@ window.comps = []; window.defaultPasswordIsSet = null; tokenList = {}; +const ENDLESS = -1; + function initRestart() { - $(window.hyperion).off(); - requestServerConfigReload(); - window.watchdog = 10; - connectionLostDetection('restart'); + $(window.hyperion).off(); + requestServerConfigReload(); + window.watchdog = 10; + connectionLostDetection('restart'); } function connectionLostDetection(type) { - if ( window.watchdog > 2 ) - { - var interval_id = window.setInterval(function(){clearInterval(interval_id);}, 9999); // Get a reference to the last - for (var i = 1; i < interval_id; i++) - window.clearInterval(i); - if(type == 'restart') - { - $("body").html($("#container_restart").html()); - // setTimeout delay for probably slower systems, some browser don't execute THIS action - setTimeout(restartAction,250); - } - else - { - $("body").html($("#container_connection_lost").html()); - connectionLostAction(); - } - } - else - { - $.get( "/cgi/cfg_jsonserver", function() {window.watchdog=0}).fail(function() {window.watchdog++;}); - } + if ( window.watchdog > 2 ) + { + var interval_id = window.setInterval(function(){clearInterval(interval_id);}, 9999); // Get a reference to the last + for (var i = 1; i < interval_id; i++) + window.clearInterval(i); + if(type == 'restart') + { + $("body").html($("#container_restart").html()); + // setTimeout delay for probably slower systems, some browser don't execute THIS action + setTimeout(restartAction,250); + } + else + { + $("body").html($("#container_connection_lost").html()); + connectionLostAction(); + } + } + else + { + $.get( "/cgi/cfg_jsonserver", function() {window.watchdog=0}).fail(function() {window.watchdog++;}); + } } setInterval(connectionLostDetection, 3000); @@ -72,107 +74,107 @@ 
setInterval(connectionLostDetection, 3000); function initWebSocket() { - if ("WebSocket" in window) - { - if (window.websocket == null) - { - window.jsonPort = ''; - if(document.location.port == '' && document.location.protocol == "http:") - window.jsonPort = '80'; - else if (document.location.port == '' && document.location.protocol == "https:") - window.jsonPort = '443'; - else - window.jsonPort = document.location.port; - window.websocket = (document.location.protocol == "https:") ? new WebSocket('wss://'+document.location.hostname+":"+window.jsonPort) : new WebSocket('ws://'+document.location.hostname+":"+window.jsonPort); + if ("WebSocket" in window) + { + if (window.websocket == null) + { + window.jsonPort = ''; + if(document.location.port == '' && document.location.protocol == "http:") + window.jsonPort = '80'; + else if (document.location.port == '' && document.location.protocol == "https:") + window.jsonPort = '443'; + else + window.jsonPort = document.location.port; + window.websocket = (document.location.protocol == "https:") ? 
new WebSocket('wss://'+document.location.hostname+":"+window.jsonPort) : new WebSocket('ws://'+document.location.hostname+":"+window.jsonPort); - window.websocket.onopen = function (event) { - $(window.hyperion).trigger({type:"open"}); + window.websocket.onopen = function (event) { + $(window.hyperion).trigger({type:"open"}); - $(window.hyperion).on("cmd-serverinfo", function(event) { - window.watchdog = 0; - }); - }; + $(window.hyperion).on("cmd-serverinfo", function(event) { + window.watchdog = 0; + }); + }; - window.websocket.onclose = function (event) { - // See http://tools.ietf.org/html/rfc6455#section-7.4.1 - var reason; - switch(event.code) - { - case 1000: reason = "Normal closure, meaning that the purpose for which the connection was established has been fulfilled."; break; - case 1001: reason = "An endpoint is \"going away\", such as a server going down or a browser having navigated away from a page."; break; - case 1002: reason = "An endpoint is terminating the connection due to a protocol error"; break; - case 1003: reason = "An endpoint is terminating the connection because it has received a type of data it cannot accept (e.g., an endpoint that understands only text data MAY send this if it receives a binary message)."; break; - case 1004: reason = "Reserved. The specific meaning might be defined in the future."; break; - case 1005: reason = "No status code was actually present."; break; - case 1006: reason = "The connection was closed abnormally, e.g., without sending or receiving a Close control frame"; break; - case 1007: reason = "An endpoint is terminating the connection because it has received data within a message that was not consistent with the type of the message (e.g., non-UTF-8 [http://tools.ietf.org/html/rfc3629] data within a text message)."; break; - case 1008: reason = "An endpoint is terminating the connection because it has received a message that \"violates its policy\". 
This reason is given either if there is no other sutible reason, or if there is a need to hide specific details about the policy."; break; - case 1009: reason = "An endpoint is terminating the connection because it has received a message that is too big for it to process."; break; - case 1010: reason = "An endpoint (client) is terminating the connection because it has expected the server to negotiate one or more extension, but the server didn't return them in the response message of the WebSocket handshake.
Specifically, the extensions that are needed are: " + event.reason; break; - case 1011: reason = "A server is terminating the connection because it encountered an unexpected condition that prevented it from fulfilling the request."; break; - case 1015: reason = "The connection was closed due to a failure to perform a TLS handshake (e.g., the server certificate can't be verified)."; break; - default: reason = "Unknown reason"; - } - $(window.hyperion).trigger({type:"close", reason:reason}); - window.watchdog = 10; - connectionLostDetection(); - }; + window.websocket.onclose = function (event) { + // See http://tools.ietf.org/html/rfc6455#section-7.4.1 + var reason; + switch(event.code) + { + case 1000: reason = "Normal closure, meaning that the purpose for which the connection was established has been fulfilled."; break; + case 1001: reason = "An endpoint is \"going away\", such as a server going down or a browser having navigated away from a page."; break; + case 1002: reason = "An endpoint is terminating the connection due to a protocol error"; break; + case 1003: reason = "An endpoint is terminating the connection because it has received a type of data it cannot accept (e.g., an endpoint that understands only text data MAY send this if it receives a binary message)."; break; + case 1004: reason = "Reserved. The specific meaning might be defined in the future."; break; + case 1005: reason = "No status code was actually present."; break; + case 1006: reason = "The connection was closed abnormally, e.g., without sending or receiving a Close control frame"; break; + case 1007: reason = "An endpoint is terminating the connection because it has received data within a message that was not consistent with the type of the message (e.g., non-UTF-8 [http://tools.ietf.org/html/rfc3629] data within a text message)."; break; + case 1008: reason = "An endpoint is terminating the connection because it has received a message that \"violates its policy\". 
This reason is given either if there is no other sutible reason, or if there is a need to hide specific details about the policy."; break; + case 1009: reason = "An endpoint is terminating the connection because it has received a message that is too big for it to process."; break; + case 1010: reason = "An endpoint (client) is terminating the connection because it has expected the server to negotiate one or more extension, but the server didn't return them in the response message of the WebSocket handshake.
Specifically, the extensions that are needed are: " + event.reason; break; + case 1011: reason = "A server is terminating the connection because it encountered an unexpected condition that prevented it from fulfilling the request."; break; + case 1015: reason = "The connection was closed due to a failure to perform a TLS handshake (e.g., the server certificate can't be verified)."; break; + default: reason = "Unknown reason"; + } + $(window.hyperion).trigger({type:"close", reason:reason}); + window.watchdog = 10; + connectionLostDetection(); + }; - window.websocket.onmessage = function (event) { - try - { - var response = JSON.parse(event.data); - var success = response.success; - var cmd = response.command; - var tan = response.tan - if (success || typeof(success) == "undefined") - { - $(window.hyperion).trigger({type:"cmd-"+cmd, response:response}); - } - else - { - // skip tan -1 error handling - if(tan != -1){ - var error = response.hasOwnProperty("error")? response.error : "unknown"; - $(window.hyperion).trigger({type:"error",reason:error}); - console.log("[window.websocket::onmessage] ",error) - } - } - } - catch(exception_error) - { - $(window.hyperion).trigger({type:"error",reason:exception_error}); - console.log("[window.websocket::onmessage] ",exception_error) - } - }; + window.websocket.onmessage = function (event) { + try + { + var response = JSON.parse(event.data); + var success = response.success; + var cmd = response.command; + var tan = response.tan + if (success || typeof(success) == "undefined") + { + $(window.hyperion).trigger({type:"cmd-"+cmd, response:response}); + } + else + { + // skip tan -1 error handling + if(tan != -1){ + var error = response.hasOwnProperty("error")? 
response.error : "unknown"; + $(window.hyperion).trigger({type:"error",reason:error}); + console.log("[window.websocket::onmessage] ",error) + } + } + } + catch(exception_error) + { + $(window.hyperion).trigger({type:"error",reason:exception_error}); + console.log("[window.websocket::onmessage] ",exception_error) + } + }; - window.websocket.onerror = function (error) { - $(window.hyperion).trigger({type:"error",reason:error}); - console.log("[window.websocket::onerror] ",error) - }; - } - } - else - { - $(window.hyperion).trigger("error"); - alert("Websocket is not supported by your browser"); - return; - } + window.websocket.onerror = function (error) { + $(window.hyperion).trigger({type:"error",reason:error}); + console.log("[window.websocket::onerror] ",error) + }; + } + } + else + { + $(window.hyperion).trigger("error"); + alert("Websocket is not supported by your browser"); + return; + } } function sendToHyperion(command, subcommand, msg) { - if (typeof subcommand != 'undefined' && subcommand.length > 0) - subcommand = ',"subcommand":"'+subcommand+'"'; - else - subcommand = ""; + if (typeof subcommand != 'undefined' && subcommand.length > 0) + subcommand = ',"subcommand":"'+subcommand+'"'; + else + subcommand = ""; - if (typeof msg != 'undefined' && msg.length > 0) - msg = ","+msg; - else - msg = ""; + if (typeof msg != 'undefined' && msg.length > 0) + msg = ","+msg; + else + msg = ""; - window.websocket.send('{"command":"'+command+'", "tan":'+window.wsTan+subcommand+msg+'}'); + window.websocket.send('{"command":"'+command+'", "tan":'+window.wsTan+subcommand+msg+'}'); } // Send a json message to Hyperion and wait for a matching response @@ -228,250 +230,256 @@ async function __sendAsync (data) { // Test if admin requires authentication function requestRequiresAdminAuth() { - sendToHyperion("authorize","adminRequired"); + sendToHyperion("authorize","adminRequired"); } // Test if the default password needs to be changed function 
requestRequiresDefaultPasswortChange() { - sendToHyperion("authorize","newPasswordRequired"); + sendToHyperion("authorize","newPasswordRequired"); } // Change password function requestChangePassword(oldPw, newPw) { - sendToHyperion("authorize","newPassword",'"password": "'+oldPw+'", "newPassword":"'+newPw+'"'); + sendToHyperion("authorize","newPassword",'"password": "'+oldPw+'", "newPassword":"'+newPw+'"'); } function requestAuthorization(password) { - sendToHyperion("authorize","login",'"password": "' + password + '"'); + sendToHyperion("authorize","login",'"password": "' + password + '"'); } function requestTokenAuthorization(token) { - sendToHyperion("authorize","login",'"token": "' + token + '"'); + sendToHyperion("authorize","login",'"token": "' + token + '"'); } function requestToken(comment) { - sendToHyperion("authorize","createToken",'"comment": "'+comment+'"'); + sendToHyperion("authorize","createToken",'"comment": "'+comment+'"'); } function requestTokenInfo() { - sendToHyperion("authorize","getTokenList",""); + sendToHyperion("authorize","getTokenList",""); } function requestGetPendingTokenRequests (id, state) { - sendToHyperion("authorize", "getPendingTokenRequests", ""); + sendToHyperion("authorize", "getPendingTokenRequests", ""); } function requestHandleTokenRequest(id, state) { - sendToHyperion("authorize","answerRequest",'"id":"'+id+'", "accept":'+state); + sendToHyperion("authorize","answerRequest",'"id":"'+id+'", "accept":'+state); } function requestTokenDelete(id) { - sendToHyperion("authorize","deleteToken",'"id":"'+id+'"'); + sendToHyperion("authorize","deleteToken",'"id":"'+id+'"'); } function requestInstanceRename(inst, name) { - sendToHyperion("instance", "saveName",'"instance": '+inst+', "name": "'+name+'"'); + sendToHyperion("instance", "saveName",'"instance": '+inst+', "name": "'+name+'"'); } function requestInstanceStartStop(inst, start) { - if(start) - sendToHyperion("instance","startInstance",'"instance": '+inst); - else - 
sendToHyperion("instance","stopInstance",'"instance": '+inst); + if(start) + sendToHyperion("instance","startInstance",'"instance": '+inst); + else + sendToHyperion("instance","stopInstance",'"instance": '+inst); } function requestInstanceDelete(inst) { - sendToHyperion("instance","deleteInstance",'"instance": '+inst); + sendToHyperion("instance","deleteInstance",'"instance": '+inst); } function requestInstanceCreate(name) { - sendToHyperion("instance","createInstance",'"name": "'+name+'"'); + sendToHyperion("instance","createInstance",'"name": "'+name+'"'); } function requestInstanceSwitch(inst) { - sendToHyperion("instance","switchTo",'"instance": '+inst); + sendToHyperion("instance","switchTo",'"instance": '+inst); } function requestServerInfo() { - sendToHyperion("serverinfo","",'"subscribe":["components-update","sessions-update","priorities-update", "imageToLedMapping-update", "adjustment-update", "videomode-update", "effects-update", "settings-update", "instance-update"]'); + sendToHyperion("serverinfo","",'"subscribe":["components-update","sessions-update","priorities-update", "imageToLedMapping-update", "adjustment-update", "videomode-update", "effects-update", "settings-update", "instance-update"]'); } function requestSysInfo() { - sendToHyperion("sysinfo"); + sendToHyperion("sysinfo"); } function requestServerConfigSchema() { - sendToHyperion("config","getschema"); + sendToHyperion("config","getschema"); } function requestServerConfig() { - sendToHyperion("config", "getconfig"); + sendToHyperion("config", "getconfig"); } function requestServerConfigReload() { - sendToHyperion("config", "reload"); + sendToHyperion("config", "reload"); } function requestLedColorsStart() { - window.ledStreamActive=true; - sendToHyperion("ledcolors", "ledstream-start"); + window.ledStreamActive=true; + sendToHyperion("ledcolors", "ledstream-start"); } function requestLedColorsStop() { - window.ledStreamActive=false; - sendToHyperion("ledcolors", "ledstream-stop"); + 
window.ledStreamActive=false; + sendToHyperion("ledcolors", "ledstream-stop"); } function requestLedImageStart() { - window.imageStreamActive=true; - sendToHyperion("ledcolors", "imagestream-start"); + window.imageStreamActive=true; + sendToHyperion("ledcolors", "imagestream-start"); } function requestLedImageStop() { - window.imageStreamActive=false; - sendToHyperion("ledcolors", "imagestream-stop"); + window.imageStreamActive=false; + sendToHyperion("ledcolors", "imagestream-stop"); } function requestPriorityClear(prio) { - if(typeof prio !== 'number') - prio = window.webPrio; + if(typeof prio !== 'number') + prio = window.webPrio; - sendToHyperion("clear", "", '"priority":'+prio+''); + sendToHyperion("clear", "", '"priority":'+prio+''); } function requestClearAll() { - requestPriorityClear(-1) + requestPriorityClear(-1) } function requestPlayEffect(effectName, duration) { - sendToHyperion("effect", "", '"effect":{"name":"'+effectName+'"},"priority":'+window.webPrio+',"duration":'+validateDuration(duration)+',"origin":"'+window.webOrigin+'"'); + sendToHyperion("effect", "", '"effect":{"name":"'+effectName+'"},"priority":'+window.webPrio+',"duration":'+validateDuration(duration)+',"origin":"'+window.webOrigin+'"'); } function requestSetColor(r,g,b,duration) { - sendToHyperion("color", "", '"color":['+r+','+g+','+b+'], "priority":'+window.webPrio+',"duration":'+validateDuration(duration)+',"origin":"'+window.webOrigin+'"'); + sendToHyperion("color", "", '"color":['+r+','+g+','+b+'], "priority":'+window.webPrio+',"duration":'+validateDuration(duration)+',"origin":"'+window.webOrigin+'"'); } function requestSetImage(data,duration,name) { - sendToHyperion("image", "", '"imagedata":"'+data+'", "priority":'+window.webPrio+',"duration":'+validateDuration(duration)+', "format":"auto", "origin":"'+window.webOrigin+'", "name":"'+name+'"'); + sendToHyperion("image", "", '"imagedata":"'+data+'", "priority":'+window.webPrio+',"duration":'+validateDuration(duration)+', 
"format":"auto", "origin":"'+window.webOrigin+'", "name":"'+name+'"'); } function requestSetComponentState(comp, state) { - var state_str = state ? "true" : "false"; - sendToHyperion("componentstate", "", '"componentstate":{"component":"'+comp+'","state":'+state_str+'}'); + var state_str = state ? "true" : "false"; + sendToHyperion("componentstate", "", '"componentstate":{"component":"'+comp+'","state":'+state_str+'}'); } function requestSetSource(prio) { - if ( prio == "auto" ) - sendToHyperion("sourceselect", "", '"auto":true'); - else - sendToHyperion("sourceselect", "", '"priority":'+prio); + if ( prio == "auto" ) + sendToHyperion("sourceselect", "", '"auto":true'); + else + sendToHyperion("sourceselect", "", '"priority":'+prio); } function requestWriteConfig(config, full) { - if(full === true) - window.serverConfig = config; - else - { - jQuery.each(config, function(i, val) { - window.serverConfig[i] = val; - }); - } + if(full === true) + window.serverConfig = config; + else + { + jQuery.each(config, function(i, val) { + window.serverConfig[i] = val; + }); + } - sendToHyperion("config","setconfig", '"config":'+JSON.stringify(window.serverConfig)); + sendToHyperion("config","setconfig", '"config":'+JSON.stringify(window.serverConfig)); } function requestWriteEffect(effectName,effectPy,effectArgs,data) { - var cutArgs = effectArgs.slice(1, -1); - sendToHyperion("create-effect", "", '"name":"'+effectName+'", "script":"'+effectPy+'", '+cutArgs+',"imageData":"'+data+'"'); + var cutArgs = effectArgs.slice(1, -1); + sendToHyperion("create-effect", "", '"name":"'+effectName+'", "script":"'+effectPy+'", '+cutArgs+',"imageData":"'+data+'"'); } function requestTestEffect(effectName,effectPy,effectArgs,data) { - sendToHyperion("effect", "", '"effect":{"name":"'+effectName+'", "args":'+effectArgs+'}, "priority":'+window.webPrio+', "origin":"'+window.webOrigin+'", "pythonScript":"'+effectPy+'", "imageData":"'+data+'"'); + sendToHyperion("effect", "", 
'"effect":{"name":"'+effectName+'", "args":'+effectArgs+'}, "priority":'+window.webPrio+', "origin":"'+window.webOrigin+'", "pythonScript":"'+effectPy+'", "imageData":"'+data+'"'); } function requestDeleteEffect(effectName) { - sendToHyperion("delete-effect", "", '"name":"'+effectName+'"'); + sendToHyperion("delete-effect", "", '"name":"'+effectName+'"'); } function requestLoggingStart() { - window.loggingStreamActive=true; - sendToHyperion("logging", "start"); + window.loggingStreamActive=true; + sendToHyperion("logging", "start"); } function requestLoggingStop() { - window.loggingStreamActive=false; - sendToHyperion("logging", "stop"); + window.loggingStreamActive=false; + sendToHyperion("logging", "stop"); } function requestMappingType(type) { - sendToHyperion("processing", "", '"mappingType": "'+type+'"'); + sendToHyperion("processing", "", '"mappingType": "'+type+'"'); } function requestVideoMode(newMode) { - sendToHyperion("videomode", "", '"videoMode": "'+newMode+'"'); + sendToHyperion("videomode", "", '"videoMode": "'+newMode+'"'); } function requestAdjustment(type, value, complete) { - if(complete === true) - sendToHyperion("adjustment", "", '"adjustment": '+type+''); - else - sendToHyperion("adjustment", "", '"adjustment": {"'+type+'": '+value+'}'); + if(complete === true) + sendToHyperion("adjustment", "", '"adjustment": '+type+''); + else + sendToHyperion("adjustment", "", '"adjustment": {"'+type+'": '+value+'}'); } async function requestLedDeviceDiscovery(type, params) { - let data = { ledDeviceType: type, params: params }; + let data = { ledDeviceType: type, params: params }; - return sendAsyncToHyperion("leddevice", "discover", data, Math.floor(Math.random() * 1000) ); + return sendAsyncToHyperion("leddevice", "discover", data, Math.floor(Math.random() * 1000) ); } async function requestLedDeviceProperties(type, params) { - let data = { ledDeviceType: type, params: params }; + let data = { ledDeviceType: type, params: params }; - return 
sendAsyncToHyperion("leddevice", "getProperties", data, Math.floor(Math.random() * 1000)); + return sendAsyncToHyperion("leddevice", "getProperties", data, Math.floor(Math.random() * 1000)); } function requestLedDeviceIdentification(type, params) { - //sendToHyperion("leddevice", "identify", '"ledDeviceType": "'+type+'","params": '+JSON.stringify(params)+''); let data = { ledDeviceType: type, params: params }; - return sendAsyncToHyperion("leddevice", "identify", data, Math.floor(Math.random() * 1000)); + + return sendAsyncToHyperion("leddevice", "identify", data, Math.floor(Math.random() * 1000)); +} + +async function requestInputSourcesDiscovery(type, params) { + let data = { sourceType: type, params: params }; + + return sendAsyncToHyperion("inputsource", "discover", data, Math.floor(Math.random() * 1000)); } diff --git a/assets/webconfig/js/languages.js b/assets/webconfig/js/languages.js new file mode 100644 index 00000000..b455ba96 --- /dev/null +++ b/assets/webconfig/js/languages.js @@ -0,0 +1,33 @@ +var storedLang; +var availLang = ['cs', 'de', 'en', 'es', 'fr', 'it', 'nl', 'nb', 'pl', 'pt', 'ro', 'sv', 'vi', 'ru', 'tr', 'zh-CN']; +var availLangText = ['Čeština', 'Deutsch', 'English', 'Español', 'Français', 'Italiano', 'Nederlands', 'Norsk Bokmål', 'Polski', 'Português', 'Română', 'Svenska', 'Tiếng Việt', 'русский', 'Türkçe', '汉语']; + +//$.i18n.debug = true; + +//i18n +function initTrans(lc) { + $.i18n().load("i18n", lc).done( + function () { + $.i18n().locale = lc; + performTranslation(); + }); +} + +storedLang = getStorage("langcode"); +if (storedLang == null || storedLang === "undefined") { + + var langLocale = $.i18n().locale.substring(0, 2); + //Test, if language is supported by hyperion + var langIdx = availLang.indexOf(langLocale); + if (langIdx === -1) { + // If language is not supported by hyperion, try fallback language + langLocale = $.i18n().options.fallbackLocale.substring(0, 2); + langIdx = availLang.indexOf(langLocale); + if (langIdx === -1) { 
+ langLocale = 'en'; + } + } + storedLang = langLocale; + setStorage("langcode", storedLang); +} +initTrans(storedLang); diff --git a/assets/webconfig/js/ledsim.js b/assets/webconfig/js/ledsim.js index d34f501a..541cbc6f 100644 --- a/assets/webconfig/js/ledsim.js +++ b/assets/webconfig/js/ledsim.js @@ -1,279 +1,329 @@ -$(document).ready(function() { - var modalOpened = false; - var ledsim_width = 540; - var ledsim_height = 489; - var dialog; - var leds; - var lC = false; - var imageCanvasNodeCtx; - var ledsCanvasNodeCtx; - var canvas_height; - var canvas_width; - var twoDPaths = []; - var toggleLeds, toggleLedsNum = false; +$(document).ready(function () { + var modalOpened = false; + var ledsim_width = 540; + var ledsim_height = 489; + var dialog; + var leds; + var grabberConfig; + var lC = false; + var imageCanvasNodeCtx; + var ledsCanvasNodeCtx; + var canvas_height; + var canvas_width; + var twoDPaths = []; + var toggleLeds = false; + var toggleLedsNum = false; + var toggleSigDetectArea = false; - /// add prototype for simple canvas clear() method - CanvasRenderingContext2D.prototype.clear = function(){ - this.clearRect(0, 0, this.canvas.width, this.canvas.height) - }; + var activeComponent = ""; - function create2dPaths(){ - twoDPaths = []; - for(var idx=0; idx'; - leds_html += ''; + for (var idx = 0; idx < leds.length; idx++) { + var led = leds[idx]; + // can be used as fallback when Path2D is not available + //roundRect(ledsCanvasNodeCtx, led.hmin * canvas_width, led.vmin * canvas_height, (led.hmax-led.hmin) * canvas_width, (led.vmax-led.vmin) * canvas_height, 4, true, colors[idx]) + //ledsCanvasNodeCtx.fillRect(led.hmin * canvas_width, led.vmin * canvas_height, (led.hmax-led.hmin) * canvas_width, (led.vmax-led.vmin) * canvas_height); - $('#leds_canvas').html(leds_html); + ledsCanvasNodeCtx.fillStyle = (useColor) ? 
"rgba(" + colors[cPos] + "," + colors[cPos + 1] + "," + colors[cPos + 2] + ",0.75)" : "hsla(" + (idx * 360 / leds.length) + ",100%,50%,0.75)"; + ledsCanvasNodeCtx.fill(twoDPaths[idx]); + ledsCanvasNodeCtx.strokeStyle = '#323232'; + ledsCanvasNodeCtx.stroke(twoDPaths[idx]); - imageCanvasNodeCtx = document.getElementById("image_preview_canv").getContext("2d"); - ledsCanvasNodeCtx = document.getElementById("leds_preview_canv").getContext("2d"); - create2dPaths(); - printLedsToCanvas(); - resetImage(); - } + if (toggleLedsNum) { + //ledsCanvasNodeCtx.shadowOffsetX = 1; + //ledsCanvasNodeCtx.shadowOffsetY = 1; + //ledsCanvasNodeCtx.shadowColor = "black"; + //ledsCanvasNodeCtx.shadowBlur = 4; + ledsCanvasNodeCtx.fillStyle = "white"; + ledsCanvasNodeCtx.textAlign = "center"; + ledsCanvasNodeCtx.fillText(((led.name) ? led.name : idx), (led.hmin * canvas_width) + (((led.hmax - led.hmin) * canvas_width) / 2), (led.vmin * canvas_height) + (((led.vmax - led.vmin) * canvas_height) / 2)); + } - // ------------------------------------------------------------------ - $('#leds_toggle_num').off().on("click", function() { - toggleLedsNum = !toggleLedsNum - toggleClass('#leds_toggle_num', "btn-danger", "btn-success"); - }); - // ------------------------------------------------------------------ + // increment colorsPosition + cPos += 3; + } + } - $('#leds_toggle').off().on("click", function() { - toggleLeds = !toggleLeds - ledsCanvasNodeCtx.clear(); - toggleClass('#leds_toggle', "btn-success", "btn-danger"); - }); + function updateLedLayout() { + if (grabberConfig.enable && grabberConfig.signalDetection && storedAccess === 'expert') { + $("#sigDetectArea_toggle").show(); + } else { + $("#sigDetectArea_toggle").hide(); + } - // ------------------------------------------------------------------ - $('#leds_toggle_live_video').off().on("click", function() { - setClassByBool('#leds_toggle_live_video',window.imageStreamActive,"btn-success","btn-danger"); - if ( window.imageStreamActive ) - 
{ - requestLedImageStop(); - resetImage(); - } - else - { - requestLedImageStart(); - } - }); + //calculate body size + canvas_height = $('#ledsim_dialog').outerHeight() - $('#ledsim_text').outerHeight() - $('[data-role=footer]').outerHeight() - $('[data-role=header]').outerHeight() - 40; + canvas_width = $('#ledsim_dialog').outerWidth() - 30; - // ------------------------------------------------------------------ - $(window.hyperion).on("cmd-ledcolors-ledstream-update",function(event){ - if (!modalOpened) - { - requestLedColorsStop(); - } - else - { - printLedsToCanvas(event.response.result.leds) - } - }); + $('#leds_canvas').html(""); + var leds_html = ''; + leds_html += ''; + leds_html += ''; - // ------------------------------------------------------------------ - $(window.hyperion).on("cmd-ledcolors-imagestream-update",function(event){ - setClassByBool('#leds_toggle_live_video', window.imageStreamActive, "btn-danger", "btn-success"); - if (!modalOpened) - { - if ($('#leds_prev_toggle_live_video').length > 0) - return; - requestLedImageStop(); - } - else - { - var imageData = (event.response.result.image); + $('#leds_canvas').html(leds_html); - var image = new Image(); - image.onload = function() { - imageCanvasNodeCtx.drawImage(image, 0, 0, canvas_width, canvas_height); - }; - image.src = imageData; - } - }); + imageCanvasNodeCtx = document.getElementById("image_preview_canv").getContext("2d"); + ledsCanvasNodeCtx = document.getElementById("leds_preview_canv").getContext("2d"); + sigDetectAreaCanvasNodeCtx = document.getElementById("grab_preview_canv").getContext("2d"); + create2dPaths(); + printLedsToCanvas(); + resetImage(); - $("#btn_open_ledsim").off().on("click", function(event) { - dialog.open(); - }); + } - // ------------------------------------------------------------------ - $(window.hyperion).on("cmd-settings-update",function(event){ + // ------------------------------------------------------------------ + $('#leds_toggle_num').off().on("click", 
function () { + toggleLedsNum = !toggleLedsNum + toggleClass('#leds_toggle_num', "btn-danger", "btn-success"); + }); + // ------------------------------------------------------------------ - var obj = event.response.data - if ( obj.leds) { - console.log("ledsim: cmd-settings-update", event.response.data); - Object.getOwnPropertyNames(obj).forEach(function(val, idx, array) { - window.serverInfo[val] = obj[val]; - }); - leds = window.serverConfig.leds - updateLedLayout(); - } - }); + $('#leds_toggle').off().on("click", function () { + toggleLeds = !toggleLeds + ledsCanvasNodeCtx.clear(); + toggleClass('#leds_toggle', "btn-success", "btn-danger"); - function resetImage(){ - if (getStorage("darkMode", false) == "on") { - imageCanvasNodeCtx.clear(); - } else { - imageCanvasNodeCtx.fillStyle = "rgb(225,225,225)" - imageCanvasNodeCtx.fillRect(0, 0, canvas_width, canvas_height); - } + if (!toggleLeds) { + $("#leds_toggle_num").show(); + } else { + $("#leds_toggle_num").hide(); + } + }); - var image = document.getElementById('navbar_brand_logo'); - imageCanvasNodeCtx.drawImage(image, canvas_width / 2 - image.width / 2, canvas_height / 2 - image.height / 2, image.width, image.height); - } + // ------------------------------------------------------------------ + $('#leds_toggle_live_video').off().on("click", function () { + setClassByBool('#leds_toggle_live_video', window.imageStreamActive, "btn-success", "btn-danger"); + if (window.imageStreamActive) { + requestLedImageStop(); + resetImage(); + } + else { + requestLedImageStart(); + } + }); + + $('#sigDetectArea_toggle').off().on("click", function () { + toggleSigDetectArea = !toggleSigDetectArea + sigDetectAreaCanvasNodeCtx.clear(); + toggleClass('#sigDetectArea_toggle', "btn-success", "btn-danger"); + }); + + // ------------------------------------------------------------------ + $(window.hyperion).on("cmd-ledcolors-ledstream-update", function (event) { + if (!modalOpened) { + requestLedColorsStop(); + } + else { + 
printLedsToCanvas(event.response.result.leds) + } + }); + + // ------------------------------------------------------------------ + $(window.hyperion).on("cmd-ledcolors-imagestream-update", function (event) { + //console.log("cmd-ledcolors-imagestream-update", event.response); + setClassByBool('#leds_toggle_live_video', window.imageStreamActive, "btn-danger", "btn-success"); + if (!modalOpened) { + if ($('#leds_prev_toggle_live_video').length > 0) + return; + requestLedImageStop(); + } + else { + var imageData = (event.response.result.image); + + var image = new Image(); + image.onload = function () { + imageCanvasNodeCtx.drawImage(image, 0, 0, canvas_width, canvas_height); + }; + image.src = imageData; + } + }); + + $("#btn_open_ledsim").off().on("click", function (event) { + dialog.open(); + }); + + // ------------------------------------------------------------------ + $(window.hyperion).on("cmd-settings-update", function (event) { + + var obj = event.response.data + if (obj.leds || obj.grabberV4L2) { + //console.log("ledsim: cmd-settings-update", event.response.data); + Object.getOwnPropertyNames(obj).forEach(function (val, idx, array) { + window.serverInfo[val] = obj[val]; + }); + leds = window.serverConfig.leds; + grabberConfig = window.serverConfig.grabberV4L2; + updateLedLayout(); + } + }); + + $(window.hyperion).on("cmd-priorities-update", function (event) { + //console.log("cmd-priorities-update", event.response.data); + + var prios = event.response.data.priorities; + if (prios.length > 0) + { + //Clear image when new input + if (prios[0].componentId !== activeComponent) { + resetImage(); + activeComponent = prios[0].componentId; + } + else if (!prios[0].active) { + resetImage(); + } + } + else { + resetImage(); + } + + }); + + function resetImage() { + if (typeof imageCanvasNodeCtx !== "undefined") { + + imageCanvasNodeCtx.fillStyle = "#1c1c1c"; //90% gray + imageCanvasNodeCtx.fillRect(0, 0, canvas_width, canvas_height); + 
imageCanvasNodeCtx.drawImage(image, canvas_width / 2 - image.width / 2, canvas_height / 2 - image.height / 2, image.width, image.height); + } + } }); diff --git a/assets/webconfig/js/lib/jsoneditor.js b/assets/webconfig/js/lib/jsoneditor.js index 8b1c93dd..7ee0fd1c 100755 --- a/assets/webconfig/js/lib/jsoneditor.js +++ b/assets/webconfig/js/lib/jsoneditor.js @@ -1839,7 +1839,14 @@ JSONEditor.AbstractEditor = Class.extend({ this.parent = null; }, getDefault: function() { - if(this.schema["default"]) return this.schema["default"]; + var def = this.schema["default"]; + if(def) { + if (typeof def === "string") { + return $.i18n(def); + } else { + return def; + } + } if(this.schema["enum"]) return this.schema["enum"][0]; var type = this.schema.type || this.schema.oneOf; diff --git a/assets/webconfig/js/settings.js b/assets/webconfig/js/settings.js index bca0cc65..ba746b53 100644 --- a/assets/webconfig/js/settings.js +++ b/assets/webconfig/js/settings.js @@ -1,149 +1,111 @@ -var storedAccess; -var storedLang; -var availLang = ['cs', 'de', 'en', 'es', 'fr', 'it', 'nl', 'nb', 'pl', 'pt', 'ro', 'sv', 'vi', 'ru', 'tr', 'zh-CN']; -var availLangText = ['Čeština', 'Deutsch', 'English', 'Español', 'Français', 'Italiano', 'Nederlands', 'Norsk Bokmål', 'Polski', 'Português', 'Română', 'Svenska', 'Tiếng Việt', 'русский', 'Türkçe', '汉语']; var availAccess = ['default', 'advanced', 'expert']; - -//$.i18n.debug = true; +var storedAccess; //Change Password function changePassword(){ - showInfoDialog('changePassword', $.i18n('InfoDialog_changePassword_title')); + showInfoDialog('changePassword', $.i18n('InfoDialog_changePassword_title')); - // fill default pw if default is set - if(window.defaultPasswordIsSet) - $('#oldPw').val('hyperion') + // fill default pw if default is set + if(window.defaultPasswordIsSet) + $('#oldPw').val('hyperion') - $('#id_btn_ok').off().on('click',function() { - var oldPw = $('#oldPw').val(); - var newPw = $('#newPw').val(); + 
$('#id_btn_ok').off().on('click',function() { + var oldPw = $('#oldPw').val(); + var newPw = $('#newPw').val(); - requestChangePassword(oldPw, newPw) - }); + requestChangePassword(oldPw, newPw) + }); - $('#newPw, #oldPw').off().on('input',function(e) { - ($('#oldPw').val().length >= 8 && $('#newPw').val().length >= 8) && !window.readOnlyMode ? $('#id_btn_ok').attr('disabled', false) : $('#id_btn_ok').attr('disabled', true); - }); + $('#newPw, #oldPw').off().on('input',function(e) { + ($('#oldPw').val().length >= 8 && $('#newPw').val().length >= 8) && !window.readOnlyMode ? $('#id_btn_ok').attr('disabled', false) : $('#id_btn_ok').attr('disabled', true); + }); } -$(document).ready( function() { +$(document).ready(function () { - //i18n - function initTrans(lc){ - if (lc == 'auto') - { - $.i18n().load().done( - function() { - performTranslation(); - }); - } - else - { - $.i18n().locale = lc; - $.i18n().load( "i18n", lc ).done( - function() { - performTranslation(); - }); - } - } + if (!storageComp()) { + showInfoDialog('warning', "Can't store settings", "Your browser doesn't support localStorage. You can't save a specific language setting (fallback to 'auto detection') and access level (fallback to 'default'). Some wizards may be hidden. You could still use the webinterface without further issues"); + $('#language-select').attr("disabled", true); + $('#btn_setaccess').attr("disabled", true); + } - if (storageComp()) - { - storedLang = getStorage("langcode"); - if (storedLang == null) - { - setStorage("langcode", 'auto'); - storedLang = 'auto'; - initTrans(storedLang); - } - else - { - initTrans(storedLang); - } - } - else - { - showInfoDialog('warning', "Can't store settings", "Your browser doesn't support localStorage. You can't save a specific language setting (fallback to 'auto detection') and access level (fallback to 'default'). Some wizards may be hidden. 
You could still use the webinterface without further issues"); - initTrans('auto'); - storedLang = 'auto'; - storedAccess = "default"; - $('#btn_setlang').attr("disabled", true); - $('#btn_setaccess').attr("disabled", true); - } + initLanguageSelection(); - initLanguageSelection(); + //access + storedAccess = getStorage("accesslevel"); + if (storedAccess == null) { + storedAccess = "default"; + setStorage("accesslevel", storedAccess); + } - //access - storedAccess = getStorage("accesslevel"); - if (storedAccess == null) - { - setStorage("accesslevel", "default"); - storedAccess = "default"; - } + if (!storageComp()) { + showInfoDialog('warning', $.i18n('InfoDialog_nostorage_title'), $.i18n('InfoDialog_nostorage_text')); + $('#btn_setlang').attr("disabled", true); + } - $('#btn_setaccess').off().on('click',function() { - var newAccess; - showInfoDialog('select', $.i18n('InfoDialog_access_title'), $.i18n('InfoDialog_access_text')); + $('#btn_setaccess').off().on('click',function() { + var newAccess; + showInfoDialog('select', $.i18n('InfoDialog_access_title'), $.i18n('InfoDialog_access_text')); - for (var lcx = 0; lcx -1 && hyperionAddress.length == 36) hyperionAddress = '['+hyperionAddress+']'; - hyperionAddress = 'http://'+hyperionAddress+':'+window.wSess[i].port; - $('#id_select').append(createSelOpt(hyperionAddress, window.wSess[i].name)); - } - } + for (var i = 0; i -1 && hyperionAddress.length == 36) hyperionAddress = '['+hyperionAddress+']'; + hyperionAddress = 'http://'+hyperionAddress+':'+window.wSess[i].port; + $('#id_select').append(createSelOpt(hyperionAddress, window.wSess[i].name)); + } + } - $('#id_btn_saveset').off().on('click',function() { - $("#loading_overlay").addClass("overlay"); - window.location.href = $('#id_select').val(); - }); + $('#id_btn_saveset').off().on('click',function() { + $("#loading_overlay").addClass("overlay"); + window.location.href = $('#id_select').val(); + }); - }); + }); }); diff --git a/assets/webconfig/js/ui_utils.js 
b/assets/webconfig/js/ui_utils.js old mode 100755 new mode 100644 index 53e17daf..9c9d49fe --- a/assets/webconfig/js/ui_utils.js +++ b/assets/webconfig/js/ui_utils.js @@ -67,7 +67,7 @@ function updateSessions() { function validateDuration(d) { if (typeof d === "undefined" || d < 0) - return 0; + return ENDLESS; else return d *= 1000; } @@ -162,25 +162,22 @@ function initLanguageSelection() { var langLocale = storedLang; - // If no language has been set, resolve browser locale - if (langLocale === 'auto') { - langLocale = $.i18n().locale.substring(0, 2); - } - - // Resolve text for language code - var langText = 'Please Select'; - //Test, if language is supported by hyperion var langIdx = availLang.indexOf(langLocale); if (langIdx > -1) { langText = availLangText[langIdx]; - } - else { + } else { // If language is not supported by hyperion, try fallback language langLocale = $.i18n().options.fallbackLocale.substring(0, 2); langIdx = availLang.indexOf(langLocale); if (langIdx > -1) { langText = availLangText[langIdx]; + } else { + langLocale = 'en'; + langIdx = availLang.indexOf(langLocale); + if (langIdx > -1) { + langText = availLangText[langIdx]; + } } } @@ -195,10 +192,12 @@ function updateUiOnInstance(inst) { $('#btn_hypinstanceswitch').toggle(true); $('#active_instance_dropdown').prop('disabled', false); $('#active_instance_dropdown').css('cursor', 'pointer'); + $("#active_instance_dropdown").css("pointer-events", "auto"); } else { $('#btn_hypinstanceswitch').toggle(false); $('#active_instance_dropdown').prop('disabled', true); $("#active_instance_dropdown").css('cursor', 'default'); + $("#active_instance_dropdown").css("pointer-events", "none"); } } @@ -288,10 +287,12 @@ function showInfoDialog(type, header, message) { } else if (type == "changePassword") { $('#id_body_rename').html('
'); - $('#id_body_rename').append('

' + header + '

'); - $('#id_body_rename').append('
'); - $('#id_body_rename').append(''); - $('#id_footer_rename').html(''); + $('#id_body_rename').append('

' + header + '


'); + $('#id_body_rename').append('

' + $.i18n('infoDialog_password_current_text') + + '


'); + $('#id_body_rename').append('

' + $.i18n('infoDialog_password_new_text')+ + '

'); + $('#id_footer_rename').html(''); $('#id_footer_rename').append(''); } else if (type == "checklist") { @@ -461,7 +462,8 @@ function createJsonEditor(container, schema, setconfig, usePanel, arrayre) { return editor; } -function updateJsonEditorSelection(editor, key, addElements, newEnumVals, newTitelVals, newDefaultVal, addSelect, addCustom, addCustomAsFirst, customText) { +function updateJsonEditorSelection(rootEditor, path, key, addElements, newEnumVals, newTitelVals, newDefaultVal, addSelect, addCustom, addCustomAsFirst, customText) { + var editor = rootEditor.getEditor(path); var orginalProperties = editor.schema.properties[key]; var newSchema = []; @@ -536,13 +538,15 @@ function updateJsonEditorSelection(editor, key, addElements, newEnumVals, newTit editor.original_schema.properties[key] = orginalProperties; editor.schema.properties[key] = newSchema[key]; + rootEditor.validator.schema.properties[editor.key].properties[key] = newSchema[key]; editor.removeObjectProperty(key); delete editor.cached_editors[key]; editor.addObjectProperty(key); } -function updateJsonEditorMultiSelection(editor, key, addElements, newEnumVals, newTitelVals, newDefaultVal) { +function updateJsonEditorMultiSelection(rootEditor, path, key, addElements, newEnumVals, newTitelVals, newDefaultVal) { + var editor = rootEditor.getEditor(path); var orginalProperties = editor.schema.properties[key]; var newSchema = []; @@ -593,36 +597,55 @@ function updateJsonEditorMultiSelection(editor, key, addElements, newEnumVals, n editor.original_schema.properties[key] = orginalProperties; editor.schema.properties[key] = newSchema[key]; + rootEditor.validator.schema.properties[editor.key].properties[key] = newSchema[key]; editor.removeObjectProperty(key); delete editor.cached_editors[key]; editor.addObjectProperty(key); } -function updateJsonEditorRange(editor, key, minimum, maximum, defaultValue, step) { +function updateJsonEditorRange(rootEditor, path, key, minimum, maximum, defaultValue, step, 
clear) { + var editor = rootEditor.getEditor(path); + + //Preserve current value when updating range + var currentValue = rootEditor.getEditor(path + "." + key).getValue(); + var orginalProperties = editor.schema.properties[key]; var newSchema = []; newSchema[key] = orginalProperties; - if (minimum) { + if (clear) { + delete newSchema[key]["minimum"]; + delete newSchema[key]["maximum"]; + delete newSchema[key]["default"]; + delete newSchema[key]["step"]; + } + + if (typeof minimum !== "undefined") { newSchema[key]["minimum"] = minimum; } - if (maximum) { + if (typeof maximum !== "undefined") { newSchema[key]["maximum"] = maximum; } - if (defaultValue) { + if (typeof defaultValue !== "undefined") { newSchema[key]["default"] = defaultValue; + currentValue = defaultValue; } - if (step) { + + if (typeof step !== "undefined") { newSchema[key]["step"] = step; } editor.original_schema.properties[key] = orginalProperties; editor.schema.properties[key] = newSchema[key]; + rootEditor.validator.schema.properties[editor.key].properties[key] = newSchema[key]; editor.removeObjectProperty(key); delete editor.cached_editors[key]; editor.addObjectProperty(key); + + // Restore current (new default) value for new range + rootEditor.getEditor(path + "." 
+ key).setValue(currentValue); } function buildWL(link, linkt, cl) { @@ -807,14 +830,15 @@ function createRow(id) { return el; } -function createOptPanel(phicon, phead, bodyid, footerid, css) { +function createOptPanel(phicon, phead, bodyid, footerid, css, panelId) { phead = '' + phead; + var pfooter = document.createElement('button'); pfooter.className = "btn btn-primary"; pfooter.setAttribute("id", footerid); pfooter.innerHTML = '' + $.i18n('general_button_savesettings'); - return createPanel(phead, "", pfooter, "panel-default", bodyid, css); + return createPanel(phead, "", pfooter, "panel-default", bodyid, css, panelId); } function compareTwoValues(key1, key2, order = 'asc') { @@ -1141,14 +1165,34 @@ function isAccessLevelCompliant(accessLevel) { function showInputOptions(path, elements, state) { for (var i = 0; i < elements.length; i++) { - $('[data-schemapath="' + path + '.' + elements[i] + '"]').toggle(state); + $('[data-schemapath="root.' + path + '.' + elements[i] + '"]').toggle(state); } } -function showInputOptionsForKey(editor, item, showForKey, state) { +function showInputOptionForItem(editor, path, item, state) { + var accessLevel = editor.schema.properties[path].properties[item].access; + // Enable element only, if access level compliant + if (!state || isAccessLevelCompliant(accessLevel)) { + showInputOptions(path, [item], state); + } +} + +function showInputOptionsForKey(editor, item, showForKeys, state) { var elements = []; + var keysToshow = []; + + if (Array.isArray(showForKeys)) { + keysToshow = showForKeys; + } else { + if (typeof showForKeys === 'string') { + keysToshow.push(showForKeys); + } else { + return + } + } + for (var key in editor.schema.properties[item].properties) { - if (showForKey !== key) { + if ($.inArray(key, keysToshow) === -1) { var accessLevel = editor.schema.properties[item].properties[key].access; //Always disable all elements, but only enable elements, if access level compliant @@ -1157,5 +1201,5 @@ function 
showInputOptionsForKey(editor, item, showForKey, state) { } } } - showInputOptions("root." + item, elements, state); + showInputOptions(item, elements, state); } diff --git a/bin/compile.sh b/bin/compile.sh old mode 100755 new mode 100644 index 7843d2b9..f8820dae --- a/bin/compile.sh +++ b/bin/compile.sh @@ -25,7 +25,6 @@ sudo apt-get install \ libavahi-core-dev \ libavahi-compat-libdnssd-dev \ libssl-dev \ - libjpeg-dev \ libqt5sql5-sqlite \ libqt5svg5-dev \ zlib1g-dev \ diff --git a/bin/scripts/docker-compile.sh b/bin/scripts/docker-compile.sh old mode 100644 new mode 100755 index be910bd1..227d7479 --- a/bin/scripts/docker-compile.sh +++ b/bin/scripts/docker-compile.sh @@ -15,6 +15,8 @@ BUILD_TAG="buster" BUILD_PACKAGES=true # packages string inserted to cmake cmd PACKAGES="" +# platform string inserted to cmake cmd +BUILD_PLATFORM="" #Run build using GitHub code files BUILD_LOCAL=0 #Build from scratch @@ -73,6 +75,7 @@ echo "######################################################## # docker-compile.sh -p true # If true, build packages with CPack # docker-compile.sh -l # Run build using local code files # docker-compile.sh -c # Run incremental build, i.e. 
do not delete files created during previous build +# docker-compile.sh -f x11 # cmake PLATFORM parameter # More informations to docker tags at: https://github.com/Hyperion-Project/hyperion.docker-ci" } @@ -84,7 +87,7 @@ function log () { echo "Compile Hyperion using a Docker container" -while getopts i:t:b:p:lcvh option +while getopts i:t:b:p:f:lcvh option do case "${option}" in @@ -92,6 +95,7 @@ do t) BUILD_TAG=${OPTARG};; b) BUILD_TYPE=${OPTARG};; p) BUILD_PACKAGES=${OPTARG};; + f) BUILD_PLATFORM=${OPTARG,,};; l) BUILD_LOCAL=1;; c) BUILD_INCREMENTAL=1;; v) _VERBOSE=1;; @@ -104,7 +108,12 @@ if [ ${BUILD_PACKAGES} == "true" ]; then PACKAGES="package" fi -echo "---> Initialize with IMAGE:TAG=${BUILD_IMAGE}:${BUILD_TAG}, BUILD_TYPE=${BUILD_TYPE}, BUILD_PACKAGES=${BUILD_PACKAGES}, BUILD_LOCAL=${BUILD_LOCAL}, BUILD_INCREMENTAL=${BUILD_INCREMENTAL}" +# determine platform cmake parameter +if [[ ! -z ${BUILD_PLATFORM} ]]; then + PLATFORM="-DPLATFORM=${BUILD_PLATFORM}" +fi + +echo "---> Initialize with IMAGE:TAG=${BUILD_IMAGE}:${BUILD_TAG}, BUILD_TYPE=${BUILD_TYPE}, BUILD_PACKAGES=${BUILD_PACKAGES}, PLATFORM=${BUILD_PLATFORM}, BUILD_LOCAL=${BUILD_LOCAL}, BUILD_INCREMENTAL=${BUILD_INCREMENTAL}" log "---> BASE_PATH = ${BASE_PATH}" CODE_PATH=${BASE_PATH}; @@ -155,7 +164,7 @@ $DOCKER run --rm \ -v "${CODE_PATH}/:/source:rw" \ ${REGISTRY_URL}/${BUILD_IMAGE}:${BUILD_TAG} \ /bin/bash -c "mkdir -p /source/${BUILD_DIR} && cd /source/${BUILD_DIR} && - cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} .. || exit 2 && + cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${PLATFORM} .. || exit 2 && make -j $(nproc) ${PACKAGES} || exit 3 || : && exit 0; exit 1 " || { echo "---> Hyperion compilation failed! 
Abort"; exit 4; } @@ -164,7 +173,6 @@ DOCKERRC=${?} # overwrite file owner to current user sudo chown -fR $(stat -c "%U:%G" ${BASE_PATH}) ${BUILD_PATH} -sudo chown -fR $(stat -c "%U:%G" ${BASE_PATH}) ${DEPLOY_PATH} if [ ${DOCKERRC} == 0 ]; then if [ ${BUILD_LOCAL} == 1 ]; then @@ -175,6 +183,7 @@ if [ ${DOCKERRC} == 0 ]; then echo "---> Copying packages to host folder: ${DEPLOY_PATH}" && cp -v ${BUILD_PATH}/Hyperion-* ${DEPLOY_PATH} 2>/dev/null echo "---> Find deployment packages in: ${DEPLOY_PATH}" + sudo chown -fR $(stat -c "%U:%G" ${BASE_PATH}) ${DEPLOY_PATH} fi fi echo "---> Script finished [${DOCKERRC}]" diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake index 26afe243..e00395b3 100644 --- a/cmake/Dependencies.cmake +++ b/cmake/Dependencies.cmake @@ -241,7 +241,7 @@ macro(DeployWindows TARGET) list(REMOVE_AT DEPENDENCIES 0 1) endwhile() - # Copy OpenSSL Libs + # Copy libssl/libcrypto to 'hyperion' if (OPENSSL_FOUND) string(REGEX MATCHALL "[0-9]+" openssl_versions "${OPENSSL_VERSION}") list(GET openssl_versions 0 openssl_version_major) @@ -271,6 +271,27 @@ macro(DeployWindows TARGET) ) endif(OPENSSL_FOUND) + # Copy libjpeg-turbo to 'hyperion' + if (ENABLE_MF AND TURBOJPEG_FOUND) + find_file(TURBOJPEG_DLL + NAMES "turbojpeg.dll" + PATHS ${TurboJPEG_INCLUDE_DIRS}/.. ${TurboJPEG_INCLUDE_DIRS}/../bin + NO_DEFAULT_PATH + ) + + find_file(JPEG_DLL + NAMES "jpeg62.dll" + PATHS ${TurboJPEG_INCLUDE_DIRS}/.. 
${TurboJPEG_INCLUDE_DIRS}/../bin + NO_DEFAULT_PATH + ) + + install( + FILES ${TURBOJPEG_DLL} ${JPEG_DLL} + DESTINATION "bin" + COMPONENT "Hyperion" + ) + endif() + # Create a qt.conf file in 'bin' to override hard-coded search paths in Qt plugins file(WRITE "${CMAKE_BINARY_DIR}/qt.conf" "[Paths]\nPlugins=../lib/\n") install( @@ -318,6 +339,30 @@ macro(DeployWindows TARGET) ) endforeach() + if(ENABLE_DX) + # Download DirectX End-User Runtimes (June 2010) + set(url "https://download.microsoft.com/download/8/4/A/84A35BF1-DAFE-4AE8-82AF-AD2AE20B6B14/directx_Jun2010_redist.exe") + if(NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}/dx_redist.exe") + file(DOWNLOAD "${url}" "${CMAKE_CURRENT_BINARY_DIR}/dx_redist.exe" + STATUS result + ) + + # Check if the download is successful + list(GET result 0 result_code) + if(NOT result_code EQUAL 0) + list(GET result 1 reason) + message(FATAL_ERROR "Could not download DirectX End-User Runtimes: ${reason}") + endif() + endif() + + # Copy DirectX End-User Runtimes to 'hyperion' + install( + FILES ${CMAKE_CURRENT_BINARY_DIR}/dx_redist.exe + DESTINATION "bin" + COMPONENT "Hyperion" + ) + endif (ENABLE_DX) + else() # Run CMake after target was built add_custom_command( diff --git a/cmake/FindTurboJPEG.cmake b/cmake/FindTurboJPEG.cmake index d6a1c9af..37900e8a 100644 --- a/cmake/FindTurboJPEG.cmake +++ b/cmake/FindTurboJPEG.cmake @@ -3,15 +3,31 @@ # TurboJPEG_INCLUDE_DIRS # TurboJPEG_LIBRARY -find_path(TurboJPEG_INCLUDE_DIRS - NAMES turbojpeg.h - PATH_SUFFIXES include -) +if (ENABLE_MF) + find_path(TurboJPEG_INCLUDE_DIRS + NAMES turbojpeg.h + PATHS + "C:/libjpeg-turbo64" + PATH_SUFFIXES include + ) -find_library(TurboJPEG_LIBRARY - NAMES turbojpeg turbojpeg-static - PATH_SUFFIXES bin lib -) + find_library(TurboJPEG_LIBRARY + NAMES turbojpeg turbojpeg-static + PATHS + "C:/libjpeg-turbo64" + PATH_SUFFIXES bin lib + ) +else() + find_path(TurboJPEG_INCLUDE_DIRS + NAMES turbojpeg.h + PATH_SUFFIXES include + ) + + find_library(TurboJPEG_LIBRARY + NAMES 
turbojpeg turbojpeg-static + PATH_SUFFIXES bin lib + ) +endif() if(TurboJPEG_INCLUDE_DIRS AND TurboJPEG_LIBRARY) include(CheckCSourceCompiles) @@ -26,7 +42,7 @@ if(TurboJPEG_INCLUDE_DIRS AND TurboJPEG_LIBRARY) endif() include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(TurboJpeg +find_package_handle_standard_args(TurboJPEG FOUND_VAR TURBOJPEG_FOUND REQUIRED_VARS TurboJPEG_LIBRARY TurboJPEG_INCLUDE_DIRS TURBOJPEG_WORKS TurboJPEG_INCLUDE_DIRS TurboJPEG_LIBRARY diff --git a/cmake/debian/postinst b/cmake/debian/postinst index 4cef9515..2a1100e4 100644 --- a/cmake/debian/postinst +++ b/cmake/debian/postinst @@ -16,7 +16,7 @@ install_file() } -echo "---Hyperion ambient light postinstall ---" +echo "--- Hyperion ambient light postinstall ---" #check system CPU_RPI=`grep -m1 -c 'BCM2708\|BCM2709\|BCM2710\|BCM2835\|BCM2836\|BCM2837\|BCM2711' /proc/cpuinfo` diff --git a/cmake/desktop/hyperiond_128.png b/cmake/desktop/hyperiond_128.png index c5ce7b04..fa0fb12b 100644 Binary files a/cmake/desktop/hyperiond_128.png and b/cmake/desktop/hyperiond_128.png differ diff --git a/cmake/nsis/template/NSIS.template.in b/cmake/nsis/template/NSIS.template.in index 5f9a1a40..e5b5b0d5 100644 --- a/cmake/nsis/template/NSIS.template.in +++ b/cmake/nsis/template/NSIS.template.in @@ -790,7 +790,13 @@ Section "-Core installation" ;End: ; Install Visual c++ Redistributable -ExecWait '"$INSTDIR\bin\vc_redist.x64.exe" /install /quiet' +ExecWait '"$INSTDIR\bin\vc_redist.x64.exe" /install /quiet /norestart' + +; Install DirectX 9 Redistributable +ExecWait '"$INSTDIR\bin\dx_redist.exe" /q /t:"$INSTDIR\tmp"' + ExecWait '"$INSTDIR\tmp\DXSETUP.exe" /silent' + Delete '$INSTDIR\tmp\*.*' + RMDir '$INSTDIR\tmp' SectionEnd diff --git a/config/hyperion.config.json.commented b/config/hyperion.config.json.commented deleted file mode 100644 index 35ab38af..00000000 --- a/config/hyperion.config.json.commented +++ /dev/null @@ -1,535 +0,0 @@ -// This is a example config (hyperion.config.json) 
with comments, in any case you need to create your own one with HyperCon! -// location of all configs: /etc/hyperion -// Webpage: https://www.hyperion-project.org - - -{ - /// general Settings - /// * 'name' : The user friendly name of the hyperion instance (used for network things) - /// * 'versionBranch' : Which branch should be used for hyperion version - /// * 'showOptHelp' : Show option expanations at the webui. Highly recommended for beginners. - "general" : - { - "name" : "MyHyperionConfig", - "watchedVersionBranch" : "Stable", - "showOptHelp" : true - }, - /// set log level: silent warn verbose debug - "logger" : - { - "level" : "warn" - }, - - /// Device configuration contains the following fields: - /// * 'name' : The user friendly name of the device (only used for display purposes) - /// * 'type' : The type of the device - /// * [device type specific configuration] - /// * 'colorOrder' : The order of the color bytes ('rgb', 'rbg', 'bgr', etc.). - /// * 'rewriteTime': in ms. Data is resend to leds, if no new data is available in thistime. 0 means no refresh - "device" : - { - "type" : "file", - "hardwareLedCount" : 1, - "output" : "/dev/null", - "rate" : 1000000, - "colorOrder" : "rgb", - "rewriteTime": 5000 - }, - - /// Color manipulation configuration used to tune the output colors to specific surroundings. - /// The configuration contains a list of color-transforms. Each transform contains the - /// following fields: - /// * 'imageToLedMappingType' : multicolor_mean - every led has it's own calculatedmean color - /// unicolor_mean - every led has same color, color is the mean of whole image - /// * 'channelAdjustment' - /// * 'id' : The unique identifier of the channel adjustments (eg 'device_1') - /// * 'leds' : The indices (or index ranges) of the leds to which this channel adjustment applies - /// (eg '0-5, 9, 11, 12-17'). The indices are zero based. 
- /// * 'white'/'red'/'green'/'blue'/'cyan'/'magenta'/'yellow' : Array of RGB to adjust the output color - /// * 'gammaRed'/'gammaGreen'/'gammaBlue' : Gamma value for each channel - /// * 'id' : The unique identifier of the channel adjustments (eg 'device_1') - /// * 'id' : The unique identifier of the channel adjustments (eg 'device_1') - /// * 'backlightThreshold' : Minimum brightness (backlight) - /// * 'backlightColored' : backlight with color, instead of white - /// * 'brightness' : overall brightness - /// * 'brightnessCompensation' : 100 means brightness differences are compensated (white is as bright as red, is as bright as yellow. - /// 0 means white is 3x brighter than red, yellow is 2x brighter than red - "color" : - { - "imageToLedMappingType" : "multicolor_mean", - "channelAdjustment" : - [ - { - "id" : "default", - "leds" : "*", - "white" : [255,255,255], - "red" : [255,0,0], - "green" : [0,255,0], - "blue" : [0,0,255], - "cyan" : [0,255,255], - "magenta" : [255,0,255], - "yellow" : [255,255,0], - "gammaRed" : 1.5, - "gammaGreen" : 1.5, - "gammaBlue" : 1.5, - "backlightThreshold" : 0, - "backlightColored" : false, - "brightness" : 100, - "brightnessCompensation" : 80 - } - ] - }, - - /// smoothing - /// * 'smoothing' : Smoothing of the colors in the time-domain with the following tuning - /// parameters: - /// - 'enable' Enable or disable the smoothing (true/false) - /// - 'type' The type of smoothing algorithm ('linear' or 'none') - /// - 'time_ms' The time constant for smoothing algorithm in milliseconds - /// - 'updateFrequency' The update frequency of the leds in Hz - /// - 'updateDelay' The delay of the output to leds (in periods of smoothing) - /// - 'continuousOutput' Flag for enabling continuous output to Leds regardless of new input or not - "smoothing" : - { - "enable" : true, - "type" : "linear", - "time_ms" : 200, - "updateFrequency" : 25.0000, - "updateDelay" : 0, - "continuousOutput" : true - }, - - /// Configuration for the embedded 
V4L2 grabber - /// * device : V4L2 Device to use [default="auto"] (Auto detection) - /// * width : The width of the grabbed frames (pixels) [default=0] - /// * height : The height of the grabbed frames (pixels) [default=0] - /// * standard : Video standard (PAL/NTSC/SECAM/NO_CHANGE) [default="NO_CHANGE"] - /// * sizeDecimation : Size decimation factor [default=8] - /// * cropLeft : Cropping from the left [default=0] - /// * cropRight : Cropping from the right [default=0] - /// * cropTop : Cropping from the top [default=0] - /// * cropBottom : Cropping from the bottom [default=0] - /// * signalDetection : enable/disable signal detection [default=false] - /// * cecDetection : enable/disable cec detection [default=false] - /// * redSignalThreshold : Signal threshold for the red channel between 0 and 100 [default=5] - /// * greenSignalThreshold : Signal threshold for the green channel between 0 and 100 [default=5] - /// * blueSignalThreshold : Signal threshold for the blue channel between 0 and 100 [default=5] - /// * sDHOffsetMin : area for signal detection - horizontal minimum offset value. Values between 0.0 and 1.0 - /// * sDVOffsetMin : area for signal detection - vertical minimum offset value. Values between 0.0 and 1.0 - /// * sDHOffsetMax : area for signal detection - horizontal maximum offset value. Values between 0.0 and 1.0 - /// * sDVOffsetMax : area for signal detection - vertical maximum offset value. 
Values between 0.0 and 1.0 - "grabberV4L2" : - { - "device" : "auto", - "width" : 0, - "height" : 0, - "standard" : "NO_CHANGE", - "sizeDecimation" : 8, - "priority" : 240, - "cropLeft" : 0, - "cropRight" : 0, - "cropTop" : 0, - "cropBottom" : 0, - "redSignalThreshold" : 5, - "greenSignalThreshold" : 5, - "blueSignalThreshold" : 5, - "signalDetection" : false, - "cecDetection" : false, - "sDVOffsetMin" : 0.25, - "sDHOffsetMin" : 0.25, - "sDVOffsetMax" : 0.75, - "sDHOffsetMax" : 0.75 - }, - - /// The configuration for the frame-grabber, contains the following items: - /// * type : type of grabber. (auto|osx|dispmanx|amlogic|x11|xcb|framebuffer|qt) [auto] - /// * width : The width of the grabbed frames [pixels] - /// * height : The height of the grabbed frames [pixels] - /// * frequency_Hz : The frequency of the frame grab [Hz] - /// * ATTENTION : Power-of-Two resolution is not supported and leads to unexpected behaviour! - "framegrabber" : - { - // for all type of grabbers - "type" : "framebuffer", - "frequency_Hz" : 10, - "cropLeft" : 0, - "cropRight" : 0, - "cropTop" : 0, - "cropBottom" : 0, - - // valid for grabber: osx|dispmanx|amlogic|framebuffer - "width" : 96, - "height" : 96, - - // valid for x11|xcb|qt - "pixelDecimation" : 8, - - // valid for qt - "display" 0 - }, - - /// The black border configuration, contains the following items: - /// * enable : true if the detector should be activated - /// * threshold : Value below which a pixel is regarded as black (value between 0 and 100 [%]) - /// * unknownFrameCnt : Number of frames without any detection before the border is set to 0 (default 600) - /// * borderFrameCnt : Number of frames before a consistent detected border gets set (default 50) - /// * maxInconsistentCnt : Number of inconsistent frames that are ignored before a new border gets a chance to proof consistency - /// * blurRemoveCnt : Number of pixels that get removed from the detected border to cut away blur (default 1) - /// * mode : Border 
detection mode (values=default,classic,osd,letterbox) - "blackborderdetector" : - { - "enable" : true, - "threshold" : 5, - "unknownFrameCnt" : 600, - "borderFrameCnt" : 50, - "maxInconsistentCnt" : 10, - "blurRemoveCnt" : 1, - "mode" : "default" - }, - - /// foregroundEffect sets a "booteffect" or "bootcolor" during startup for a given period in ms (duration_ms) - /// * enable : if true, foreground effect is enabled - /// * type : choose between "color" or "effect" - /// * color : if type is color, a color is used (RGB) (example: [0,0,255]) - /// * effect : if type is effect a effect is used (example: "Rainbow swirl fast") - /// * duration_ms : The duration of the selected effect or color (0=endless) - /// HINT: "foregroundEffect" starts always with priority 0, so it blocks all remotes and grabbers if the duration_ms is endless (0) - "foregroundEffect" : - { - "enable" : true, - "type" : "effect", - "color" : [0,0,255], - "effect" : "Rainbow swirl fast", - "duration_ms" : 3000 - }, - - /// backgroundEffect sets a background effect or color. It is used when all capture devices are stopped (manual via remote). Could be also selected via priorities selection. - /// * enable : if true, background effect is enabled - /// * type : choose between "color" or "effect" - /// * color : if type is color, a color is used (RGB) (example: [255,134,0]) - /// * effect : if type is effect a effect is used (example: "Rainbow swirl fast") - "backgroundEffect" : - { - "enable" : true, - "type" : "effect", - "color" : [255,138,0], - "effect" : "Warm mood blobs" - }, - - /// The configuration of the Json/Proto forwarder. Forward messages to multiple instances of Hyperion on same and/or other hosts - /// 'proto' is mostly used for video streams and 'json' for effects - /// * enable : Enable or disable the forwarder (true/false) - /// * proto : Proto server adress and port of your target. 
Syntax:[IP:PORT] -> ["127.0.0.1:19401"] or more instances to forward ["127.0.0.1:19401","192.168.0.24:19403"] - /// * json : Json server adress and port of your target. Syntax:[IP:PORT] -> ["127.0.0.1:19446"] or more instances to forward ["127.0.0.1:19446","192.168.0.24:19448"] - /// HINT:If you redirect to "127.0.0.1" (localhost) you could start a second hyperion with another device/led config! - /// Be sure your client(s) is/are listening on the configured ports. The second Hyperion (if used) also needs to be configured! (WebUI -> Settings Level (Expert) -> Configuration -> Network Services -> Forwarder) - "forwarder" : - { - "enable" : false, - "flat" : ["127.0.0.1:19401"], - "json" : ["127.0.0.1:19446"] - }, - - /// The configuration of the Json server which enables the json remote interface - /// * port : Port at which the json server is started - "jsonServer" : - { - "port" : 19444 - }, - - /// The configuration of the Flatbuffer server which enables the Flatbuffer remote interface - /// * port : Port at which the flatbuffer server is started - "flatbufServer" : - { - "enable" : true, - "port" : 19400, - "timeout" : 5 - }, - - /// The configuration of the Protobuffer server which enables the Protobuffer remote interface - /// * port : Port at which the protobuffer server is started - "protoServer" : - { - "enable" : true, - "port" : 19445, - "timeout" : 5 - }, - - /// The configuration of the boblight server which enables the boblight remote interface - /// * enable : Enable or disable the boblight server (true/false) - /// * port : Port at which the boblight server is started - /// * priority : Priority of the boblight server (Default=128) HINT: lower value result in HIGHER priority! 
- "boblightServer" : - { - "enable" : false, - "port" : 19333, - "priority" : 128 - }, - - /// Configuration of the Hyperion webserver - /// * document_root : path to hyperion webapp files (webconfig developer only) - /// * port : the port where hyperion webapp is accasible - /// * sslPort : the secure (HTTPS) port of the hyperion webapp - /// * crtPath : the path to a certificate file to allow HTTPS connections. Should be in PEM format - /// * keyPath : the path to a private key file to allow HTTPS connections. Should be in PEM format and RSA encrypted - /// * keyPassPhrase : optional: If the key file requires a password add it here - "webConfig" : - { - "document_root" : "/path/to/files", - "port" : 8090, - "sslPort" : 8092, - "crtPath" : "/path/to/mycert.crt", - "keyPath" : "/path/to/mykey.key", - "keyPassPhrase" : "" - }, - - /// The configuration of the effect engine, contains the following items: - /// * paths : An array with absolute location(s) of directories with effects, - /// $ROOT is a keyword which will be replaced with the current rootPath that can be specified on startup from the commandline (defaults to your home directory) - /// * disable : An array with effect names that shouldn't be loaded - "effects" : - { - "paths" : - [ - "$ROOT/custom-effects", - "/usr/share/hyperion/effects" - ], - "disable" : - [ - "Rainbow swirl", - "X-Mas" - ] - }, - - "instCapture" : - { - "systemEnable" : true, - "systemPriority" : 250, - "v4lEnable" : false, - "v4lPriority" : 240 - }, - - /// The configuration of the network security restrictions, contains the following items: - /// * internetAccessAPI : When true allows connection from internet to the API. When false it blocks all outside connections - /// * restirctedInternetAccessAPI : webui voodoo only - ignore it - /// * ipWhitelist : Whitelist ip addresses from the internet to allow access to the API - /// * apiAuth : When true the API requires authentication through tokens to use the API. 
Read also "localApiAuth" - /// * localApiAuth : When false connections from the local network don't require an API authentification. - /// * localAdminApiAuth : When false connections from the local network don't require an authentification for administration access. - "network" : - { - "internetAccessAPI" : false, - "restirctedInternetAccessAPI" : false, - "ipWhitelist" : [], - "apiAuth" : true, - "localApiAuth" : false, - "localAdminAuth": true - }, - - /// Recreate and save led layouts made with web config. These values are just helpers for ui, not for Hyperion. - "ledConfig" : - { - "classic": - { - "top" : 8, - "bottom" : 8, - "left" : 5, - "right" : 5, - "glength" : 0, - "gpos" : 0, - "position" : 0, - "reverse" : false, - "hdepth" : 8, - "vdepth" : 5, - "overlap" : 0, - "edgegap" : 0, - "ptlh" : 0, - "ptlv" : 0, - "ptrh" : 100, - "ptrv" : 0, - "pblh" : 0, - "pblv" : 100, - "pbrh" : 100, - "pbrv" : 100 - - }, - "matrix": - { - "ledshoriz": 10, - "ledsvert" : 10, - "cabling" : "snake", - "start" : "top-left" - } - }, - - /// The configuration for each individual led. This contains the specification of the area - /// averaged of an input image for each led to determine its color. 
Each item in the list - /// contains the following fields: - /// * hmin: The fractional part of the image along the horizontal used for the averaging (minimum) - /// * hmax: The fractional part of the image along the horizontal used for the averaging (maximum) - /// * vmin: The fractional part of the image along the vertical used for the averaging (minimum) - /// * vmax: The fractional part of the image along the vertical used for the averaging (maximum) - /// * colorOrder: Usually the global colorOrder is set at the device section, you can overwrite it here per led - - "leds": - [ - { - "hmax": 0.125, - "hmin": 0, - "vmax": 0.08, - "vmin": 0 - - }, - { - "hmax": 0.25, - "hmin": 0.125, - "vmax": 0.08, - "vmin": 0 - - }, - { - "hmax": 0.375, - "hmin": 0.25, - "vmax": 0.08, - "vmin": 0 - }, - { - "hmax": 0.5, - "hmin": 0.375, - "vmax": 0.08, - "vmin": 0 - }, - { - "hmax": 0.625, - "hmin": 0.5, - "vmax": 0.08, - "vmin": 0 - }, - { - "hmax": 0.75, - "hmin": 0.625, - "vmax": 0.08, - "vmin": 0 - }, - { - "hmax": 0.875, - "hmin": 0.75, - "vmax": 0.08, - "vmin": 0 - }, - { - "hmax": 1, - "hmin": 0.875, - "vmax": 0.08, - "vmin": 0 - }, - { - "hmax": 1, - "hmin": 0.95, - "vmax": 0.2, - "vmin": 0 - }, - { - "hmax": 1, - "hmin": 0.95, - "vmax": 0.4, - "vmin": 0.2 - }, - { - "hmax": 1, - "hmin": 0.95, - "vmax": 0.6, - "vmin": 0.4 - }, - { - "hmax": 1, - "hmin": 0.95, - "vmax": 0.8, - "vmin": 0.6 - }, - { - "hmax": 1, - "hmin": 0.95, - "vmax": 1, - "vmin": 0.8 - }, - { - "hmax": 1, - "hmin": 0.875, - "vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.875, - "hmin": 0.75, - "vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.75, - "hmin": 0.625, - "vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.625, - "hmin": 0.5, - "vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.5, - "hmin": 0.375, - "vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.375, - "hmin": 0.25, - "vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.25, - "hmin": 0.125, - "vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.125, - "hmin": 0, - 
"vmax": 1, - "vmin": 0.92 - }, - { - "hmax": 0.05, - "hmin": 0, - "vmax": 1, - "vmin": 0.8 - }, - { - "hmax": 0.05, - "hmin": 0, - "vmax": 0.8, - "vmin": 0.6 - }, - { - "hmax": 0.05, - "hmin": 0, - "vmax": 0.6, - "vmin": 0.4 - }, - { - "hmax": 0.05, - "hmin": 0, - "vmax": 0.4, - "vmin": 0.2 - }, - { - "hmax": 0.05, - "hmin": 0, - "vmax": 0.2, - "vmin": 0 - } - ] -} diff --git a/config/hyperion.config.json.default b/config/hyperion.config.json.default index ac5ef2ba..910980f1 100644 --- a/config/hyperion.config.json.default +++ b/config/hyperion.config.json.default @@ -2,6 +2,8 @@ "general" : { "name" : "My Hyperion Config", + "configVersion": "configVersionValue", + "previousVersion": "previousVersionValue", "watchedVersionBranch" : "Stable", "showOptHelp" : true }, @@ -62,40 +64,49 @@ "grabberV4L2" : { - "device" : "auto", + "enable" : false, + "device" : "none", "input" : 0, + "encoding" : "NO_CHANGE", "width" : 0, "height" : 0, "fps" : 15, - "standard" : "NO_CHANGE", + "flip" : "NO_CHANGE", + "fpsSoftwareDecimation" : 0, "sizeDecimation" : 8, "cropLeft" : 0, "cropRight" : 0, "cropTop" : 0, "cropBottom" : 0, - "redSignalThreshold" : 5, - "greenSignalThreshold" : 5, - "blueSignalThreshold" : 5, + "redSignalThreshold" : 0, + "greenSignalThreshold" : 100, + "blueSignalThreshold" : 0, "signalDetection" : false, + "noSignalCounterThreshold" : 200, "cecDetection" : false, - "sDVOffsetMin" : 0.25, - "sDHOffsetMin" : 0.25, - "sDVOffsetMax" : 0.75, - "sDHOffsetMax" : 0.75 + "sDVOffsetMin" : 0.1, + "sDVOffsetMax" : 0.9, + "sDHOffsetMin" : 0.4, + "sDHOffsetMax" : 0.46, + "hardware_brightness" : 0, + "hardware_contrast" : 0, + "hardware_saturation" : 0, + "hardware_hue" : 0 }, "framegrabber" : { - "type" : "auto", + "enable" : false, + "device" : "auto", + "input" : 0, "width" : 80, "height" : 45, - "frequency_Hz" : 10, + "fps" : 10, "pixelDecimation" : 8, "cropLeft" : 0, "cropRight" : 0, "cropTop" : 0, - "cropBottom" : 0, - "display" : 0 + "cropBottom" : 0 }, 
"blackborderdetector" : @@ -177,9 +188,11 @@ "instCapture" : { - "systemEnable" : true, + "systemEnable" : false, + "systemGrabberDevice" : "NONE", "systemPriority" : 250, "v4lEnable" : false, + "v4lGrabberDevice" : "NONE", "v4lPriority" : 240 }, diff --git a/doc/development/CompileHowto.md b/doc/development/CompileHowto.md index 978a65a0..ae5fc0a1 100644 --- a/doc/development/CompileHowto.md +++ b/doc/development/CompileHowto.md @@ -11,7 +11,7 @@ Note: call the script with `./docker-compile.sh -h` for more options. ```console wget -qN https://raw.github.com/hyperion-project/hyperion.ng/master/bin/scripts/docker-compile.sh && chmod +x *.sh && ./docker-compile.sh -i rpi-raspbian ``` -**Raspbian Buster** +**Raspbian Buster/Raspberry Pi OS** ```console wget -qN https://raw.github.com/hyperion-project/hyperion.ng/master/bin/scripts/docker-compile.sh && chmod +x *.sh && ./docker-compile.sh -i rpi-raspbian -t buster ``` @@ -58,7 +58,7 @@ cd $HYPERION_HOME ```console sudo apt-get update -sudo apt-get install git cmake build-essential qtbase5-dev libqt5serialport5-dev libqt5sql5-sqlite libqt5svg5-dev libqt5x11extras5-dev libusb-1.0-0-dev python3-dev libcec-dev libxcb-image0-dev libxcb-util0-dev libxcb-shm0-dev libxcb-render0-dev libxcb-randr0-dev libxrandr-dev libxrender-dev libavahi-core-dev libavahi-compat-libdnssd-dev libjpeg-dev libturbojpeg0-dev libssl-dev zlib1g-dev +sudo apt-get install git cmake build-essential qtbase5-dev libqt5serialport5-dev libqt5sql5-sqlite libqt5svg5-dev libqt5x11extras5-dev libusb-1.0-0-dev python3-dev libcec-dev libxcb-image0-dev libxcb-util0-dev libxcb-shm0-dev libxcb-render0-dev libxcb-randr0-dev libxrandr-dev libxrender-dev libavahi-core-dev libavahi-compat-libdnssd-dev libturbojpeg0-dev libssl-dev zlib1g-dev ``` **on RPI you need the videocore IV headers** @@ -83,7 +83,7 @@ See [AUR](https://aur.archlinux.org/packages/?O=0&SeB=nd&K=hyperion&outdated=&SB The following dependencies are needed to build hyperion.ng on fedora. 
```console sudo dnf -y groupinstall "Development Tools" -sudo dnf install python3-devel qt-devel qt5-qtbase-devel qt5-qtserialport-devel libjpeg-devel xrandr xcb-util-image-devel qt5-qtx11extras-devel turbojpeg-devel libusb-devel avahi-libs avahi-compat-libdns_sd-devel xcb-util-devel dbus-devel openssl-devel fedora-packager rpmdevtools gcc libcec-devel +sudo dnf install python3-devel qt-devel qt5-qtbase-devel qt5-qtserialport-devel xrandr xcb-util-image-devel qt5-qtx11extras-devel turbojpeg-devel libusb-devel avahi-libs avahi-compat-libdns_sd-devel xcb-util-devel dbus-devel openssl-devel fedora-packager rpmdevtools gcc libcec-devel ``` After installing the dependencies, you can continue with the compile instructions later on this page (the more detailed way..). @@ -98,18 +98,20 @@ brew install qt5 python3 cmake libusb doxygen zlib ## Windows (WIP) We assume a 64bit Windows 10. Install the following; - [Git](https://git-scm.com/downloads) (Check: Add to PATH) -- [CMake (Windows win64-x64 Installer)](https://cmake.org/download/) (Check: Add to PATH) +- [CMake (Windows win64-x64 installer)](https://cmake.org/download/) (Check: Add to PATH) - [Visual Studio 2019 Build Tools](https://go.microsoft.com/fwlink/?linkid=840931) ([direct link](https://aka.ms/vs/16/release/vs_buildtools.exe)) - Select C++ Buildtools - On the right, just select `MSVC v142 VS 2019 C++ x64/x86-Buildtools` and latest `Windows 10 SDK`. Everything else is not needed. -- [Win64 OpenSSL v1.1.1h](https://slproweb.com/products/Win32OpenSSL.html) ([direct link](https://slproweb.com/download/Win64OpenSSL-1_1_1h.exe)) +- [Win64 OpenSSL v1.1.1k](https://slproweb.com/products/Win32OpenSSL.html) ([direct link](https://slproweb.com/download/Win64OpenSSL-1_1_1k.exe)) - [Python 3 (Windows x86-64 executable installer)](https://www.python.org/downloads/windows/) (Check: Add to PATH and Debug Symbols) - Open a console window and execute `pip install aqtinstall`. 
- Now we can download Qt to _C:\Qt_ `mkdir c:\Qt && aqt install -O c:\Qt 5.15.0 windows desktop win64_msvc2019_64` +- [libjpeg-turbo SDK for Visual C++](https://sourceforge.net/projects/libjpeg-turbo/files/) + - Download the latest 64bit installer (currently `libjpeg-turbo-2.1.0-vc64.exe`) and install to its default location `C:\libjpeg-turbo64`. ### Optional: - For DirectX9 grabber: - - DirectX Software Development Kit. The download link is no longer available, so you will have to search for it yourself. + - [DirectX Software Development Kit](https://www.microsoft.com/en-us/download/details.aspx?id=6812) ([direct link](https://download.microsoft.com/download/A/E/7/AE743F1F-632B-4809-87A9-AA1BB3458E31/DXSDK_Jun10.exe)) - For package creation: - [NSIS 3.x](https://sourceforge.net/projects/nsis/files/NSIS%203/) ([direct link](https://sourceforge.net/projects/nsis/files/latest/download)) diff --git a/include/api/JsonAPI.h b/include/api/JsonAPI.h index d7009a27..a68c9d90 100644 --- a/include/api/JsonAPI.h +++ b/include/api/JsonAPI.h @@ -278,6 +278,12 @@ private: /// void handleLedDeviceCommand(const QJsonObject &message, const QString &command, int tan); + /// Handle an incoming JSON message regarding Input Sources (Grabbers) + /// + /// @param message the incoming message + /// + void handleInputSourceCommand(const QJsonObject& message, const QString& command, int tan); + /// /// Handle an incoming JSON message of unknown type /// diff --git a/include/effectengine/Effect.h b/include/effectengine/Effect.h index 248d458f..14b51650 100644 --- a/include/effectengine/Effect.h +++ b/include/effectengine/Effect.h @@ -67,6 +67,7 @@ public: QString getName() const { return _name; } int getTimeout() const {return _timeout; } + bool isEndless() const { return _isEndless; } QJsonObject getArgs() const { return _args; } @@ -83,6 +84,7 @@ private: const int _priority; const int _timeout; + bool _isEndless; const QString _script; const QString _name; diff --git 
a/include/grabber/AmlogicGrabber.h b/include/grabber/AmlogicGrabber.h index dc2d8b79..97231354 100644 --- a/include/grabber/AmlogicGrabber.h +++ b/include/grabber/AmlogicGrabber.h @@ -14,12 +14,16 @@ public: /// /// Construct a AmlogicGrabber that will capture snapshots with specified dimensions. /// - /// @param[in] width The width of the captured screenshot - /// @param[in] height The heigth of the captured screenshot /// - AmlogicGrabber(unsigned width, unsigned height); + AmlogicGrabber(); ~AmlogicGrabber() override; + /// + /// @brief Setup a new capture screen, will free the previous one + /// @return True on success, false if no screen is found + /// + bool setupScreen(); + /// /// Captures a single snapshot of the display and writes the data to the given image. The /// provided image should have the same dimensions as the configured values (_width and @@ -31,14 +35,51 @@ public: /// int grabFrame(Image & image); + /// + /// @brief Discover AmLogic screens available (for configuration). 
+ /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); + + /// + /// Set the video mode (2D/3D) + /// @param[in] mode The new video mode + /// + void setVideoMode(VideoMode mode) override; + + /// + /// @brief Apply new crop values, on errors reject the values + /// + void setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) override; + + /// + /// @brief Apply new width/height values, on errors (collide with cropping) reject the values + /// @return True on success else false + /// + bool setWidthHeight(int width, int height) override; + + /// + /// @brief Apply new framerate + /// @param fps framesPerSecond + /// + bool setFramerate(int fps) override; + + /// + /// @brief Apply new pixelDecimation + /// + bool setPixelDecimation(int pixelDecimation) override; + private: /** * Returns true if video is playing over the amlogic chip * @return True if video is playing else false */ bool isVideoPlaying(); - void closeDev(int &fd); - bool openDev(int &fd, const char* dev); + void closeDevice(int &fd); + bool openDevice(int &fd, const char* dev); int grabFrame_amvideocap(Image & image); diff --git a/include/grabber/AmlogicWrapper.h b/include/grabber/AmlogicWrapper.h index c0f22d98..87796bcd 100644 --- a/include/grabber/AmlogicWrapper.h +++ b/include/grabber/AmlogicWrapper.h @@ -4,8 +4,8 @@ #include /// -/// The DispmanxWrapper uses an instance of the DispmanxFrameGrabber to obtain ImageRgb's from the -/// displayed content. This ImageRgb is processed to a ColorRgb for each led and commmited to the +/// The Amlogic uses an instance of the AmlogicGrabber to obtain ImageRgb's from the +/// displayed content. This ImageRgb is processed to a ColorRgb for each led and committed to the /// attached Hyperion. 
/// class AmlogicWrapper : public GrabberWrapper @@ -13,12 +13,14 @@ class AmlogicWrapper : public GrabberWrapper Q_OBJECT public: /// - /// Constructs the dispmanx frame grabber with a specified grab size and update rate. + /// Constructs the Amlogic frame grabber /// /// @param[in] grabWidth The width of the grabbed image [pixels] /// @param[in] grabHeight The height of the grabbed images [pixels] + /// @param[in] pixelDecimation Decimation factor for image [pixels] /// - AmlogicWrapper(unsigned grabWidth, unsigned grabHeight); + AmlogicWrapper(int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION, + int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ); public slots: /// diff --git a/include/grabber/DirectXGrabber.h b/include/grabber/DirectXGrabber.h index b5cca9e5..8f687e0b 100644 --- a/include/grabber/DirectXGrabber.h +++ b/include/grabber/DirectXGrabber.h @@ -8,8 +8,14 @@ // Hyperion-utils includes #include +#include #include +// qt includes +#include +#include +#include + /// /// @brief The DirectX9 capture implementation /// @@ -17,33 +23,32 @@ class DirectXGrabber : public Grabber { public: - DirectXGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display); + DirectXGrabber(int display=0, int cropLeft=0, int cropRight=0, int cropTop=0, int cropBottom=0); virtual ~DirectXGrabber(); /// /// Captures a single snapshot of the display and writes the data to the given image. 
The - /// provided image should have the same dimensions as the configured values (_width and - /// _height) + /// provided image should have the same dimensions as the configured values (_width and _height) /// /// @param[out] image The snapped screenshot /// - virtual int grabFrame(Image & image); + int grabFrame(Image & image); /// /// @brief Set a new video mode /// - virtual void setVideoMode(VideoMode mode); + void setVideoMode(VideoMode mode) override; /// /// @brief Apply new width/height values, overwrite Grabber.h implementation /// - virtual bool setWidthHeight(int width, int height) { return true; }; + bool setWidthHeight(int /* width */, int /*height*/) override { return true; } /// /// @brief Apply new pixelDecimation /// - virtual void setPixelDecimation(int pixelDecimation); + bool setPixelDecimation(int pixelDecimation) override; /// /// Set the crop values @@ -52,12 +57,20 @@ public: /// @param cropTop Top pixel crop /// @param cropBottom Bottom pixel crop /// - virtual void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom); + void setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom); /// /// @brief Apply display index /// - void setDisplayIndex(int index) override; + bool setDisplayIndex(int index) override; + + /// @brief Discover QT screens available (for configuration). 
+ /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); private: /// @@ -72,7 +85,6 @@ private: void freeResources(); private: - int _pixelDecimation; unsigned _display; unsigned _displayWidth; unsigned _displayHeight; diff --git a/include/grabber/DirectXWrapper.h b/include/grabber/DirectXWrapper.h index d0bcb0b0..d063497d 100644 --- a/include/grabber/DirectXWrapper.h +++ b/include/grabber/DirectXWrapper.h @@ -9,15 +9,21 @@ public: /// /// Constructs the DirectX grabber with a specified grab size and update rate. /// + /// @param[in] updateRate_Hz The image grab rate [Hz] + /// @param[in] display Display to be grabbed + /// @param[in] pixelDecimation Decimation factor for image [pixels] /// @param[in] cropLeft Remove from left [pixels] /// @param[in] cropRight Remove from right [pixels] /// @param[in] cropTop Remove from top [pixels] /// @param[in] cropBottom Remove from bottom [pixels] - /// @param[in] pixelDecimation Decimation factor for image [pixels] - /// @param[in] display The display used[index] - /// @param[in] updateRate_Hz The image grab rate [Hz] + /// - DirectXWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display, const unsigned updateRate_Hz); + DirectXWrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + int display=0, + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION, + int cropLeft=0, int cropRight=0, + int cropTop=0, int cropBottom=0 + ); /// /// Destructor of this DirectX grabber. Releases any claimed resources. 
diff --git a/include/grabber/DispmanxFrameGrabber.h b/include/grabber/DispmanxFrameGrabber.h index 61b106a1..ace508ad 100644 --- a/include/grabber/DispmanxFrameGrabber.h +++ b/include/grabber/DispmanxFrameGrabber.h @@ -14,8 +14,7 @@ #include /// -/// The DispmanxFrameGrabber is used for creating snapshots of the display (screenshots) with a -/// downsized and scaled resolution. +/// The DispmanxFrameGrabber is used for creating snapshots of the display (screenshots) with a downsized and scaled resolution. /// class DispmanxFrameGrabber : public Grabber { @@ -23,12 +22,16 @@ public: /// /// Construct a DispmanxFrameGrabber that will capture snapshots with specified dimensions. /// - /// @param[in] width The width of the captured screenshot - /// @param[in] height The heigth of the captured screenshot - /// - DispmanxFrameGrabber(unsigned width, unsigned height); + DispmanxFrameGrabber(); ~DispmanxFrameGrabber() override; + bool open(); + + /// + /// @brief Setup a new capture screen, will free the previous one + /// @return True on success, false if no screen is found + /// + bool setupScreen(); /// /// Captures a single snapshot of the display and writes the data to the given image. The @@ -44,13 +47,24 @@ public: ///@brief Set new width and height for dispmanx, overwrite Grabber.h impl bool setWidthHeight(int width, int height) override; + QSize getScreenSize(int display=0) const; + + /// + /// @brief Discover DispmanX screens available (for configuration). 
+ /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); + private: /// /// Updates the frame-grab flags as used by the VC library for frame grabbing /// /// @param vc_flags The snapshot grabbing mask /// - void setFlags(int vc_flags); + void setFlags(DISPMANX_TRANSFORM_T vc_flags); /// /// @brief free _vc_resource and captureBuffer @@ -63,11 +77,11 @@ private: /// Handle to the resource for storing the captured snapshot DISPMANX_RESOURCE_HANDLE_T _vc_resource; - /// Rectangle of the captured resource that is transfered to user space + /// Rectangle of the captured resource that is transferred to user space VC_RECT_T _rectangle; /// Flags (transforms) for creating snapshots - int _vc_flags; + DISPMANX_TRANSFORM_T _vc_flags; // temp buffer when capturing with unsupported pitch size or // when we need to crop the image @@ -78,5 +92,4 @@ private: // rgba output buffer Image _image_rgba; - }; diff --git a/include/grabber/DispmanxFrameGrabberMock.h b/include/grabber/DispmanxFrameGrabberMock.h index 6e76078a..c5933207 100644 --- a/include/grabber/DispmanxFrameGrabberMock.h +++ b/include/grabber/DispmanxFrameGrabberMock.h @@ -16,6 +16,7 @@ typedef int DISPMANX_TRANSFORM_T; struct DISPMANX_MODEINFO_T { int width; int height; + uint32_t display_num; }; struct VC_RECT_T { @@ -34,6 +35,6 @@ DISPMANX_RESOURCE_HANDLE_T vc_dispmanx_resource_create(int,int width,int height, void vc_dispmanx_resource_delete(DISPMANX_RESOURCE_HANDLE_T resource); int vc_dispmanx_resource_read_data(DISPMANX_RESOURCE_HANDLE_T vc_resource, VC_RECT_T *rectangle, void* capturePtr, unsigned capturePitch); void vc_dispmanx_rect_set(VC_RECT_T *rectangle, int left, int top, int width, int height); -int vc_dispmanx_snapshot(int, DISPMANX_RESOURCE_HANDLE_T resource, int vc_flags); +int vc_dispmanx_snapshot(int, DISPMANX_RESOURCE_HANDLE_T resource, 
DISPMANX_TRANSFORM_T vc_flags); #endif diff --git a/include/grabber/DispmanxWrapper.h b/include/grabber/DispmanxWrapper.h index b3b24d6d..3b4fb6bc 100644 --- a/include/grabber/DispmanxWrapper.h +++ b/include/grabber/DispmanxWrapper.h @@ -16,11 +16,14 @@ public: /// /// Constructs the dispmanx frame grabber with a specified grab size and update rate. /// - /// @param[in] grabWidth The width of the grabbed image [pixels] - /// @param[in] grabHeight The height of the grabbed images [pixels] + /// @param[in] pixelDecimation Decimation factor for image [pixels] /// @param[in] updateRate_Hz The image grab rate [Hz] /// - DispmanxWrapper(unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz); + DispmanxWrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION + ); + + bool screenInit(); public slots: /// diff --git a/include/grabber/EncoderThread.h b/include/grabber/EncoderThread.h new file mode 100644 index 00000000..be88dd82 --- /dev/null +++ b/include/grabber/EncoderThread.h @@ -0,0 +1,176 @@ +#pragma once + +// Qt includes +#include + +// util includes +#include +#include + +// Determine the cmake options +#include + +// Turbo JPEG decoder +#ifdef HAVE_TURBO_JPEG + #include +#endif + +/// Encoder thread for USB devices +class EncoderThread : public QObject +{ + Q_OBJECT +public: + explicit EncoderThread(); + ~EncoderThread(); + + void setup( + PixelFormat pixelFormat, uint8_t* sharedData, + int size, int width, int height, int lineLength, + unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, + VideoMode videoMode, FlipMode flipMode, int pixelDecimation); + + void process(); + + bool isBusy() { return _busy; } + QAtomicInt _busy = false; + +signals: + void newFrame(const Image& data); + +private: + PixelFormat _pixelFormat; + uint8_t* _localData, + *_flipBuffer; + int _scalingFactorsCount, + _width, + _height, + _lineLength, + _currentFrame, + _pixelDecimation; + 
unsigned long _size; + unsigned _cropLeft, + _cropTop, + _cropBottom, + _cropRight; + FlipMode _flipMode; + ImageResampler _imageResampler; + +#ifdef HAVE_TURBO_JPEG + tjhandle _transform, _decompress; + tjscalingfactor* _scalingFactors; + tjtransform* _xform; + + void processImageMjpeg(); +#endif +}; + +template class Thread : public QThread +{ +public: + TThread *_thread; + explicit Thread(TThread *thread, QObject *parent = nullptr) + : QThread(parent) + , _thread(thread) + { + _thread->moveToThread(this); + start(); + } + + ~Thread() + { + quit(); + wait(); + } + + EncoderThread* thread() const { return qobject_cast(_thread); } + + void setup( + PixelFormat pixelFormat, uint8_t* sharedData, + int size, int width, int height, int lineLength, + unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, + VideoMode videoMode, FlipMode flipMode, int pixelDecimation) + { + auto encThread = qobject_cast(_thread); + if (encThread != nullptr) + encThread->setup(pixelFormat, sharedData, + size, width, height, lineLength, + cropLeft, cropTop, cropBottom, cropRight, + videoMode, flipMode, pixelDecimation); + } + + bool isBusy() + { + auto encThread = qobject_cast(_thread); + if (encThread != nullptr) + return encThread->isBusy(); + + return true; + } + + void process() + { + auto encThread = qobject_cast(_thread); + if (encThread != nullptr) + encThread->process(); + } + +protected: + void run() override + { + QThread::run(); + delete _thread; + } +}; + +class EncoderThreadManager : public QObject +{ + Q_OBJECT +public: + explicit EncoderThreadManager(QObject *parent = nullptr) + : QObject(parent) + , _threadCount(qMax(QThread::idealThreadCount(), 1)) + , _threads(nullptr) + { + _threads = new Thread*[_threadCount]; + for (int i = 0; i < _threadCount; i++) + { + _threads[i] = new Thread(new EncoderThread, this); + _threads[i]->setObjectName("Encoder " + i); + } + } + + ~EncoderThreadManager() + { + if (_threads != nullptr) + { + for(int i = 0; i < 
_threadCount; i++) + { + _threads[i]->deleteLater(); + _threads[i] = nullptr; + } + + delete[] _threads; + _threads = nullptr; + } + } + + void start() + { + if (_threads != nullptr) + for (int i = 0; i < _threadCount; i++) + connect(_threads[i]->thread(), &EncoderThread::newFrame, this, &EncoderThreadManager::newFrame); + } + + void stop() + { + if (_threads != nullptr) + for(int i = 0; i < _threadCount; i++) + disconnect(_threads[i]->thread(), nullptr, nullptr, nullptr); + } + + int _threadCount; + Thread** _threads; + +signals: + void newFrame(const Image& data); +}; diff --git a/include/grabber/FramebufferFrameGrabber.h b/include/grabber/FramebufferFrameGrabber.h index a3daa409..947ef0ea 100644 --- a/include/grabber/FramebufferFrameGrabber.h +++ b/include/grabber/FramebufferFrameGrabber.h @@ -1,5 +1,7 @@ #pragma once +#include + // Utils includes #include #include @@ -14,10 +16,10 @@ public: /// Construct a FramebufferFrameGrabber that will capture snapshots with specified dimensions. /// /// @param[in] device The framebuffer device name/path - /// @param[in] width The width of the captured screenshot - /// @param[in] height The heigth of the captured screenshot /// - FramebufferFrameGrabber(const QString & device, unsigned width, unsigned height); + FramebufferFrameGrabber(const QString & device="/dev/fb0"); + + ~FramebufferFrameGrabber() override; /// /// Captures a single snapshot of the display and writes the data to the given image. 
The @@ -30,11 +32,42 @@ public: int grabFrame(Image & image); /// - /// @brief Overwrite Grabber.h implememtation + /// @brief Setup a new capture screen, will free the previous one + /// @return True on success, false if no screen is found /// - void setDevicePath(const QString& path) override; + bool setupScreen(); + + + QSize getScreenSize() const; + QSize getScreenSize(const QString& device) const; + + /// + ///@brief Set new width and height for framegrabber, overwrite Grabber.h implementation + bool setWidthHeight(int width, int height) override; + + QString getPath() const {return _fbDevice;} + + /// + /// @brief Discover Framebuffer screens available (for configuration). + /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); private: + + bool openDevice(); + bool closeDevice(); + bool getScreenInfo(); + /// Framebuffer device e.g. /dev/fb0 QString _fbDevice; + + int _fbfd; + struct fb_var_screeninfo _varInfo; + struct fb_fix_screeninfo _fixInfo; + + PixelFormat _pixelFormat; }; diff --git a/include/grabber/FramebufferWrapper.h b/include/grabber/FramebufferWrapper.h index 0a59f1d6..2098d362 100644 --- a/include/grabber/FramebufferWrapper.h +++ b/include/grabber/FramebufferWrapper.h @@ -5,7 +5,7 @@ /// /// The FramebufferWrapper uses an instance of the FramebufferFrameGrabber to obtain ImageRgb's from the -/// displayed content. This ImageRgb is processed to a ColorRgb for each led and commmited to the +/// displayed content. This ImageRgb is processed to a ColorRgb for each led and committed to the /// attached Hyperion. /// class FramebufferWrapper: public GrabberWrapper @@ -15,12 +15,14 @@ public: /// /// Constructs the framebuffer frame grabber with a specified grab size and update rate. 
/// - /// @param[in] device Framebuffer device name/path - /// @param[in] grabWidth The width of the grabbed image [pixels] - /// @param[in] grabHeight The height of the grabbed images [pixels] /// @param[in] updateRate_Hz The image grab rate [Hz] + /// @param[in] device Framebuffer device name/path + /// @param[in] pixelDecimation Decimation factor for image [pixels] /// - FramebufferWrapper(const QString & device, unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz); + FramebufferWrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + const QString & device = "/dev/fb0", + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION + ); public slots: /// diff --git a/include/grabber/MFGrabber.h b/include/grabber/MFGrabber.h new file mode 100644 index 00000000..2b2bcc01 --- /dev/null +++ b/include/grabber/MFGrabber.h @@ -0,0 +1,129 @@ +#pragma once + +// Windows include +#include + +// COM includes +#include + +// Qt includes +#include +#include +#include +#include +#include +#include +#include + +// utils includes +#include +#include +#include + +// decoder thread includes +#include + +/// Forward class declaration +class SourceReaderCB; +/// Forward struct declaration +struct IMFSourceReader; + +/// +/// Media Foundation capture class +/// + +class MFGrabber : public Grabber +{ + Q_OBJECT + friend class SourceReaderCB; +public: + struct DeviceProperties + { + QString symlink = QString(); + int width = 0; + int height = 0; + int fps = 0; + int numerator = 0; + int denominator = 0; + PixelFormat pf = PixelFormat::NO_CHANGE; + GUID guid = GUID_NULL; + }; + + struct DeviceControls + { + QString property = QString(); + int minValue = 0; + int maxValue = 0; + int step = 0; + int default = 0; + int currentValue = 0; + }; + + MFGrabber(); + ~MFGrabber() override; + + void receive_image(const void *frameImageBuffer, int size); + void setDevice(const QString& device); + bool setInput(int input) override; + bool setWidthHeight(int width, int height) 
override; + void setEncoding(QString enc); + void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue); + void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold); + void setSignalDetectionOffset( double verticalMin, double horizontalMin, double verticalMax, double horizontalMax); + void setSignalDetectionEnable(bool enable); + bool reload(bool force = false); + + /// + /// @brief Discover available Media Foundation USB devices (for configuration). + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// @return A JSON structure holding a list of USB devices found + /// + QJsonArray discover(const QJsonObject& params); + +public slots: + bool prepare(); + bool start(); + void stop(); + void newThreadFrame(Image image); + +signals: + void newFrame(const Image & image); + void readError(const char* err); + +private: + bool init(); + void uninit(); + HRESULT init_device(QString device, DeviceProperties props); + void enumVideoCaptureDevices(); + void start_capturing(); + void process_image(const void *frameImageBuffer, int size); + + QString _currentDeviceName, + _newDeviceName; + QMap> _deviceProperties; + QMap> _deviceControls; + HRESULT _hr; + IMFSourceReader* _sourceReader; + SourceReaderCB* _sourceReaderCB; + EncoderThreadManager* _threadManager; + PixelFormat _pixelFormat, + _pixelFormatConfig; + int _lineLength, + _frameByteSize, + _noSignalCounterThreshold, + _noSignalCounter, + _brightness, + _contrast, + _saturation, + _hue; + QAtomicInt _currentFrame; + ColorRgb _noSignalThresholdColor; + bool _signalDetectionEnabled, + _noSignalDetected, + _initialized, + _reload; + double _x_frac_min, + _y_frac_min, + _x_frac_max, + _y_frac_max; +}; diff --git a/include/grabber/OsxFrameGrabber.h b/include/grabber/OsxFrameGrabber.h index 1aa5ee0d..17530888 100644 --- a/include/grabber/OsxFrameGrabber.h +++ 
b/include/grabber/OsxFrameGrabber.h @@ -21,12 +21,17 @@ public: /// Construct a OsxFrameGrabber that will capture snapshots with specified dimensions. /// /// @param[in] display The index of the display to capture - /// @param[in] width The width of the captured screenshot - /// @param[in] height The heigth of the captured screenshot + /// - OsxFrameGrabber(unsigned display, unsigned width, unsigned height); + OsxFrameGrabber(int display=kCGDirectMainDisplay); ~OsxFrameGrabber() override; + /// + /// @brief Setup a new capture screen, will free the previous one + /// @return True on success, false if no screen is found + /// + bool setupDisplay(); + /// /// Captures a single snapshot of the display and writes the data to the given image. The /// provided image should have the same dimensions as the configured values (_width and @@ -40,12 +45,21 @@ public: /// /// @brief Overwrite Grabber.h implementation /// - void setDisplayIndex(int index) override; + bool setDisplayIndex(int index) override; + + /// + /// @brief Discover OSX screens available (for configuration). + /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); private: /// display - unsigned _screenIndex; + int _screenIndex; - /// Reference to the captured diaplay + /// Reference to the captured display CGDirectDisplayID _display; }; diff --git a/include/grabber/OsxFrameGrabberMock.h b/include/grabber/OsxFrameGrabberMock.h index 65bc62f3..bdc622c4 100644 --- a/include/grabber/OsxFrameGrabberMock.h +++ b/include/grabber/OsxFrameGrabberMock.h @@ -4,31 +4,90 @@ /* * this is a mock up for compiling and testing osx wrapper on no osx platform. * this will show a test image and rotate the colors. 
+ * + * see https://github.com/phracker/MacOSX-SDKs/blob/master/MacOSX10.8.sdk/System/Library/Frameworks/CoreGraphics.framework/Versions/A/Headers * */ #include #include -typedef int CGDirectDisplayID; +enum _CGError { + kCGErrorSuccess = 0, + kCGErrorFailure = 1000, + kCGErrorIllegalArgument = 1001, + kCGErrorInvalidConnection = 1002, + kCGErrorInvalidContext = 1003, + kCGErrorCannotComplete = 1004, + kCGErrorNotImplemented = 1006, + kCGErrorRangeCheck = 1007, + kCGErrorTypeCheck = 1008, + kCGErrorInvalidOperation = 1010, + kCGErrorNoneAvailable = 1011, + + /* Obsolete errors. */ + kCGErrorNameTooLong = 1005, + kCGErrorNoCurrentPoint = 1009, + kCGErrorApplicationRequiresNewerSystem = 1015, + kCGErrorApplicationNotPermittedToExecute = 1016, + kCGErrorApplicationIncorrectExecutableFormatFound = 1023, + kCGErrorApplicationIsLaunching = 1024, + kCGErrorApplicationAlreadyRunning = 1025, + kCGErrorApplicationCanOnlyBeRunInOneSessionAtATime = 1026, + kCGErrorClassicApplicationsMustBeLaunchedByClassic = 1027, + kCGErrorForkFailed = 1028, + kCGErrorRetryRegistration = 1029, + kCGErrorFirst = 1000, + kCGErrorLast = 1029 +}; +typedef int32_t CGError; +typedef double CGFloat; + +struct CGSize { + CGFloat width; + CGFloat height; +}; +typedef struct CGSize CGSize; + +struct CGPoint { + float x; + float y; +}; +typedef struct CGPoint CGPoint; + +struct CGRect { + CGPoint origin; + CGSize size; +}; +typedef struct CGRect CGRect; + +typedef CGError CGDisplayErr; +typedef uint32_t CGDirectDisplayID; +typedef uint32_t CGDisplayCount;; +typedef struct CGDisplayMode *CGDisplayModeRef; + typedef Image CGImage; typedef CGImage* CGImageRef; typedef unsigned char CFData; typedef CFData* CFDataRef; -typedef unsigned CGDisplayCount; const int kCGDirectMainDisplay = 0; -void CGGetActiveDisplayList(int max, CGDirectDisplayID *displays, CGDisplayCount *displayCount); -CGImageRef CGDisplayCreateImage(CGDirectDisplayID display); -void CGImageRelease(CGImageRef image); -CGImageRef 
CGImageGetDataProvider(CGImageRef image); -CFDataRef CGDataProviderCopyData(CGImageRef image); -unsigned char* CFDataGetBytePtr(CFDataRef imgData); -unsigned CGImageGetWidth(CGImageRef image); -unsigned CGImageGetHeight(CGImageRef image); -unsigned CGImageGetBitsPerPixel(CGImageRef image); -unsigned CGImageGetBytesPerRow(CGImageRef image); -void CFRelease(CFDataRef imgData); +CGError CGGetActiveDisplayList(uint32_t maxDisplays, CGDirectDisplayID *activeDisplays, uint32_t *displayCount); +CGDisplayModeRef CGDisplayCopyDisplayMode(CGDirectDisplayID display); +CGRect CGDisplayBounds(CGDirectDisplayID display); +void CGDisplayModeRelease(CGDisplayModeRef mode); + +CGImageRef CGDisplayCreateImage(CGDirectDisplayID display); +void CGImageRelease(CGImageRef image); +CGImageRef CGImageGetDataProvider(CGImageRef image); +CFDataRef CGDataProviderCopyData(CGImageRef image); +unsigned char* CFDataGetBytePtr(CFDataRef imgData); +unsigned CGImageGetWidth(CGImageRef image); +unsigned CGImageGetHeight(CGImageRef image); +unsigned CGImageGetBitsPerPixel(CGImageRef image); +unsigned CGImageGetBytesPerRow(CGImageRef image); +void CFRelease(CFDataRef imgData); + #endif diff --git a/include/grabber/OsxWrapper.h b/include/grabber/OsxWrapper.h index 94b28827..c9520f7e 100644 --- a/include/grabber/OsxWrapper.h +++ b/include/grabber/OsxWrapper.h @@ -4,9 +4,8 @@ #include /// -/// The OsxWrapper uses an instance of the OsxFrameGrabber to obtain ImageRgb's from the -/// displayed content. This ImageRgb is processed to a ColorRgb for each led and commmited to the -/// attached Hyperion. +/// The OsxWrapper uses an instance of the OsxFrameGrabber to obtain ImageRgb's from the displayed content. +/// This ImageRgb is processed to a ColorRgb for each led and committed to the attached Hyperion. /// class OsxWrapper: public GrabberWrapper { @@ -15,12 +14,14 @@ public: /// /// Constructs the osx frame grabber with a specified grab size and update rate. 
/// - /// @param[in] display Index of the display to grab - /// @param[in] grabWidth The width of the grabbed image [pixels] - /// @param[in] grabHeight The height of the grabbed images [pixels] /// @param[in] updateRate_Hz The image grab rate [Hz] + /// @param[in] display Index of the display to grab + /// @param[in] pixelDecimation Decimation factor for image [pixels] /// - OsxWrapper(unsigned display, unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz); + OsxWrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + int display = kCGDirectMainDisplay, + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION + ); public slots: /// diff --git a/include/grabber/QtGrabber.h b/include/grabber/QtGrabber.h index 9c5903b6..5eb65d5e 100644 --- a/include/grabber/QtGrabber.h +++ b/include/grabber/QtGrabber.h @@ -15,14 +15,13 @@ class QtGrabber : public Grabber { public: - QtGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display); + QtGrabber(int display=0, int cropLeft=0, int cropRight=0, int cropTop=0, int cropBottom=0); ~QtGrabber() override; /// /// Captures a single snapshot of the display and writes the data to the given image. 
The - /// provided image should have the same dimensions as the configured values (_width and - /// _height) + /// provided image should have the same dimensions as the configured values (_width and _height) /// /// @param[out] image The snapped screenshot (should be initialized with correct width and /// height) @@ -37,12 +36,12 @@ public: /// /// @brief Apply new width/height values, overwrite Grabber.h implementation as qt doesn't use width/height, just pixelDecimation to calc dimensions /// - bool setWidthHeight(int width, int height) override { return true; } + bool setWidthHeight(int /*width*/, int /*height*/) override { return true; } /// /// @brief Apply new pixelDecimation /// - void setPixelDecimation(int pixelDecimation) override; + bool setPixelDecimation(int pixelDecimation) override; /// /// Set the crop values @@ -51,14 +50,37 @@ public: /// @param cropTop Top pixel crop /// @param cropBottom Bottom pixel crop /// - void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) override; + void setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) override; /// /// @brief Apply display index /// - void setDisplayIndex(int index) override; + bool setDisplayIndex(int index) override; + + /// + /// @brief Discover QT screens available (for configuration). + /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); + + /// + /// @brief Setup a new capture display, will free the previous one + /// @return True on success, false if no display is found + /// + bool setupDisplay(); + + /// + /// @brief Opens the input device. + /// + /// @return Zero, on success (i.e. 
device is ready), else negative + /// + bool open(); private slots: + /// /// @brief is called whenever the current _screen changes it's geometry /// @param geo The new geometry @@ -66,11 +88,6 @@ private slots: void geometryChanged(const QRect &geo); private: - /// - /// @brief Setup a new capture display, will free the previous one - /// @return True on success, false if no display is found - /// - bool setupDisplay(); /// /// @brief Is called whenever we need new screen dimension calculations based on window geometry @@ -84,13 +101,19 @@ private: private: - unsigned _display; - int _pixelDecimation; - unsigned _screenWidth; - unsigned _screenHeight; - unsigned _src_x; - unsigned _src_y; - unsigned _src_x_max; - unsigned _src_y_max; + int _display; + int _numberOfSDisplays; + + int _calculatedWidth; + int _calculatedHeight; + int _src_x; + int _src_y; + int _src_x_max; + int _src_y_max; + bool _isWayland; + QScreen* _screen; + bool _isVirtual; + + Logger * _logger; }; diff --git a/include/grabber/QtWrapper.h b/include/grabber/QtWrapper.h index 4d32625c..3bba4a82 100644 --- a/include/grabber/QtWrapper.h +++ b/include/grabber/QtWrapper.h @@ -10,16 +10,28 @@ class QtWrapper: public GrabberWrapper { public: /// - /// Constructs the framebuffer frame grabber with a specified grab size and update rate. + /// Constructs the QT frame grabber with a specified grab size and update rate. 
/// + /// @param[in] updateRate_Hz The image grab rate [Hz] + /// @param[in] display Display to be grabbed + /// @param[in] pixelDecimation Decimation factor for image [pixels] /// @param[in] cropLeft Remove from left [pixels] /// @param[in] cropRight Remove from right [pixels] /// @param[in] cropTop Remove from top [pixels] /// @param[in] cropBottom Remove from bottom [pixels] - /// @param[in] pixelDecimation Decimation factor for image [pixels] - /// @param[in] updateRate_Hz The image grab rate [Hz] + /// - QtWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display, unsigned updateRate_Hz); + QtWrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + int display=0, + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION, + int cropLeft=0, int cropRight=0, + int cropTop=0, int cropBottom=0 + ); + + /// + /// Starts the grabber which produces led values with the specified update rate + /// + bool open() override; public slots: /// diff --git a/include/grabber/V4L2Grabber.h b/include/grabber/V4L2Grabber.h index 894317d3..22f8cdba 100644 --- a/include/grabber/V4L2Grabber.h +++ b/include/grabber/V4L2Grabber.h @@ -14,30 +14,24 @@ // util includes #include #include -#include +#include +#include #include -#include -// general JPEG decoder includes -#ifdef HAVE_JPEG_DECODER - #include - #include -#endif - -// System JPEG decoder -#ifdef HAVE_JPEG - #include - #include -#endif - -// TurboJPEG decoder -#ifdef HAVE_TURBO_JPEG - #include +// decoder thread includes +#include + +// Determine the cmake options +#include + +#if defined(ENABLE_CEC) + #include #endif +/// /// Capture class for V4L2 devices /// -/// @see http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html + class V4L2Grabber : public Grabber { Q_OBJECT @@ -45,118 +39,67 @@ class V4L2Grabber : public Grabber public: struct DeviceProperties { - QString name = QString(); - QMultiMap inputs = QMultiMap(); - QStringList resolutions = QStringList(); - 
QStringList framerates = QStringList(); + QString name = QString(); + struct InputProperties + { + QString inputName = QString(); + QList standards = QList(); + struct EncodingProperties + { + int width = 0; + int height = 0; + QList framerates = QList(); + }; + QMultiMap encodingFormats = QMultiMap(); + }; + QMap inputs = QMap(); }; - V4L2Grabber(const QString & device, - const unsigned width, - const unsigned height, - const unsigned fps, - const unsigned input, - VideoStandard videoStandard, - PixelFormat pixelFormat, - int pixelDecimation - ); + struct DeviceControls + { + QString property = QString(); + int minValue = 0; + int maxValue = 0; + int step = 0; + int defaultValue = 0; + int currentValue = 0; + }; + + V4L2Grabber(); ~V4L2Grabber() override; - QRectF getSignalDetectionOffset() const - { - return QRectF(_x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max); - } - - bool getSignalDetectionEnabled() const { return _signalDetectionEnabled; } - bool getCecDetectionEnabled() const { return _cecDetectionEnabled; } - int grabFrame(Image &); - - /// - /// @brief set new PixelDecimation value to ImageResampler - /// @param pixelDecimation The new pixelDecimation value - /// - void setPixelDecimation(int pixelDecimation) override; - - /// - /// @brief overwrite Grabber.h implementation - /// - void setSignalThreshold( - double redSignalThreshold, - double greenSignalThreshold, - double blueSignalThreshold, - int noSignalCounterThreshold = 50) override; - - /// - /// @brief overwrite Grabber.h implementation - /// - void setSignalDetectionOffset( - double verticalMin, - double horizontalMin, - double verticalMax, - double horizontalMax) override; - /// - /// @brief overwrite Grabber.h implementation - /// - void setSignalDetectionEnable(bool enable) override; - - /// - /// @brief overwrite Grabber.h implementation - /// - void setCecDetectionEnable(bool enable) override; - - /// - /// @brief overwrite Grabber.h implementation - /// - void 
setDeviceVideoStandard(QString device, VideoStandard videoStandard) override; - - /// - /// @brief overwrite Grabber.h implementation - /// + void setDevice(const QString& devicePath, const QString& deviceName); bool setInput(int input) override; - - /// - /// @brief overwrite Grabber.h implementation - /// bool setWidthHeight(int width, int height) override; + void setEncoding(QString enc); + void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue); + void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold = 50); + void setSignalDetectionOffset( double verticalMin, double horizontalMin, double verticalMax, double horizontalMax); + void setSignalDetectionEnable(bool enable); + void setCecDetectionEnable(bool enable); + bool reload(bool force = false); + + QRectF getSignalDetectionOffset() const { return QRectF(_x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max); } //used from hyperion-v4l2 + + /// - /// @brief overwrite Grabber.h implementation + /// @brief Discover available V4L2 USB devices (for configuration). 
+ /// @param[in] params Parameters used to overwrite discovery default behaviour + /// @return A JSON structure holding a list of USB devices found /// - bool setFramerate(int fps) override; - - /// - /// @brief overwrite Grabber.h implementation - /// - QStringList getV4L2devices() const override; - - /// - /// @brief overwrite Grabber.h implementation - /// - QString getV4L2deviceName(const QString& devicePath) const override; - - /// - /// @brief overwrite Grabber.h implementation - /// - QMultiMap getV4L2deviceInputs(const QString& devicePath) const override; - - /// - /// @brief overwrite Grabber.h implementation - /// - QStringList getResolutions(const QString& devicePath) const override; - - /// - /// @brief overwrite Grabber.h implementation - /// - QStringList getFramerates(const QString& devicePath) const override; + QJsonArray discover(const QJsonObject& params); public slots: - + bool prepare(); bool start(); - void stop(); + void newThreadFrame(Image image); +#if defined(ENABLE_CEC) void handleCecEvent(CECEvent event); +#endif signals: void newFrame(const Image & image); @@ -166,36 +109,19 @@ private slots: int read_frame(); private: - void getV4Ldevices(); - bool init(); - void uninit(); - bool open_device(); - void close_device(); - void init_read(unsigned int buffer_size); - void init_mmap(); - void init_userp(unsigned int buffer_size); - void init_device(VideoStandard videoStandard); - void uninit_device(); - void start_capturing(); - void stop_capturing(); - bool process_image(const void *p, int size); - - void process_image(const uint8_t *p, int size); - int xioctl(int request, void *arg); - int xioctl(int fileDescriptor, int request, void *arg); void throw_exception(const QString & error) @@ -222,56 +148,28 @@ private: size_t length; }; -#ifdef HAVE_JPEG - struct errorManager - { - jpeg_error_mgr pub; - jmp_buf setjmp_buffer; - }; - - static void errorHandler(j_common_ptr cInfo) - { - errorManager* mgr = reinterpret_cast(cInfo->err); - 
longjmp(mgr->setjmp_buffer, 1); - } - - static void outputHandler(j_common_ptr cInfo) - { - // Suppress fprintf warnings. - } - - jpeg_decompress_struct* _decompress; - errorManager* _error; -#endif - -#ifdef HAVE_TURBO_JPEG - tjhandle _decompress = nullptr; - int _subsamp; -#endif - private: - QString _deviceName; - std::map _v4lDevices; + QString _currentDevicePath, _currentDeviceName; + EncoderThreadManager* _threadManager; QMap _deviceProperties; + QMap> _deviceControls; - VideoStandard _videoStandard; io_method _ioMethod; int _fileDescriptor; std::vector _buffers; - PixelFormat _pixelFormat; - int _pixelDecimation; + PixelFormat _pixelFormat, _pixelFormatConfig; int _lineLength; int _frameByteSize; + QAtomicInt _currentFrame; + // signal detection int _noSignalCounterThreshold; ColorRgb _noSignalThresholdColor; - bool _signalDetectionEnabled; - bool _cecDetectionEnabled; - bool _cecStandbyActivated; - bool _noSignalDetected; + bool _cecDetectionEnabled, _cecStandbyActivated, _signalDetectionEnabled, _noSignalDetected; int _noSignalCounter; + int _brightness, _contrast, _saturation, _hue; double _x_frac_min; double _y_frac_min; double _x_frac_max; @@ -279,9 +177,9 @@ private: QSocketNotifier *_streamNotifier; - bool _initialized; - bool _deviceAutoDiscoverEnabled; + bool _initialized, _reload; protected: - void enumFrameIntervals(QStringList &framerates, int fileDescriptor, int pixelformat, int width, int height); + void enumVideoCaptureDevices(); + void enumFrameIntervals(QList &framerates, int fileDescriptor, int pixelformat, int width, int height); }; diff --git a/include/grabber/V4L2Wrapper.h b/include/grabber/V4L2Wrapper.h deleted file mode 100644 index 78474fd6..00000000 --- a/include/grabber/V4L2Wrapper.h +++ /dev/null @@ -1,46 +0,0 @@ -#pragma once - -#include -#include - -class V4L2Wrapper : public GrabberWrapper -{ - Q_OBJECT - -public: - V4L2Wrapper(const QString & device, - const unsigned grabWidth, - const unsigned grabHeight, - const unsigned fps, 
- const unsigned input, - VideoStandard videoStandard, - PixelFormat pixelFormat, - int pixelDecimation ); - ~V4L2Wrapper() override; - - bool getSignalDetectionEnable() const; - bool getCecDetectionEnable() const; - -public slots: - bool start() override; - void stop() override; - - void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold); - void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) override; - void setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax); - void setSignalDetectionEnable(bool enable); - void setCecDetectionEnable(bool enable); - void setDeviceVideoStandard(const QString& device, VideoStandard videoStandard); - void handleCecEvent(CECEvent event); - void handleSettingsUpdate(settings::type type, const QJsonDocument& config) override; - -private slots: - void newFrame(const Image & image); - void readError(const char* err); - - void action() override; - -private: - /// The V4L2 grabber - V4L2Grabber _grabber; -}; diff --git a/include/grabber/VideoWrapper.h b/include/grabber/VideoWrapper.h new file mode 100644 index 00000000..932ff1ab --- /dev/null +++ b/include/grabber/VideoWrapper.h @@ -0,0 +1,47 @@ +#pragma once + +#include // Required to determine the cmake options +#include + +#if defined(ENABLE_MF) + #include +#elif defined(ENABLE_V4L2) + #include +#endif + +#if defined(ENABLE_CEC) + #include +#endif + +class VideoWrapper : public GrabberWrapper +{ + Q_OBJECT + +public: + VideoWrapper(); + ~VideoWrapper() override; + +public slots: + bool start() override; + void stop() override; + +#if defined(ENABLE_CEC) + void handleCecEvent(CECEvent event); +#endif + + void handleSettingsUpdate(settings::type type, const QJsonDocument& config) override; + +private slots: + void newFrame(const Image & image); + void readError(const char* err); + + void action() override; + +private: + /// The Media 
Foundation or V4L2 grabber +#if defined(ENABLE_MF) + MFGrabber _grabber; +#elif defined(ENABLE_V4L2) + V4L2Grabber _grabber; +#endif +}; diff --git a/include/grabber/X11Grabber.h b/include/grabber/X11Grabber.h index abc347c3..642c929a 100644 --- a/include/grabber/X11Grabber.h +++ b/include/grabber/X11Grabber.h @@ -2,7 +2,13 @@ #include #include #include + +// QT includes #include +#include +#include +#include + // Hyperion-utils includes #include @@ -20,11 +26,13 @@ class X11Grabber : public Grabber , public QAbstractNativeEventFilter { public: - X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation); + X11Grabber(int cropLeft=0, int cropRight=0, int cropTop=0, int cropBottom=0); ~X11Grabber() override; - bool Setup(); + bool open(); + + bool setupDisplay(); /// /// Captures a single snapshot of the display and writes the data to the given image. The @@ -50,7 +58,7 @@ public: /// /// @brief Apply new pixelDecimation /// - void setPixelDecimation(int pixelDecimation) override; + bool setPixelDecimation(int pixelDecimation) override; /// /// Set the crop values @@ -59,22 +67,33 @@ public: /// @param cropTop Top pixel crop /// @param cropBottom Bottom pixel crop /// - void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) override; + void setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) override; + + /// + /// @brief Discover X11 screens available (for configuration). 
+ /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); protected: bool nativeEventFilter(const QByteArray & eventType, void * message, long int * result) override; private: - bool _XShmAvailable, _XShmPixmapAvailable, _XRenderAvailable, _XRandRAvailable; - XImage* _xImage; - XShmSegmentInfo _shminfo; + void freeResources(); + void setupResources(); /// Reference to the X11 display (nullptr if not opened) Display* _x11Display; Window _window; XWindowAttributes _windowAttr; + XImage* _xImage; + XShmSegmentInfo _shminfo; + Pixmap _pixmap; XRenderPictFormat* _srcFormat; XRenderPictFormat* _dstFormat; @@ -85,15 +104,19 @@ private: int _XRandREventBase; XTransform _transform; - int _pixelDecimation; - unsigned _screenWidth; - unsigned _screenHeight; + unsigned _calculatedWidth; + unsigned _calculatedHeight; unsigned _src_x; unsigned _src_y; - Image _image; + bool _XShmAvailable; + bool _XShmPixmapAvailable; + bool _XRenderAvailable; + bool _XRandRAvailable; + bool _isWayland; - void freeResources(); - void setupResources(); + Logger * _logger; + + Image _image; }; diff --git a/include/grabber/X11Wrapper.h b/include/grabber/X11Wrapper.h index b876d828..79b6da92 100644 --- a/include/grabber/X11Wrapper.h +++ b/include/grabber/X11Wrapper.h @@ -9,25 +9,26 @@ /// -/// The X11Wrapper uses an instance of the X11Grabber to obtain ImageRgb's from the -/// displayed content. This ImageRgb is processed to a ColorRgb for each led and commmited to the -/// attached Hyperion. +/// The X11Wrapper uses an instance of the X11Grabber to obtain ImageRgb's from the displayed content. +/// This ImageRgb is processed to a ColorRgb for each led and committed to the attached Hyperion. /// class X11Wrapper: public GrabberWrapper { public: /// - /// Constructs the framebuffer frame grabber with a specified grab size and update rate. 
+ /// Constructs the X11 frame grabber with a specified grab size and update rate. /// - /// @param[in] device X11 device name/path - /// @param[in] grabWidth The width of the grabbed image [pixels] - /// @param[in] grabHeight The height of the grabbed images [pixels] /// @param[in] updateRate_Hz The image grab rate [Hz] + /// @param[in] pixelDecimation Decimation factor for image [pixels] /// - X11Wrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, unsigned updateRate_Hz); + X11Wrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION, + int cropLeft=0, int cropRight=0, + int cropTop=0, int cropBottom=0 + ); /// - /// Destructor of this framebuffer frame grabber. Releases any claimed resources. + /// Destructor of this frame grabber. Releases any claimed resources. /// ~X11Wrapper() override; diff --git a/include/grabber/XcbGrabber.h b/include/grabber/XcbGrabber.h index a4a573f1..56471aec 100644 --- a/include/grabber/XcbGrabber.h +++ b/include/grabber/XcbGrabber.h @@ -1,7 +1,11 @@ #pragma once #include +// QT includes #include +#include +#include +#include #include #include @@ -21,16 +25,28 @@ class XcbGrabber : public Grabber, public QAbstractNativeEventFilter Q_OBJECT public: - XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation); + XcbGrabber(int cropLeft=0, int cropRight=0, int cropTop=0, int cropBottom=0); + ~XcbGrabber() override; - bool Setup(); + bool open(); + bool setupDisplay(); + int grabFrame(Image & image, bool forceUpdate = false); int updateScreenDimensions(bool force = false); void setVideoMode(VideoMode mode) override; bool setWidthHeight(int width, int height) override { return true; } - void setPixelDecimation(int pixelDecimation) override; - void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) override; + bool setPixelDecimation(int pixelDecimation) override; + void 
setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) override; + + /// + /// @brief Discover XCB screens available (for configuration). + /// + /// @param[in] params Parameters used to overwrite discovery default behaviour + /// + /// @return A JSON structure holding a list of devices found + /// + QJsonObject discover(const QJsonObject& params); private: bool nativeEventFilter(const QByteArray & eventType, void * message, long int * result) override; @@ -52,8 +68,7 @@ private: xcb_render_transform_t _transform; xcb_shm_seg_t _shminfo; - int _pixelDecimation; - + int _screen_num; unsigned _screenWidth; unsigned _screenHeight; unsigned _src_x; @@ -63,6 +78,8 @@ private: bool _XcbRandRAvailable; bool _XcbShmAvailable; bool _XcbShmPixmapAvailable; + bool _isWayland; + Logger * _logger; uint8_t * _shmData; diff --git a/include/grabber/XcbWrapper.h b/include/grabber/XcbWrapper.h index 758269b3..71bb70ea 100644 --- a/include/grabber/XcbWrapper.h +++ b/include/grabber/XcbWrapper.h @@ -11,7 +11,12 @@ class XcbWrapper: public GrabberWrapper { public: - XcbWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, const unsigned updateRate_Hz); + XcbWrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION, + int cropLeft=0, int cropRight=0, + int cropTop=0, int cropBottom=0 + ); + ~XcbWrapper() override; public slots: diff --git a/include/hyperion/Grabber.h b/include/hyperion/Grabber.h index d198842e..ce5b350e 100644 --- a/include/hyperion/Grabber.h +++ b/include/hyperion/Grabber.h @@ -6,23 +6,21 @@ #include #include #include -#include +#include #include #include #include -#include - /// /// @brief The Grabber class is responsible to apply image resizes (with or without ImageResampler) -/// Overwrite the videoMode with setVideoMode() -/// Overwrite setCropping() + class Grabber : public QObject { Q_OBJECT public: - Grabber(const QString& grabberName = "", int 
width=0, int height=0, int cropLeft=0, int cropRight=0, int cropTop=0, int cropBottom=0); + + Grabber(const QString& grabberName = "", int cropLeft=0, int cropRight=0, int cropTop=0, int cropBottom=0); /// /// Set the video mode (2D/3D) @@ -31,12 +29,18 @@ public: virtual void setVideoMode(VideoMode mode); /// - /// @brief Apply new crop values, on errors reject the values + /// Apply new flip mode (vertical/horizontal/both) + /// @param[in] mode The new flip mode /// - virtual void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom); + virtual void setFlipMode(FlipMode mode); /// - /// @brief Apply new video input (used from v4l) + /// @brief Apply new crop values, on errors reject the values + /// + virtual void setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom); + + /// + /// @brief Apply new video input (used from v4l2/MediaFoundation) /// @param input device input /// virtual bool setInput(int input); @@ -48,114 +52,95 @@ public: virtual bool setWidthHeight(int width, int height); /// - /// @brief Apply new framerate (used from v4l) + /// @brief Apply new capture framerate in Hz /// @param fps framesPerSecond /// virtual bool setFramerate(int fps); /// - /// @brief Apply new pixelDecimation (used from x11, xcb and qt) + /// @brief Apply new framerate software decimation (used from v4l2/MediaFoundation) + /// @param decimation how many frames per second to omit /// - virtual void setPixelDecimation(int pixelDecimation) {} + virtual void setFpsSoftwareDecimation(int decimation); /// - /// @brief Apply new signalThreshold (used from v4l) + /// @brief Apply videoStandard (used from v4l2) /// - virtual void setSignalThreshold( - double redSignalThreshold, - double greenSignalThreshold, - double blueSignalThreshold, - int noSignalCounterThreshold = 50) {} - /// - /// @brief Apply new SignalDetectionOffset (used from v4l) - /// - virtual void setSignalDetectionOffset( - double verticalMin, - double 
horizontalMin, - double verticalMax, - double horizontalMax) {} + virtual void setVideoStandard(VideoStandard videoStandard); /// - /// @brief Apply SignalDetectionEnable (used from v4l) + /// @brief Apply new pixelDecimation /// - virtual void setSignalDetectionEnable(bool enable) {} - - /// - /// @brief Apply CecDetectionEnable (used from v4l) - /// - virtual void setCecDetectionEnable(bool enable) {} - - /// - /// @brief Apply device and videoStanded (used from v4l) - /// - virtual void setDeviceVideoStandard(QString device, VideoStandard videoStandard) {} + virtual bool setPixelDecimation(int pixelDecimation); /// /// @brief Apply display index (used from qt) /// - virtual void setDisplayIndex(int index) {} - - /// - /// @brief Apply path for device (used from framebuffer) - /// - virtual void setDevicePath(const QString& path) {} - - /// - /// @brief get current resulting height of image (after crop) - /// - virtual int getImageWidth() { return _width; } - - /// - /// @brief get current resulting width of image (after crop) - /// - virtual int getImageHeight() { return _height; } + virtual bool setDisplayIndex(int /*index*/) { return true; } /// /// @brief Prevent the real capture implementation from capturing if disabled /// - void setEnabled(bool enable); + virtual void setEnabled(bool enable); /// - /// @brief Get a list of all available V4L devices - /// @return List of all available V4L devices on success else empty List + /// @brief get current resulting height of image (after crop) /// - virtual QStringList getV4L2devices() const { return QStringList(); } + int getImageWidth() const { return _width; } /// - /// @brief Get the V4L device name - /// @param devicePath The device path - /// @return The name of the V4L device on success else empty String + /// @brief get current resulting width of image (after crop) /// - virtual QString getV4L2deviceName(const QString& /*devicePath*/) const { return QString(); } + int getImageHeight() const { return 
_height; } /// - /// @brief Get a name/index pair of supported device inputs - /// @param devicePath The device path - /// @return multi pair of name/index on success else empty pair + /// @brief Get current capture framerate in Hz + /// @param fps framesPerSecond /// - virtual QMultiMap getV4L2deviceInputs(const QString& /*devicePath*/) const { return QMultiMap(); } + int getFramerate() const { return _fps; } /// - /// @brief Get a list of supported device resolutions - /// @param devicePath The device path - /// @return List of resolutions on success else empty List + /// @brief Get capture interval in ms /// - virtual QStringList getResolutions(const QString& /*devicePath*/) const { return QStringList(); } + int getUpdateInterval() const { return 1000/_fps; } /// - /// @brief Get a list of supported device framerates - /// @param devicePath The device path - /// @return List of framerates on success else empty List + /// @brief Get pixelDecimation /// - virtual QStringList getFramerates(const QString& devicePath) const { return QStringList(); } + int getPixelDecimation() const { return _pixelDecimation; } + + QString getGrabberName() const { return _grabberName; } + +protected slots: + /// + /// @brief Set device in error state + /// + /// @param[in] errorMsg The error message to be logged + /// + virtual void setInError( const QString& errorMsg); protected: + + QString _grabberName; + + /// logger instance + Logger * _log; + ImageResampler _imageResampler; bool _useImageResampler; /// the selected VideoMode - VideoMode _videoMode; + VideoMode _videoMode; + + /// the used video standard + VideoStandard _videoStandard; + + /// Image size decimation + int _pixelDecimation; + + /// the used Flip Mode + FlipMode _flipMode; /// With of the captured snapshot [pixels] int _width; @@ -166,15 +151,21 @@ protected: /// frame per second int _fps; + /// fps software decimation + int _fpsSoftwareDecimation; + /// device input int _input; /// number of pixels to crop after 
capturing int _cropLeft, _cropRight, _cropTop, _cropBottom; - bool _enabled; + // Device states - /// logger instance - Logger * _log; + /// Is the device enabled? + bool _isEnabled; + + /// Is the device in error state and stopped? + bool _isDeviceInError; }; diff --git a/include/hyperion/GrabberWrapper.h b/include/hyperion/GrabberWrapper.h index 00a50f6e..fe7dc632 100644 --- a/include/hyperion/GrabberWrapper.h +++ b/include/hyperion/GrabberWrapper.h @@ -12,30 +12,38 @@ #include #include #include +#include #include +#include class Grabber; class GlobalSignals; class QTimer; -/// List of Hyperion instances that requested screen capt -static QList GRABBER_SYS_CLIENTS; -static QList GRABBER_V4L_CLIENTS; - /// -/// This class will be inherted by FramebufferWrapper and others which contains the real capture interface +/// This class will be inherited by GrabberWrappers which contains the real capture interface /// class GrabberWrapper : public QObject { Q_OBJECT public: - GrabberWrapper(const QString& grabberName, Grabber * ggrabber, unsigned width, unsigned height, unsigned updateRate_Hz = 0); + GrabberWrapper(const QString& grabberName, Grabber * ggrabber,int updateRate_Hz = DEFAULT_RATE_HZ); ~GrabberWrapper() override; static GrabberWrapper* instance; static GrabberWrapper* getInstance(){ return instance; } + static const int DEFAULT_RATE_HZ; + static const int DEFAULT_MIN_GRAB_RATE_HZ; + static const int DEFAULT_MAX_GRAB_RATE_HZ; + static const int DEFAULT_PIXELDECIMATION; + + static QMap GRABBER_SYS_CLIENTS; + static QMap GRABBER_V4L_CLIENTS; + static bool GLOBAL_GRABBER_SYS_ENABLE; + static bool GLOBAL_GRABBER_V4L_ENABLE; + /// /// Starts the grabber which produces led values with the specified update rate /// @@ -56,45 +64,17 @@ public: /// virtual bool isActive() const; - /// - /// @brief Get a list of all available V4L devices - /// @return List of all available V4L devices on success else empty List - /// - virtual QStringList getV4L2devices() const; - - /// 
- /// @brief Get the V4L device name - /// @param devicePath The device path - /// @return The name of the V4L device on success else empty String - /// - virtual QString getV4L2deviceName(const QString& devicePath) const; - - /// - /// @brief Get a name/index pair of supported device inputs - /// @param devicePath The device path - /// @return multi pair of name/index on success else empty pair - /// - virtual QMultiMap getV4L2deviceInputs(const QString& devicePath) const; - - /// - /// @brief Get a list of supported device resolutions - /// @param devicePath The device path - /// @return List of resolutions on success else empty List - /// - virtual QStringList getResolutions(const QString& devicePath) const; - - /// - /// @brief Get a list of supported device framerates - /// @param devicePath The device path - /// @return List of framerates on success else empty List - /// - virtual QStringList getFramerates(const QString& devicePath) const; - /// /// @brief Get active grabber name - /// @return Active grabber name + /// @param hyperionInd The instance index + /// @return Active grabbers /// - virtual QString getActive() const; + virtual QStringList getActive(int inst) const; + + bool getSysGrabberState() const { return GLOBAL_GRABBER_SYS_ENABLE; } + void setSysGrabberState(bool sysGrabberState){ GLOBAL_GRABBER_SYS_ENABLE = sysGrabberState; } + bool getV4lGrabberState() const { return GLOBAL_GRABBER_V4L_ENABLE; } + void setV4lGrabberState(bool v4lGrabberState){ GLOBAL_GRABBER_V4L_ENABLE = v4lGrabberState; } static QStringList availableGrabbers(); @@ -130,6 +110,12 @@ public slots: /// virtual void setVideoMode(VideoMode videoMode); + /// + /// Set the Flip mode + /// @param flipMode The new flip mode + /// + virtual void setFlipMode(const QString &flipMode); + /// /// Set the crop values /// @param cropLeft Left pixel crop @@ -137,11 +123,11 @@ public slots: /// @param cropTop Top pixel crop /// @param cropBottom Bottom pixel crop /// - virtual void 
setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom); + virtual void setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom); /// /// @brief Handle settings update from HyperionDaemon Settingsmanager emit - /// @param type settingyType from enum + /// @param type settingsType from enum /// @param config configuration object /// virtual void handleSettingsUpdate(settings::type type, const QJsonDocument& config); @@ -159,22 +145,38 @@ private slots: /// /// @brief Update Update capture rate - /// @param type interval between frames in millisecons + /// @param type interval between frames in milliseconds /// void updateTimer(int interval); protected: + + /// + /// @brief Opens the input device. + /// + /// @return True, on success (i.e. device is ready) + /// + virtual bool open() { return true; } + + /// + /// @brief Closes the input device. + /// + /// @return True on success (i.e. device is closed) + /// + virtual bool close() { return true; } + + QString _grabberName; + /// The Logger instance + Logger * _log; + /// The timer for generating events with the specified update rate QTimer* _timer; - /// The calced update rate [ms] + /// The calculated update rate [ms] int _updateInterval_ms; - /// The Logger instance - Logger * _log; - Grabber *_ggrabber; /// The image used for grabbing frames diff --git a/include/hyperion/Hyperion.h b/include/hyperion/Hyperion.h index 512b3941..4b4e138a 100644 --- a/include/hyperion/Hyperion.h +++ b/include/hyperion/Hyperion.h @@ -99,7 +99,7 @@ public: /// QString getActiveDeviceType() const; - bool getReadOnlyMode() {return _readOnlyMode; }; + bool getReadOnlyMode() {return _readOnlyMode; } public slots: @@ -193,7 +193,7 @@ public slots: bool clear(int priority, bool forceClearAll=false); /// ############# - // EFFECTENGINE + /// EFFECTENGINE /// /// @brief Get a pointer to the effect engine /// @return EffectEngine instance pointer diff --git a/include/hyperion/PriorityMuxer.h 
b/include/hyperion/PriorityMuxer.h index 278e33fe..36026fbe 100644 --- a/include/hyperion/PriorityMuxer.h +++ b/include/hyperion/PriorityMuxer.h @@ -57,8 +57,11 @@ public: //Foreground and Background priorities const static int FG_PRIORITY; const static int BG_PRIORITY; + const static int MANUAL_SELECTED_PRIORITY; /// The lowest possible priority, which is used when no priority channels are active const static int LOWEST_PRIORITY; + /// Timeout used to identify a non active priority + const static int TIMEOUT_NOT_ACTIVE_PRIO; /// /// Constructs the PriorityMuxer for the given number of LEDs (used to switch to black when @@ -84,7 +87,7 @@ public: /// @param update True to update _currentPriority - INTERNAL usage. /// @return True if changed has been applied, false if the state is unchanged /// - bool setSourceAutoSelectEnabled(bool enabel, bool update = true); + bool setSourceAutoSelectEnabled(bool enable, bool update = true); /// /// @brief Get the state of source auto selection diff --git a/include/utils/ImageResampler.h b/include/utils/ImageResampler.h index 62adb2b9..7aba7a40 100644 --- a/include/utils/ImageResampler.h +++ b/include/utils/ImageResampler.h @@ -9,12 +9,13 @@ class ImageResampler { public: ImageResampler(); - ~ImageResampler(); + ~ImageResampler() {} - void setHorizontalPixelDecimation(int decimator); - void setVerticalPixelDecimation(int decimator); + void setHorizontalPixelDecimation(int decimator) { _horizontalDecimation = decimator; } + void setVerticalPixelDecimation(int decimator) { _verticalDecimation = decimator; } void setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom); - void setVideoMode(VideoMode mode); + void setVideoMode(VideoMode mode) { _videoMode = mode; } + void setFlipMode(FlipMode mode) { _flipMode = mode; } void processImage(const uint8_t * data, int width, int height, int lineLength, PixelFormat pixelFormat, Image & outputImage) const; private: @@ -25,5 +26,6 @@ private: int _cropTop; int _cropBottom; 
VideoMode _videoMode; + FlipMode _flipMode; }; diff --git a/include/utils/PixelFormat.h b/include/utils/PixelFormat.h index a60a20f4..b639faf5 100644 --- a/include/utils/PixelFormat.h +++ b/include/utils/PixelFormat.h @@ -12,7 +12,9 @@ enum class PixelFormat { BGR24, RGB32, BGR32, -#ifdef HAVE_JPEG_DECODER + NV12, + I420, +#ifdef HAVE_TURBO_JPEG MJPEG, #endif NO_CHANGE @@ -23,32 +25,40 @@ inline PixelFormat parsePixelFormat(const QString& pixelFormat) // convert to lower case QString format = pixelFormat.toLower(); - if (format.compare("yuyv") ) + if (format.compare("yuyv") == 0) { return PixelFormat::YUYV; } - else if (format.compare("uyvy") ) + else if (format.compare("uyvy") == 0) { return PixelFormat::UYVY; } - else if (format.compare("bgr16") ) + else if (format.compare("bgr16") == 0) { return PixelFormat::BGR16; } - else if (format.compare("bgr24") ) + else if (format.compare("bgr24") == 0) { return PixelFormat::BGR24; } - else if (format.compare("rgb32") ) + else if (format.compare("rgb32") == 0) { return PixelFormat::RGB32; } - else if (format.compare("bgr32") ) + else if (format.compare("bgr32") == 0) { return PixelFormat::BGR32; } -#ifdef HAVE_JPEG_DECODER - else if (format.compare("mjpeg") ) + else if (format.compare("i420") == 0) + { + return PixelFormat::I420; + } + else if (format.compare("nv12") == 0) + { + return PixelFormat::NV12; + } +#ifdef HAVE_TURBO_JPEG + else if (format.compare("mjpeg") == 0) { return PixelFormat::MJPEG; } @@ -57,3 +67,102 @@ inline PixelFormat parsePixelFormat(const QString& pixelFormat) // return the default NO_CHANGE return PixelFormat::NO_CHANGE; } + +inline QString pixelFormatToString(const PixelFormat& pixelFormat) +{ + + if ( pixelFormat == PixelFormat::YUYV) + { + return "YUYV"; + } + else if (pixelFormat == PixelFormat::UYVY) + { + return "UYVY"; + } + else if (pixelFormat == PixelFormat::BGR16) + { + return "BGR16"; + } + else if (pixelFormat == PixelFormat::BGR24) + { + return "BGR24"; + } + else if (pixelFormat == 
PixelFormat::RGB32) + { + return "RGB32"; + } + else if (pixelFormat == PixelFormat::BGR32) + { + return "BGR32"; + } + else if (pixelFormat == PixelFormat::I420) + { + return "I420"; + } + else if (pixelFormat == PixelFormat::NV12) + { + return "NV12"; + } +#ifdef HAVE_TURBO_JPEG + else if (pixelFormat == PixelFormat::MJPEG) + { + return "MJPEG"; + } +#endif + + // return the default NO_CHANGE + return "NO_CHANGE"; +} + +/** + * Enumeration of the possible flip modes + */ + +enum class FlipMode +{ + HORIZONTAL, + VERTICAL, + BOTH, + NO_CHANGE +}; + +inline FlipMode parseFlipMode(const QString& flipMode) +{ + // convert to lower case + QString mode = flipMode.toLower(); + + if (mode.compare("horizontal") == 0) + { + return FlipMode::HORIZONTAL; + } + else if (mode.compare("vertical") == 0) + { + return FlipMode::VERTICAL; + } + else if (mode.compare("both") == 0) + { + return FlipMode::BOTH; + } + + // return the default NO_CHANGE + return FlipMode::NO_CHANGE; +} + +inline QString flipModeToString(const FlipMode& flipMode) +{ + if ( flipMode == FlipMode::HORIZONTAL) + { + return "horizontal"; + } + else if (flipMode == FlipMode::VERTICAL) + { + return "vertical"; + } + else if (flipMode == FlipMode::BOTH) + { + return "both"; + } + + // return the default NO_CHANGE + return "NO_CHANGE"; +} diff --git a/include/grabber/VideoStandard.h b/include/utils/VideoStandard.h similarity index 51% rename from include/grabber/VideoStandard.h rename to include/utils/VideoStandard.h index adcf43be..d73a1a21 100644 --- a/include/grabber/VideoStandard.h +++ b/include/utils/VideoStandard.h @@ -1,5 +1,7 @@ #pragma once +#include + /** * Enumeration of the possible video standards the grabber can be set to */ @@ -13,17 +15,17 @@ enum class VideoStandard { inline VideoStandard parseVideoStandard(const QString& videoStandard) { // convert to lower case - QString standard = videoStandard.toLower(); + QString standard = videoStandard.toUpper(); - if (standard == "pal") + if (standard == 
"PAL") { return VideoStandard::PAL; } - else if (standard == "ntsc") + else if (standard == "NTSC") { return VideoStandard::NTSC; } - else if (standard == "secam") + else if (standard == "SECAM") { return VideoStandard::SECAM; } @@ -31,3 +33,14 @@ inline VideoStandard parseVideoStandard(const QString& videoStandard) // return the default NO_CHANGE return VideoStandard::NO_CHANGE; } + +inline QString VideoStandard2String(VideoStandard videoStandard) +{ + switch (videoStandard) + { + case VideoStandard::PAL: return "PAL"; + case VideoStandard::NTSC: return "NTSC"; + case VideoStandard::SECAM: return "SECAM"; + default: return "NO_CHANGE"; + } +} diff --git a/include/utils/global_defines.h b/include/utils/global_defines.h index eb75d339..e5a6808e 100644 --- a/include/utils/global_defines.h +++ b/include/utils/global_defines.h @@ -1,4 +1,6 @@ #pragma once #define QSTRING_CSTR(str) str.toLocal8Bit().constData() +typedef QList< int > QIntList; + diff --git a/include/utils/hyperion.h b/include/utils/hyperion.h index 5bb44ded..4ec913a9 100644 --- a/include/utils/hyperion.h +++ b/include/utils/hyperion.h @@ -8,6 +8,7 @@ // fg effect #include #include +#include /// /// @brief Provide utility methods for Hyperion class @@ -17,7 +18,6 @@ namespace hyperion { void handleInitialEffect(Hyperion* hyperion, const QJsonObject& FGEffectConfig) { #define FGCONFIG_ARRAY fgColorConfig.toArray() - const int DURATION_INFINITY = 0; // initial foreground effect/color if (FGEffectConfig["enable"].toBool(true)) @@ -27,7 +27,7 @@ namespace hyperion { const QJsonValue fgColorConfig = FGEffectConfig["color"]; int default_fg_duration_ms = 3000; int fg_duration_ms = FGEffectConfig["duration_ms"].toInt(default_fg_duration_ms); - if (fg_duration_ms == DURATION_INFINITY) + if (fg_duration_ms <= Effect::ENDLESS) { fg_duration_ms = default_fg_duration_ms; Warning(Logger::getInstance("HYPERION"), "foreground effect duration 'infinity' is forbidden, set to default value %d ms",default_fg_duration_ms); 
diff --git a/libsrc/api/CMakeLists.txt b/libsrc/api/CMakeLists.txt index 64180a9c..c4ce69c0 100644 --- a/libsrc/api/CMakeLists.txt +++ b/libsrc/api/CMakeLists.txt @@ -1,3 +1,12 @@ +# Find the BCM-package (VC control) +IF ( "${PLATFORM}" MATCHES rpi) + find_package(BCM REQUIRED) + include_directories(${BCM_INCLUDE_DIRS}) +ELSE() + SET(BCM_INCLUDE_DIRS "") + SET(BCM_LIBRARIES "") +ENDIF() + # Define the current source locations SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/api) @@ -12,6 +21,11 @@ add_library(hyperion-api ${Api_RESOURCES} ) +if(ENABLE_DX) + include_directories(${DIRECTX9_INCLUDE_DIRS}) + target_link_libraries(hyperion-api ${DIRECTX9_LIBRARIES}) +endif(ENABLE_DX) + target_link_libraries(hyperion-api hyperion hyperion-utils diff --git a/libsrc/api/JSONRPC_schema/schema-inputsource.json b/libsrc/api/JSONRPC_schema/schema-inputsource.json new file mode 100644 index 00000000..edb9eb7c --- /dev/null +++ b/libsrc/api/JSONRPC_schema/schema-inputsource.json @@ -0,0 +1,28 @@ +{ + "type":"object", + "required":true, + "properties": { + "command": { + "type": "string", + "required": true, + "enum": [ "inputsource" ] + }, + "tan": { + "type": "integer" + }, + "subcommand": { + "type": "string", + "required": true, + "enum": [ "discover", "getProperties" ] + }, + "sourceType": { + "type": "string", + "required": true + }, + "params": { + "type": "object", + "required": false + } + }, + "additionalProperties": false +} diff --git a/libsrc/api/JSONRPC_schema/schema.json b/libsrc/api/JSONRPC_schema/schema.json index e13982fe..12dcb6c1 100644 --- a/libsrc/api/JSONRPC_schema/schema.json +++ b/libsrc/api/JSONRPC_schema/schema.json @@ -5,7 +5,7 @@ "command": { "type" : "string", "required" : true, - "enum" : ["color", "image", "effect", "create-effect", "delete-effect", "serverinfo", "clear", "clearall", "adjustment", "sourceselect", "config", "componentstate", "ledcolors", "logging", "processing", "sysinfo", "videomode", "authorize", "instance", "leddevice", 
"transform", "correction" , "temperature"] + "enum": [ "color", "image", "effect", "create-effect", "delete-effect", "serverinfo", "clear", "clearall", "adjustment", "sourceselect", "config", "componentstate", "ledcolors", "logging", "processing", "sysinfo", "videomode", "authorize", "instance", "leddevice", "inputsource", "transform", "correction", "temperature" ] } } } diff --git a/libsrc/api/JSONRPC_schemas.qrc b/libsrc/api/JSONRPC_schemas.qrc index 2018684f..1f09fe1a 100644 --- a/libsrc/api/JSONRPC_schemas.qrc +++ b/libsrc/api/JSONRPC_schemas.qrc @@ -20,7 +20,8 @@ JSONRPC_schema/schema-videomode.json JSONRPC_schema/schema-authorize.json JSONRPC_schema/schema-instance.json - JSONRPC_schema/schema-leddevice.json + JSONRPC_schema/schema-leddevice.json + JSONRPC_schema/schema-inputsource.json JSONRPC_schema/schema-hyperion-classic.json JSONRPC_schema/schema-hyperion-classic.json diff --git a/libsrc/api/JsonAPI.cpp b/libsrc/api/JsonAPI.cpp index e75fa71b..1c1ee6e7 100644 --- a/libsrc/api/JsonAPI.cpp +++ b/libsrc/api/JsonAPI.cpp @@ -16,7 +16,45 @@ #include #include +#include // Required to determine the cmake options + #include +#include + +#if defined(ENABLE_MF) + #include +#elif defined(ENABLE_V4L2) + #include +#endif + +#if defined(ENABLE_X11) + #include +#endif + +#if defined(ENABLE_XCB) + #include +#endif + +#if defined(ENABLE_DX) + #include +#endif + +#if defined(ENABLE_FB) + #include +#endif + +#if defined(ENABLE_DISPMANX) + #include +#endif + +#if defined(ENABLE_AMLOGIC) + #include +#endif + +#if defined(ENABLE_OSX) + #include +#endif + #include #include #include @@ -41,7 +79,10 @@ using namespace hyperion; -JsonAPI::JsonAPI(QString peerAddress, Logger* log, bool localConnection, QObject* parent, bool noListener) +// Constants +namespace { const bool verbose = false; } + +JsonAPI::JsonAPI(QString peerAddress, Logger *log, bool localConnection, QObject *parent, bool noListener) : API(log, localConnection, parent) { _noListener = noListener; @@ -86,7 +127,7 @@ 
bool JsonAPI::handleInstanceSwitch(quint8 inst, bool forced) return false; } -void JsonAPI::handleMessage(const QString& messageString, const QString& httpAuthHeader) +void JsonAPI::handleMessage(const QString &messageString, const QString &httpAuthHeader) { const QString ident = "JsonRpc@" + _peerAddress; QJsonObject message; @@ -174,6 +215,8 @@ proceed: handleInstanceCommand(message, command, tan); else if (command == "leddevice") handleLedDeviceCommand(message, command, tan); + else if (command == "inputsource") + handleInputSourceCommand(message, command, tan); // BEGIN | The following commands are deprecated but used to ensure backward compatibility with hyperion Classic remote control else if (command == "clearall") @@ -187,17 +230,17 @@ proceed: handleNotImplemented(command, tan); } -void JsonAPI::handleColorCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleColorCommand(const QJsonObject &message, const QString &command, int tan) { emit forwardJsonMessage(message); int priority = message["priority"].toInt(); int duration = message["duration"].toInt(-1); const QString origin = message["origin"].toString("JsonRpc") + "@" + _peerAddress; - const QJsonArray& jsonColor = message["color"].toArray(); + const QJsonArray &jsonColor = message["color"].toArray(); std::vector colors; // TODO faster copy - for (const auto& entry : jsonColor) + for (const auto &entry : jsonColor) { colors.emplace_back(uint8_t(entry.toInt())); } @@ -206,7 +249,7 @@ void JsonAPI::handleColorCommand(const QJsonObject& message, const QString& comm sendSuccessReply(command, tan); } -void JsonAPI::handleImageCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleImageCommand(const QJsonObject &message, const QString &command, int tan) { emit forwardJsonMessage(message); @@ -230,7 +273,7 @@ void JsonAPI::handleImageCommand(const QJsonObject& message, const QString& comm sendSuccessReply(command, tan); } -void 
JsonAPI::handleEffectCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleEffectCommand(const QJsonObject &message, const QString &command, int tan) { emit forwardJsonMessage(message); @@ -249,19 +292,19 @@ void JsonAPI::handleEffectCommand(const QJsonObject& message, const QString& com sendErrorReply("Effect '" + dat.effectName + "' not found", command, tan); } -void JsonAPI::handleCreateEffectCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleCreateEffectCommand(const QJsonObject &message, const QString &command, int tan) { const QString resultMsg = API::saveEffect(message); resultMsg.isEmpty() ? sendSuccessReply(command, tan) : sendErrorReply(resultMsg, command, tan); } -void JsonAPI::handleDeleteEffectCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleDeleteEffectCommand(const QJsonObject &message, const QString &command, int tan) { const QString res = API::deleteEffect(message["name"].toString()); res.isEmpty() ? 
sendSuccessReply(command, tan) : sendErrorReply(res, command, tan); } -void JsonAPI::handleSysInfoCommand(const QJsonObject&, const QString& command, int tan) +void JsonAPI::handleSysInfoCommand(const QJsonObject &, const QString &command, int tan) { // create result QJsonObject result; @@ -304,7 +347,7 @@ void JsonAPI::handleSysInfoCommand(const QJsonObject&, const QString& command, i emit callbackMessage(result); } -void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleServerInfoCommand(const QJsonObject &message, const QString &command, int tan) { QJsonObject info; @@ -315,9 +358,9 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& activePriorities.removeAll(255); int currentPriority = _hyperion->getCurrentPriority(); - for (int priority : activePriorities) + for(int priority : activePriorities) { - const Hyperion::InputInfo& priorityInfo = _hyperion->getPriorityInfo(priority); + const Hyperion::InputInfo &priorityInfo = _hyperion->getPriorityInfo(priority); QJsonObject item; item["priority"] = priority; if (priorityInfo.timeoutTime_ms > 0) @@ -349,9 +392,9 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // add HSL Value to Array QJsonArray HSLValue; ColorSys::rgb2hsl(priorityInfo.ledColors.begin()->red, - priorityInfo.ledColors.begin()->green, - priorityInfo.ledColors.begin()->blue, - Hue, Saturation, Luminace); + priorityInfo.ledColors.begin()->green, + priorityInfo.ledColors.begin()->blue, + Hue, Saturation, Luminace); HSLValue.append(Hue); HSLValue.append(Saturation); @@ -362,8 +405,8 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& } (priority == currentPriority) - ? priorities.prepend(item) - : priorities.append(item); + ? 
priorities.prepend(item) + : priorities.append(item); } info["priorities"] = priorities; @@ -371,9 +414,9 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // collect adjustment information QJsonArray adjustmentArray; - for (const QString& adjustmentId : _hyperion->getAdjustmentIds()) + for (const QString &adjustmentId : _hyperion->getAdjustmentIds()) { - const ColorAdjustment* colorAdjustment = _hyperion->getAdjustment(adjustmentId); + const ColorAdjustment *colorAdjustment = _hyperion->getAdjustment(adjustmentId); if (colorAdjustment == nullptr) { Error(_log, "Incorrect color adjustment id: %s", QSTRING_CSTR(adjustmentId)); @@ -440,8 +483,8 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // collect effect info QJsonArray effects; - const std::list& effectsDefinitions = _hyperion->getEffects(); - for (const EffectDefinition& effectDefinition : effectsDefinitions) + const std::list &effectsDefinitions = _hyperion->getEffects(); + for (const EffectDefinition &effectDefinition : effectsDefinitions) { QJsonObject effect; effect["name"] = effectDefinition.name; @@ -467,11 +510,18 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& QJsonObject grabbers; QJsonArray availableGrabbers; -#if defined(ENABLE_DISPMANX) || defined(ENABLE_V4L2) || defined(ENABLE_FB) || defined(ENABLE_AMLOGIC) || defined(ENABLE_OSX) || defined(ENABLE_X11) || defined(ENABLE_XCB) || defined(ENABLE_QT) +#if defined(ENABLE_DISPMANX) || defined(ENABLE_V4L2) || defined(ENABLE_MF) || defined(ENABLE_FB) || defined(ENABLE_AMLOGIC) || defined(ENABLE_OSX) || defined(ENABLE_X11) || defined(ENABLE_XCB) || defined(ENABLE_QT) - if (GrabberWrapper::getInstance() != nullptr) + if ( GrabberWrapper::getInstance() != nullptr ) { - grabbers["active"] = GrabberWrapper::getInstance()->getActive(); + QStringList activeGrabbers = GrabberWrapper::getInstance()->getActive(_hyperion->getInstanceIndex()); + QJsonArray 
activeGrabberNames; + for (auto grabberName : activeGrabbers) + { + activeGrabberNames.append(grabberName); + } + + grabbers["active"] = activeGrabberNames; } // get available grabbers @@ -480,55 +530,20 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& availableGrabbers.append(grabber); } -#endif - -#if defined(ENABLE_V4L2) - - QJsonArray availableV4L2devices; - for (const auto& devicePath : GrabberWrapper::getInstance()->getV4L2devices()) - { - QJsonObject device; - device["device"] = devicePath; - device["name"] = GrabberWrapper::getInstance()->getV4L2deviceName(devicePath); - - QJsonArray availableInputs; - QMultiMap inputs = GrabberWrapper::getInstance()->getV4L2deviceInputs(devicePath); - for (auto input = inputs.begin(); input != inputs.end(); input++) - { - QJsonObject availableInput; - availableInput["inputName"] = input.key(); - availableInput["inputIndex"] = input.value(); - availableInputs.append(availableInput); - } - device.insert("inputs", availableInputs); - - QJsonArray availableResolutions; - QStringList resolutions = GrabberWrapper::getInstance()->getResolutions(devicePath); - for (auto resolution : resolutions) - { - availableResolutions.append(resolution); - } - device.insert("resolutions", availableResolutions); - - QJsonArray availableFramerates; - QStringList framerates = GrabberWrapper::getInstance()->getFramerates(devicePath); - for (auto framerate : framerates) - { - availableFramerates.append(framerate); - } - device.insert("framerates", availableFramerates); - - availableV4L2devices.append(device); - } - - grabbers["v4l2_properties"] = availableV4L2devices; - #endif grabbers["available"] = availableGrabbers; info["videomode"] = QString(videoMode2String(_hyperion->getCurrentVideoMode())); info["grabbers"] = grabbers; + QJsonObject cecInfo; +#if defined(ENABLE_CEC) + cecInfo["enabled"] = true; +#else + cecInfo["enabled"] = false; +#endif + info["cec"] = cecInfo; + // get available components QJsonArray 
component; std::map components = _hyperion->getComponentRegister().getRegister(); @@ -547,7 +562,7 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // add sessions QJsonArray sessions; #ifdef ENABLE_AVAHI - for (auto session : BonjourBrowserWrapper::getInstance()->getAllServices()) + for (auto session: BonjourBrowserWrapper::getInstance()->getAllServices()) { if (session.port < 0) continue; @@ -564,7 +579,7 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& #endif // add instance info QJsonArray instanceInfo; - for (const auto& entry : API::getAllInstanceData()) + for (const auto &entry : API::getAllInstanceData()) { QJsonObject obj; obj.insert("friendly_name", entry["friendly_name"].toString()); @@ -586,7 +601,7 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // TRANSFORM INFORMATION (DEFAULT VALUES) QJsonArray transformArray; - for (const QString& transformId : _hyperion->getAdjustmentIds()) + for (const QString &transformId : _hyperion->getAdjustmentIds()) { QJsonObject transform; QJsonArray blacklevel, whitelevel, gamma, threshold; @@ -617,7 +632,7 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // ACTIVE EFFECT INFO QJsonArray activeEffects; - for (const ActiveEffectDefinition& activeEffectDefinition : _hyperion->getActiveEffects()) + for (const ActiveEffectDefinition &activeEffectDefinition : _hyperion->getActiveEffects()) { if (activeEffectDefinition.priority != PriorityMuxer::LOWEST_PRIORITY - 1) { @@ -634,15 +649,15 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // ACTIVE STATIC LED COLOR QJsonArray activeLedColors; - const Hyperion::InputInfo& priorityInfo = _hyperion->getPriorityInfo(_hyperion->getCurrentPriority()); + const Hyperion::InputInfo &priorityInfo = _hyperion->getPriorityInfo(_hyperion->getCurrentPriority()); if (priorityInfo.componentId == hyperion::COMP_COLOR && 
!priorityInfo.ledColors.empty()) { QJsonObject LEDcolor; // check if LED Color not Black (0,0,0) if ((priorityInfo.ledColors.begin()->red + - priorityInfo.ledColors.begin()->green + - priorityInfo.ledColors.begin()->blue != - 0)) + priorityInfo.ledColors.begin()->green + + priorityInfo.ledColors.begin()->blue != + 0)) { QJsonObject LEDcolor; @@ -659,9 +674,9 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& // add HSL Value to Array QJsonArray HSLValue; ColorSys::rgb2hsl(priorityInfo.ledColors.begin()->red, - priorityInfo.ledColors.begin()->green, - priorityInfo.ledColors.begin()->blue, - Hue, Saturation, Luminace); + priorityInfo.ledColors.begin()->green, + priorityInfo.ledColors.begin()->blue, + Hue, Saturation, Luminace); HSLValue.append(Hue); HSLValue.append(Saturation); @@ -706,7 +721,7 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject& message, const QString& } } -void JsonAPI::handleClearCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleClearCommand(const QJsonObject &message, const QString &command, int tan) { emit forwardJsonMessage(message); int priority = message["priority"].toInt(); @@ -720,7 +735,7 @@ void JsonAPI::handleClearCommand(const QJsonObject& message, const QString& comm sendSuccessReply(command, tan); } -void JsonAPI::handleClearallCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleClearallCommand(const QJsonObject &message, const QString &command, int tan) { emit forwardJsonMessage(message); QString replyMsg; @@ -728,12 +743,12 @@ void JsonAPI::handleClearallCommand(const QJsonObject& message, const QString& c sendSuccessReply(command, tan); } -void JsonAPI::handleAdjustmentCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleAdjustmentCommand(const QJsonObject &message, const QString &command, int tan) { - const QJsonObject& adjustment = message["adjustment"].toObject(); + const 
QJsonObject &adjustment = message["adjustment"].toObject(); const QString adjustmentId = adjustment["id"].toString(_hyperion->getAdjustmentIds().first()); - ColorAdjustment* colorAdjustment = _hyperion->getAdjustment(adjustmentId); + ColorAdjustment *colorAdjustment = _hyperion->getAdjustment(adjustmentId); if (colorAdjustment == nullptr) { Warning(_log, "Incorrect adjustment identifier: %s", adjustmentId.toStdString().c_str()); @@ -742,39 +757,39 @@ void JsonAPI::handleAdjustmentCommand(const QJsonObject& message, const QString& if (adjustment.contains("red")) { - const QJsonArray& values = adjustment["red"].toArray(); + const QJsonArray &values = adjustment["red"].toArray(); colorAdjustment->_rgbRedAdjustment.setAdjustment(values[0u].toInt(), values[1u].toInt(), values[2u].toInt()); } if (adjustment.contains("green")) { - const QJsonArray& values = adjustment["green"].toArray(); + const QJsonArray &values = adjustment["green"].toArray(); colorAdjustment->_rgbGreenAdjustment.setAdjustment(values[0u].toInt(), values[1u].toInt(), values[2u].toInt()); } if (adjustment.contains("blue")) { - const QJsonArray& values = adjustment["blue"].toArray(); + const QJsonArray &values = adjustment["blue"].toArray(); colorAdjustment->_rgbBlueAdjustment.setAdjustment(values[0u].toInt(), values[1u].toInt(), values[2u].toInt()); } if (adjustment.contains("cyan")) { - const QJsonArray& values = adjustment["cyan"].toArray(); + const QJsonArray &values = adjustment["cyan"].toArray(); colorAdjustment->_rgbCyanAdjustment.setAdjustment(values[0u].toInt(), values[1u].toInt(), values[2u].toInt()); } if (adjustment.contains("magenta")) { - const QJsonArray& values = adjustment["magenta"].toArray(); + const QJsonArray &values = adjustment["magenta"].toArray(); colorAdjustment->_rgbMagentaAdjustment.setAdjustment(values[0u].toInt(), values[1u].toInt(), values[2u].toInt()); } if (adjustment.contains("yellow")) { - const QJsonArray& values = adjustment["yellow"].toArray(); + const QJsonArray 
&values = adjustment["yellow"].toArray(); colorAdjustment->_rgbYellowAdjustment.setAdjustment(values[0u].toInt(), values[1u].toInt(), values[2u].toInt()); } if (adjustment.contains("white")) { - const QJsonArray& values = adjustment["white"].toArray(); + const QJsonArray &values = adjustment["white"].toArray(); colorAdjustment->_rgbWhiteAdjustment.setAdjustment(values[0u].toInt(), values[1u].toInt(), values[2u].toInt()); } @@ -814,7 +829,7 @@ void JsonAPI::handleAdjustmentCommand(const QJsonObject& message, const QString& sendSuccessReply(command, tan); } -void JsonAPI::handleSourceSelectCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleSourceSelectCommand(const QJsonObject &message, const QString &command, int tan) { if (message.contains("auto")) { @@ -832,7 +847,7 @@ void JsonAPI::handleSourceSelectCommand(const QJsonObject& message, const QStrin sendSuccessReply(command, tan); } -void JsonAPI::handleConfigCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleConfigCommand(const QJsonObject &message, const QString &command, int tan) { QString subcommand = message["subcommand"].toString(""); QString full_command = command + "-" + subcommand; @@ -876,14 +891,14 @@ void JsonAPI::handleConfigCommand(const QJsonObject& message, const QString& com } } -void JsonAPI::handleConfigSetCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleConfigSetCommand(const QJsonObject &message, const QString &command, int tan) { if (message.contains("config")) { QJsonObject config = message["config"].toObject(); if (API::isHyperionEnabled()) { - if (API::saveSettings(config)) + if ( API::saveSettings(config) ) { sendSuccessReply(command, tan); } @@ -897,7 +912,7 @@ void JsonAPI::handleConfigSetCommand(const QJsonObject& message, const QString& } } -void JsonAPI::handleSchemaGetCommand(const QJsonObject& message, const QString& command, int tan) +void 
JsonAPI::handleSchemaGetCommand(const QJsonObject &message, const QString &command, int tan) { // create result QJsonObject schemaJson, alldevices, properties; @@ -912,7 +927,7 @@ void JsonAPI::handleSchemaGetCommand(const QJsonObject& message, const QString& { schemaJson = QJsonFactory::readSchema(schemaFile); } - catch (const std::runtime_error& error) + catch (const std::runtime_error &error) { throw std::runtime_error(error.what()); } @@ -949,9 +964,9 @@ void JsonAPI::handleSchemaGetCommand(const QJsonObject& message, const QString& sendSuccessDataReply(QJsonDocument(schemaJson), command, tan); } -void JsonAPI::handleComponentStateCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleComponentStateCommand(const QJsonObject &message, const QString &command, int tan) { - const QJsonObject& componentState = message["componentstate"].toObject(); + const QJsonObject &componentState = message["componentstate"].toObject(); QString comp = componentState["component"].toString("invalid"); bool compState = componentState["state"].toBool(true); QString replyMsg; @@ -964,7 +979,7 @@ void JsonAPI::handleComponentStateCommand(const QJsonObject& message, const QStr sendSuccessReply(command, tan); } -void JsonAPI::handleLedColorsCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleLedColorsCommand(const QJsonObject &message, const QString &command, int tan) { // create result QString subcommand = message["subcommand"].toString(""); @@ -978,22 +993,22 @@ void JsonAPI::handleLedColorsCommand(const QJsonObject& message, const QString& _streaming_leds_reply["command"] = command + "-ledstream-update"; _streaming_leds_reply["tan"] = tan; - connect(_hyperion, &Hyperion::rawLedColors, this, [=](const std::vector& ledValues) { + connect(_hyperion, &Hyperion::rawLedColors, this, [=](const std::vector &ledValues) { _currentLedValues = ledValues; // necessary because Qt::UniqueConnection for lambdas does not work 
until 5.9 // see: https://bugreports.qt.io/browse/QTBUG-52438 if (!_ledStreamConnection) _ledStreamConnection = connect(_ledStreamTimer, &QTimer::timeout, this, [=]() { - emit streamLedcolorsUpdate(_currentLedValues); - }, - Qt::UniqueConnection); + emit streamLedcolorsUpdate(_currentLedValues); + }, + Qt::UniqueConnection); // start the timer if (!_ledStreamTimer->isActive() || _ledStreamTimer->interval() != streaming_interval) _ledStreamTimer->start(streaming_interval); - }, - Qt::UniqueConnection); + }, + Qt::UniqueConnection); // push once _hyperion->update(); } @@ -1023,7 +1038,7 @@ void JsonAPI::handleLedColorsCommand(const QJsonObject& message, const QString& sendSuccessReply(command + "-" + subcommand, tan); } -void JsonAPI::handleLoggingCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleLoggingCommand(const QJsonObject &message, const QString &command, int tan) { // create result QString subcommand = message["subcommand"].toString(""); @@ -1065,25 +1080,25 @@ void JsonAPI::handleLoggingCommand(const QJsonObject& message, const QString& co } } -void JsonAPI::handleProcessingCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleProcessingCommand(const QJsonObject &message, const QString &command, int tan) { API::setLedMappingType(ImageProcessor::mappingTypeToInt(message["mappingType"].toString("multicolor_mean"))); sendSuccessReply(command, tan); } -void JsonAPI::handleVideoModeCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleVideoModeCommand(const QJsonObject &message, const QString &command, int tan) { API::setVideoMode(parse3DMode(message["videoMode"].toString("2D"))); sendSuccessReply(command, tan); } -void JsonAPI::handleAuthorizeCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleAuthorizeCommand(const QJsonObject &message, const QString &command, int tan) { - const QString& subc = 
message["subcommand"].toString().trimmed(); - const QString& id = message["id"].toString().trimmed(); - const QString& password = message["password"].toString().trimmed(); - const QString& newPassword = message["newPassword"].toString().trimmed(); - const QString& comment = message["comment"].toString().trimmed(); + const QString &subc = message["subcommand"].toString().trimmed(); + const QString &id = message["id"].toString().trimmed(); + const QString &password = message["password"].toString().trimmed(); + const QString &newPassword = message["newPassword"].toString().trimmed(); + const QString &comment = message["comment"].toString().trimmed(); // catch test if auth is required if (subc == "tokenRequired") @@ -1194,8 +1209,8 @@ void JsonAPI::handleAuthorizeCommand(const QJsonObject& message, const QString& if (subc == "requestToken") { // use id/comment - const QString& comment = message["comment"].toString().trimmed(); - const bool& acc = message["accept"].toBool(true); + const QString &comment = message["comment"].toString().trimmed(); + const bool &acc = message["accept"].toBool(true); if (acc) API::setNewTokenRequest(comment, id, tan); else @@ -1211,7 +1226,7 @@ void JsonAPI::handleAuthorizeCommand(const QJsonObject& message, const QString& if (API::getPendingTokenRequests(vec)) { QJsonArray arr; - for (const auto& entry : vec) + for (const auto &entry : vec) { QJsonObject obj; obj["comment"] = entry.comment; @@ -1233,7 +1248,7 @@ void JsonAPI::handleAuthorizeCommand(const QJsonObject& message, const QString& if (subc == "answerRequest") { // use id - const bool& accept = message["accept"].toBool(false); + const bool &accept = message["accept"].toBool(false); if (!API::handlePendingTokenRequest(id, accept)) sendErrorReply("No Authorization", command + "-" + subc, tan); return; @@ -1246,7 +1261,7 @@ void JsonAPI::handleAuthorizeCommand(const QJsonObject& message, const QString& if (API::getTokenList(defVect)) { QJsonArray tArr; - for (const auto& entry : 
defVect) + for (const auto &entry : defVect) { QJsonObject subO; subO["comment"] = entry.comment; @@ -1265,7 +1280,7 @@ void JsonAPI::handleAuthorizeCommand(const QJsonObject& message, const QString& // login if (subc == "login") { - const QString& token = message["token"].toString().trimmed(); + const QString &token = message["token"].toString().trimmed(); // catch token if (!token.isEmpty()) @@ -1313,11 +1328,11 @@ void JsonAPI::handleAuthorizeCommand(const QJsonObject& message, const QString& } } -void JsonAPI::handleInstanceCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleInstanceCommand(const QJsonObject &message, const QString &command, int tan) { - const QString& subc = message["subcommand"].toString(); - const quint8& inst = message["instance"].toInt(); - const QString& name = message["name"].toString(); + const QString &subc = message["subcommand"].toString(); + const quint8 &inst = message["instance"].toInt(); + const QString &name = message["name"].toString(); if (subc == "switchTo") { @@ -1334,7 +1349,7 @@ void JsonAPI::handleInstanceCommand(const QJsonObject& message, const QString& c if (subc == "startInstance") { - connect(this, &API::onStartInstanceResponse, [=](const int& tan) { sendSuccessReply(command + "-" + subc, tan); }); + connect(this, &API::onStartInstanceResponse, [=] (const int &tan) { sendSuccessReply(command + "-" + subc, tan); }); if (!API::startInstance(inst, tan)) sendErrorReply("Can't start Hyperion instance index " + QString::number(inst), command + "-" + subc, tan); @@ -1384,12 +1399,12 @@ void JsonAPI::handleInstanceCommand(const QJsonObject& message, const QString& c } } -void JsonAPI::handleLedDeviceCommand(const QJsonObject& message, const QString& command, int tan) +void JsonAPI::handleLedDeviceCommand(const QJsonObject &message, const QString &command, int tan) { - Debug(_log, "message: [%s]", QString(QJsonDocument(message).toJson(QJsonDocument::Compact)).toUtf8().constData()); + 
Debug(_log, "message: [%s]", QString(QJsonDocument(message).toJson(QJsonDocument::Compact)).toUtf8().constData() ); - const QString& subc = message["subcommand"].toString().trimmed(); - const QString& devType = message["ledDeviceType"].toString().trimmed(); + const QString &subc = message["subcommand"].toString().trimmed(); + const QString &devType = message["ledDeviceType"].toString().trimmed(); QString full_command = command + "-" + subc; @@ -1399,7 +1414,7 @@ void JsonAPI::handleLedDeviceCommand(const QJsonObject& message, const QString& sendErrorReply("Unknown device", full_command, tan); } else -*/ { +*/ { QJsonObject config; config.insert("type", devType); LedDevice* ledDevice = nullptr; @@ -1407,27 +1422,27 @@ void JsonAPI::handleLedDeviceCommand(const QJsonObject& message, const QString& if (subc == "discover") { ledDevice = LedDeviceFactory::construct(config); - const QJsonObject& params = message["params"].toObject(); + const QJsonObject ¶ms = message["params"].toObject(); const QJsonObject devicesDiscovered = ledDevice->discover(params); - Debug(_log, "response: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + Debug(_log, "response: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() ); sendSuccessDataReply(QJsonDocument(devicesDiscovered), full_command, tan); } else if (subc == "getProperties") { ledDevice = LedDeviceFactory::construct(config); - const QJsonObject& params = message["params"].toObject(); + const QJsonObject ¶ms = message["params"].toObject(); const QJsonObject deviceProperties = ledDevice->getProperties(params); - Debug(_log, "response: [%s]", QString(QJsonDocument(deviceProperties).toJson(QJsonDocument::Compact)).toUtf8().constData()); + Debug(_log, "response: [%s]", QString(QJsonDocument(deviceProperties).toJson(QJsonDocument::Compact)).toUtf8().constData() ); sendSuccessDataReply(QJsonDocument(deviceProperties), full_command, tan); 
} else if (subc == "identify") { ledDevice = LedDeviceFactory::construct(config); - const QJsonObject& params = message["params"].toObject(); + const QJsonObject ¶ms = message["params"].toObject(); ledDevice->identify(params); sendSuccessReply(full_command, tan); @@ -1441,12 +1456,152 @@ void JsonAPI::handleLedDeviceCommand(const QJsonObject& message, const QString& } } -void JsonAPI::handleNotImplemented(const QString& command, int tan) +void JsonAPI::handleInputSourceCommand(const QJsonObject& message, const QString& command, int tan) +{ + DebugIf(verbose, _log, "message: [%s]", QString(QJsonDocument(message).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + const QString& subc = message["subcommand"].toString().trimmed(); + const QString& sourceType = message["sourceType"].toString().trimmed(); + + QString full_command = command + "-" + subc; + + // TODO: Validate that source type is a valid one +/* if ( ! valid type ) + { + sendErrorReply("Unknown device", full_command, tan); + } + else +*/ { + if (subc == "discover") + { + QJsonObject inputSourcesDiscovered; + inputSourcesDiscovered.insert("sourceType", sourceType); + QJsonArray videoInputs; + +#if defined(ENABLE_V4L2) || defined(ENABLE_MF) + + if (sourceType == "video" ) + { +#if defined(ENABLE_MF) + MFGrabber* grabber = new MFGrabber(); +#elif defined(ENABLE_V4L2) + V4L2Grabber* grabber = new V4L2Grabber(); +#endif + QJsonObject params; + videoInputs = grabber->discover(params); + delete grabber; + } + else +#endif + { + DebugIf(verbose, _log, "sourceType: [%s]", QSTRING_CSTR(sourceType)); + + if (sourceType == "screen") + { + QJsonObject params; + + QJsonObject device; + #ifdef ENABLE_QT + QtGrabber* qtgrabber = new QtGrabber(); + device = qtgrabber->discover(params); + if (!device.isEmpty() ) + { + videoInputs.append(device); + } + delete qtgrabber; + #endif + + #ifdef ENABLE_DX + DirectXGrabber* dxgrabber = new DirectXGrabber(); + device = dxgrabber->discover(params); + if (!device.isEmpty() ) + 
{ + videoInputs.append(device); + } + delete dxgrabber; + #endif + + #ifdef ENABLE_X11 + X11Grabber* x11Grabber = new X11Grabber(); + device = x11Grabber->discover(params); + if (!device.isEmpty() ) + { + videoInputs.append(device); + } + delete x11Grabber; + #endif + + #ifdef ENABLE_XCB + XcbGrabber* xcbGrabber = new XcbGrabber(); + device = xcbGrabber->discover(params); + if (!device.isEmpty() ) + { + videoInputs.append(device); + } + delete xcbGrabber; + #endif + + #ifdef ENABLE_FB + FramebufferFrameGrabber* fbGrabber = new FramebufferFrameGrabber(); + device = fbGrabber->discover(params); + if (!device.isEmpty() ) + { + videoInputs.append(device); + } + delete fbGrabber; + #endif + + #if defined(ENABLE_DISPMANX) + DispmanxFrameGrabber* dispmanx = new DispmanxFrameGrabber(); + device = dispmanx->discover(params); + if (!device.isEmpty() ) + { + videoInputs.append(device); + } + delete dispmanx; + #endif + + #if defined(ENABLE_AMLOGIC) + AmlogicGrabber* amlGrabber = new AmlogicGrabber(); + device = amlGrabber->discover(params); + if (!device.isEmpty() ) + { + videoInputs.append(device); + } + delete amlGrabber; + #endif + + #if defined(ENABLE_OSX) + OsxFrameGrabber* osxGrabber = new OsxFrameGrabber(); + device = osxGrabber->discover(params); + if (!device.isEmpty() ) + { + videoInputs.append(device); + } + delete osxGrabber; + #endif + } + + } + inputSourcesDiscovered["video_sources"] = videoInputs; + + DebugIf(verbose, _log, "response: [%s]", QString(QJsonDocument(inputSourcesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + sendSuccessDataReply(QJsonDocument(inputSourcesDiscovered), full_command, tan); + } + else + { + sendErrorReply("Unknown or missing subcommand", full_command, tan); + } + } +} + +void JsonAPI::handleNotImplemented(const QString &command, int tan) { sendErrorReply("Command not implemented", command, tan); } -void JsonAPI::sendSuccessReply(const QString& command, int tan) +void JsonAPI::sendSuccessReply(const QString 
&command, int tan) { // create reply QJsonObject reply; @@ -1458,7 +1613,7 @@ void JsonAPI::sendSuccessReply(const QString& command, int tan) emit callbackMessage(reply); } -void JsonAPI::sendSuccessDataReply(const QJsonDocument& doc, const QString& command, int tan) +void JsonAPI::sendSuccessDataReply(const QJsonDocument &doc, const QString &command, int tan) { QJsonObject reply; reply["success"] = true; @@ -1472,7 +1627,7 @@ void JsonAPI::sendSuccessDataReply(const QJsonDocument& doc, const QString& comm emit callbackMessage(reply); } -void JsonAPI::sendErrorReply(const QString& error, const QString& command, int tan) +void JsonAPI::sendErrorReply(const QString &error, const QString &command, int tan) { // create reply QJsonObject reply; @@ -1485,12 +1640,12 @@ void JsonAPI::sendErrorReply(const QString& error, const QString& command, int t emit callbackMessage(reply); } -void JsonAPI::streamLedcolorsUpdate(const std::vector& ledColors) +void JsonAPI::streamLedcolorsUpdate(const std::vector &ledColors) { QJsonObject result; QJsonArray leds; - for (const auto& color : ledColors) + for (const auto &color : ledColors) { leds << QJsonValue(color.red) << QJsonValue(color.green) << QJsonValue(color.blue); } @@ -1502,9 +1657,9 @@ void JsonAPI::streamLedcolorsUpdate(const std::vector& ledColors) emit callbackMessage(_streaming_leds_reply); } -void JsonAPI::setImage(const Image& image) +void JsonAPI::setImage(const Image &image) { - QImage jpgImage((const uint8_t*)image.memptr(), image.width(), image.height(), 3 * image.width(), QImage::Format_RGB888); + QImage jpgImage((const uint8_t *)image.memptr(), image.width(), image.height(), 3 * image.width(), QImage::Format_RGB888); QByteArray ba; QBuffer buffer(&ba); buffer.open(QIODevice::WriteOnly); @@ -1516,7 +1671,7 @@ void JsonAPI::setImage(const Image& image) emit callbackMessage(_streaming_image_reply); } -void JsonAPI::incommingLogMessage(const Logger::T_LOG_MESSAGE& msg) +void JsonAPI::incommingLogMessage(const 
Logger::T_LOG_MESSAGE &msg) { QJsonObject result, message; QJsonArray messageArray; @@ -1524,7 +1679,7 @@ void JsonAPI::incommingLogMessage(const Logger::T_LOG_MESSAGE& msg) if (!_streaming_logging_activated) { _streaming_logging_activated = true; - const QList* logBuffer = LoggerManager::getInstance()->getLogMessageBuffer(); + const QList *logBuffer = LoggerManager::getInstance()->getLogMessageBuffer(); for (int i = 0; i < logBuffer->length(); i++) { message["appName"] = logBuffer->at(i).appName; @@ -1560,7 +1715,7 @@ void JsonAPI::incommingLogMessage(const Logger::T_LOG_MESSAGE& msg) emit callbackMessage(_streaming_logging_reply); } -void JsonAPI::newPendingTokenRequest(const QString& id, const QString& comment) +void JsonAPI::newPendingTokenRequest(const QString &id, const QString &comment) { QJsonObject obj; obj["comment"] = comment; @@ -1570,7 +1725,7 @@ void JsonAPI::newPendingTokenRequest(const QString& id, const QString& comment) sendSuccessDataReply(QJsonDocument(obj), "authorize-tokenRequest", 1); } -void JsonAPI::handleTokenResponse(bool success, const QString& token, const QString& comment, const QString& id, const int& tan) +void JsonAPI::handleTokenResponse(bool success, const QString &token, const QString &comment, const QString &id, const int &tan) { const QString cmd = "authorize-requestToken"; QJsonObject result; @@ -1584,7 +1739,7 @@ void JsonAPI::handleTokenResponse(bool success, const QString& token, const QStr sendErrorReply("Token request timeout or denied", cmd, tan); } -void JsonAPI::handleInstanceStateChange(InstanceState state, quint8 instance, const QString& name) +void JsonAPI::handleInstanceStateChange(InstanceState state, quint8 instance, const QString &name) { switch (state) { diff --git a/libsrc/effectengine/Effect.cpp b/libsrc/effectengine/Effect.cpp index d1d97518..b30335fd 100644 --- a/libsrc/effectengine/Effect.cpp +++ b/libsrc/effectengine/Effect.cpp @@ -19,6 +19,7 @@ Effect::Effect(Hyperion *hyperion, int priority, int 
timeout, const QString &scr , _hyperion(hyperion) , _priority(priority) , _timeout(timeout) + , _isEndless(timeout <= ENDLESS) , _script(script) , _name(name) , _args(args) @@ -51,7 +52,7 @@ Effect::~Effect() bool Effect::isInterruptionRequested() { - return _interupt || getRemaining() < ENDLESS; + return _interupt || (!_isEndless && getRemaining() <= 0); } int Effect::getRemaining() const @@ -59,12 +60,11 @@ int Effect::getRemaining() const // determine the timeout int timeout = _timeout; - if (timeout > 0) + if (timeout >= 0) { timeout = static_cast( _endTime - QDateTime::currentMSecsSinceEpoch()); - return timeout; } - return ENDLESS; + return timeout; } void Effect::setModuleParameters() diff --git a/libsrc/grabber/CMakeLists.txt b/libsrc/grabber/CMakeLists.txt index 302aece4..5c0feea1 100644 --- a/libsrc/grabber/CMakeLists.txt +++ b/libsrc/grabber/CMakeLists.txt @@ -12,24 +12,24 @@ endif (ENABLE_FB) if (ENABLE_OSX) add_subdirectory(osx) -endif() +endif(ENABLE_OSX) -if (ENABLE_V4L2) - add_subdirectory(v4l2) -endif (ENABLE_V4L2) +if (ENABLE_V4L2 OR ENABLE_MF) + add_subdirectory(video) +endif () if (ENABLE_X11) add_subdirectory(x11) -endif() +endif(ENABLE_X11) if (ENABLE_XCB) add_subdirectory(xcb) -endif() +endif(ENABLE_XCB) if (ENABLE_QT) add_subdirectory(qt) -endif() +endif(ENABLE_QT) if (ENABLE_DX) add_subdirectory(directx) -endif() +endif(ENABLE_DX) diff --git a/libsrc/grabber/amlogic/AmlogicGrabber.cpp b/libsrc/grabber/amlogic/AmlogicGrabber.cpp index d4ee2f08..19981e65 100644 --- a/libsrc/grabber/amlogic/AmlogicGrabber.cpp +++ b/libsrc/grabber/amlogic/AmlogicGrabber.cpp @@ -2,7 +2,6 @@ #include #include #include -#include // Linux includes #include @@ -12,156 +11,323 @@ #include #include +// qt +#include +#include +#include +#include +#include + // Local includes #include #include #include "Amvideocap.h" -#define VIDEO_DEVICE "/dev/amvideo" -#define CAPTURE_DEVICE "/dev/amvideocap0" +// Constants +namespace { +const bool verbose = false; 
-AmlogicGrabber::AmlogicGrabber(unsigned width, unsigned height) - : Grabber("AMLOGICGRABBER", qMax(160u, width), qMax(160u, height)) // Minimum required width or height is 160 - , _captureDev(-1) - , _videoDev(-1) - , _lastError(0) - , _fbGrabber("/dev/fb0",width,height) - , _grabbingModeNotification(0) +const char DEFAULT_FB_DEVICE[] = "/dev/fb0"; +const char DEFAULT_VIDEO_DEVICE[] = "/dev/amvideo"; +const char DEFAULT_CAPTURE_DEVICE[] = "/dev/amvideocap0"; + +const int AMVIDEOCAP_WAIT_MAX_MS = 50; + +} //End of constants + +AmlogicGrabber::AmlogicGrabber() + : Grabber("AMLOGICGRABBER") // Minimum required width or height is 160 + , _captureDev(-1) + , _videoDev(-1) + , _lastError(0) + , _fbGrabber(DEFAULT_FB_DEVICE) + , _grabbingModeNotification(0) { - Debug(_log, "constructed(%d x %d), grabber device: %s",_width,_height, CAPTURE_DEVICE); - - _image_bgr.resize(_width, _height); - _bytesToRead = _image_bgr.size(); _image_ptr = _image_bgr.memptr(); + + _useImageResampler = true; } AmlogicGrabber::~AmlogicGrabber() { - closeDev(_captureDev); - closeDev(_videoDev); + closeDevice(_captureDev); + closeDevice(_videoDev); } -bool AmlogicGrabber::openDev(int &fd, const char* dev) +bool AmlogicGrabber::setupScreen() +{ + bool rc (false); + + QSize screenSize = _fbGrabber.getScreenSize(DEFAULT_FB_DEVICE); + if ( !screenSize.isEmpty() ) + { + if (setWidthHeight(screenSize.width(), screenSize.height())) + { + rc = _fbGrabber.setupScreen(); + } + } + return rc; +} + +bool AmlogicGrabber::openDevice(int &fd, const char* dev) { if (fd<0) { - fd = open(dev, O_RDWR); + fd = ::open(dev, O_RDWR); } return fd >= 0; } -void AmlogicGrabber::closeDev(int &fd) +void AmlogicGrabber::closeDevice(int &fd) { if (fd >= 0) { - close(fd); + ::close(fd); fd = -1; } } bool AmlogicGrabber::isVideoPlaying() { - if(!QFile::exists(VIDEO_DEVICE)) return false; + bool rc = false; + if(QFile::exists(DEFAULT_VIDEO_DEVICE)) + { - int videoDisabled = 1; - if (!openDev(_videoDev, VIDEO_DEVICE)) - { - 
Error(_log, "Failed to open video device(%s): %d - %s", VIDEO_DEVICE, errno, strerror(errno)); - return false; - } - else - { - // Check the video disabled flag - if(ioctl(_videoDev, AMSTREAM_IOC_GET_VIDEO_DISABLE, &videoDisabled) < 0) + int videoDisabled = 1; + if (!openDevice(_videoDev, DEFAULT_VIDEO_DEVICE)) { - Error(_log, "Failed to retrieve video state from device: %d - %s", errno, strerror(errno)); - closeDev(_videoDev); - return false; + Error(_log, "Failed to open video device(%s): %d - %s", DEFAULT_VIDEO_DEVICE, errno, strerror(errno)); + } + else + { + // Check the video disabled flag + if(ioctl(_videoDev, AMSTREAM_IOC_GET_VIDEO_DISABLE, &videoDisabled) < 0) + { + Error(_log, "Failed to retrieve video state from device: %d - %s", errno, strerror(errno)); + closeDevice(_videoDev); + } + else + { + if ( videoDisabled == 0 ) + { + rc = true; + } + } } - } - return videoDisabled == 0; + } + return rc; } int AmlogicGrabber::grabFrame(Image & image) { - if (!_enabled) return 0; - - // Make sure video is playing, else there is nothing to grab - if (isVideoPlaying()) + int rc = 0; + if (_isEnabled && !_isDeviceInError) { - if (_grabbingModeNotification!=1) + // Make sure video is playing, else there is nothing to grab + if (isVideoPlaying()) { - Info(_log, "VPU mode"); - _grabbingModeNotification = 1; - _lastError = 0; - } + if (_grabbingModeNotification!=1) + { + Info(_log, "Switch to VPU capture mode"); + _grabbingModeNotification = 1; + _lastError = 0; + } - if (grabFrame_amvideocap(image) < 0) - closeDev(_captureDev); - } - else - { - if (_grabbingModeNotification!=2) + if (grabFrame_amvideocap(image) < 0) { + closeDevice(_captureDev); + rc = -1; + } + } + else { - Info( _log, "FB mode"); - _grabbingModeNotification = 2; - _lastError = 0; + if (_grabbingModeNotification!=2) + { + Info( _log, "Switch to Framebuffer capture mode"); + _grabbingModeNotification = 2; + _lastError = 0; + } + rc = _fbGrabber.grabFrame(image); + + //usleep(50 * 1000); } - 
_fbGrabber.grabFrame(image); - - usleep(50 * 1000); } - - return 0; + return rc; } - int AmlogicGrabber::grabFrame_amvideocap(Image & image) { + int rc = 0; + // If the device is not open, attempt to open it if (_captureDev < 0) { - if (! openDev(_captureDev, CAPTURE_DEVICE)) + if (! openDevice(_captureDev, DEFAULT_CAPTURE_DEVICE)) { ErrorIf( _lastError != 1, _log,"Failed to open the AMLOGIC device (%d - %s):", errno, strerror(errno)); _lastError = 1; - return -1; + rc = -1; + return rc; } + } - long r1 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH, _width); - long r2 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT, _height); - long r3 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_AT_FLAGS, CAP_FLAG_AT_END); - long r4 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WAIT_MAX_MS, 500); + long r1 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH, _width); + long r2 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT, _height); + long r3 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_AT_FLAGS, CAP_FLAG_AT_END); + long r4 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WAIT_MAX_MS, AMVIDEOCAP_WAIT_MAX_MS); - if (r1<0 || r2<0 || r3<0 || r4<0 || _height==0 || _width==0) + if (r1<0 || r2<0 || r3<0 || r4<0 || _height==0 || _width==0) + { + ErrorIf(_lastError != 2,_log,"Failed to configure capture device (%d - %s)", errno, strerror(errno)); + _lastError = 2; + rc = -1; + } + else + { + int linelen = ((_width + 31) & ~31) * 3; + size_t _bytesToRead = linelen * _height; + + // Read the snapshot into the memory + ssize_t bytesRead = pread(_captureDev, _image_ptr, _bytesToRead, 0); + if (bytesRead < 0) { - ErrorIf(_lastError != 2,_log,"Failed to configure capture device (%d - %s)", errno, strerror(errno)); - _lastError = 2; - return -1; + int state; + ioctl(_captureDev, AMVIDEOCAP_IOR_GET_STATE, &state); + if (state == AMVIDEOCAP_STATE_ON_CAPTURE) + { + DebugIf(_lastError != 5, _log,"Video playback has been paused"); + _lastError = 5; + } 
+ else + { + ErrorIf(_lastError != 3, _log,"Read of device failed: %d - %s", errno, strerror(errno)); + _lastError = 3; + } + rc = -1; + } + else + { + if (static_cast(_bytesToRead) != bytesRead) + { + // Read of snapshot failed + ErrorIf(_lastError != 4, _log,"Capture failed to grab entire image [bytesToRead(%d) != bytesRead(%d)]", _bytesToRead, bytesRead); + _lastError = 4; + rc = -1; + } + else { + _imageResampler.processImage(static_cast(_image_ptr), + _width, + _height, + linelen, + PixelFormat::BGR24, image); + _lastError = 0; + rc = 0; + } + } + } + return rc; +} + +QJsonObject AmlogicGrabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + + if(QFile::exists(DEFAULT_VIDEO_DEVICE) && QFile::exists(DEFAULT_CAPTURE_DEVICE) ) + { + QJsonArray video_inputs; + + QSize screenSize = _fbGrabber.getScreenSize(); + if ( !screenSize.isEmpty() ) + { + int fbIdx = _fbGrabber.getPath().rightRef(1).toInt(); + + DebugIf(verbose, _log, "FB device [%s] found with resolution: %dx%d", QSTRING_CSTR(_fbGrabber.getPath()), screenSize.width(), screenSize.height()); + QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 }; + + QJsonObject in; + + QString displayName; + displayName = QString("Display%1").arg(fbIdx); + + in["name"] = displayName; + in["inputIdx"] = fbIdx; + + QJsonArray formats; + QJsonObject format; + + QJsonArray resolutionArray; + + QJsonObject resolution; + + resolution["width"] = screenSize.width(); + resolution["height"] = screenSize.height(); + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + + if (!video_inputs.isEmpty()) + { + inputsDiscovered["device"] = "amlogic"; + inputsDiscovered["device_name"] = "AmLogic"; + inputsDiscovered["type"] = "screen"; + 
inputsDiscovered["video_inputs"] = video_inputs; } } - // Read the snapshot into the memory - ssize_t bytesRead = pread(_captureDev, _image_ptr, _bytesToRead, 0); - - if (bytesRead < 0) + if (inputsDiscovered.isEmpty()) { - ErrorIf(_lastError != 3, _log,"Read of device failed: %d - %s", errno, strerror(errno)); - _lastError = 3; - return -1; - } - else if (_bytesToRead != bytesRead) - { - // Read of snapshot failed - ErrorIf(_lastError != 4, _log,"Capture failed to grab entire image [bytesToRead(%d) != bytesRead(%d)]", _bytesToRead, bytesRead); - _lastError = 4; - return -1; + DebugIf(verbose, _log, "No displays found to capture from!"); } - _useImageResampler = true; - _imageResampler.processImage((const uint8_t*)_image_ptr, _width, _height, (_width << 1) + _width, PixelFormat::BGR24, image); - _lastError = 0; + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); - return 0; + return inputsDiscovered; +} + +void AmlogicGrabber::setVideoMode(VideoMode mode) +{ + Grabber::setVideoMode(mode); + _fbGrabber.setVideoMode(mode); +} + +bool AmlogicGrabber::setPixelDecimation(int pixelDecimation) +{ + return ( Grabber::setPixelDecimation( pixelDecimation) && + _fbGrabber.setPixelDecimation( pixelDecimation)); +} + +void AmlogicGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) +{ + Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom); + _fbGrabber.setCropping(cropLeft, cropRight, cropTop, cropBottom); +} + +bool AmlogicGrabber::setWidthHeight(int width, int height) +{ + bool rc (false); + if ( Grabber::setWidthHeight(width, height) ) + { + _image_bgr.resize(static_cast(width), static_cast(height)); + _width = width; + _height = height; + _bytesToRead = _image_bgr.size(); + _image_ptr = _image_bgr.memptr(); + rc = _fbGrabber.setWidthHeight(width, height); + } + return rc; +} + +bool AmlogicGrabber::setFramerate(int fps) +{ + return 
(Grabber::setFramerate(fps) && + _fbGrabber.setFramerate(fps)); } diff --git a/libsrc/grabber/amlogic/AmlogicWrapper.cpp b/libsrc/grabber/amlogic/AmlogicWrapper.cpp index a330bdd8..25581b2c 100644 --- a/libsrc/grabber/amlogic/AmlogicWrapper.cpp +++ b/libsrc/grabber/amlogic/AmlogicWrapper.cpp @@ -1,9 +1,11 @@ #include -AmlogicWrapper::AmlogicWrapper(unsigned grabWidth, unsigned grabHeight) - : GrabberWrapper("AmLogic", &_grabber, grabWidth, grabHeight) - , _grabber(grabWidth, grabHeight) -{} +AmlogicWrapper::AmlogicWrapper(int pixelDecimation, int updateRate_Hz) + : GrabberWrapper("Amlogic", &_grabber, updateRate_Hz) + , _grabber() +{ + _grabber.setPixelDecimation(pixelDecimation); +} void AmlogicWrapper::action() { diff --git a/libsrc/grabber/amlogic/Amvideocap.h b/libsrc/grabber/amlogic/Amvideocap.h index 40c58b33..f90b9714 100644 --- a/libsrc/grabber/amlogic/Amvideocap.h +++ b/libsrc/grabber/amlogic/Amvideocap.h @@ -11,11 +11,35 @@ #define CAP_FLAG_AT_TIME_WINDOW 1 #define CAP_FLAG_AT_END 2 -// #define AMVIDEOCAP_IOW_SET_WANTFRAME_FORMAT _IOW(AMVIDEOCAP_IOC_MAGIC, 0x01, int) +#define AMVIDEOCAP_IOW_SET_WANTFRAME_FORMAT _IOW(AMVIDEOCAP_IOC_MAGIC, 0x01, int) #define AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH _IOW(AMVIDEOCAP_IOC_MAGIC, 0x02, int) #define AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT _IOW(AMVIDEOCAP_IOC_MAGIC, 0x03, int) +#define AMVIDEOCAP_IOW_SET_WANTFRAME_TIMESTAMP_MS _IOW(AMVIDEOCAP_IOC_MAGIC, 0x04, unsigned long long) #define AMVIDEOCAP_IOW_SET_WANTFRAME_WAIT_MAX_MS _IOW(AMVIDEOCAP_IOC_MAGIC, 0x05, unsigned long long) #define AMVIDEOCAP_IOW_SET_WANTFRAME_AT_FLAGS _IOW(AMVIDEOCAP_IOC_MAGIC, 0x06, int) +#define AMVIDEOCAP_IOR_GET_FRAME_FORMAT _IOR(AMVIDEOCAP_IOC_MAGIC, 0x10, int) +#define AMVIDEOCAP_IOR_GET_FRAME_WIDTH _IOR(AMVIDEOCAP_IOC_MAGIC, 0x11, int) +#define AMVIDEOCAP_IOR_GET_FRAME_HEIGHT _IOR(AMVIDEOCAP_IOC_MAGIC, 0x12, int) +#define AMVIDEOCAP_IOR_GET_FRAME_TIMESTAMP_MS _IOR(AMVIDEOCAP_IOC_MAGIC, 0x13, int) + +#define AMVIDEOCAP_IOR_GET_SRCFRAME_FORMAT 
_IOR(AMVIDEOCAP_IOC_MAGIC, 0x20, int) +#define AMVIDEOCAP_IOR_GET_SRCFRAME_WIDTH _IOR(AMVIDEOCAP_IOC_MAGIC, 0x21, int) +#define AMVIDEOCAP_IOR_GET_SRCFRAME_HEIGHT _IOR(AMVIDEOCAP_IOC_MAGIC, 0x22, int) + +#define AMVIDEOCAP_IOR_GET_STATE _IOR(AMVIDEOCAP_IOC_MAGIC, 0x31, int) +#define AMVIDEOCAP_IOW_SET_START_CAPTURE _IOW(AMVIDEOCAP_IOC_MAGIC, 0x32, int) +#define AMVIDEOCAP_IOW_SET_CANCEL_CAPTURE _IOW(AMVIDEOCAP_IOC_MAGIC, 0x33, int) + #define _A_M 'S' -#define AMSTREAM_IOC_GET_VIDEO_DISABLE _IOR((_A_M), 0x48, int) +#define AMSTREAM_IOC_GET_VIDEO_DISABLE _IOR((_A_M), 0x48, int) + +#define AMVIDEOCAP_IOC_MAGIC 'V' +#define AMVIDEOCAP_IOW_SET_START_CAPTURE _IOW(AMVIDEOCAP_IOC_MAGIC, 0x32, int) + + enum amvideocap_state{ + AMVIDEOCAP_STATE_INIT=0, + AMVIDEOCAP_STATE_ON_CAPTURE=200, + AMVIDEOCAP_STATE_FINISHED_CAPTURE=300, + AMVIDEOCAP_STATE_ERROR=0xffff, + }; diff --git a/libsrc/grabber/directx/DirectXGrabber.cpp b/libsrc/grabber/directx/DirectXGrabber.cpp index 6edadb74..581d80b3 100644 --- a/libsrc/grabber/directx/DirectXGrabber.cpp +++ b/libsrc/grabber/directx/DirectXGrabber.cpp @@ -4,9 +4,13 @@ #pragma comment(lib, "d3d9.lib") #pragma comment(lib,"d3dx9.lib") -DirectXGrabber::DirectXGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display) - : Grabber("DXGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom) - , _pixelDecimation(pixelDecimation) +// Constants +namespace { + const bool verbose = true; +} //End of constants + +DirectXGrabber::DirectXGrabber(int display, int cropLeft, int cropRight, int cropTop, int cropBottom) + : Grabber("DXGRABBER", cropLeft, cropRight, cropTop, cropBottom) , _display(unsigned(display)) , _displayWidth(0) , _displayHeight(0) @@ -15,8 +19,6 @@ DirectXGrabber::DirectXGrabber(int cropLeft, int cropRight, int cropTop, int cro , _device(nullptr) , _surface(nullptr) { - // init - setupDisplay(); } DirectXGrabber::~DirectXGrabber() @@ -140,15 +142,24 @@ bool DirectXGrabber::setupDisplay() int 
DirectXGrabber::grabFrame(Image & image) { - if (!_enabled) + if (!_isEnabled) + { + qDebug() << "AUS"; return 0; + } + + if (_device == nullptr) + { + // reinit, this will disable capture on failure + bool result = setupDisplay(); + setEnabled(result); + return -1; + } if (FAILED(_device->GetFrontBufferData(0, _surface))) { - // reinit, this will disable capture on failure Error(_log, "Unable to get Buffer Surface Data"); - setEnabled(setupDisplay()); - return -1; + return 0; } D3DXLoadSurfaceFromSurface(_surfaceDest, nullptr, nullptr, _surface, nullptr, _srcRect, D3DX_DEFAULT, 0); @@ -181,22 +192,91 @@ void DirectXGrabber::setVideoMode(VideoMode mode) setupDisplay(); } -void DirectXGrabber::setPixelDecimation(int pixelDecimation) +bool DirectXGrabber::setPixelDecimation(int pixelDecimation) { - _pixelDecimation = pixelDecimation; + if(Grabber::setPixelDecimation(pixelDecimation)) + return setupDisplay(); + + return false; } -void DirectXGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) +void DirectXGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) { Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom); setupDisplay(); } -void DirectXGrabber::setDisplayIndex(int index) +bool DirectXGrabber::setDisplayIndex(int index) { + bool rc (true); if(_display != unsigned(index)) { _display = unsigned(index); - setupDisplay(); + rc = setupDisplay(); } + return rc; +} + +QJsonObject DirectXGrabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + if ((_d3d9 = Direct3DCreate9(D3D_SDK_VERSION)) != nullptr) + { + int adapterCount = (int)_d3d9->GetAdapterCount(); + if(adapterCount > 0) + { + inputsDiscovered["device"] = "dx"; + inputsDiscovered["device_name"] = "DX"; + inputsDiscovered["type"] = "screen"; + + QJsonArray video_inputs; + 
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 }; + + for(int adapter = 0; adapter < adapterCount; adapter++) + { + QJsonObject in; + in["inputIdx"] = adapter; + + D3DADAPTER_IDENTIFIER9 identifier; + _d3d9->GetAdapterIdentifier(adapter, D3DENUM_WHQL_LEVEL, &identifier); + + QString name = identifier.DeviceName; + int pos = name.lastIndexOf('\\'); + if (pos != -1) + name = name.right(name.length()-pos-1); + + in["name"] = name; + + D3DDISPLAYMODE ddm; + _d3d9->GetAdapterDisplayMode(adapter, &ddm); + + QJsonArray formats, resolutionArray; + QJsonObject format, resolution; + + resolution["width"] = (int)ddm.Width; + resolution["height"] = (int)ddm.Height; + resolution["fps"] = fps; + + resolutionArray.append(resolution); + format["resolutions"] = resolutionArray; + + formats.append(format); + + + in["formats"] = formats; + video_inputs.append(in); + } + inputsDiscovered["video_inputs"] = video_inputs; + } + else + { + DebugIf(verbose, _log, "No displays found to capture from!"); + } + } + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; + } diff --git a/libsrc/grabber/directx/DirectXWrapper.cpp b/libsrc/grabber/directx/DirectXWrapper.cpp index 26018903..3c4188e5 100644 --- a/libsrc/grabber/directx/DirectXWrapper.cpp +++ b/libsrc/grabber/directx/DirectXWrapper.cpp @@ -1,9 +1,16 @@ #include -DirectXWrapper::DirectXWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display, const unsigned updateRate_Hz) - : GrabberWrapper("DirectX", &_grabber, 0, 0, updateRate_Hz) - , _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation, display) -{} +DirectXWrapper::DirectXWrapper( int updateRate_Hz, + int display, + int pixelDecimation, + int cropLeft, int cropRight, int cropTop, int cropBottom + ) + : GrabberWrapper("DirectX", &_grabber, updateRate_Hz) + , _grabber(display, cropLeft, cropRight, cropTop, 
cropBottom) + +{ + _grabber.setPixelDecimation(pixelDecimation); +} void DirectXWrapper::action() { diff --git a/libsrc/grabber/dispmanx/DispmanxFrameGrabber.cpp b/libsrc/grabber/dispmanx/DispmanxFrameGrabber.cpp index 709168c5..631ff28e 100644 --- a/libsrc/grabber/dispmanx/DispmanxFrameGrabber.cpp +++ b/libsrc/grabber/dispmanx/DispmanxFrameGrabber.cpp @@ -3,48 +3,34 @@ #include #include +//Qt +#include +#include +#include +#include + +// Constants +namespace { +const bool verbose = false; +const int DEFAULT_DEVICE = 0; + +} //End of constants + // Local includes #include "grabber/DispmanxFrameGrabber.h" -DispmanxFrameGrabber::DispmanxFrameGrabber(unsigned width, unsigned height) - : Grabber("DISPMANXGRABBER", 0, 0) - , _vc_display(0) - , _vc_resource(0) - , _vc_flags(0) - , _captureBuffer(new ColorRgba[0]) - , _captureBufferSize(0) - , _image_rgba(width, height) +DispmanxFrameGrabber::DispmanxFrameGrabber() + : Grabber("DISPMANXGRABBER") + , _vc_display(0) + , _vc_resource(0) + , _vc_flags(DISPMANX_TRANSFORM_T(0)) + , _captureBuffer(new ColorRgba[0]) + , _captureBufferSize(0) + , _image_rgba() { - _useImageResampler = false; - - // Initiase BCM + _useImageResampler = true; + // Initialise BCM bcm_host_init(); - - // Check if the display can be opened and display the current resolution - // Open the connection to the display - _vc_display = vc_dispmanx_display_open(0); - assert(_vc_display > 0); - - // Obtain the display information - DISPMANX_MODEINFO_T vc_info; - int result = vc_dispmanx_display_get_info(_vc_display, &vc_info); - // Keep compiler happy in 'release' mode - (void)result; - - // Close the display - vc_dispmanx_display_close(_vc_display); - - if(result != 0) - { - Error(_log, "Failed to open display! 
Probably no permissions to access the capture interface"); - setEnabled(false); - return; - } - else - Info(_log, "Display opened with resolution: %dx%d", vc_info.width, vc_info.height); - - // init the resource and capture rectangle - setWidthHeight(width, height); } DispmanxFrameGrabber::~DispmanxFrameGrabber() @@ -55,6 +41,28 @@ DispmanxFrameGrabber::~DispmanxFrameGrabber() bcm_host_deinit(); } +bool DispmanxFrameGrabber::setupScreen() +{ + bool rc (false); + + int deviceIdx (DEFAULT_DEVICE); + + QSize screenSize = getScreenSize(deviceIdx); + if ( screenSize.isEmpty() ) + { + Error(_log, "Failed to open display [%d]! Probably no permissions to access the capture interface", deviceIdx); + setEnabled(false); + } + else + { + setWidthHeight(screenSize.width(), screenSize.height()); + Info(_log, "Display [%d] opened with resolution: %dx%d", deviceIdx, screenSize.width(), screenSize.height()); + setEnabled(true); + rc = true; + } + return rc; +} + void DispmanxFrameGrabber::freeResources() { delete[] _captureBuffer; @@ -64,152 +72,219 @@ void DispmanxFrameGrabber::freeResources() bool DispmanxFrameGrabber::setWidthHeight(int width, int height) { + bool rc = false; if(Grabber::setWidthHeight(width, height)) { - if(_vc_resource != 0) + if(_vc_resource != 0) { vc_dispmanx_resource_delete(_vc_resource); - // Create the resources for capturing image + } + + Debug(_log,"Create the resources for capturing image"); uint32_t vc_nativeImageHandle; _vc_resource = vc_dispmanx_resource_create( - VC_IMAGE_RGBA32, - width, - height, - &vc_nativeImageHandle); + VC_IMAGE_RGBA32, + width, + height, + &vc_nativeImageHandle); assert(_vc_resource); - // Define the capture rectangle with the same size - vc_dispmanx_rect_set(&_rectangle, 0, 0, width, height); - return true; + if (_vc_resource != 0) + { + Debug(_log,"Define the capture rectangle with the same size"); + vc_dispmanx_rect_set(&_rectangle, 0, 0, width, height); + rc = true; + } } - return false; + return rc; } -void 
DispmanxFrameGrabber::setFlags(int vc_flags) +void DispmanxFrameGrabber::setFlags(DISPMANX_TRANSFORM_T vc_flags) { _vc_flags = vc_flags; } int DispmanxFrameGrabber::grabFrame(Image & image) { - if (!_enabled) return 0; - - int ret; - - // vc_dispmanx_resource_read_data doesn't seem to work well - // with arbitrary positions so we have to handle cropping by ourselves - unsigned cropLeft = _cropLeft; - unsigned cropRight = _cropRight; - unsigned cropTop = _cropTop; - unsigned cropBottom = _cropBottom; - - if (_vc_flags & DISPMANX_SNAPSHOT_FILL) + int rc = 0; + if (_isEnabled && !_isDeviceInError) { - // disable cropping, we are capturing the video overlay window - cropLeft = cropRight = cropTop = cropBottom = 0; - } + // vc_dispmanx_resource_read_data doesn't seem to work well + // with arbitrary positions so we have to handle cropping by ourselves + int cropLeft = _cropLeft; + int cropRight = _cropRight; + int cropTop = _cropTop; + int cropBottom = _cropBottom; - unsigned imageWidth = _width - cropLeft - cropRight; - unsigned imageHeight = _height - cropTop - cropBottom; - - // calculate final image dimensions and adjust top/left cropping in 3D modes - switch (_videoMode) - { - case VideoMode::VIDEO_3DSBS: - imageWidth /= 2; - cropLeft /= 2; - break; - case VideoMode::VIDEO_3DTAB: - imageHeight /= 2; - cropTop /= 2; - break; - case VideoMode::VIDEO_2D: - default: - break; - } - - // resize the given image if needed - if (image.width() != imageWidth || image.height() != imageHeight) - { - image.resize(imageWidth, imageHeight); - } - - if (_image_rgba.width() != imageWidth || _image_rgba.height() != imageHeight) - { - _image_rgba.resize(imageWidth, imageHeight); - } - - // Open the connection to the display - _vc_display = vc_dispmanx_display_open(0); - if (_vc_display < 0) - { - Error(_log, "Cannot open display: %d", _vc_display); - return -1; - } - - // Create the snapshot (incl down-scaling) - ret = vc_dispmanx_snapshot(_vc_display, _vc_resource, 
(DISPMANX_TRANSFORM_T) _vc_flags); - if (ret < 0) - { - Error(_log, "Snapshot failed: %d", ret); - vc_dispmanx_display_close(_vc_display); - return ret; - } - - // Read the snapshot into the memory - void* imagePtr = _image_rgba.memptr(); - void* capturePtr = imagePtr; - - unsigned imagePitch = imageWidth * sizeof(ColorRgba); - - // dispmanx seems to require the pitch to be a multiple of 64 - unsigned capturePitch = (_rectangle.width * sizeof(ColorRgba) + 63) & (~63); - - // grab to temp buffer if image pitch isn't valid or if we are cropping - if (imagePitch != capturePitch - || (unsigned)_rectangle.width != imageWidth - || (unsigned)_rectangle.height != imageHeight) - { - // check if we need to resize the capture buffer - unsigned captureSize = capturePitch * _rectangle.height / sizeof(ColorRgba); - if (_captureBufferSize != captureSize) + if (_vc_flags & DISPMANX_SNAPSHOT_FILL) { - delete[] _captureBuffer; - _captureBuffer = new ColorRgba[captureSize]; - _captureBufferSize = captureSize; + // disable cropping, we are capturing the video overlay window + Debug(_log,"Disable cropping, as the video overlay window is captured"); + cropLeft = cropRight = cropTop = cropBottom = 0; } - capturePtr = &_captureBuffer[0]; - } + unsigned imageWidth = static_cast(_width - cropLeft - cropRight); + unsigned imageHeight = static_cast(_height - cropTop - cropBottom); - ret = vc_dispmanx_resource_read_data(_vc_resource, &_rectangle, capturePtr, capturePitch); - if (ret < 0) - { - Error(_log, "vc_dispmanx_resource_read_data failed: %d", ret); - vc_dispmanx_display_close(_vc_display); - return ret; - } - - // copy capture data to image if we captured to temp buffer - if (imagePtr != capturePtr) - { - // adjust source pointer to top/left cropping - uint8_t* src_ptr = (uint8_t*) capturePtr - + cropLeft * sizeof(ColorRgba) - + cropTop * capturePitch; - - for (unsigned y = 0; y < imageHeight; y++) + // resize the given image if needed + if (image.width() != imageWidth || image.height() 
!= imageHeight) { - memcpy((uint8_t*)imagePtr + y * imagePitch, - src_ptr + y * capturePitch, - imagePitch); + image.resize(imageWidth, imageHeight); + } + + if (_image_rgba.width() != imageWidth || _image_rgba.height() != imageHeight) + { + _image_rgba.resize(imageWidth, imageHeight); + } + + // Open the connection to the display + _vc_display = vc_dispmanx_display_open(DEFAULT_DEVICE); + if (_vc_display < 0) + { + Error(_log, "Cannot open display: %d", DEFAULT_DEVICE); + rc = -1; + } + else { + + // Create the snapshot (incl down-scaling) + int ret = vc_dispmanx_snapshot(_vc_display, _vc_resource, _vc_flags); + if (ret < 0) + { + Error(_log, "Snapshot failed: %d", ret); + rc = ret; + } + else + { + // Read the snapshot into the memory + void* imagePtr = _image_rgba.memptr(); + void* capturePtr = imagePtr; + + unsigned imagePitch = imageWidth * sizeof(ColorRgba); + + // dispmanx seems to require the pitch to be a multiple of 64 + unsigned capturePitch = (_rectangle.width * sizeof(ColorRgba) + 63) & (~63); + + // grab to temp buffer if image pitch isn't valid or if we are cropping + if (imagePitch != capturePitch + || static_cast(_rectangle.width) != imageWidth + || static_cast(_rectangle.height) != imageHeight) + { + // check if we need to resize the capture buffer + unsigned captureSize = capturePitch * static_cast(_rectangle.height) / sizeof(ColorRgba); + if (_captureBufferSize != captureSize) + { + delete[] _captureBuffer; + _captureBuffer = new ColorRgba[captureSize]; + _captureBufferSize = captureSize; + } + + capturePtr = &_captureBuffer[0]; + } + + ret = vc_dispmanx_resource_read_data(_vc_resource, &_rectangle, capturePtr, capturePitch); + if (ret < 0) + { + Error(_log, "vc_dispmanx_resource_read_data failed: %d", ret); + rc = ret; + } + else + { + _imageResampler.processImage(static_cast(capturePtr), + _width, + _height, + static_cast(capturePitch), + PixelFormat::RGB32, + image); + } + } + vc_dispmanx_display_close(_vc_display); } } - - // Close the 
displaye - vc_dispmanx_display_close(_vc_display); - - // image to output image - _image_rgba.toRgb(image); - - return 0; + return rc; +} + +QSize DispmanxFrameGrabber::getScreenSize(int device) const +{ + int width (0); + int height(0); + + DISPMANX_DISPLAY_HANDLE_T vc_display = vc_dispmanx_display_open(device); + if ( vc_display > 0) + { + // Obtain the display information + DISPMANX_MODEINFO_T vc_info; + int result = vc_dispmanx_display_get_info(vc_display, &vc_info); + (void)result; + + if (result == 0) + { + width = vc_info.width; + height = vc_info.height; + + DebugIf(verbose, _log, "Display found with resolution: %dx%d", width, height); + } + // Close the display + vc_dispmanx_display_close(vc_display); + } + + return QSize(width, height); +} + +QJsonObject DispmanxFrameGrabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + + int deviceIdx (DEFAULT_DEVICE); + QJsonArray video_inputs; + + QSize screenSize = getScreenSize(deviceIdx); + if ( !screenSize.isEmpty() ) + { + QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 }; + + QJsonObject in; + + QString displayName; + displayName = QString("Screen:%1").arg(deviceIdx); + + in["name"] = displayName; + in["inputIdx"] = deviceIdx; + + QJsonArray formats; + QJsonObject format; + + QJsonArray resolutionArray; + + QJsonObject resolution; + + resolution["width"] = screenSize.width(); + resolution["height"] = screenSize.height(); + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + + if (!video_inputs.isEmpty()) + { + inputsDiscovered["device"] = "dispmanx"; + inputsDiscovered["device_name"] = "DispmanX"; + inputsDiscovered["type"] = "screen"; + inputsDiscovered["video_inputs"] = video_inputs; + } + + if 
(inputsDiscovered.isEmpty()) + { + DebugIf(verbose, _log, "No displays found to capture from!"); + } + + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; } diff --git a/libsrc/grabber/dispmanx/DispmanxFrameGrabberMock.cpp b/libsrc/grabber/dispmanx/DispmanxFrameGrabberMock.cpp index 87c7f143..5c27ec18 100644 --- a/libsrc/grabber/dispmanx/DispmanxFrameGrabberMock.cpp +++ b/libsrc/grabber/dispmanx/DispmanxFrameGrabberMock.cpp @@ -5,6 +5,7 @@ unsigned __bcm_frame_counter = 0; const int __screenWidth = 800; const int __screenHeight = 600; +const int __display_num = 0; void bcm_host_init() { @@ -27,6 +28,7 @@ int vc_dispmanx_display_get_info(int, DISPMANX_MODEINFO_T *vc_info) { vc_info->width = __screenWidth; vc_info->height = __screenHeight; + vc_info->display_num = __display_num; return 0; } @@ -54,7 +56,7 @@ void vc_dispmanx_rect_set(VC_RECT_T *rectangle, int left, int top, int width, in rectangle->top = top; } -int vc_dispmanx_snapshot(int, DISPMANX_RESOURCE_HANDLE_T resource, int vc_flags) +int vc_dispmanx_snapshot(DISPMANX_DISPLAY_HANDLE_T /*display*/, DISPMANX_RESOURCE_HANDLE_T resource, DISPMANX_TRANSFORM_T /*vc_flags*/) { __bcm_frame_counter++; if (__bcm_frame_counter > 100) @@ -66,7 +68,7 @@ int vc_dispmanx_snapshot(int, DISPMANX_RESOURCE_HANDLE_T resource, int vc_flags) if (__bcm_frame_counter < 25) { color[0] = ColorRgba::WHITE; -0 color[1] = ColorRgba::RED; + color[1] = ColorRgba::RED; color[2] = ColorRgba::BLUE; color[3] = ColorRgba::GREEN; } diff --git a/libsrc/grabber/dispmanx/DispmanxWrapper.cpp b/libsrc/grabber/dispmanx/DispmanxWrapper.cpp index b37c1b7e..915a4bdc 100644 --- a/libsrc/grabber/dispmanx/DispmanxWrapper.cpp +++ b/libsrc/grabber/dispmanx/DispmanxWrapper.cpp @@ -1,10 +1,12 @@ #include -DispmanxWrapper::DispmanxWrapper(unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz) - : GrabberWrapper("Dispmanx", &_grabber, 
grabWidth, grabHeight, updateRate_Hz) - , _grabber(grabWidth, grabHeight) +DispmanxWrapper::DispmanxWrapper( int updateRate_Hz, + int pixelDecimation + ) + : GrabberWrapper("Dispmanx", &_grabber, updateRate_Hz) + , _grabber() { - + _grabber.setPixelDecimation(pixelDecimation); } void DispmanxWrapper::action() diff --git a/libsrc/grabber/framebuffer/FramebufferFrameGrabber.cpp b/libsrc/grabber/framebuffer/FramebufferFrameGrabber.cpp index db4580c9..9016a862 100644 --- a/libsrc/grabber/framebuffer/FramebufferFrameGrabber.cpp +++ b/libsrc/grabber/framebuffer/FramebufferFrameGrabber.cpp @@ -10,102 +10,261 @@ // STL includes #include +//Qt +#include +#include +#include +#include +#include + +// Constants +namespace { +const bool verbose = false; + +// fb discovery service +const char DISCOVERY_DIRECTORY[] = "/dev/"; +const char DISCOVERY_FILEPATTERN[] = "fb?"; + +} //End of constants + // Local includes #include -FramebufferFrameGrabber::FramebufferFrameGrabber(const QString & device, unsigned width, unsigned height) - : Grabber("FRAMEBUFFERGRABBER", width, height) - , _fbDevice() +FramebufferFrameGrabber::FramebufferFrameGrabber(const QString & device) + : Grabber("FRAMEBUFFERGRABBER") + , _fbDevice(device) + , _fbfd (-1) { - setDevicePath(device); + _useImageResampler = true; +} + +FramebufferFrameGrabber::~FramebufferFrameGrabber() +{ + closeDevice(); +} + +bool FramebufferFrameGrabber::setupScreen() +{ + bool rc (false); + + if ( _fbfd >= 0 ) + { + closeDevice(); + } + + rc = getScreenInfo(); + setEnabled(rc); + + return rc; +} + +bool FramebufferFrameGrabber::setWidthHeight(int width, int height) +{ + bool rc (false); + if(Grabber::setWidthHeight(width, height)) + { + rc = setupScreen(); + } + return rc; } int FramebufferFrameGrabber::grabFrame(Image & image) { - if (!_enabled) return 0; + int rc = 0; - struct fb_var_screeninfo vinfo; - unsigned capSize, bytesPerPixel; - PixelFormat pixelFormat; - - /* Open the framebuffer device */ - int fbfd = 
open(QSTRING_CSTR(_fbDevice), O_RDONLY); - if (fbfd == -1) + if (_isEnabled && !_isDeviceInError) { - Error(_log, "Error opening %s, %s : ", QSTRING_CSTR(_fbDevice), std::strerror(errno)); - return -1; - } - - /* get variable screen information */ - ioctl (fbfd, FBIOGET_VSCREENINFO, &vinfo); - - bytesPerPixel = vinfo.bits_per_pixel / 8; - capSize = vinfo.xres * vinfo.yres * bytesPerPixel; - - switch (vinfo.bits_per_pixel) - { - case 16: pixelFormat = PixelFormat::BGR16; break; - case 24: pixelFormat = PixelFormat::BGR24; break; -#ifdef ENABLE_AMLOGIC - case 32: pixelFormat = PixelFormat::PIXELFORMAT_RGB32; break; -#else - case 32: pixelFormat = PixelFormat::BGR32; break; -#endif - default: - Error(_log, "Unknown pixel format: %d bits per pixel", vinfo.bits_per_pixel); - close(fbfd); - return -1; - } - - /* map the device to memory */ - unsigned char * fbp = (unsigned char*)mmap(0, capSize, PROT_READ, MAP_PRIVATE | MAP_NORESERVE, fbfd, 0); - if (fbp == MAP_FAILED) { - Error(_log, "Error mapping %s, %s : ", QSTRING_CSTR(_fbDevice), std::strerror(errno)); - return -1; - } - - _imageResampler.setHorizontalPixelDecimation(vinfo.xres/_width); - _imageResampler.setVerticalPixelDecimation(vinfo.yres/_height); - _imageResampler.processImage(fbp, - vinfo.xres, - vinfo.yres, - vinfo.xres * bytesPerPixel, - pixelFormat, - image); - - munmap(fbp, capSize); - close(fbfd); - - return 0; -} - -void FramebufferFrameGrabber::setDevicePath(const QString& path) -{ - if(_fbDevice != path) - { - _fbDevice = path; - int result; - struct fb_var_screeninfo vinfo; - - // Check if the framebuffer device can be opened and display the current resolution - int fbfd = open(QSTRING_CSTR(_fbDevice), O_RDONLY); - if (fbfd == -1) + if ( getScreenInfo() ) { - Error(_log, "Error opening %s, %s : ", QSTRING_CSTR(_fbDevice), std::strerror(errno)); - } - else - { - // get variable screen information - result = ioctl (fbfd, FBIOGET_VSCREENINFO, &vinfo); - if (result != 0) - { - Error(_log, "Could not get 
screen information, %s", std::strerror(errno)); + /* map the device to memory */ + uint8_t * fbp = static_cast(mmap(nullptr, _fixInfo.smem_len, PROT_READ, MAP_PRIVATE | MAP_NORESERVE, _fbfd, 0)); + if (fbp == MAP_FAILED) { + + QString errorReason = QString ("Error mapping %1, [%2] %3").arg(_fbDevice).arg(errno).arg(std::strerror(errno)); + this->setInError ( errorReason ); + closeDevice(); + rc = -1; } else { - Info(_log, "Display opened with resolution: %dx%d@%dbit", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel); + _imageResampler.processImage(fbp, + static_cast(_varInfo.xres), + static_cast(_varInfo.yres), + static_cast(_fixInfo.line_length), + _pixelFormat, + image); + munmap(fbp, _fixInfo.smem_len); + } + } + closeDevice(); + } + return rc; +} + +bool FramebufferFrameGrabber::openDevice() +{ + bool rc = true; + + /* Open the framebuffer device */ + _fbfd = ::open(QSTRING_CSTR(_fbDevice), O_RDONLY); + if (_fbfd < 0) + { + QString errorReason = QString ("Error opening %1, [%2] %3").arg(_fbDevice).arg(errno).arg(std::strerror(errno)); + this->setInError ( errorReason ); + rc = false; + } + return rc; +} + +bool FramebufferFrameGrabber::closeDevice() +{ + bool rc = false; + if (_fbfd >= 0) + { + if( ::close(_fbfd) == 0) { + rc = true; + } + _fbfd = -1; + } + return rc; +} + +QSize FramebufferFrameGrabber::getScreenSize() const +{ + return getScreenSize(_fbDevice); +} + +QSize FramebufferFrameGrabber::getScreenSize(const QString& device) const +{ + int width (0); + int height(0); + + int fbfd = ::open(QSTRING_CSTR(device), O_RDONLY); + if (fbfd != -1) + { + struct fb_var_screeninfo vinfo; + int result = ioctl (fbfd, FBIOGET_VSCREENINFO, &vinfo); + if (result == 0) + { + width = static_cast(vinfo.xres); + height = static_cast(vinfo.yres); + DebugIf(verbose, _log, "FB device [%s] found with resolution: %dx%d", QSTRING_CSTR(device), width, height); + } + ::close(fbfd); + } + return QSize(width, height); +} + +bool FramebufferFrameGrabber::getScreenInfo() +{ + bool rc 
(false); + + if ( openDevice() ) + { + if (ioctl(_fbfd, FBIOGET_FSCREENINFO, &_fixInfo) < 0 || ioctl (_fbfd, FBIOGET_VSCREENINFO, &_varInfo) < 0) + { + QString errorReason = QString ("Error getting screen information for %1, [%2] %3").arg(_fbDevice).arg(errno).arg(std::strerror(errno)); + this->setInError ( errorReason ); + closeDevice(); + } + else + { + rc = true; + switch (_varInfo.bits_per_pixel) + { + case 16: _pixelFormat = PixelFormat::BGR16; + break; + case 24: _pixelFormat = PixelFormat::BGR24; + break; +#ifdef ENABLE_AMLOGIC + case 32: _pixelFormat = PixelFormat::PIXELFORMAT_RGB32; + break; +#else + case 32: _pixelFormat = PixelFormat::BGR32; + break; +#endif + default: + rc= false; + QString errorReason = QString ("Unknown pixel format: %1 bits per pixel").arg(static_cast(_varInfo.bits_per_pixel)); + this->setInError ( errorReason ); + closeDevice(); } - close(fbfd); } } + return rc; +} + +QJsonObject FramebufferFrameGrabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + + //Find framebuffer devices 0-9 + QDir deviceDirectory (DISCOVERY_DIRECTORY); + QStringList deviceFilter(DISCOVERY_FILEPATTERN); + deviceDirectory.setNameFilters(deviceFilter); + deviceDirectory.setSorting(QDir::Name); + QFileInfoList deviceFiles = deviceDirectory.entryInfoList(QDir::System); + + int fbIdx (0); + QJsonArray video_inputs; + + QFileInfoList::const_iterator deviceFileIterator; + for (deviceFileIterator = deviceFiles.constBegin(); deviceFileIterator != deviceFiles.constEnd(); ++deviceFileIterator) + { + fbIdx = (*deviceFileIterator).fileName().rightRef(1).toInt(); + QString device = (*deviceFileIterator).absoluteFilePath(); + DebugIf(verbose, _log, "FB device [%s] found", QSTRING_CSTR(device)); + + QSize screenSize = getScreenSize(device); + if ( !screenSize.isEmpty() ) + { + QJsonArray fps = { "1", "5", "10", "15", "20", 
"25", "30", "40", "50", "60" }; + + QJsonObject in; + + QString displayName; + displayName = QString("FB%1").arg(fbIdx); + + in["name"] = displayName; + in["inputIdx"] = fbIdx; + + QJsonArray formats; + QJsonObject format; + + QJsonArray resolutionArray; + + QJsonObject resolution; + + resolution["width"] = screenSize.width(); + resolution["height"] = screenSize.height(); + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + + if (!video_inputs.isEmpty()) + { + inputsDiscovered["device"] = "framebuffer"; + inputsDiscovered["device_name"] = "Framebuffer"; + inputsDiscovered["type"] = "screen"; + inputsDiscovered["video_inputs"] = video_inputs; + } + } + + if (inputsDiscovered.isEmpty()) + { + DebugIf(verbose, _log, "No displays found to capture from!"); + } + + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; } diff --git a/libsrc/grabber/framebuffer/FramebufferWrapper.cpp b/libsrc/grabber/framebuffer/FramebufferWrapper.cpp index 7b4cf095..7d99f527 100644 --- a/libsrc/grabber/framebuffer/FramebufferWrapper.cpp +++ b/libsrc/grabber/framebuffer/FramebufferWrapper.cpp @@ -1,9 +1,13 @@ #include -FramebufferWrapper::FramebufferWrapper(const QString & device, unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz) - : GrabberWrapper("FrameBuffer", &_grabber, grabWidth, grabHeight, updateRate_Hz) - , _grabber(device, grabWidth, grabHeight) -{} +FramebufferWrapper::FramebufferWrapper( int updateRate_Hz, + const QString & device, + int pixelDecimation) + : GrabberWrapper("FrameBuffer", &_grabber, updateRate_Hz) + , _grabber(device) +{ + _grabber.setPixelDecimation(pixelDecimation); +} void FramebufferWrapper::action() { diff --git a/libsrc/grabber/osx/OsxFrameGrabber.cpp b/libsrc/grabber/osx/OsxFrameGrabber.cpp 
index 69a8a705..899f1868 100644 --- a/libsrc/grabber/osx/OsxFrameGrabber.cpp +++ b/libsrc/grabber/osx/OsxFrameGrabber.cpp @@ -5,94 +5,204 @@ // Local includes #include -OsxFrameGrabber::OsxFrameGrabber(unsigned display, unsigned width, unsigned height) - : Grabber("OSXGRABBER", width, height) - , _screenIndex(100) +//Qt +#include +#include +#include + +// Constants +namespace { +const bool verbose = false; +} //End of constants + +OsxFrameGrabber::OsxFrameGrabber(int display) + : Grabber("OSXGRABBER") + , _screenIndex(display) { - // check if display is available - setDisplayIndex(display); + _isEnabled = false; + _useImageResampler = true; } OsxFrameGrabber::~OsxFrameGrabber() { } -int OsxFrameGrabber::grabFrame(Image & image) +bool OsxFrameGrabber::setupDisplay() { - if (!_enabled) return 0; + bool rc (false); - CGImageRef dispImage; - CFDataRef imgData; - unsigned char * pImgData; - unsigned dspWidth, dspHeight; + rc = setDisplayIndex(_screenIndex); - dispImage = CGDisplayCreateImage(_display); - - // display lost, use main - if (dispImage == NULL && _display) - { - dispImage = CGDisplayCreateImage(kCGDirectMainDisplay); - // no displays connected, return - if (dispImage == NULL) - { - Error(_log, "No display connected..."); - return -1; - } - } - imgData = CGDataProviderCopyData(CGImageGetDataProvider(dispImage)); - pImgData = (unsigned char*) CFDataGetBytePtr(imgData); - dspWidth = CGImageGetWidth(dispImage); - dspHeight = CGImageGetHeight(dispImage); - - _imageResampler.setHorizontalPixelDecimation(dspWidth/_width); - _imageResampler.setVerticalPixelDecimation(dspHeight/_height); - _imageResampler.processImage( pImgData, - dspWidth, - dspHeight, - CGImageGetBytesPerRow(dispImage), - PixelFormat::BGR32, - image); - - CFRelease(imgData); - CGImageRelease(dispImage); - - return 0; + return rc; } -void OsxFrameGrabber::setDisplayIndex(int index) +int OsxFrameGrabber::grabFrame(Image & image) { - if(_screenIndex != index) + int rc = 0; + if (_isEnabled && 
!_isDeviceInError) + { + + CGImageRef dispImage; + CFDataRef imgData; + unsigned char * pImgData; + unsigned dspWidth; + unsigned dspHeight; + + dispImage = CGDisplayCreateImage(_display); + + // display lost, use main + if (dispImage == nullptr && _display != 0) + { + dispImage = CGDisplayCreateImage(kCGDirectMainDisplay); + // no displays connected, return + if (dispImage == nullptr) + { + Error(_log, "No display connected..."); + return -1; + } + } + imgData = CGDataProviderCopyData(CGImageGetDataProvider(dispImage)); + pImgData = (unsigned char*) CFDataGetBytePtr(imgData); + dspWidth = CGImageGetWidth(dispImage); + dspHeight = CGImageGetHeight(dispImage); + + _imageResampler.processImage( pImgData, + static_cast(dspWidth), + static_cast(dspHeight), + static_cast(CGImageGetBytesPerRow(dispImage)), + PixelFormat::BGR32, + image); + + CFRelease(imgData); + CGImageRelease(dispImage); + + } + return rc; +} + +bool OsxFrameGrabber::setDisplayIndex(int index) +{ + bool rc (true); + if(_screenIndex != index || !_isEnabled) { _screenIndex = index; - CGImageRef image; - CGDisplayCount displayCount; - CGDirectDisplayID displays[8]; - // get list of displays - CGGetActiveDisplayList(8, displays, &displayCount); - if (_screenIndex + 1 > displayCount) + CGDisplayCount dspyCnt = 0 ; + CGDisplayErr err; + err = CGGetActiveDisplayList(0, nullptr, &dspyCnt); + if (err == kCGErrorSuccess && dspyCnt > 0) { - Error(_log, "Display with index %d is not available. 
Using main display", _screenIndex); - _display = kCGDirectMainDisplay; + CGDirectDisplayID *activeDspys = new CGDirectDisplayID [dspyCnt] ; + err = CGGetActiveDisplayList(dspyCnt, activeDspys, &dspyCnt) ; + if (err == kCGErrorSuccess) + { + CGImageRef image; + + if (_screenIndex + 1 > static_cast(dspyCnt)) + { + Error(_log, "Display with index %d is not available.", _screenIndex); + rc = false; + } + else + { + _display = activeDspys[_screenIndex]; + + image = CGDisplayCreateImage(_display); + if(image == nullptr) + { + setEnabled(false); + Error(_log, "Failed to open main display, disable capture interface"); + rc = false; + } + else + { + setEnabled(true); + rc = true; + Info(_log, "Display [%u] opened with resolution: %ux%u@%ubit", _display, CGImageGetWidth(image), CGImageGetHeight(image), CGImageGetBitsPerPixel(image)); + } + CGImageRelease(image); + } + } } else { - _display = displays[_screenIndex]; + rc=false; } - - image = CGDisplayCreateImage(_display); - if(image == NULL) - { - Error(_log, "Failed to open main display, disable capture interface"); - setEnabled(false); - return; - } - else - setEnabled(true); - - Info(_log, "Display opened with resolution: %dx%d@%dbit", CGImageGetWidth(image), CGImageGetHeight(image), CGImageGetBitsPerPixel(image)); - - CGImageRelease(image); } + return rc; +} + +QJsonObject OsxFrameGrabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + + // get list of displays + CGDisplayCount dspyCnt = 0 ; + CGDisplayErr err; + err = CGGetActiveDisplayList(0, nullptr, &dspyCnt); + if (err == kCGErrorSuccess && dspyCnt > 0) + { + CGDirectDisplayID *activeDspys = new CGDirectDisplayID [dspyCnt] ; + err = CGGetActiveDisplayList(dspyCnt, activeDspys, &dspyCnt) ; + if (err == kCGErrorSuccess) + { + inputsDiscovered["device"] = "osx"; + inputsDiscovered["device_name"] = "OSX"; + 
inputsDiscovered["type"] = "screen"; + + QJsonArray video_inputs; + QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 }; + + for (int i = 0; i < static_cast(dspyCnt); ++i) + { + QJsonObject in; + + CGDirectDisplayID did = activeDspys[i]; + + QString displayName; + displayName = QString("Display:%1").arg(did); + + in["name"] = displayName; + in["inputIdx"] = i; + + QJsonArray formats; + QJsonObject format; + + QJsonArray resolutionArray; + + QJsonObject resolution; + + + CGDisplayModeRef dispMode = CGDisplayCopyDisplayMode(did); + CGRect rect = CGDisplayBounds(did); + resolution["width"] = static_cast(rect.size.width); + resolution["height"] = static_cast(rect.size.height); + CGDisplayModeRelease(dispMode); + + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + inputsDiscovered["video_inputs"] = video_inputs; + } + delete [] activeDspys; + } + + if (inputsDiscovered.isEmpty()) + { + DebugIf(verbose, _log, "No displays found to capture from!"); + } + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; + } diff --git a/libsrc/grabber/osx/OsxFrameGrabberMock.cpp b/libsrc/grabber/osx/OsxFrameGrabberMock.cpp index 5de645ec..2bcab484 100644 --- a/libsrc/grabber/osx/OsxFrameGrabberMock.cpp +++ b/libsrc/grabber/osx/OsxFrameGrabberMock.cpp @@ -5,15 +5,33 @@ unsigned __osx_frame_counter = 0; const int __screenWidth = 800; const int __screenHeight = 600; -void CGGetActiveDisplayList(int max, CGDirectDisplayID *displays, CGDisplayCount *displayCount) +CGError CGGetActiveDisplayList(uint32_t maxDisplays, CGDirectDisplayID *activeDisplays, uint32_t *displayCount) { - *displayCount = 1; - displays[0] = 1; + if (maxDisplays == 0 || activeDisplays == nullptr) + { + *displayCount = 2; + } + else + { + displayCount = 
&maxDisplays; + if (activeDisplays != nullptr) + { + for (CGDirectDisplayID i = 0; i < maxDisplays; ++i) + { + activeDisplays[i] = i; + } + } + else + { + return kCGErrorFailure; + } + } + return kCGErrorSuccess; } CGImageRef CGDisplayCreateImage(CGDirectDisplayID display) { - CGImageRef image = new CGImage(__screenWidth, __screenHeight); + CGImageRef image = new CGImage(__screenWidth / (display+1), __screenHeight / (display+1)); return image; } @@ -123,4 +141,19 @@ void CFRelease(CFDataRef imgData) delete imgData; } +CGDisplayModeRef CGDisplayCopyDisplayMode(CGDirectDisplayID display) +{ + return nullptr; +} +CGRect CGDisplayBounds(CGDirectDisplayID display) +{ + CGRect rect; + rect.size.width = __screenWidth / (display+1); + rect.size.height = __screenHeight / (display+1); + return rect; +} +void CGDisplayModeRelease(CGDisplayModeRef mode) +{ +} + #endif diff --git a/libsrc/grabber/osx/OsxWrapper.cpp b/libsrc/grabber/osx/OsxWrapper.cpp index 2a0666f9..724be2fa 100644 --- a/libsrc/grabber/osx/OsxWrapper.cpp +++ b/libsrc/grabber/osx/OsxWrapper.cpp @@ -1,9 +1,14 @@ #include -OsxWrapper::OsxWrapper(unsigned display, unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz) - : GrabberWrapper("OSX FrameGrabber", &_grabber, grabWidth, grabHeight, updateRate_Hz) - , _grabber(display, grabWidth, grabHeight) -{} +OsxWrapper::OsxWrapper( int updateRate_Hz, + int display, + int pixelDecimation + ) + : GrabberWrapper("OSX", &_grabber, updateRate_Hz) + , _grabber(display) +{ + _grabber.setPixelDecimation(pixelDecimation); +} void OsxWrapper::action() { diff --git a/libsrc/grabber/qt/QtGrabber.cpp b/libsrc/grabber/qt/QtGrabber.cpp index aca6092d..d81150ae 100644 --- a/libsrc/grabber/qt/QtGrabber.cpp +++ b/libsrc/grabber/qt/QtGrabber.cpp @@ -7,23 +7,30 @@ #include #include #include +#include +#include +#include -QtGrabber::QtGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display) - : Grabber("QTGRABBER", 0, 0, cropLeft, 
cropRight, cropTop, cropBottom) - , _display(unsigned(display)) - , _pixelDecimation(pixelDecimation) - , _screenWidth(0) - , _screenHeight(0) - , _src_x(0) - , _src_y(0) - , _src_x_max(0) - , _src_y_max(0) - , _screen(nullptr) +// Constants +namespace { +const bool verbose = false; +} //End of constants + +QtGrabber::QtGrabber(int display, int cropLeft, int cropRight, int cropTop, int cropBottom) + : Grabber("QTGRABBER", cropLeft, cropRight, cropTop, cropBottom) + , _display(display) + , _calculatedWidth(0) + , _calculatedHeight(0) + , _src_x(0) + , _src_y(0) + , _src_x_max(0) + , _src_y_max(0) + , _isWayland(false) + , _screen(nullptr) + , _isVirtual(false) { + _logger = Logger::getInstance("Qt"); _useImageResampler = false; - - // init - setupDisplay(); } QtGrabber::~QtGrabber() @@ -36,51 +43,111 @@ void QtGrabber::freeResources() // Qt seems to hold the ownership of the QScreen pointers } +bool QtGrabber::open() +{ + bool rc = false; + +#ifndef _WIN32 + if (getenv("WAYLAND_DISPLAY") != nullptr) + { + _isWayland = true; + } + else +#endif + { + rc = true; + } + return rc; +} + bool QtGrabber::setupDisplay() { - // cleanup last screen - freeResources(); - - QScreen* primary = QGuiApplication::primaryScreen(); - QList screens = QGuiApplication::screens(); - // inject main screen at 0, if not nullptr - if(primary != nullptr) + bool result = false; + if ( ! 
open() ) { - screens.prepend(primary); - // remove last main screen if twice in list - if(screens.lastIndexOf(primary) > 0) - screens.removeAt(screens.lastIndexOf(primary)); + if ( _isWayland ) + { + Error(_log, "Grabber does not work under Wayland!"); + } } - - if(screens.isEmpty()) + else { - Error(_log, "No displays found to capture from!"); - return false; + // cleanup last screen + freeResources(); + _numberOfSDisplays = 0; + + QScreen* primary = QGuiApplication::primaryScreen(); + QList screens = QGuiApplication::screens(); + // inject main screen at 0, if not nullptr + if(primary != nullptr) + { + screens.prepend(primary); + // remove last main screen if twice in list + if(screens.lastIndexOf(primary) > 0) + { + screens.removeAt(screens.lastIndexOf(primary)); + } + } + + if(screens.isEmpty()) + { + Error(_log, "No displays found to capture from!"); + result = false; + } + else + { + _numberOfSDisplays = screens.size(); + + Info(_log,"Available Displays:"); + int index = 0; + for(auto * screen : qAsConst(screens)) + { + const QRect geo = screen->geometry(); + Info(_log,"Display %d: Name: %s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth()); + ++index; + } + + if (screens.at(0)->size() != screens.at(0)->virtualSize()) + { + const QRect vgeo = screens.at(0)->virtualGeometry(); + Info(_log,"Display %d: Name: %s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", _numberOfSDisplays, "All Displays", vgeo.left(), vgeo.top() ,vgeo.right(), vgeo.bottom(), screens.at(0)->depth()); + } + + _isVirtual = false; + // be sure the index is available + if (_display > _numberOfSDisplays - 1 ) + { + + if ((screens.at(0)->size() != screens.at(0)->virtualSize()) && (_display == _numberOfSDisplays)) + { + _isVirtual = true; + _display = 0; + + } + else + { + Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display); + _display = 0; + } + } + + 
// init the requested display + _screen = screens.at(_display); + connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged); + updateScreenDimensions(true); + + if (_isVirtual) + { + Info(_log, "Using virtual display across all screens"); + } + else + { + Info(_log,"Initialized display %d", _display); + } + result = true; + } } - - Info(_log,"Available Displays:"); - int index = 0; - for(auto screen : screens) - { - const QRect geo = screen->geometry(); - Info(_log,"Display %d: Name:%s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth()); - index++; - } - - // be sure the index is available - if(_display > unsigned(screens.size()-1)) - { - Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display); - _display = 0; - } - - // init the requested display - _screen = screens.at(_display); - connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged); - updateScreenDimensions(true); - - Info(_log,"Initialized display %d", _display); - return true; + return result; } void QtGrabber::geometryChanged(const QRect &geo) @@ -91,90 +158,109 @@ void QtGrabber::geometryChanged(const QRect &geo) int QtGrabber::grabFrame(Image & image) { - if (!_enabled) return 0; - if(_screen == nullptr) + int rc = 0; + if (_isEnabled && !_isDeviceInError) { - // reinit, this will disable capture on failure - setEnabled(setupDisplay()); - return -1; - } - QPixmap originalPixmap = _screen->grabWindow(0, _src_x, _src_y, _src_x_max, _src_y_max); - QPixmap resizedPixmap = originalPixmap.scaled(_width,_height); - QImage imageFrame = resizedPixmap.toImage().convertToFormat( QImage::Format_RGB888); - image.resize(imageFrame.width(), imageFrame.height()); - - for (int y=0; ygrabWindow(0, _src_x, _src_y, _src_x_max, _src_y_max); + if (originalPixmap.isNull()) + { + rc = -1; + } + else + { + QImage imageFrame = 
originalPixmap.toImage().scaled(_calculatedWidth, _calculatedHeight).convertToFormat( QImage::Format_RGB888); + image.resize(static_cast(_calculatedWidth), static_cast(_calculatedHeight)); + + for (int y = 0; y < imageFrame.height(); y++) + { + memcpy((unsigned char*)image.memptr() + y * image.width() * 3, static_cast(imageFrame.scanLine(y)), imageFrame.width() * 3); + } + } + } + } + return rc; } int QtGrabber::updateScreenDimensions(bool force) { - if(!_screen) + if(_screen == nullptr) + { return -1; + } - const QRect& geo = _screen->geometry(); - if (!force && _screenWidth == unsigned(geo.right()) && _screenHeight == unsigned(geo.bottom())) + QRect geo; + + if (_isVirtual) + { + geo = _screen->virtualGeometry(); + } + else + { + geo = _screen->geometry(); + } + if (!force && _width == geo.width() && _height == geo.height()) { // No update required return 0; } - Info(_log, "Update of screen resolution: [%dx%d] to [%dx%d]", _screenWidth, _screenHeight, geo.right(), geo.bottom()); - _screenWidth = geo.right() - geo.left(); - _screenHeight = geo.bottom() - geo.top(); + Info(_log, "Update of screen resolution: [%dx%d] to [%dx%d]", _width, _height, geo.width(), geo.height()); + _width = geo.width(); + _height = geo.height(); - int width=0, height=0; + int width=0; + int height=0; // Image scaling is performed by Qt - width = (_screenWidth > unsigned(_cropLeft + _cropRight)) - ? ((_screenWidth - _cropLeft - _cropRight) / _pixelDecimation) - : (_screenWidth / _pixelDecimation); + width = (_width > (_cropLeft + _cropRight)) + ? ((_width - _cropLeft - _cropRight) / _pixelDecimation) + : (_width / _pixelDecimation); - height = (_screenHeight > unsigned(_cropTop + _cropBottom)) - ? ((_screenHeight - _cropTop - _cropBottom) / _pixelDecimation) - : (_screenHeight / _pixelDecimation); + height = (_height > (_cropTop + _cropBottom)) + ? 
((_height - _cropTop - _cropBottom) / _pixelDecimation) + : (_height / _pixelDecimation); // calculate final image dimensions and adjust top/left cropping in 3D modes switch (_videoMode) { case VideoMode::VIDEO_3DSBS: - _width = width /2; - _height = height; + _calculatedWidth = width /2; + _calculatedHeight = height; _src_x = _cropLeft / 2; _src_y = _cropTop; - _src_x_max = (_screenWidth / 2) - _cropRight; - _src_y_max = _screenHeight - _cropBottom; + _src_x_max = (_width / 2) - _cropRight - _cropLeft; + _src_y_max = _height - _cropBottom - _cropTop; break; case VideoMode::VIDEO_3DTAB: - _width = width; - _height = height / 2; + _calculatedWidth = width; + _calculatedHeight = height / 2; _src_x = _cropLeft; _src_y = _cropTop / 2; - _src_x_max = _screenWidth - _cropRight; - _src_y_max = (_screenHeight / 2) - _cropBottom; + _src_x_max = _width - _cropRight - _cropLeft; + _src_y_max = (_height / 2) - _cropBottom - _cropTop; break; case VideoMode::VIDEO_2D: default: - _width = width; - _height = height; + _calculatedWidth = width; + _calculatedHeight = height; _src_x = _cropLeft; _src_y = _cropTop; - _src_x_max = _screenWidth - _cropRight; - _src_y_max = _screenHeight - _cropBottom; + _src_x_max = _width - _cropRight - _cropLeft; + _src_y_max = _height - _cropBottom - _cropTop; break; } - Info(_log, "Update output image resolution to [%dx%d]", _width, _height); + Info(_log, "Update output image resolution to [%dx%d]", _calculatedWidth, _calculatedHeight); return 1; } @@ -184,22 +270,129 @@ void QtGrabber::setVideoMode(VideoMode mode) updateScreenDimensions(true); } -void QtGrabber::setPixelDecimation(int pixelDecimation) +bool QtGrabber::setPixelDecimation(int pixelDecimation) { - _pixelDecimation = pixelDecimation; + bool rc (true); + if(Grabber::setPixelDecimation(pixelDecimation)) + { + if ( updateScreenDimensions(true) < 0) + { + rc = false; + } + } + return rc; } -void QtGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned 
cropBottom) +void QtGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) { Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom); updateScreenDimensions(true); } -void QtGrabber::setDisplayIndex(int index) +bool QtGrabber::setDisplayIndex(int index) { - if(_display != unsigned(index)) + bool rc (true); + if (_display != index) { - _display = unsigned(index); - setupDisplay(); + if (index <= _numberOfSDisplays) + { + _display = index; + } + else { + _display = 0; + } + rc = setupDisplay(); } + return rc; +} + +QJsonObject QtGrabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + if ( open() ) + { + QList screens = QGuiApplication::screens(); + if (!screens.isEmpty()) + { + inputsDiscovered["device"] = "qt"; + inputsDiscovered["device_name"] = "QT"; + inputsDiscovered["type"] = "screen"; + + QJsonArray video_inputs; + QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 }; + + for (int i = 0; i < screens.size(); ++i) + { + QJsonObject in; + + QString name = screens.at(i)->name(); + int pos = name.lastIndexOf('\\'); + if (pos != -1) + { + name = name.right(name.length()-pos-1); + } + + in["name"] = name; + in["inputIdx"] = i; + + QJsonArray formats; + QJsonObject format; + + QJsonArray resolutionArray; + + QJsonObject resolution; + + resolution["width"] = screens.at(i)->size().width(); + resolution["height"] = screens.at(i)->size().height(); + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + + if (screens.at(0)->size() != screens.at(0)->virtualSize()) + { + QJsonObject in; + in["name"] = "All Displays"; + in["inputIdx"] = screens.size(); + in["virtual"] = true; + + QJsonArray formats; + QJsonObject format; + + QJsonArray 
resolutionArray; + + QJsonObject resolution; + + resolution["width"] = screens.at(0)->virtualSize().width(); + resolution["height"] = screens.at(0)->virtualSize().height(); + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + inputsDiscovered["video_inputs"] = video_inputs; + } + + if (inputsDiscovered.isEmpty()) + { + DebugIf(verbose, _log, "No displays found to capture from!"); + } + } + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; + } diff --git a/libsrc/grabber/qt/QtWrapper.cpp b/libsrc/grabber/qt/QtWrapper.cpp index 2d525198..90cb489b 100644 --- a/libsrc/grabber/qt/QtWrapper.cpp +++ b/libsrc/grabber/qt/QtWrapper.cpp @@ -1,9 +1,20 @@ #include -QtWrapper::QtWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display, unsigned updateRate_Hz) - : GrabberWrapper("Qt", &_grabber, 0, 0, updateRate_Hz) - , _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation, display) -{} +QtWrapper::QtWrapper( int updateRate_Hz, + int display, + int pixelDecimation, + int cropLeft, int cropRight, int cropTop, int cropBottom + ) + : GrabberWrapper("Qt", &_grabber, updateRate_Hz) + , _grabber(display, cropLeft, cropRight, cropTop, cropBottom) +{ + _grabber.setPixelDecimation(pixelDecimation); +} + +bool QtWrapper::open() +{ + return _grabber.open(); +} void QtWrapper::action() { diff --git a/libsrc/grabber/v4l2/CMakeLists.txt b/libsrc/grabber/v4l2/CMakeLists.txt deleted file mode 100644 index 9a41bbe5..00000000 --- a/libsrc/grabber/v4l2/CMakeLists.txt +++ /dev/null @@ -1,18 +0,0 @@ -# Define the current source locations -SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber) -SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/v4l2) - -FILE ( GLOB V4L2_SOURCES 
"${CURRENT_HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" ) - -add_library(v4l2-grabber ${V4L2_SOURCES} ) - -target_link_libraries(v4l2-grabber - hyperion - ${QT_LIBRARIES} -) - -if(TURBOJPEG_FOUND) - target_link_libraries(v4l2-grabber ${TurboJPEG_LIBRARY}) -elseif (JPEG_FOUND) - target_link_libraries(v4l2-grabber ${JPEG_LIBRARY}) -endif(TURBOJPEG_FOUND) diff --git a/libsrc/grabber/v4l2/V4L2Grabber.cpp b/libsrc/grabber/v4l2/V4L2Grabber.cpp deleted file mode 100644 index fce3f348..00000000 --- a/libsrc/grabber/v4l2/V4L2Grabber.cpp +++ /dev/null @@ -1,1415 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -#include -#include - -#include "grabber/V4L2Grabber.h" - -#define CLEAR(x) memset(&(x), 0, sizeof(x)) - -#ifndef V4L2_CAP_META_CAPTURE -#define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility. 
-#endif - -V4L2Grabber::V4L2Grabber(const QString & device - , unsigned width - , unsigned height - , unsigned fps - , unsigned input - , VideoStandard videoStandard - , PixelFormat pixelFormat - , int pixelDecimation - ) - : Grabber("V4L2:"+device) - , _deviceName() - , _videoStandard(videoStandard) - , _ioMethod(IO_METHOD_MMAP) - , _fileDescriptor(-1) - , _buffers() - , _pixelFormat(pixelFormat) - , _pixelDecimation(-1) - , _lineLength(-1) - , _frameByteSize(-1) - , _noSignalCounterThreshold(40) - , _noSignalThresholdColor(ColorRgb{0,0,0}) - , _signalDetectionEnabled(true) - , _cecDetectionEnabled(true) - , _cecStandbyActivated(false) - , _noSignalDetected(false) - , _noSignalCounter(0) - , _x_frac_min(0.25) - , _y_frac_min(0.25) - , _x_frac_max(0.75) - , _y_frac_max(0.75) - , _streamNotifier(nullptr) - , _initialized(false) - , _deviceAutoDiscoverEnabled(false) -{ - setPixelDecimation(pixelDecimation); - getV4Ldevices(); - - // init - setInput(input); - setWidthHeight(width, height); - setFramerate(fps); - setDeviceVideoStandard(device, videoStandard); -} - -V4L2Grabber::~V4L2Grabber() -{ - uninit(); -} - -void V4L2Grabber::uninit() -{ - // stop if the grabber was not stopped - if (_initialized) - { - Debug(_log,"uninit grabber: %s", QSTRING_CSTR(_deviceName)); - stop(); - } -} - -bool V4L2Grabber::init() -{ - if (!_initialized) - { - getV4Ldevices(); - QString v4lDevices_str; - - // show list only once - if (!_deviceName.startsWith("/dev/")) - { - for (auto& dev: _v4lDevices) - { - v4lDevices_str += "\t"+ dev.first + "\t" + dev.second + "\n"; - } - if (!v4lDevices_str.isEmpty()) - Info(_log, "available V4L2 devices:\n%s", QSTRING_CSTR(v4lDevices_str)); - } - - if (_deviceName == "auto") - { - _deviceAutoDiscoverEnabled = true; - _deviceName = "unknown"; - Info( _log, "search for usable video devices" ); - for (auto& dev: _v4lDevices) - { - _deviceName = dev.first; - if (init()) - { - Info(_log, "found usable v4l2 device: %s (%s)",QSTRING_CSTR(dev.first), 
QSTRING_CSTR(dev.second)); - _deviceAutoDiscoverEnabled = false; - return _initialized; - } - } - Info(_log, "no usable device found"); - } - else if (!_deviceName.startsWith("/dev/")) - { - for (auto& dev: _v4lDevices) - { - if (_deviceName.toLower() == dev.second.toLower()) - { - _deviceName = dev.first; - Info(_log, "found v4l2 device with configured name: %s (%s)", QSTRING_CSTR(dev.second), QSTRING_CSTR(dev.first) ); - break; - } - } - } - else - { - Info(_log, "%s v4l device: %s", (_deviceAutoDiscoverEnabled? "test" : "configured"), QSTRING_CSTR(_deviceName)); - } - - bool opened = false; - try - { - // do not init with unknown device - if (_deviceName != "unknown") - { - if (open_device()) - { - opened = true; - init_device(_videoStandard); - _initialized = true; - } - } - } - catch(std::exception& e) - { - if (opened) - { - uninit_device(); - close_device(); - } - ErrorIf( !_deviceAutoDiscoverEnabled, _log, "V4l2 init failed (%s)", e.what()); - } - } - - return _initialized; -} - -void V4L2Grabber::getV4Ldevices() -{ - QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags); - _deviceProperties.clear(); - while(it.hasNext()) - { - //_v4lDevices - QString dev = it.next(); - if (it.fileName().startsWith("video")) - { - QString devName = "/dev/" + it.fileName(); - int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0); - - if (fd < 0) - { - throw_errno_exception("Cannot open '" + devName + "'"); - continue; - } - - struct v4l2_capability cap; - CLEAR(cap); - - if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) - { - throw_errno_exception("'" + devName + "' is no V4L2 device"); - close(fd); - continue; - } - - if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture. 
- { - close(fd); - continue; - } - - // get the current settings - struct v4l2_format fmt; - CLEAR(fmt); - - fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0) - { - close(fd); - continue; - } - - V4L2Grabber::DeviceProperties properties; - - // collect available device inputs (index & name) - int inputIndex; - if (xioctl(fd, VIDIOC_G_INPUT, &inputIndex) == 0) - { - struct v4l2_input input; - CLEAR(input); - - input.index = 0; - while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0) - { - properties.inputs.insert(QString((char*)input.name), input.index); - input.index++; - } - } - - // collect available device resolutions & frame rates - struct v4l2_frmsizeenum frmsizeenum; - CLEAR(frmsizeenum); - - frmsizeenum.index = 0; - frmsizeenum.pixel_format = fmt.fmt.pix.pixelformat; - while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0) - { - switch (frmsizeenum.type) - { - case V4L2_FRMSIZE_TYPE_DISCRETE: - { - properties.resolutions << QString::number(frmsizeenum.discrete.width) + "x" + QString::number(frmsizeenum.discrete.height); - enumFrameIntervals(properties.framerates, fd, fmt.fmt.pix.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height); - } - break; - case V4L2_FRMSIZE_TYPE_CONTINUOUS: - case V4L2_FRMSIZE_TYPE_STEPWISE: - // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE - break; - } - frmsizeenum.index++; - } - - if (close(fd) < 0) continue; - - QFile devNameFile(dev+"/name"); - if (devNameFile.exists()) - { - devNameFile.open(QFile::ReadOnly); - devName = devNameFile.readLine(); - devName = devName.trimmed(); - properties.name = devName; - devNameFile.close(); - } - _v4lDevices.emplace("/dev/"+it.fileName(), devName); - _deviceProperties.insert("/dev/"+it.fileName(), properties); - } - } -} - -void V4L2Grabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold) -{ - _noSignalThresholdColor.red = 
uint8_t(255*redSignalThreshold); - _noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold); - _noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold); - _noSignalCounterThreshold = qMax(1, noSignalCounterThreshold); - - Info(_log, "Signal threshold set to: {%d, %d, %d}", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue ); -} - -void V4L2Grabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax) -{ - // rainbow 16 stripes 0.47 0.2 0.49 0.8 - // unicolor: 0.25 0.25 0.75 0.75 - - _x_frac_min = horizontalMin; - _y_frac_min = verticalMin; - _x_frac_max = horizontalMax; - _y_frac_max = verticalMax; - - Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max ); -} - -bool V4L2Grabber::start() -{ - try - { - if (init() && _streamNotifier != nullptr && !_streamNotifier->isEnabled()) - { - _streamNotifier->setEnabled(true); - start_capturing(); - Info(_log, "Started"); - return true; - } - } - catch(std::exception& e) - { - Error(_log, "start failed (%s)", e.what()); - } - - return false; -} - -void V4L2Grabber::stop() -{ - if (_streamNotifier != nullptr && _streamNotifier->isEnabled()) - { - stop_capturing(); - _streamNotifier->setEnabled(false); - uninit_device(); - close_device(); - _initialized = false; - _deviceProperties.clear(); - Info(_log, "Stopped"); - } -} - -bool V4L2Grabber::open_device() -{ - struct stat st; - - if (-1 == stat(QSTRING_CSTR(_deviceName), &st)) - { - throw_errno_exception("Cannot identify '" + _deviceName + "'"); - return false; - } - - if (!S_ISCHR(st.st_mode)) - { - throw_exception("'" + _deviceName + "' is no device"); - return false; - } - - _fileDescriptor = open(QSTRING_CSTR(_deviceName), O_RDWR | O_NONBLOCK, 0); - - if (-1 == _fileDescriptor) - { - throw_errno_exception("Cannot open '" + _deviceName + "'"); - return false; - } - - // create the notifier for when a new 
frame is available - _streamNotifier = new QSocketNotifier(_fileDescriptor, QSocketNotifier::Read); - _streamNotifier->setEnabled(false); - connect(_streamNotifier, &QSocketNotifier::activated, this, &V4L2Grabber::read_frame); - return true; -} - -void V4L2Grabber::close_device() -{ - if (-1 == close(_fileDescriptor)) - { - throw_errno_exception("close"); - return; - } - - _fileDescriptor = -1; - - delete _streamNotifier; - _streamNotifier = nullptr; -} - -void V4L2Grabber::init_read(unsigned int buffer_size) -{ - _buffers.resize(1); - - _buffers[0].length = buffer_size; - _buffers[0].start = malloc(buffer_size); - - if (!_buffers[0].start) - { - throw_exception("Out of memory"); - return; - } -} - -void V4L2Grabber::init_mmap() -{ - struct v4l2_requestbuffers req; - - CLEAR(req); - - req.count = 4; - req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - req.memory = V4L2_MEMORY_MMAP; - - if (-1 == xioctl(VIDIOC_REQBUFS, &req)) - { - if (EINVAL == errno) - { - throw_exception("'" + _deviceName + "' does not support memory mapping"); - return; - } - else - { - throw_errno_exception("VIDIOC_REQBUFS"); - return; - } - } - - if (req.count < 2) - { - throw_exception("Insufficient buffer memory on " + _deviceName); - return; - } - - _buffers.resize(req.count); - - for (size_t n_buffers = 0; n_buffers < req.count; ++n_buffers) - { - struct v4l2_buffer buf; - - CLEAR(buf); - - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_MMAP; - buf.index = n_buffers; - - if (-1 == xioctl(VIDIOC_QUERYBUF, &buf)) - { - throw_errno_exception("VIDIOC_QUERYBUF"); - return; - } - - _buffers[n_buffers].length = buf.length; - _buffers[n_buffers].start = mmap(NULL /* start anywhere */, - buf.length, - PROT_READ | PROT_WRITE /* required */, - MAP_SHARED /* recommended */, - _fileDescriptor, buf.m.offset - ); - - if (MAP_FAILED == _buffers[n_buffers].start) - { - throw_errno_exception("mmap"); - return; - } - } -} - -void V4L2Grabber::init_userp(unsigned int buffer_size) -{ - struct 
v4l2_requestbuffers req; - - CLEAR(req); - - req.count = 4; - req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - req.memory = V4L2_MEMORY_USERPTR; - - if (-1 == xioctl(VIDIOC_REQBUFS, &req)) - { - if (EINVAL == errno) - { - throw_exception("'" + _deviceName + "' does not support user pointer"); - return; - } - else - { - throw_errno_exception("VIDIOC_REQBUFS"); - return; - } - } - - _buffers.resize(4); - - for (size_t n_buffers = 0; n_buffers < 4; ++n_buffers) - { - _buffers[n_buffers].length = buffer_size; - _buffers[n_buffers].start = malloc(buffer_size); - - if (!_buffers[n_buffers].start) - { - throw_exception("Out of memory"); - return; - } - } -} - -void V4L2Grabber::init_device(VideoStandard videoStandard) -{ - struct v4l2_capability cap; - CLEAR(cap); - - if (-1 == xioctl(VIDIOC_QUERYCAP, &cap)) - { - if (EINVAL == errno) - { - throw_exception("'" + _deviceName + "' is no V4L2 device"); - return; - } - else - { - throw_errno_exception("VIDIOC_QUERYCAP"); - return; - } - } - - if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) - { - throw_exception("'" + _deviceName + "' is no video capture device"); - return; - } - - switch (_ioMethod) - { - case IO_METHOD_READ: - { - if (!(cap.capabilities & V4L2_CAP_READWRITE)) - { - throw_exception("'" + _deviceName + "' does not support read i/o"); - return; - } - } - break; - - case IO_METHOD_MMAP: - case IO_METHOD_USERPTR: - { - if (!(cap.capabilities & V4L2_CAP_STREAMING)) - { - throw_exception("'" + _deviceName + "' does not support streaming i/o"); - return; - } - } - break; - } - - - /* Select video input, video standard and tune here. */ - - struct v4l2_cropcap cropcap; - CLEAR(cropcap); - - cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if (0 == xioctl(VIDIOC_CROPCAP, &cropcap)) - { - struct v4l2_crop crop; - crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - crop.c = cropcap.defrect; /* reset to default */ - - if (-1 == xioctl(VIDIOC_S_CROP, &crop)) - { - switch (errno) - { - case EINVAL: /* Cropping not supported. 
*/ - default: /* Errors ignored. */ - break; - } - } - } - else - { - /* Errors ignored. */ - } - - // set input if needed and supported - struct v4l2_input v4l2Input; - CLEAR(v4l2Input); - v4l2Input.index = _input; - - if (_input >= 0 && 0 == xioctl(VIDIOC_ENUMINPUT, &v4l2Input)) - { - (-1 == xioctl(VIDIOC_S_INPUT, &_input)) - ? Debug(_log, "Input settings not supported.") - : Debug(_log, "Set device input to: %s", v4l2Input.name); - } - - // set the video standard if needed and supported - struct v4l2_standard standard; - CLEAR(standard); - - if (-1 != xioctl(VIDIOC_ENUMSTD, &standard)) - { - switch (videoStandard) - { - case VideoStandard::PAL: - { - standard.id = V4L2_STD_PAL; - if (-1 == xioctl(VIDIOC_S_STD, &standard.id)) - { - throw_errno_exception("VIDIOC_S_STD"); - break; - } - Debug(_log, "Video standard=PAL"); - } - break; - - case VideoStandard::NTSC: - { - standard.id = V4L2_STD_NTSC; - if (-1 == xioctl(VIDIOC_S_STD, &standard.id)) - { - throw_errno_exception("VIDIOC_S_STD"); - break; - } - Debug(_log, "Video standard=NTSC"); - } - break; - - case VideoStandard::SECAM: - { - standard.id = V4L2_STD_SECAM; - if (-1 == xioctl(VIDIOC_S_STD, &standard.id)) - { - throw_errno_exception("VIDIOC_S_STD"); - break; - } - Debug(_log, "Video standard=SECAM"); - } - break; - - case VideoStandard::NO_CHANGE: - default: - // No change to device settings - break; - } - } - - // get the current settings - struct v4l2_format fmt; - CLEAR(fmt); - - fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (-1 == xioctl(VIDIOC_G_FMT, &fmt)) - { - throw_errno_exception("VIDIOC_G_FMT"); - return; - } - - // set the requested pixel format - switch (_pixelFormat) - { - case PixelFormat::UYVY: - fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; - break; - - case PixelFormat::YUYV: - fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; - break; - - case PixelFormat::RGB32: - fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32; - break; - -#ifdef HAVE_JPEG_DECODER - case PixelFormat::MJPEG: - { - 
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; - fmt.fmt.pix.field = V4L2_FIELD_ANY; - } - break; -#endif - - case PixelFormat::NO_CHANGE: - default: - // No change to device settings - break; - } - - // set custom resolution for width and height if they are not zero - if(_width && _height) - { - fmt.fmt.pix.width = _width; - fmt.fmt.pix.height = _height; - } - - // set the settings - if (-1 == xioctl(VIDIOC_S_FMT, &fmt)) - { - throw_errno_exception("VIDIOC_S_FMT"); - return; - } - - // initialize current width and height - _width = fmt.fmt.pix.width; - _height = fmt.fmt.pix.height; - - // display the used width and height - Debug(_log, "Set resolution to width=%d height=%d", _width, _height ); - - // Trying to set frame rate - struct v4l2_streamparm streamparms; - CLEAR(streamparms); - - streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - // Check that the driver knows about framerate get/set - if (xioctl(VIDIOC_G_PARM, &streamparms) >= 0) - { - // Check if the device is able to accept a capture framerate set. - if (streamparms.parm.capture.capability == V4L2_CAP_TIMEPERFRAME) - { - streamparms.parm.capture.timeperframe.numerator = 1; - streamparms.parm.capture.timeperframe.denominator = _fps; - (-1 == xioctl(VIDIOC_S_PARM, &streamparms)) - ? 
Debug(_log, "Frame rate settings not supported.") - : Debug(_log, "Set framerate to %d fps", streamparms.parm.capture.timeperframe.denominator); - } - } - - // set the line length - _lineLength = fmt.fmt.pix.bytesperline; - - // check pixel format and frame size - switch (fmt.fmt.pix.pixelformat) - { - case V4L2_PIX_FMT_UYVY: - { - _pixelFormat = PixelFormat::UYVY; - _frameByteSize = _width * _height * 2; - Debug(_log, "Pixel format=UYVY"); - } - break; - - case V4L2_PIX_FMT_YUYV: - { - _pixelFormat = PixelFormat::YUYV; - _frameByteSize = _width * _height * 2; - Debug(_log, "Pixel format=YUYV"); - } - break; - - case V4L2_PIX_FMT_RGB32: - { - _pixelFormat = PixelFormat::RGB32; - _frameByteSize = _width * _height * 4; - Debug(_log, "Pixel format=RGB32"); - } - break; - -#ifdef HAVE_JPEG_DECODER - case V4L2_PIX_FMT_MJPEG: - { - _pixelFormat = PixelFormat::MJPEG; - Debug(_log, "Pixel format=MJPEG"); - } - break; -#endif - - default: -#ifdef HAVE_JPEG_DECODER - throw_exception("Only pixel formats UYVY, YUYV, RGB32 and MJPEG are supported"); -#else - throw_exception("Only pixel formats UYVY, YUYV, and RGB32 are supported"); -#endif - return; - } - - switch (_ioMethod) - { - case IO_METHOD_READ: - init_read(fmt.fmt.pix.sizeimage); - break; - - case IO_METHOD_MMAP: - init_mmap(); - break; - - case IO_METHOD_USERPTR: - init_userp(fmt.fmt.pix.sizeimage); - break; - } -} - -void V4L2Grabber::uninit_device() -{ - switch (_ioMethod) - { - case IO_METHOD_READ: - free(_buffers[0].start); - break; - - case IO_METHOD_MMAP: - { - for (size_t i = 0; i < _buffers.size(); ++i) - if (-1 == munmap(_buffers[i].start, _buffers[i].length)) - { - throw_errno_exception("munmap"); - return; - } - } - break; - - case IO_METHOD_USERPTR: - { - for (size_t i = 0; i < _buffers.size(); ++i) - free(_buffers[i].start); - } - break; - } - - _buffers.resize(0); -} - -void V4L2Grabber::start_capturing() -{ - switch (_ioMethod) - { - case IO_METHOD_READ: - /* Nothing to do. 
*/ - break; - - case IO_METHOD_MMAP: - { - for (size_t i = 0; i < _buffers.size(); ++i) - { - struct v4l2_buffer buf; - - CLEAR(buf); - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_MMAP; - buf.index = i; - - if (-1 == xioctl(VIDIOC_QBUF, &buf)) - { - throw_errno_exception("VIDIOC_QBUF"); - return; - } - } - v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (-1 == xioctl(VIDIOC_STREAMON, &type)) - { - throw_errno_exception("VIDIOC_STREAMON"); - return; - } - break; - } - case IO_METHOD_USERPTR: - { - for (size_t i = 0; i < _buffers.size(); ++i) - { - struct v4l2_buffer buf; - - CLEAR(buf); - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_USERPTR; - buf.index = i; - buf.m.userptr = (unsigned long)_buffers[i].start; - buf.length = _buffers[i].length; - - if (-1 == xioctl(VIDIOC_QBUF, &buf)) - { - throw_errno_exception("VIDIOC_QBUF"); - return; - } - } - v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (-1 == xioctl(VIDIOC_STREAMON, &type)) - { - throw_errno_exception("VIDIOC_STREAMON"); - return; - } - break; - } - } -} - -void V4L2Grabber::stop_capturing() -{ - enum v4l2_buf_type type; - - switch (_ioMethod) - { - case IO_METHOD_READ: - break; /* Nothing to do. */ - - case IO_METHOD_MMAP: - case IO_METHOD_USERPTR: - { - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - ErrorIf((xioctl(VIDIOC_STREAMOFF, &type) == -1), _log, "VIDIOC_STREAMOFF error code %d, %s", errno, strerror(errno)); - } - break; - } -} - -int V4L2Grabber::read_frame() -{ - bool rc = false; - - try - { - struct v4l2_buffer buf; - - switch (_ioMethod) - { - case IO_METHOD_READ: - { - int size; - if ((size = read(_fileDescriptor, _buffers[0].start, _buffers[0].length)) == -1) - { - switch (errno) - { - case EAGAIN: - return 0; - - case EIO: /* Could ignore EIO, see spec. 
*/ - default: - throw_errno_exception("read"); - return 0; - } - } - - rc = process_image(_buffers[0].start, size); - } - break; - - case IO_METHOD_MMAP: - { - CLEAR(buf); - - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_MMAP; - - if (-1 == xioctl(VIDIOC_DQBUF, &buf)) - { - switch (errno) - { - case EAGAIN: - return 0; - - case EIO: /* Could ignore EIO, see spec. */ - default: - { - throw_errno_exception("VIDIOC_DQBUF"); - stop(); - getV4Ldevices(); - } - return 0; - } - } - - assert(buf.index < _buffers.size()); - - rc = process_image(_buffers[buf.index].start, buf.bytesused); - - if (-1 == xioctl(VIDIOC_QBUF, &buf)) - { - throw_errno_exception("VIDIOC_QBUF"); - return 0; - } - } - break; - - case IO_METHOD_USERPTR: - { - CLEAR(buf); - - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_USERPTR; - - if (-1 == xioctl(VIDIOC_DQBUF, &buf)) - { - switch (errno) - { - case EAGAIN: - return 0; - - case EIO: /* Could ignore EIO, see spec. */ - default: - { - throw_errno_exception("VIDIOC_DQBUF"); - stop(); - getV4Ldevices(); - } - return 0; - } - } - - for (size_t i = 0; i < _buffers.size(); ++i) - { - if (buf.m.userptr == (unsigned long)_buffers[i].start && buf.length == _buffers[i].length) - { - break; - } - } - - rc = process_image((void *)buf.m.userptr, buf.bytesused); - - if (-1 == xioctl(VIDIOC_QBUF, &buf)) - { - throw_errno_exception("VIDIOC_QBUF"); - return 0; - } - } - break; - } - } - catch (std::exception& e) - { - emit readError(e.what()); - rc = false; - } - - return rc ? 1 : 0; -} - -bool V4L2Grabber::process_image(const void *p, int size) -{ - // We do want a new frame... 
-#ifdef HAVE_JPEG_DECODER - if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG) -#else - if (size < _frameByteSize) -#endif - { - Error(_log, "Frame too small: %d != %d", size, _frameByteSize); - } - else - { - process_image(reinterpret_cast(p), size); - return true; - } - - return false; -} - -void V4L2Grabber::process_image(const uint8_t * data, int size) -{ - if (_cecDetectionEnabled && _cecStandbyActivated) - return; - - Image image(_width, _height); - -/* ---------------------------------------------------------- - * ----------- BEGIN of JPEG decoder related code ----------- - * --------------------------------------------------------*/ - -#ifdef HAVE_JPEG_DECODER - if (_pixelFormat == PixelFormat::MJPEG) - { -#endif -#ifdef HAVE_JPEG - _decompress = new jpeg_decompress_struct; - _error = new errorManager; - - _decompress->err = jpeg_std_error(&_error->pub); - _error->pub.error_exit = &errorHandler; - _error->pub.output_message = &outputHandler; - - jpeg_create_decompress(_decompress); - - if (setjmp(_error->setjmp_buffer)) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - jpeg_mem_src(_decompress, const_cast(data), size); - - if (jpeg_read_header(_decompress, (bool) TRUE) != JPEG_HEADER_OK) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - _decompress->scale_num = 1; - _decompress->scale_denom = 1; - _decompress->out_color_space = JCS_RGB; - _decompress->dct_method = JDCT_IFAST; - - if (!jpeg_start_decompress(_decompress)) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - if (_decompress->out_color_components != 3) - { - jpeg_abort_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - return; - } - - QImage imageFrame = 
QImage(_decompress->output_width, _decompress->output_height, QImage::Format_RGB888); - - int y = 0; - while (_decompress->output_scanline < _decompress->output_height) - { - uchar *row = imageFrame.scanLine(_decompress->output_scanline); - jpeg_read_scanlines(_decompress, &row, 1); - y++; - } - - jpeg_finish_decompress(_decompress); - jpeg_destroy_decompress(_decompress); - delete _decompress; - delete _error; - - if (imageFrame.isNull() || _error->pub.num_warnings > 0) - return; -#endif -#ifdef HAVE_TURBO_JPEG - _decompress = tjInitDecompress(); - if (_decompress == nullptr) - return; - - if (tjDecompressHeader2(_decompress, const_cast(data), size, &_width, &_height, &_subsamp) != 0) - { - tjDestroy(_decompress); - return; - } - - QImage imageFrame = QImage(_width, _height, QImage::Format_RGB888); - if (tjDecompress2(_decompress, const_cast(data), size, imageFrame.bits(), _width, 0, _height, TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE) != 0) - { - tjDestroy(_decompress); - return; - } - - tjDestroy(_decompress); - - if (imageFrame.isNull()) - return; -#endif -#ifdef HAVE_JPEG_DECODER - QRect rect(_cropLeft, _cropTop, imageFrame.width() - _cropLeft - _cropRight, imageFrame.height() - _cropTop - _cropBottom); - imageFrame = imageFrame.copy(rect); - imageFrame = imageFrame.scaled(imageFrame.width() / _pixelDecimation, imageFrame.height() / _pixelDecimation,Qt::KeepAspectRatio); - - if ((image.width() != unsigned(imageFrame.width())) || (image.height() != unsigned(imageFrame.height()))) - image.resize(imageFrame.width(), imageFrame.height()); - - for (int y=0; y= _noSignalCounterThreshold) - { - _noSignalDetected = true; - Info(_log, "Signal detected"); - } - - _noSignalCounter = 0; - } - - if ( _noSignalCounter < _noSignalCounterThreshold) - { - emit newFrame(image); - } - else if (_noSignalCounter == _noSignalCounterThreshold) - { - _noSignalDetected = false; - Info(_log, "Signal lost"); - } - } - else - { - emit newFrame(image); - } -} - -int 
V4L2Grabber::xioctl(int request, void *arg) -{ - int r; - - do - { - r = ioctl(_fileDescriptor, request, arg); - } - while (-1 == r && EINTR == errno); - - return r; -} - -int V4L2Grabber::xioctl(int fileDescriptor, int request, void *arg) -{ - int r; - - do - { - r = ioctl(fileDescriptor, request, arg); - } - while (r < 0 && errno == EINTR ); - - return r; -} - -void V4L2Grabber::enumFrameIntervals(QStringList &framerates, int fileDescriptor, int pixelformat, int width, int height) -{ - // collect available frame rates - struct v4l2_frmivalenum frmivalenum; - CLEAR(frmivalenum); - - frmivalenum.index = 0; - frmivalenum.pixel_format = pixelformat; - frmivalenum.width = width; - frmivalenum.height = height; - - while (xioctl(fileDescriptor, VIDIOC_ENUM_FRAMEINTERVALS, &frmivalenum) >= 0) - { - int rate; - switch (frmivalenum.type) - { - case V4L2_FRMSIZE_TYPE_DISCRETE: - { - if (frmivalenum.discrete.numerator != 0) - { - rate = frmivalenum.discrete.denominator / frmivalenum.discrete.numerator; - if (!framerates.contains(QString::number(rate))) - framerates.append(QString::number(rate)); - } - } - break; - case V4L2_FRMSIZE_TYPE_CONTINUOUS: - case V4L2_FRMSIZE_TYPE_STEPWISE: - { - if (frmivalenum.stepwise.min.denominator != 0) - { - rate = frmivalenum.stepwise.min.denominator / frmivalenum.stepwise.min.numerator; - if (!framerates.contains(QString::number(rate))) - framerates.append(QString::number(rate)); - } - } - } - frmivalenum.index++; - } -} - -void V4L2Grabber::setSignalDetectionEnable(bool enable) -{ - if (_signalDetectionEnabled != enable) - { - _signalDetectionEnabled = enable; - Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled"); - } -} - -void V4L2Grabber::setCecDetectionEnable(bool enable) -{ - if (_cecDetectionEnabled != enable) - { - _cecDetectionEnabled = enable; - Info(_log, QString("CEC detection is now %1").arg(enable ? 
"enabled" : "disabled").toLocal8Bit()); - } -} - -void V4L2Grabber::setPixelDecimation(int pixelDecimation) -{ - if (_pixelDecimation != pixelDecimation) - { - _pixelDecimation = pixelDecimation; - _imageResampler.setHorizontalPixelDecimation(pixelDecimation); - _imageResampler.setVerticalPixelDecimation(pixelDecimation); - } -} - -void V4L2Grabber::setDeviceVideoStandard(QString device, VideoStandard videoStandard) -{ - if (_deviceName != device || _videoStandard != videoStandard) - { - // extract input of device - QChar input = device.at(device.size() - 1); - _input = input.isNumber() ? input.digitValue() : -1; - - bool started = _initialized; - uninit(); - _deviceName = device; - _videoStandard = videoStandard; - - if(started) start(); - } -} - -bool V4L2Grabber::setInput(int input) -{ - if(Grabber::setInput(input)) - { - bool started = _initialized; - uninit(); - if(started) start(); - return true; - } - return false; -} - -bool V4L2Grabber::setWidthHeight(int width, int height) -{ - if(Grabber::setWidthHeight(width,height)) - { - bool started = _initialized; - uninit(); - if(started) start(); - return true; - } - return false; -} - -bool V4L2Grabber::setFramerate(int fps) -{ - if(Grabber::setFramerate(fps)) - { - bool started = _initialized; - uninit(); - if(started) start(); - return true; - } - return false; -} - -QStringList V4L2Grabber::getV4L2devices() const -{ - QStringList result = QStringList(); - for (auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it) - { - result << it.key(); - } - return result; -} - -QString V4L2Grabber::getV4L2deviceName(const QString& devicePath) const -{ - return _deviceProperties.value(devicePath).name; -} - -QMultiMap V4L2Grabber::getV4L2deviceInputs(const QString& devicePath) const -{ - return _deviceProperties.value(devicePath).inputs; -} - -QStringList V4L2Grabber::getResolutions(const QString& devicePath) const -{ - return _deviceProperties.value(devicePath).resolutions; -} - -QStringList 
V4L2Grabber::getFramerates(const QString& devicePath) const -{ - return _deviceProperties.value(devicePath).framerates; -} - -void V4L2Grabber::handleCecEvent(CECEvent event) -{ - switch (event) - { - case CECEvent::On : - Debug(_log,"CEC on event received"); - _cecStandbyActivated = false; - return; - case CECEvent::Off : - Debug(_log,"CEC off event received"); - _cecStandbyActivated = true; - return; - default: break; - } -} diff --git a/libsrc/grabber/v4l2/V4L2Wrapper.cpp b/libsrc/grabber/v4l2/V4L2Wrapper.cpp deleted file mode 100644 index 98207df0..00000000 --- a/libsrc/grabber/v4l2/V4L2Wrapper.cpp +++ /dev/null @@ -1,156 +0,0 @@ -#include - -#include - -// qt -#include - -V4L2Wrapper::V4L2Wrapper(const QString &device, - unsigned grabWidth, - unsigned grabHeight, - unsigned fps, - unsigned input, - VideoStandard videoStandard, - PixelFormat pixelFormat, - int pixelDecimation ) - : GrabberWrapper("V4L2:"+device, &_grabber, grabWidth, grabHeight, 10) - , _grabber(device, - grabWidth, - grabHeight, - fps, - input, - videoStandard, - pixelFormat, - pixelDecimation) -{ - _ggrabber = &_grabber; - - // register the image type - qRegisterMetaType>("Image"); - - // Handle the image in the captured thread using a direct connection - connect(&_grabber, &V4L2Grabber::newFrame, this, &V4L2Wrapper::newFrame, Qt::DirectConnection); - connect(&_grabber, &V4L2Grabber::readError, this, &V4L2Wrapper::readError, Qt::DirectConnection); -} - -V4L2Wrapper::~V4L2Wrapper() -{ - stop(); -} - -bool V4L2Wrapper::start() -{ - return ( _grabber.start() && GrabberWrapper::start()); -} - -void V4L2Wrapper::stop() -{ - _grabber.stop(); - GrabberWrapper::stop(); -} - -void V4L2Wrapper::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold) -{ - _grabber.setSignalThreshold( redSignalThreshold, greenSignalThreshold, blueSignalThreshold, 50); -} - -void V4L2Wrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned 
cropBottom) -{ - _grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom); -} - -void V4L2Wrapper::setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax) -{ - _grabber.setSignalDetectionOffset(verticalMin, horizontalMin, verticalMax, horizontalMax); -} - -void V4L2Wrapper::newFrame(const Image &image) -{ - emit systemImage(_grabberName, image); -} - -void V4L2Wrapper::readError(const char* err) -{ - Error(_log, "stop grabber, because reading device failed. (%s)", err); - stop(); -} - -void V4L2Wrapper::action() -{ - // dummy as v4l get notifications from stream -} - -void V4L2Wrapper::setSignalDetectionEnable(bool enable) -{ - _grabber.setSignalDetectionEnable(enable); -} - -bool V4L2Wrapper::getSignalDetectionEnable() const -{ - return _grabber.getSignalDetectionEnabled(); -} - -void V4L2Wrapper::setCecDetectionEnable(bool enable) -{ - _grabber.setCecDetectionEnable(enable); -} - -bool V4L2Wrapper::getCecDetectionEnable() const -{ - return _grabber.getCecDetectionEnabled(); -} - -void V4L2Wrapper::setDeviceVideoStandard(const QString& device, VideoStandard videoStandard) -{ - _grabber.setDeviceVideoStandard(device, videoStandard); -} - -void V4L2Wrapper::handleCecEvent(CECEvent event) -{ - _grabber.handleCecEvent(event); -} - -void V4L2Wrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config) -{ - if(type == settings::V4L2 && _grabberName.startsWith("V4L")) - { - // extract settings - const QJsonObject& obj = config.object(); - - // pixel decimation for v4l - _grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8)); - - // crop for v4l - _grabber.setCropping( - obj["cropLeft"].toInt(0), - obj["cropRight"].toInt(0), - obj["cropTop"].toInt(0), - obj["cropBottom"].toInt(0)); - - // device input - _grabber.setInput(obj["input"].toInt(-1)); - - // device resolution - _grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0)); - - // device framerate - 
_grabber.setFramerate(obj["fps"].toInt(15)); - - // CEC Standby - _grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true)); - - _grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true)); - _grabber.setSignalDetectionOffset( - obj["sDHOffsetMin"].toDouble(0.25), - obj["sDVOffsetMin"].toDouble(0.25), - obj["sDHOffsetMax"].toDouble(0.75), - obj["sDVOffsetMax"].toDouble(0.75)); - _grabber.setSignalThreshold( - obj["redSignalThreshold"].toDouble(0.0)/100.0, - obj["greenSignalThreshold"].toDouble(0.0)/100.0, - obj["blueSignalThreshold"].toDouble(0.0)/100.0); - _grabber.setDeviceVideoStandard( - obj["device"].toString("auto"), - parseVideoStandard(obj["standard"].toString("no-change"))); - } -} diff --git a/libsrc/grabber/video/CMakeLists.txt b/libsrc/grabber/video/CMakeLists.txt new file mode 100644 index 00000000..43a0e580 --- /dev/null +++ b/libsrc/grabber/video/CMakeLists.txt @@ -0,0 +1,33 @@ +# Common cmake definition for external video grabber + +# Add Turbo JPEG library +if (ENABLE_V4L2 OR ENABLE_MF) + find_package(TurboJPEG) + if (TURBOJPEG_FOUND) + add_definitions(-DHAVE_TURBO_JPEG) + message( STATUS "Using Turbo JPEG library: ${TurboJPEG_LIBRARY}") + include_directories(${TurboJPEG_INCLUDE_DIRS}) + else () + message( STATUS "Turbo JPEG library not found, MJPEG camera format won't work.") + endif () +endif() + +# Define the wrapper/header/source locations and collect them +SET(WRAPPER_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video) +SET(HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber) +if (ENABLE_MF) + project(mf-grabber) + SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/mediafoundation) + FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp") +elseif(ENABLE_V4L2) + project(v4l2-grabber) + SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/v4l2) + FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" 
"${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.cpp") +endif() + +add_library(${PROJECT_NAME} ${SOURCES}) +target_link_libraries(${PROJECT_NAME} hyperion ${QT_LIBRARIES}) + +if(TURBOJPEG_FOUND) + target_link_libraries(${PROJECT_NAME} ${TurboJPEG_LIBRARY}) +endif() diff --git a/libsrc/grabber/video/EncoderThread.cpp b/libsrc/grabber/video/EncoderThread.cpp new file mode 100644 index 00000000..a8098362 --- /dev/null +++ b/libsrc/grabber/video/EncoderThread.cpp @@ -0,0 +1,203 @@ +#include "grabber/EncoderThread.h" + +EncoderThread::EncoderThread() + : _localData(nullptr) + , _scalingFactorsCount(0) + , _imageResampler() +#ifdef HAVE_TURBO_JPEG + , _transform(nullptr) + , _decompress(nullptr) + , _scalingFactors(nullptr) + , _xform(nullptr) +#endif +{} + +EncoderThread::~EncoderThread() +{ +#ifdef HAVE_TURBO_JPEG + if (_transform) + tjDestroy(_transform); + + if (_decompress) + tjDestroy(_decompress); +#endif + + if (_localData) +#ifdef HAVE_TURBO_JPEG + tjFree(_localData); +#else + delete[] _localData; +#endif +} + +void EncoderThread::setup( + PixelFormat pixelFormat, uint8_t* sharedData, + int size, int width, int height, int lineLength, + unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight, + VideoMode videoMode, FlipMode flipMode, int pixelDecimation) +{ + _lineLength = lineLength; + _pixelFormat = pixelFormat; + _size = (unsigned long) size; + _width = width; + _height = height; + _cropLeft = cropLeft; + _cropTop = cropTop; + _cropBottom = cropBottom; + _cropRight = cropRight; + _flipMode = flipMode; + _pixelDecimation = pixelDecimation; + + _imageResampler.setVideoMode(videoMode); + _imageResampler.setFlipMode(_flipMode); + _imageResampler.setCropping(cropLeft, cropRight, cropTop, cropBottom); + _imageResampler.setHorizontalPixelDecimation(_pixelDecimation); + _imageResampler.setVerticalPixelDecimation(_pixelDecimation); + +#ifdef HAVE_TURBO_JPEG + if (_localData) + tjFree(_localData); 
+ + _localData = (uint8_t*)tjAlloc(size + 1); +#else + delete[] _localData; + _localData = nullptr; + _localData = new uint8_t(size + 1); +#endif + + memcpy(_localData, sharedData, size); +} + +void EncoderThread::process() +{ + _busy = true; + if (_width > 0 && _height > 0) + { +#ifdef HAVE_TURBO_JPEG + if (_pixelFormat == PixelFormat::MJPEG) + { + processImageMjpeg(); + } + else +#endif + { + if (_pixelFormat == PixelFormat::BGR24) + { + if (_flipMode == FlipMode::NO_CHANGE) + _imageResampler.setFlipMode(FlipMode::HORIZONTAL); + else if (_flipMode == FlipMode::HORIZONTAL) + _imageResampler.setFlipMode(FlipMode::NO_CHANGE); + else if (_flipMode == FlipMode::VERTICAL) + _imageResampler.setFlipMode(FlipMode::BOTH); + else if (_flipMode == FlipMode::BOTH) + _imageResampler.setFlipMode(FlipMode::VERTICAL); + } + + Image image = Image(); + _imageResampler.processImage( + _localData, + _width, + _height, + _lineLength, +#if defined(ENABLE_V4L2) + _pixelFormat, +#else + PixelFormat::BGR24, +#endif + image + ); + + emit newFrame(image); + } + } + _busy = false; +} + +#ifdef HAVE_TURBO_JPEG +void EncoderThread::processImageMjpeg() +{ + if (!_transform && _flipMode != FlipMode::NO_CHANGE) + { + _transform = tjInitTransform(); + _xform = new tjtransform(); + } + + if (_flipMode == FlipMode::BOTH || _flipMode == FlipMode::HORIZONTAL) + { + _xform->op = TJXOP_HFLIP; + tjTransform(_transform, _localData, _size, 1, &_localData, &_size, _xform, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE); + } + + if (_flipMode == FlipMode::BOTH || _flipMode == FlipMode::VERTICAL) + { + _xform->op = TJXOP_VFLIP; + tjTransform(_transform, _localData, _size, 1, &_localData, &_size, _xform, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE); + } + + if (!_decompress) + { + _decompress = tjInitDecompress(); + _scalingFactors = tjGetScalingFactors(&_scalingFactorsCount); + } + + int subsamp = 0; + if (tjDecompressHeader2(_decompress, _localData, _size, &_width, &_height, &subsamp) != 0) + return; + + int scaledWidth = 
_width, scaledHeight = _height; + if(_scalingFactors != nullptr && _pixelDecimation > 1) + { + for (int i = 0; i < _scalingFactorsCount ; i++) + { + const int tempWidth = TJSCALED(_width, _scalingFactors[i]); + const int tempHeight = TJSCALED(_height, _scalingFactors[i]); + if (tempWidth <= _width/_pixelDecimation && tempHeight <= _height/_pixelDecimation) + { + scaledWidth = tempWidth; + scaledHeight = tempHeight; + break; + } + } + + if (scaledWidth == _width && scaledHeight == _height) + { + scaledWidth = TJSCALED(_width, _scalingFactors[_scalingFactorsCount-1]); + scaledHeight = TJSCALED(_height, _scalingFactors[_scalingFactorsCount-1]); + } + } + + Image srcImage(scaledWidth, scaledHeight); + + if (tjDecompress2(_decompress, _localData , _size, (unsigned char*)srcImage.memptr(), scaledWidth, 0, scaledHeight, TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE) != 0) + return; + + // got image, process it + if (!(_cropLeft > 0 || _cropTop > 0 || _cropBottom > 0 || _cropRight > 0)) + emit newFrame(srcImage); + else + { + // calculate the output size + int outputWidth = (_width - _cropLeft - _cropRight); + int outputHeight = (_height - _cropTop - _cropBottom); + + if (outputWidth <= 0 || outputHeight <= 0) + return; + + Image destImage(outputWidth, outputHeight); + + for (unsigned int y = 0; y < destImage.height(); y++) + { + unsigned char* source = (unsigned char*)srcImage.memptr() + (y + _cropTop)*srcImage.width()*3 + _cropLeft*3; + unsigned char* dest = (unsigned char*)destImage.memptr() + y*destImage.width()*3; + memcpy(dest, source, destImage.width()*3); + free(source); + source = nullptr; + free(dest); + dest = nullptr; + } + + // emit + emit newFrame(destImage); + } +} +#endif diff --git a/libsrc/grabber/video/VideoWrapper.cpp b/libsrc/grabber/video/VideoWrapper.cpp new file mode 100644 index 00000000..bd5ef76c --- /dev/null +++ b/libsrc/grabber/video/VideoWrapper.cpp @@ -0,0 +1,149 @@ +#include + +#include + +// qt includes +#include + 
+VideoWrapper::VideoWrapper() +#if defined(ENABLE_V4L2) + : GrabberWrapper("V4L2", &_grabber) +#elif defined(ENABLE_MF) + : GrabberWrapper("V4L2:MEDIA_FOUNDATION", &_grabber) +#endif + , _grabber() +{ + // register the image type + qRegisterMetaType>("Image"); + + // Handle the image in the captured thread (Media Foundation/V4L2) using a direct connection + connect(&_grabber, SIGNAL(newFrame(const Image&)), this, SLOT(newFrame(const Image&)), Qt::DirectConnection); + connect(&_grabber, SIGNAL(readError(const char*)), this, SLOT(readError(const char*)), Qt::DirectConnection); +} + +VideoWrapper::~VideoWrapper() +{ + stop(); +} + +bool VideoWrapper::start() +{ + return (_grabber.prepare() && _grabber.start() && GrabberWrapper::start()); +} + +void VideoWrapper::stop() +{ + _grabber.stop(); + GrabberWrapper::stop(); +} + +#if defined(ENABLE_CEC) && !defined(ENABLE_MF) + +void VideoWrapper::handleCecEvent(CECEvent event) +{ + _grabber.handleCecEvent(event); +} + +#endif + +void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config) +{ + if(type == settings::V4L2 && _grabberName.startsWith("V4L2")) + { + // extract settings + const QJsonObject& obj = config.object(); + + // set global grabber state + setV4lGrabberState(obj["enable"].toBool(false)); + + if (getV4lGrabberState()) + { +#if defined(ENABLE_MF) + // Device path + _grabber.setDevice(obj["device"].toString("none")); +#endif + +#if defined(ENABLE_V4L2) + // Device path and name + _grabber.setDevice(obj["device"].toString("none"), obj["available_devices"].toString("none")); +#endif + + // Device input + _grabber.setInput(obj["input"].toInt(0)); + + // Device resolution + _grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0)); + + // Device framerate + _grabber.setFramerate(obj["fps"].toInt(15)); + + // Device encoding format + _grabber.setEncoding(obj["encoding"].toString("NO_CHANGE")); + + // Video standard + 
_grabber.setVideoStandard(parseVideoStandard(obj["standard"].toString("NO_CHANGE"))); + + // Image size decimation + _grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8)); + + // Flip mode + _grabber.setFlipMode(parseFlipMode(obj["flip"].toString("NO_CHANGE"))); + + // Image cropping + _grabber.setCropping( + obj["cropLeft"].toInt(0), + obj["cropRight"].toInt(0), + obj["cropTop"].toInt(0), + obj["cropBottom"].toInt(0)); + + // Brightness, Contrast, Saturation, Hue + _grabber.setBrightnessContrastSaturationHue( + obj["hardware_brightness"].toInt(0), + obj["hardware_contrast"].toInt(0), + obj["hardware_saturation"].toInt(0), + obj["hardware_hue"].toInt(0)); + +#if defined(ENABLE_CEC) && defined(ENABLE_V4L2) + // CEC Standby + _grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true)); +#endif + + // Software frame skipping + _grabber.setFpsSoftwareDecimation(obj["fpsSoftwareDecimation"].toInt(1)); + + // Signal detection + _grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true)); + _grabber.setSignalDetectionOffset( + obj["sDHOffsetMin"].toDouble(0.25), + obj["sDVOffsetMin"].toDouble(0.25), + obj["sDHOffsetMax"].toDouble(0.75), + obj["sDVOffsetMax"].toDouble(0.75)); + _grabber.setSignalThreshold( + obj["redSignalThreshold"].toDouble(0.0)/100.0, + obj["greenSignalThreshold"].toDouble(0.0)/100.0, + obj["blueSignalThreshold"].toDouble(0.0)/100.0, + obj["noSignalCounterThreshold"].toInt(50)); + + // Reload the Grabber if any settings have been changed that require it + _grabber.reload(getV4lGrabberState()); + } + else + stop(); + } +} + +void VideoWrapper::newFrame(const Image &image) +{ + emit systemImage(_grabberName, image); +} + +void VideoWrapper::readError(const char* err) +{ + Error(_log, "Stop grabber, because reading device failed. 
(%s)", err); + stop(); +} + +void VideoWrapper::action() +{ + // dummy as v4l get notifications from stream +} diff --git a/libsrc/grabber/video/mediafoundation/MFGrabber.cpp b/libsrc/grabber/video/mediafoundation/MFGrabber.cpp new file mode 100644 index 00000000..a7892479 --- /dev/null +++ b/libsrc/grabber/video/mediafoundation/MFGrabber.cpp @@ -0,0 +1,813 @@ +#include "MFSourceReaderCB.h" +#include "grabber/MFGrabber.h" + +// Constants +namespace { const bool verbose = false; } + +// Need more video properties? Visit https://docs.microsoft.com/en-us/windows/win32/api/strmif/ne-strmif-videoprocampproperty +using VideoProcAmpPropertyMap = QMap; +inline QMap initVideoProcAmpPropertyMap() +{ + QMap propertyMap + { + {VideoProcAmp_Brightness, "brightness" }, + {VideoProcAmp_Contrast , "contrast" }, + {VideoProcAmp_Saturation, "saturation" }, + {VideoProcAmp_Hue , "hue" } + }; + + return propertyMap; +}; + +Q_GLOBAL_STATIC_WITH_ARGS(VideoProcAmpPropertyMap, _videoProcAmpPropertyMap, (initVideoProcAmpPropertyMap())); + +MFGrabber::MFGrabber() + : Grabber("V4L2:MEDIA_FOUNDATION") + , _currentDeviceName("none") + , _newDeviceName("none") + , _hr(S_FALSE) + , _sourceReader(nullptr) + , _sourceReaderCB(nullptr) + , _threadManager(nullptr) + , _pixelFormat(PixelFormat::NO_CHANGE) + , _pixelFormatConfig(PixelFormat::NO_CHANGE) + , _lineLength(-1) + , _frameByteSize(-1) + , _noSignalCounterThreshold(40) + , _noSignalCounter(0) + , _brightness(0) + , _contrast(0) + , _saturation(0) + , _hue(0) + , _currentFrame(0) + , _noSignalThresholdColor(ColorRgb{0,0,0}) + , _signalDetectionEnabled(true) + , _noSignalDetected(false) + , _initialized(false) + , _reload(false) + , _x_frac_min(0.25) + , _y_frac_min(0.25) + , _x_frac_max(0.75) + , _y_frac_max(0.75) +{ + CoInitializeEx(0, COINIT_MULTITHREADED); + _hr = MFStartup(MF_VERSION, MFSTARTUP_NOSOCKET); + if (FAILED(_hr)) + CoUninitialize(); +} + +MFGrabber::~MFGrabber() +{ + uninit(); + + SAFE_RELEASE(_sourceReader); + + if 
(_sourceReaderCB != nullptr) + while (_sourceReaderCB->isBusy()) {} + + SAFE_RELEASE(_sourceReaderCB); + + if (_threadManager) + delete _threadManager; + _threadManager = nullptr; + + if (SUCCEEDED(_hr) && SUCCEEDED(MFShutdown())) + CoUninitialize(); +} + +bool MFGrabber::prepare() +{ + if (SUCCEEDED(_hr)) + { + if (!_sourceReaderCB) + _sourceReaderCB = new SourceReaderCB(this); + + if (!_threadManager) + _threadManager = new EncoderThreadManager(this); + + return (_sourceReaderCB != nullptr && _threadManager != nullptr); + } + + return false; +} + +bool MFGrabber::start() +{ + if (!_initialized) + { + if (init()) + { + connect(_threadManager, &EncoderThreadManager::newFrame, this, &MFGrabber::newThreadFrame); + _threadManager->start(); + DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount); + + start_capturing(); + Info(_log, "Started"); + return true; + } + else + { + Error(_log, "The Media Foundation Grabber could not be started"); + return false; + } + } + else + return true; +} + +void MFGrabber::stop() +{ + if (_initialized) + { + _initialized = false; + _threadManager->stop(); + disconnect(_threadManager, nullptr, nullptr, nullptr); + _sourceReader->Flush(MF_SOURCE_READER_FIRST_VIDEO_STREAM); + SAFE_RELEASE(_sourceReader); + _deviceProperties.clear(); + _deviceControls.clear(); + Info(_log, "Stopped"); + } +} + +bool MFGrabber::init() +{ + // enumerate the video capture devices on the user's system + enumVideoCaptureDevices(); + + if (!_initialized && SUCCEEDED(_hr)) + { + int deviceIndex = -1; + bool noDeviceName = _currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName.compare("auto", Qt::CaseInsensitive) == 0; + + if (noDeviceName) + return false; + + if (!_deviceProperties.contains(_currentDeviceName)) + { + Debug(_log, "Configured device '%s' is not available.", QSTRING_CSTR(_currentDeviceName)); + return false; + } + + Debug(_log, "Searching for %s %d x %d @ %d fps (%s)", 
QSTRING_CSTR(_currentDeviceName), _width, _height,_fps, QSTRING_CSTR(pixelFormatToString(_pixelFormat))); + + QList dev = _deviceProperties[_currentDeviceName]; + for ( int i = 0; i < dev.count() && deviceIndex < 0; ++i ) + { + if (dev[i].width != _width || dev[i].height != _height || dev[i].fps != _fps || dev[i].pf != _pixelFormat) + continue; + else + deviceIndex = i; + } + + if (deviceIndex >= 0 && SUCCEEDED(init_device(_currentDeviceName, dev[deviceIndex]))) + { + _initialized = true; + _newDeviceName = _currentDeviceName; + } + else + { + Debug(_log, "Configured device '%s' is not available.", QSTRING_CSTR(_currentDeviceName)); + return false; + } + + } + + return _initialized; +} + +void MFGrabber::uninit() +{ + // stop if the grabber was not stopped + if (_initialized) + { + Debug(_log,"Uninit grabber: %s", QSTRING_CSTR(_newDeviceName)); + stop(); + } +} + +HRESULT MFGrabber::init_device(QString deviceName, DeviceProperties props) +{ + PixelFormat pixelformat = GetPixelFormatForGuid(props.guid); + QString error; + IMFMediaSource* device = nullptr; + IMFAttributes* deviceAttributes = nullptr, *sourceReaderAttributes = nullptr; + IMFMediaType* type = nullptr; + HRESULT hr = S_OK; + + Debug(_log, "Init %s, %d x %d @ %d fps (%s)", QSTRING_CSTR(deviceName), props.width, props.height, props.fps, QSTRING_CSTR(pixelFormatToString(pixelformat))); + DebugIf (verbose, _log, "Symbolic link: %s", QSTRING_CSTR(props.symlink)); + + hr = MFCreateAttributes(&deviceAttributes, 2); + if (FAILED(hr)) + { + error = QString("Could not create device attributes (%1)").arg(hr); + goto done; + } + + hr = deviceAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); + if (FAILED(hr)) + { + error = QString("SetGUID_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE (%1)").arg(hr); + goto done; + } + + if (FAILED(deviceAttributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, (LPCWSTR)props.symlink.utf16()))) + { + error = 
QString("IMFAttributes_SetString_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK (%1)").arg(hr); + goto done; + } + + hr = MFCreateDeviceSource(deviceAttributes, &device); + if (FAILED(hr)) + { + error = QString("MFCreateDeviceSource (%1)").arg(hr); + goto done; + } + + if (!device) + { + error = QString("Could not open device (%1)").arg(hr); + goto done; + } + else + Debug(_log, "Device opened"); + + IAMVideoProcAmp *pProcAmp = nullptr; + if (SUCCEEDED(device->QueryInterface(IID_PPV_ARGS(&pProcAmp)))) + { + for (auto control : _deviceControls[deviceName]) + { + switch (_videoProcAmpPropertyMap->key(control.property)) + { + case VideoProcAmpProperty::VideoProcAmp_Brightness: + if (_brightness >= control.minValue && _brightness <= control.maxValue && _brightness != control.currentValue) + { + Debug(_log,"Set brightness to %i", _brightness); + pProcAmp->Set(VideoProcAmp_Brightness, _brightness, VideoProcAmp_Flags_Manual); + } + break; + case VideoProcAmpProperty::VideoProcAmp_Contrast: + if (_contrast >= control.minValue && _contrast <= control.maxValue && _contrast != control.currentValue) + { + Debug(_log,"Set contrast to %i", _contrast); + pProcAmp->Set(VideoProcAmp_Contrast, _contrast, VideoProcAmp_Flags_Manual); + } + break; + case VideoProcAmpProperty::VideoProcAmp_Saturation: + if (_saturation >= control.minValue && _saturation <= control.maxValue && _saturation != control.currentValue) + { + Debug(_log,"Set saturation to %i", _saturation); + pProcAmp->Set(VideoProcAmp_Saturation, _saturation, VideoProcAmp_Flags_Manual); + } + break; + case VideoProcAmpProperty::VideoProcAmp_Hue: + if (_hue >= control.minValue && _hue <= control.maxValue && _hue != control.currentValue) + { + Debug(_log,"Set hue to %i", _hue); + pProcAmp->Set(VideoProcAmp_Hue, _hue, VideoProcAmp_Flags_Manual); + } + break; + default: + break; + } + } + } + + hr = MFCreateAttributes(&sourceReaderAttributes, 1); + if (FAILED(hr)) + { + error = QString("Could not create Source Reader 
attributes (%1)").arg(hr); + goto done; + } + + hr = sourceReaderAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, (IMFSourceReaderCallback *)_sourceReaderCB); + if (FAILED(hr)) + { + error = QString("Could not set stream parameter: SetUnknown_MF_SOURCE_READER_ASYNC_CALLBACK (%1)").arg(hr); + hr = E_INVALIDARG; + goto done; + } + + hr = MFCreateSourceReaderFromMediaSource(device, sourceReaderAttributes, &_sourceReader); + if (FAILED(hr)) + { + error = QString("Could not create the Source Reader (%1)").arg(hr); + goto done; + } + + hr = MFCreateMediaType(&type); + if (FAILED(hr)) + { + error = QString("Could not create an empty media type (%1)").arg(hr); + goto done; + } + + hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + if (FAILED(hr)) + { + error = QString("Could not set stream parameter: SetGUID_MF_MT_MAJOR_TYPE (%1)").arg(hr); + goto done; + } + + hr = type->SetGUID(MF_MT_SUBTYPE, props.guid); + if (FAILED(hr)) + { + error = QString("Could not set stream parameter: SetGUID_MF_MT_SUBTYPE (%1)").arg(hr); + goto done; + } + + hr = MFSetAttributeSize(type, MF_MT_FRAME_SIZE, props.width, props.height); + if (FAILED(hr)) + { + error = QString("Could not set stream parameter: SMFSetAttributeSize_MF_MT_FRAME_SIZE (%1)").arg(hr); + goto done; + } + + hr = MFSetAttributeSize(type, MF_MT_FRAME_RATE, props.numerator, props.denominator); + if (FAILED(hr)) + { + error = QString("Could not set stream parameter: MFSetAttributeSize_MF_MT_FRAME_RATE (%1)").arg(hr); + goto done; + } + + hr = MFSetAttributeRatio(type, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + if (FAILED(hr)) + { + error = QString("Could not set stream parameter: MFSetAttributeRatio_MF_MT_PIXEL_ASPECT_RATIO (%1)").arg(hr); + goto done; + } + + hr = _sourceReaderCB->InitializeVideoEncoder(type, pixelformat); + if (FAILED(hr)) + { + error = QString("Failed to initialize the Video Encoder (%1)").arg(hr); + goto done; + } + + hr = _sourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
nullptr, type); + if (FAILED(hr)) + { + error = QString("Failed to set media type on Source Reader (%1)").arg(hr); + } + +done: + if (FAILED(hr)) + { + emit readError(QSTRING_CSTR(error)); + SAFE_RELEASE(_sourceReader); + } + else + { + _pixelFormat = props.pf; + _width = props.width; + _height = props.height; + _frameByteSize = _width * _height * 3; + _lineLength = _width * 3; + } + + // Cleanup + SAFE_RELEASE(deviceAttributes); + SAFE_RELEASE(device); + SAFE_RELEASE(pProcAmp); + SAFE_RELEASE(type); + SAFE_RELEASE(sourceReaderAttributes); + + return hr; +} + +void MFGrabber::enumVideoCaptureDevices() +{ + _deviceProperties.clear(); + _deviceControls.clear(); + + IMFAttributes* attr; + if (SUCCEEDED(MFCreateAttributes(&attr, 1))) + { + if (SUCCEEDED(attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID))) + { + UINT32 count; + IMFActivate** devices; + if (SUCCEEDED(MFEnumDeviceSources(attr, &devices, &count))) + { + DebugIf (verbose, _log, "Detected devices: %u", count); + for (UINT32 i = 0; i < count; i++) + { + UINT32 length; + LPWSTR name; + LPWSTR symlink; + + if (SUCCEEDED(devices[i]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &length))) + { + if (SUCCEEDED(devices[i]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &symlink, &length))) + { + QList devicePropertyList; + QString dev = QString::fromUtf16((const ushort*)name); + + IMFMediaSource *pSource = nullptr; + if (SUCCEEDED(devices[i]->ActivateObject(IID_PPV_ARGS(&pSource)))) + { + DebugIf (verbose, _log, "Found capture device: %s", QSTRING_CSTR(dev)); + + IMFMediaType *pType = nullptr; + IMFSourceReader* reader; + if (SUCCEEDED(MFCreateSourceReaderFromMediaSource(pSource, NULL, &reader))) + { + for (DWORD i = 0; ; i++) + { + if (FAILED(reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i, &pType))) + break; + + GUID format; + UINT32 width = 0, height = 0, numerator = 0, denominator = 0; + + if ( 
SUCCEEDED(pType->GetGUID(MF_MT_SUBTYPE, &format)) && + SUCCEEDED(MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height)) && + SUCCEEDED(MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &numerator, &denominator))) + { + PixelFormat pixelformat = GetPixelFormatForGuid(format); + if (pixelformat != PixelFormat::NO_CHANGE) + { + DeviceProperties properties; + properties.symlink = QString::fromUtf16((const ushort*)symlink); + properties.width = width; + properties.height = height; + properties.fps = numerator / denominator; + properties.numerator = numerator; + properties.denominator = denominator; + properties.pf = pixelformat; + properties.guid = format; + devicePropertyList.append(properties); + + DebugIf (verbose, _log, "%s %d x %d @ %d fps (%s)", QSTRING_CSTR(dev), properties.width, properties.height, properties.fps, QSTRING_CSTR(pixelFormatToString(properties.pf))); + } + } + + SAFE_RELEASE(pType); + } + + IAMVideoProcAmp *videoProcAmp = nullptr; + if (SUCCEEDED(pSource->QueryInterface(IID_PPV_ARGS(&videoProcAmp)))) + { + QList deviceControlList; + for (auto it = _videoProcAmpPropertyMap->begin(); it != _videoProcAmpPropertyMap->end(); it++) + { + long minVal, maxVal, stepVal, defaultVal, flag; + if (SUCCEEDED(videoProcAmp->GetRange(it.key(), &minVal, &maxVal, &stepVal, &defaultVal, &flag))) + { + if (flag & VideoProcAmp_Flags_Manual) + { + DeviceControls control; + control.property = it.value(); + control.minValue = minVal; + control.maxValue = maxVal; + control.step = stepVal; + control.default = defaultVal; + + long currentVal; + if (SUCCEEDED(videoProcAmp->Get(it.key(), ¤tVal, &flag))) + { + control.currentValue = currentVal; + DebugIf(verbose, _log, "%s: min=%i, max=%i, step=%i, default=%i, current=%i", QSTRING_CSTR(it.value()), minVal, maxVal, stepVal, defaultVal, currentVal); + } + else + break; + + deviceControlList.append(control); + } + } + + } + + if (!deviceControlList.isEmpty()) + _deviceControls.insert(dev, deviceControlList); + } + + 
SAFE_RELEASE(videoProcAmp); + SAFE_RELEASE(reader); + } + + SAFE_RELEASE(pSource); + } + + if (!devicePropertyList.isEmpty()) + _deviceProperties.insert(dev, devicePropertyList); + } + + CoTaskMemFree(symlink); + } + + CoTaskMemFree(name); + SAFE_RELEASE(devices[i]); + } + + CoTaskMemFree(devices); + } + + SAFE_RELEASE(attr); + } + } +} + +void MFGrabber::start_capturing() +{ + if (_initialized && _sourceReader && _threadManager) + { + HRESULT hr = _sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL); + if (!SUCCEEDED(hr)) + Error(_log, "ReadSample (%i)", hr); + } +} + +void MFGrabber::process_image(const void *frameImageBuffer, int size) +{ + int processFrameIndex = _currentFrame++; + + // frame skipping + if ((processFrameIndex % (_fpsSoftwareDecimation + 1) != 0) && (_fpsSoftwareDecimation > 0)) + return; + + // We do want a new frame... + if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG) + Error(_log, "Frame too small: %d != %d", size, _frameByteSize); + else if (_threadManager != nullptr) + { + for (int i = 0; i < _threadManager->_threadCount; i++) + { + if (!_threadManager->_threads[i]->isBusy()) + { + _threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)frameImageBuffer, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation); + _threadManager->_threads[i]->process(); + break; + } + } + } +} + +void MFGrabber::receive_image(const void *frameImageBuffer, int size) +{ + process_image(frameImageBuffer, size); + start_capturing(); +} + +void MFGrabber::newThreadFrame(Image image) +{ + if (_signalDetectionEnabled) + { + // check signal (only in center of the resulting image, because some grabbers have noise values along the borders) + bool noSignal = true; + + // top left + unsigned xOffset = image.width() * _x_frac_min; + unsigned yOffset = image.height() * _y_frac_min; + + // bottom right + unsigned xMax = image.width() * _x_frac_max; + 
unsigned yMax = image.height() * _y_frac_max; + + for (unsigned x = xOffset; noSignal && x < xMax; ++x) + for (unsigned y = yOffset; noSignal && y < yMax; ++y) + noSignal &= (ColorRgb&)image(x, y) <= _noSignalThresholdColor; + + if (noSignal) + ++_noSignalCounter; + else + { + if (_noSignalCounter >= _noSignalCounterThreshold) + { + _noSignalDetected = true; + Info(_log, "Signal detected"); + } + + _noSignalCounter = 0; + } + + if ( _noSignalCounter < _noSignalCounterThreshold) + { + emit newFrame(image); + } + else if (_noSignalCounter == _noSignalCounterThreshold) + { + _noSignalDetected = false; + Info(_log, "Signal lost"); + } + } + else + emit newFrame(image); +} + +void MFGrabber::setDevice(const QString& device) +{ + if (_currentDeviceName != device) + { + _currentDeviceName = device; + _reload = true; + } +} + +bool MFGrabber::setInput(int input) +{ + if (Grabber::setInput(input)) + { + _reload = true; + return true; + } + + return false; +} + +bool MFGrabber::setWidthHeight(int width, int height) +{ + if (Grabber::setWidthHeight(width, height)) + { + _reload = true; + return true; + } + + return false; +} + +void MFGrabber::setEncoding(QString enc) +{ + if (_pixelFormatConfig != parsePixelFormat(enc)) + { + _pixelFormatConfig = parsePixelFormat(enc); + if (_initialized) + { + Debug(_log,"Set hardware encoding to: %s", QSTRING_CSTR(enc.toUpper())); + _reload = true; + } + else + _pixelFormat = _pixelFormatConfig; + } +} + +void MFGrabber::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue) +{ + if (_brightness != brightness || _contrast != contrast || _saturation != saturation || _hue != hue) + { + _brightness = brightness; + _contrast = contrast; + _saturation = saturation; + _hue = hue; + + _reload = true; + } +} + +void MFGrabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold) +{ + _noSignalThresholdColor.red = 
uint8_t(255*redSignalThreshold); + _noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold); + _noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold); + _noSignalCounterThreshold = qMax(1, noSignalCounterThreshold); + + if (_signalDetectionEnabled) + Info(_log, "Signal threshold set to: {%d, %d, %d} and frames: %d", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue, _noSignalCounterThreshold ); +} + +void MFGrabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax) +{ + // rainbow 16 stripes 0.47 0.2 0.49 0.8 + // unicolor: 0.25 0.25 0.75 0.75 + + _x_frac_min = horizontalMin; + _y_frac_min = verticalMin; + _x_frac_max = horizontalMax; + _y_frac_max = verticalMax; + + if (_signalDetectionEnabled) + Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max ); +} + +void MFGrabber::setSignalDetectionEnable(bool enable) +{ + if (_signalDetectionEnabled != enable) + { + _signalDetectionEnabled = enable; + if (_initialized) + Info(_log, "Signal detection is now %s", enable ? 
"enabled" : "disabled"); + } +} + +bool MFGrabber::reload(bool force) +{ + if (_reload || force) + { + if (_sourceReader) + { + Info(_log,"Reloading Media Foundation Grabber"); + uninit(); + _pixelFormat = _pixelFormatConfig; + _newDeviceName = _currentDeviceName; + } + + _reload = false; + return prepare() && start(); + } + + return false; +} + +QJsonArray MFGrabber::discover(const QJsonObject& params) +{ + DebugIf (verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + enumVideoCaptureDevices(); + + QJsonArray inputsDiscovered; + for (auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it) + { + QJsonObject device, in; + QJsonArray video_inputs, formats; + + device["device"] = it.key(); + device["device_name"] = it.key(); + device["type"] = "v4l2"; + + in["name"] = ""; + in["inputIdx"] = 0; + + QStringList encodingFormats = QStringList(); + for (int i = 0; i < _deviceProperties[it.key()].count(); ++i ) + if (!encodingFormats.contains(pixelFormatToString(_deviceProperties[it.key()][i].pf), Qt::CaseInsensitive)) + encodingFormats << pixelFormatToString(_deviceProperties[it.key()][i].pf).toLower(); + + for (auto encodingFormat : encodingFormats) + { + QJsonObject format; + QJsonArray resolutionArray; + + format["format"] = encodingFormat; + + QMultiMap deviceResolutions = QMultiMap(); + for (int i = 0; i < _deviceProperties[it.key()].count(); ++i ) + if (!deviceResolutions.contains(_deviceProperties[it.key()][i].width, _deviceProperties[it.key()][i].height) && _deviceProperties[it.key()][i].pf == parsePixelFormat(encodingFormat)) + deviceResolutions.insert(_deviceProperties[it.key()][i].width, _deviceProperties[it.key()][i].height); + + for (auto width_height = deviceResolutions.begin(); width_height != deviceResolutions.end(); width_height++) + { + QJsonObject resolution; + QJsonArray fps; + + resolution["width"] = width_height.key(); + resolution["height"] = width_height.value(); + 
+ QIntList framerates = QIntList(); + for (int i = 0; i < _deviceProperties[it.key()].count(); ++i ) + { + int fps = _deviceProperties[it.key()][i].numerator / _deviceProperties[it.key()][i].denominator; + if (!framerates.contains(fps) && _deviceProperties[it.key()][i].pf == parsePixelFormat(encodingFormat) && _deviceProperties[it.key()][i].width == width_height.key() && _deviceProperties[it.key()][i].height == width_height.value()) + framerates << fps; + } + + for (auto framerate : framerates) + fps.append(framerate); + + resolution["fps"] = fps; + resolutionArray.append(resolution); + } + + format["resolutions"] = resolutionArray; + formats.append(format); + } + in["formats"] = formats; + video_inputs.append(in); + device["video_inputs"] = video_inputs; + + QJsonObject controls, controls_default; + for (auto control : _deviceControls[it.key()]) + { + QJsonObject property; + property["minValue"] = control.minValue; + property["maxValue"] = control.maxValue; + property["step"] = control.step; + property["current"] = control.currentValue; + controls[control.property] = property; + controls_default[control.property] = control.default; + } + device["properties"] = controls; + + QJsonObject defaults, video_inputs_default, format_default, resolution_default; + resolution_default["width"] = 640; + resolution_default["height"] = 480; + resolution_default["fps"] = 25; + format_default["format"] = "bgr24"; + format_default["resolution"] = resolution_default; + video_inputs_default["inputIdx"] = 0; + video_inputs_default["standards"] = "PAL"; + video_inputs_default["formats"] = format_default; + + defaults["video_input"] = video_inputs_default; + defaults["properties"] = controls_default; + device["default"] = defaults; + + inputsDiscovered.append(device); + } + + _deviceProperties.clear(); + _deviceControls.clear(); + DebugIf (verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return 
inputsDiscovered; +} diff --git a/libsrc/grabber/video/mediafoundation/MFSourceReaderCB.h b/libsrc/grabber/video/mediafoundation/MFSourceReaderCB.h new file mode 100644 index 00000000..0185bafa --- /dev/null +++ b/libsrc/grabber/video/mediafoundation/MFSourceReaderCB.h @@ -0,0 +1,401 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#pragma comment (lib, "ole32.lib") +#pragma comment (lib, "mf.lib") +#pragma comment (lib, "mfplat.lib") +#pragma comment (lib, "mfuuid.lib") +#pragma comment (lib, "mfreadwrite.lib") +#pragma comment (lib, "strmiids.lib") +#pragma comment (lib, "wmcodecdspuuid.lib") + +#include + +#define SAFE_RELEASE(x) if(x) { x->Release(); x = nullptr; } + +// Need more supported formats? Visit https://docs.microsoft.com/en-us/windows/win32/medfound/colorconverter +static PixelFormat GetPixelFormatForGuid(const GUID guid) +{ + if (IsEqualGUID(guid, MFVideoFormat_RGB32)) return PixelFormat::RGB32; + if (IsEqualGUID(guid, MFVideoFormat_RGB24)) return PixelFormat::BGR24; + if (IsEqualGUID(guid, MFVideoFormat_YUY2)) return PixelFormat::YUYV; + if (IsEqualGUID(guid, MFVideoFormat_UYVY)) return PixelFormat::UYVY; + if (IsEqualGUID(guid, MFVideoFormat_MJPG)) return PixelFormat::MJPEG; + if (IsEqualGUID(guid, MFVideoFormat_NV12)) return PixelFormat::NV12; + if (IsEqualGUID(guid, MFVideoFormat_I420)) return PixelFormat::I420; + return PixelFormat::NO_CHANGE; +}; + +class SourceReaderCB : public IMFSourceReaderCallback +{ +public: + SourceReaderCB(MFGrabber* grabber) + : _nRefCount(1) + , _grabber(grabber) + , _bEOS(FALSE) + , _hrStatus(S_OK) + , _isBusy(false) + , _transform(nullptr) + , _pixelformat(PixelFormat::NO_CHANGE) + { + // Initialize critical section. 
+ InitializeCriticalSection(&_critsec); + } + + // IUnknown methods + STDMETHODIMP QueryInterface(REFIID iid, void** ppv) + { + static const QITAB qit[] = + { + QITABENT(SourceReaderCB, IMFSourceReaderCallback), + { 0 }, + }; + return QISearch(this, qit, iid, ppv); + } + + STDMETHODIMP_(ULONG) AddRef() + { + return InterlockedIncrement(&_nRefCount); + } + + STDMETHODIMP_(ULONG) Release() + { + ULONG uCount = InterlockedDecrement(&_nRefCount); + if (uCount == 0) + { + delete this; + } + return uCount; + } + + // IMFSourceReaderCallback methods + STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD /*dwStreamIndex*/, + DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample* pSample) + { + EnterCriticalSection(&_critsec); + _isBusy = true; + + if (_grabber->_sourceReader == nullptr) + { + _isBusy = false; + LeaveCriticalSection(&_critsec); + return S_OK; + } + + if (dwStreamFlags & MF_SOURCE_READERF_STREAMTICK) + { + Debug(_grabber->_log, "Skipping stream gap"); + LeaveCriticalSection(&_critsec); + _grabber->_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, nullptr, nullptr, nullptr, nullptr); + return S_OK; + } + + if (dwStreamFlags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED) + { + IMFMediaType* type = nullptr; + GUID format; + _grabber->_sourceReader->GetNativeMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, MF_SOURCE_READER_CURRENT_TYPE_INDEX, &type); + type->GetGUID(MF_MT_SUBTYPE, &format); + Debug(_grabber->_log, "Native media type changed"); + InitializeVideoEncoder(type, GetPixelFormatForGuid(format)); + SAFE_RELEASE(type); + } + + if (dwStreamFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) + { + IMFMediaType* type = nullptr; + GUID format; + _grabber->_sourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, &type); + type->GetGUID(MF_MT_SUBTYPE, &format); + Debug(_grabber->_log, "Current media type changed"); + InitializeVideoEncoder(type, GetPixelFormatForGuid(format)); + SAFE_RELEASE(type); + } + + // Variables declaration + 
IMFMediaBuffer* buffer = nullptr; + + if (FAILED(hrStatus)) + { + _hrStatus = hrStatus; + _com_error error(_hrStatus); + Error(_grabber->_log, "%s", error.ErrorMessage()); + goto done; + } + + if (!pSample) + { + Error(_grabber->_log, "Media sample is empty"); + goto done; + } + + if (_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::BGR24 && _pixelformat != PixelFormat::NO_CHANGE) + pSample = TransformSample(_transform, pSample); + + _hrStatus = pSample->ConvertToContiguousBuffer(&buffer); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Buffer conversion failed => %s", error.ErrorMessage()); + goto done; + } + + BYTE* data = nullptr; + DWORD maxLength = 0, currentLength = 0; + _hrStatus = buffer->Lock(&data, &maxLength, ¤tLength); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Access to the buffer memory failed => %s", error.ErrorMessage()); + goto done; + } + + _grabber->receive_image(data, currentLength); + + _hrStatus = buffer->Unlock(); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Unlocking the buffer memory failed => %s", error.ErrorMessage()); + } + + done: + SAFE_RELEASE(buffer); + + if (MF_SOURCE_READERF_ENDOFSTREAM & dwStreamFlags) + _bEOS = TRUE; // Reached the end of the stream. 
+ + if (_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::BGR24 && _pixelformat != PixelFormat::NO_CHANGE) + SAFE_RELEASE(pSample); + + _isBusy = false; + LeaveCriticalSection(&_critsec); + return _hrStatus; + } + + HRESULT SourceReaderCB::InitializeVideoEncoder(IMFMediaType* type, PixelFormat format) + { + _pixelformat = format; + if (format == PixelFormat::MJPEG || format == PixelFormat::BGR24 || format == PixelFormat::NO_CHANGE) + return S_OK; + + // Variable declaration + IMFMediaType* output = nullptr; + DWORD mftStatus = 0; + QString error = ""; + + // Create instance of IMFTransform interface pointer as CColorConvertDMO + _hrStatus = CoCreateInstance(CLSID_CColorConvertDMO, nullptr, CLSCTX_INPROC_SERVER, IID_IMFTransform, (void**)&_transform); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Creation of the Color Converter failed => %s", error.ErrorMessage()); + goto done; + } + + // Set input type as media type of our input stream + _hrStatus = _transform->SetInputType(0, type, 0); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Setting the input media type failed => %s", error.ErrorMessage()); + goto done; + } + + // Create new media type + _hrStatus = MFCreateMediaType(&output); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Creating a new media type failed => %s", error.ErrorMessage()); + goto done; + } + + // Copy all attributes from input type to output media type + _hrStatus = type->CopyAllItems(output); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Copying of all attributes from input to output media type failed => %s", error.ErrorMessage()); + goto done; + } + + UINT32 width, height; + UINT32 numerator, denominator; + + // Fill the missing attributes + + if (FAILED(output->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) || + FAILED(output->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24)) || 
+ FAILED(output->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)) || + FAILED(output->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)) || + FAILED(output->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)) || + FAILED(MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height)) || + FAILED(MFSetAttributeSize(output, MF_MT_FRAME_SIZE, width, height)) || + FAILED(MFGetAttributeRatio(type, MF_MT_FRAME_RATE, &numerator, &denominator)) || + FAILED(MFSetAttributeRatio(output, MF_MT_PIXEL_ASPECT_RATIO, 1, 1))) + { + Error(_grabber->_log, "Setting output media type attributes failed"); + goto done; + } + + // Set transform output type + _hrStatus = _transform->SetOutputType(0, output, 0); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Setting the output media type failed => %s", error.ErrorMessage()); + goto done; + } + + // Check if encoder parameters set properly + _hrStatus = _transform->GetInputStatus(0, &mftStatus); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Failed to query the input stream for more data => %s", error.ErrorMessage()); + goto done; + } + + if (MFT_INPUT_STATUS_ACCEPT_DATA == mftStatus) + { + // Notify the transform we are about to begin streaming data + if (FAILED(_transform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0)) || + FAILED(_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0)) || + FAILED(_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0))) + { + Error(_grabber->_log, "Failed to begin streaming data"); + } + } + + done: + SAFE_RELEASE(output); + return _hrStatus; + } + + BOOL SourceReaderCB::isBusy() + { + EnterCriticalSection(&_critsec); + BOOL result = _isBusy; + LeaveCriticalSection(&_critsec); + + return result; + } + + STDMETHODIMP OnEvent(DWORD, IMFMediaEvent*) { return S_OK; } + STDMETHODIMP OnFlush(DWORD) { return S_OK; } + +private: + virtual ~SourceReaderCB() + { + if (_transform) + { + 
_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0); + _transform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, 0); + } + + SAFE_RELEASE(_transform); + + // Delete critical section. + DeleteCriticalSection(&_critsec); + } + + IMFSample* SourceReaderCB::TransformSample(IMFTransform* transform, IMFSample* in_sample) + { + IMFSample* result = nullptr; + IMFMediaBuffer* out_buffer = nullptr; + MFT_OUTPUT_DATA_BUFFER outputDataBuffer = { 0 }; + + // Process the input sample + _hrStatus = transform->ProcessInput(0, in_sample, 0); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Failed to process the input sample => %s", error.ErrorMessage()); + goto done; + } + + // Gets the buffer demand for the output stream + MFT_OUTPUT_STREAM_INFO streamInfo; + _hrStatus = transform->GetOutputStreamInfo(0, &streamInfo); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Failed to retrieve buffer requirement for output current => %s", error.ErrorMessage()); + goto done; + } + + // Create an output media buffer + _hrStatus = MFCreateMemoryBuffer(streamInfo.cbSize, &out_buffer); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Failed to create an output media buffer => %s", error.ErrorMessage()); + goto done; + } + + // Create an empty media sample + _hrStatus = MFCreateSample(&result); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Failed to create an empty media sample => %s", error.ErrorMessage()); + goto done; + } + + // Add the output media buffer to the media sample + _hrStatus = result->AddBuffer(out_buffer); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Failed to add the output media buffer to the media sample => %s", error.ErrorMessage()); + goto done; + } + + // Create the output buffer structure + memset(&outputDataBuffer, 0, sizeof outputDataBuffer); + outputDataBuffer.dwStreamID 
= 0; + outputDataBuffer.dwStatus = 0; + outputDataBuffer.pEvents = nullptr; + outputDataBuffer.pSample = result; + + DWORD status = 0; + + // Generate the output sample + _hrStatus = transform->ProcessOutput(0, 1, &outputDataBuffer, &status); + if (FAILED(_hrStatus)) + { + _com_error error(_hrStatus); + Error(_grabber->_log, "Failed to generate the output sample => %s", error.ErrorMessage()); + } + else + { + SAFE_RELEASE(out_buffer); + return result; + } + + done: + SAFE_RELEASE(out_buffer); + return nullptr; + } + +private: + long _nRefCount; + CRITICAL_SECTION _critsec; + MFGrabber* _grabber; + BOOL _bEOS; + HRESULT _hrStatus; + IMFTransform* _transform; + PixelFormat _pixelformat; + std::atomic _isBusy; +}; diff --git a/libsrc/grabber/video/v4l2/V4L2Grabber.cpp b/libsrc/grabber/video/v4l2/V4L2Grabber.cpp new file mode 100644 index 00000000..2ba222f1 --- /dev/null +++ b/libsrc/grabber/video/v4l2/V4L2Grabber.cpp @@ -0,0 +1,1645 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include + +#include "grabber/V4L2Grabber.h" + +#define CLEAR(x) memset(&(x), 0, sizeof(x)) + +#ifndef V4L2_CAP_META_CAPTURE + #define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility. +#endif + +// Constants +namespace { const bool verbose = false; } + +// Need more video properties? 
Visit https://www.kernel.org/doc/html/v4.14/media/uapi/v4l/control.html +using ControlIDPropertyMap = QMap; +inline QMap initControlIDPropertyMap() +{ + QMap propertyMap + { + {V4L2_CID_BRIGHTNESS , "brightness" }, + {V4L2_CID_CONTRAST , "contrast" }, + {V4L2_CID_SATURATION , "saturation" }, + {V4L2_CID_HUE , "hue" } + }; + + return propertyMap; +}; + +Q_GLOBAL_STATIC_WITH_ARGS(ControlIDPropertyMap, _controlIDPropertyMap, (initControlIDPropertyMap())); + +static PixelFormat GetPixelFormat(const unsigned int format) +{ + if (format == V4L2_PIX_FMT_RGB32) return PixelFormat::RGB32; + if (format == V4L2_PIX_FMT_RGB24) return PixelFormat::BGR24; + if (format == V4L2_PIX_FMT_YUYV) return PixelFormat::YUYV; + if (format == V4L2_PIX_FMT_UYVY) return PixelFormat::UYVY; + if (format == V4L2_PIX_FMT_NV12) return PixelFormat::NV12; + if (format == V4L2_PIX_FMT_YUV420) return PixelFormat::I420; +#ifdef HAVE_TURBO_JPEG + if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG; +#endif + return PixelFormat::NO_CHANGE; +}; + +V4L2Grabber::V4L2Grabber() + : Grabber("V4L2") + , _currentDevicePath("none") + , _currentDeviceName("none") + , _threadManager(nullptr) + , _ioMethod(IO_METHOD_MMAP) + , _fileDescriptor(-1) + , _pixelFormat(PixelFormat::NO_CHANGE) + , _pixelFormatConfig(PixelFormat::NO_CHANGE) + , _lineLength(-1) + , _frameByteSize(-1) + , _currentFrame(0) + , _noSignalCounterThreshold(40) + , _noSignalThresholdColor(ColorRgb{0,0,0}) + , _cecDetectionEnabled(true) + , _cecStandbyActivated(false) + , _signalDetectionEnabled(true) + , _noSignalDetected(false) + , _noSignalCounter(0) + , _brightness(0) + , _contrast(0) + , _saturation(0) + , _hue(0) + , _x_frac_min(0.25) + , _y_frac_min(0.25) + , _x_frac_max(0.75) + , _y_frac_max(0.75) + , _streamNotifier(nullptr) + , _initialized(false) + , _reload(false) +{ +} + +V4L2Grabber::~V4L2Grabber() +{ + uninit(); + + if (_threadManager) + delete _threadManager; + _threadManager = nullptr; +} + +bool V4L2Grabber::prepare() +{ + 
if (!_threadManager) + _threadManager = new EncoderThreadManager(this); + + return (_threadManager != nullptr); +} + +void V4L2Grabber::uninit() +{ + // stop if the grabber was not stopped + if (_initialized) + { + Debug(_log,"Uninit grabber: %s (%s)", QSTRING_CSTR(_currentDeviceName), QSTRING_CSTR(_currentDevicePath)); + stop(); + } +} + +bool V4L2Grabber::init() +{ + if (!_initialized) + { + bool noDevicePath = _currentDevicePath.compare("none", Qt::CaseInsensitive) == 0 || _currentDevicePath.compare("auto", Qt::CaseInsensitive) == 0; + + // enumerate the video capture devices on the user's system + enumVideoCaptureDevices(); + + if(noDevicePath) + return false; + + if(!_deviceProperties.contains(_currentDevicePath)) + { + Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDevicePath)); + _currentDevicePath = "none"; + return false; + } + else + { + if (HyperionIManager::getInstance()) + if (_currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName != _deviceProperties.value(_currentDevicePath).name) + return false; + + Debug(_log, "Set device (path) to: %s (%s)", QSTRING_CSTR(_deviceProperties.value(_currentDevicePath).name), QSTRING_CSTR(_currentDevicePath)); + } + + // correct invalid parameters + QMap::const_iterator inputIterator = _deviceProperties.value(_currentDevicePath).inputs.find(_input); + if (inputIterator == _deviceProperties.value(_currentDevicePath).inputs.end()) + setInput(_deviceProperties.value(_currentDevicePath).inputs.firstKey()); + + QMultiMap::const_iterator encodingIterator = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.find(_pixelFormat); + if (encodingIterator == _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end()) + setEncoding(pixelFormatToString(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.firstKey())); + + bool validDimensions = false; + for (auto enc = 
_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.begin(); enc != _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.end(); enc++) + if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height) + { + validDimensions = true; + break; + } + + if (!validDimensions) + setWidthHeight(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().width, _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.first().height); + + QList availableframerates = _deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates; + if (!availableframerates.isEmpty() && !availableframerates.contains(_fps)) + setFramerate(_deviceProperties.value(_currentDevicePath).inputs.value(_input).encodingFormats.value(_pixelFormat).framerates.first()); + + bool opened = false; + try + { + if (open_device()) + { + opened = true; + init_device(_videoStandard); + _initialized = true; + } + } + catch(std::exception& e) + { + if (opened) + { + uninit_device(); + close_device(); + } + + Error(_log, "V4l2 init failed (%s)", e.what()); + } + } + + return _initialized; +} + +bool V4L2Grabber::start() +{ + try + { + if (init() && _streamNotifier != nullptr && !_streamNotifier->isEnabled()) + { + connect(_threadManager, &EncoderThreadManager::newFrame, this, &V4L2Grabber::newThreadFrame); + _threadManager->start(); + DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount); + + _streamNotifier->setEnabled(true); + start_capturing(); + Info(_log, "Started"); + return true; + } + } + catch(std::exception& e) + { + Error(_log, "start failed (%s)", e.what()); + } + + return false; +} + +void V4L2Grabber::stop() +{ + if (_streamNotifier != nullptr && _streamNotifier->isEnabled()) + { + _initialized = false; + _threadManager->stop(); + disconnect(_threadManager, nullptr, nullptr, nullptr); + 
stop_capturing(); + _streamNotifier->setEnabled(false); + uninit_device(); + close_device(); + _deviceProperties.clear(); + _deviceControls.clear(); + Info(_log, "Stopped"); + } +} + +bool V4L2Grabber::open_device() +{ + struct stat st; + + if (-1 == stat(QSTRING_CSTR(_currentDevicePath), &st)) + { + throw_errno_exception("Cannot identify '" + _currentDevicePath + "'"); + return false; + } + + if (!S_ISCHR(st.st_mode)) + { + throw_exception("'" + _currentDevicePath + "' is no device"); + return false; + } + + _fileDescriptor = open(QSTRING_CSTR(_currentDevicePath), O_RDWR | O_NONBLOCK, 0); + + if (-1 == _fileDescriptor) + { + throw_errno_exception("Cannot open '" + _currentDevicePath + "'"); + return false; + } + + // create the notifier for when a new frame is available + _streamNotifier = new QSocketNotifier(_fileDescriptor, QSocketNotifier::Read); + _streamNotifier->setEnabled(false); + connect(_streamNotifier, &QSocketNotifier::activated, this, &V4L2Grabber::read_frame); + return true; +} + +void V4L2Grabber::close_device() +{ + if (-1 == close(_fileDescriptor)) + { + throw_errno_exception("close"); + return; + } + + _fileDescriptor = -1; + + delete _streamNotifier; + _streamNotifier = nullptr; +} + +void V4L2Grabber::init_read(unsigned int buffer_size) +{ + _buffers.resize(1); + + _buffers[0].length = buffer_size; + _buffers[0].start = malloc(buffer_size); + + if (!_buffers[0].start) + { + throw_exception("Out of memory"); + return; + } +} + +void V4L2Grabber::init_mmap() +{ + struct v4l2_requestbuffers req; + + CLEAR(req); + + req.count = 4; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req.memory = V4L2_MEMORY_MMAP; + + if (-1 == xioctl(VIDIOC_REQBUFS, &req)) + { + if (EINVAL == errno) + { + throw_exception("'" + _currentDevicePath + "' does not support memory mapping"); + return; + } + else + { + throw_errno_exception("VIDIOC_REQBUFS"); + return; + } + } + + if (req.count < 2) + { + throw_exception("Insufficient buffer memory on " + _currentDevicePath); + 
return; + } + + _buffers.resize(req.count); + + for (size_t n_buffers = 0; n_buffers < req.count; ++n_buffers) + { + struct v4l2_buffer buf; + + CLEAR(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = n_buffers; + + if (-1 == xioctl(VIDIOC_QUERYBUF, &buf)) + { + throw_errno_exception("VIDIOC_QUERYBUF"); + return; + } + + _buffers[n_buffers].length = buf.length; + _buffers[n_buffers].start = mmap(NULL /* start anywhere */, + buf.length, + PROT_READ | PROT_WRITE /* required */, + MAP_SHARED /* recommended */, + _fileDescriptor, buf.m.offset + ); + + if (MAP_FAILED == _buffers[n_buffers].start) + { + throw_errno_exception("mmap"); + return; + } + } +} + +void V4L2Grabber::init_userp(unsigned int buffer_size) +{ + struct v4l2_requestbuffers req; + + CLEAR(req); + + req.count = 4; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req.memory = V4L2_MEMORY_USERPTR; + + if (-1 == xioctl(VIDIOC_REQBUFS, &req)) + { + if (EINVAL == errno) + { + throw_exception("'" + _currentDevicePath + "' does not support user pointer"); + return; + } + else + { + throw_errno_exception("VIDIOC_REQBUFS"); + return; + } + } + + _buffers.resize(4); + + for (size_t n_buffers = 0; n_buffers < 4; ++n_buffers) + { + _buffers[n_buffers].length = buffer_size; + _buffers[n_buffers].start = malloc(buffer_size); + + if (!_buffers[n_buffers].start) + { + throw_exception("Out of memory"); + return; + } + } +} + +void V4L2Grabber::init_device(VideoStandard videoStandard) +{ + struct v4l2_capability cap; + CLEAR(cap); + + if (-1 == xioctl(VIDIOC_QUERYCAP, &cap)) + { + if (EINVAL == errno) + { + throw_exception("'" + _currentDevicePath + "' is no V4L2 device"); + return; + } + else + { + throw_errno_exception("VIDIOC_QUERYCAP"); + return; + } + } + + if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) + { + throw_exception("'" + _currentDevicePath + "' is no video capture device"); + return; + } + + switch (_ioMethod) + { + case IO_METHOD_READ: + { + if 
(!(cap.capabilities & V4L2_CAP_READWRITE)) + { + throw_exception("'" + _currentDevicePath + "' does not support read i/o"); + return; + } + } + break; + + case IO_METHOD_MMAP: + case IO_METHOD_USERPTR: + { + if (!(cap.capabilities & V4L2_CAP_STREAMING)) + { + throw_exception("'" + _currentDevicePath + "' does not support streaming i/o"); + return; + } + } + break; + } + + /* Select video input, video standard and tune here. */ + + struct v4l2_cropcap cropcap; + CLEAR(cropcap); + + cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (0 == xioctl(VIDIOC_CROPCAP, &cropcap)) + { + struct v4l2_crop crop; + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + crop.c = cropcap.defrect; /* reset to default */ + + if (-1 == xioctl(VIDIOC_S_CROP, &crop)) + { + switch (errno) + { + case EINVAL: /* Cropping not supported. */ + default: /* Errors ignored. */ + break; + } + } + } + + // set input if needed and supported + struct v4l2_input v4l2Input; + CLEAR(v4l2Input); + v4l2Input.index = _input; + + if (_input >= 0 && 0 == xioctl(VIDIOC_ENUMINPUT, &v4l2Input)) + { + (-1 == xioctl(VIDIOC_S_INPUT, &_input)) + ? 
Debug(_log, "Input settings not supported.") + : Debug(_log, "Set device input to: %s", v4l2Input.name); + } + + // set the video standard if needed and supported + struct v4l2_standard standard; + CLEAR(standard); + + if (-1 != xioctl(VIDIOC_ENUMSTD, &standard)) + { + switch (videoStandard) + { + case VideoStandard::PAL: + { + standard.id = V4L2_STD_PAL; + if (-1 == xioctl(VIDIOC_S_STD, &standard.id)) + { + throw_errno_exception("VIDIOC_S_STD"); + break; + } + Debug(_log, "Video standard=PAL"); + } + break; + + case VideoStandard::NTSC: + { + standard.id = V4L2_STD_NTSC; + if (-1 == xioctl(VIDIOC_S_STD, &standard.id)) + { + throw_errno_exception("VIDIOC_S_STD"); + break; + } + Debug(_log, "Video standard=NTSC"); + } + break; + + case VideoStandard::SECAM: + { + standard.id = V4L2_STD_SECAM; + if (-1 == xioctl(VIDIOC_S_STD, &standard.id)) + { + throw_errno_exception("VIDIOC_S_STD"); + break; + } + Debug(_log, "Video standard=SECAM"); + } + break; + + case VideoStandard::NO_CHANGE: + default: + // No change to device settings + break; + } + } + + // get the current settings + struct v4l2_format fmt; + CLEAR(fmt); + + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(VIDIOC_G_FMT, &fmt)) + { + throw_errno_exception("VIDIOC_G_FMT"); + return; + } + + // set the requested pixel format + switch (_pixelFormat) + { + case PixelFormat::RGB32: + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32; + break; + + case PixelFormat::BGR24: + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + break; + + case PixelFormat::YUYV: + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + break; + + case PixelFormat::UYVY: + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; + break; + + case PixelFormat::NV12: + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12; + break; + + case PixelFormat::I420: + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; + break; + +#ifdef HAVE_TURBO_JPEG + case PixelFormat::MJPEG: + { + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; + fmt.fmt.pix.field = V4L2_FIELD_ANY; + } + 
break; +#endif + + case PixelFormat::NO_CHANGE: + default: + // No change to device settings + break; + } + + // set custom resolution for width and height if they are not zero + if(_width != 0 && _height != 0) + { + fmt.fmt.pix.width = _width; + fmt.fmt.pix.height = _height; + } + + // set the settings + if (-1 == xioctl(VIDIOC_S_FMT, &fmt)) + { + throw_errno_exception("VIDIOC_S_FMT"); + return; + } + + // initialize current width and height + _width = fmt.fmt.pix.width; + _height = fmt.fmt.pix.height; + + // display the used width and height + Debug(_log, "Set resolution to width=%d height=%d", _width, _height ); + + // Trying to set frame rate + struct v4l2_streamparm streamparms; + CLEAR(streamparms); + + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + // Check that the driver knows about framerate get/set + if (xioctl(VIDIOC_G_PARM, &streamparms) >= 0) + { + // Check if the device is able to accept a capture framerate set. + if (streamparms.parm.capture.capability == V4L2_CAP_TIMEPERFRAME) + { + streamparms.parm.capture.timeperframe.numerator = 1; + streamparms.parm.capture.timeperframe.denominator = _fps; + (-1 == xioctl(VIDIOC_S_PARM, &streamparms)) + ? 
Debug(_log, "Frame rate settings not supported.") + : Debug(_log, "Set framerate to %d fps", streamparms.parm.capture.timeperframe.denominator); + } + } + + // set the line length + _lineLength = fmt.fmt.pix.bytesperline; + + // set brightness, contrast, saturation, hue + for (auto control : _deviceControls[_currentDevicePath]) + { + struct v4l2_control control_S; + CLEAR(control_S); + control_S.id = _controlIDPropertyMap->key(control.property); + + if (_controlIDPropertyMap->key(control.property) == V4L2_CID_BRIGHTNESS) + { + if (_brightness >= control.minValue && _brightness <= control.maxValue && _brightness != control.currentValue) + { + control_S.value = _brightness; + if (xioctl(VIDIOC_S_CTRL, &control_S) >= 0) + Debug(_log,"Set brightness to %i", _brightness); + } + } + else if (_controlIDPropertyMap->key(control.property) == V4L2_CID_CONTRAST) + { + if (_contrast >= control.minValue && _contrast <= control.maxValue && _contrast != control.currentValue) + { + control_S.value = _contrast; + if (xioctl(VIDIOC_S_CTRL, &control_S) >= 0) + Debug(_log,"Set contrast to %i", _contrast); + } + } + else if (_controlIDPropertyMap->key(control.property) == V4L2_CID_SATURATION) + { + if (_saturation >= control.minValue && _saturation <= control.maxValue && _saturation != control.currentValue) + { + control_S.value = _saturation; + if (xioctl(VIDIOC_S_CTRL, &control_S) >= 0) + Debug(_log,"Set saturation to %i", _saturation); + } + } + else if (_controlIDPropertyMap->key(control.property) == V4L2_CID_HUE) + { + if (_hue >= control.minValue && _hue <= control.maxValue && _hue != control.currentValue) + { + control_S.value = _hue; + if (xioctl(VIDIOC_S_CTRL, &control_S) >= 0) + Debug(_log,"Set hue to %i", _hue); + } + } + } + + // check pixel format and frame size + switch (fmt.fmt.pix.pixelformat) + { + case V4L2_PIX_FMT_RGB32: + { + _pixelFormat = PixelFormat::RGB32; + _frameByteSize = _width * _height * 4; + Debug(_log, "Pixel format=RGB32"); + } + break; + + case 
V4L2_PIX_FMT_RGB24: + { + _pixelFormat = PixelFormat::BGR24; + _frameByteSize = _width * _height * 3; + Debug(_log, "Pixel format=BGR24"); + } + break; + + + case V4L2_PIX_FMT_YUYV: + { + _pixelFormat = PixelFormat::YUYV; + _frameByteSize = _width * _height * 2; + Debug(_log, "Pixel format=YUYV"); + } + break; + + case V4L2_PIX_FMT_UYVY: + { + _pixelFormat = PixelFormat::UYVY; + _frameByteSize = _width * _height * 2; + Debug(_log, "Pixel format=UYVY"); + } + break; + + case V4L2_PIX_FMT_NV12: + { + _pixelFormat = PixelFormat::NV12; + _frameByteSize = (_width * _height * 6) / 4; + Debug(_log, "Pixel format=NV12"); + } + break; + + case V4L2_PIX_FMT_YUV420: + { + _pixelFormat = PixelFormat::I420; + _frameByteSize = (_width * _height * 6) / 4; + Debug(_log, "Pixel format=I420"); + } + break; + +#ifdef HAVE_TURBO_JPEG + case V4L2_PIX_FMT_MJPEG: + { + _pixelFormat = PixelFormat::MJPEG; + Debug(_log, "Pixel format=MJPEG"); + } + break; +#endif + + default: +#ifdef HAVE_TURBO_JPEG + throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12, I420 and MJPEG are supported"); +#else + throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12 and I420 are supported"); +#endif + return; + } + + switch (_ioMethod) + { + case IO_METHOD_READ: + init_read(fmt.fmt.pix.sizeimage); + break; + + case IO_METHOD_MMAP: + init_mmap(); + break; + + case IO_METHOD_USERPTR: + init_userp(fmt.fmt.pix.sizeimage); + break; + } +} + +void V4L2Grabber::uninit_device() +{ + switch (_ioMethod) + { + case IO_METHOD_READ: + free(_buffers[0].start); + break; + + case IO_METHOD_MMAP: + { + for (size_t i = 0; i < _buffers.size(); ++i) + if (-1 == munmap(_buffers[i].start, _buffers[i].length)) + { + throw_errno_exception("munmap"); + return; + } + } + break; + + case IO_METHOD_USERPTR: + { + for (size_t i = 0; i < _buffers.size(); ++i) + free(_buffers[i].start); + } + break; + } + + _buffers.resize(0); +} + +void V4L2Grabber::start_capturing() +{ + switch (_ioMethod) + { + case 
IO_METHOD_READ: + /* Nothing to do. */ + break; + + case IO_METHOD_MMAP: + { + for (size_t i = 0; i < _buffers.size(); ++i) + { + struct v4l2_buffer buf; + + CLEAR(buf); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = i; + + if (-1 == xioctl(VIDIOC_QBUF, &buf)) + { + throw_errno_exception("VIDIOC_QBUF"); + return; + } + } + v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(VIDIOC_STREAMON, &type)) + { + throw_errno_exception("VIDIOC_STREAMON"); + return; + } + break; + } + case IO_METHOD_USERPTR: + { + for (size_t i = 0; i < _buffers.size(); ++i) + { + struct v4l2_buffer buf; + + CLEAR(buf); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.index = i; + buf.m.userptr = (unsigned long)_buffers[i].start; + buf.length = _buffers[i].length; + + if (-1 == xioctl(VIDIOC_QBUF, &buf)) + { + throw_errno_exception("VIDIOC_QBUF"); + return; + } + } + v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(VIDIOC_STREAMON, &type)) + { + throw_errno_exception("VIDIOC_STREAMON"); + return; + } + break; + } + } +} + +void V4L2Grabber::stop_capturing() +{ + enum v4l2_buf_type type; + + switch (_ioMethod) + { + case IO_METHOD_READ: + break; /* Nothing to do. */ + + case IO_METHOD_MMAP: + case IO_METHOD_USERPTR: + { + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + ErrorIf((xioctl(VIDIOC_STREAMOFF, &type) == -1), _log, "VIDIOC_STREAMOFF error code %d, %s", errno, strerror(errno)); + } + break; + } +} + +int V4L2Grabber::read_frame() +{ + bool rc = false; + + try + { + struct v4l2_buffer buf; + + switch (_ioMethod) + { + case IO_METHOD_READ: + { + int size; + if ((size = read(_fileDescriptor, _buffers[0].start, _buffers[0].length)) == -1) + { + switch (errno) + { + case EAGAIN: + return 0; + + case EIO: /* Could ignore EIO, see spec. 
*/ + default: + throw_errno_exception("read"); + return 0; + } + } + + rc = process_image(_buffers[0].start, size); + } + break; + + case IO_METHOD_MMAP: + { + CLEAR(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + + if (-1 == xioctl(VIDIOC_DQBUF, &buf)) + { + switch (errno) + { + case EAGAIN: + return 0; + + case EIO: /* Could ignore EIO, see spec. */ + default: + { + throw_errno_exception("VIDIOC_DQBUF"); + stop(); + enumVideoCaptureDevices(); + } + return 0; + } + } + + assert(buf.index < _buffers.size()); + + rc = process_image(_buffers[buf.index].start, buf.bytesused); + + if (-1 == xioctl(VIDIOC_QBUF, &buf)) + { + throw_errno_exception("VIDIOC_QBUF"); + return 0; + } + } + break; + + case IO_METHOD_USERPTR: + { + CLEAR(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + + if (-1 == xioctl(VIDIOC_DQBUF, &buf)) + { + switch (errno) + { + case EAGAIN: + return 0; + + case EIO: /* Could ignore EIO, see spec. */ + default: + { + throw_errno_exception("VIDIOC_DQBUF"); + stop(); + enumVideoCaptureDevices(); + } + return 0; + } + } + + for (size_t i = 0; i < _buffers.size(); ++i) + { + if (buf.m.userptr == (unsigned long)_buffers[i].start && buf.length == _buffers[i].length) + { + break; + } + } + + rc = process_image((void *)buf.m.userptr, buf.bytesused); + + if (!rc && -1 == xioctl(VIDIOC_QBUF, &buf)) + { + throw_errno_exception("VIDIOC_QBUF"); + return 0; + } + } + break; + } + } + catch (std::exception& e) + { + emit readError(e.what()); + rc = false; + } + + return rc ? 
1 : 0; +} + +bool V4L2Grabber::process_image(const void *p, int size) +{ + int processFrameIndex = _currentFrame++, result = false; + + // frame skipping + if ((processFrameIndex % (_fpsSoftwareDecimation + 1) != 0) && (_fpsSoftwareDecimation > 0)) + return result; + +#ifdef HAVE_TURBO_JPEG + if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG) +#else + if (size < _frameByteSize) +#endif + { + Error(_log, "Frame too small: %d != %d", size, _frameByteSize); + } + else if (_threadManager != nullptr) + { + for (int i = 0; i < _threadManager->_threadCount; i++) + { + if (!_threadManager->_threads[i]->isBusy()) + { + _threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)p, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation); + _threadManager->_threads[i]->process(); + result = true; + break; + } + } + } + + return result; +} + +void V4L2Grabber::newThreadFrame(Image image) +{ + if (_cecDetectionEnabled && _cecStandbyActivated) + return; + + if (_signalDetectionEnabled) + { + // check signal (only in center of the resulting image, because some grabbers have noise values along the borders) + bool noSignal = true; + + // top left + unsigned xOffset = image.width() * _x_frac_min; + unsigned yOffset = image.height() * _y_frac_min; + + // bottom right + unsigned xMax = image.width() * _x_frac_max; + unsigned yMax = image.height() * _y_frac_max; + + for (unsigned x = xOffset; noSignal && x < xMax; ++x) + for (unsigned y = yOffset; noSignal && y < yMax; ++y) + noSignal &= (ColorRgb&)image(x, y) <= _noSignalThresholdColor; + + if (noSignal) + ++_noSignalCounter; + else + { + if (_noSignalCounter >= _noSignalCounterThreshold) + { + _noSignalDetected = true; + Info(_log, "Signal detected"); + } + + _noSignalCounter = 0; + } + + if ( _noSignalCounter < _noSignalCounterThreshold) + { + emit newFrame(image); + } + else if (_noSignalCounter == _noSignalCounterThreshold) + { + _noSignalDetected = 
false; + Info(_log, "Signal lost"); + } + } + else + emit newFrame(image); +} + +int V4L2Grabber::xioctl(int request, void *arg) +{ + int r; + + do + { + r = ioctl(_fileDescriptor, request, arg); + } + while (-1 == r && EINTR == errno); + + return r; +} + +int V4L2Grabber::xioctl(int fileDescriptor, int request, void *arg) +{ + int r; + + do + { + r = ioctl(fileDescriptor, request, arg); + } + while (r < 0 && errno == EINTR ); + + return r; +} + +void V4L2Grabber::setDevice(const QString& devicePath, const QString& deviceName) +{ + if (_currentDevicePath != devicePath || _currentDeviceName != deviceName) + { + _currentDevicePath = devicePath; + _currentDeviceName = deviceName; + _reload = true; + } +} + +bool V4L2Grabber::setInput(int input) +{ + if(Grabber::setInput(input)) + { + _reload = true; + return true; + } + + return false; +} + +bool V4L2Grabber::setWidthHeight(int width, int height) +{ + if(Grabber::setWidthHeight(width, height)) + { + _reload = true; + return true; + } + + return false; +} + +void V4L2Grabber::setEncoding(QString enc) +{ + if(_pixelFormatConfig != parsePixelFormat(enc)) + { + _pixelFormatConfig = parsePixelFormat(enc); + if(_initialized) + { + Debug(_log,"Set hardware encoding to: %s", QSTRING_CSTR(enc.toUpper())); + _reload = true; + } + else + _pixelFormat = _pixelFormatConfig; + } +} + +void V4L2Grabber::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue) +{ + if (_brightness != brightness || _contrast != contrast || _saturation != saturation || _hue != hue) + { + _brightness = brightness; + _contrast = contrast; + _saturation = saturation; + _hue = hue; + + _reload = true; + } +} + +void V4L2Grabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold) +{ + _noSignalThresholdColor.red = uint8_t(255*redSignalThreshold); + _noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold); + _noSignalThresholdColor.blue 
= uint8_t(255*blueSignalThreshold); + _noSignalCounterThreshold = qMax(1, noSignalCounterThreshold); + + if(_signalDetectionEnabled) + Info(_log, "Signal threshold set to: {%d, %d, %d}", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue ); +} + +void V4L2Grabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax) +{ + // rainbow 16 stripes 0.47 0.2 0.49 0.8 + // unicolor: 0.25 0.25 0.75 0.75 + + _x_frac_min = horizontalMin; + _y_frac_min = verticalMin; + _x_frac_max = horizontalMax; + _y_frac_max = verticalMax; + + if(_signalDetectionEnabled) + Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max ); +} + +void V4L2Grabber::setSignalDetectionEnable(bool enable) +{ + if (_signalDetectionEnabled != enable) + { + _signalDetectionEnabled = enable; + if(_initialized) + Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled"); + } +} + +void V4L2Grabber::setCecDetectionEnable(bool enable) +{ + if (_cecDetectionEnabled != enable) + { + _cecDetectionEnabled = enable; + if(_initialized) + Info(_log, QString("CEC detection is now %1").arg(enable ? 
"enabled" : "disabled").toLocal8Bit()); + } +} + +bool V4L2Grabber::reload(bool force) +{ + if (_reload || force) + { + if (_streamNotifier != nullptr && _streamNotifier->isEnabled()) + { + Info(_log,"Reloading V4L2 Grabber"); + uninit(); + _pixelFormat = _pixelFormatConfig; + } + + _reload = false; + return prepare() && start(); + } + + return false; +} + +#if defined(ENABLE_CEC) + +void V4L2Grabber::handleCecEvent(CECEvent event) +{ + switch (event) + { + case CECEvent::On : + Debug(_log,"CEC on event received"); + _cecStandbyActivated = false; + return; + case CECEvent::Off : + Debug(_log,"CEC off event received"); + _cecStandbyActivated = true; + return; + default: break; + } +} + +#endif + +QJsonArray V4L2Grabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + enumVideoCaptureDevices(); + + QJsonArray inputsDiscovered; + for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it) + { + QJsonObject device, in; + QJsonArray video_inputs, formats; + + QMultiMap inputs = QMultiMap(); + for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i) + if (i.key() == it.key()) + for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++) + if (!inputs.contains(y.value().inputName, y.key())) + inputs.insert(y.value().inputName, y.key()); + + if (!inputs.isEmpty()) + { + device["device"] = it.key(); + device["device_name"] = _deviceProperties.value(it.key()).name; + device["type"] = "v4l2"; + + Debug( _log, "inputs size [%d], isEmpty [%d]", inputs.size(), inputs.isEmpty()); + + for (auto input = inputs.begin(); input != inputs.end(); input++) + { + in["name"] = input.key(); + in["inputIdx"] = input.value(); + + QJsonArray standards; + QList videoStandards = QList(); + for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i) + if (i.key() == it.key()) + for (auto y = 
i.value().inputs.begin(); y != i.value().inputs.end(); y++) + if (y.key() == input.value()) + for (auto std = y.value().standards.begin(); std != y.value().standards.end(); std++) + if(!videoStandards.contains(*std)) + videoStandards << *std; + + for (auto standard : videoStandards) + standards.append(VideoStandard2String(standard)); + + if (!standards.isEmpty()) + in["standards"] = standards; + + QList encodingFormats = QList(); + for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i) + if (i.key() == it.key()) + for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++) + if (y.key() == input.value()) + for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++) + if (!encodingFormats.contains(enc.key())) + encodingFormats << enc.key(); + + for (auto encodingFormat : encodingFormats) + { + QJsonObject format; + QJsonArray resolutionArray; + + format["format"] = pixelFormatToString(encodingFormat); + + QMultiMap deviceResolutions = QMultiMap(); + for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i) + if (i.key() == it.key()) + for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++) + if (y.key() == input.value()) + for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++) + if (enc.key() == encodingFormat && !deviceResolutions.contains(enc.value().width, enc.value().height)) + deviceResolutions.insert(enc.value().width, enc.value().height); + + for (auto width_height = deviceResolutions.begin(); width_height != deviceResolutions.end(); width_height++) + { + QJsonObject resolution; + QJsonArray fps; + + resolution["width"] = int(width_height.key()); + resolution["height"] = int(width_height.value()); + + QIntList framerates = QIntList(); + for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i) + if (i.key() == it.key()) + for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++) 
+ if (y.key() == input.value()) + for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++) + if(enc.key() == encodingFormat && enc.value().width == width_height.key() && enc.value().height == width_height.value()) + for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++) + if(!framerates.contains(*fps)) + framerates << *fps; + + for (auto framerate : framerates) + fps.append(framerate); + + resolution["fps"] = fps; + resolutionArray.append(resolution); + } + + format["resolutions"] = resolutionArray; + formats.append(format); + } + in["formats"] = formats; + video_inputs.append(in); + + } + + device["video_inputs"] = video_inputs; + + QJsonObject controls, controls_default; + for (auto control : _deviceControls[it.key()]) + { + QJsonObject property; + property["minValue"] = control.minValue; + property["maxValue"] = control.maxValue; + property["step"] = control.step; + property["current"] = control.currentValue; + controls[control.property] = property; + controls_default[control.property] = control.defaultValue; + } + device["properties"] = controls; + + QJsonObject defaults, video_inputs_default, format_default, resolution_default; + resolution_default["width"] = 640; + resolution_default["height"] = 480; + resolution_default["fps"] = 25; + format_default["format"] = "yuyv"; + format_default["resolution"] = resolution_default; + video_inputs_default["inputIdx"] = 0; + video_inputs_default["standards"] = "PAL"; + video_inputs_default["formats"] = format_default; + + defaults["video_input"] = video_inputs_default; + defaults["properties"] = controls_default; + device["default"] = defaults; + + inputsDiscovered.append(device); + } + } + + _deviceProperties.clear(); + _deviceControls.clear(); + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; +} + +void 
V4L2Grabber::enumVideoCaptureDevices() +{ + QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags); + _deviceProperties.clear(); + _deviceControls.clear(); + + while(it.hasNext()) + { + //_v4lDevices + QString dev = it.next(); + if (it.fileName().startsWith("video")) + { + QString devName = "/dev/" + it.fileName(); + int fd = open(QSTRING_CSTR(devName), O_RDWR | O_NONBLOCK, 0); + + if (fd < 0) + { + throw_errno_exception("Cannot open '" + devName + "'"); + continue; + } + + struct v4l2_capability cap; + CLEAR(cap); + + if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) + { + throw_errno_exception("'" + devName + "' is no V4L2 device"); + close(fd); + continue; + } + + if (cap.device_caps & V4L2_CAP_META_CAPTURE) // this device has bit 23 set (and bit 1 reset), so it doesn't have capture. + { + close(fd); + continue; + } + + // get the current settings + struct v4l2_format fmt; + CLEAR(fmt); + + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0) + { + close(fd); + continue; + } + + V4L2Grabber::DeviceProperties properties; + + // collect available device inputs (index & name) + struct v4l2_input input; + CLEAR(input); + + input.index = 0; + while (xioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0) + { + V4L2Grabber::DeviceProperties::InputProperties inputProperties; + inputProperties.inputName = QString((char*)input.name); + + // Enumerate video standards + struct v4l2_standard standard; + CLEAR(standard); + + standard.index = 0; + while (xioctl(fd, VIDIOC_ENUMSTD, &standard) >= 0) + { + if (standard.id & input.std) + { + if (standard.id == V4L2_STD_PAL) + inputProperties.standards.append(VideoStandard::PAL); + else if (standard.id == V4L2_STD_NTSC) + inputProperties.standards.append(VideoStandard::NTSC); + else if (standard.id == V4L2_STD_SECAM) + inputProperties.standards.append(VideoStandard::SECAM); + } + + standard.index++; + } + + // Enumerate pixel formats + struct v4l2_fmtdesc desc; + CLEAR(desc); + + desc.index = 0; + 
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + while (xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) + { + PixelFormat encodingFormat = GetPixelFormat(desc.pixelformat); + if (encodingFormat != PixelFormat::NO_CHANGE) + { + V4L2Grabber::DeviceProperties::InputProperties::EncodingProperties encodingProperties; + + // Enumerate frame sizes and frame rates + struct v4l2_frmsizeenum frmsizeenum; + CLEAR(frmsizeenum); + + frmsizeenum.index = 0; + frmsizeenum.pixel_format = desc.pixelformat; + while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) >= 0) + { + switch (frmsizeenum.type) + { + case V4L2_FRMSIZE_TYPE_DISCRETE: + { + encodingProperties.width = frmsizeenum.discrete.width; + encodingProperties.height = frmsizeenum.discrete.height; + enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, frmsizeenum.discrete.width, frmsizeenum.discrete.height); + } + break; + case V4L2_FRMSIZE_TYPE_CONTINUOUS: + case V4L2_FRMSIZE_TYPE_STEPWISE: // We do not take care of V4L2_FRMSIZE_TYPE_CONTINUOUS or V4L2_FRMSIZE_TYPE_STEPWISE + break; + } + + inputProperties.encodingFormats.insert(encodingFormat, encodingProperties); + frmsizeenum.index++; + } + + // Failsafe: In case VIDIOC_ENUM_FRAMESIZES fails, insert current heigth, width and fps. 
+ if (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1) + { + encodingProperties.width = fmt.fmt.pix.width; + encodingProperties.height = fmt.fmt.pix.height; + enumFrameIntervals(encodingProperties.framerates, fd, desc.pixelformat, encodingProperties.width, encodingProperties.height); + inputProperties.encodingFormats.insert(encodingFormat, encodingProperties); + } + } + + desc.index++; + } + + properties.inputs.insert(input.index, inputProperties); + input.index++; + } + + // Enumerate video control IDs + QList deviceControlList; + for (auto it = _controlIDPropertyMap->begin(); it != _controlIDPropertyMap->end(); it++) + { + struct v4l2_queryctrl queryctrl; + CLEAR(queryctrl); + + queryctrl.id = it.key(); + if (xioctl(fd, VIDIOC_QUERYCTRL, &queryctrl) < 0) + break; + if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) + break; + + DeviceControls control; + control.property = it.value(); + control.minValue = queryctrl.minimum; + control.maxValue = queryctrl.maximum; + control.step = queryctrl.step; + control.defaultValue = queryctrl.default_value; + + struct v4l2_ext_control ctrl; + struct v4l2_ext_controls ctrls; + CLEAR(ctrl); + CLEAR(ctrls); + + ctrl.id = it.key(); + ctrls.count = 1; + ctrls.controls = &ctrl; + if (xioctl(fd, VIDIOC_G_EXT_CTRLS, &ctrls) == 0) + { + control.currentValue = ctrl.value; + DebugIf(verbose, _log, "%s: min=%i, max=%i, step=%i, default=%i, current=%i", QSTRING_CSTR(it.value()), control.minValue, control.maxValue, control.step, control.defaultValue, control.currentValue); + } + else + break; + + deviceControlList.append(control); + } + + if (!deviceControlList.isEmpty()) + _deviceControls.insert("/dev/"+it.fileName(), deviceControlList); + + if (close(fd) < 0) continue; + + QFile devNameFile(dev+"/name"); + if (devNameFile.exists()) + { + devNameFile.open(QFile::ReadOnly); + devName = devNameFile.readLine(); + devName = devName.trimmed(); + properties.name = devName; + devNameFile.close(); + } + + 
_deviceProperties.insert("/dev/"+it.fileName(), properties); + } + } +} + +void V4L2Grabber::enumFrameIntervals(QList &framerates, int fileDescriptor, int pixelformat, int width, int height) +{ + // collect available frame rates + struct v4l2_frmivalenum frmivalenum; + CLEAR(frmivalenum); + + frmivalenum.index = 0; + frmivalenum.pixel_format = pixelformat; + frmivalenum.width = width; + frmivalenum.height = height; + + while (xioctl(fileDescriptor, VIDIOC_ENUM_FRAMEINTERVALS, &frmivalenum) >= 0) + { + int rate; + switch (frmivalenum.type) + { + case V4L2_FRMSIZE_TYPE_DISCRETE: + { + if (frmivalenum.discrete.numerator != 0) + { + rate = frmivalenum.discrete.denominator / frmivalenum.discrete.numerator; + if (!framerates.contains(rate)) + framerates.append(rate); + } + } + break; + case V4L2_FRMSIZE_TYPE_CONTINUOUS: + case V4L2_FRMSIZE_TYPE_STEPWISE: + { + if (frmivalenum.stepwise.min.denominator != 0) + { + rate = frmivalenum.stepwise.min.denominator / frmivalenum.stepwise.min.numerator; + if (!framerates.contains(rate)) + framerates.append(rate); + } + } + } + frmivalenum.index++; + } + + // If VIDIOC_ENUM_FRAMEINTERVALS fails, try to read the current fps via VIDIOC_G_PARM if possible and insert it into 'framerates'. 
+ if (xioctl(fileDescriptor, VIDIOC_ENUM_FRAMESIZES, &frmivalenum) == -1) + { + struct v4l2_streamparm streamparms; + CLEAR(streamparms); + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (xioctl(fileDescriptor, VIDIOC_G_PARM, &streamparms) >= 0) + framerates.append(streamparms.parm.capture.timeperframe.denominator / streamparms.parm.capture.timeperframe.numerator); + } +} diff --git a/libsrc/grabber/x11/X11Grabber.cpp b/libsrc/grabber/x11/X11Grabber.cpp index 645e30e9..ffa87f9b 100644 --- a/libsrc/grabber/x11/X11Grabber.cpp +++ b/libsrc/grabber/x11/X11Grabber.cpp @@ -4,21 +4,33 @@ #include #include -X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation) - : Grabber("X11GRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom) +// Constants +namespace { + const bool verbose = false; +} //End of constants + +X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom) + : Grabber("X11GRABBER", cropLeft, cropRight, cropTop, cropBottom) , _x11Display(nullptr) + , _xImage(nullptr) , _pixmap(None) , _srcFormat(nullptr) , _dstFormat(nullptr) , _srcPicture(None) , _dstPicture(None) - , _pixelDecimation(pixelDecimation) - , _screenWidth(0) - , _screenHeight(0) + , _calculatedWidth(0) + , _calculatedHeight(0) , _src_x(cropLeft) , _src_y(cropTop) + , _XShmAvailable(false) + , _XRenderAvailable(false) + , _XRandRAvailable(false) + , _isWayland (false) + , _logger{} , _image(0,0) { + _logger = Logger::getInstance("X11"); + _useImageResampler = false; _imageResampler.setCropping(0, 0, 0, 0); // cropping is performed by XRender, XShmGetImage or XGetImage memset(&_pictAttr, 0, sizeof(_pictAttr)); @@ -37,7 +49,10 @@ X11Grabber::~X11Grabber() void X11Grabber::freeResources() { // Cleanup allocated resources of the X11 grab - XDestroyImage(_xImage); + if (_xImage != nullptr) + { + XDestroyImage(_xImage); + } if (_XRandRAvailable) { qApp->removeNativeEventFilter(this); @@ -65,7 +80,7 @@ void 
X11Grabber::setupResources() if(_XShmAvailable) { - _xImage = XShmCreateImage(_x11Display, _windowAttr.visual, _windowAttr.depth, ZPixmap, NULL, &_shminfo, _width, _height); + _xImage = XShmCreateImage(_x11Display, _windowAttr.visual, _windowAttr.depth, ZPixmap, NULL, &_shminfo, _calculatedWidth, _calculatedHeight); _shminfo.shmid = shmget(IPC_PRIVATE, (size_t) _xImage->bytes_per_line * _xImage->height, IPC_CREAT|0777); _xImage->data = (char*)shmat(_shminfo.shmid,0,0); _shminfo.shmaddr = _xImage->data; @@ -75,17 +90,17 @@ void X11Grabber::setupResources() if (_XRenderAvailable) { - _useImageResampler = false; + _useImageResampler = false; _imageResampler.setHorizontalPixelDecimation(1); _imageResampler.setVerticalPixelDecimation(1); if(_XShmPixmapAvailable) { - _pixmap = XShmCreatePixmap(_x11Display, _window, _xImage->data, &_shminfo, _width, _height, _windowAttr.depth); + _pixmap = XShmCreatePixmap(_x11Display, _window, _xImage->data, &_shminfo, _calculatedWidth, _calculatedHeight, _windowAttr.depth); } else { - _pixmap = XCreatePixmap(_x11Display, _window, _width, _height, _windowAttr.depth); + _pixmap = XCreatePixmap(_x11Display, _window, _calculatedWidth, _calculatedHeight, _windowAttr.depth); } _srcFormat = XRenderFindVisualFormat(_x11Display, _windowAttr.visual); _dstFormat = XRenderFindVisualFormat(_x11Display, _windowAttr.visual); @@ -96,49 +111,82 @@ void X11Grabber::setupResources() } else { - _useImageResampler = true; + _useImageResampler = true; _imageResampler.setHorizontalPixelDecimation(_pixelDecimation); _imageResampler.setVerticalPixelDecimation(_pixelDecimation); } - } -bool X11Grabber::Setup() +bool X11Grabber::open() { - _x11Display = XOpenDisplay(NULL); - if (_x11Display == nullptr) + bool rc = false; + + if (getenv("WAYLAND_DISPLAY") != nullptr) { - Error(_log, "Unable to open display"); - if (getenv("DISPLAY")) + _isWayland = true; + } + else + { + _x11Display = XOpenDisplay(nullptr); + if (_x11Display != nullptr) { - Error(_log, 
"%s",getenv("DISPLAY")); + rc = true; + } + } + return rc; +} + +bool X11Grabber::setupDisplay() +{ + bool result = false; + + if ( ! open() ) + { + if ( _isWayland ) + { + Error(_log, "Grabber does not work under Wayland!"); } else { - Error(_log, "DISPLAY environment variable not set"); + if (getenv("DISPLAY") != nullptr) + { + Error(_log, "Unable to open display [%s]",getenv("DISPLAY")); + } + else + { + Error(_log, "DISPLAY environment variable not set"); + } } - return false; } + else + { + _window = DefaultRootWindow(_x11Display); - _window = DefaultRootWindow(_x11Display); + int dummy, pixmaps_supported; - int dummy, pixmaps_supported; + _XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy); + _XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy); + _XShmAvailable = XShmQueryExtension(_x11Display); + XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported); + _XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap; - _XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy); - _XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy); - _XShmAvailable = XShmQueryExtension(_x11Display); - XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported); - _XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap; + Info(_log, QString("XRandR=[%1] XRender=[%2] XShm=[%3] XPixmap=[%4]") + .arg(_XRandRAvailable ? "available" : "unavailable") + .arg(_XRenderAvailable ? "available" : "unavailable") + .arg(_XShmAvailable ? "available" : "unavailable") + .arg(_XShmPixmapAvailable ? 
"available" : "unavailable") + .toStdString().c_str()); - bool result = (updateScreenDimensions(true) >=0); - ErrorIf(!result, _log, "X11 Grabber start failed"); - setEnabled(result); + result = (updateScreenDimensions(true) >=0); + ErrorIf(!result, _log, "X11 Grabber start failed"); + setEnabled(result); + } return result; } int X11Grabber::grabFrame(Image & image, bool forceUpdate) { - if (!_enabled) return 0; + if (!_isEnabled) return 0; if (forceUpdate) updateScreenDimensions(forceUpdate); @@ -176,7 +224,7 @@ int X11Grabber::grabFrame(Image & image, bool forceUpdate) // src_y = cropTop, mask_x, mask_y, dest_x, dest_y, width, height XRenderComposite( _x11Display, PictOpSrc, _srcPicture, None, _dstPicture, ( _src_x/_pixelDecimation), - (_src_y/_pixelDecimation), 0, 0, 0, 0, _width, _height); + (_src_y/_pixelDecimation), 0, 0, 0, 0, _calculatedWidth, _calculatedHeight); XSync(_x11Display, False); @@ -186,7 +234,7 @@ int X11Grabber::grabFrame(Image & image, bool forceUpdate) } else { - _xImage = XGetImage(_x11Display, _pixmap, 0, 0, _width, _height, AllPlanes, ZPixmap); + _xImage = XGetImage(_x11Display, _pixmap, 0, 0, _calculatedWidth, _calculatedHeight, AllPlanes, ZPixmap); } } else if (_XShmAvailable) @@ -197,7 +245,7 @@ int X11Grabber::grabFrame(Image & image, bool forceUpdate) else { // all things done by xgetimage - _xImage = XGetImage(_x11Display, _window, _src_x, _src_y, _width, _height, AllPlanes, ZPixmap); + _xImage = XGetImage(_x11Display, _window, _src_x, _src_y, _calculatedWidth, _calculatedHeight, AllPlanes, ZPixmap); } if (_xImage == nullptr) @@ -220,45 +268,46 @@ int X11Grabber::updateScreenDimensions(bool force) return -1; } - if (!force && _screenWidth == unsigned(_windowAttr.width) && _screenHeight == unsigned(_windowAttr.height)) + if (!force && _width == _windowAttr.width && _height == _windowAttr.height) { // No update required return 0; } - if (_screenWidth || _screenHeight) + if (_width || _height) { freeResources(); } - Info(_log, "Update 
of screen resolution: [%dx%d] to [%dx%d]", _screenWidth, _screenHeight, _windowAttr.width, _windowAttr.height); - _screenWidth = _windowAttr.width; - _screenHeight = _windowAttr.height; + Info(_log, "Update of screen resolution: [%dx%d] to [%dx%d]", _width, _height, _windowAttr.width, _windowAttr.height); + _width = _windowAttr.width; + _height = _windowAttr.height; - int width=0, height=0; + int width=0; + int height=0; // Image scaling is performed by XRender when available, otherwise by ImageResampler if (_XRenderAvailable) { - width = (_screenWidth > unsigned(_cropLeft + _cropRight)) - ? ((_screenWidth - _cropLeft - _cropRight) / _pixelDecimation) - : _screenWidth / _pixelDecimation; + width = (_width > (_cropLeft + _cropRight)) + ? ((_width - _cropLeft - _cropRight) / _pixelDecimation) + : _width / _pixelDecimation; - height = (_screenHeight > unsigned(_cropTop + _cropBottom)) - ? ((_screenHeight - _cropTop - _cropBottom) / _pixelDecimation) - : _screenHeight / _pixelDecimation; + height = (_height > (_cropTop + _cropBottom)) + ? ((_height - _cropTop - _cropBottom) / _pixelDecimation) + : _height / _pixelDecimation; Info(_log, "Using XRender for grabbing"); } else { - width = (_screenWidth > unsigned(_cropLeft + _cropRight)) - ? (_screenWidth - _cropLeft - _cropRight) - : _screenWidth; + width = (_width > (_cropLeft + _cropRight)) + ? (_width - _cropLeft - _cropRight) + : _width; - height = (_screenHeight > unsigned(_cropTop + _cropBottom)) - ? (_screenHeight - _cropTop - _cropBottom) - : _screenHeight; + height = (_height > (_cropTop + _cropBottom)) + ? 
(_height - _cropTop - _cropBottom) + : _height; Info(_log, "Using XGetImage for grabbing"); } @@ -267,29 +316,29 @@ int X11Grabber::updateScreenDimensions(bool force) switch (_videoMode) { case VideoMode::VIDEO_3DSBS: - _width = width /2; - _height = height; + _calculatedWidth = width /2; + _calculatedHeight = height; _src_x = _cropLeft / 2; _src_y = _cropTop; break; case VideoMode::VIDEO_3DTAB: - _width = width; - _height = height / 2; + _calculatedWidth = width; + _calculatedHeight = height / 2; _src_x = _cropLeft; _src_y = _cropTop / 2; break; case VideoMode::VIDEO_2D: default: - _width = width; - _height = height; + _calculatedWidth = width; + _calculatedHeight = height; _src_x = _cropLeft; _src_y = _cropTop; break; } - Info(_log, "Update output image resolution: [%dx%d] to [%dx%d]", _image.width(), _image.height(), _width, _height); + Info(_log, "Update output image resolution: [%dx%d] to [%dx%d]", _image.width(), _image.height(), _calculatedWidth, _calculatedHeight); - _image.resize(_width, _height); + _image.resize(_calculatedWidth, _calculatedHeight); setupResources(); return 1; @@ -298,22 +347,35 @@ int X11Grabber::updateScreenDimensions(bool force) void X11Grabber::setVideoMode(VideoMode mode) { Grabber::setVideoMode(mode); - updateScreenDimensions(true); -} - -void X11Grabber::setPixelDecimation(int pixelDecimation) -{ - if(_pixelDecimation != pixelDecimation) + if(_x11Display != nullptr) { - _pixelDecimation = pixelDecimation; updateScreenDimensions(true); } } -void X11Grabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) +bool X11Grabber::setPixelDecimation(int pixelDecimation) +{ + bool rc (true); + if (Grabber::setPixelDecimation(pixelDecimation)) + { + if(_x11Display != nullptr) + { + if ( updateScreenDimensions(true) < 0 ) + { + rc = false; + } + } + } + return rc; +} + +void X11Grabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) { Grabber::setCropping(cropLeft, cropRight, 
cropTop, cropBottom); - if(_x11Display != nullptr) updateScreenDimensions(true); // segfault on init + if(_x11Display != nullptr) + { + updateScreenDimensions(true); // segfault on init + } } bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message, long int * /*result*/) @@ -332,3 +394,78 @@ bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message, return false; } + +QJsonObject X11Grabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + if ( open() ) + { + inputsDiscovered["device"] = "x11"; + inputsDiscovered["device_name"] = "X11"; + inputsDiscovered["type"] = "screen"; + + QJsonArray video_inputs; + + if (_x11Display != nullptr) + { + QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 }; + + // Iterate through all X screens + for (int i = 0; i < XScreenCount(_x11Display); ++i) + { + _window = DefaultRootWindow(_x11Display); + + const Status status = XGetWindowAttributes(_x11Display, _window, &_windowAttr); + if (status == 0) + { + Debug(_log, "Failed to obtain window attributes"); + } + else + { + QJsonObject in; + + QString displayName; + char* name; + if ( XFetchName(_x11Display, _window, &name) > 0 ) + { + displayName = name; + } + else { + displayName = QString("Display:%1").arg(i); + } + + in["name"] = displayName; + in["inputIdx"] = i; + + QJsonArray formats; + QJsonArray resolutionArray; + QJsonObject format; + QJsonObject resolution; + + resolution["width"] = _windowAttr.width; + resolution["height"] = _windowAttr.height; + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + } + + if ( !video_inputs.isEmpty() ) + { + inputsDiscovered["video_inputs"] = video_inputs; + } + } + } + DebugIf(verbose, _log, 
"device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; +} + diff --git a/libsrc/grabber/x11/X11Wrapper.cpp b/libsrc/grabber/x11/X11Wrapper.cpp index a02e3627..a453fc19 100644 --- a/libsrc/grabber/x11/X11Wrapper.cpp +++ b/libsrc/grabber/x11/X11Wrapper.cpp @@ -1,10 +1,14 @@ #include -X11Wrapper::X11Wrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, unsigned updateRate_Hz) - : GrabberWrapper("X11", &_grabber, 0, 0, updateRate_Hz) - , _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation) - , _init(false) -{} +X11Wrapper::X11Wrapper( int updateRate_Hz, + int pixelDecimation, + int cropLeft, int cropRight, int cropTop, int cropBottom) + : GrabberWrapper("X11", &_grabber, updateRate_Hz) + , _grabber(cropLeft, cropRight, cropTop, cropBottom) + , _init(false) +{ + _grabber.setPixelDecimation(pixelDecimation); +} X11Wrapper::~X11Wrapper() { @@ -19,7 +23,7 @@ void X11Wrapper::action() if (! _init ) { _init = true; - if ( ! _grabber.Setup() ) + if ( ! 
_grabber.setupDisplay() ) { stop(); } diff --git a/libsrc/grabber/xcb/XcbCommandExecutor.h b/libsrc/grabber/xcb/XcbCommandExecutor.h index f87f4c70..b58a43fc 100644 --- a/libsrc/grabber/xcb/XcbCommandExecutor.h +++ b/libsrc/grabber/xcb/XcbCommandExecutor.h @@ -22,7 +22,7 @@ void check_error(xcb_generic_error_t * error) // Requests with void response type template typename std::enable_if::value, void>::type - query(xcb_connection_t * connection, Args&& ...args) + static query(xcb_connection_t * connection, Args&& ...args) { auto cookie = Request::RequestFunction(connection, std::forward(args)...); @@ -33,9 +33,8 @@ template // Requests with non-void response type template - typename std::enable_if::value, - std::unique_ptr>::type - query(xcb_connection_t * connection, Args&& ...args) + typename std::enable_if::value, std::unique_ptr>::type + static query(xcb_connection_t * connection, Args&& ...args) { auto cookie = Request::RequestFunction(connection, std::forward(args)...); diff --git a/libsrc/grabber/xcb/XcbCommands.h b/libsrc/grabber/xcb/XcbCommands.h index 973e87bf..6f42b7cf 100644 --- a/libsrc/grabber/xcb/XcbCommands.h +++ b/libsrc/grabber/xcb/XcbCommands.h @@ -21,6 +21,14 @@ struct GetGeometry static constexpr auto ReplyFunction = xcb_get_geometry_reply; }; +struct GetProperty +{ + typedef xcb_get_property_reply_t ResponseType; + + static constexpr auto RequestFunction = xcb_get_property; + static constexpr auto ReplyFunction = xcb_get_property_reply; +}; + struct ShmQueryVersion { typedef xcb_shm_query_version_reply_t ResponseType; diff --git a/libsrc/grabber/xcb/XcbGrabber.cpp b/libsrc/grabber/xcb/XcbGrabber.cpp index ffffc0c7..b824289e 100644 --- a/libsrc/grabber/xcb/XcbGrabber.cpp +++ b/libsrc/grabber/xcb/XcbGrabber.cpp @@ -14,10 +14,15 @@ #include +// Constants +namespace { + const bool verbose = false; +} //End of constants + #define DOUBLE_TO_FIXED(d) ((xcb_render_fixed_t) ((d) * 65536)) -XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, 
int cropBottom, int pixelDecimation) - : Grabber("XCBGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom) +XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom) + : Grabber("XCBGRABBER", cropLeft, cropRight, cropTop, cropBottom) , _connection{} , _screen{} , _pixmap{} @@ -27,7 +32,6 @@ XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, , _dstPicture{} , _transform{} , _shminfo{} - , _pixelDecimation(pixelDecimation) , _screenWidth{} , _screenHeight{} , _src_x(cropLeft) @@ -36,6 +40,7 @@ XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, , _XcbRandRAvailable{} , _XcbShmAvailable{} , _XcbShmPixmapAvailable{} + , _isWayland (false) , _logger{} , _shmData{} , _XcbRandREventBase{-1} @@ -181,54 +186,83 @@ void XcbGrabber::setupShm() } } -bool XcbGrabber::Setup() +bool XcbGrabber::open() { - int screen_num; - _connection = xcb_connect(nullptr, &screen_num); + bool rc = false; - int ret = xcb_connection_has_error(_connection); - if (ret != 0) + if (getenv("WAYLAND_DISPLAY") != nullptr) { - Error(_logger, "Cannot open display, error %d", ret); - return false; + _isWayland = true; } - - const xcb_setup_t * setup = xcb_get_setup(_connection); - _screen = getScreen(setup, screen_num); - - if (!_screen) + else { - Error(_log, "Unable to open display, screen %d does not exist", screen_num); + _connection = xcb_connect(nullptr, &_screen_num); - if (getenv("DISPLAY")) - Error(_log, "%s", getenv("DISPLAY")); + int ret = xcb_connection_has_error(_connection); + if (ret != 0) + { + Debug(_logger, "Cannot open display, error %d", ret); + } else - Error(_log, "DISPLAY environment variable not set"); - - freeResources(); - return false; + { + const xcb_setup_t * setup = xcb_get_setup(_connection); + _screen = getScreen(setup, _screen_num); + if ( _screen != nullptr) + { + rc = true; + } + } } - setupRandr(); - setupRender(); - setupShm(); + return rc; +} - Info(_log, QString("XcbRandR=[%1] 
XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]") - .arg(_XcbRandRAvailable ? "available" : "unavailable") - .arg(_XcbRenderAvailable ? "available" : "unavailable") - .arg(_XcbShmAvailable ? "available" : "unavailable") - .arg(_XcbShmPixmapAvailable ? "available" : "unavailable") - .toStdString().c_str()); +bool XcbGrabber::setupDisplay() +{ + bool result = false; - bool result = (updateScreenDimensions(true) >= 0); - ErrorIf(!result, _log, "XCB Grabber start failed"); - setEnabled(result); + if ( ! open() ) + { + if ( _isWayland ) + { + Error(_log, "Grabber does not work under Wayland!"); + } + else + { + if (getenv("DISPLAY") != nullptr) + { + Error(_log, "Unable to open display [%s], screen %d does not exist", getenv("DISPLAY"), _screen_num); + } + else + { + Error(_log, "DISPLAY environment variable not set"); + } + freeResources(); + } + } + else + { + setupRandr(); + setupRender(); + setupShm(); + + Info(_log, QString("XcbRandR=[%1] XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]") + .arg(_XcbRandRAvailable ? "available" : "unavailable") + .arg(_XcbRenderAvailable ? "available" : "unavailable") + .arg(_XcbShmAvailable ? "available" : "unavailable") + .arg(_XcbShmPixmapAvailable ? 
"available" : "unavailable") + .toStdString().c_str()); + + result = (updateScreenDimensions(true) >= 0); + ErrorIf(!result, _log, "XCB Grabber start failed"); + setEnabled(result); + } return result; } int XcbGrabber::grabFrame(Image & image, bool forceUpdate) { - if (!_enabled) + if (!_isEnabled) return 0; if (forceUpdate) @@ -316,7 +350,7 @@ int XcbGrabber::updateScreenDimensions(bool force) return -1; } - if (!_enabled) + if (!_isEnabled) setEnabled(true); if (!force && _screenWidth == unsigned(geometry->width) && @@ -391,19 +425,29 @@ int XcbGrabber::updateScreenDimensions(bool force) void XcbGrabber::setVideoMode(VideoMode mode) { Grabber::setVideoMode(mode); - updateScreenDimensions(true); -} - -void XcbGrabber::setPixelDecimation(int pixelDecimation) -{ - if(_pixelDecimation != pixelDecimation) + if(_connection != nullptr) { - _pixelDecimation = pixelDecimation; updateScreenDimensions(true); } } -void XcbGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) +bool XcbGrabber::setPixelDecimation(int pixelDecimation) +{ + bool rc (true); + if (Grabber::setPixelDecimation(pixelDecimation)) + { + if(_connection != nullptr) + { + if ( updateScreenDimensions(true) < 0 ) + { + rc = false; + } + } + } + return rc; +} + +void XcbGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) { Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom); if(_connection != nullptr) @@ -459,3 +503,89 @@ xcb_render_pictformat_t XcbGrabber::findFormatForVisual(xcb_visualid_t visual) c } return {}; } + +QJsonObject XcbGrabber::discover(const QJsonObject& params) +{ + DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + QJsonObject inputsDiscovered; + if ( open() ) + { + inputsDiscovered["device"] = "xcb"; + inputsDiscovered["device_name"] = "XCB"; + inputsDiscovered["type"] = "screen"; + + QJsonArray video_inputs; + + if (_connection != 
nullptr && _screen != nullptr ) + { + QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 }; + + const xcb_setup_t * setup = xcb_get_setup(_connection); + + xcb_screen_iterator_t it = xcb_setup_roots_iterator(setup); + xcb_screen_t * screen = nullptr; + + int i = 0; + // Iterate through all X screens + for (; it.rem > 0; xcb_screen_next(&it)) + { + screen = it.data; + + auto geometry = query(_connection, screen->root); + if (geometry == nullptr) + { + Debug(_log, "Failed to obtain screen geometry for screen [%d]", i); + } + else + { + QJsonObject in; + + QString displayName; + auto property = query(_connection, 0, screen->root, XCB_ATOM_WM_NAME, XCB_ATOM_STRING, 0, 0); + if ( property != nullptr ) + { + if ( xcb_get_property_value_length(property.get()) > 0 ) + { + displayName = (char *) xcb_get_property_value(property.get()); + } + } + + if (displayName.isEmpty()) + { + displayName = QString("Display:%1").arg(i); + } + + in["name"] = displayName; + in["inputIdx"] = i; + + QJsonArray formats; + QJsonArray resolutionArray; + QJsonObject format; + QJsonObject resolution; + + resolution["width"] = geometry->width; + resolution["height"] = geometry->height; + resolution["fps"] = fps; + + resolutionArray.append(resolution); + + format["resolutions"] = resolutionArray; + formats.append(format); + + in["formats"] = formats; + video_inputs.append(in); + } + ++i; + } + + if ( !video_inputs.isEmpty() ) + { + inputsDiscovered["video_inputs"] = video_inputs; + } + } + } + DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData()); + + return inputsDiscovered; +} diff --git a/libsrc/grabber/xcb/XcbWrapper.cpp b/libsrc/grabber/xcb/XcbWrapper.cpp index 699b5bd4..339cb4e8 100644 --- a/libsrc/grabber/xcb/XcbWrapper.cpp +++ b/libsrc/grabber/xcb/XcbWrapper.cpp @@ -1,10 +1,14 @@ #include -XcbWrapper::XcbWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, const unsigned 
updateRate_Hz) - : GrabberWrapper("Xcb", &_grabber, 0, 0, updateRate_Hz) - , _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation) +XcbWrapper::XcbWrapper( int updateRate_Hz, + int pixelDecimation, + int cropLeft, int cropRight, int cropTop, int cropBottom) + : GrabberWrapper("Xcb", &_grabber, updateRate_Hz) + , _grabber(cropLeft, cropRight, cropTop, cropBottom) , _init(false) -{} +{ + _grabber.setPixelDecimation(pixelDecimation); +} XcbWrapper::~XcbWrapper() { @@ -19,7 +23,7 @@ void XcbWrapper::action() if (! _init ) { _init = true; - if ( ! _grabber.Setup() ) + if ( ! _grabber.setupDisplay() ) { stop(); } diff --git a/libsrc/hyperion/CaptureCont.cpp b/libsrc/hyperion/CaptureCont.cpp index 7389f37d..3cb566ed 100644 --- a/libsrc/hyperion/CaptureCont.cpp +++ b/libsrc/hyperion/CaptureCont.cpp @@ -47,6 +47,7 @@ void CaptureCont::handleV4lImage(const QString& name, const Image & im { _hyperion->registerInput(_v4lCaptPrio, hyperion::COMP_V4L, "System", name); _v4lCaptName = name; + emit GlobalSignals::getInstance()->requestSource(hyperion::COMP_V4L, int(_hyperion->getInstanceIndex()), _v4lCaptEnabled); } _v4lInactiveTimer->start(); _hyperion->setInputImage(_v4lCaptPrio, image); @@ -58,6 +59,7 @@ void CaptureCont::handleSystemImage(const QString& name, const Image& { _hyperion->registerInput(_systemCaptPrio, hyperion::COMP_GRABBER, "System", name); _systemCaptName = name; + emit GlobalSignals::getInstance()->requestSource(hyperion::COMP_GRABBER, int(_hyperion->getInstanceIndex()), _systemCaptEnabled); } _systemInactiveTimer->start(); _hyperion->setInputImage(_systemCaptPrio, image); @@ -75,7 +77,7 @@ void CaptureCont::setSystemCaptureEnable(bool enable) } else { - disconnect(GlobalSignals::getInstance(), &GlobalSignals::setSystemImage, 0, 0); + disconnect(GlobalSignals::getInstance(), &GlobalSignals::setSystemImage, this, 0); _hyperion->clear(_systemCaptPrio); _systemInactiveTimer->stop(); _systemCaptName = ""; @@ -98,7 +100,7 @@ void 
CaptureCont::setV4LCaptureEnable(bool enable) } else { - disconnect(GlobalSignals::getInstance(), &GlobalSignals::setV4lImage, 0, 0); + disconnect(GlobalSignals::getInstance(), &GlobalSignals::setV4lImage, this, 0); _hyperion->clear(_v4lCaptPrio); _v4lInactiveTimer->stop(); _v4lCaptName = ""; @@ -125,8 +127,8 @@ void CaptureCont::handleSettingsUpdate(settings::type type, const QJsonDocument& _systemCaptPrio = obj["systemPriority"].toInt(250); } - setV4LCaptureEnable(obj["v4lEnable"].toBool(true)); - setSystemCaptureEnable(obj["systemEnable"].toBool(true)); + setV4LCaptureEnable(obj["v4lEnable"].toBool(false)); + setSystemCaptureEnable(obj["systemEnable"].toBool(false)); } } diff --git a/libsrc/hyperion/Grabber.cpp b/libsrc/hyperion/Grabber.cpp index 85633ff1..376da7ff 100644 --- a/libsrc/hyperion/Grabber.cpp +++ b/libsrc/hyperion/Grabber.cpp @@ -1,33 +1,46 @@ #include +#include -Grabber::Grabber(const QString& grabberName, int width, int height, int cropLeft, int cropRight, int cropTop, int cropBottom) - : _imageResampler() +Grabber::Grabber(const QString& grabberName, int cropLeft, int cropRight, int cropTop, int cropBottom) + : _grabberName(grabberName) + , _log(Logger::getInstance(_grabberName.toUpper())) , _useImageResampler(true) , _videoMode(VideoMode::VIDEO_2D) - , _width(width) - , _height(height) - , _fps(15) + , _videoStandard(VideoStandard::NO_CHANGE) + , _pixelDecimation(GrabberWrapper::DEFAULT_PIXELDECIMATION) + , _flipMode(FlipMode::NO_CHANGE) + , _width(0) + , _height(0) + , _fps(GrabberWrapper::DEFAULT_RATE_HZ) + , _fpsSoftwareDecimation(0) , _input(-1) , _cropLeft(0) , _cropRight(0) , _cropTop(0) , _cropBottom(0) - , _enabled(true) - , _log(Logger::getInstance(grabberName.toUpper())) + , _isEnabled(true) + , _isDeviceInError(false) { - Grabber::setVideoMode(VideoMode::VIDEO_2D); Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom); } void Grabber::setEnabled(bool enable) { Info(_log,"Capture interface is now %s", enable ? 
"enabled" : "disabled"); - _enabled = enable; + _isEnabled = enable; +} + +void Grabber::setInError(const QString& errorMsg) +{ + _isDeviceInError = true; + _isEnabled = false; + + Error(_log, "Grabber disabled, device '%s' signals error: '%s'", QSTRING_CSTR(_grabberName), QSTRING_CSTR(errorMsg)); } void Grabber::setVideoMode(VideoMode mode) { - Debug(_log,"Set videomode to %d", mode); + Info(_log,"Set videomode to %s", QSTRING_CSTR(videoMode2String(mode))); _videoMode = mode; if ( _useImageResampler ) { @@ -35,11 +48,46 @@ void Grabber::setVideoMode(VideoMode mode) } } -void Grabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) +void Grabber::setVideoStandard(VideoStandard videoStandard) +{ + if (_videoStandard != videoStandard) { + _videoStandard = videoStandard; + } +} + +bool Grabber::setPixelDecimation(int pixelDecimation) +{ + if (_pixelDecimation != pixelDecimation) + { + Info(_log,"Set image size decimation to %d", pixelDecimation); + _pixelDecimation = pixelDecimation; + if ( _useImageResampler ) + { + _imageResampler.setHorizontalPixelDecimation(pixelDecimation); + _imageResampler.setVerticalPixelDecimation(pixelDecimation); + } + + return true; + } + + return false; +} + +void Grabber::setFlipMode(FlipMode mode) +{ + Info(_log,"Set flipmode to %s", QSTRING_CSTR(flipModeToString(mode))); + _flipMode = mode; + if ( _useImageResampler ) + { + _imageResampler.setFlipMode(_flipMode); + } +} + +void Grabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) { if (_width>0 && _height>0) { - if (cropLeft + cropRight >= (unsigned)_width || cropTop + cropBottom >= (unsigned)_height) + if (cropLeft + cropRight >= _width || cropTop + cropBottom >= _height) { Error(_log, "Rejecting invalid crop values: left: %d, right: %d, top: %d, bottom: %d, higher than height/width %d/%d", cropLeft, cropRight, cropTop, cropBottom, _height, _width); return; @@ -79,29 +127,45 @@ bool Grabber::setInput(int input) bool 
Grabber::setWidthHeight(int width, int height) { + bool rc (false); // eval changes with crop if ( (width>0 && height>0) && (_width != width || _height != height) ) { if (_cropLeft + _cropRight >= width || _cropTop + _cropBottom >= height) { Error(_log, "Rejecting invalid width/height values as it collides with image cropping: width: %d, height: %d", width, height); - return false; + rc = false; + } + else + { + Debug(_log, "Set new width: %d, height: %d for capture", width, height); + _width = width; + _height = height; + rc = true; } - Debug(_log, "Set new width: %d, height: %d for capture", width, height); - _width = width; - _height = height; - return true; } - return false; + return rc; } bool Grabber::setFramerate(int fps) { if((fps > 0) && (_fps != fps)) { + Info(_log,"Set new frames per second to: %i fps", fps); _fps = fps; return true; } return false; } + +void Grabber::setFpsSoftwareDecimation(int decimation) +{ + if((_fpsSoftwareDecimation != decimation)) + { + _fpsSoftwareDecimation = decimation; + if(decimation > 0){ + Debug(_log,"Skip %i frame per second", decimation); + } + } +} diff --git a/libsrc/hyperion/GrabberWrapper.cpp b/libsrc/hyperion/GrabberWrapper.cpp index 51f04fe3..0235d807 100644 --- a/libsrc/hyperion/GrabberWrapper.cpp +++ b/libsrc/hyperion/GrabberWrapper.cpp @@ -10,22 +10,31 @@ #include GrabberWrapper* GrabberWrapper::instance = nullptr; +const int GrabberWrapper::DEFAULT_RATE_HZ = 10; +const int GrabberWrapper::DEFAULT_MIN_GRAB_RATE_HZ = 1; +const int GrabberWrapper::DEFAULT_MAX_GRAB_RATE_HZ = 30; +const int GrabberWrapper::DEFAULT_PIXELDECIMATION = 8; -GrabberWrapper::GrabberWrapper(const QString& grabberName, Grabber * ggrabber, unsigned width, unsigned height, unsigned updateRate_Hz) +/// Map of Hyperion instances with grabber name that requested screen capture +QMap GrabberWrapper::GRABBER_SYS_CLIENTS = QMap(); +QMap GrabberWrapper::GRABBER_V4L_CLIENTS = QMap(); +bool GrabberWrapper::GLOBAL_GRABBER_SYS_ENABLE = false; +bool 
GrabberWrapper::GLOBAL_GRABBER_V4L_ENABLE = false; + +GrabberWrapper::GrabberWrapper(const QString& grabberName, Grabber * ggrabber, int updateRate_Hz) : _grabberName(grabberName) - , _timer(new QTimer(this)) - , _updateInterval_ms(1000/updateRate_Hz) - , _log(Logger::getInstance(grabberName)) - , _ggrabber(ggrabber) - , _image(0,0) + , _log(Logger::getInstance(grabberName.toUpper())) + , _timer(new QTimer(this)) + , _updateInterval_ms(1000/updateRate_Hz) + , _ggrabber(ggrabber) + , _image(0,0) { GrabberWrapper::instance = this; // Configure the timer to generate events every n milliseconds + _timer->setTimerType(Qt::PreciseTimer); _timer->setInterval(_updateInterval_ms); - _image.resize(width, height); - connect(_timer, &QTimer::timeout, this, &GrabberWrapper::action); // connect the image forwarding @@ -44,17 +53,26 @@ GrabberWrapper::~GrabberWrapper() bool GrabberWrapper::start() { - // Start the timer with the pre configured interval - Debug(_log,"Grabber start()"); - _timer->start(); - return _timer->isActive(); + bool rc = false; + if ( open() ) + { + if (!_timer->isActive()) + { + // Start the timer with the pre configured interval + Debug(_log,"Grabber start()"); + _timer->start(); + } + + rc = _timer->isActive(); + } + return rc; } void GrabberWrapper::stop() { if (_timer->isActive()) { - // Stop the timer, effectivly stopping the process + // Stop the timer, effectively stopping the process Debug(_log,"Grabber stop()"); _timer->stop(); } @@ -65,50 +83,58 @@ bool GrabberWrapper::isActive() const return _timer->isActive(); } -QString GrabberWrapper::getActive() const +QStringList GrabberWrapper::getActive(int inst) const { - return _grabberName; + QStringList result = QStringList(); + + if(GRABBER_V4L_CLIENTS.contains(inst)) + result << GRABBER_V4L_CLIENTS.value(inst); + + if(GRABBER_SYS_CLIENTS.contains(inst)) + result << GRABBER_SYS_CLIENTS.value(inst); + + return result; } QStringList GrabberWrapper::availableGrabbers() { QStringList grabbers; - #ifdef 
ENABLE_DISPMANX +#ifdef ENABLE_DISPMANX grabbers << "dispmanx"; - #endif +#endif - #ifdef ENABLE_V4L2 +#if defined(ENABLE_V4L2) || defined(ENABLE_MF) grabbers << "v4l2"; - #endif +#endif - #ifdef ENABLE_FB +#ifdef ENABLE_FB grabbers << "framebuffer"; - #endif +#endif - #ifdef ENABLE_AMLOGIC +#ifdef ENABLE_AMLOGIC grabbers << "amlogic"; - #endif +#endif - #ifdef ENABLE_OSX +#ifdef ENABLE_OSX grabbers << "osx"; - #endif +#endif - #ifdef ENABLE_X11 +#ifdef ENABLE_X11 grabbers << "x11"; - #endif +#endif - #ifdef ENABLE_XCB +#ifdef ENABLE_XCB grabbers << "xcb"; - #endif +#endif - #ifdef ENABLE_QT +#ifdef ENABLE_QT grabbers << "qt"; - #endif +#endif - #ifdef ENABLE_DX - grabbers << "dx"; - #endif +#ifdef ENABLE_DX + grabbers << "dx"; +#endif return grabbers; } @@ -117,12 +143,17 @@ void GrabberWrapper::setVideoMode(VideoMode mode) { if (_ggrabber != nullptr) { - Info(_log,"setvideomode"); + Info(_log,"setVideoMode"); _ggrabber->setVideoMode(mode); } } -void GrabberWrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) +void GrabberWrapper::setFlipMode(const QString& flipMode) +{ + _ggrabber->setFlipMode(parseFlipMode(flipMode)); +} + +void GrabberWrapper::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) { _ggrabber->setCropping(cropLeft, cropRight, cropTop, cropBottom); } @@ -143,33 +174,40 @@ void GrabberWrapper::updateTimer(int interval) } void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config) -{ - if(type == settings::SYSTEMCAPTURE && !_grabberName.startsWith("V4L")) +{ if(type == settings::SYSTEMCAPTURE && !_grabberName.startsWith("V4L")) { // extract settings const QJsonObject& obj = config.object(); - // width/height - _ggrabber->setWidthHeight(obj["width"].toInt(96), obj["height"].toInt(96)); + // set global grabber state + setSysGrabberState(obj["enable"].toBool(false)); - // display index for MAC - _ggrabber->setDisplayIndex(obj["display"].toInt(0)); + if 
(getSysGrabberState()) + { + // width/height + _ggrabber->setWidthHeight(obj["width"].toInt(96), obj["height"].toInt(96)); - // device path for Framebuffer - _ggrabber->setDevicePath(obj["device"].toString("/dev/fb0")); + // display index for MAC + _ggrabber->setDisplayIndex(obj["input"].toInt(0)); - // pixel decimation for x11 - _ggrabber->setPixelDecimation(obj["pixelDecimation"].toInt(8)); + // pixel decimation for x11 + _ggrabber->setPixelDecimation(obj["pixelDecimation"].toInt(DEFAULT_PIXELDECIMATION)); - // crop for system capture - _ggrabber->setCropping( - obj["cropLeft"].toInt(0), - obj["cropRight"].toInt(0), - obj["cropTop"].toInt(0), - obj["cropBottom"].toInt(0)); + // crop for system capture + _ggrabber->setCropping( + obj["cropLeft"].toInt(0), + obj["cropRight"].toInt(0), + obj["cropTop"].toInt(0), + obj["cropBottom"].toInt(0)); - // eval new update time - updateTimer(1000/obj["frequency_Hz"].toInt(10)); + _ggrabber->setFramerate(obj["fps"].toInt(DEFAULT_RATE_HZ)); + // eval new update time + updateTimer(_ggrabber->getUpdateInterval()); + } + else + { + stop(); + } } } @@ -177,24 +215,24 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp { if(component == hyperion::Components::COMP_GRABBER && !_grabberName.startsWith("V4L")) { - if(listen && !GRABBER_SYS_CLIENTS.contains(hyperionInd)) - GRABBER_SYS_CLIENTS.append(hyperionInd); - else if (!listen) - GRABBER_SYS_CLIENTS.removeOne(hyperionInd); + if(listen) + GRABBER_SYS_CLIENTS.insert(hyperionInd, _grabberName); + else + GRABBER_SYS_CLIENTS.remove(hyperionInd); - if(GRABBER_SYS_CLIENTS.empty()) + if(GRABBER_SYS_CLIENTS.empty() || !getSysGrabberState()) stop(); else start(); } else if(component == hyperion::Components::COMP_V4L && _grabberName.startsWith("V4L")) { - if(listen && !GRABBER_V4L_CLIENTS.contains(hyperionInd)) - GRABBER_V4L_CLIENTS.append(hyperionInd); - else if (!listen) - GRABBER_V4L_CLIENTS.removeOne(hyperionInd); + if(listen) + 
GRABBER_V4L_CLIENTS.insert(hyperionInd, _grabberName); + else + GRABBER_V4L_CLIENTS.remove(hyperionInd); - if(GRABBER_V4L_CLIENTS.empty()) + if(GRABBER_V4L_CLIENTS.empty() || !getV4lGrabberState()) stop(); else start(); @@ -204,48 +242,6 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp void GrabberWrapper::tryStart() { // verify start condition - if((_grabberName.startsWith("V4L") && !GRABBER_V4L_CLIENTS.empty()) || (!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty())) - { + if(!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty() && getSysGrabberState()) start(); - } -} - -QStringList GrabberWrapper::getV4L2devices() const -{ - if(_grabberName.startsWith("V4L")) - return _ggrabber->getV4L2devices(); - - return QStringList(); -} - -QString GrabberWrapper::getV4L2deviceName(const QString& devicePath) const -{ - if(_grabberName.startsWith("V4L")) - return _ggrabber->getV4L2deviceName(devicePath); - - return QString(); -} - -QMultiMap GrabberWrapper::getV4L2deviceInputs(const QString& devicePath) const -{ - if(_grabberName.startsWith("V4L")) - return _ggrabber->getV4L2deviceInputs(devicePath); - - return QMultiMap(); -} - -QStringList GrabberWrapper::getResolutions(const QString& devicePath) const -{ - if(_grabberName.startsWith("V4L")) - return _ggrabber->getResolutions(devicePath); - - return QStringList(); -} - -QStringList GrabberWrapper::getFramerates(const QString& devicePath) const -{ - if(_grabberName.startsWith("V4L")) - return _ggrabber->getFramerates(devicePath); - - return QStringList(); } diff --git a/libsrc/hyperion/Hyperion.cpp b/libsrc/hyperion/Hyperion.cpp index aee85769..23ba6e82 100644 --- a/libsrc/hyperion/Hyperion.cpp +++ b/libsrc/hyperion/Hyperion.cpp @@ -56,7 +56,7 @@ Hyperion::Hyperion(quint8 instance, bool readonlyMode) , _hwLedCount() , _ledGridSize(hyperion::getLedLayoutGridSize(getSetting(settings::LEDS).array())) , _BGEffectHandler(nullptr) - ,_captureCont(nullptr) + , 
_captureCont(nullptr) , _ledBuffer(_ledString.leds().size(), ColorRgb::BLACK) , _boblightServer(nullptr) , _readOnlyMode(readonlyMode) diff --git a/libsrc/hyperion/HyperionIManager.cpp b/libsrc/hyperion/HyperionIManager.cpp index 307bd073..f1080156 100644 --- a/libsrc/hyperion/HyperionIManager.cpp +++ b/libsrc/hyperion/HyperionIManager.cpp @@ -88,7 +88,6 @@ bool HyperionIManager::startInstance(quint8 inst, bool block, QObject* caller, i // from Hyperion connect(hyperion, &Hyperion::settingsChanged, this, &HyperionIManager::settingsChanged); connect(hyperion, &Hyperion::videoMode, this, &HyperionIManager::requestVideoMode); - connect(hyperion, &Hyperion::compStateChangeRequest, this, &HyperionIManager::compStateChangeRequest); // to Hyperion connect(this, &HyperionIManager::newVideoMode, hyperion, &Hyperion::newVideoMode); diff --git a/libsrc/hyperion/PriorityMuxer.cpp b/libsrc/hyperion/PriorityMuxer.cpp index 01c04db2..3c277341 100644 --- a/libsrc/hyperion/PriorityMuxer.cpp +++ b/libsrc/hyperion/PriorityMuxer.cpp @@ -14,14 +14,17 @@ const int PriorityMuxer::FG_PRIORITY = 1; const int PriorityMuxer::BG_PRIORITY = 254; +const int PriorityMuxer::MANUAL_SELECTED_PRIORITY = 256; const int PriorityMuxer::LOWEST_PRIORITY = std::numeric_limits::max(); +const int PriorityMuxer::TIMEOUT_NOT_ACTIVE_PRIO = -100; PriorityMuxer::PriorityMuxer(int ledCount, QObject * parent) : QObject(parent) , _log(Logger::getInstance("HYPERION")) , _currentPriority(PriorityMuxer::LOWEST_PRIORITY) , _previousPriority(_currentPriority) - , _manualSelectedPriority(256) + , _manualSelectedPriority(MANUAL_SELECTED_PRIORITY) + , _prevVisComp (hyperion::Components::COMP_COLOR) , _activeInputs() , _lowestPriorityInfo() , _sourceAutoSelectEnabled(true) @@ -101,7 +104,7 @@ void PriorityMuxer::updateLedColorsLength(int ledCount) { for (auto infoIt = _activeInputs.begin(); infoIt != _activeInputs.end();) { - if (infoIt->ledColors.size() >= 1) + if (!infoIt->ledColors.empty()) { 
infoIt->ledColors.resize(ledCount, infoIt->ledColors.at(0)); } @@ -151,7 +154,7 @@ void PriorityMuxer::registerInput(int priority, hyperion::Components component, InputInfo& input = _activeInputs[priority]; input.priority = priority; - input.timeoutTime_ms = newInput ? -100 : input.timeoutTime_ms; + input.timeoutTime_ms = newInput ? TIMEOUT_NOT_ACTIVE_PRIO : input.timeoutTime_ms; input.componentId = component; input.origin = origin; input.smooth_cfg = smooth_cfg; @@ -162,7 +165,9 @@ void PriorityMuxer::registerInput(int priority, hyperion::Components component, Debug(_log,"Register new input '%s/%s' with priority %d as inactive", QSTRING_CSTR(origin), hyperion::componentToIdString(component), priority); // emit 'prioritiesChanged' only if _sourceAutoSelectEnabled is false if (!_sourceAutoSelectEnabled) + { emit prioritiesChanged(); + } return; } @@ -180,19 +185,26 @@ bool PriorityMuxer::setInput(int priority, const std::vector& ledColor return false; } - // calc final timeout - if(timeout_ms > 0) - timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms; - - InputInfo& input = _activeInputs[priority]; + InputInfo& input = _activeInputs[priority]; // detect active <-> inactive changes bool activeChange = false; bool active = true; - if(input.timeoutTime_ms == -100 && timeout_ms != -100) + + // calculate final timeout + if (timeout_ms >= 0) + { + timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms; + } + else if (input.timeoutTime_ms >= 0) + { + timeout_ms = QDateTime::currentMSecsSinceEpoch(); + } + + if(input.timeoutTime_ms == TIMEOUT_NOT_ACTIVE_PRIO && timeout_ms != TIMEOUT_NOT_ACTIVE_PRIO) { activeChange = true; } - else if(timeout_ms == -100 && input.timeoutTime_ms != -100) + else if(timeout_ms == TIMEOUT_NOT_ACTIVE_PRIO && input.timeoutTime_ms != TIMEOUT_NOT_ACTIVE_PRIO) { active = false; activeChange = true; @@ -224,19 +236,26 @@ bool PriorityMuxer::setInputImage(int priority, const Image& image, in return false; } - // calculate final timeout 
- if(timeout_ms > 0) - timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms; - - InputInfo& input = _activeInputs[priority]; + InputInfo& input = _activeInputs[priority]; // detect active <-> inactive changes bool activeChange = false; bool active = true; - if(input.timeoutTime_ms == -100 && timeout_ms != -100) + + // calculate final timeout + if (timeout_ms >= 0) + { + timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms; + } + else if (input.timeoutTime_ms >= 0) + { + timeout_ms = QDateTime::currentMSecsSinceEpoch(); + } + + if(input.timeoutTime_ms == TIMEOUT_NOT_ACTIVE_PRIO && timeout_ms != TIMEOUT_NOT_ACTIVE_PRIO) { activeChange = true; } - else if(timeout_ms == -100 && input.timeoutTime_ms != -100) + else if(timeout_ms == TIMEOUT_NOT_ACTIVE_PRIO && input.timeoutTime_ms != TIMEOUT_NOT_ACTIVE_PRIO) { active = false; activeChange = true; @@ -251,7 +270,9 @@ bool PriorityMuxer::setInputImage(int priority, const Image& image, in { Debug(_log, "Priority %d is now %s", priority, active ? 
"active" : "inactive"); if (_currentPriority < priority) + { emit prioritiesChanged(); + } setCurrentTime(); } @@ -261,12 +282,12 @@ bool PriorityMuxer::setInputImage(int priority, const Image& image, in bool PriorityMuxer::setInputInactive(int priority) { Image image; - return setInputImage(priority, image, -100); + return setInputImage(priority, image, TIMEOUT_NOT_ACTIVE_PRIO); } bool PriorityMuxer::clearInput(int priority) { - if (priority < PriorityMuxer::LOWEST_PRIORITY && _activeInputs.remove(priority)) + if (priority < PriorityMuxer::LOWEST_PRIORITY && (_activeInputs.remove(priority) > 0)) { Debug(_log,"Removed source priority %d",priority); // on clear success update _currentPriority @@ -318,14 +339,15 @@ void PriorityMuxer::setCurrentTime() } else { - // timeoutTime of -100 is awaiting data (inactive); skip - if(infoIt->timeoutTime_ms > -100) + // timeoutTime of TIMEOUT_NOT_ACTIVE_PRIO is awaiting data (inactive); skip + if(infoIt->timeoutTime_ms > TIMEOUT_NOT_ACTIVE_PRIO) newPriority = qMin(newPriority, infoIt->priority); // call timeTrigger when effect or color is running with timeout > 0, blacklist prio 255 - if(infoIt->priority < BG_PRIORITY && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR || infoIt->componentId == hyperion::COMP_IMAGE)) + if (infoIt->priority < BG_PRIORITY && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR || infoIt->componentId == hyperion::COMP_IMAGE)) + { emit signalTimeTrigger(); // as signal to prevent Threading issues - + } ++infoIt; } } diff --git a/libsrc/hyperion/SettingsManager.cpp b/libsrc/hyperion/SettingsManager.cpp index dca21298..b92202c7 100644 --- a/libsrc/hyperion/SettingsManager.cpp +++ b/libsrc/hyperion/SettingsManager.cpp @@ -391,12 +391,67 @@ bool SettingsManager::handleConfigUpgrade(QJsonObject& config) Warning(_log, "Instance [%u]: HwLedCount/Layout mismatch! 
Setting Hardware LED count to number of LEDs configured via layout", _instance); hwLedcount = layoutLedCount; newDeviceConfig["hardwareLedCount"] = hwLedcount; - - config["device"] = newDeviceConfig; migrated = true; } } } + + if (newDeviceConfig.contains("type")) + { + QString type = newDeviceConfig["type"].toString(); + if (type == "atmoorb" || type == "fadecandy" || type == "philipshue" ) + { + if (newDeviceConfig.contains("output")) + { + newDeviceConfig["host"] = newDeviceConfig["output"].toString(); + newDeviceConfig.remove("output"); + migrated = true; + } + } + } + + if (migrated) + { + config["device"] = newDeviceConfig; + Debug(_log, "LED-Device records migrated"); + } + } + + if (config.contains("grabberV4L2")) + { + QJsonObject newGrabberV4L2Config = config["grabberV4L2"].toObject(); + + if (newGrabberV4L2Config.contains("encoding_format")) + { + newGrabberV4L2Config.remove("encoding_format"); + config["grabberV4L2"] = newGrabberV4L2Config; + migrated = true; + Debug(_log, "GrabberV4L2 records migrated"); + } + } + + if (config.contains("framegrabber")) + { + QJsonObject newFramegrabberConfig = config["framegrabber"].toObject(); + + //Align element namings with grabberV4L2 + //Rename element type -> device + if (newFramegrabberConfig.contains("type")) + { + newFramegrabberConfig["device"] = newFramegrabberConfig["type"]; + newFramegrabberConfig.remove("type"); + migrated = true; + } + //Rename element frequency_Hz -> fps + if (newFramegrabberConfig.contains("frequency_Hz")) + { + newFramegrabberConfig["fps"] = newFramegrabberConfig["frequency_Hz"]; + newFramegrabberConfig.remove("frequency_Hz"); + migrated = true; + } + + config["framegrabber"] = newFramegrabberConfig; + Debug(_log, "Framegrabber records migrated"); } } } diff --git a/libsrc/hyperion/resource.qrc b/libsrc/hyperion/resource.qrc index 905770d4..fdc66d5f 100644 --- a/libsrc/hyperion/resource.qrc +++ b/libsrc/hyperion/resource.qrc @@ -1,7 +1,6 @@ hyperion.schema.json - 
../../config/hyperion.config.json.default schema/schema-general.json schema/schema-logger.json schema/schema-device.json diff --git a/libsrc/hyperion/schema/schema-framegrabber.json b/libsrc/hyperion/schema/schema-framegrabber.json index aad0a8ff..4b0e6a4d 100644 --- a/libsrc/hyperion/schema/schema-framegrabber.json +++ b/libsrc/hyperion/schema/schema-framegrabber.json @@ -1,100 +1,142 @@ { "type" : "object", "title" : "edt_conf_fg_heading_title", - "properties" : + "properties": { - "type" : - { - "type" : "string", - "title" : "edt_conf_fg_type_title", - "enum" : ["auto","amlogic","dispmanx","dx","framebuffer","osx","qt","x11", "xcb"], - "options": - { - "enum_titles": ["edt_conf_enum_automatic","AMLogic","DispmanX","DirectX9","Framebuffer","OSX","QT","X11","XCB"] - + "enable": { + "type": "boolean", + "title": "edt_conf_general_enable_title", + "required": true, + "default": false, + "propertyOrder": 1 + }, + "available_devices": { + "type": "string", + "title": "edt_conf_grabber_discovered_title", + "default": "edt_conf_grabber_discovery_inprogress", + "options": { + "infoText": "edt_conf_grabber_discovered_title_info" }, - "default" : "auto", - "propertyOrder" : 1 + "propertyOrder": 2, + "required": false }, - "width" : - { - "type" : "integer", - "title" : "edt_conf_fg_width_title", - "minimum" : 10, - "default" : 80, - "append" : "edt_append_pixel", - "propertyOrder" : 2 + "device": { + "type": "string", + "title": "edt_conf_enum_custom", + "options": { + "hidden": true + }, + "required": true, + "comment": "The 'available_devices' settings are dynamically inserted into the WebUI under PropertyOrder '2'.", + "propertyOrder": 3 }, - "height" : - { - "type" : "integer", - "title" : "edt_conf_fg_height_title", - "minimum" : 10, - "default" : 45, - "append" : "edt_append_pixel", - "propertyOrder" : 3 + "device_inputs": { + "type": "string", + "title": "edt_conf_v4l2_input_title", + "propertyOrder": 4, + "required": false }, - "frequency_Hz" : - { - "type" : 
"integer", - "title" : "edt_conf_fg_frequency_Hz_title", - "minimum" : 1, - "default" : 10, - "append" : "edt_append_hz", - "propertyOrder" : 4 + "input": { + "type": "integer", + "title": "edt_conf_enum_custom", + "minimum": 0, + "default": 0, + "options": { + "hidden": true + }, + "required": true, + "propertyOrder": 5, + "comment": "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '4'." }, - "cropLeft" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropLeft_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "propertyOrder" : 5 + "resolutions": { + "type": "string", + "title": "edt_conf_v4l2_resolution_title", + "propertyOrder": 6, + "required": false }, - "cropRight" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropRight_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "propertyOrder" : 6 + "width": { + "type": "integer", + "title": "edt_conf_enum_custom", + "minimum": 10, + "default": 80, + "append": "edt_append_pixel", + "options": { + "hidden": true + }, + "required": true, + "propertyOrder": 9, + "comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'." }, - "cropTop" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropTop_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "propertyOrder" : 7 + "height": { + "type": "integer", + "title": "edt_conf_enum_custom", + "append": "edt_append_pixel", + "options": { + "hidden": true + }, + "required": true, + "propertyOrder": 10, + "comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'." 
}, - "cropBottom" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropBottom_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "propertyOrder" : 8 + "framerates": { + "type": "string", + "title": "edt_conf_fg_frequency_Hz_title", + "propertyOrder": 11, + "required": false }, - "pixelDecimation" : - { - "type" : "integer", - "title" : "edt_conf_fg_pixelDecimation_title", - "minimum" : 1, - "maximum" : 30, - "default" : 8, - "propertyOrder" : 9 + "fps": { + "type": "integer", + "title": "edt_conf_enum_custom", + "default":10, + "minimum": 1, + "append": "fps", + "options": { + "hidden": true + }, + "required": true, + "propertyOrder": 12, + "comment": "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '11'." }, - "display" : - { - "type" : "integer", - "title" : "edt_conf_fg_display_title", - "minimum" : 0, - "default" : 0, - "propertyOrder" : 10 + "pixelDecimation": { + "type": "integer", + "title": "edt_conf_fg_pixelDecimation_title", + "minimum": 1, + "maximum": 30, + "default": 8, + "required": true, + "propertyOrder": 13 + }, + "cropLeft": { + "type": "integer", + "title": "edt_conf_v4l2_cropLeft_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "propertyOrder": 14 + }, + "cropRight": { + "type": "integer", + "title": "edt_conf_v4l2_cropRight_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "propertyOrder": 15 + }, + "cropTop": { + "type": "integer", + "title": "edt_conf_v4l2_cropTop_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "propertyOrder": 16 + }, + "cropBottom": { + "type": "integer", + "title": "edt_conf_v4l2_cropBottom_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "propertyOrder": 17 } }, "additionalProperties" : false diff --git a/libsrc/hyperion/schema/schema-grabberV4L2.json b/libsrc/hyperion/schema/schema-grabberV4L2.json index 4816fd8b..1651cb67 100644 --- 
a/libsrc/hyperion/schema/schema-grabberV4L2.json +++ b/libsrc/hyperion/schema/schema-grabberV4L2.json @@ -2,263 +2,359 @@ "type" : "object", "required" : true, "title" : "edt_conf_v4l2_heading_title", - "properties" : + "properties": { - "device" : - { - "type" : "string", - "title" : "edt_conf_enum_custom", - "default" : "auto", - "options" : { - "hidden":true + "enable": { + "type": "boolean", + "title": "edt_conf_general_enable_title", + "required": true, + "default": false, + "propertyOrder": 1 + }, + "available_devices": { + "type": "string", + "title": "edt_conf_grabber_discovered_title", + "default": "edt_conf_grabber_discovery_inprogress", + "options": { + "infoText": "edt_conf_grabber_discovered_title_info" }, - "required" : true, - "propertyOrder" : 2, - "comment" : "The 'available_devices' settings are dynamically inserted into the WebUI under PropertyOrder '1'." + "propertyOrder": 2, + "required": false }, - "input" : - { - "type" : "integer", - "title" : "edt_conf_enum_custom", - "default" : 0, - "options" : { - "hidden":true + "device": { + "type": "string", + "title": "edt_conf_enum_custom", + "options": { + "hidden": true }, - "required" : true, - "propertyOrder" : 4, - "comment" : "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '3'." 
+ "required": true, + "comment": "The 'available_devices' settings are dynamically inserted into the WebUI under PropertyOrder '2'.", + "propertyOrder": 3 }, - "standard" : - { - "type" : "string", - "title" : "edt_conf_v4l2_standard_title", - "enum" : ["NO_CHANGE", "PAL","NTSC","SECAM"], - "default" : "NO_CHANGE", - "options" : { - "enum_titles" : ["edt_conf_enum_NO_CHANGE", "edt_conf_enum_PAL", "edt_conf_enum_NTSC", "edt_conf_enum_SECAM"] + "device_inputs": { + "type": "string", + "title": "edt_conf_v4l2_input_title", + "propertyOrder": 4, + "required": false + }, + "input": { + "type": "integer", + "title": "edt_conf_enum_custom", + "default": 0, + "options": { + "hidden": true }, - "required" : true, - "propertyOrder" : 5 + "required": true, + "propertyOrder": 5, + "comment": "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '4'." }, - "width" : - { - "type" : "integer", - "title" : "edt_conf_fg_width_title", - "default" : 0, - "minimum" : 0, - "append" : "edt_append_pixel", - "options" : { - "hidden":true + + "standard": { + "type": "string", + "title": "edt_conf_v4l2_standard_title", + "required": false, + "propertyOrder": 6 + }, + "encoding": { + "type": "string", + "title": "edt_conf_v4l2_encoding_title", + "required": false, + "access": "advanced", + "propertyOrder": 7 + }, + "resolutions": { + "type": "string", + "title": "edt_conf_v4l2_resolution_title", + "propertyOrder": 8, + "required": false + }, + "width": { + "type": "integer", + "title": "edt_conf_fg_width_title", + "default": 0, + "minimum": 0, + "append": "edt_append_pixel", + "options": { + "hidden": true }, - "required" : true, - "propertyOrder" : 7, - "comment" : "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'." + "required": true, + "propertyOrder": 9, + "comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '8'." 
}, - "height" : - { - "type" : "integer", - "title" : "edt_conf_fg_height_title", - "default" : 0, - "minimum" : 0, - "append" : "edt_append_pixel", - "options" : { - "hidden":true + "height": { + "type": "integer", + "title": "edt_conf_fg_height_title", + "default": 0, + "minimum": 0, + "append": "edt_append_pixel", + "options": { + "hidden": true }, - "required" : true, - "propertyOrder" : 8 + "required": true, + "propertyOrder": 10, + "comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '8'." }, - "fps" : - { - "type" : "integer", - "title" : "edt_conf_enum_custom", - "default" : 15, - "minimum" : 1, - "append" : "fps", - "options" : { - "hidden":true + "framerates": { + "type": "string", + "title": "edt_conf_v4l2_framerate_title", + "propertyOrder": 11, + "required": false + }, + "fps": { + "type": "integer", + "title": "edt_conf_enum_custom", + "default": 15, + "minimum": 0, + "append": "fps", + "options": { + "hidden": true }, - "required" : true, - "propertyOrder" : 10, - "comment" : "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '9'." + "required": true, + "propertyOrder": 12, + "comment": "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '11'." 
}, - "sizeDecimation" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_sizeDecimation_title", - "minimum" : 1, - "maximum" : 30, - "default" : 6, - "required" : true, - "propertyOrder" : 11 + "fpsSoftwareDecimation": { + "type": "integer", + "title": "edt_conf_v4l2_fpsSoftwareDecimation_title", + "minimum": 0, + "maximum": 60, + "default": 0, + "required": true, + "access": "expert", + "propertyOrder": 13 }, - "cropLeft" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropLeft_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "required" : true, - "propertyOrder" : 12 + "flip": { + "type": "string", + "title": "edt_conf_v4l2_flip_title", + "enum": [ "NO_CHANGE", "HORIZONTAL", "VERTICAL", "BOTH" ], + "default": "NO_CHANGE", + "options": { + "enum_titles": [ "edt_conf_enum_NO_CHANGE", "edt_conf_enum_HORIZONTAL", "edt_conf_enum_VERTICAL", "edt_conf_enum_BOTH" ] + }, + "required": true, + "access": "advanced", + "propertyOrder": 14 }, - "cropRight" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropRight_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "required" : true, - "propertyOrder" : 13 + "sizeDecimation": { + "type": "integer", + "title": "edt_conf_v4l2_sizeDecimation_title", + "minimum": 1, + "maximum": 30, + "default": 8, + "required": true, + "propertyOrder": 15 }, - "cropTop" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropTop_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "required" : true, - "propertyOrder" : 14 + "hardware_brightness": { + "type": "integer", + "title": "edt_conf_v4l2_hardware_brightness_title", + "default": 0, + "required": true, + "access": "expert", + "propertyOrder": 16 }, - "cropBottom" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_cropBottom_title", - "minimum" : 0, - "default" : 0, - "append" : "edt_append_pixel", - "required" : true, - "propertyOrder" : 15 + "hardware_contrast": { + "type": "integer", + 
"title": "edt_conf_v4l2_hardware_contrast_title", + "default": 0, + "required": true, + "access": "expert", + "propertyOrder": 17 }, - "cecDetection" : - { - "type" : "boolean", - "title" : "edt_conf_v4l2_cecDetection_title", - "default" : false, - "required" : true, - "propertyOrder" : 16 + "hardware_saturation": { + "type": "integer", + "title": "edt_conf_v4l2_hardware_saturation_title", + "default": 0, + "required": true, + "access": "expert", + "propertyOrder": 18 }, - "signalDetection" : - { - "type" : "boolean", - "title" : "edt_conf_v4l2_signalDetection_title", - "default" : false, - "required" : true, - "propertyOrder" : 17 + "hardware_hue": { + "type": "integer", + "title": "edt_conf_v4l2_hardware_hue_title", + "default": 0, + "required": true, + "access": "expert", + "propertyOrder": 19 }, - "redSignalThreshold" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_redSignalThreshold_title", - "minimum" : 0, - "maximum" : 100, - "default" : 5, - "append" : "edt_append_percent", + "cropLeft": { + "type": "integer", + "title": "edt_conf_v4l2_cropLeft_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "required": true, + "propertyOrder": 20 + }, + "cropRight": { + "type": "integer", + "title": "edt_conf_v4l2_cropRight_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "required": true, + "propertyOrder": 21 + }, + "cropTop": { + "type": "integer", + "title": "edt_conf_v4l2_cropTop_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "required": true, + "propertyOrder": 22 + }, + "cropBottom": { + "type": "integer", + "title": "edt_conf_v4l2_cropBottom_title", + "minimum": 0, + "default": 0, + "append": "edt_append_pixel", + "required": true, + "propertyOrder": 23 + }, + "cecDetection": { + "type": "boolean", + "title": "edt_conf_v4l2_cecDetection_title", + "default": false, + "required": true, + "access": "advanced", + "propertyOrder": 24 + }, + "signalDetection": { + "type": "boolean", + 
"title": "edt_conf_v4l2_signalDetection_title", + "default": false, + "required": true, + "access": "expert", + "propertyOrder": 25 + }, + "redSignalThreshold": { + "type": "integer", + "title": "edt_conf_v4l2_redSignalThreshold_title", + "minimum": 0, + "maximum": 100, + "default": 0, + "append": "edt_append_percent", "options": { "dependencies": { "signalDetection": true } }, - "required" : true, - "propertyOrder" : 18 + "access": "expert", + "required": true, + "propertyOrder": 26 }, - "greenSignalThreshold" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_greenSignalThreshold_title", - "minimum" : 0, - "maximum" : 100, - "default" : 5, - "append" : "edt_append_percent", + "greenSignalThreshold": { + "type": "integer", + "title": "edt_conf_v4l2_greenSignalThreshold_title", + "minimum": 0, + "maximum": 100, + "default": 100, + "append": "edt_append_percent", "options": { "dependencies": { "signalDetection": true } }, - "required" : true, - "propertyOrder" : 19 + "required": true, + "access": "expert", + "propertyOrder": 27 }, - "blueSignalThreshold" : - { - "type" : "integer", - "title" : "edt_conf_v4l2_blueSignalThreshold_title", - "minimum" : 0, - "maximum" : 100, - "default" : 5, - "append" : "edt_append_percent", + "blueSignalThreshold": { + "type": "integer", + "title": "edt_conf_v4l2_blueSignalThreshold_title", + "minimum": 0, + "maximum": 100, + "default": 0, + "append": "edt_append_percent", "options": { "dependencies": { "signalDetection": true } }, - "required" : true, - "propertyOrder" : 20 + "required": true, + "access": "expert", + "propertyOrder": 28 }, - "sDVOffsetMin" : - { - "type" : "number", - "title" : "edt_conf_v4l2_sDVOffsetMin_title", - "minimum" : 0.0, - "maximum" : 1.0, - "default" : 0.25, - "step" : 0.01, + "noSignalCounterThreshold": { + "type": "integer", + "title": "edt_conf_v4l2_noSignalCounterThreshold_title", + "minimum": 1, + "maximum": 1000, + "default": 200, "options": { "dependencies": { "signalDetection": true } }, - 
"required" : true, - "propertyOrder" : 21 + "required": true, + "access": "expert", + "propertyOrder": 29 }, - "sDVOffsetMax" : - { - "type" : "number", - "title" : "edt_conf_v4l2_sDVOffsetMax_title", - "minimum" : 0.0, - "maximum" : 1.0, - "default" : 0.75, - "step" : 0.01, + "sDVOffsetMin": { + "type": "number", + "title": "edt_conf_v4l2_sDVOffsetMin_title", + "minimum": 0.0, + "maximum": 1.0, + "default": 0.1, + "step": 0.01, "options": { "dependencies": { "signalDetection": true } }, - "required" : true, - "propertyOrder" : 22 + "required": true, + "access": "expert", + "propertyOrder": 30 }, - "sDHOffsetMin" : - { - "type" : "number", - "title" : "edt_conf_v4l2_sDHOffsetMin_title", - "minimum" : 0.0, - "maximum" : 1.0, - "default" : 0.25, - "step" : 0.01, + "sDVOffsetMax": { + "type": "number", + "title": "edt_conf_v4l2_sDVOffsetMax_title", + "minimum": 0.0, + "maximum": 1.0, + "default": 0.9, + "step": 0.01, "options": { "dependencies": { "signalDetection": true } }, - "required" : true, - "propertyOrder" : 23 + "required": true, + "access": "expert", + "propertyOrder": 31 }, - "sDHOffsetMax" : - { - "type" : "number", - "title" : "edt_conf_v4l2_sDHOffsetMax_title", - "minimum" : 0.0, - "maximum" : 1.0, - "default" : 0.75, - "step" : 0.01, + "sDHOffsetMin": { + "type": "number", + "title": "edt_conf_v4l2_sDHOffsetMin_title", + "minimum": 0.0, + "maximum": 1.0, + "default": 0.4, + "step": 0.01, "options": { "dependencies": { "signalDetection": true } }, - "required" : true, - "propertyOrder" : 24 + "required": true, + "access": "expert", + "propertyOrder": 32 + }, + "sDHOffsetMax": { + "type": "number", + "title": "edt_conf_v4l2_sDHOffsetMax_title", + "minimum": 0.0, + "maximum": 1.0, + "default": 0.46, + "step": 0.01, + "options": { + "dependencies": { + "signalDetection": true + } + }, + "required": true, + "access": "expert", + "propertyOrder": 33 } }, - "additionalProperties" : true -} + "additionalProperties": true + } diff --git 
a/libsrc/hyperion/schema/schema-instCapture.json b/libsrc/hyperion/schema/schema-instCapture.json index 608717c1..28fe9f4c 100644 --- a/libsrc/hyperion/schema/schema-instCapture.json +++ b/libsrc/hyperion/schema/schema-instCapture.json @@ -2,43 +2,52 @@ "type" : "object", "required" : true, "title" : "edt_conf_instC_heading_title", - "properties" : - { - "systemEnable" : - { - "type" : "boolean", - "required" : true, - "title" : "edt_conf_instC_systemEnable_title", - "default" : true, - "propertyOrder" : 1 + "properties": { + "systemEnable": { + "type": "boolean", + "required": true, + "title": "edt_conf_instC_systemEnable_title", + "default": false, + "propertyOrder": 1 }, - "systemPriority" : - { - "type" : "integer", - "required" : true, - "title" : "edt_conf_general_priority_title", - "minimum" : 100, - "maximum" : 253, - "default" : 250, - "propertyOrder" : 2 + "systemGrabberDevice": { + "type": "string", + "required": true, + "title": "edt_conf_instC_screen_grabber_device_title", + "default": "NONE", + "propertyOrder": 2 }, - "v4lEnable" : - { - "type" : "boolean", - "required" : true, - "title" : "edt_conf_instC_v4lEnable_title", - "default" : false, - "propertyOrder" : 3 + "systemPriority": { + "type": "integer", + "required": true, + "title": "edt_conf_general_priority_title", + "minimum": 100, + "maximum": 253, + "default": 250, + "propertyOrder": 3 }, - "v4lPriority" : - { - "type" : "integer", - "required" : true, - "title" : "edt_conf_general_priority_title", - "minimum" : 100, - "maximum" : 253, - "default" : 240, - "propertyOrder" : 4 + "v4lEnable": { + "type": "boolean", + "required": true, + "title": "edt_conf_instC_v4lEnable_title", + "default": false, + "propertyOrder": 4 + }, + "v4lGrabberDevice": { + "type": "string", + "required": true, + "title": "edt_conf_instC_video_grabber_device_title", + "default": "NONE", + "propertyOrder": 5 + }, + "v4lPriority": { + "type": "integer", + "required": true, + "title": "edt_conf_general_priority_title", + 
"minimum": 100, + "maximum": 253, + "default": 240, + "propertyOrder": 6 } }, "additionalProperties" : false diff --git a/libsrc/hyperion/schema/schema-ledConfig.json b/libsrc/hyperion/schema/schema-ledConfig.json index 4fa79f87..5e9651e5 100644 --- a/libsrc/hyperion/schema/schema-ledConfig.json +++ b/libsrc/hyperion/schema/schema-ledConfig.json @@ -147,7 +147,6 @@ "ledBlacklist": { "type": "array", "title": "conf_leds_layout_blacklist_rules_title", - "minimum": 1, "uniqueItems": true, "items": { "type": "object", diff --git a/libsrc/leddevice/dev_net/LedDeviceAtmoOrb.cpp b/libsrc/leddevice/dev_net/LedDeviceAtmoOrb.cpp index b5bb2140..d55c0ef4 100644 --- a/libsrc/leddevice/dev_net/LedDeviceAtmoOrb.cpp +++ b/libsrc/leddevice/dev_net/LedDeviceAtmoOrb.cpp @@ -50,7 +50,7 @@ bool LedDeviceAtmoOrb::init(const QJsonObject &deviceConfig) if ( LedDevice::init(deviceConfig) ) { - _multicastGroup = deviceConfig["output"].toString(MULTICAST_GROUP_DEFAULT_ADDRESS); + _multicastGroup = deviceConfig["host"].toString(MULTICAST_GROUP_DEFAULT_ADDRESS); _multiCastGroupPort = static_cast(deviceConfig["port"].toInt(MULTICAST_GROUP_DEFAULT_PORT)); _useOrbSmoothing = deviceConfig["useOrbSmoothing"].toBool(false); _skipSmoothingDiff = deviceConfig["skipSmoothingDiff"].toInt(0); diff --git a/libsrc/leddevice/dev_net/LedDeviceFadeCandy.cpp b/libsrc/leddevice/dev_net/LedDeviceFadeCandy.cpp index 7ab59b3b..9d546916 100644 --- a/libsrc/leddevice/dev_net/LedDeviceFadeCandy.cpp +++ b/libsrc/leddevice/dev_net/LedDeviceFadeCandy.cpp @@ -55,7 +55,7 @@ bool LedDeviceFadeCandy::init(const QJsonObject& deviceConfig) } else { - _host = deviceConfig["output"].toString("127.0.0.1"); + _host = deviceConfig["host"].toString("127.0.0.1"); _port = deviceConfig["port"].toInt(STREAM_DEFAULT_PORT); //If host not configured the init fails diff --git a/libsrc/leddevice/dev_net/LedDevicePhilipsHue.cpp b/libsrc/leddevice/dev_net/LedDevicePhilipsHue.cpp index f5cb0f86..470caa57 100644 --- 
a/libsrc/leddevice/dev_net/LedDevicePhilipsHue.cpp +++ b/libsrc/leddevice/dev_net/LedDevicePhilipsHue.cpp @@ -12,7 +12,7 @@ namespace { bool verbose = false; // Configuration settings -const char CONFIG_ADDRESS[] = "output"; +const char CONFIG_ADDRESS[] = "host"; //const char CONFIG_PORT[] = "port"; const char CONFIG_USERNAME[] = "username"; const char CONFIG_CLIENTKEY[] = "clientkey"; diff --git a/libsrc/leddevice/schemas/schema-atmoorb.json b/libsrc/leddevice/schemas/schema-atmoorb.json index 4b6cc4aa..9015fa03 100644 --- a/libsrc/leddevice/schemas/schema-atmoorb.json +++ b/libsrc/leddevice/schemas/schema-atmoorb.json @@ -16,11 +16,10 @@ "access": "advanced", "propertyOrder": 2 }, - "output": { + "host": { "type": "string", "title": "edt_dev_spec_multicastGroup_title", "default": "239.255.255.250", - "access": "expert", "propertyOrder": 3 }, "port": { diff --git a/libsrc/leddevice/schemas/schema-fadecandy.json b/libsrc/leddevice/schemas/schema-fadecandy.json index 0a491f2f..d6d777f5 100644 --- a/libsrc/leddevice/schemas/schema-fadecandy.json +++ b/libsrc/leddevice/schemas/schema-fadecandy.json @@ -1,110 +1,110 @@ { "type":"object", "required":true, - "properties":{ - "output" : { + "properties": { + "host": { "type": "string", - "title":"edt_dev_spec_targetIp_title", - "default" : "127.0.0.1", - "propertyOrder" : 1 + "title": "edt_dev_spec_targetIp_title", + "default": "127.0.0.1", + "propertyOrder": 1 }, - "port" : { + "port": { "type": "number", - "title":"edt_dev_spec_port_title", + "title": "edt_dev_spec_port_title", "default": 7890, - "propertyOrder" : 2 + "propertyOrder": 2 }, "latchTime": { "type": "integer", - "title":"edt_dev_spec_latchtime_title", + "title": "edt_dev_spec_latchtime_title", "default": 0, - "append" : "edt_append_ms", + "append": "edt_append_ms", "minimum": 0, "maximum": 1000, - "access" : "expert", - "propertyOrder" : 3 + "access": "expert", + "propertyOrder": 3 }, "setFcConfig": { "type": "boolean", - 
"title":"edt_dev_spec_FCsetConfig_title", + "title": "edt_dev_spec_FCsetConfig_title", "default": false, - "propertyOrder" : 4 + "propertyOrder": 4 }, "manualLed": { "type": "boolean", - "title":"edt_dev_spec_FCmanualControl_title", + "title": "edt_dev_spec_FCmanualControl_title", "default": false, "options": { "dependencies": { "setFcConfig": true } }, - "propertyOrder" : 5 + "propertyOrder": 5 }, "ledOn": { "type": "boolean", - "title":"edt_dev_spec_FCledToOn_title", + "title": "edt_dev_spec_FCledToOn_title", "default": false, "options": { "dependencies": { "setFcConfig": true } }, - "propertyOrder" : 6 + "propertyOrder": 6 }, "interpolation": { "type": "boolean", - "title":"edt_dev_spec_interpolation_title", + "title": "edt_dev_spec_interpolation_title", "default": false, "options": { "dependencies": { "setFcConfig": true } }, - "propertyOrder" : 7 + "propertyOrder": 7 }, "dither": { "type": "boolean", - "title":"edt_dev_spec_dithering_title", + "title": "edt_dev_spec_dithering_title", "default": false, "options": { "dependencies": { "setFcConfig": true } }, - "propertyOrder" : 8 + "propertyOrder": 8 }, - "gamma" : { - "type" : "number", - "title" : "edt_dev_spec_gamma_title", + "gamma": { + "type": "number", + "title": "edt_dev_spec_gamma_title", "default": 1.0, - "minimum" : 0.1, + "minimum": 0.1, "maximum": 5.0, "options": { "dependencies": { "setFcConfig": true } }, - "propertyOrder" : 9 + "propertyOrder": 9 }, - "whitepoint" : { - "type" : "array", - "title" : "edt_dev_spec_whitepoint_title", + "whitepoint": { + "type": "array", + "title": "edt_dev_spec_whitepoint_title", "options": { "dependencies": { "setFcConfig": true } }, - "propertyOrder" : 10, - "default" : [255,255,255], - "maxItems" : 3, - "minItems" : 3, - "format" : "colorpicker", - "items" : { - "type" : "integer", - "minimum" : 0, + "propertyOrder": 10, + "default": [ 255, 255, 255 ], + "maxItems": 3, + "minItems": 3, + "format": "colorpicker", + "items": { + "type": "integer", + "minimum": 0, 
"maximum": 255, - "default" : 255 + "default": 255 } } }, diff --git a/libsrc/leddevice/schemas/schema-philipshue.json b/libsrc/leddevice/schemas/schema-philipshue.json index 834f2f1b..afd648ef 100644 --- a/libsrc/leddevice/schemas/schema-philipshue.json +++ b/libsrc/leddevice/schemas/schema-philipshue.json @@ -2,7 +2,7 @@ "type": "object", "required": true, "properties": { - "output": { + "host": { "type": "string", "title": "edt_dev_spec_targetIp_title", "default": "", diff --git a/libsrc/utils/FileUtils.cpp b/libsrc/utils/FileUtils.cpp index faf5e67f..0784ef88 100644 --- a/libsrc/utils/FileUtils.cpp +++ b/libsrc/utils/FileUtils.cpp @@ -3,7 +3,6 @@ // qt incl #include #include -#include // hyperion include #include diff --git a/libsrc/utils/ImageResampler.cpp b/libsrc/utils/ImageResampler.cpp index 6bda56d6..68500d8a 100644 --- a/libsrc/utils/ImageResampler.cpp +++ b/libsrc/utils/ImageResampler.cpp @@ -3,30 +3,17 @@ #include ImageResampler::ImageResampler() - : _horizontalDecimation(1) - , _verticalDecimation(1) + : _horizontalDecimation(8) + , _verticalDecimation(8) , _cropLeft(0) , _cropRight(0) , _cropTop(0) , _cropBottom(0) , _videoMode(VideoMode::VIDEO_2D) + , _flipMode(FlipMode::NO_CHANGE) { } -ImageResampler::~ImageResampler() -{ -} - -void ImageResampler::setHorizontalPixelDecimation(int decimator) -{ - _horizontalDecimation = decimator; -} - -void ImageResampler::setVerticalPixelDecimation(int decimator) -{ - _verticalDecimation = decimator; -} - void ImageResampler::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom) { _cropLeft = cropLeft; @@ -35,15 +22,12 @@ void ImageResampler::setCropping(int cropLeft, int cropRight, int cropTop, int c _cropBottom = cropBottom; } -void ImageResampler::setVideoMode(VideoMode mode) -{ - _videoMode = mode; -} - void ImageResampler::processImage(const uint8_t * data, int width, int height, int lineLength, PixelFormat pixelFormat, Image &outputImage) const { int cropRight = _cropRight; int cropBottom = 
_cropBottom; + int xDestFlip = 0, yDestFlip = 0; + int uOffset = 0, vOffset = 0; // handle 3D mode switch (_videoMode) @@ -67,11 +51,40 @@ void ImageResampler::processImage(const uint8_t * data, int width, int height, i for (int yDest = 0, ySource = _cropTop + (_verticalDecimation >> 1); yDest < outputHeight; ySource += _verticalDecimation, ++yDest) { int yOffset = lineLength * ySource; + if (pixelFormat == PixelFormat::NV12) + { + uOffset = (height + ySource / 2) * lineLength; + } + else if (pixelFormat == PixelFormat::I420) + { + uOffset = width * height + (ySource/2) * width/2; + vOffset = width * height * 1.25 + (ySource/2) * width/2; + } for (int xDest = 0, xSource = _cropLeft + (_horizontalDecimation >> 1); xDest < outputWidth; xSource += _horizontalDecimation, ++xDest) { - ColorRgb & rgb = outputImage(xDest, yDest); + switch (_flipMode) + { + case FlipMode::HORIZONTAL: + xDestFlip = xDest; + yDestFlip = outputHeight-yDest-1; + break; + case FlipMode::VERTICAL: + xDestFlip = outputWidth-xDest-1; + yDestFlip = yDest; + break; + case FlipMode::BOTH: + xDestFlip = outputWidth-xDest-1; + yDestFlip = outputHeight-yDest-1; + break; + case FlipMode::NO_CHANGE: + xDestFlip = xDest; + yDestFlip = yDest; + break; + } + + ColorRgb &rgb = outputImage(xDestFlip, yDestFlip); switch (pixelFormat) { case PixelFormat::UYVY: @@ -124,7 +137,24 @@ void ImageResampler::processImage(const uint8_t * data, int width, int height, i rgb.red = data[index+2]; } break; -#ifdef HAVE_JPEG_DECODER + case PixelFormat::NV12: + { + uint8_t y = data[yOffset + xSource]; + uint8_t u = data[uOffset + ((xSource >> 1) << 1)]; + uint8_t v = data[uOffset + ((xSource >> 1) << 1) + 1]; + ColorSys::yuv2rgb(y, u, v, rgb.red, rgb.green, rgb.blue); + } + break; + case PixelFormat::I420: + { + int y = data[yOffset + xSource]; + int u = data[uOffset + (xSource >> 1)]; + int v = data[vOffset + (xSource >> 1)]; + ColorSys::yuv2rgb(y, u, v, rgb.red, rgb.green, rgb.blue); + break; + } + break; +#ifdef 
HAVE_TURBO_JPEG case PixelFormat::MJPEG: break; #endif diff --git a/resources/resources.qrc.in b/resources/resources.qrc.in index e15247f3..d5ed6c80 100644 --- a/resources/resources.qrc.in +++ b/resources/resources.qrc.in @@ -1,5 +1,6 @@ + ${CMAKE_BINARY_DIR}/config/hyperion.config.json.default ${HYPERION_RES} diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index 2135ee58..d1fae2af 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -44,7 +44,6 @@ parts: - libxrender-dev - libavahi-core-dev - libavahi-compat-libdnssd-dev - - libjpeg-dev - libturbojpeg0-dev - libssl-dev - zlib1g-dev diff --git a/src/hyperion-aml/AmlogicWrapper.cpp b/src/hyperion-aml/AmlogicWrapper.cpp index cde574cc..a26de291 100644 --- a/src/hyperion-aml/AmlogicWrapper.cpp +++ b/src/hyperion-aml/AmlogicWrapper.cpp @@ -1,18 +1,27 @@ -// Hyperion-AmLogic includes #include "AmlogicWrapper.h" // Linux includes #include -AmlogicWrapper::AmlogicWrapper(unsigned grabWidth, unsigned grabHeight) : - _thread(this), - _grabber(grabWidth, grabHeight) +AmlogicWrapper::AmlogicWrapper( int updateRate_Hz, + int pixelDecimation, + int cropLeft, int cropRight, + int cropTop, int cropBottom + ) : + _timer(this), + _grabber() { - _thread.setObjectName("AmlogicWrapperThread"); + _grabber.setFramerate(updateRate_Hz); + _grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom); + _grabber.setPixelDecimation(pixelDecimation); + + _timer.setTimerType(Qt::PreciseTimer); + _timer.setSingleShot(false); + _timer.setInterval(_grabber.getUpdateInterval()); // Connect capturing to the timeout signal of the timer - connect(&_thread, SIGNAL (started()), this, SLOT(capture())); + connect(&_timer, SIGNAL(timeout()), this, SLOT(capture())); } const Image & AmlogicWrapper::getScreenshot() @@ -23,20 +32,27 @@ const Image & AmlogicWrapper::getScreenshot() void AmlogicWrapper::start() { - _thread.start(); + _timer.start(); } void AmlogicWrapper::stop() { - _thread.quit(); + _timer.stop(); +} + +bool 
AmlogicWrapper::screenInit() +{ + return _grabber.setupScreen(); } void AmlogicWrapper::capture() { - while (_thread.isRunning()) - { - _grabber.grabFrame(_screenshot); - emit sig_screenshot(_screenshot); - usleep(1 * 1000); - } + _grabber.grabFrame(_screenshot); + + emit sig_screenshot(_screenshot); +} + +void AmlogicWrapper::setVideoMode(VideoMode mode) +{ + _grabber.setVideoMode(mode); } diff --git a/src/hyperion-aml/AmlogicWrapper.h b/src/hyperion-aml/AmlogicWrapper.h index 778533c9..e6b70d07 100644 --- a/src/hyperion-aml/AmlogicWrapper.h +++ b/src/hyperion-aml/AmlogicWrapper.h @@ -1,15 +1,19 @@ - + // QT includes -#include +#include -// Hyperion-Dispmanx includes #include +#include class AmlogicWrapper : public QObject { Q_OBJECT public: - AmlogicWrapper(unsigned grabWidth, unsigned grabHeight); + AmlogicWrapper( int updateRate_Hz=GrabberWrapper::DEFAULT_RATE_HZ, + int pixelDecimation=GrabberWrapper::DEFAULT_PIXELDECIMATION, + int cropLeft=0, int cropRight=0, + int cropTop=0, int cropBottom=0 + ); const Image & getScreenshot(); @@ -20,9 +24,18 @@ public: void stop(); + bool screenInit(); + signals: void sig_screenshot(const Image & screenshot); +public slots: + /// + /// Set the video mode (2D/3D) + /// @param[in] mode The new video mode + /// + void setVideoMode(VideoMode videoMode); + private slots: /// /// Performs screenshot captures and publishes the capture screenshot on the screenshot signal. 
@@ -30,8 +43,9 @@ private slots: void capture(); private: - /// The QT thread to generate capture-publish events - QThread _thread; + + /// The QT timer to generate capture-publish events + QTimer _timer; /// The grabber for creating screenshots AmlogicGrabber _grabber; diff --git a/src/hyperion-aml/hyperion-aml.cpp b/src/hyperion-aml/hyperion-aml.cpp index 2dfed16f..16a8eef4 100644 --- a/src/hyperion-aml/hyperion-aml.cpp +++ b/src/hyperion-aml/hyperion-aml.cpp @@ -26,6 +26,9 @@ void saveScreenshot(QString filename, const Image & image) int main(int argc, char ** argv) { + Logger *log = Logger::getInstance("AMLOGIC"); + Logger::setLogLevel(Logger::INFO); + std::cout << "hyperion-aml:" << std::endl << "\tVersion : " << HYPERION_VERSION << " (" << HYPERION_BUILD_ID << ")" << std::endl @@ -40,24 +43,64 @@ int main(int argc, char ** argv) // create the option parser and initialize all parser Parser parser("AmLogic capture application for Hyperion. Will automatically search a Hyperion server if -a option isn't used. Please note that if you have more than one server running it's more or less random which one will be used."); - IntOption & argWidth = parser.add (0x0, "width", "Width of the captured image [default: %1]", "160", 160, 4096); - IntOption & argHeight = parser.add (0x0, "height", "Height of the captured image [default: %1]", "160", 160, 4096); - BooleanOption & argScreenshot = parser.add(0x0, "screenshot", "Take a single screenshot, save it to file and quit"); - Option & argAddress = parser.add