- New Media Foundation grabber

- JsonAPI available grabber fix
- commented json config removed
Paulchen Panther 2020-12-18 17:38:21 +01:00
parent a42aae44d1
commit c672ae6075
30 changed files with 2097 additions and 666 deletions


@@ -170,6 +170,11 @@ jobs:
run: |
choco install --no-progress python nsis openssl directx-sdk -y
- name: Install libjpeg-turbo
run: |
Invoke-WebRequest https://netcologne.dl.sourceforge.net/project/libjpeg-turbo/2.0.6/libjpeg-turbo-2.0.6-vc64.exe -OutFile libjpeg-turbo.exe
.\libjpeg-turbo /S
- name: Set up x64 build architecture environment
shell: cmd
run: call "${{env.VCINSTALLDIR}}\Auxiliary\Build\vcvars64.bat"


@@ -50,6 +50,7 @@ SET ( DEFAULT_USE_SYSTEM_PROTO_LIBS OFF )
SET ( DEFAULT_USE_SYSTEM_MBEDTLS_LIBS OFF )
SET ( DEFAULT_TESTS OFF )
SET ( DEFAULT_EXPERIMENTAL OFF )
SET ( DEFAULT_MF OFF )
IF ( ${CMAKE_SYSTEM} MATCHES "Linux" )
SET ( DEFAULT_V4L2 ON )
@@ -60,6 +61,7 @@ IF ( ${CMAKE_SYSTEM} MATCHES "Linux" )
SET ( DEFAULT_CEC ON )
ELSEIF ( WIN32 )
SET ( DEFAULT_DX ON )
SET ( DEFAULT_MF ON )
ELSE()
SET ( DEFAULT_V4L2 OFF )
SET ( DEFAULT_FB OFF )
@@ -159,7 +161,7 @@ else()
endif()
message(STATUS "ENABLE_FB = ${ENABLE_FB}")
option(ENABLE_OSX "Enable the osx grabber" ${DEFAULT_OSX} )
option(ENABLE_OSX "Enable the OSX grabber" ${DEFAULT_OSX} )
message(STATUS "ENABLE_OSX = ${ENABLE_OSX}")
option(ENABLE_SPIDEV "Enable the SPIDEV device" ${DEFAULT_SPIDEV} )
@@ -171,6 +173,9 @@ message(STATUS "ENABLE_TINKERFORGE = ${ENABLE_TINKERFORGE}")
option(ENABLE_V4L2 "Enable the V4L2 grabber" ${DEFAULT_V4L2})
message(STATUS "ENABLE_V4L2 = ${ENABLE_V4L2}")
option(ENABLE_MF "Enable the Media Foundation grabber" ${DEFAULT_MF})
message(STATUS "ENABLE_MF = ${ENABLE_MF}")
option(ENABLE_WS281XPWM "Enable the WS281x-PWM device" ${DEFAULT_WS281XPWM} )
message(STATUS "ENABLE_WS281XPWM = ${ENABLE_WS281XPWM}")
@@ -189,7 +194,7 @@ message(STATUS "ENABLE_X11 = ${ENABLE_X11}")
option(ENABLE_XCB "Enable the XCB grabber" ${DEFAULT_XCB})
message(STATUS "ENABLE_XCB = ${ENABLE_XCB}")
option(ENABLE_QT "Enable the qt grabber" ${DEFAULT_QT})
option(ENABLE_QT "Enable the Qt grabber" ${DEFAULT_QT})
message(STATUS "ENABLE_QT = ${ENABLE_QT}")
option(ENABLE_DX "Enable the DirectX grabber" ${DEFAULT_DX})
@@ -303,7 +308,7 @@ if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
# The Qt5_DIR should point to Qt5Config.cmake -> C:/Qt/5.xx/msvc2017_64/lib/cmake/Qt5
# The CMAKE_PREFIX_PATH should point to the install directory -> C:/Qt/5.xx/msvc2017_64
#
# Alternatively, use Qt5_BASE_DIR environment variable to point to Qt version to be used
# In MSVC19 add into CMakeSettings.json
#
# "environments": [
@@ -333,7 +338,7 @@ if (CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
message(STATUS "Add ${qt_module_path} to CMAKE_MODULE_PATH")
SET(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${qt_module_path}")
#message(STATUS "CMAKE_PREFIX_PATH: ${CMAKE_PREFIX_PATH}")
#message(STATUS "CMAKE_MODULE_PATH: ${CMAKE_MODULE_PATH}")
@@ -394,14 +399,14 @@ find_package(Threads REQUIRED)
add_definitions(${QT_DEFINITIONS})
# Add JPEG library
if (ENABLE_V4L2)
if (ENABLE_V4L2 OR ENABLE_MF)
# Turbo JPEG
find_package(TurboJPEG)
if (TURBOJPEG_FOUND)
add_definitions(-DHAVE_TURBO_JPEG)
message( STATUS "Using Turbo JPEG library: ${TurboJPEG_LIBRARY}")
include_directories(${TurboJPEG_INCLUDE_DIRS})
else()
elseif(ENABLE_V4L2)
# System JPEG
find_package(JPEG)
if (JPEG_FOUND)
@@ -411,7 +416,7 @@ if (ENABLE_V4L2)
else()
message( STATUS "JPEG library not found, MJPEG camera format won't work in V4L2 grabber.")
endif()
endif (TURBOJPEG_FOUND)
endif ()
if (TURBOJPEG_FOUND OR JPEG_FOUND)
add_definitions(-DHAVE_JPEG_DECODER)
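The HAVE_TURBO_JPEG / HAVE_JPEG_DECODER definitions set here are compile-time switches that the grabber sources test with #ifdef. A minimal sketch of that pattern, using only the public TurboJPEG C API (not code from this commit; function and variable names are illustrative):

// Illustrative sketch, not part of this commit: decode one MJPEG frame to RGB
// when a JPEG decoder was compiled in, otherwise report the format as unsupported.
#include <cstdio>
#include <vector>
#ifdef HAVE_TURBO_JPEG
#include <turbojpeg.h>
#endif
bool decodeMjpegFrame(unsigned char* jpegData, unsigned long jpegSize, std::vector<unsigned char>& rgbOut)
{
#ifdef HAVE_TURBO_JPEG
	tjhandle handle = tjInitDecompress();
	if (handle == nullptr)
		return false;
	int width = 0, height = 0, subsamp = 0;
	bool ok = (tjDecompressHeader2(handle, jpegData, jpegSize, &width, &height, &subsamp) == 0);
	if (ok)
	{
		rgbOut.resize(static_cast<size_t>(width) * height * 3);
		ok = (tjDecompress2(handle, jpegData, jpegSize, rgbOut.data(), width, 0, height, TJPF_RGB, TJFLAG_FASTDCT) == 0);
	}
	tjDestroy(handle);
	return ok;
#else
	(void)jpegData; (void)jpegSize; (void)rgbOut;
	std::fprintf(stderr, "built without HAVE_TURBO_JPEG, MJPEG frames are not supported\n");
	return false;
#endif
}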


@@ -1,48 +1,51 @@
// Generated config file
// Define to enable the dispmanx grabber
// Define to enable the DispmanX grabber
#cmakedefine ENABLE_DISPMANX
// Define to enable the v4l2 grabber
// Define to enable the V4L2 grabber
#cmakedefine ENABLE_V4L2
// Define to enable the framebuffer grabber
// Define to enable the Media Foundation grabber
#cmakedefine ENABLE_MF
// Define to enable the Framebuffer grabber
#cmakedefine ENABLE_FB
// Define to enable the amlogic grabber
// Define to enable the AMLogic grabber
#cmakedefine ENABLE_AMLOGIC
// Define to enable the osx grabber
// Define to enable the OSX grabber
#cmakedefine ENABLE_OSX
// Define to enable the x11 grabber
// Define to enable the X11 grabber
#cmakedefine ENABLE_X11
// Define to enable the xcb grabber
// Define to enable the XCB grabber
#cmakedefine ENABLE_XCB
// Define to enable the qt grabber
// Define to enable the Qt grabber
#cmakedefine ENABLE_QT
// Define to enable the DirectX grabber
#cmakedefine ENABLE_DX
// Define to enable the spi-device
// Define to enable the SPI-Device
#cmakedefine ENABLE_SPIDEV
// Define to enable the ws281x-pwm-via-dma-device using jgarff's library
// Define to enable the WS281x-PWM-via-DMA-device using jgarff's library
#cmakedefine ENABLE_WS281XPWM
// Define to enable the tinkerforge device
// Define to enable the Tinkerforge device
#cmakedefine ENABLE_TINKERFORGE
// Define to enable avahi
// Define to enable AVAHI
#cmakedefine ENABLE_AVAHI
// Define to enable cec
// Define to enable CEC
#cmakedefine ENABLE_CEC
// Define to enable the usb / hid devices
// Define to enable the USB / HID devices
#cmakedefine ENABLE_USB_HID
// Define to enable profiler for development purpose // Define to enable profiler for development purpose


@@ -424,6 +424,20 @@
"edt_conf_v4l2_sizeDecimation_title": "Size decimation",
"edt_conf_v4l2_standard_expl": "Select the video standard for your region. 'Automatic' keeps the value chosen by the v4l2 interface.",
"edt_conf_v4l2_standard_title": "Video standard",
"edt_conf_v4l2_fpsSoftwareDecimation_title" : "Software frame skipping",
"edt_conf_v4l2_fpsSoftwareDecimation_expl" : "To save resources every n'th frame will be processed only. For ex. if grabber is set to 30FPS with this option set to 5 the final result will be around 6FPS (1 - disabled)",
"edt_conf_v4l2_encoding_title" : "Encoding format",
"edt_conf_v4l2_encoding_expl" : "Force video encoding for multiformat capable grabbers",
"edt_conf_v4l2_hardware_brightness_title" : "Hardware brightness control",
"edt_conf_v4l2_hardware_brightness_expl" : "Set hardware brightness if device supports it, check logs (0=disabled)",
"edt_conf_v4l2_hardware_contrast_title" : "Hardware contrast control",
"edt_conf_v4l2_hardware_contrast_expl" : "Set hardware contrast if device supports it, check logs (0=disabled)",
"edt_conf_v4l2_noSignalCounterThreshold_title" : "Signal Counter Threshold",
"edt_conf_v4l2_noSignalCounterThreshold_expl" : "Count of frames (check that with grabber's current FPS mode) after which the no signal is triggered",
"edt_conf_v4l2_hardware_saturation_title" : "Hardware saturation control",
"edt_conf_v4l2_hardware_saturation_expl" : "Set hardware saturation if device supports it, check logs (0=disabled)",
"edt_conf_v4l2_hardware_hue_title" : "Hardware hue control",
"edt_conf_v4l2_hardware_hue_expl" : "Set hardware hue if device supports it, check logs (0=disabled)",
"edt_conf_webc_crtPath_expl": "Path to the certification file (format should be PEM)", "edt_conf_webc_crtPath_expl": "Path to the certification file (format should be PEM)",
"edt_conf_webc_crtPath_title": "Certificate path", "edt_conf_webc_crtPath_title": "Certificate path",
"edt_conf_webc_docroot_expl": "Local webinterface root path (just for webui developer)", "edt_conf_webc_docroot_expl": "Local webinterface root path (just for webui developer)",
@ -935,4 +949,4 @@
"wiz_yeelight_intro1": "This wizards configures Hyperion for the Yeelight system. Features are the Yeelighs' auto detection, setting each light to a specific position on your picture or disable it and tune the Hyperion settings automatically! So in short: All you need are some clicks and you are done!", "wiz_yeelight_intro1": "This wizards configures Hyperion for the Yeelight system. Features are the Yeelighs' auto detection, setting each light to a specific position on your picture or disable it and tune the Hyperion settings automatically! So in short: All you need are some clicks and you are done!",
"wiz_yeelight_title": "Yeelight Wizard", "wiz_yeelight_title": "Yeelight Wizard",
"wiz_yeelight_unsupported": "Unsupported" "wiz_yeelight_unsupported": "Unsupported"
} }
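The fpsSoftwareDecimation strings above describe plain counter-based frame skipping. A hypothetical sketch of that idea (illustrative names, not the grabber's actual implementation):

// Illustrative sketch, not from this commit: process only every n'th frame.
// With the source delivering 30 FPS and decimation = 5, roughly 6 FPS reach
// the image pipeline; decimation = 1 disables skipping.
#include <cstdint>
struct FrameSkipper
{
	int decimation = 1;       // 1 = disabled
	uint64_t frameCounter = 0;
	bool shouldProcess()
	{
		++frameCounter;
		return decimation <= 1 || (frameCounter % decimation) == 0;
	}
};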


@@ -22,18 +22,25 @@ $(document).ready(function () {
"propertyOrder": 3,
"required": true
},
"encoding_format":
{
"type": "string",
"title": "edt_conf_v4l2_encoding_title",
"propertyOrder": 5,
"required": true
},
"resolutions": "resolutions":
{ {
"type": "string", "type": "string",
"title": "edt_conf_v4l2_resolution_title", "title": "edt_conf_v4l2_resolution_title",
"propertyOrder": 6, "propertyOrder": 8,
"required": true "required": true
}, },
"framerates": "framerates":
{ {
"type": "string", "type": "string",
"title": "edt_conf_v4l2_framerate_title", "title": "edt_conf_v4l2_framerate_title",
"propertyOrder": 9, "propertyOrder": 11,
"required": true "required": true
} }
}; };
@ -53,7 +60,7 @@ $(document).ready(function () {
? enumTitelVals.push(v4l2_properties[i]['name']) ? enumTitelVals.push(v4l2_properties[i]['name'])
: enumTitelVals.push(v4l2_properties[i]['device']); : enumTitelVals.push(v4l2_properties[i]['device']);
} }
} else if (key == 'resolutions' || key == 'framerates') { } else if (key == 'resolutions' || key == 'framerates' || key == 'encoding_format') {
for (var i = 0; i < v4l2_properties.length; i++) { for (var i = 0; i < v4l2_properties.length; i++) {
if (v4l2_properties[i]['device'] == device) { if (v4l2_properties[i]['device'] == device) {
enumVals = enumTitelVals = v4l2_properties[i][key]; enumVals = enumTitelVals = v4l2_properties[i][key];
@ -105,7 +112,7 @@ $(document).ready(function () {
var val = ed.getValue(); var val = ed.getValue();
if (key == 'available_devices') { if (key == 'available_devices') {
var V4L2properties = ['device_inputs', 'resolutions', 'framerates']; var V4L2properties = ['device_inputs', 'resolutions', 'framerates', 'encoding_format'];
if (val == 'custom') { if (val == 'custom') {
var grabberV4L2 = ed.parent; var grabberV4L2 = ed.parent;
V4L2properties.forEach(function (item) { V4L2properties.forEach(function (item) {
@ -134,7 +141,7 @@ $(document).ready(function () {
(toggleOption('device', false), toggleOption('input', false), (toggleOption('device', false), toggleOption('input', false),
toggleOption('width', false), toggleOption('height', false), toggleOption('width', false), toggleOption('height', false),
toggleOption('fps', false)); toggleOption('fps', false), toggleOption('encoding', false));
} else { } else {
var grabberV4L2 = ed.parent; var grabberV4L2 = ed.parent;
V4L2properties.forEach(function (item) { V4L2properties.forEach(function (item) {
@ -169,6 +176,11 @@ $(document).ready(function () {
val != 'custom' val != 'custom'
? toggleOption('input', false) ? toggleOption('input', false)
: toggleOption('input', true); : toggleOption('input', true);
if (key == 'encoding_format')
val != 'custom'
? toggleOption('encoding', false)
: toggleOption('encoding', true);
});
});
};
@@ -239,8 +251,16 @@ $(document).ready(function () {
if (window.serverInfo.grabbers.active)
{
var activegrabber = window.serverInfo.grabbers.active.toLowerCase();
$("#" + selector + " option[value='" + activegrabber + "']").attr('selected', 'selected');
var activegrabbers = window.serverInfo.grabbers.active.map(v => v.toLowerCase());
options = $("#" + selector + " option");
for (var i = 0; i < options.length; i++) {
var type = options[i].value.toLowerCase();
if (activegrabbers.indexOf(type) > -1) {
$("#" + selector + " option[value='" + type + "']").attr('selected', 'selected');
break;
}
}
}
var selectedType = $("#root_framegrabber_type").val();
@@ -272,7 +292,7 @@ $(document).ready(function () {
conf_editor_v4l2.getEditor('root.grabberV4L2.available_devices').setValue('auto');
if (window.serverConfig.grabberV4L2.available_devices == 'auto') {
['device_inputs', 'standard', 'resolutions', 'framerates'].forEach(function (item) {
['device_inputs', 'standard', 'resolutions', 'framerates', 'encoding_format'].forEach(function (item) {
conf_editor_v4l2.getEditor('root.grabberV4L2.' + item).setValue('auto');
conf_editor_v4l2.getEditor('root.grabberV4L2.' + item).disable();
});
@@ -286,6 +306,9 @@ $(document).ready(function () {
if (window.serverConfig.grabberV4L2.framerates == 'custom' && window.serverConfig.grabberV4L2.device != 'auto')
toggleOption('fps', true);
if (window.serverConfig.grabberV4L2.encoding_format == 'custom' && window.serverConfig.grabberV4L2.device != 'auto')
toggleOption('encoding', true);
});
$('#btn_submit_v4l2').off().on('click', function () {
@@ -303,6 +326,12 @@ $(document).ready(function () {
if (v4l2Options.grabberV4L2.device_inputs == 'auto')
v4l2Options.grabberV4L2.input = -1;
if (v4l2Options.grabberV4L2.encoding_format != 'custom' && v4l2Options.grabberV4L2.encoding_format != 'auto' && v4l2Options.grabberV4L2.available_devices != 'auto')
v4l2Options.grabberV4L2.encoding = v4l2Options.grabberV4L2.encoding_format;
if (v4l2Options.grabberV4L2.encoding_format == 'auto' || v4l2Options.grabberV4L2.encoding_format == 'NO_CHANGE')
v4l2Options.grabberV4L2.encoding = 'NO_CHANGE';
if (v4l2Options.grabberV4L2.resolutions != 'custom' && v4l2Options.grabberV4L2.resolutions != 'auto' && v4l2Options.grabberV4L2.available_devices != 'auto')
(v4l2Options.grabberV4L2.width = parseInt(v4l2Options.grabberV4L2.resolutions.split('x')[0]),
v4l2Options.grabberV4L2.height = parseInt(v4l2Options.grabberV4L2.resolutions.split('x')[1]));


@@ -3,15 +3,31 @@
# TurboJPEG_INCLUDE_DIRS
# TurboJPEG_LIBRARY
if (ENABLE_MF)
find_path(TurboJPEG_INCLUDE_DIRS
NAMES turbojpeg.h
PATHS
"C:/libjpeg-turbo64"
PATH_SUFFIXES include
)
find_library(TurboJPEG_LIBRARY
NAMES turbojpeg turbojpeg-static
PATHS
"C:/libjpeg-turbo64"
PATH_SUFFIXES bin lib
)
else()
find_path(TurboJPEG_INCLUDE_DIRS
NAMES turbojpeg.h
PATH_SUFFIXES include
)
find_library(TurboJPEG_LIBRARY
NAMES turbojpeg turbojpeg-static
PATH_SUFFIXES bin lib
)
endif()
if(TurboJPEG_INCLUDE_DIRS AND TurboJPEG_LIBRARY)
include(CheckCSourceCompiles)
@@ -26,7 +42,7 @@ if(TurboJPEG_INCLUDE_DIRS AND TurboJPEG_LIBRARY)
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(TurboJpeg
find_package_handle_standard_args(TurboJPEG
FOUND_VAR TURBOJPEG_FOUND
REQUIRED_VARS TurboJPEG_LIBRARY TurboJPEG_INCLUDE_DIRS TURBOJPEG_WORKS
TurboJPEG_INCLUDE_DIRS TurboJPEG_LIBRARY


@@ -1,535 +0,0 @@
// This is a example config (hyperion.config.json) with comments, in any case you need to create your own one with HyperCon!
// location of all configs: /etc/hyperion
// Webpage: https://www.hyperion-project.org
{
/// general Settings
/// * 'name' : The user friendly name of the hyperion instance (used for network things)
/// * 'versionBranch' : Which branch should be used for hyperion version
/// * 'showOptHelp' : Show option expanations at the webui. Highly recommended for beginners.
"general" :
{
"name" : "MyHyperionConfig",
"watchedVersionBranch" : "Stable",
"showOptHelp" : true
},
/// set log level: silent warn verbose debug
"logger" :
{
"level" : "warn"
},
/// Device configuration contains the following fields:
/// * 'name' : The user friendly name of the device (only used for display purposes)
/// * 'type' : The type of the device
/// * [device type specific configuration]
/// * 'colorOrder' : The order of the color bytes ('rgb', 'rbg', 'bgr', etc.).
/// * 'rewriteTime': in ms. Data is resend to leds, if no new data is available in thistime. 0 means no refresh
"device" :
{
"type" : "file",
"hardwareLedCount" : 1,
"output" : "/dev/null",
"rate" : 1000000,
"colorOrder" : "rgb",
"rewriteTime": 5000
},
/// Color manipulation configuration used to tune the output colors to specific surroundings.
/// The configuration contains a list of color-transforms. Each transform contains the
/// following fields:
/// * 'imageToLedMappingType' : multicolor_mean - every led has it's own calculatedmean color
/// unicolor_mean - every led has same color, color is the mean of whole image
/// * 'channelAdjustment'
/// * 'id' : The unique identifier of the channel adjustments (eg 'device_1')
/// * 'leds' : The indices (or index ranges) of the leds to which this channel adjustment applies
/// (eg '0-5, 9, 11, 12-17'). The indices are zero based.
/// * 'white'/'red'/'green'/'blue'/'cyan'/'magenta'/'yellow' : Array of RGB to adjust the output color
/// * 'gammaRed'/'gammaGreen'/'gammaBlue' : Gamma value for each channel
/// * 'id' : The unique identifier of the channel adjustments (eg 'device_1')
/// * 'id' : The unique identifier of the channel adjustments (eg 'device_1')
/// * 'backlightThreshold' : Minimum brightness (backlight)
/// * 'backlightColored' : backlight with color, instead of white
/// * 'brightness' : overall brightness
/// * 'brightnessCompensation' : 100 means brightness differences are compensated (white is as bright as red, is as bright as yellow.
/// 0 means white is 3x brighter than red, yellow is 2x brighter than red
"color" :
{
"imageToLedMappingType" : "multicolor_mean",
"channelAdjustment" :
[
{
"id" : "default",
"leds" : "*",
"white" : [255,255,255],
"red" : [255,0,0],
"green" : [0,255,0],
"blue" : [0,0,255],
"cyan" : [0,255,255],
"magenta" : [255,0,255],
"yellow" : [255,255,0],
"gammaRed" : 1.5,
"gammaGreen" : 1.5,
"gammaBlue" : 1.5,
"backlightThreshold" : 0,
"backlightColored" : false,
"brightness" : 100,
"brightnessCompensation" : 80
}
]
},
/// smoothing
/// * 'smoothing' : Smoothing of the colors in the time-domain with the following tuning
/// parameters:
/// - 'enable' Enable or disable the smoothing (true/false)
/// - 'type' The type of smoothing algorithm ('linear' or 'none')
/// - 'time_ms' The time constant for smoothing algorithm in milliseconds
/// - 'updateFrequency' The update frequency of the leds in Hz
/// - 'updateDelay' The delay of the output to leds (in periods of smoothing)
/// - 'continuousOutput' Flag for enabling continuous output to Leds regardless of new input or not
"smoothing" :
{
"enable" : true,
"type" : "linear",
"time_ms" : 200,
"updateFrequency" : 25.0000,
"updateDelay" : 0,
"continuousOutput" : true
},
/// Configuration for the embedded V4L2 grabber
/// * device : V4L2 Device to use [default="auto"] (Auto detection)
/// * width : The width of the grabbed frames (pixels) [default=0]
/// * height : The height of the grabbed frames (pixels) [default=0]
/// * standard : Video standard (PAL/NTSC/SECAM/NO_CHANGE) [default="NO_CHANGE"]
/// * sizeDecimation : Size decimation factor [default=8]
/// * cropLeft : Cropping from the left [default=0]
/// * cropRight : Cropping from the right [default=0]
/// * cropTop : Cropping from the top [default=0]
/// * cropBottom : Cropping from the bottom [default=0]
/// * signalDetection : enable/disable signal detection [default=false]
/// * cecDetection : enable/disable cec detection [default=false]
/// * redSignalThreshold : Signal threshold for the red channel between 0 and 100 [default=5]
/// * greenSignalThreshold : Signal threshold for the green channel between 0 and 100 [default=5]
/// * blueSignalThreshold : Signal threshold for the blue channel between 0 and 100 [default=5]
/// * sDHOffsetMin : area for signal detection - horizontal minimum offset value. Values between 0.0 and 1.0
/// * sDVOffsetMin : area for signal detection - vertical minimum offset value. Values between 0.0 and 1.0
/// * sDHOffsetMax : area for signal detection - horizontal maximum offset value. Values between 0.0 and 1.0
/// * sDVOffsetMax : area for signal detection - vertical maximum offset value. Values between 0.0 and 1.0
"grabberV4L2" :
{
"device" : "auto",
"width" : 0,
"height" : 0,
"standard" : "NO_CHANGE",
"sizeDecimation" : 8,
"priority" : 240,
"cropLeft" : 0,
"cropRight" : 0,
"cropTop" : 0,
"cropBottom" : 0,
"redSignalThreshold" : 5,
"greenSignalThreshold" : 5,
"blueSignalThreshold" : 5,
"signalDetection" : false,
"cecDetection" : false,
"sDVOffsetMin" : 0.25,
"sDHOffsetMin" : 0.25,
"sDVOffsetMax" : 0.75,
"sDHOffsetMax" : 0.75
},
/// The configuration for the frame-grabber, contains the following items:
/// * type : type of grabber. (auto|osx|dispmanx|amlogic|x11|xcb|framebuffer|qt) [auto]
/// * width : The width of the grabbed frames [pixels]
/// * height : The height of the grabbed frames [pixels]
/// * frequency_Hz : The frequency of the frame grab [Hz]
/// * ATTENTION : Power-of-Two resolution is not supported and leads to unexpected behaviour!
"framegrabber" :
{
// for all type of grabbers
"type" : "framebuffer",
"frequency_Hz" : 10,
"cropLeft" : 0,
"cropRight" : 0,
"cropTop" : 0,
"cropBottom" : 0,
// valid for grabber: osx|dispmanx|amlogic|framebuffer
"width" : 96,
"height" : 96,
// valid for x11|xcb|qt
"pixelDecimation" : 8,
// valid for qt
"display" 0
},
/// The black border configuration, contains the following items:
/// * enable : true if the detector should be activated
/// * threshold : Value below which a pixel is regarded as black (value between 0 and 100 [%])
/// * unknownFrameCnt : Number of frames without any detection before the border is set to 0 (default 600)
/// * borderFrameCnt : Number of frames before a consistent detected border gets set (default 50)
/// * maxInconsistentCnt : Number of inconsistent frames that are ignored before a new border gets a chance to proof consistency
/// * blurRemoveCnt : Number of pixels that get removed from the detected border to cut away blur (default 1)
/// * mode : Border detection mode (values=default,classic,osd,letterbox)
"blackborderdetector" :
{
"enable" : true,
"threshold" : 5,
"unknownFrameCnt" : 600,
"borderFrameCnt" : 50,
"maxInconsistentCnt" : 10,
"blurRemoveCnt" : 1,
"mode" : "default"
},
/// foregroundEffect sets a "booteffect" or "bootcolor" during startup for a given period in ms (duration_ms)
/// * enable : if true, foreground effect is enabled
/// * type : choose between "color" or "effect"
/// * color : if type is color, a color is used (RGB) (example: [0,0,255])
/// * effect : if type is effect a effect is used (example: "Rainbow swirl fast")
/// * duration_ms : The duration of the selected effect or color (0=endless)
/// HINT: "foregroundEffect" starts always with priority 0, so it blocks all remotes and grabbers if the duration_ms is endless (0)
"foregroundEffect" :
{
"enable" : true,
"type" : "effect",
"color" : [0,0,255],
"effect" : "Rainbow swirl fast",
"duration_ms" : 3000
},
/// backgroundEffect sets a background effect or color. It is used when all capture devices are stopped (manual via remote). Could be also selected via priorities selection.
/// * enable : if true, background effect is enabled
/// * type : choose between "color" or "effect"
/// * color : if type is color, a color is used (RGB) (example: [255,134,0])
/// * effect : if type is effect a effect is used (example: "Rainbow swirl fast")
"backgroundEffect" :
{
"enable" : true,
"type" : "effect",
"color" : [255,138,0],
"effect" : "Warm mood blobs"
},
/// The configuration of the Json/Proto forwarder. Forward messages to multiple instances of Hyperion on same and/or other hosts
/// 'proto' is mostly used for video streams and 'json' for effects
/// * enable : Enable or disable the forwarder (true/false)
/// * proto : Proto server adress and port of your target. Syntax:[IP:PORT] -> ["127.0.0.1:19401"] or more instances to forward ["127.0.0.1:19401","192.168.0.24:19403"]
/// * json : Json server adress and port of your target. Syntax:[IP:PORT] -> ["127.0.0.1:19446"] or more instances to forward ["127.0.0.1:19446","192.168.0.24:19448"]
/// HINT:If you redirect to "127.0.0.1" (localhost) you could start a second hyperion with another device/led config!
/// Be sure your client(s) is/are listening on the configured ports. The second Hyperion (if used) also needs to be configured! (WebUI -> Settings Level (Expert) -> Configuration -> Network Services -> Forwarder)
"forwarder" :
{
"enable" : false,
"flat" : ["127.0.0.1:19401"],
"json" : ["127.0.0.1:19446"]
},
/// The configuration of the Json server which enables the json remote interface
/// * port : Port at which the json server is started
"jsonServer" :
{
"port" : 19444
},
/// The configuration of the Flatbuffer server which enables the Flatbuffer remote interface
/// * port : Port at which the flatbuffer server is started
"flatbufServer" :
{
"enable" : true,
"port" : 19400,
"timeout" : 5
},
/// The configuration of the Protobuffer server which enables the Protobuffer remote interface
/// * port : Port at which the protobuffer server is started
"protoServer" :
{
"enable" : true,
"port" : 19445,
"timeout" : 5
},
/// The configuration of the boblight server which enables the boblight remote interface
/// * enable : Enable or disable the boblight server (true/false)
/// * port : Port at which the boblight server is started
/// * priority : Priority of the boblight server (Default=128) HINT: lower value result in HIGHER priority!
"boblightServer" :
{
"enable" : false,
"port" : 19333,
"priority" : 128
},
/// Configuration of the Hyperion webserver
/// * document_root : path to hyperion webapp files (webconfig developer only)
/// * port : the port where hyperion webapp is accasible
/// * sslPort : the secure (HTTPS) port of the hyperion webapp
/// * crtPath : the path to a certificate file to allow HTTPS connections. Should be in PEM format
/// * keyPath : the path to a private key file to allow HTTPS connections. Should be in PEM format and RSA encrypted
/// * keyPassPhrase : optional: If the key file requires a password add it here
"webConfig" :
{
"document_root" : "/path/to/files",
"port" : 8090,
"sslPort" : 8092,
"crtPath" : "/path/to/mycert.crt",
"keyPath" : "/path/to/mykey.key",
"keyPassPhrase" : ""
},
/// The configuration of the effect engine, contains the following items:
/// * paths : An array with absolute location(s) of directories with effects,
/// $ROOT is a keyword which will be replaced with the current rootPath that can be specified on startup from the commandline (defaults to your home directory)
/// * disable : An array with effect names that shouldn't be loaded
"effects" :
{
"paths" :
[
"$ROOT/custom-effects",
"/usr/share/hyperion/effects"
],
"disable" :
[
"Rainbow swirl",
"X-Mas"
]
},
"instCapture" :
{
"systemEnable" : true,
"systemPriority" : 250,
"v4lEnable" : false,
"v4lPriority" : 240
},
/// The configuration of the network security restrictions, contains the following items:
/// * internetAccessAPI : When true allows connection from internet to the API. When false it blocks all outside connections
/// * restirctedInternetAccessAPI : webui voodoo only - ignore it
/// * ipWhitelist : Whitelist ip addresses from the internet to allow access to the API
/// * apiAuth : When true the API requires authentication through tokens to use the API. Read also "localApiAuth"
/// * localApiAuth : When false connections from the local network don't require an API authentification.
/// * localAdminApiAuth : When false connections from the local network don't require an authentification for administration access.
"network" :
{
"internetAccessAPI" : false,
"restirctedInternetAccessAPI" : false,
"ipWhitelist" : [],
"apiAuth" : true,
"localApiAuth" : false,
"localAdminAuth": true
},
/// Recreate and save led layouts made with web config. These values are just helpers for ui, not for Hyperion.
"ledConfig" :
{
"classic":
{
"top" : 8,
"bottom" : 8,
"left" : 5,
"right" : 5,
"glength" : 0,
"gpos" : 0,
"position" : 0,
"reverse" : false,
"hdepth" : 8,
"vdepth" : 5,
"overlap" : 0,
"edgegap" : 0,
"ptlh" : 0,
"ptlv" : 0,
"ptrh" : 100,
"ptrv" : 0,
"pblh" : 0,
"pblv" : 100,
"pbrh" : 100,
"pbrv" : 100
},
"matrix":
{
"ledshoriz": 10,
"ledsvert" : 10,
"cabling" : "snake",
"start" : "top-left"
}
},
/// The configuration for each individual led. This contains the specification of the area
/// averaged of an input image for each led to determine its color. Each item in the list
/// contains the following fields:
/// * hmin: The fractional part of the image along the horizontal used for the averaging (minimum)
/// * hmax: The fractional part of the image along the horizontal used for the averaging (maximum)
/// * vmin: The fractional part of the image along the vertical used for the averaging (minimum)
/// * vmax: The fractional part of the image along the vertical used for the averaging (maximum)
/// * colorOrder: Usually the global colorOrder is set at the device section, you can overwrite it here per led
"leds":
[
{
"hmax": 0.125,
"hmin": 0,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 0.25,
"hmin": 0.125,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 0.375,
"hmin": 0.25,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 0.5,
"hmin": 0.375,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 0.625,
"hmin": 0.5,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 0.75,
"hmin": 0.625,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 0.875,
"hmin": 0.75,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 1,
"hmin": 0.875,
"vmax": 0.08,
"vmin": 0
},
{
"hmax": 1,
"hmin": 0.95,
"vmax": 0.2,
"vmin": 0
},
{
"hmax": 1,
"hmin": 0.95,
"vmax": 0.4,
"vmin": 0.2
},
{
"hmax": 1,
"hmin": 0.95,
"vmax": 0.6,
"vmin": 0.4
},
{
"hmax": 1,
"hmin": 0.95,
"vmax": 0.8,
"vmin": 0.6
},
{
"hmax": 1,
"hmin": 0.95,
"vmax": 1,
"vmin": 0.8
},
{
"hmax": 1,
"hmin": 0.875,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.875,
"hmin": 0.75,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.75,
"hmin": 0.625,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.625,
"hmin": 0.5,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.5,
"hmin": 0.375,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.375,
"hmin": 0.25,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.25,
"hmin": 0.125,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.125,
"hmin": 0,
"vmax": 1,
"vmin": 0.92
},
{
"hmax": 0.05,
"hmin": 0,
"vmax": 1,
"vmin": 0.8
},
{
"hmax": 0.05,
"hmin": 0,
"vmax": 0.8,
"vmin": 0.6
},
{
"hmax": 0.05,
"hmin": 0,
"vmax": 0.6,
"vmin": 0.4
},
{
"hmax": 0.05,
"hmin": 0,
"vmax": 0.4,
"vmin": 0.2
},
{
"hmax": 0.05,
"hmin": 0,
"vmax": 0.2,
"vmin": 0
}
]
}


@@ -64,10 +64,12 @@
{
"device" : "auto",
"input" : 0,
"encoding" : "NO_CHANGE",
"width" : 0,
"height" : 0,
"fps" : 15,
"standard" : "NO_CHANGE",
"fpsSoftwareDecimation" : 1,
"sizeDecimation" : 8,
"cropLeft" : 0,
"cropRight" : 0,
@@ -77,11 +79,16 @@
"greenSignalThreshold" : 5,
"blueSignalThreshold" : 5,
"signalDetection" : false,
"noSignalCounterThreshold" : 200,
"cecDetection" : false,
"sDVOffsetMin" : 0.25,
"sDHOffsetMin" : 0.25,
"sDVOffsetMax" : 0.75,
"sDHOffsetMax" : 0.75,
"hardware_brightness" : 0,
"hardware_contrast" : 0,
"hardware_saturation" : 0,
"hardware_hue" : 0
},
"framegrabber" :

include/grabber/MFGrabber.h (new file, 138 lines)

@@ -0,0 +1,138 @@
#pragma once
// COM includes
#include <Guiddef.h>
// Qt includes
#include <QObject>
#include <QRectF>
#include <QMap>
#include <QMultiMap>
// utils includes
#include <utils/PixelFormat.h>
#include <utils/Components.h>
#include <hyperion/Grabber.h>
// decoder thread includes
#include <grabber/MFThread.h>
// TurboJPEG decoder
#ifdef HAVE_TURBO_JPEG
#include <turbojpeg.h>
#endif
/// Forward class declaration
class SourceReaderCB;
/// Forward struct declaration
struct IMFSourceReader;
///
/// Media Foundation capture class
///
class MFGrabber : public Grabber
{
Q_OBJECT
friend class SourceReaderCB;
public:
struct DevicePropertiesItem
{
int x, y,fps,fps_a,fps_b;
PixelFormat pf;
GUID guid;
};
struct DeviceProperties
{
QString name = QString();
QMultiMap<QString, int> inputs = QMultiMap<QString, int>();
QStringList displayResolutions = QStringList();
QStringList framerates = QStringList();
QList<DevicePropertiesItem> valid = QList<DevicePropertiesItem>();
};
MFGrabber(const QString & device, const unsigned width, const unsigned height, const unsigned fps, const unsigned input, int pixelDecimation);
~MFGrabber() override;
void receive_image(const void *frameImageBuffer, int size, QString message);
QRectF getSignalDetectionOffset() const { return QRectF(_x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max); }
bool getSignalDetectionEnabled() const { return _signalDetectionEnabled; }
bool getCecDetectionEnabled() const { return _cecDetectionEnabled; }
QStringList getV4L2devices() const override;
QString getV4L2deviceName(const QString& devicePath) const override { return devicePath; }
QMultiMap<QString, int> getV4L2deviceInputs(const QString& devicePath) const override { return _deviceProperties.value(devicePath).inputs; }
QStringList getResolutions(const QString& devicePath) const override { return _deviceProperties.value(devicePath).displayResolutions; }
QStringList getFramerates(const QString& devicePath) const override { return _deviceProperties.value(devicePath).framerates; }
QStringList getV4L2EncodingFormats(const QString& devicePath) const override;
void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold) override;
void setSignalDetectionOffset( double verticalMin, double horizontalMin, double verticalMax, double horizontalMax) override;
void setSignalDetectionEnable(bool enable) override;
void setPixelDecimation(int pixelDecimation) override;
void setCecDetectionEnable(bool enable) override;
void setDeviceVideoStandard(QString device, VideoStandard videoStandard) override;
bool setInput(int input) override;
bool setWidthHeight(int width, int height) override;
bool setFramerate(int fps) override;
void setFpsSoftwareDecimation(int decimation);
void setEncoding(QString enc);
void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue);
public slots:
bool start();
void stop();
void newThreadFrame(unsigned int _workerIndex, const Image<ColorRgb>& image,unsigned int sourceCount);
signals:
void newFrame(const Image<ColorRgb> & image);
private:
struct buffer
{
void *start;
size_t length;
};
bool init();
void uninit();
bool init_device(QString device, DevicePropertiesItem props);
void uninit_device();
void enumVideoCaptureDevices();
void start_capturing();
bool process_image(const void *frameImageBuffer, int size);
void checkSignalDetectionEnabled(Image<ColorRgb> image);
QString _deviceName;
QMap<QString, MFGrabber::DeviceProperties> _deviceProperties;
std::vector<buffer> _buffers;
HRESULT _hr;
SourceReaderCB* _sourceReaderCB;
PixelFormat _pixelFormat;
int _pixelDecimation,
_lineLength,
_frameByteSize,
_noSignalCounterThreshold,
_noSignalCounter,
_fpsSoftwareDecimation,
_brightness,
_contrast,
_saturation,
_hue;
volatile unsigned int _currentFrame;
ColorRgb _noSignalThresholdColor;
bool _signalDetectionEnabled,
_cecDetectionEnabled,
_noSignalDetected,
_initialized;
double _x_frac_min,
_y_frac_min,
_x_frac_max,
_y_frac_max;
MFThreadManager _threadManager;
IMFSourceReader* _sourceReader;
#ifdef HAVE_TURBO_JPEG
int _subsamp;
#endif
};
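A hypothetical usage sketch for the class above (assumes the Hyperion build environment; constructor arguments follow the declaration: device, width, height, fps, input, pixelDecimation):

// Illustrative only, not part of this commit.
#include <QCoreApplication>
#include <QObject>
#include <grabber/MFGrabber.h>
int main(int argc, char* argv[])
{
	QCoreApplication app(argc, argv);
	// "auto" lets the grabber pick the first capture device it enumerates;
	// width/height 0 and 15 fps leave the mode selection to the grabber.
	MFGrabber grabber("auto", 0, 0, 15, 0, 8);
	QObject::connect(&grabber, &MFGrabber::newFrame, [](const Image<ColorRgb>& image)
	{
		Q_UNUSED(image); // hand the decoded frame to the processing pipeline here
	});
	if (!grabber.start())
		return 1;
	return app.exec();
}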

include/grabber/MFThread.h (new file, 138 lines)

@@ -0,0 +1,138 @@
#pragma once
// Qt includes
#include <QThread>
#include <QSemaphore>
// util includes
#include <utils/PixelFormat.h>
#include <utils/ImageResampler.h>
// TurboJPEG decoder
#ifdef HAVE_TURBO_JPEG
#include <QImage>
#include <QColor>
#include <turbojpeg.h>
#endif
// Forward class declaration
class MFThreadManager;
/// Encoder thread for USB devices
class MFThread : public QThread
{
Q_OBJECT
friend class MFThreadManager;
public:
MFThread();
~MFThread();
void setup(
unsigned int threadIndex, PixelFormat pixelFormat, uint8_t* sharedData,
int size, int width, int height, int lineLength,
int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
VideoMode videoMode, int currentFrame, int pixelDecimation);
void run();
bool isBusy();
void noBusy();
signals:
void newFrame(unsigned int threadIndex, const Image<ColorRgb>& data, unsigned int sourceCount);
private:
void processImageMjpeg();
#ifdef HAVE_TURBO_JPEG
tjhandle _decompress;
int _scalingFactorsCount = 0;
tjscalingfactor* _scalingFactors = nullptr;
#endif
static volatile bool _isActive;
volatile bool _isBusy;
QSemaphore _semaphore;
unsigned int _workerIndex;
PixelFormat _pixelFormat;
uint8_t* _localData;
int _localDataSize;
int _size;
int _width;
int _height;
int _lineLength;
int _subsamp;
unsigned _cropLeft;
unsigned _cropTop;
unsigned _cropBottom;
unsigned _cropRight;
int _currentFrame;
int _pixelDecimation;
ImageResampler _imageResampler;
};
class MFThreadManager : public QObject
{
Q_OBJECT
public:
MFThreadManager() : _threads(nullptr)
{
int select = QThread::idealThreadCount();
if (select >= 2 && select <= 3)
select = 2;
else if (select > 3 && select <= 5)
select = 3;
else if (select > 5)
select = 4;
_maxThreads = qMax(select, 1);
}
~MFThreadManager()
{
if (_threads != nullptr)
{
for(unsigned i=0; i < _maxThreads; i++)
if (_threads[i] != nullptr)
{
_threads[i]->deleteLater();
_threads[i] = nullptr;
}
delete[] _threads;
_threads = nullptr;
}
}
void initThreads()
{
if (_maxThreads >= 1)
{
_threads = new MFThread*[_maxThreads];
for (unsigned i=0; i < _maxThreads; i++)
_threads[i] = new MFThread();
}
}
void start() { MFThread::_isActive = true; }
bool isActive() { return MFThread::_isActive; }
void stop()
{
MFThread::_isActive = false;
if (_threads != nullptr)
{
for(unsigned i=0; i < _maxThreads; i++)
if (_threads[i] != nullptr)
{
_threads[i]->quit();
_threads[i]->wait();
}
}
}
unsigned int _maxThreads;
MFThread** _threads;
};
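For reference, the worker-count heuristic in the MFThreadManager constructor above, restated as a standalone snippet (illustration only): 1 core keeps 1 worker, 2-3 cores use 2, 4-5 use 3, and anything above uses 4.

// Illustrative restatement, not part of this commit.
#include <QThread>
#include <QtGlobal>
#include <cstdio>
static int decoderWorkerCount()
{
	int select = QThread::idealThreadCount();
	if (select >= 2 && select <= 3)
		select = 2;
	else if (select > 3 && select <= 5)
		select = 3;
	else if (select > 5)
		select = 4;
	return qMax(select, 1);
}
int main()
{
	std::printf("decoder worker threads on this machine: %d\n", decoderWorkerCount());
	return 0;
}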


@@ -0,0 +1,46 @@
#pragma once
#include <hyperion/GrabberWrapper.h>
#include <grabber/MFGrabber.h>
class MFWrapper : public GrabberWrapper
{
Q_OBJECT
public:
MFWrapper(const QString & device, const unsigned grabWidth, const unsigned grabHeight, const unsigned fps, const unsigned input, int pixelDecimation);
~MFWrapper() override;
bool getSignalDetectionEnable() const;
bool getCecDetectionEnable() const;
public slots:
bool start() override;
void stop() override;
void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold);
void setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom) override;
void setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax);
void setSignalDetectionEnable(bool enable);
void setCecDetectionEnable(bool enable);
void setDeviceVideoStandard(const QString& device, VideoStandard videoStandard);
void handleSettingsUpdate(settings::type type, const QJsonDocument& config) override;
///
/// @brief set software decimation (v4l2)
///
void setFpsSoftwareDecimation(int decimation);
void setEncoding(QString enc);
void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue);
private slots:
void newFrame(const Image<ColorRgb> & image);
void action() override;
private:
/// The Media Foundation grabber
MFGrabber _grabber;
};


@@ -51,15 +51,7 @@ public:
QStringList framerates = QStringList();
};
V4L2Grabber(const QString & device, const unsigned width, const unsigned height, const unsigned fps, const unsigned input, VideoStandard videoStandard, PixelFormat pixelFormat, int pixelDecimation);
~V4L2Grabber() override;
QRectF getSignalDetectionOffset() const


@@ -15,7 +15,7 @@ public:
const unsigned input,
VideoStandard videoStandard,
PixelFormat pixelFormat,
int pixelDecimation);
~V4L2Wrapper() override;
bool getSignalDetectionEnable() const;


@@ -135,6 +135,13 @@ public:
///
virtual QMultiMap<QString, int> getV4L2deviceInputs(const QString& /*devicePath*/) const { return QMultiMap<QString, int>(); }
///
/// @brief Get a list of supported hardware encoding formats
/// @param devicePath The device path
/// @return List of hardware encoding formats on success else empty List
///
virtual QStringList getV4L2EncodingFormats(const QString& /*devicePath*/) const { return QStringList(); }
///
/// @brief Get a list of supported device resolutions
/// @param devicePath The device path


@@ -18,9 +18,9 @@ class Grabber;
class GlobalSignals;
class QTimer;
/// List of Hyperion instances that requested screen capt
/// Map of Hyperion instances with grabber name that requested screen capture
static QList<int> GRABBER_SYS_CLIENTS;
static QMap<int, QString> GRABBER_SYS_CLIENTS;
static QList<int> GRABBER_V4L_CLIENTS;
static QMap<int, QString> GRABBER_V4L_CLIENTS;
///
/// This class will be inherted by FramebufferWrapper and others which contains the real capture interface
@@ -76,6 +76,13 @@ public:
///
virtual QMultiMap<QString, int> getV4L2deviceInputs(const QString& devicePath) const;
///
/// @brief Get a list of supported hardware encoding formats
/// @param devicePath The device path
/// @return List of hardware encoding formats on success else empty List
///
virtual QStringList getV4L2EncodingFormats(const QString& devicePath) const;
///
/// @brief Get a list of supported device resolutions
/// @param devicePath The device path
@@ -92,9 +99,10 @@ public:
///
/// @brief Get active grabber name
/// @return Active grabber name
/// @param hyperionInd The instance index
/// @return Active grabbers
///
virtual QString getActive() const;
virtual QStringList getActive(int inst) const;
static QStringList availableGrabbers();


@@ -23,32 +23,32 @@ inline PixelFormat parsePixelFormat(const QString& pixelFormat)
// convert to lower case
QString format = pixelFormat.toLower();
if (format.compare("yuyv") )
if (format.compare("yuyv") == 0)
{
return PixelFormat::YUYV;
}
else if (format.compare("uyvy") )
else if (format.compare("uyvy") == 0)
{
return PixelFormat::UYVY;
}
else if (format.compare("bgr16") )
else if (format.compare("bgr16") == 0)
{
return PixelFormat::BGR16;
}
else if (format.compare("bgr24") )
else if (format.compare("bgr24") == 0)
{
return PixelFormat::BGR24;
}
else if (format.compare("rgb32") )
else if (format.compare("rgb32") == 0)
{
return PixelFormat::RGB32;
}
else if (format.compare("bgr32") )
else if (format.compare("bgr32") == 0)
{
return PixelFormat::BGR32;
}
#ifdef HAVE_JPEG_DECODER
else if (format.compare("mjpeg") )
else if (format.compare("mjpeg") == 0)
{
return PixelFormat::MJPEG;
}
@@ -57,3 +57,41 @@ inline PixelFormat parsePixelFormat(const QString& pixelFormat)
// return the default NO_CHANGE
return PixelFormat::NO_CHANGE;
}
inline QString pixelFormatToString(const PixelFormat& pixelFormat)
{
if ( pixelFormat == PixelFormat::YUYV)
{
return "yuyv";
}
else if (pixelFormat == PixelFormat::UYVY)
{
return "uyvy";
}
else if (pixelFormat == PixelFormat::BGR16)
{
return "bgr16";
}
else if (pixelFormat == PixelFormat::BGR24)
{
return "bgr24";
}
else if (pixelFormat == PixelFormat::RGB32)
{
return "rgb32";
}
else if (pixelFormat == PixelFormat::BGR32)
{
return "bgr32";
}
#ifdef HAVE_JPEG_DECODER
else if (pixelFormat == PixelFormat::MJPEG)
{
return "mjpeg";
}
#endif
// return the default NO_CHANGE
return "NO_CHANGE";
}
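A small round-trip check for the helpers above (illustrative, assumes utils/PixelFormat.h from this commit); with the corrected compare(...) == 0 tests, parsing and formatting are inverses for every supported format string:

// Illustrative only, not part of this commit.
#include <QString>
#include <QStringList>
#include <cstdio>
#include <utils/PixelFormat.h>
int main()
{
	const QStringList formats = { "yuyv", "uyvy", "bgr16", "bgr24", "rgb32", "bgr32" };
	for (const QString& name : formats)
	{
		PixelFormat pf = parsePixelFormat(name);
		std::printf("%s -> %s\n", qPrintable(name), qPrintable(pixelFormatToString(pf)));
	}
	return 0;
}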


@@ -467,11 +467,18 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject &message, const QString
QJsonObject grabbers;
QJsonArray availableGrabbers;
#if defined(ENABLE_DISPMANX) || defined(ENABLE_V4L2) || defined(ENABLE_FB) || defined(ENABLE_AMLOGIC) || defined(ENABLE_OSX) || defined(ENABLE_X11) || defined(ENABLE_XCB) || defined(ENABLE_QT)
#if defined(ENABLE_DISPMANX) || defined(ENABLE_V4L2) || defined(ENABLE_MF) || defined(ENABLE_FB) || defined(ENABLE_AMLOGIC) || defined(ENABLE_OSX) || defined(ENABLE_X11) || defined(ENABLE_XCB) || defined(ENABLE_QT)
if ( GrabberWrapper::getInstance() != nullptr )
{
grabbers["active"] = GrabberWrapper::getInstance()->getActive();
QStringList activeGrabbers = GrabberWrapper::getInstance()->getActive(_hyperion->getInstanceIndex());
QJsonArray activeGrabberNames;
for (auto grabberName : activeGrabbers)
{
activeGrabberNames.append(grabberName);
}
grabbers["active"] = activeGrabberNames;
}
// get available grabbers
@@ -482,7 +489,7 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject &message, const QString
#endif
#if defined(ENABLE_V4L2)
#if defined(ENABLE_V4L2) || defined(ENABLE_MF)
QJsonArray availableV4L2devices;
for (const auto& devicePath : GrabberWrapper::getInstance()->getV4L2devices())
@@ -502,6 +509,14 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject &message, const QString
}
device.insert("inputs", availableInputs);
QJsonArray availableEncodingFormats;
QStringList encodingFormats = GrabberWrapper::getInstance()->getV4L2EncodingFormats(devicePath);
for (auto encodingFormat : encodingFormats)
{
availableEncodingFormats.append(encodingFormat);
}
device.insert("encoding_format", availableEncodingFormats);
QJsonArray availableResolutions;
QStringList resolutions = GrabberWrapper::getInstance()->getResolutions(devicePath);
for (auto resolution : resolutions)
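The net effect of this hunk on the serverinfo reply is that grabbers.active changes from a single string to an array, which is why the web UI above now iterates with .map(...). A hedged sketch of the new shape (grabber and key names here are examples only):

// Illustrative only, not part of this commit.
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <cstdio>
int main()
{
	QJsonObject grabbers;
	grabbers["active"] = QJsonArray{ "Media Foundation" };          // previously a plain string
	grabbers["available"] = QJsonArray{ "Media Foundation", "Qt" }; // key and names assumed for illustration
	QJsonObject reply{ { "grabbers", grabbers } };
	std::printf("%s", QJsonDocument(reply).toJson(QJsonDocument::Indented).constData());
	return 0;
}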


@@ -12,24 +12,28 @@ endif (ENABLE_FB)
if (ENABLE_OSX)
add_subdirectory(osx)
endif()
endif(ENABLE_OSX)
if (ENABLE_V4L2)
add_subdirectory(v4l2)
endif (ENABLE_V4L2)
if (ENABLE_MF)
add_subdirectory(mediafoundation)
endif (ENABLE_MF)
if (ENABLE_X11)
add_subdirectory(x11)
endif()
endif(ENABLE_X11)
if (ENABLE_XCB)
add_subdirectory(xcb)
endif()
endif(ENABLE_XCB)
if (ENABLE_QT)
add_subdirectory(qt)
endif()
endif(ENABLE_QT)
if (ENABLE_DX)
add_subdirectory(directx)
endif()
endif(ENABLE_DX)


@@ -0,0 +1,16 @@
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/mediafoundation)
FILE ( GLOB MF_SOURCES "${CURRENT_HEADER_DIR}/MF*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" )
add_library(mf-grabber ${MF_SOURCES} )
target_link_libraries(mf-grabber
hyperion
${QT_LIBRARIES}
)
if(TURBOJPEG_FOUND)
target_link_libraries(mf-grabber ${TurboJPEG_LIBRARY})
endif(TURBOJPEG_FOUND)


@@ -0,0 +1,868 @@
#include "MFSourceReaderCB.h"
#include "grabber/MFGrabber.h"
static PixelFormat GetPixelFormatForGuid(const GUID guid)
{
if (IsEqualGUID(guid, MFVideoFormat_RGB32)) return PixelFormat::RGB32;
if (IsEqualGUID(guid, MFVideoFormat_YUY2)) return PixelFormat::YUYV;
if (IsEqualGUID(guid, MFVideoFormat_UYVY)) return PixelFormat::UYVY;
if (IsEqualGUID(guid, MFVideoFormat_MJPG)) return PixelFormat::MJPEG;
return PixelFormat::NO_CHANGE;
};
MFGrabber::MFGrabber(const QString & device, unsigned width, unsigned height, unsigned fps, unsigned input, int pixelDecimation)
: Grabber("V4L2:"+device)
, _deviceName(device)
, _buffers()
, _hr(S_FALSE)
, _sourceReader(nullptr)
, _pixelDecimation(pixelDecimation)
, _lineLength(-1)
, _frameByteSize(-1)
, _noSignalCounterThreshold(40)
, _noSignalCounter(0)
, _fpsSoftwareDecimation(1)
, _brightness(0)
, _contrast(0)
, _saturation(0)
, _hue(0)
, _currentFrame(0)
, _noSignalThresholdColor(ColorRgb{0,0,0})
, _signalDetectionEnabled(true)
, _cecDetectionEnabled(true)
, _noSignalDetected(false)
, _initialized(false)
, _x_frac_min(0.25)
, _y_frac_min(0.25)
, _x_frac_max(0.75)
, _y_frac_max(0.75)
{
setInput(input);
setWidthHeight(width, height);
setFramerate(fps);
// setDeviceVideoStandard(device, videoStandard);
CoInitializeEx(0, COINIT_MULTITHREADED);
_hr = MFStartup(MF_VERSION, MFSTARTUP_NOSOCKET);
if (FAILED(_hr))
CoUninitialize();
else
_sourceReaderCB = new SourceReaderCB(this);
}
MFGrabber::~MFGrabber()
{
uninit();
SAFE_RELEASE(_sourceReader);
SAFE_RELEASE(_sourceReaderCB);
if (SUCCEEDED(_hr) && SUCCEEDED(MFShutdown()))
CoUninitialize();
}
bool MFGrabber::init()
{
if (!_initialized && SUCCEEDED(_hr))
{
QString foundDevice = "";
int foundIndex = -1, bestGuess = -1, bestGuessMinX = INT_MAX, bestGuessMinFPS = INT_MAX;
bool autoDiscovery = (QString::compare(_deviceName, "auto", Qt::CaseInsensitive) == 0 );
// enumerate the video capture devices on the user's system
enumVideoCaptureDevices();
if (!autoDiscovery && !_deviceProperties.contains(_deviceName))
{
Debug(_log, "Device '%s' is not available. Changing to auto.", QSTRING_CSTR(_deviceName));
autoDiscovery = true;
}
if (autoDiscovery)
{
Debug(_log, "Forcing auto discovery device");
if (_deviceProperties.count()>0)
{
foundDevice = _deviceProperties.firstKey();
_deviceName = foundDevice;
Debug(_log, "Auto discovery set to %s", QSTRING_CSTR(_deviceName));
}
}
else
foundDevice = _deviceName;
if (foundDevice.isNull() || foundDevice.isEmpty() || !_deviceProperties.contains(foundDevice))
{
Error(_log, "Could not find any capture device");
return false;
}
MFGrabber::DeviceProperties dev = _deviceProperties[foundDevice];
Debug(_log, "Searching for %s %d x %d @ %d fps (%s)", QSTRING_CSTR(foundDevice), _width, _height,_fps, QSTRING_CSTR(pixelFormatToString(_pixelFormat)));
for( int i = 0; i < dev.valid.count() && foundIndex < 0; ++i )
{
bool strict = false;
const auto& val = dev.valid[i];
if (bestGuess == -1 || (val.x <= bestGuessMinX && val.x >= 640 && val.fps <= bestGuessMinFPS && val.fps >= 10))
{
bestGuess = i;
bestGuessMinFPS = val.fps;
bestGuessMinX = val.x;
}
if(_width && _height)
{
strict = true;
if (val.x != _width || val.y != _height)
continue;
}
if(_fps && _fps!=15)
{
strict = true;
if (val.fps != _fps)
continue;
}
if(_pixelFormat != PixelFormat::NO_CHANGE)
{
strict = true;
if (val.pf != _pixelFormat)
continue;
}
if (strict)
foundIndex = i;
}
if (foundIndex < 0 && bestGuess >= 0)
{
if (!autoDiscovery)
Warning(_log, "Selected resolution not found in supported modes. Forcing best resolution");
else
Debug(_log, "Forcing best resolution");
foundIndex = bestGuess;
}
if (foundIndex>=0)
{
if (init_device(foundDevice, dev.valid[foundIndex]))
_initialized = true;
}
else
Error(_log, "Could not find any capture device settings");
}
return _initialized;
}
void MFGrabber::uninit()
{
// stop if the grabber was not stopped
if (_initialized)
{
Debug(_log,"uninit grabber: %s", QSTRING_CSTR(_deviceName));
stop();
}
}
bool MFGrabber::init_device(QString deviceName, DevicePropertiesItem props)
{
bool setStreamParamOK = false;
PixelFormat pixelformat = GetPixelFormatForGuid(props.guid);
QString error, guid = _deviceProperties[deviceName].name;
HRESULT hr,hr1,hr2;
Debug(_log, "Init %s, %d x %d @ %d fps (%s) => %s", QSTRING_CSTR(deviceName), props.x, props.y, props.fps, QSTRING_CSTR(pixelFormatToString(pixelformat)), QSTRING_CSTR(guid));
IMFMediaSource* device = nullptr;
IMFAttributes* attr;
hr = MFCreateAttributes(&attr, 2);
if (SUCCEEDED(hr))
{
hr = attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
if (SUCCEEDED(hr))
{
int size = guid.length() + 1024;
wchar_t *name = new wchar_t[size];
memset(name, 0, size * sizeof(wchar_t)); // zero the whole wchar_t buffer so the copied string stays null-terminated
guid.toWCharArray(name);
if (SUCCEEDED(attr->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, (LPCWSTR)name)) && _sourceReaderCB)
{
hr = MFCreateDeviceSource(attr, &device);
if (FAILED(hr))
{
SAFE_RELEASE(device);
error = QString("MFCreateDeviceSource %1").arg(hr);
}
}
else
error = QString("IMFAttributes_SetString_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK %1").arg(hr);
delete[] name;
}
SAFE_RELEASE(attr);
}
else
{
SAFE_RELEASE(attr);
error = QString("MFCreateAttributes_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE %1").arg(hr);
}
if (device)
{
Debug(_log, "Device opened");
if (_brightness != 0 || _contrast != 0 || _saturation != 0 || _hue != 0)
{
IAMVideoProcAmp *pProcAmp = NULL;
if (SUCCEEDED(device->QueryInterface(IID_PPV_ARGS(&pProcAmp))))
{
long lMin, lMax, lStep, lDefault, lCaps, Val;
if (_brightness != 0)
{
if (SUCCEEDED(pProcAmp->GetRange(VideoProcAmp_Brightness, &lMin, &lMax, &lStep, &lDefault, &lCaps)))
{
Debug(_log, "Brightness: min=%i, max=%i, default=%i", lMin, lMax, lDefault);
if (SUCCEEDED(pProcAmp->Get(VideoProcAmp_Brightness, &Val, &lCaps)))
Debug(_log, "Current brightness set to: %i",Val);
if (SUCCEEDED(pProcAmp->Set(VideoProcAmp_Brightness, _brightness, VideoProcAmp_Flags_Manual)))
Debug(_log, "Brightness set to: %i",_brightness);
else
Error(_log, "Could not set brightness");
}
else
Error(_log, "Brightness is not supported by the grabber");
}
if (_contrast != 0)
{
if (SUCCEEDED(pProcAmp->GetRange(VideoProcAmp_Contrast, &lMin, &lMax, &lStep, &lDefault, &lCaps)))
{
Debug(_log, "Contrast: min=%i, max=%i, default=%i", lMin, lMax, lDefault);
if (SUCCEEDED(pProcAmp->Get(VideoProcAmp_Contrast, &Val, &lCaps)))
Debug(_log, "Current contrast set to: %i",Val);
if (SUCCEEDED(pProcAmp->Set(VideoProcAmp_Contrast, _contrast, VideoProcAmp_Flags_Manual)))
Debug(_log, "Contrast set to: %i",_contrast);
else
Error(_log, "Could not set contrast");
}
else
Error(_log, "Contrast is not supported by the grabber");
}
if (_saturation != 0)
{
if (SUCCEEDED(pProcAmp->GetRange(VideoProcAmp_Saturation, &lMin, &lMax, &lStep, &lDefault, &lCaps)))
{
Debug(_log, "Saturation: min=%i, max=%i, default=%i", lMin, lMax, lDefault);
if (SUCCEEDED(pProcAmp->Get(VideoProcAmp_Saturation, &Val, &lCaps)))
Debug(_log, "Current saturation set to: %i",Val);
if (SUCCEEDED(pProcAmp->Set(VideoProcAmp_Saturation, _saturation, VideoProcAmp_Flags_Manual)))
Debug(_log, "Saturation set to: %i",_saturation);
else
Error(_log, "Could not set saturation");
}
else
Error(_log, "Saturation is not supported by the grabber");
}
if (_hue != 0)
{
hr = pProcAmp->GetRange(VideoProcAmp_Hue, &lMin, &lMax, &lStep, &lDefault, &lCaps);
if (SUCCEEDED(hr))
{
Debug(_log, "Hue: min=%i, max=%i, default=%i", lMin, lMax, lDefault);
hr = pProcAmp->Get(VideoProcAmp_Hue, &Val, &lCaps);
if (SUCCEEDED(hr))
Debug(_log, "Current hue set to: %i",Val);
hr = pProcAmp->Set(VideoProcAmp_Hue, _hue, VideoProcAmp_Flags_Manual);
if (SUCCEEDED(hr))
Debug(_log, "Hue set to: %i",_hue);
else
Error(_log, "Could not set hue");
}
else
Error(_log, "Hue is not supported by the grabber");
}
pProcAmp->Release();
}
}
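// Create the source reader in asynchronous mode: MF_SOURCE_READER_ASYNC_CALLBACK routes every captured sample to SourceReaderCB::OnReadSample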
IMFAttributes* pAttributes;
hr1 = MFCreateAttributes(&pAttributes, 1);
if (SUCCEEDED(hr1))
hr2 = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, (IMFSourceReaderCallback *)_sourceReaderCB);
if (SUCCEEDED(hr1) && SUCCEEDED(hr2))
hr = MFCreateSourceReaderFromMediaSource(device, pAttributes, &_sourceReader);
else
hr = E_INVALIDARG;
if (SUCCEEDED(hr1))
pAttributes->Release();
device->Release();
if (SUCCEEDED(hr))
{
IMFMediaType* type;
hr = MFCreateMediaType(&type);
if (SUCCEEDED(hr))
{
hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
if (SUCCEEDED(hr))
{
hr = type->SetGUID(MF_MT_SUBTYPE, props.guid);
if (SUCCEEDED(hr))
{
hr = MFSetAttributeSize(type, MF_MT_FRAME_SIZE, props.x, props.y);
if (SUCCEEDED(hr))
{
hr = MFSetAttributeSize(type, MF_MT_FRAME_RATE, props.fps_a, props.fps_b);
if (SUCCEEDED(hr))
{
MFSetAttributeRatio(type, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
hr = _sourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, type);
if (SUCCEEDED(hr))
{
setStreamParamOK = true;
}
else
error = QString("SetCurrentMediaType %1").arg(hr);
}
else
error = QString("MFSetAttributeSize_MF_MT_FRAME_RATE %1").arg(hr);
}
else
error = QString("SMFSetAttributeSize_MF_MT_FRAME_SIZE %1").arg(hr);
}
else
error = QString("SetGUID_MF_MT_SUBTYPE %1").arg(hr);
}
else
error = QString("SetGUID_MF_MT_MAJOR_TYPE %1").arg(hr);
type->Release();
}
else
error = QString("IMFAttributes_SetString %1").arg(hr);
if (!setStreamParamOK)
Error(_log, "Could not stream set params (%s)", QSTRING_CSTR(error));
}
else
Error(_log, "MFCreateSourceReaderFromMediaSource (%i)", hr);
}
else
Error(_log, "Could not open device (%s)", QSTRING_CSTR(error));
if (!setStreamParamOK)
{
SAFE_RELEASE(_sourceReader);
}
else
{
_pixelFormat = props.pf;
_width = props.x;
_height = props.y;
switch (_pixelFormat)
{
case PixelFormat::UYVY:
case PixelFormat::YUYV:
{
_frameByteSize = props.x * props.y * 2;
_lineLength = props.x * 2;
}
break;
case PixelFormat::RGB32:
{
_frameByteSize = props.x * props.y * 4;
_lineLength = props.x * 4; // RGB32 uses 4 bytes per pixel
}
break;
case PixelFormat::MJPEG:
{
_lineLength = props.x * 3;
}
break;
}
}
return setStreamParamOK;
}
void MFGrabber::uninit_device()
{
SAFE_RELEASE(_sourceReader);
}
void MFGrabber::enumVideoCaptureDevices()
{
if (FAILED(_hr))
{
Error(_log, "enumVideoCaptureDevices(): Media Foundation not initialized");
return;
}
_deviceProperties.clear();
IMFAttributes* attr;
if(SUCCEEDED(MFCreateAttributes(&attr, 1)))
{
if(SUCCEEDED(attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID)))
{
UINT32 count;
IMFActivate** devices;
if(SUCCEEDED(MFEnumDeviceSources(attr, &devices, &count)))
{
Debug(_log, "Detected devices: %u", count);
for (UINT32 i = 0; i < count; i++)
{
UINT32 length;
LPWSTR name;
LPWSTR symlink;
if(SUCCEEDED(devices[i]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &length)))
{
if(SUCCEEDED(devices[i]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &symlink, &length)))
{
QString dev = QString::fromUtf16((const ushort*)name);
MFGrabber::DeviceProperties properties;
properties.name = QString::fromUtf16((const ushort*)symlink);
Info(_log, "Found capture device: %s", QSTRING_CSTR(dev));
IMFMediaSource *pSource = nullptr;
if(SUCCEEDED(devices[i]->ActivateObject(IID_PPV_ARGS(&pSource))))
{
IMFMediaType *pType = nullptr;
IMFSourceReader* reader;
if(SUCCEEDED(MFCreateSourceReaderFromMediaSource(pSource, NULL, &reader)))
{
for (DWORD j = 0; ; j++)
{
if (FAILED(reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, j, &pType)))
break;
GUID format;
UINT64 frame_size;
UINT64 frame_rate;
if( SUCCEEDED(pType->GetGUID(MF_MT_SUBTYPE, &format)) &&
SUCCEEDED(pType->GetUINT64(MF_MT_FRAME_SIZE, &frame_size)) &&
SUCCEEDED(pType->GetUINT64(MF_MT_FRAME_RATE, &frame_rate)) &&
frame_rate > 0)
{
PixelFormat pixelformat = GetPixelFormatForGuid(format);
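// MF_MT_FRAME_SIZE packs width/height and MF_MT_FRAME_RATE packs numerator/denominator into the upper/lower 32 bits of a UINT64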
DWORD w = frame_size >> 32;
DWORD h = (DWORD) frame_size;
DWORD fr1 = frame_rate >> 32;
DWORD fr2 = (DWORD) frame_rate;
if (pixelformat != PixelFormat::NO_CHANGE)
{
int framerate = fr1/fr2;
QString sFrame = QString::number(framerate).rightJustified(2,' ');
QString displayResolutions = QString::number(w).rightJustified(4,' ') +"x"+ QString::number(h).rightJustified(4,' ');
if (!properties.displayResolutions.contains(displayResolutions))
properties.displayResolutions << displayResolutions;
if (!properties.framerates.contains(sFrame))
properties.framerates << sFrame;
DevicePropertiesItem di;
di.x = w;
di.y = h;
di.fps = framerate;
di.fps_a = fr1;
di.fps_b = fr2;
di.pf = pixelformat;
di.guid = format;
properties.valid.append(di);
}
}
pType->Release();
}
reader->Release();
}
pSource->Release();
}
properties.displayResolutions.sort();
properties.framerates.sort();
_deviceProperties.insert(dev, properties);
}
CoTaskMemFree(symlink);
}
CoTaskMemFree(name);
devices[i]->Release();
}
CoTaskMemFree(devices);
}
attr->Release();
}
}
}
void MFGrabber::start_capturing()
{
if (_sourceReader)
{
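// Request the first asynchronous sample; each completed sample triggers SourceReaderCB::OnReadSample -> receive_image(), which requests the next one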
HRESULT hr = _sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
0, NULL, NULL, NULL, NULL);
if (!SUCCEEDED(hr))
Error(_log, "ReadSample (%i)", hr);
}
}
bool MFGrabber::process_image(const void *frameImageBuffer, int size)
{
bool frameSend = false;
unsigned int processFrameIndex = _currentFrame++;
// frame skipping
if ( (processFrameIndex % _fpsSoftwareDecimation != 0) && (_fpsSoftwareDecimation > 1))
return frameSend;
// CEC detection
if (_cecDetectionEnabled)
return frameSend;
// We do want a new frame...
if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG)
Error(_log, "Frame too small: %d != %d", size, _frameByteSize);
else
{
if (_threadManager.isActive())
{
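// Hand the frame to the first idle worker thread; the thread pool is created lazily on the first processed frame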
if (_threadManager._threads == nullptr)
{
_threadManager.initThreads();
Debug(_log, "Max thread count = %d", _threadManager._maxThreads);
for (unsigned int i=0; i < _threadManager._maxThreads && _threadManager._threads != nullptr; i++)
{
MFThread* _thread=_threadManager._threads[i];
connect(_thread, SIGNAL(newFrame(unsigned int, const Image<ColorRgb> &,unsigned int)), this , SLOT(newThreadFrame(unsigned int, const Image<ColorRgb> &, unsigned int)));
}
}
for (unsigned int i=0;_threadManager.isActive() && i < _threadManager._maxThreads && _threadManager._threads != nullptr; i++)
{
if ((_threadManager._threads[i]->isFinished() || !_threadManager._threads[i]->isRunning()))
// acquire lock
if ( _threadManager._threads[i]->isBusy() == false)
{
MFThread* _thread = _threadManager._threads[i];
_thread->setup(i, _pixelFormat, (uint8_t *)frameImageBuffer, size, _width, _height, _lineLength, _subsamp, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, processFrameIndex, _pixelDecimation);
if (_threadManager._maxThreads > 1)
_threadManager._threads[i]->start();
frameSend = true;
break;
}
}
}
}
return frameSend;
}
void MFGrabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
{
_noSignalThresholdColor.red = uint8_t(255*redSignalThreshold);
_noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold);
_noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold);
_noSignalCounterThreshold = qMax(1, noSignalCounterThreshold);
Info(_log, "Signal threshold set to: {%d, %d, %d} and frames: %d", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue, _noSignalCounterThreshold );
}
void MFGrabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax)
{
// rainbow 16 stripes 0.47 0.2 0.49 0.8
// unicolor: 0.25 0.25 0.75 0.75
_x_frac_min = horizontalMin;
_y_frac_min = verticalMin;
_x_frac_max = horizontalMax;
_y_frac_max = verticalMax;
Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max );
}
bool MFGrabber::start()
{
try
{
_threadManager.start();
Info(_log, "Decoding threads: %d",_threadManager._maxThreads );
if (init())
{
start_capturing();
Info(_log, "Started");
return true;
}
}
catch(std::exception& e)
{
Error(_log, "Start failed (%s)", e.what());
}
return false;
}
void MFGrabber::stop()
{
if (_initialized)
{
_threadManager.stop();
uninit_device();
_deviceProperties.clear();
_initialized = false;
Info(_log, "Stopped");
}
}
void MFGrabber::receive_image(const void *frameImageBuffer, int size, QString message)
{
if (frameImageBuffer == NULL || size ==0)
Error(_log, "Received empty image frame: %s", QSTRING_CSTR(message));
else
{
if (!message.isEmpty())
Debug(_log, "Received image frame: %s", QSTRING_CSTR(message));
process_image(frameImageBuffer, size);
}
start_capturing();
}
void MFGrabber::newThreadFrame(unsigned int threadIndex, const Image<ColorRgb>& image, unsigned int sourceCount)
{
checkSignalDetectionEnabled(image);
// get next frame
if (threadIndex >= _threadManager._maxThreads)
Error(_log, "Thread index %d out of range", threadIndex);
else
_threadManager._threads[threadIndex]->noBusy();
}
void MFGrabber::checkSignalDetectionEnabled(Image<ColorRgb> image)
{
if (_signalDetectionEnabled)
{
// check signal (only in center of the resulting image, because some grabbers have noise values along the borders)
bool noSignal = true;
// top left
unsigned xOffset = image.width() * _x_frac_min;
unsigned yOffset = image.height() * _y_frac_min;
// bottom right
unsigned xMax = image.width() * _x_frac_max;
unsigned yMax = image.height() * _y_frac_max;
for (unsigned x = xOffset; noSignal && x < xMax; ++x)
for (unsigned y = yOffset; noSignal && y < yMax; ++y)
noSignal &= (ColorRgb&)image(x, y) <= _noSignalThresholdColor;
if (noSignal)
++_noSignalCounter;
else
{
if (_noSignalCounter >= _noSignalCounterThreshold)
{
_noSignalDetected = true;
Info(_log, "Signal detected");
}
_noSignalCounter = 0;
}
if ( _noSignalCounter < _noSignalCounterThreshold)
{
emit newFrame(image);
}
else if (_noSignalCounter == _noSignalCounterThreshold)
{
_noSignalDetected = false;
Info(_log, "Signal lost");
}
}
else
emit newFrame(image);
}
QStringList MFGrabber::getV4L2devices() const
{
QStringList result = QStringList();
for (auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
result << it.key();
return result;
}
QStringList MFGrabber::getV4L2EncodingFormats(const QString& devicePath) const
{
QStringList result = QStringList();
for(int i = 0; i < _deviceProperties[devicePath].valid.count(); ++i )
if (!result.contains(pixelFormatToString(_deviceProperties[devicePath].valid[i].pf), Qt::CaseInsensitive))
result << pixelFormatToString(_deviceProperties[devicePath].valid[i].pf).toLower();
return result;
}
void MFGrabber::setSignalDetectionEnable(bool enable)
{
if (_signalDetectionEnabled != enable)
{
_signalDetectionEnabled = enable;
Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled");
}
}
void MFGrabber::setCecDetectionEnable(bool enable)
{
if (_cecDetectionEnabled != enable)
{
_cecDetectionEnabled = enable;
Info(_log, QString("CEC detection is now %1").arg(enable ? "enabled" : "disabled").toLocal8Bit());
}
}
void MFGrabber::setPixelDecimation(int pixelDecimation)
{
if (_pixelDecimation != pixelDecimation)
_pixelDecimation = pixelDecimation;
}
void MFGrabber::setDeviceVideoStandard(QString device, VideoStandard videoStandard)
{
if (_deviceName != device)
{
_deviceName = device;
if (_initialized && !device.isEmpty())
{
Debug(_log,"Restarting Media Foundation grabber");
uninit();
start();
}
}
}
bool MFGrabber::setInput(int input)
{
if(Grabber::setInput(input))
{
bool started = _initialized;
uninit();
if(started)
start();
return true;
}
return false;
}
bool MFGrabber::setWidthHeight(int width, int height)
{
if(Grabber::setWidthHeight(width,height))
{
Debug(_log,"Set width:height to: %i:&i", width, height);
if (_initialized)
{
Debug(_log,"Restarting Media Foundation grabber");
uninit();
start();
}
return true;
}
return false;
}
bool MFGrabber::setFramerate(int fps)
{
if(Grabber::setFramerate(fps))
{
Debug(_log,"Set fps to: %i", fps);
if (_initialized)
{
Debug(_log,"Restarting Media Foundation grabber");
uninit();
start();
}
return true;
}
return false;
}
void MFGrabber::setFpsSoftwareDecimation(int decimation)
{
_fpsSoftwareDecimation = decimation;
if (decimation > 1)
Debug(_log,"Every %ith image per second are processed", decimation);
}
void MFGrabber::setEncoding(QString enc)
{
if (_pixelFormat != parsePixelFormat(enc))
{
Debug(_log,"Set encoding to: %s", QSTRING_CSTR(enc));
_pixelFormat = parsePixelFormat(enc);
if (_initialized)
{
Debug(_log,"Restarting Media Foundation Grabber");
uninit();
start();
}
}
}
void MFGrabber::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue)
{
if (_brightness != brightness || _contrast != contrast || _saturation != saturation || _hue != hue)
{
_brightness = brightness;
_contrast = contrast;
_saturation = saturation;
_hue = hue;
Debug(_log,"Set brightness to %i, contrast to %i, saturation to %i, hue to %i", _brightness, _contrast, _saturation, _hue);
if (_initialized)
{
Debug(_log,"Restarting Media Foundation Grabber");
uninit();
start();
}
}
}

View File

@ -0,0 +1,134 @@
#pragma once
#include <windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <shlwapi.h>
#include <mferror.h>
#include <strmif.h>
#pragma comment (lib, "ole32.lib")
#pragma comment (lib, "mf.lib")
#pragma comment (lib, "mfplat.lib")
#pragma comment (lib, "mfuuid.lib")
#pragma comment (lib, "mfreadwrite.lib")
#include <grabber/MFGrabber.h>
#define SAFE_RELEASE(x) if(x) { x->Release(); x = nullptr; }
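// Asynchronous IMFSourceReaderCallback: locks each sample's contiguous buffer and forwards it to MFGrabber::receive_image()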
class SourceReaderCB : public IMFSourceReaderCallback
{
public:
SourceReaderCB(MFGrabber* grabber)
: _nRefCount(1)
, _grabber(grabber)
, _bEOS(FALSE)
, _hrStatus(S_OK)
{
InitializeCriticalSection(&_critsec);
}
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID iid, void** ppv)
{
static const QITAB qit[] =
{
QITABENT(SourceReaderCB, IMFSourceReaderCallback),
{ 0 },
};
return QISearch(this, qit, iid, ppv);
}
STDMETHODIMP_(ULONG) AddRef()
{
return InterlockedIncrement(&_nRefCount);
}
STDMETHODIMP_(ULONG) Release()
{
ULONG uCount = InterlockedDecrement(&_nRefCount);
if (uCount == 0)
{
delete this;
}
return uCount;
}
// IMFSourceReaderCallback methods
STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex,
DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
{
EnterCriticalSection(&_critsec);
if (SUCCEEDED(hrStatus))
{
QString error = "";
bool frameSend = false;
if (pSample)
{
IMFMediaBuffer* buffer;
hrStatus = pSample->ConvertToContiguousBuffer(&buffer);
if (SUCCEEDED(hrStatus))
{
BYTE* data = nullptr;
DWORD maxLength = 0, currentLength = 0;
hrStatus = buffer->Lock(&data, &maxLength, &currentLength);
if(SUCCEEDED(hrStatus))
{
frameSend = true;
_grabber->receive_image(data,currentLength,error);
buffer->Unlock();
}
else
error = QString("buffer->Lock failed => %1").arg(hrStatus);
SAFE_RELEASE(buffer);
}
else
error = QString("pSample->ConvertToContiguousBuffer failed => %1").arg(hrStatus);
}
else
error = "pSample is NULL";
if (!frameSend)
_grabber->receive_image(NULL,0,error);
}
else
{
// Streaming error.
NotifyError(hrStatus);
}
if (MF_SOURCE_READERF_ENDOFSTREAM & dwStreamFlags)
{
// Reached the end of the stream.
_bEOS = TRUE;
}
_hrStatus = hrStatus;
LeaveCriticalSection(&_critsec);
return S_OK;
}
STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) { return S_OK; }
STDMETHODIMP OnFlush(DWORD) { return S_OK; }
private:
virtual ~SourceReaderCB() { DeleteCriticalSection(&_critsec); }
void NotifyError(HRESULT hr) { Error(_grabber->_log, "Source Reader error (0x%08lX)", hr); }
private:
long _nRefCount;
CRITICAL_SECTION _critsec;
MFGrabber* _grabber;
BOOL _bEOS;
HRESULT _hrStatus;
};

View File

@ -0,0 +1,175 @@
#include "grabber/MFThread.h"
#include <QDebug>
volatile bool MFThread::_isActive = false;
MFThread::MFThread():
_localData(nullptr),
_localDataSize(0),
_decompress(nullptr),
_isBusy(false),
_semaphore(1),
_imageResampler()
{
}
MFThread::~MFThread()
{
if (_decompress != nullptr)
tjDestroy(_decompress);
if (_localData != NULL)
{
free(_localData);
_localData = NULL;
_localDataSize = 0;
}
}
void MFThread::setup(
unsigned int threadIndex, PixelFormat pixelFormat, uint8_t* sharedData,
int size, int width, int height, int lineLength,
int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
VideoMode videoMode, int currentFrame, int pixelDecimation)
{
_workerIndex = threadIndex;
_lineLength = lineLength;
_pixelFormat = pixelFormat;
_size = size;
_width = width;
_height = height;
_subsamp = subsamp;
_cropLeft = cropLeft;
_cropTop = cropTop;
_cropBottom = cropBottom;
_cropRight = cropRight;
_currentFrame = currentFrame;
_pixelDecimation = pixelDecimation;
_imageResampler.setVideoMode(videoMode);
_imageResampler.setCropping(cropLeft, cropRight, cropTop, cropBottom);
_imageResampler.setHorizontalPixelDecimation(_pixelDecimation);
_imageResampler.setVerticalPixelDecimation(_pixelDecimation);
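// Grow the thread-local buffer if needed and copy the frame data, so the shared capture buffer can be reused by the caller right away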
if (size > _localDataSize)
{
if (_localData != NULL)
{
free(_localData);
_localData = NULL;
_localDataSize = 0;
}
_localData = (uint8_t *) malloc(size+1);
_localDataSize = size;
}
memcpy(_localData, sharedData, size);
}
void MFThread::run()
{
if (_isActive && _width > 0 && _height > 0)
{
if (_pixelFormat == PixelFormat::MJPEG)
{
processImageMjpeg();
}
else
{
Image<ColorRgb> image = Image<ColorRgb>();
_imageResampler.processImage(_localData, _width, _height, _lineLength, _pixelFormat, image);
emit newFrame(_workerIndex, image, _currentFrame);
}
}
}
bool MFThread::isBusy()
{
bool temp;
_semaphore.acquire();
if (_isBusy)
temp = true;
else
{
temp = false;
_isBusy = true;
}
_semaphore.release();
return temp;
}
void MFThread::noBusy()
{
_semaphore.acquire();
_isBusy = false;
_semaphore.release();
}
void MFThread::processImageMjpeg()
{
if (_decompress == nullptr)
{
_decompress = tjInitDecompress();
_scalingFactors = tjGetScalingFactors (&_scalingFactorsCount);
}
if (tjDecompressHeader2(_decompress, _localData, _size, &_width, &_height, &_subsamp) != 0)
{
if (tjGetErrorCode(_decompress) == TJERR_FATAL)
return;
}
int scaledWidth = _width, scaledHeight = _height;
if(_scalingFactors != nullptr && _pixelDecimation > 1)
{
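// Pick the first libjpeg-turbo scaling factor that satisfies the requested pixel decimation; if none does, fall back to the last (smallest) factor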
for (int i = 0; i < _scalingFactorsCount ; i++)
{
const int tempWidth = TJSCALED(_width, _scalingFactors[i]);
const int tempHeight = TJSCALED(_height, _scalingFactors[i]);
if (tempWidth <= _width/_pixelDecimation && tempHeight <= _height/_pixelDecimation)
{
scaledWidth = tempWidth;
scaledHeight = tempHeight;
break;
}
}
if (scaledWidth == _width && scaledHeight == _height)
{
scaledWidth = TJSCALED(_width, _scalingFactors[_scalingFactorsCount-1]);
scaledHeight = TJSCALED(_height, _scalingFactors[_scalingFactorsCount-1]);
}
}
Image<ColorRgb> srcImage(scaledWidth, scaledHeight);
if (tjDecompress2(_decompress, _localData , _size, (unsigned char*)srcImage.memptr(), scaledWidth, 0, scaledHeight, TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE) != 0)
{
if (tjGetErrorCode(_decompress) == TJERR_FATAL)
return;
}
// got image, process it
if ( !(_cropLeft > 0 || _cropTop > 0 || _cropBottom > 0 || _cropRight > 0))
emit newFrame(_workerIndex, srcImage, _currentFrame);
else
{
// calculate the output size
int outputWidth = (_width - _cropLeft - _cropRight);
int outputHeight = (_height - _cropTop - _cropBottom);
if (outputWidth <= 0 || outputHeight <= 0)
return;
Image<ColorRgb> destImage(outputWidth, outputHeight);
for (unsigned int y = 0; y < destImage.height(); y++)
{
unsigned char* source = (unsigned char*)srcImage.memptr() + (y + _cropTop)*srcImage.width()*3 + _cropLeft*3;
unsigned char* dest = (unsigned char*)destImage.memptr() + y*destImage.width()*3;
memcpy(dest, source, destImage.width()*3);
}
// emit
emit newFrame(_workerIndex, destImage, _currentFrame);
}
}

View File

@ -0,0 +1,161 @@
#include <QMetaType>
#include <grabber/MFWrapper.h>
// qt
#include <QTimer>
MFWrapper::MFWrapper(const QString &device, unsigned grabWidth, unsigned grabHeight, unsigned fps, unsigned input, int pixelDecimation )
: GrabberWrapper("V4L2:"+device, &_grabber, grabWidth, grabHeight, 10)
, _grabber(device, grabWidth, grabHeight, fps, input, pixelDecimation)
{
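// The grabber name keeps the "V4L2" prefix so the existing V4L2 settings handling and grabber client tracking in GrabberWrapper also apply to the Media Foundation grabber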
_ggrabber = &_grabber;
// register the image type
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
// Handle the image in the captured thread using a direct connection
connect(&_grabber, &MFGrabber::newFrame, this, &MFWrapper::newFrame, Qt::DirectConnection);
}
MFWrapper::~MFWrapper()
{
stop();
}
bool MFWrapper::start()
{
return ( _grabber.start() && GrabberWrapper::start());
}
void MFWrapper::stop()
{
_grabber.stop();
GrabberWrapper::stop();
}
void MFWrapper::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
{
_grabber.setSignalThreshold( redSignalThreshold, greenSignalThreshold, blueSignalThreshold, noSignalCounterThreshold);
}
void MFWrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
_grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
void MFWrapper::setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax)
{
_grabber.setSignalDetectionOffset(verticalMin, horizontalMin, verticalMax, horizontalMax);
}
void MFWrapper::newFrame(const Image<ColorRgb> &image)
{
emit systemImage(_grabberName, image);
}
void MFWrapper::action()
{
// dummy
}
void MFWrapper::setSignalDetectionEnable(bool enable)
{
_grabber.setSignalDetectionEnable(enable);
}
bool MFWrapper::getSignalDetectionEnable() const
{
return _grabber.getSignalDetectionEnabled();
}
void MFWrapper::setCecDetectionEnable(bool enable)
{
_grabber.setCecDetectionEnable(enable);
}
bool MFWrapper::getCecDetectionEnable() const
{
return _grabber.getCecDetectionEnabled();
}
void MFWrapper::setDeviceVideoStandard(const QString& device, VideoStandard videoStandard)
{
_grabber.setDeviceVideoStandard(device, VideoStandard::NO_CHANGE);
}
void MFWrapper::setFpsSoftwareDecimation(int decimation)
{
_grabber.setFpsSoftwareDecimation(decimation);
}
void MFWrapper::setEncoding(QString enc)
{
_grabber.setEncoding(enc);
}
void MFWrapper::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue)
{
_grabber.setBrightnessContrastSaturationHue(brightness, contrast, saturation, hue);
}
void MFWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::V4L2 && _grabberName.startsWith("V4L2"))
{
// extract settings
const QJsonObject& obj = config.object();
// device name, video standard
_grabber.setDeviceVideoStandard(
obj["device"].toString("auto"),
parseVideoStandard(obj["standard"].toString("no-change")));
// device input
_grabber.setInput(obj["input"].toInt(-1));
// device resolution
_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0));
// device framerate
_grabber.setFramerate(obj["fps"].toInt(15));
// image size decimation
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
// image cropping
_grabber.setCropping(
obj["cropLeft"].toInt(0),
obj["cropRight"].toInt(0),
obj["cropTop"].toInt(0),
obj["cropBottom"].toInt(0));
// Brightness, Contrast, Saturation, Hue
_grabber.setBrightnessContrastSaturationHue(obj["hardware_brightness"].toInt(0),
obj["hardware_contrast"].toInt(0),
obj["hardware_saturation"].toInt(0),
obj["hardware_hue"].toInt(0));
// CEC Standby
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
// software frame skipping
_grabber.setFpsSoftwareDecimation(obj["fpsSoftwareDecimation"].toInt(1));
// Signal detection
_grabber.setSignalDetectionOffset(
obj["sDHOffsetMin"].toDouble(0.25),
obj["sDVOffsetMin"].toDouble(0.25),
obj["sDHOffsetMax"].toDouble(0.75),
obj["sDVOffsetMax"].toDouble(0.75));
_grabber.setSignalThreshold(
obj["redSignalThreshold"].toDouble(0.0)/100.0,
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
obj["blueSignalThreshold"].toDouble(0.0)/100.0,
obj["noSignalCounterThreshold"].toInt(50) );
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
// Hardware encoding format
_grabber.setEncoding(obj["encoding"].toString("NO_CHANGE"));
}
}

View File

@ -30,15 +30,7 @@
#define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility. #define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility.
#endif #endif
V4L2Grabber::V4L2Grabber(const QString & device V4L2Grabber::V4L2Grabber(const QString & device, unsigned width, unsigned height, unsigned fps, unsigned input, VideoStandard videoStandard, PixelFormat pixelFormat, int pixelDecimation)
, unsigned width
, unsigned height
, unsigned fps
, unsigned input
, VideoStandard videoStandard
, PixelFormat pixelFormat
, int pixelDecimation
)
: Grabber("V4L2:"+device) : Grabber("V4L2:"+device)
, _deviceName() , _deviceName()
, _videoStandard(videoStandard) , _videoStandard(videoStandard)
@ -46,7 +38,7 @@ V4L2Grabber::V4L2Grabber(const QString & device
, _fileDescriptor(-1) , _fileDescriptor(-1)
, _buffers() , _buffers()
, _pixelFormat(pixelFormat) , _pixelFormat(pixelFormat)
, _pixelDecimation(-1) , _pixelDecimation(pixelDecimation)
, _lineLength(-1) , _lineLength(-1)
, _frameByteSize(-1) , _frameByteSize(-1)
, _noSignalCounterThreshold(40) , _noSignalCounterThreshold(40)

View File

@ -12,7 +12,7 @@ V4L2Wrapper::V4L2Wrapper(const QString &device,
unsigned input, unsigned input,
VideoStandard videoStandard, VideoStandard videoStandard,
PixelFormat pixelFormat, PixelFormat pixelFormat,
int pixelDecimation ) int pixelDecimation)
: GrabberWrapper("V4L2:"+device, &_grabber, grabWidth, grabHeight, 10) : GrabberWrapper("V4L2:"+device, &_grabber, grabWidth, grabHeight, 10)
, _grabber(device, , _grabber(device,
grabWidth, grabWidth,

View File

@ -65,9 +65,17 @@ bool GrabberWrapper::isActive() const
return _timer->isActive(); return _timer->isActive();
} }
QString GrabberWrapper::getActive() const QStringList GrabberWrapper::getActive(int inst) const
{ {
return _grabberName; QStringList result = QStringList();
if(GRABBER_V4L_CLIENTS.contains(inst))
result << GRABBER_V4L_CLIENTS.value(inst);
if(GRABBER_SYS_CLIENTS.contains(inst))
result << GRABBER_SYS_CLIENTS.value(inst);
return result;
} }
QStringList GrabberWrapper::availableGrabbers() QStringList GrabberWrapper::availableGrabbers()
@ -78,7 +86,7 @@ QStringList GrabberWrapper::availableGrabbers()
grabbers << "dispmanx"; grabbers << "dispmanx";
#endif #endif
#ifdef ENABLE_V4L2 #if defined(ENABLE_V4L2) || defined(ENABLE_MF)
grabbers << "v4l2"; grabbers << "v4l2";
#endif #endif
@ -178,9 +186,9 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp
if(component == hyperion::Components::COMP_GRABBER && !_grabberName.startsWith("V4L")) if(component == hyperion::Components::COMP_GRABBER && !_grabberName.startsWith("V4L"))
{ {
if(listen && !GRABBER_SYS_CLIENTS.contains(hyperionInd)) if(listen && !GRABBER_SYS_CLIENTS.contains(hyperionInd))
GRABBER_SYS_CLIENTS.append(hyperionInd); GRABBER_SYS_CLIENTS.insert(hyperionInd, _grabberName);
else if (!listen) else if (!listen)
GRABBER_SYS_CLIENTS.removeOne(hyperionInd); GRABBER_SYS_CLIENTS.remove(hyperionInd);
if(GRABBER_SYS_CLIENTS.empty()) if(GRABBER_SYS_CLIENTS.empty())
stop(); stop();
@ -190,9 +198,9 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp
else if(component == hyperion::Components::COMP_V4L && _grabberName.startsWith("V4L")) else if(component == hyperion::Components::COMP_V4L && _grabberName.startsWith("V4L"))
{ {
if(listen && !GRABBER_V4L_CLIENTS.contains(hyperionInd)) if(listen && !GRABBER_V4L_CLIENTS.contains(hyperionInd))
GRABBER_V4L_CLIENTS.append(hyperionInd); GRABBER_V4L_CLIENTS.insert(hyperionInd, _grabberName);
else if (!listen) else if (!listen)
GRABBER_V4L_CLIENTS.removeOne(hyperionInd); GRABBER_V4L_CLIENTS.remove(hyperionInd);
if(GRABBER_V4L_CLIENTS.empty()) if(GRABBER_V4L_CLIENTS.empty())
stop(); stop();
@ -234,6 +242,14 @@ QMultiMap<QString, int> GrabberWrapper::getV4L2deviceInputs(const QString& devic
return QMultiMap<QString, int>(); return QMultiMap<QString, int>();
} }
QStringList GrabberWrapper::getV4L2EncodingFormats(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2EncodingFormats(devicePath);
return QStringList();
}
QStringList GrabberWrapper::getResolutions(const QString& devicePath) const QStringList GrabberWrapper::getResolutions(const QString& devicePath) const
{ {
if(_grabberName.startsWith("V4L")) if(_grabberName.startsWith("V4L"))

View File

@ -28,6 +28,18 @@
"propertyOrder" : 4, "propertyOrder" : 4,
"comment" : "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '3'." "comment" : "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '3'."
}, },
"encoding" :
{
"type" : "string",
"title" : "edt_conf_enum_custom",
"default" : "auto",
"options" : {
"hidden":true
},
"required" : true,
"propertyOrder" : 6,
"comment" : "The 'device_encodings' settings are dynamically inserted into the WebUI under PropertyOrder '5'."
},
"standard" : "standard" :
{ {
"type" : "string", "type" : "string",
@ -38,7 +50,7 @@
"enum_titles" : ["edt_conf_enum_NO_CHANGE", "edt_conf_enum_PAL", "edt_conf_enum_NTSC", "edt_conf_enum_SECAM"] "enum_titles" : ["edt_conf_enum_NO_CHANGE", "edt_conf_enum_PAL", "edt_conf_enum_NTSC", "edt_conf_enum_SECAM"]
}, },
"required" : true, "required" : true,
"propertyOrder" : 5 "propertyOrder" : 7
}, },
"width" : "width" :
{ {
@ -51,8 +63,8 @@
"hidden":true "hidden":true
}, },
"required" : true, "required" : true,
"propertyOrder" : 7, "propertyOrder" : 9,
"comment" : "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'." "comment" : "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '8'."
}, },
"height" : "height" :
{ {
@ -65,7 +77,8 @@
"hidden":true "hidden":true
}, },
"required" : true, "required" : true,
"propertyOrder" : 8 "propertyOrder" : 10,
"comment" : "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'."
}, },
"fps" : "fps" :
{ {
@ -78,8 +91,18 @@
"hidden":true "hidden":true
}, },
"required" : true, "required" : true,
"propertyOrder" : 10, "propertyOrder" : 12,
"comment" : "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '9'." "comment" : "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '11'."
},
"fpsSoftwareDecimation" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_fpsSoftwareDecimation_title",
"minimum" : 1,
"maximum" : 60,
"default" : 1,
"required" : true,
"propertyOrder" : 13
}, },
"sizeDecimation" : "sizeDecimation" :
{ {
@ -89,7 +112,7 @@
"maximum" : 30, "maximum" : 30,
"default" : 6, "default" : 6,
"required" : true, "required" : true,
"propertyOrder" : 11 "propertyOrder" : 14
}, },
"cropLeft" : "cropLeft" :
{ {
@ -99,7 +122,7 @@
"default" : 0, "default" : 0,
"append" : "edt_append_pixel", "append" : "edt_append_pixel",
"required" : true, "required" : true,
"propertyOrder" : 12 "propertyOrder" : 15
}, },
"cropRight" : "cropRight" :
{ {
@ -109,7 +132,7 @@
"default" : 0, "default" : 0,
"append" : "edt_append_pixel", "append" : "edt_append_pixel",
"required" : true, "required" : true,
"propertyOrder" : 13 "propertyOrder" : 16
}, },
"cropTop" : "cropTop" :
{ {
@ -119,7 +142,7 @@
"default" : 0, "default" : 0,
"append" : "edt_append_pixel", "append" : "edt_append_pixel",
"required" : true, "required" : true,
"propertyOrder" : 14 "propertyOrder" : 17
}, },
"cropBottom" : "cropBottom" :
{ {
@ -129,7 +152,7 @@
"default" : 0, "default" : 0,
"append" : "edt_append_pixel", "append" : "edt_append_pixel",
"required" : true, "required" : true,
"propertyOrder" : 15 "propertyOrder" : 18
}, },
"cecDetection" : "cecDetection" :
{ {
@ -137,7 +160,7 @@
"title" : "edt_conf_v4l2_cecDetection_title", "title" : "edt_conf_v4l2_cecDetection_title",
"default" : false, "default" : false,
"required" : true, "required" : true,
"propertyOrder" : 16 "propertyOrder" : 19
}, },
"signalDetection" : "signalDetection" :
{ {
@ -145,7 +168,7 @@
"title" : "edt_conf_v4l2_signalDetection_title", "title" : "edt_conf_v4l2_signalDetection_title",
"default" : false, "default" : false,
"required" : true, "required" : true,
"propertyOrder" : 17 "propertyOrder" : 20
}, },
"redSignalThreshold" : "redSignalThreshold" :
{ {
@ -161,7 +184,7 @@
} }
}, },
"required" : true, "required" : true,
"propertyOrder" : 18 "propertyOrder" : 21
}, },
"greenSignalThreshold" : "greenSignalThreshold" :
{ {
@ -177,7 +200,7 @@
} }
}, },
"required" : true, "required" : true,
"propertyOrder" : 19 "propertyOrder" : 22
}, },
"blueSignalThreshold" : "blueSignalThreshold" :
{ {
@ -193,7 +216,22 @@
} }
}, },
"required" : true, "required" : true,
"propertyOrder" : 20 "propertyOrder" : 23
},
"noSignalCounterThreshold" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_noSignalCounterThreshold_title",
"minimum" : 1,
"maximum" : 1000,
"default" : 200,
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 24
}, },
"sDVOffsetMin" : "sDVOffsetMin" :
{ {
@ -209,7 +247,7 @@
} }
}, },
"required" : true, "required" : true,
"propertyOrder" : 21 "propertyOrder" : 25
}, },
"sDVOffsetMax" : "sDVOffsetMax" :
{ {
@ -225,7 +263,7 @@
} }
}, },
"required" : true, "required" : true,
"propertyOrder" : 22 "propertyOrder" : 26
}, },
"sDHOffsetMin" : "sDHOffsetMin" :
{ {
@ -241,7 +279,7 @@
} }
}, },
"required" : true, "required" : true,
"propertyOrder" : 23 "propertyOrder" : 27
}, },
"sDHOffsetMax" : "sDHOffsetMax" :
{ {
@ -257,7 +295,39 @@
} }
}, },
"required" : true, "required" : true,
"propertyOrder" : 24 "propertyOrder" : 28
},
"hardware_brightness" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_hardware_brightness_title",
"default" : 0,
"required" : true,
"propertyOrder" : 29
},
"hardware_contrast" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_hardware_contrast_title",
"default" : 0,
"required" : true,
"propertyOrder" : 30
},
"hardware_saturation" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_hardware_saturation_title",
"default" : 0,
"required" : true,
"propertyOrder" : 31
},
"hardware_hue" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_hardware_hue_title",
"default" : 0,
"required" : true,
"propertyOrder" : 32
} }
}, },
"additionalProperties" : true "additionalProperties" : true

View File

@ -12,7 +12,7 @@ find_package(Qt5Widgets REQUIRED)
if (WIN32) if (WIN32)
include(${CMAKE_SOURCE_DIR}/cmake/win/win_rc.cmake) include(${CMAKE_SOURCE_DIR}/cmake/win/win_rc.cmake)
generate_win_rc_file(hyperiond) generate_win_rc_file(hyperiond)
endif() endif(WIN32)
add_executable(hyperiond add_executable(hyperiond
console.h console.h
@ -27,7 +27,7 @@ add_executable(hyperiond
# promote hyperiond as GUI app # promote hyperiond as GUI app
if (WIN32) if (WIN32)
target_link_options(hyperiond PUBLIC /SUBSYSTEM:WINDOWS /ENTRY:mainCRTStartup) target_link_options(hyperiond PUBLIC /SUBSYSTEM:WINDOWS /ENTRY:mainCRTStartup)
endif() endif(WIN32)
target_link_libraries(hyperiond target_link_libraries(hyperiond
commandline commandline
@ -52,14 +52,14 @@ endif()
if (ENABLE_AVAHI) if (ENABLE_AVAHI)
target_link_libraries(hyperiond bonjour) target_link_libraries(hyperiond bonjour)
endif () endif (ENABLE_AVAHI)
if (ENABLE_AMLOGIC) if (ENABLE_AMLOGIC)
target_link_libraries(hyperiond target_link_libraries(hyperiond
Qt5::Core Qt5::Core
pcre16 dl z pcre16 dl z
) )
endif() endif(ENABLE_AMLOGIC)
if (ENABLE_DISPMANX) if (ENABLE_DISPMANX)
IF ( "${PLATFORM}" MATCHES rpi) IF ( "${PLATFORM}" MATCHES rpi)
@ -70,47 +70,51 @@ if (ENABLE_DISPMANX)
SET(BCM_LIBRARIES "") SET(BCM_LIBRARIES "")
ENDIF() ENDIF()
target_link_libraries(hyperiond dispmanx-grabber) target_link_libraries(hyperiond dispmanx-grabber)
endif () endif (ENABLE_DISPMANX)
if (ENABLE_FB) if (ENABLE_FB)
target_link_libraries(hyperiond framebuffer-grabber) target_link_libraries(hyperiond framebuffer-grabber)
endif () endif (ENABLE_FB)
if (ENABLE_OSX) if (ENABLE_OSX)
target_link_libraries(hyperiond osx-grabber) target_link_libraries(hyperiond osx-grabber)
endif () endif (ENABLE_OSX)
if (ENABLE_V4L2) if (ENABLE_V4L2)
target_link_libraries(hyperiond v4l2-grabber) target_link_libraries(hyperiond v4l2-grabber)
endif () endif ()
if (ENABLE_MF)
target_link_libraries(hyperiond mf-grabber)
endif (ENABLE_MF)
if (ENABLE_AMLOGIC) if (ENABLE_AMLOGIC)
target_link_libraries(hyperiond amlogic-grabber) target_link_libraries(hyperiond amlogic-grabber)
endif () endif (ENABLE_AMLOGIC)
if (ENABLE_X11) if (ENABLE_X11)
if(APPLE) if(APPLE)
include_directories("/opt/X11/include") include_directories("/opt/X11/include")
endif(APPLE) endif(APPLE)
target_link_libraries(hyperiond x11-grabber) target_link_libraries(hyperiond x11-grabber)
endif () endif (ENABLE_X11)
if (ENABLE_XCB) if (ENABLE_XCB)
target_link_libraries(hyperiond xcb-grabber) target_link_libraries(hyperiond xcb-grabber)
endif () endif (ENABLE_XCB)
if (ENABLE_QT) if (ENABLE_QT)
target_link_libraries(hyperiond qt-grabber) target_link_libraries(hyperiond qt-grabber)
endif () endif (ENABLE_QT)
if (ENABLE_DX) if (ENABLE_DX)
include_directories(${DIRECTX9_INCLUDE_DIRS}) include_directories(${DIRECTX9_INCLUDE_DIRS})
target_link_libraries(hyperiond directx-grabber) target_link_libraries(hyperiond directx-grabber)
endif () endif (ENABLE_DX)
if (ENABLE_CEC) if (ENABLE_CEC)
target_link_libraries(hyperiond cechandler) target_link_libraries(hyperiond cechandler)
endif () endif (ENABLE_CEC)
if(NOT WIN32) if(NOT WIN32)
install ( TARGETS hyperiond DESTINATION "share/hyperion/bin" COMPONENT "Hyperion" ) install ( TARGETS hyperiond DESTINATION "share/hyperion/bin" COMPONENT "Hyperion" )

View File

@ -74,6 +74,7 @@ HyperionDaemon::HyperionDaemon(const QString& rootPath, QObject* parent, bool lo
, _sslWebserver(nullptr) , _sslWebserver(nullptr)
, _jsonServer(nullptr) , _jsonServer(nullptr)
, _v4l2Grabber(nullptr) , _v4l2Grabber(nullptr)
, _mfGrabber(nullptr)
, _dispmanx(nullptr) , _dispmanx(nullptr)
, _x11Grabber(nullptr) , _x11Grabber(nullptr)
, _xcbGrabber(nullptr) , _xcbGrabber(nullptr)
@ -145,7 +146,7 @@ HyperionDaemon::HyperionDaemon(const QString& rootPath, QObject* parent, bool lo
// init system capture (framegrabber) // init system capture (framegrabber)
handleSettingsUpdate(settings::SYSTEMCAPTURE, getSetting(settings::SYSTEMCAPTURE)); handleSettingsUpdate(settings::SYSTEMCAPTURE, getSetting(settings::SYSTEMCAPTURE));
// init v4l2 capture // init v4l2 && media foundation capture
handleSettingsUpdate(settings::V4L2, getSetting(settings::V4L2)); handleSettingsUpdate(settings::V4L2, getSetting(settings::V4L2));
// ---- network services ----- // ---- network services -----
@ -253,15 +254,16 @@ void HyperionDaemon::freeObjects()
delete _qtGrabber; delete _qtGrabber;
delete _dxGrabber; delete _dxGrabber;
delete _v4l2Grabber; delete _v4l2Grabber;
delete _mfGrabber;
_v4l2Grabber = nullptr; _v4l2Grabber = nullptr;
_mfGrabber = nullptr;
_amlGrabber = nullptr; _amlGrabber = nullptr;
_dispmanx = nullptr; _dispmanx = nullptr;
_fbGrabber = nullptr; _fbGrabber = nullptr;
_osxGrabber = nullptr; _osxGrabber = nullptr;
_qtGrabber = nullptr; _qtGrabber = nullptr;
_dxGrabber = nullptr; _dxGrabber = nullptr;
} }
void HyperionDaemon::startNetworkServices() void HyperionDaemon::startNetworkServices()
@ -579,7 +581,7 @@ void HyperionDaemon::handleSettingsUpdate(settings::type settingsType, const QJs
else if (settingsType == settings::V4L2) else if (settingsType == settings::V4L2)
{ {
#if defined(ENABLE_CEC) || defined(ENABLE_V4L2) #if defined(ENABLE_CEC) || defined(ENABLE_V4L2) || defined(ENABLE_MF)
const QJsonObject& grabberConfig = config.object(); const QJsonObject& grabberConfig = config.object();
#endif #endif
@ -594,6 +596,62 @@ void HyperionDaemon::handleSettingsUpdate(settings::type settingsType, const QJs
} }
#endif #endif
#if defined(ENABLE_MF)
if (_mfGrabber == nullptr)
{
_mfGrabber = new MFWrapper(
grabberConfig["device"].toString("auto"),
grabberConfig["width"].toInt(0),
grabberConfig["height"].toInt(0),
grabberConfig["fps"].toInt(15),
grabberConfig["input"].toInt(-1),
grabberConfig["sizeDecimation"].toInt(8));
// Image cropping
_mfGrabber->setCropping(
grabberConfig["cropLeft"].toInt(0),
grabberConfig["cropRight"].toInt(0),
grabberConfig["cropTop"].toInt(0),
grabberConfig["cropBottom"].toInt(0));
// Software frame decimation
_mfGrabber->setFpsSoftwareDecimation(grabberConfig["fpsSoftwareDecimation"].toInt(1));
// Hardware encoding format
_mfGrabber->setEncoding(grabberConfig["encoding"].toString("NONE"));
// Signal detection
_mfGrabber->setSignalDetectionOffset(
grabberConfig["sDHOffsetMin"].toDouble(0.25),
grabberConfig["sDVOffsetMin"].toDouble(0.25),
grabberConfig["sDHOffsetMax"].toDouble(0.75),
grabberConfig["sDVOffsetMax"].toDouble(0.75));
_mfGrabber->setSignalThreshold(
grabberConfig["redSignalThreshold"].toDouble(0.0) / 100.0,
grabberConfig["greenSignalThreshold"].toDouble(0.0) / 100.0,
grabberConfig["blueSignalThreshold"].toDouble(0.0) / 100.0,
grabberConfig["noSignalCounterThreshold"].toInt(50) );
_mfGrabber->setSignalDetectionEnable(grabberConfig["signalDetection"].toBool(true));
// CEC Standby
_mfGrabber->setCecDetectionEnable(grabberConfig["cecDetection"].toBool(true));
// Brightness, Contrast, Saturation, Hue
_mfGrabber->setBrightnessContrastSaturationHue(grabberConfig["hardware_brightness"].toInt(0),
grabberConfig["hardware_contrast"].toInt(0),
grabberConfig["hardware_saturation"].toInt(0),
grabberConfig["hardware_hue"].toInt(0));
Debug(_log, "Media Foundation grabber created");
// connect to HyperionDaemon signal
connect(this, &HyperionDaemon::videoMode, _mfGrabber, &MFWrapper::setVideoMode);
connect(this, &HyperionDaemon::settingsChanged, _mfGrabber, &MFWrapper::handleSettingsUpdate);
}
#elif !defined(ENABLE_V4L2)
Warning(_log, "The Media Foundation grabber can not be instantiated, because it has been left out from the build");
#endif
if (_v4l2Grabber != nullptr) if (_v4l2Grabber != nullptr)
{ {
return; return;
@ -632,7 +690,7 @@ void HyperionDaemon::handleSettingsUpdate(settings::type settingsType, const QJs
// connect to HyperionDaemon signal // connect to HyperionDaemon signal
connect(this, &HyperionDaemon::videoMode, _v4l2Grabber, &V4L2Wrapper::setVideoMode); connect(this, &HyperionDaemon::videoMode, _v4l2Grabber, &V4L2Wrapper::setVideoMode);
connect(this, &HyperionDaemon::settingsChanged, _v4l2Grabber, &V4L2Wrapper::handleSettingsUpdate); connect(this, &HyperionDaemon::settingsChanged, _v4l2Grabber, &V4L2Wrapper::handleSettingsUpdate);
#else #elif !defined(ENABLE_MF)
Debug(_log, "The v4l2 grabber is not supported on this platform"); Debug(_log, "The v4l2 grabber is not supported on this platform");
#endif #endif
} }

View File

@ -16,6 +16,12 @@
typedef QObject V4L2Wrapper; typedef QObject V4L2Wrapper;
#endif #endif
#ifdef ENABLE_MF
#include <grabber/MFWrapper.h>
#else
typedef QObject MFWrapper;
#endif
#ifdef ENABLE_FB #ifdef ENABLE_FB
#include <grabber/FramebufferWrapper.h> #include <grabber/FramebufferWrapper.h>
#else #else
@ -171,6 +177,7 @@ private:
WebServer* _sslWebserver; WebServer* _sslWebserver;
JsonServer* _jsonServer; JsonServer* _jsonServer;
V4L2Wrapper* _v4l2Grabber; V4L2Wrapper* _v4l2Grabber;
MFWrapper* _mfGrabber;
DispmanxWrapper* _dispmanx; DispmanxWrapper* _dispmanx;
X11Wrapper* _x11Grabber; X11Wrapper* _x11Grabber;
XcbWrapper* _xcbGrabber; XcbWrapper* _xcbGrabber;