Merge branch 'master' into Razer_Chroma_Support

# Conflicts:
#	assets/webconfig/i18n/en.json
#	assets/webconfig/js/content_leds.js
#	libsrc/leddevice/dev_net/ProviderRestApi.cpp
#	libsrc/leddevice/dev_net/ProviderRestApi.h
This commit is contained in:
LordGrey
2021-11-01 15:40:37 +01:00
474 changed files with 24405 additions and 24526 deletions

View File

@@ -10,7 +10,9 @@ add_subdirectory(jsonserver)
add_subdirectory(flatbufserver)
add_subdirectory(protoserver)
add_subdirectory(ssdp)
add_subdirectory(boblightserver)
if(ENABLE_BOBLIGHT)
add_subdirectory(boblightserver)
endif()
add_subdirectory(leddevice)
add_subdirectory(utils)
add_subdirectory(effectengine)

View File

@@ -1,3 +1,12 @@
# Find the BCM-package (VC control)
IF ( "${PLATFORM}" MATCHES rpi)
find_package(BCM REQUIRED)
include_directories(${BCM_INCLUDE_DIRS})
ELSE()
SET(BCM_INCLUDE_DIRS "")
SET(BCM_LIBRARIES "")
ENDIF()
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/api)
@@ -12,6 +21,11 @@ add_library(hyperion-api
${Api_RESOURCES}
)
if(ENABLE_DX)
include_directories(${DIRECTX9_INCLUDE_DIRS})
target_link_libraries(hyperion-api ${DIRECTX9_LIBRARIES})
endif(ENABLE_DX)
target_link_libraries(hyperion-api
hyperion
hyperion-utils

View File

@@ -0,0 +1,28 @@
{
"type":"object",
"required":true,
"properties": {
"command": {
"type": "string",
"required": true,
"enum": [ "inputsource" ]
},
"tan": {
"type": "integer"
},
"subcommand": {
"type": "string",
"required": true,
"enum": [ "discover", "getProperties" ]
},
"sourceType": {
"type": "string",
"required": true
},
"params": {
"type": "object",
"required": false
}
},
"additionalProperties": false
}
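As an illustration only, a client request that validates against this new schema could be assembled with Qt's JSON classes roughly as follows (a minimal sketch; the WebSocket/TCP transport is omitted and the values are merely examples - "video" and "screen" are the two sourceType values the JSON API handles):

#include <QByteArray>
#include <QJsonDocument>
#include <QJsonObject>

// Illustrative only: builds a request that matches the schema above.
// The result is what a client would send to the JSON server.
QByteArray buildInputSourceDiscoverRequest(int tan)
{
    QJsonObject request;
    request["command"]    = "inputsource";
    request["subcommand"] = "discover";
    request["sourceType"] = "video";
    request["tan"]        = tan;
    return QJsonDocument(request).toJson(QJsonDocument::Compact);
}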

View File

@@ -5,7 +5,7 @@
"command": {
"type" : "string",
"required" : true,
"enum" : ["color", "image", "effect", "create-effect", "delete-effect", "serverinfo", "clear", "clearall", "adjustment", "sourceselect", "config", "componentstate", "ledcolors", "logging", "processing", "sysinfo", "videomode", "authorize", "instance", "leddevice", "transform", "correction" , "temperature"]
"enum": [ "color", "image", "effect", "create-effect", "delete-effect", "serverinfo", "clear", "clearall", "adjustment", "sourceselect", "config", "componentstate", "ledcolors", "logging", "processing", "sysinfo", "videomode", "authorize", "instance", "leddevice", "inputsource", "transform", "correction", "temperature" ]
}
}
}

View File

@@ -20,7 +20,8 @@
<file alias="schema-videomode">JSONRPC_schema/schema-videomode.json</file>
<file alias="schema-authorize">JSONRPC_schema/schema-authorize.json</file>
<file alias="schema-instance">JSONRPC_schema/schema-instance.json</file>
<file alias="schema-leddevice">JSONRPC_schema/schema-leddevice.json</file>
<file alias="schema-leddevice">JSONRPC_schema/schema-leddevice.json</file>
<file alias="schema-inputsource">JSONRPC_schema/schema-inputsource.json</file>
<!-- The following schemas are deprecated but used to ensure backward compatibility with hyperion Classic remote control -->
<file alias="schema-transform">JSONRPC_schema/schema-hyperion-classic.json</file>
<file alias="schema-correction">JSONRPC_schema/schema-hyperion-classic.json</file>

View File

@@ -16,7 +16,45 @@
#include <leddevice/LedDevice.h>
#include <leddevice/LedDeviceFactory.h>
#include <HyperionConfig.h> // Required to determine the cmake options
#include <hyperion/GrabberWrapper.h>
#include <grabber/QtGrabber.h>
#if defined(ENABLE_MF)
#include <grabber/MFGrabber.h>
#elif defined(ENABLE_V4L2)
#include <grabber/V4L2Grabber.h>
#endif
#if defined(ENABLE_X11)
#include <grabber/X11Grabber.h>
#endif
#if defined(ENABLE_XCB)
#include <grabber/XcbGrabber.h>
#endif
#if defined(ENABLE_DX)
#include <grabber/DirectXGrabber.h>
#endif
#if defined(ENABLE_FB)
#include <grabber/FramebufferFrameGrabber.h>
#endif
#if defined(ENABLE_DISPMANX)
#include <grabber/DispmanxFrameGrabber.h>
#endif
#if defined(ENABLE_AMLOGIC)
#include <grabber/AmlogicGrabber.h>
#endif
#if defined(ENABLE_OSX)
#include <grabber/OsxFrameGrabber.h>
#endif
#include <utils/jsonschema/QJsonFactory.h>
#include <utils/jsonschema/QJsonSchemaChecker.h>
#include <HyperionConfig.h>
@@ -41,6 +79,9 @@
using namespace hyperion;
// Constants
namespace { const bool verbose = false; }
JsonAPI::JsonAPI(QString peerAddress, Logger *log, bool localConnection, QObject *parent, bool noListener)
: API(log, localConnection, parent)
{
@@ -57,8 +98,8 @@ void JsonAPI::initialize()
{
// init API, REQUIRED!
API::init();
// REMOVE when jsonCB is migrated
handleInstanceSwitch(0);
// Initialise jsonCB with current instance
_jsonCB->setSubscriptionsTo(_hyperion);
// setup auth interface
connect(this, &API::onPendingTokenRequest, this, &JsonAPI::newPendingTokenRequest);
@@ -90,6 +131,8 @@ void JsonAPI::handleMessage(const QString &messageString, const QString &httpAut
{
const QString ident = "JsonRpc@" + _peerAddress;
QJsonObject message;
//std::cout << "JsonAPI::handleMessage | [" << static_cast<int>(_hyperion->getInstanceIndex()) << "] Received: ["<< messageString.toStdString() << "]" << std::endl;
// parse the message
if (!JsonUtils::parse(ident, messageString, message, _log))
{
@@ -174,6 +217,8 @@ proceed:
handleInstanceCommand(message, command, tan);
else if (command == "leddevice")
handleLedDeviceCommand(message, command, tan);
else if (command == "inputsource")
handleInputSourceCommand(message, command, tan);
// BEGIN | The following commands are deprecated but used to ensure backward compatibility with hyperion Classic remote control
else if (command == "clearall")
@@ -285,6 +330,7 @@ void JsonAPI::handleSysInfoCommand(const QJsonObject &, const QString &command,
system["prettyName"] = data.prettyName;
system["hostName"] = data.hostName;
system["domainName"] = data.domainName;
system["isUserAdmin"] = data.isUserAdmin;
system["qtVersion"] = data.qtVersion;
system["pyVersion"] = data.pyVersion;
info["system"] = system;
@@ -295,6 +341,7 @@ void JsonAPI::handleSysInfoCommand(const QJsonObject &, const QString &command,
hyperion["gitremote"] = QString(HYPERION_GIT_REMOTE);
hyperion["time"] = QString(__DATE__ " " __TIME__);
hyperion["id"] = _authManager->getID();
hyperion["rootPath"] = _instanceManager->getRootPath();
hyperion["readOnlyMode"] = _hyperion->getReadOnlyMode();
info["hyperion"] = hyperion;
@@ -467,11 +514,18 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject &message, const QString
QJsonObject grabbers;
QJsonArray availableGrabbers;
#if defined(ENABLE_DISPMANX) || defined(ENABLE_V4L2) || defined(ENABLE_FB) || defined(ENABLE_AMLOGIC) || defined(ENABLE_OSX) || defined(ENABLE_X11) || defined(ENABLE_XCB) || defined(ENABLE_QT)
#if defined(ENABLE_DISPMANX) || defined(ENABLE_V4L2) || defined(ENABLE_MF) || defined(ENABLE_FB) || defined(ENABLE_AMLOGIC) || defined(ENABLE_OSX) || defined(ENABLE_X11) || defined(ENABLE_XCB) || defined(ENABLE_QT)
if ( GrabberWrapper::getInstance() != nullptr )
{
grabbers["active"] = GrabberWrapper::getInstance()->getActive();
QStringList activeGrabbers = GrabberWrapper::getInstance()->getActive(_hyperion->getInstanceIndex());
QJsonArray activeGrabberNames;
for (auto grabberName : activeGrabbers)
{
activeGrabberNames.append(grabberName);
}
grabbers["active"] = activeGrabberNames;
}
// get available grabbers
@@ -480,55 +534,20 @@ void JsonAPI::handleServerInfoCommand(const QJsonObject &message, const QString
availableGrabbers.append(grabber);
}
#endif
#if defined(ENABLE_V4L2)
QJsonArray availableV4L2devices;
for (const auto& devicePath : GrabberWrapper::getInstance()->getV4L2devices())
{
QJsonObject device;
device["device"] = devicePath;
device["name"] = GrabberWrapper::getInstance()->getV4L2deviceName(devicePath);
QJsonArray availableInputs;
QMultiMap<QString, int> inputs = GrabberWrapper::getInstance()->getV4L2deviceInputs(devicePath);
for (auto input = inputs.begin(); input != inputs.end(); input++)
{
QJsonObject availableInput;
availableInput["inputName"] = input.key();
availableInput["inputIndex"] = input.value();
availableInputs.append(availableInput);
}
device.insert("inputs", availableInputs);
QJsonArray availableResolutions;
QStringList resolutions = GrabberWrapper::getInstance()->getResolutions(devicePath);
for (auto resolution : resolutions)
{
availableResolutions.append(resolution);
}
device.insert("resolutions", availableResolutions);
QJsonArray availableFramerates;
QStringList framerates = GrabberWrapper::getInstance()->getFramerates(devicePath);
for (auto framerate : framerates)
{
availableFramerates.append(framerate);
}
device.insert("framerates", availableFramerates);
availableV4L2devices.append(device);
}
grabbers["v4l2_properties"] = availableV4L2devices;
#endif
grabbers["available"] = availableGrabbers;
info["videomode"] = QString(videoMode2String(_hyperion->getCurrentVideoMode()));
info["grabbers"] = grabbers;
QJsonObject cecInfo;
#if defined(ENABLE_CEC)
cecInfo["enabled"] = true;
#else
cecInfo["enabled"] = false;
#endif
info["cec"] = cecInfo;
// get available components
QJsonArray component;
std::map<hyperion::Components, bool> components = _hyperion->getComponentRegister().getRegister();
@@ -923,36 +942,25 @@ void JsonAPI::handleSchemaGetCommand(const QJsonObject &message, const QString &
properties.insert("alldevices", alldevices);
// collect all available effect schemas
QJsonObject pyEffectSchemas, pyEffectSchema;
QJsonArray in, ex;
const std::list<EffectSchema> &effectsSchemas = _hyperion->getEffectSchemas();
for (const EffectSchema &effectSchema : effectsSchemas)
QJsonArray schemaList;
const std::list<EffectSchema>& effectsSchemas = _hyperion->getEffectSchemas();
for (const EffectSchema& effectSchema : effectsSchemas)
{
if (effectSchema.pyFile.mid(0, 1) == ":")
QJsonObject schema;
schema.insert("script", effectSchema.pyFile);
schema.insert("schemaLocation", effectSchema.schemaFile);
schema.insert("schemaContent", effectSchema.pySchema);
if (effectSchema.pyFile.startsWith(':'))
{
QJsonObject internal;
internal.insert("script", effectSchema.pyFile);
internal.insert("schemaLocation", effectSchema.schemaFile);
internal.insert("schemaContent", effectSchema.pySchema);
in.append(internal);
schema.insert("type", "system");
}
else
{
QJsonObject external;
external.insert("script", effectSchema.pyFile);
external.insert("schemaLocation", effectSchema.schemaFile);
external.insert("schemaContent", effectSchema.pySchema);
ex.append(external);
schema.insert("type", "custom");
}
schemaList.append(schema);
}
if (!in.empty())
pyEffectSchema.insert("internal", in);
if (!ex.empty())
pyEffectSchema.insert("external", ex);
pyEffectSchemas = pyEffectSchema;
properties.insert("effectSchemas", pyEffectSchemas);
properties.insert("effectSchemas", schemaList);
schemaJson.insert("properties", properties);
@@ -998,13 +1006,13 @@ void JsonAPI::handleLedColorsCommand(const QJsonObject &message, const QString &
_ledStreamConnection = connect(_ledStreamTimer, &QTimer::timeout, this, [=]() {
emit streamLedcolorsUpdate(_currentLedValues);
},
Qt::UniqueConnection);
// start the timer
if (!_ledStreamTimer->isActive() || _ledStreamTimer->interval() != streaming_interval)
_ledStreamTimer->start(streaming_interval);
},
Qt::UniqueConnection);
// push once
_hyperion->update();
}
@@ -1452,6 +1460,148 @@ void JsonAPI::handleLedDeviceCommand(const QJsonObject &message, const QString &
}
}
void JsonAPI::handleInputSourceCommand(const QJsonObject& message, const QString& command, int tan)
{
DebugIf(verbose, _log, "message: [%s]", QString(QJsonDocument(message).toJson(QJsonDocument::Compact)).toUtf8().constData());
const QString& subc = message["subcommand"].toString().trimmed();
const QString& sourceType = message["sourceType"].toString().trimmed();
QString full_command = command + "-" + subc;
// TODO: Validate that source type is a valid one
/* if ( ! valid type )
{
sendErrorReply("Unknown device", full_command, tan);
}
else
*/ {
if (subc == "discover")
{
QJsonObject inputSourcesDiscovered;
inputSourcesDiscovered.insert("sourceType", sourceType);
QJsonArray videoInputs;
#if defined(ENABLE_V4L2) || defined(ENABLE_MF)
if (sourceType == "video" )
{
#if defined(ENABLE_MF)
MFGrabber* grabber = new MFGrabber();
#elif defined(ENABLE_V4L2)
V4L2Grabber* grabber = new V4L2Grabber();
#endif
QJsonObject params;
videoInputs = grabber->discover(params);
delete grabber;
}
else
#endif
{
DebugIf(verbose, _log, "sourceType: [%s]", QSTRING_CSTR(sourceType));
if (sourceType == "screen")
{
QJsonObject params;
QJsonObject device;
#ifdef ENABLE_QT
QtGrabber* qtgrabber = new QtGrabber();
device = qtgrabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete qtgrabber;
#endif
#ifdef ENABLE_DX
DirectXGrabber* dxgrabber = new DirectXGrabber();
device = dxgrabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete dxgrabber;
#endif
#ifdef ENABLE_X11
X11Grabber* x11Grabber = new X11Grabber();
device = x11Grabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete x11Grabber;
#endif
#ifdef ENABLE_XCB
XcbGrabber* xcbGrabber = new XcbGrabber();
device = xcbGrabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete xcbGrabber;
#endif
//Ignore FB for Amlogic, as it is embedded in the Amlogic grabber itself
#if defined(ENABLE_FB) && !defined(ENABLE_AMLOGIC)
FramebufferFrameGrabber* fbGrabber = new FramebufferFrameGrabber();
device = fbGrabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete fbGrabber;
#endif
#if defined(ENABLE_DISPMANX)
DispmanxFrameGrabber* dispmanx = new DispmanxFrameGrabber();
device = dispmanx->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete dispmanx;
#endif
#if defined(ENABLE_AMLOGIC)
AmlogicGrabber* amlGrabber = new AmlogicGrabber();
device = amlGrabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete amlGrabber;
#endif
#if defined(ENABLE_OSX)
OsxFrameGrabber* osxGrabber = new OsxFrameGrabber();
device = osxGrabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete osxGrabber;
#endif
}
}
inputSourcesDiscovered["video_sources"] = videoInputs;
DebugIf(verbose, _log, "response: [%s]", QString(QJsonDocument(inputSourcesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
sendSuccessDataReply(QJsonDocument(inputSourcesDiscovered), full_command, tan);
}
else
{
sendErrorReply("Unknown or missing subcommand", full_command, tan);
}
}
}
void JsonAPI::handleNotImplemented(const QString &command, int tan)
{
sendErrorReply("Command not implemented", command, tan);

View File

@@ -73,7 +73,7 @@ bool JsonCB::subscribeFor(const QString& type, bool unsubscribe)
if(type == "priorities-update")
{
if (unsubscribe)
disconnect(_prioMuxer,0 ,0 ,0);
disconnect(_prioMuxer, &PriorityMuxer::prioritiesChanged, this, &JsonCB::handlePriorityUpdate);
else
connect(_prioMuxer, &PriorityMuxer::prioritiesChanged, this, &JsonCB::handlePriorityUpdate, Qt::UniqueConnection);
}
@@ -156,6 +156,8 @@ void JsonCB::resetSubscriptions()
void JsonCB::setSubscriptionsTo(Hyperion* hyperion)
{
//std::cout << "JsonCB::setSubscriptions for instance [" << static_cast<int>(hyperion->getInstanceIndex()) << "] " << std::endl;
// get current subs
QStringList currSubs(getSubscribedCommands());
@@ -179,11 +181,13 @@ void JsonCB::doCallback(const QString& cmd, const QVariant& data)
QJsonObject obj;
obj["command"] = cmd;
if(static_cast<QMetaType::Type>(data.type()) == QMetaType::QJsonArray)
if(data.userType() == QMetaType::QJsonArray)
obj["data"] = data.toJsonArray();
else
obj["data"] = data.toJsonObject();
//std::cout << "JsonCB::doCallback | [" << static_cast<int>(_hyperion->getInstanceIndex()) << "] Send: [" << QJsonDocument(obj).toJson(QJsonDocument::Compact).toStdString() << "]" << std::endl;
emit newCallback(obj);
}
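The change above replaces the blanket disconnect(_prioMuxer, 0, 0, 0) with a disconnect of exactly the signal/slot pair that the subscription originally connected. A generic sketch of that pattern follows (class and member names are illustrative, not taken from Hyperion):

#include <QObject>
#include <QTimer>

// Toy subscriber: connect and disconnect always name the same signal/slot
// pair, so unsubscribing cannot accidentally sever other connections that
// happen to exist on the sender.
class TickSubscriber : public QObject
{
public:
    explicit TickSubscriber(QObject* parent = nullptr) : QObject(parent) {}

    void subscribe(QTimer* source)
    {
        // Qt::UniqueConnection keeps repeated subscribe() calls idempotent.
        connect(source, &QTimer::timeout, this, &TickSubscriber::handleTick,
                Qt::UniqueConnection);
    }

    void unsubscribe(QTimer* source)
    {
        // Targeted disconnect mirrors the connect call above.
        disconnect(source, &QTimer::timeout, this, &TickSubscriber::handleTick);
    }

private:
    void handleTick() { /* react to the update */ }
};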

View File

@@ -21,11 +21,19 @@
#include "HyperionConfig.h"
#include <hyperion/Hyperion.h>
#include <utils/QStringUtils.h>
#include <hyperion/PriorityMuxer.h>
// project includes
#include "BoblightClientConnection.h"
BoblightClientConnection::BoblightClientConnection(Hyperion* hyperion, QTcpSocket *socket, int priority)
// Constants
namespace {
const int BOBLIGHT_DEFAULT_PRIORITY = 128;
const int BOBLIGHT_MIN_PRIORITY = PriorityMuxer::FG_PRIORITY+1;
const int BOBLIGHT_MAX_PRIORITY = PriorityMuxer::BG_PRIORITY-1;
} //End of constants
BoblightClientConnection::BoblightClientConnection(Hyperion* hyperion, QTcpSocket* socket, int priority)
: QObject()
, _locale(QLocale::C)
, _socket(socket)
@@ -47,8 +55,8 @@ BoblightClientConnection::BoblightClientConnection(Hyperion* hyperion, QTcpSocke
BoblightClientConnection::~BoblightClientConnection()
{
// clear the current channel
if (_priority != 0 && _priority >= 128 && _priority < 254)
// clear the current channel
if (_priority != 0 && _priority >= BOBLIGHT_MIN_PRIORITY && _priority <= BOBLIGHT_MAX_PRIORITY)
_hyperion->clear(_priority);
delete _socket;
@@ -59,7 +67,7 @@ void BoblightClientConnection::readData()
_receiveBuffer.append(_socket->readAll());
int bytes = _receiveBuffer.indexOf('\n') + 1;
while(bytes > 0)
while (bytes > 0)
{
// create message string (strip the newline)
const QString message = readMessage(_receiveBuffer.data(), bytes);
@@ -71,7 +79,7 @@ void BoblightClientConnection::readData()
_receiveBuffer.remove(0, bytes);
// drop messages if the buffer is too full
if (_receiveBuffer.size() > 100*1024)
if (_receiveBuffer.size() > 100 * 1024)
{
Debug(_log, "server drops messages (buffer full)");
_receiveBuffer.clear();
@@ -82,9 +90,9 @@ void BoblightClientConnection::readData()
}
}
QString BoblightClientConnection::readMessage(const char *data, const size_t size) const
QString BoblightClientConnection::readMessage(const char* data, const size_t size) const
{
char *end = (char *)data + size - 1;
char* end = (char*)data + size - 1;
// Trim left
while (data < end && std::isspace(*data))
@@ -100,7 +108,7 @@ QString BoblightClientConnection::readMessage(const char *data, const size_t siz
// create message string (strip the newline)
const int len = end - data + 1;
const QString message = QString::fromLatin1(data, len);
//std::cout << bytes << ": \"" << message.toUtf8().constData() << "\"" << std::endl;
@@ -109,15 +117,15 @@ QString BoblightClientConnection::readMessage(const char *data, const size_t siz
void BoblightClientConnection::socketClosed()
{
// clear the current channel
if (_priority >= 128 && _priority < 254)
// clear the current channel
if (_priority >= BOBLIGHT_MIN_PRIORITY && _priority <= BOBLIGHT_MAX_PRIORITY)
_hyperion->clear(_priority);
emit connectionClosed(this);
}
void BoblightClientConnection::handleMessage(const QString & message)
void BoblightClientConnection::handleMessage(const QString& message)
{
//std::cout << "boblight message: " << message.toStdString() << std::endl;
const QVector<QStringRef> messageParts = QStringUtils::splitRef(message, ' ', QStringUtils::SplitBehavior::SkipEmptyParts);
@@ -166,16 +174,16 @@ void BoblightClientConnection::handleMessage(const QString & message)
if (rc1 && rc2 && rc3)
{
ColorRgb & rgb = _ledColors[ledIndex];
ColorRgb& rgb = _ledColors[ledIndex];
rgb.red = red;
rgb.green = green;
rgb.blue = blue;
if (_priority == 0 || _priority < 128 || _priority >= 254)
if (_priority == 0 || _priority < BOBLIGHT_MIN_PRIORITY || _priority > BOBLIGHT_MAX_PRIORITY)
return;
// send current color values to hyperion if this is the last led, assuming led values are sent in order of id
if (ledIndex == _ledColors.size() -1)
if (ledIndex == _ledColors.size() - 1)
{
_hyperion->setInput(_priority, _ledColors);
}
@@ -183,10 +191,10 @@ void BoblightClientConnection::handleMessage(const QString & message)
return;
}
}
else if(messageParts[3] == "speed" ||
messageParts[3] == "interpolation" ||
messageParts[3] == "use" ||
messageParts[3] == "singlechange")
else if (messageParts[3] == "speed" ||
messageParts[3] == "interpolation" ||
messageParts[3] == "use" ||
messageParts[3] == "singlechange")
{
// these message are ignored by Hyperion
return;
@@ -202,16 +210,17 @@ void BoblightClientConnection::handleMessage(const QString & message)
if (_priority != 0 && _hyperion->getPriorityInfo(_priority).componentId == hyperion::COMP_BOBLIGHTSERVER)
_hyperion->clear(_priority);
if (prio < 128 || prio >= 254)
if (prio < BOBLIGHT_MIN_PRIORITY || prio > BOBLIGHT_MAX_PRIORITY)
{
_priority = 128;
_priority = BOBLIGHT_DEFAULT_PRIORITY;
while (_hyperion->getActivePriorities().contains(_priority))
{
_priority += 1;
}
// warn against invalid priority
Warning(_log, "The priority %i is not in the priority range between 128 and 253. Priority %i is used instead.", prio, _priority);
Warning(_log, "The priority %i is not in the priority range of [%d-%d]. Priority %i is used instead.",
prio, BOBLIGHT_MIN_PRIORITY, BOBLIGHT_MAX_PRIORITY, _priority);
// register new priority (previously modified)
_hyperion->registerInput(_priority, hyperion::COMP_BOBLIGHTSERVER, QString("Boblight@%1").arg(_socket->peerAddress().toString()));
}
@@ -228,7 +237,7 @@ void BoblightClientConnection::handleMessage(const QString & message)
}
else if (messageParts[0] == "sync")
{
if ( _priority >= 128 && _priority < 254)
if (_priority >= BOBLIGHT_MIN_PRIORITY && _priority <= BOBLIGHT_MAX_PRIORITY)
_hyperion->setInput(_priority, _ledColors); // send current color values to hyperion
return;
@@ -248,9 +257,9 @@ const float ipows[] = {
1.0f / 100000.0f,
1.0f / 1000000.0f,
1.0f / 10000000.0f,
1.0f / 100000000.0f};
1.0f / 100000000.0f };
float BoblightClientConnection::parseFloat(const QStringRef& s, bool *ok) const
float BoblightClientConnection::parseFloat(const QStringRef& s, bool* ok) const
{
// We parse radix 10
const char MIN_DIGIT = '0';
@@ -331,7 +340,7 @@ float BoblightClientConnection::parseFloat(const QStringRef& s, bool *ok) const
return f;
}
unsigned BoblightClientConnection::parseUInt(const QStringRef& s, bool *ok) const
unsigned BoblightClientConnection::parseUInt(const QStringRef& s, bool* ok) const
{
// We parse radix 10
const char MIN_DIGIT = '0';
@@ -363,7 +372,7 @@ unsigned BoblightClientConnection::parseUInt(const QStringRef& s, bool *ok) cons
return n;
}
uint8_t BoblightClientConnection::parseByte(const QStringRef& s, bool *ok) const
uint8_t BoblightClientConnection::parseByte(const QStringRef& s, bool* ok) const
{
const int LO = 0;
const int HI = 255;
@@ -389,7 +398,7 @@ void BoblightClientConnection::sendLightMessage()
for (int i = 0; i < _hyperion->getLedCount(); ++i)
{
_imageProcessor->getScanParameters(i, h0, h1, v0, v1);
n = snprintf(buffer, sizeof(buffer), "light %03d scan %f %f %f %f\n", i, 100*v0, 100*v1, 100*h0, 100*h1);
n = snprintf(buffer, sizeof(buffer), "light %03d scan %f %f %f %f\n", i, 100 * v0, 100 * v1, 100 * h0, 100 * h1);
sendMessage(QByteArray(buffer, n));
}
}
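For context, the new constants tie the Boblight priority window to PriorityMuxer instead of the hard-coded 128..253 range. Below is a standalone sketch of the resulting clamping logic, assuming the usual Hyperion values FG_PRIORITY = 1 and BG_PRIORITY = 254 (both values are assumptions here, not taken from this diff):

#include <cstdio>

// Standalone illustration of the priority handling above.
namespace {
const int FG_PRIORITY = 1;                           // assumed PriorityMuxer::FG_PRIORITY
const int BG_PRIORITY = 254;                         // assumed PriorityMuxer::BG_PRIORITY
const int BOBLIGHT_DEFAULT_PRIORITY = 128;
const int BOBLIGHT_MIN_PRIORITY = FG_PRIORITY + 1;   // 2
const int BOBLIGHT_MAX_PRIORITY = BG_PRIORITY - 1;   // 253
}

// Returns the priority the server would actually use for a client request.
int resolveBoblightPriority(int requested)
{
    if (requested < BOBLIGHT_MIN_PRIORITY || requested > BOBLIGHT_MAX_PRIORITY)
    {
        std::printf("priority %d is outside [%d-%d], falling back to %d\n",
                    requested, BOBLIGHT_MIN_PRIORITY, BOBLIGHT_MAX_PRIORITY,
                    BOBLIGHT_DEFAULT_PRIORITY);
        return BOBLIGHT_DEFAULT_PRIORITY;
    }
    return requested;
}

In the real connection handler the fallback is additionally incremented until a priority not already in use is found, as shown in the hunk above.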

View File

@@ -12,17 +12,20 @@
// python utils
#include <python/PythonProgram.h>
const int Effect::ENDLESS = -1;
Effect::Effect(Hyperion *hyperion, int priority, int timeout, const QString &script, const QString &name, const QJsonObject &args, const QString &imageData)
: QThread()
, _hyperion(hyperion)
, _priority(priority)
, _timeout(timeout)
, _isEndless(timeout <= ENDLESS)
, _script(script)
, _name(name)
, _args(args)
, _imageData(imageData)
, _endTime(-1)
, _colors()
, _interupt(false)
, _imageSize(hyperion->getLedGridSize())
, _image(_imageSize,QImage::Format_ARGB32_Premultiplied)
{
@@ -47,6 +50,23 @@ Effect::~Effect()
_imageStack.clear();
}
bool Effect::isInterruptionRequested()
{
return _interupt || (!_isEndless && getRemaining() <= 0);
}
int Effect::getRemaining() const
{
// determine the timeout
int timeout = _timeout;
if (timeout >= 0)
{
timeout = static_cast<int>( _endTime - QDateTime::currentMSecsSinceEpoch());
}
return timeout;
}
void Effect::setModuleParameters()
{
// import the built-in Hyperion module

View File

@@ -19,8 +19,6 @@
EffectEngine::EffectEngine(Hyperion * hyperion)
: _hyperion(hyperion)
, _availableEffects()
, _activeEffects()
, _log(Logger::getInstance("EFFECTENGINE"))
, _effectFileHandler(EffectFileHandler::getInstance())
{
@@ -202,7 +200,7 @@ void EffectEngine::allChannelsCleared()
{
for (Effect * effect : _activeEffects)
{
if (effect->getPriority() != 254 && !effect->isInterruptionRequested())
if (effect->getPriority() != PriorityMuxer::BG_PRIORITY && !effect->isInterruptionRequested())
{
effect->requestInterruption();
}

View File

@@ -11,10 +11,10 @@
#include <QByteArray>
// createEffect helper
struct find_schema: std::unary_function<EffectSchema, bool>
struct find_schema : std::unary_function<EffectSchema, bool>
{
QString pyFile;
find_schema(QString pyFile):pyFile(pyFile) { }
find_schema(QString pyFile) :pyFile(std::move(pyFile)) { }
bool operator()(EffectSchema const& schema) const
{
return schema.pyFile == pyFile;
@@ -22,10 +22,10 @@ struct find_schema: std::unary_function<EffectSchema, bool>
};
// deleteEffect helper
struct find_effect: std::unary_function<EffectDefinition, bool>
struct find_effect : std::unary_function<EffectDefinition, bool>
{
QString effectName;
find_effect(QString effectName) :effectName(effectName) { }
find_effect(QString effectName) :effectName(std::move(effectName)) { }
bool operator()(EffectDefinition const& effectDefinition) const
{
return effectDefinition.name == effectName;
@@ -36,7 +36,6 @@ EffectFileHandler* EffectFileHandler::efhInstance;
EffectFileHandler::EffectFileHandler(const QString& rootPath, const QJsonDocument& effectConfig, QObject* parent)
: QObject(parent)
, _effectConfig()
, _log(Logger::getInstance("EFFECTFILES"))
, _rootPath(rootPath)
{
@@ -50,7 +49,7 @@ EffectFileHandler::EffectFileHandler(const QString& rootPath, const QJsonDocumen
void EffectFileHandler::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::EFFECTS)
if (type == settings::EFFECTS)
{
_effectConfig = config.object();
// update effects and schemas
@@ -67,15 +66,17 @@ QString EffectFileHandler::deleteEffect(const QString& effectName)
if (it != effectsDefinition.end())
{
QFileInfo effectConfigurationFile(it->file);
if (effectConfigurationFile.absoluteFilePath().mid(0, 1) != ":" )
if (!effectConfigurationFile.absoluteFilePath().startsWith(':'))
{
if (effectConfigurationFile.exists())
{
if ( (it->script == ":/effects/gif.py") && !it->args.value("image").toString("").isEmpty())
if ((it->script == ":/effects/gif.py") && !it->args.value("file").toString("").isEmpty())
{
QFileInfo effectImageFile(effectConfigurationFile.absolutePath() + "/" + it->args.value("image").toString());
if (effectImageFile.exists())
QFile::remove(effectImageFile.absoluteFilePath());
QFileInfo effectImageFile(it->args.value("file").toString());
if (effectImageFile.exists())
{
QFile::remove(effectImageFile.absoluteFilePath());
}
}
bool result = QFile::remove(effectConfigurationFile.absoluteFilePath());
@@ -83,15 +84,27 @@ QString EffectFileHandler::deleteEffect(const QString& effectName)
if (result)
{
updateEffects();
return "";
} else
resultMsg = "";
}
else
{
resultMsg = "Can't delete effect configuration file: " + effectConfigurationFile.absoluteFilePath() + ". Please check permissions";
} else
}
}
else
{
resultMsg = "Can't find effect configuration file: " + effectConfigurationFile.absoluteFilePath();
} else
}
}
else
{
resultMsg = "Can't delete internal effect: " + effectName;
} else
}
}
else
{
resultMsg = "Effect " + effectName + " not found";
}
return resultMsg;
}
@@ -101,17 +114,14 @@ QString EffectFileHandler::saveEffect(const QJsonObject& message)
QString resultMsg;
if (!message["args"].toObject().isEmpty())
{
QString scriptName;
(message["script"].toString().mid(0, 1) == ":" )
? scriptName = ":/effects//" + message["script"].toString().mid(1)
: scriptName = message["script"].toString();
QString scriptName = message["script"].toString();
std::list<EffectSchema> effectsSchemas = getEffectSchemas();
std::list<EffectSchema>::iterator it = std::find_if(effectsSchemas.begin(), effectsSchemas.end(), find_schema(scriptName));
if (it != effectsSchemas.end())
{
if(!JsonUtils::validate("EffectFileHandler", message["args"].toObject(), it->schemaFile, _log))
if (!JsonUtils::validate("EffectFileHandler", message["args"].toObject(), it->schemaFile, _log))
{
return "Error during arg validation against schema, please consult the Hyperion Log";
}
@@ -120,9 +130,9 @@ QString EffectFileHandler::saveEffect(const QJsonObject& message)
QJsonArray effectArray;
effectArray = _effectConfig["paths"].toArray();
if (effectArray.size() > 0)
if (!effectArray.empty())
{
if (message["name"].toString().trimmed().isEmpty() || message["name"].toString().trimmed().startsWith("."))
if (message["name"].toString().trimmed().isEmpty() || message["name"].toString().trimmed().startsWith(":"))
{
return "Can't save new effect. Effect name is empty or begins with a dot.";
}
@@ -138,41 +148,63 @@ QString EffectFileHandler::saveEffect(const QJsonObject& message)
if (iter != availableEffects.end())
{
newFileName.setFile(iter->file);
if (newFileName.absoluteFilePath().mid(0, 1) == ":")
if (newFileName.absoluteFilePath().startsWith(':'))
{
return "The effect name '" + message["name"].toString() + "' is assigned to an internal effect. Please rename your effekt.";
return "The effect name '" + message["name"].toString() + "' is assigned to an internal effect. Please rename your effect.";
}
} else
}
else
{
// TODO global special keyword handling
QString f = effectArray[0].toString().replace("$ROOT",_rootPath) + "/" + message["name"].toString().replace(QString(" "), QString("")) + QString(".json");
QString f = effectArray[0].toString().replace("$ROOT", _rootPath) + '/' + message["name"].toString().replace(QString(" "), QString("")) + QString(".json");
newFileName.setFile(f);
}
//TODO check if filename exist
if (!message["imageData"].toString("").isEmpty() && !message["args"].toObject().value("image").toString("").isEmpty())
if (!message["imageData"].toString("").isEmpty() && !message["args"].toObject().value("file").toString("").isEmpty())
{
QFileInfo imageFileName(effectArray[0].toString().replace("$ROOT",_rootPath) + "/" + message["args"].toObject().value("image").toString());
if(!FileUtils::writeFile(imageFileName.absoluteFilePath(), QByteArray::fromBase64(message["imageData"].toString("").toUtf8()), _log))
QJsonObject args = message["args"].toObject();
QString imageFilePath = effectArray[0].toString().replace("$ROOT", _rootPath) + '/' + args.value("file").toString();
QFileInfo imageFileName(imageFilePath);
if (!FileUtils::writeFile(imageFileName.absoluteFilePath(), QByteArray::fromBase64(message["imageData"].toString("").toUtf8()), _log))
{
return "Error while saving image file '" + message["args"].toObject().value("image").toString() + ", please check the Hyperion Log";
return "Error while saving image file '" + message["args"].toObject().value("file").toString() + ", please check the Hyperion Log";
}
//Update json with image file location
args["file"] = imageFilePath;
effectJson["args"] = args;
}
if(!JsonUtils::write(newFileName.absoluteFilePath(), effectJson, _log))
if (message["args"].toObject().value("imageSource").toString("") == "url" || message["args"].toObject().value("imageSource").toString("") == "file")
{
QJsonObject args = message["args"].toObject();
args.remove(args.value("imageSource").toString("") == "url" ? "file" : "url");
effectJson["args"] = args;
}
if (!JsonUtils::write(newFileName.absoluteFilePath(), effectJson, _log))
{
return "Error while saving effect, please check the Hyperion Log";
}
Info(_log, "Reload effect list");
updateEffects();
return "";
} else
resultMsg = "";
}
else
{
resultMsg = "Can't save new effect. Effect path empty";
} else
}
}
else
{
resultMsg = "Missing schema file for Python script " + message["script"].toString();
} else
}
}
else
{
resultMsg = "Missing or empty Object 'args'";
}
return resultMsg;
}
@@ -184,50 +216,56 @@ void EffectFileHandler::updateEffects()
_effectSchemas.clear();
// read all effects
const QJsonArray & paths = _effectConfig["paths"].toArray();
const QJsonArray & disabledEfx = _effectConfig["disable"].toArray();
const QJsonArray& paths = _effectConfig["paths"].toArray();
const QJsonArray& disabledEfx = _effectConfig["disable"].toArray();
QStringList efxPathList;
efxPathList << ":/effects/";
QStringList disableList;
for(auto p : paths)
for (const auto& p : paths)
{
efxPathList << p.toString().replace("$ROOT",_rootPath);
QString effectPath = p.toString();
if (!effectPath.endsWith('/'))
{
effectPath.append('/');
}
efxPathList << effectPath.replace("$ROOT", _rootPath);
}
for(auto efx : disabledEfx)
for (const auto& efx : disabledEfx)
{
disableList << efx.toString();
}
QMap<QString, EffectDefinition> availableEffects;
for (const QString & path : efxPathList )
for (const QString& path : qAsConst(efxPathList))
{
QDir directory(path);
if (!directory.exists())
{
if(directory.mkpath(path))
if (directory.mkpath(path))
{
Info(_log, "New Effect path \"%s\" created successfully", QSTRING_CSTR(path) );
Info(_log, "New Effect path \"%s\" created successfully", QSTRING_CSTR(path));
}
else
{
Warning(_log, "Failed to create Effect path \"%s\", please check permissions", QSTRING_CSTR(path) );
Warning(_log, "Failed to create Effect path \"%s\", please check permissions", QSTRING_CSTR(path));
}
}
else
{
int efxCount = 0;
QStringList filenames = directory.entryList(QStringList() << "*.json", QDir::Files, QDir::Name | QDir::IgnoreCase);
for (const QString & filename : filenames)
for (const QString& filename : qAsConst(filenames))
{
EffectDefinition def;
if (loadEffectDefinition(path, filename, def))
{
InfoIf(availableEffects.find(def.name) != availableEffects.end(), _log,
"effect overload effect '%s' is now taken from '%s'", QSTRING_CSTR(def.name), QSTRING_CSTR(path) );
"effect overload effect '%s' is now taken from '%s'", QSTRING_CSTR(def.name), QSTRING_CSTR(path));
if ( disableList.contains(def.name) )
if (disableList.contains(def.name))
{
Info(_log, "effect '%s' not loaded, because it is disabled in hyperion config", QSTRING_CSTR(def.name));
}
@@ -242,64 +280,65 @@ void EffectFileHandler::updateEffects()
// collect effect schemas
efxCount = 0;
directory.setPath(path.endsWith("/") ? (path + "schema/") : (path + "/schema/"));
QStringList pynames = directory.entryList(QStringList() << "*.json", QDir::Files, QDir::Name | QDir::IgnoreCase);
for (const QString & pyname : pynames)
QString schemaPath = path + "schema" + '/';
directory.setPath(schemaPath);
QStringList schemaFileNames = directory.entryList(QStringList() << "*.json", QDir::Files, QDir::Name | QDir::IgnoreCase);
for (const QString& schemaFileName : qAsConst(schemaFileNames))
{
EffectSchema pyEffect;
if (loadEffectSchema(path, pyname, pyEffect))
if (loadEffectSchema(path, directory.filePath(schemaFileName), pyEffect))
{
_effectSchemas.push_back(pyEffect);
efxCount++;
}
}
InfoIf(efxCount > 0, _log, "%d effect schemas loaded from directory %s", efxCount, QSTRING_CSTR((path + "schema/")));
InfoIf(efxCount > 0, _log, "%d effect schemas loaded from directory %s", efxCount, QSTRING_CSTR(schemaPath));
}
}
for(auto item : availableEffects)
for (const auto& item : qAsConst(availableEffects))
{
_availableEffects.push_back(item);
}
ErrorIf(_availableEffects.size()==0, _log, "no effects found, check your effect directories");
ErrorIf(_availableEffects.empty(), _log, "no effects found, check your effect directories");
emit effectListChanged();
}
bool EffectFileHandler::loadEffectDefinition(const QString &path, const QString &effectConfigFile, EffectDefinition & effectDefinition)
bool EffectFileHandler::loadEffectDefinition(const QString& path, const QString& effectConfigFile, EffectDefinition& effectDefinition)
{
QString fileName = path + QDir::separator() + effectConfigFile;
QString fileName = path + effectConfigFile;
// Read and parse the effect json config file
QJsonObject configEffect;
if(!JsonUtils::readFile(fileName, configEffect, _log))
if (!JsonUtils::readFile(fileName, configEffect, _log)) {
return false;
}
// validate effect config with effect schema(path)
if(!JsonUtils::validate(fileName, configEffect, ":effect-schema", _log))
if (!JsonUtils::validate(fileName, configEffect, ":effect-schema", _log)) {
return false;
}
// setup the definition
effectDefinition.file = fileName;
QJsonObject config = configEffect;
QString scriptName = config["script"].toString();
effectDefinition.name = config["name"].toString();
if (scriptName.isEmpty())
if (scriptName.isEmpty()) {
return false;
}
QFile fileInfo(scriptName);
if (scriptName.mid(0, 1) == ":" )
if (!fileInfo.exists())
{
(!fileInfo.exists())
? effectDefinition.script = ":/effects/"+scriptName.mid(1)
: effectDefinition.script = scriptName;
} else
effectDefinition.script = path + scriptName;
}
else
{
(!fileInfo.exists())
? effectDefinition.script = path + QDir::separator() + scriptName
: effectDefinition.script = scriptName;
effectDefinition.script = scriptName;
}
effectDefinition.args = config["args"].toObject();
@@ -307,31 +346,31 @@ bool EffectFileHandler::loadEffectDefinition(const QString &path, const QString
return true;
}
bool EffectFileHandler::loadEffectSchema(const QString &path, const QString &effectSchemaFile, EffectSchema & effectSchema)
bool EffectFileHandler::loadEffectSchema(const QString& path, const QString& schemaFilePath, EffectSchema& effectSchema)
{
QString fileName = path + "schema/" + QDir::separator() + effectSchemaFile;
// Read and parse the effect schema file
QJsonObject schemaEffect;
if(!JsonUtils::readFile(fileName, schemaEffect, _log))
return false;
// setup the definition
QString scriptName = schemaEffect["script"].toString();
effectSchema.schemaFile = fileName;
fileName = path + QDir::separator() + scriptName;
QFile pyFile(fileName);
if (scriptName.isEmpty() || !pyFile.open(QIODevice::ReadOnly))
if (!JsonUtils::readFile(schemaFilePath, schemaEffect, _log))
{
fileName = path + "schema/" + QDir::separator() + effectSchemaFile;
Error( _log, "Python script '%s' in effect schema '%s' could not be loaded", QSTRING_CSTR(scriptName), QSTRING_CSTR(fileName));
return false;
}
pyFile.close();
// setup the definition
QString scriptName = schemaEffect["script"].toString();
effectSchema.schemaFile = schemaFilePath;
effectSchema.pyFile = (scriptName.mid(0, 1) == ":" ) ? ":/effects/"+scriptName.mid(1) : path + QDir::separator() + scriptName;
QString scriptFilePath = path + scriptName;
QFile pyScriptFile(scriptFilePath);
if (scriptName.isEmpty() || !pyScriptFile.open(QIODevice::ReadOnly))
{
Error(_log, "Python script '%s' in effect schema '%s' could not be loaded", QSTRING_CSTR(scriptName), QSTRING_CSTR(schemaFilePath));
return false;
}
pyScriptFile.close();
effectSchema.pyFile = scriptFilePath;
effectSchema.pySchema = schemaEffect;
return true;

View File

@@ -12,6 +12,10 @@
#include <QDateTime>
#include <QImageReader>
#include <QBuffer>
#include <QUrl>
#include <QNetworkReply>
#include <QNetworkAccessManager>
#include <QEventLoop>
// Get the effect from the capsule
#define getEffect() static_cast<Effect*>((Effect*)PyCapsule_Import("hyperion.__effectObj", 0))
@@ -121,19 +125,6 @@ PyMethodDef EffectModule::effectMethods[] = {
PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
// determine the timeout
int timeout = getEffect()->_timeout;
if (timeout > 0)
{
timeout = getEffect()->_endTime - QDateTime::currentMSecsSinceEpoch();
// we are done if the time has passed
if (timeout <= 0) Py_RETURN_NONE;
}
// check the number of arguments
int argCount = PyTuple_Size(args);
if (argCount == 3)
@@ -144,7 +135,7 @@ PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
{
getEffect()->_colors.fill(color);
QVector<ColorRgb> _cQV = getEffect()->_colors;
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), timeout, false);
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), getEffect()->getRemaining(), false);
Py_RETURN_NONE;
}
return nullptr;
@@ -163,7 +154,7 @@ PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
char * data = PyByteArray_AS_STRING(bytearray);
memcpy(getEffect()->_colors.data(), data, length);
QVector<ColorRgb> _cQV = getEffect()->_colors;
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), timeout, false);
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), getEffect()->getRemaining(), false);
Py_RETURN_NONE;
}
else
@@ -192,19 +183,6 @@ PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapSetImage(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
// determine the timeout
int timeout = getEffect()->_timeout;
if (timeout > 0)
{
timeout = getEffect()->_endTime - QDateTime::currentMSecsSinceEpoch();
// we are done if the time has passed
if (timeout <= 0) Py_RETURN_NONE;
}
// bytearray of values
int width, height;
PyObject * bytearray = nullptr;
@@ -218,7 +196,7 @@ PyObject* EffectModule::wrapSetImage(PyObject *self, PyObject *args)
Image<ColorRgb> image(width, height);
char * data = PyByteArray_AS_STRING(bytearray);
memcpy(image.memptr(), data, length);
emit getEffect()->setInputImage(getEffect()->_priority, image, timeout, false);
emit getEffect()->setInputImage(getEffect()->_priority, image, getEffect()->getRemaining(), false);
Py_RETURN_NONE;
}
else
@@ -245,34 +223,57 @@ PyObject* EffectModule::wrapSetImage(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapGetImage(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
QString file;
QBuffer buffer;
QImageReader reader;
char *source;
int cropLeft = 0, cropTop = 0, cropRight = 0, cropBottom = 0;
bool grayscale = false;
if (getEffect()->_imageData.isEmpty())
{
Q_INIT_RESOURCE(EffectEngine);
char *source;
if(!PyArg_ParseTuple(args, "s", &source))
if(!PyArg_ParseTuple(args, "s|iiiii", &source, &cropLeft, &cropTop, &cropRight, &cropBottom, &grayscale))
{
PyErr_SetString(PyExc_TypeError, "String required");
return nullptr;
}
file = QString::fromUtf8(source);
const QUrl url = QUrl(source);
if (url.isValid())
{
QNetworkAccessManager *networkManager = new QNetworkAccessManager();
QNetworkReply * networkReply = networkManager->get(QNetworkRequest(url));
if (file.mid(0, 1) == ":")
file = ":/effects/"+file.mid(1);
QEventLoop eventLoop;
connect(networkReply, &QNetworkReply::finished, &eventLoop, &QEventLoop::quit);
eventLoop.exec();
reader.setDecideFormatFromContent(true);
reader.setFileName(file);
if (networkReply->error() == QNetworkReply::NoError)
{
buffer.setData(networkReply->readAll());
buffer.open(QBuffer::ReadOnly);
reader.setDecideFormatFromContent(true);
reader.setDevice(&buffer);
}
delete networkReply;
delete networkManager;
}
else
{
QString file = QString::fromUtf8(source);
if (file.mid(0, 1) == ":")
file = ":/effects/"+file.mid(1);
reader.setDecideFormatFromContent(true);
reader.setFileName(file);
}
}
else
{
PyArg_ParseTuple(args, "|siiiii", &source, &cropLeft, &cropTop, &cropRight, &cropBottom, &grayscale);
buffer.setData(QByteArray::fromBase64(getEffect()->_imageData.toUtf8()));
buffer.open(QBuffer::ReadOnly);
reader.setDecideFormatFromContent(true);
@@ -289,19 +290,33 @@ PyObject* EffectModule::wrapGetImage(PyObject *self, PyObject *args)
if (reader.canRead())
{
QImage qimage = reader.read();
int width = qimage.width();
int height = qimage.height();
if (cropLeft > 0 || cropTop > 0 || cropRight > 0 || cropBottom > 0)
{
if (cropLeft + cropRight >= width || cropTop + cropBottom >= height)
{
QString errorStr = QString("Rejecting invalid crop values: left: %1, right: %2, top: %3, bottom: %4, higher than height/width %5/%6").arg(cropLeft).arg(cropRight).arg(cropTop).arg(cropBottom).arg(height).arg(width);
PyErr_SetString(PyExc_RuntimeError, qPrintable(errorStr));
return nullptr;
}
qimage = qimage.copy(cropLeft, cropTop, width - cropLeft - cropRight, height - cropTop - cropBottom);
width = qimage.width();
height = qimage.height();
}
QByteArray binaryImage;
for (int i = 0; i<height; ++i)
for (int i = 0; i<height; i++)
{
const QRgb *scanline = reinterpret_cast<const QRgb *>(qimage.scanLine(i));
for (int j = 0; j< width; ++j)
const QRgb *end = scanline + qimage.width();
for (; scanline != end; scanline++)
{
binaryImage.append((char) qRed(scanline[j]));
binaryImage.append((char) qGreen(scanline[j]));
binaryImage.append((char) qBlue(scanline[j]));
binaryImage.append(!grayscale ? (char) qRed(scanline[0]) : (char) qGray(scanline[0]));
binaryImage.append(!grayscale ? (char) qGreen(scanline[0]) : (char) qGray(scanline[0]));
binaryImage.append(!grayscale ? (char) qBlue(scanline[0]) : (char) qGray(scanline[0]));
}
}
PyList_SET_ITEM(result, i, Py_BuildValue("{s:i,s:i,s:O}", "imageWidth", width, "imageHeight", height, "imageData", PyByteArray_FromStringAndSize(binaryImage.constData(),binaryImage.size())));
@@ -312,6 +327,7 @@ PyObject* EffectModule::wrapGetImage(PyObject *self, PyObject *args)
return nullptr;
}
}
return result;
}
else
@@ -329,19 +345,6 @@ PyObject* EffectModule::wrapAbort(PyObject *self, PyObject *)
PyObject* EffectModule::wrapImageShow(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
// determine the timeout
int timeout = getEffect()->_timeout;
if (timeout > 0)
{
timeout = getEffect()->_endTime - QDateTime::currentMSecsSinceEpoch();
// we are done if the time has passed
if (timeout <= 0) Py_RETURN_NONE;
}
int argCount = PyTuple_Size(args);
int imgId = -1;
bool argsOk = (argCount == 0);
@@ -375,16 +378,13 @@ PyObject* EffectModule::wrapImageShow(PyObject *self, PyObject *args)
}
memcpy(image.memptr(), binaryImage.data(), binaryImage.size());
emit getEffect()->setInputImage(getEffect()->_priority, image, timeout, false);
emit getEffect()->setInputImage(getEffect()->_priority, image, getEffect()->getRemaining(), false);
return Py_BuildValue("");
}
PyObject* EffectModule::wrapImageLinearGradient(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
PyObject * bytearray = nullptr;
int startRX = 0;
@@ -452,9 +452,6 @@ PyObject* EffectModule::wrapImageLinearGradient(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageConicalGradient(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
PyObject * bytearray = nullptr;
int centerX, centerY, angle;
@@ -521,9 +518,6 @@ PyObject* EffectModule::wrapImageConicalGradient(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageRadialGradient(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
PyObject * bytearray = nullptr;
int centerX, centerY, radius, focalX, focalY, focalRadius, spread;
@@ -602,9 +596,6 @@ PyObject* EffectModule::wrapImageRadialGradient(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawPolygon(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
PyObject * bytearray = nullptr;
int argCount = PyTuple_Size(args);
@@ -663,9 +654,6 @@ PyObject* EffectModule::wrapImageDrawPolygon(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawPie(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
PyObject * bytearray = nullptr;
QString brush;
@@ -760,9 +748,6 @@ PyObject* EffectModule::wrapImageDrawPie(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageSolidFill(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b;
int a = 255;
@@ -802,9 +787,6 @@ PyObject* EffectModule::wrapImageSolidFill(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawLine(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b;
int a = 255;
@@ -843,9 +825,6 @@ PyObject* EffectModule::wrapImageDrawLine(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawPoint(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b, x, y;
int a = 255;
@@ -879,9 +858,6 @@ PyObject* EffectModule::wrapImageDrawPoint(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawRect(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b;
int a = 255;
@@ -921,9 +897,6 @@ PyObject* EffectModule::wrapImageDrawRect(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageSetPixel(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b, x, y;
@@ -939,9 +912,6 @@ PyObject* EffectModule::wrapImageSetPixel(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageGetPixel(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int x, y;
@@ -955,9 +925,6 @@ PyObject* EffectModule::wrapImageGetPixel(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageSave(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
QImage img(getEffect()->_image.copy());
getEffect()->_imageStack.append(img);
@@ -966,9 +933,6 @@ PyObject* EffectModule::wrapImageSave(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageMinSize(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int w, h;
int width = getEffect()->_imageSize.width();
@@ -991,25 +955,16 @@ PyObject* EffectModule::wrapImageMinSize(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageWidth(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
return Py_BuildValue("i", getEffect()->_imageSize.width());
}
PyObject* EffectModule::wrapImageHeight(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
return Py_BuildValue("i", getEffect()->_imageSize.height());
}
PyObject* EffectModule::wrapImageCRotate(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int angle;
@@ -1024,9 +979,6 @@ PyObject* EffectModule::wrapImageCRotate(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageCOffset(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int offsetX = 0;
int offsetY = 0;
int argCount = PyTuple_Size(args);
@@ -1042,9 +994,6 @@ PyObject* EffectModule::wrapImageCOffset(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageCShear(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int sh,sv;
int argCount = PyTuple_Size(args);
@@ -1058,9 +1007,6 @@ PyObject* EffectModule::wrapImageCShear(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageResetT(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
getEffect()->_painter->resetTransform();
Py_RETURN_NONE;
}

View File

@@ -12,24 +12,24 @@ endif (ENABLE_FB)
if (ENABLE_OSX)
add_subdirectory(osx)
endif()
endif(ENABLE_OSX)
if (ENABLE_V4L2)
add_subdirectory(v4l2)
endif (ENABLE_V4L2)
if (ENABLE_V4L2 OR ENABLE_MF)
add_subdirectory(video)
endif ()
if (ENABLE_X11)
add_subdirectory(x11)
endif()
endif(ENABLE_X11)
if (ENABLE_XCB)
add_subdirectory(xcb)
endif()
endif(ENABLE_XCB)
if (ENABLE_QT)
add_subdirectory(qt)
endif()
endif(ENABLE_QT)
if (ENABLE_DX)
add_subdirectory(directx)
endif()
endif(ENABLE_DX)

View File

@@ -2,7 +2,6 @@
#include <algorithm>
#include <cassert>
#include <iostream>
#include <QFile>
// Linux includes
#include <errno.h>
@@ -12,155 +11,325 @@
#include <sys/stat.h>
#include <sys/types.h>
// qt
#include <QFile>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
#include <QSize>
// Local includes
#include <utils/Logger.h>
#include <grabber/AmlogicGrabber.h>
#include "Amvideocap.h"
#define VIDEO_DEVICE "/dev/amvideo"
#define CAPTURE_DEVICE "/dev/amvideocap0"
// Constants
namespace {
const bool verbose = false;
AmlogicGrabber::AmlogicGrabber(unsigned width, unsigned height)
: Grabber("AMLOGICGRABBER", qMax(160u, width), qMax(160u, height)) // Minimum required width or height is 160
, _captureDev(-1)
, _videoDev(-1)
, _lastError(0)
, _fbGrabber("/dev/fb0",width,height)
, _grabbingModeNotification(0)
const char DEFAULT_FB_DEVICE[] = "/dev/fb0";
const char DEFAULT_VIDEO_DEVICE[] = "/dev/amvideo";
const char DEFAULT_CAPTURE_DEVICE[] = "/dev/amvideocap0";
const int AMVIDEOCAP_WAIT_MAX_MS = 40;
const int AMVIDEOCAP_DEFAULT_RATE_HZ = 25;
} //End of constants
AmlogicGrabber::AmlogicGrabber()
: Grabber("AMLOGICGRABBER") // Minimum required width or height is 160
, _captureDev(-1)
, _videoDev(-1)
, _lastError(0)
, _fbGrabber(DEFAULT_FB_DEVICE)
, _grabbingModeNotification(0)
{
Debug(_log, "constructed(%d x %d), grabber device: %s",_width,_height, CAPTURE_DEVICE);
_image_bgr.resize(_width, _height);
_bytesToRead = _image_bgr.size();
_image_ptr = _image_bgr.memptr();
_useImageResampler = true;
}
AmlogicGrabber::~AmlogicGrabber()
{
closeDev(_captureDev);
closeDev(_videoDev);
closeDevice(_captureDev);
closeDevice(_videoDev);
}
bool AmlogicGrabber::openDev(int &fd, const char* dev)
bool AmlogicGrabber::setupScreen()
{
bool rc (false);
QSize screenSize = _fbGrabber.getScreenSize(DEFAULT_FB_DEVICE);
if ( !screenSize.isEmpty() )
{
if (setWidthHeight(screenSize.width(), screenSize.height()))
{
rc = _fbGrabber.setupScreen();
}
}
return rc;
}
bool AmlogicGrabber::openDevice(int &fd, const char* dev)
{
bool rc = true;
if (fd<0)
{
fd = open(dev, O_RDWR);
fd = ::open(dev, O_RDWR);
if ( fd < 0)
{
rc = false;
}
}
return fd >= 0;
return rc;
}
void AmlogicGrabber::closeDev(int &fd)
void AmlogicGrabber::closeDevice(int &fd)
{
if (fd >= 0)
{
close(fd);
::close(fd);
fd = -1;
}
}
bool AmlogicGrabber::isVideoPlaying()
{
if(!QFile::exists(VIDEO_DEVICE)) return false;
int videoDisabled = 1;
if (!openDev(_videoDev, VIDEO_DEVICE))
bool rc = false;
if(QFile::exists(DEFAULT_VIDEO_DEVICE))
{
Error(_log, "Failed to open video device(%s): %d - %s", VIDEO_DEVICE, errno, strerror(errno));
return false;
}
else
{
// Check the video disabled flag
if(ioctl(_videoDev, AMSTREAM_IOC_GET_VIDEO_DISABLE, &videoDisabled) < 0)
int videoDisabled = 1;
if (!openDevice(_videoDev, DEFAULT_VIDEO_DEVICE))
{
Error(_log, "Failed to retrieve video state from device: %d - %s", errno, strerror(errno));
closeDev(_videoDev);
return false;
Error(_log, "Failed to open video device(%s): %d - %s", DEFAULT_VIDEO_DEVICE, errno, strerror(errno));
}
else
{
// Check the video disabled flag
if(ioctl(_videoDev, AMSTREAM_IOC_GET_VIDEO_DISABLE, &videoDisabled) < 0)
{
Error(_log, "Failed to retrieve video state from device: %d - %s", errno, strerror(errno));
closeDevice(_videoDev);
}
else
{
if ( videoDisabled == 0 )
{
rc = true;
}
}
}
}
return videoDisabled == 0;
}
return rc;
}
int AmlogicGrabber::grabFrame(Image<ColorRgb> & image)
{
if (!_enabled) return 0;
// Make sure video is playing, else there is nothing to grab
if (isVideoPlaying())
int rc = 0;
if (_isEnabled && !_isDeviceInError)
{
if (_grabbingModeNotification!=1)
// Make sure video is playing, else there is nothing to grab
if (isVideoPlaying())
{
Info(_log, "VPU mode");
_grabbingModeNotification = 1;
_lastError = 0;
}
if (_grabbingModeNotification!=1)
{
Info(_log, "Switch to VPU capture mode");
_grabbingModeNotification = 1;
_lastError = 0;
}
if (grabFrame_amvideocap(image) < 0)
closeDev(_captureDev);
}
else
{
if (_grabbingModeNotification!=2)
if (grabFrame_amvideocap(image) < 0) {
closeDevice(_captureDev);
rc = -1;
}
}
else
{
Info( _log, "FB mode");
_grabbingModeNotification = 2;
_lastError = 0;
if (_grabbingModeNotification!=2)
{
Info( _log, "Switch to Framebuffer capture mode");
_grabbingModeNotification = 2;
_lastError = 0;
}
rc = _fbGrabber.grabFrame(image);
}
_fbGrabber.grabFrame(image);
usleep(50 * 1000);
}
return 0;
return rc;
}
int AmlogicGrabber::grabFrame_amvideocap(Image<ColorRgb> & image)
{
int rc = 0;
// If the device is not open, attempt to open it
if (_captureDev < 0)
{
if (! openDev(_captureDev, CAPTURE_DEVICE))
if (! openDevice(_captureDev, DEFAULT_CAPTURE_DEVICE))
{
ErrorIf( _lastError != 1, _log,"Failed to open the AMLOGIC device (%d - %s):", errno, strerror(errno));
_lastError = 1;
return -1;
rc = -1;
return rc;
}
}
long r1 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH, _width);
long r2 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT, _height);
long r3 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_AT_FLAGS, CAP_FLAG_AT_END);
long r1 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH, _width);
long r2 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT, _height);
long r3 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_AT_FLAGS, CAP_FLAG_AT_END);
long r4 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WAIT_MAX_MS, AMVIDEOCAP_WAIT_MAX_MS);
if (r1<0 || r2<0 || r3<0 || _height==0 || _width==0)
if (r1<0 || r2<0 || r3<0 || r4<0 || _height==0 || _width==0)
{
ErrorIf(_lastError != 2,_log,"Failed to configure capture device (%d - %s)", errno, strerror(errno));
_lastError = 2;
rc = -1;
}
else
{
int linelen = ((_width + 31) & ~31) * 3;
size_t _bytesToRead = linelen * _height;
// Read the snapshot into the memory
ssize_t bytesRead = pread(_captureDev, _image_ptr, _bytesToRead, 0);
if ( bytesRead < 0 && errno != EAGAIN && errno > 0 )
{
ErrorIf(_lastError != 2,_log,"Failed to configure capture device (%d - %s)", errno, strerror(errno));
_lastError = 2;
return -1;
ErrorIf(_lastError != 3, _log,"Capture frame failed - Retrying. Error [%d] - %s", errno, strerror(errno));
_lastError = 3;
rc = -1;
}
else
{
if (bytesRead != -1 && static_cast<ssize_t>(_bytesToRead) != bytesRead)
{
// Read of snapshot failed
ErrorIf(_lastError != 4, _log,"Capture failed to grab entire image [bytesToRead(%d) != bytesRead(%d)]", _bytesToRead, bytesRead);
_lastError = 4;
rc = -1;
}
else {
//If bytesRead == -1 but errno reports no error, EAGAIN or ENODATA, return the last image to cover the video-pausing scenario
// EAGAIN : // 11 - Resource temporarily unavailable
// ENODATA: // 61 - No data available
_imageResampler.processImage(static_cast<uint8_t*>(_image_ptr),
_width,
_height,
linelen,
PixelFormat::BGR24, image);
_lastError = 0;
rc = 0;
}
}
}
return rc;
}
QJsonObject AmlogicGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if(QFile::exists(DEFAULT_VIDEO_DEVICE) && QFile::exists(DEFAULT_CAPTURE_DEVICE) )
{
QJsonArray video_inputs;
QSize screenSize = _fbGrabber.getScreenSize();
if ( !screenSize.isEmpty() )
{
int fbIdx = _fbGrabber.getPath().rightRef(1).toInt();
DebugIf(verbose, _log, "FB device [%s] found with resolution: %dx%d", QSTRING_CSTR(_fbGrabber.getPath()), screenSize.width(), screenSize.height());
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30};
QJsonObject in;
QString displayName;
displayName = QString("Display%1").arg(fbIdx);
in["name"] = displayName;
in["inputIdx"] = fbIdx;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
resolution["width"] = screenSize.width();
resolution["height"] = screenSize.height();
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
if (!video_inputs.isEmpty())
{
inputsDiscovered["device"] = "amlogic";
inputsDiscovered["device_name"] = "AmLogic";
inputsDiscovered["type"] = "screen";
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = AMVIDEOCAP_DEFAULT_RATE_HZ;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
}
// Read the snapshot into the memory
ssize_t bytesRead = pread(_captureDev, _image_ptr, _bytesToRead, 0);
if (bytesRead < 0)
if (inputsDiscovered.isEmpty())
{
ErrorIf(_lastError != 3, _log,"Read of device failed: %d - %s", errno, strerror(errno));
_lastError = 3;
return -1;
}
else if (_bytesToRead != bytesRead)
{
// Read of snapshot failed
ErrorIf(_lastError != 4, _log,"Capture failed to grab entire image [bytesToRead(%d) != bytesRead(%d)]", _bytesToRead, bytesRead);
_lastError = 4;
return -1;
DebugIf(verbose, _log, "No displays found to capture from!");
}
_useImageResampler = true;
_imageResampler.processImage((const uint8_t*)_image_ptr, _width, _height, (_width << 1) + _width, PixelFormat::BGR24, image);
_lastError = 0;
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return 0;
return inputsDiscovered;
}
void AmlogicGrabber::setVideoMode(VideoMode mode)
{
Grabber::setVideoMode(mode);
_fbGrabber.setVideoMode(mode);
}
bool AmlogicGrabber::setPixelDecimation(int pixelDecimation)
{
return ( Grabber::setPixelDecimation( pixelDecimation) &&
_fbGrabber.setPixelDecimation( pixelDecimation));
}
void AmlogicGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
_fbGrabber.setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
bool AmlogicGrabber::setWidthHeight(int width, int height)
{
bool rc (false);
if ( Grabber::setWidthHeight(width, height) )
{
_image_bgr.resize(static_cast<unsigned>(width), static_cast<unsigned>(height));
_width = width;
_height = height;
_bytesToRead = _image_bgr.size();
_image_ptr = _image_bgr.memptr();
rc = _fbGrabber.setWidthHeight(width, height);
}
return rc;
}
bool AmlogicGrabber::setFramerate(int fps)
{
return (Grabber::setFramerate(fps) &&
_fbGrabber.setFramerate(fps));
}

View File

@@ -1,9 +1,11 @@
#include <grabber/AmlogicWrapper.h>
AmlogicWrapper::AmlogicWrapper(unsigned grabWidth, unsigned grabHeight)
: GrabberWrapper("AmLogic", &_grabber, grabWidth, grabHeight)
, _grabber(grabWidth, grabHeight)
{}
AmlogicWrapper::AmlogicWrapper(int pixelDecimation, int updateRate_Hz)
: GrabberWrapper("Amlogic", &_grabber, updateRate_Hz)
, _grabber()
{
_grabber.setPixelDecimation(pixelDecimation);
}
void AmlogicWrapper::action()
{

View File

@@ -11,10 +11,32 @@
#define CAP_FLAG_AT_TIME_WINDOW 1
#define CAP_FLAG_AT_END 2
// #define AMVIDEOCAP_IOW_SET_WANTFRAME_FORMAT _IOW(AMVIDEOCAP_IOC_MAGIC, 0x01, int)
#define AMVIDEOCAP_IOW_SET_WANTFRAME_FORMAT _IOW(AMVIDEOCAP_IOC_MAGIC, 0x01, int)
#define AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH _IOW(AMVIDEOCAP_IOC_MAGIC, 0x02, int)
#define AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT _IOW(AMVIDEOCAP_IOC_MAGIC, 0x03, int)
#define AMVIDEOCAP_IOW_SET_WANTFRAME_TIMESTAMP_MS _IOW(AMVIDEOCAP_IOC_MAGIC, 0x04, unsigned long long)
#define AMVIDEOCAP_IOW_SET_WANTFRAME_WAIT_MAX_MS _IOW(AMVIDEOCAP_IOC_MAGIC, 0x05, unsigned long long)
#define AMVIDEOCAP_IOW_SET_WANTFRAME_AT_FLAGS _IOW(AMVIDEOCAP_IOC_MAGIC, 0x06, int)
#define _A_M 'S'
#define AMSTREAM_IOC_GET_VIDEO_DISABLE _IOR((_A_M), 0x48, int)
#define AMVIDEOCAP_IOR_GET_FRAME_FORMAT _IOR(AMVIDEOCAP_IOC_MAGIC, 0x10, int)
#define AMVIDEOCAP_IOR_GET_FRAME_WIDTH _IOR(AMVIDEOCAP_IOC_MAGIC, 0x11, int)
#define AMVIDEOCAP_IOR_GET_FRAME_HEIGHT _IOR(AMVIDEOCAP_IOC_MAGIC, 0x12, int)
#define AMVIDEOCAP_IOR_GET_FRAME_TIMESTAMP_MS _IOR(AMVIDEOCAP_IOC_MAGIC, 0x13, int)
#define AMVIDEOCAP_IOR_GET_SRCFRAME_FORMAT _IOR(AMVIDEOCAP_IOC_MAGIC, 0x20, int)
#define AMVIDEOCAP_IOR_GET_SRCFRAME_WIDTH _IOR(AMVIDEOCAP_IOC_MAGIC, 0x21, int)
#define AMVIDEOCAP_IOR_GET_SRCFRAME_HEIGHT _IOR(AMVIDEOCAP_IOC_MAGIC, 0x22, int)
#define AMVIDEOCAP_IOR_GET_STATE _IOR(AMVIDEOCAP_IOC_MAGIC, 0x31, int)
#define AMVIDEOCAP_IOW_SET_START_CAPTURE _IOW(AMVIDEOCAP_IOC_MAGIC, 0x32, int)
#define AMVIDEOCAP_IOW_SET_CANCEL_CAPTURE _IOW(AMVIDEOCAP_IOC_MAGIC, 0x33, int)
#define AMSTREAM_IOC_MAGIC 'S'
#define AMSTREAM_IOC_GET_VIDEO_DISABLE _IOR((AMSTREAM_IOC_MAGIC), 0x48, int)
enum amvideocap_state{
AMVIDEOCAP_STATE_INIT=0,
AMVIDEOCAP_STATE_ON_CAPTURE=200,
AMVIDEOCAP_STATE_FINISHED_CAPTURE=300,
AMVIDEOCAP_STATE_ERROR=0xffff,
};
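
These ioctls map onto a short capture sequence: open the amvideocap device, request the frame geometry, flags and maximum wait time, then pread() the padded BGR24 data, exactly as grabFrame_amvideocap() does above. A standalone sketch, assuming a header providing the AMVIDEOCAP_IOW_* macros above is on the include path:

#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <cstdint>
#include <vector>
#include "Amvideocap.h" // assumed local header providing the AMVIDEOCAP_IOW_* macros above

// Grab a single BGR24 frame of the requested size; returns the number of bytes read or -1.
static ssize_t captureAmlogicFrame(int width, int height, std::vector<uint8_t>& buffer)
{
	int fd = ::open("/dev/amvideocap0", O_RDWR);
	if (fd < 0)
	{
		return -1;
	}

	// Configure the wanted frame geometry, capture point and maximum wait time
	ioctl(fd, AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH, width);
	ioctl(fd, AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT, height);
	ioctl(fd, AMVIDEOCAP_IOW_SET_WANTFRAME_AT_FLAGS, CAP_FLAG_AT_END);
	ioctl(fd, AMVIDEOCAP_IOW_SET_WANTFRAME_WAIT_MAX_MS, 40);

	// The driver pads each line to a 32-pixel boundary at 3 bytes per pixel (BGR24)
	const int lineLength = ((width + 31) & ~31) * 3;
	buffer.resize(static_cast<size_t>(lineLength) * static_cast<size_t>(height));

	ssize_t bytesRead = pread(fd, buffer.data(), buffer.size(), 0);
	::close(fd);
	return bytesRead;
}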

View File

@@ -1,12 +1,17 @@
#include <windows.h>
#include <grabber/DirectXGrabber.h>
#include <QImage>
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib,"d3dx9.lib")
DirectXGrabber::DirectXGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display)
: Grabber("DXGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
, _pixelDecimation(pixelDecimation)
// Constants
namespace {
const bool verbose = true;
} //End of constants
DirectXGrabber::DirectXGrabber(int display, int cropLeft, int cropRight, int cropTop, int cropBottom)
: Grabber("DXGRABBER", cropLeft, cropRight, cropTop, cropBottom)
, _display(unsigned(display))
, _displayWidth(0)
, _displayHeight(0)
, _srcRect(0)
@@ -14,8 +19,6 @@ DirectXGrabber::DirectXGrabber(int cropLeft, int cropRight, int cropTop, int cro
, _device(nullptr)
, _surface(nullptr)
{
// init
setupDisplay();
}
DirectXGrabber::~DirectXGrabber()
@@ -43,6 +46,8 @@ bool DirectXGrabber::setupDisplay()
D3DDISPLAYMODE ddm;
D3DPRESENT_PARAMETERS d3dpp;
HMONITOR hMonitor = nullptr;
MONITORINFO monitorInfo = { 0 };
if ((_d3d9 = Direct3DCreate9(D3D_SDK_VERSION)) == nullptr)
{
@@ -50,7 +55,17 @@ bool DirectXGrabber::setupDisplay()
return false;
}
if (FAILED(_d3d9->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &ddm)))
SecureZeroMemory(&monitorInfo, sizeof(monitorInfo));
monitorInfo.cbSize = sizeof(MONITORINFO);
hMonitor = _d3d9->GetAdapterMonitor(_display);
if (hMonitor == nullptr || GetMonitorInfo(hMonitor, &monitorInfo) == FALSE)
{
Info(_log, "Specified display %d is not available. Primary display %d is used", _display, D3DADAPTER_DEFAULT);
_display = D3DADAPTER_DEFAULT;
}
if (FAILED(_d3d9->GetAdapterDisplayMode(_display, &ddm)))
{
Error(_log, "Failed to get current display mode");
return false;
@@ -69,7 +84,7 @@ bool DirectXGrabber::setupDisplay()
d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
d3dpp.FullScreen_RefreshRateInHz = D3DPRESENT_RATE_DEFAULT;
if (FAILED(_d3d9->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, nullptr, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &_device)))
if (FAILED(_d3d9->CreateDevice(_display, D3DDEVTYPE_HAL, nullptr, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &_device)))
{
Error(_log, "CreateDevice failed");
return false;
@@ -127,15 +142,24 @@ bool DirectXGrabber::setupDisplay()
int DirectXGrabber::grabFrame(Image<ColorRgb> & image)
{
if (!_enabled)
if (!_isEnabled)
{
qDebug() << "AUS";
return 0;
}
if (_device == nullptr)
{
// reinit, this will disable capture on failure
bool result = setupDisplay();
setEnabled(result);
return -1;
}
if (FAILED(_device->GetFrontBufferData(0, _surface)))
{
// reinit, this will disable capture on failure
Error(_log, "Unable to get Buffer Surface Data");
setEnabled(setupDisplay());
return -1;
return 0;
}
D3DXLoadSurfaceFromSurface(_surfaceDest, nullptr, nullptr, _surface, nullptr, _srcRect, D3DX_DEFAULT, 0);
@@ -147,12 +171,11 @@ int DirectXGrabber::grabFrame(Image<ColorRgb> & image)
return 0;
}
memcpy(image.memptr(), lockedRect.pBits, _width * _height * 3);
for(int i=0 ; i < _height ; i++)
memcpy((unsigned char*)image.memptr() + i * _width * 3, (unsigned char*)lockedRect.pBits + i * lockedRect.Pitch, _width * 3);
for (int idx = 0; idx < _width * _height; idx++)
{
const ColorRgb & color = image.memptr()[idx];
image.memptr()[idx] = ColorRgb{color.blue, color.green, color.red};
}
image.memptr()[idx] = ColorRgb{image.memptr()[idx].blue, image.memptr()[idx].green, image.memptr()[idx].red};
if (FAILED(_surfaceDest->UnlockRect()))
{
@@ -169,13 +192,100 @@ void DirectXGrabber::setVideoMode(VideoMode mode)
setupDisplay();
}
void DirectXGrabber::setPixelDecimation(int pixelDecimation)
bool DirectXGrabber::setPixelDecimation(int pixelDecimation)
{
_pixelDecimation = pixelDecimation;
if(Grabber::setPixelDecimation(pixelDecimation))
return setupDisplay();
return false;
}
void DirectXGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
void DirectXGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
setupDisplay();
}
bool DirectXGrabber::setDisplayIndex(int index)
{
bool rc (true);
if(_display != unsigned(index))
{
_display = unsigned(index);
rc = setupDisplay();
}
return rc;
}
QJsonObject DirectXGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if ((_d3d9 = Direct3DCreate9(D3D_SDK_VERSION)) != nullptr)
{
int adapterCount = (int)_d3d9->GetAdapterCount();
if(adapterCount > 0)
{
inputsDiscovered["device"] = "dx";
inputsDiscovered["device_name"] = "DX";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
for(int adapter = 0; adapter < adapterCount; adapter++)
{
QJsonObject in;
in["inputIdx"] = adapter;
D3DADAPTER_IDENTIFIER9 identifier;
_d3d9->GetAdapterIdentifier(adapter, D3DENUM_WHQL_LEVEL, &identifier);
QString name = identifier.DeviceName;
int pos = name.lastIndexOf('\\');
if (pos != -1)
name = name.right(name.length()-pos-1);
in["name"] = name;
D3DDISPLAYMODE ddm;
_d3d9->GetAdapterDisplayMode(adapter, &ddm);
QJsonArray formats, resolutionArray;
QJsonObject format, resolution;
resolution["width"] = (int)ddm.Width;
resolution["height"] = (int)ddm.Height;
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = _fps;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
else
{
DebugIf(verbose, _log, "No displays found to capture from!");
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
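
For reference, the adapter enumeration used by discover() can be exercised on its own with plain Direct3D 9 calls; this sketch simply lists each adapter with its current desktop mode (the function name listD3D9Adapters is illustrative):

#include <d3d9.h>
#include <cstdio>

#pragma comment(lib, "d3d9.lib")

// List every Direct3D 9 adapter with its current desktop mode.
void listD3D9Adapters()
{
	IDirect3D9* d3d9 = Direct3DCreate9(D3D_SDK_VERSION);
	if (d3d9 == nullptr)
	{
		return;
	}

	const UINT adapterCount = d3d9->GetAdapterCount();
	for (UINT adapter = 0; adapter < adapterCount; ++adapter)
	{
		D3DADAPTER_IDENTIFIER9 identifier = {};
		D3DDISPLAYMODE mode = {};
		d3d9->GetAdapterIdentifier(adapter, 0, &identifier);
		d3d9->GetAdapterDisplayMode(adapter, &mode);
		std::printf("Adapter %u: %s %ux%u\n", adapter, identifier.DeviceName, mode.Width, mode.Height);
	}
	d3d9->Release();
}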

View File

@@ -1,9 +1,16 @@
#include <grabber/DirectXWrapper.h>
DirectXWrapper::DirectXWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display, const unsigned updateRate_Hz)
: GrabberWrapper("DirectX", &_grabber, 0, 0, updateRate_Hz)
, _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation, display)
{}
DirectXWrapper::DirectXWrapper( int updateRate_Hz,
int display,
int pixelDecimation,
int cropLeft, int cropRight, int cropTop, int cropBottom
)
: GrabberWrapper("DirectX", &_grabber, updateRate_Hz)
, _grabber(display, cropLeft, cropRight, cropTop, cropBottom)
{
_grabber.setPixelDecimation(pixelDecimation);
}
void DirectXWrapper::action()
{

View File

@@ -3,48 +3,34 @@
#include <cassert>
#include <iostream>
//Qt
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
#include <QSize>
// Constants
namespace {
const bool verbose = false;
const int DEFAULT_DEVICE = 0;
} //End of constants
// Local includes
#include "grabber/DispmanxFrameGrabber.h"
DispmanxFrameGrabber::DispmanxFrameGrabber(unsigned width, unsigned height)
: Grabber("DISPMANXGRABBER", 0, 0)
, _vc_display(0)
, _vc_resource(0)
, _vc_flags(0)
, _captureBuffer(new ColorRgba[0])
, _captureBufferSize(0)
, _image_rgba(width, height)
DispmanxFrameGrabber::DispmanxFrameGrabber()
: Grabber("DISPMANXGRABBER")
, _vc_display(0)
, _vc_resource(0)
, _vc_flags(DISPMANX_TRANSFORM_T(0))
, _captureBuffer(new ColorRgba[0])
, _captureBufferSize(0)
, _image_rgba()
{
_useImageResampler = false;
// Initiase BCM
_useImageResampler = true;
// Initialise BCM
bcm_host_init();
// Check if the display can be opened and display the current resolution
// Open the connection to the display
_vc_display = vc_dispmanx_display_open(0);
assert(_vc_display > 0);
// Obtain the display information
DISPMANX_MODEINFO_T vc_info;
int result = vc_dispmanx_display_get_info(_vc_display, &vc_info);
// Keep compiler happy in 'release' mode
(void)result;
// Close the display
vc_dispmanx_display_close(_vc_display);
if(result != 0)
{
Error(_log, "Failed to open display! Probably no permissions to access the capture interface");
setEnabled(false);
return;
}
else
Info(_log, "Display opened with resolution: %dx%d", vc_info.width, vc_info.height);
// init the resource and capture rectangle
setWidthHeight(width, height);
}
DispmanxFrameGrabber::~DispmanxFrameGrabber()
@@ -55,6 +41,28 @@ DispmanxFrameGrabber::~DispmanxFrameGrabber()
bcm_host_deinit();
}
bool DispmanxFrameGrabber::setupScreen()
{
bool rc (false);
int deviceIdx (DEFAULT_DEVICE);
QSize screenSize = getScreenSize(deviceIdx);
if ( screenSize.isEmpty() )
{
Error(_log, "Failed to open display [%d]! Probably no permissions to access the capture interface", deviceIdx);
setEnabled(false);
}
else
{
setWidthHeight(screenSize.width(), screenSize.height());
Info(_log, "Display [%d] opened with resolution: %dx%d", deviceIdx, screenSize.width(), screenSize.height());
setEnabled(true);
rc = true;
}
return rc;
}
void DispmanxFrameGrabber::freeResources()
{
delete[] _captureBuffer;
@@ -64,152 +72,226 @@ void DispmanxFrameGrabber::freeResources()
bool DispmanxFrameGrabber::setWidthHeight(int width, int height)
{
bool rc = false;
if(Grabber::setWidthHeight(width, height))
{
if(_vc_resource != 0)
if(_vc_resource != 0) {
vc_dispmanx_resource_delete(_vc_resource);
// Create the resources for capturing image
}
Debug(_log,"Create the resources for capturing image");
uint32_t vc_nativeImageHandle;
_vc_resource = vc_dispmanx_resource_create(
VC_IMAGE_RGBA32,
width,
height,
&vc_nativeImageHandle);
VC_IMAGE_RGBA32,
width,
height,
&vc_nativeImageHandle);
assert(_vc_resource);
// Define the capture rectangle with the same size
vc_dispmanx_rect_set(&_rectangle, 0, 0, width, height);
return true;
if (_vc_resource != 0)
{
Debug(_log,"Define the capture rectangle with the same size");
vc_dispmanx_rect_set(&_rectangle, 0, 0, width, height);
rc = true;
}
}
return false;
return rc;
}
void DispmanxFrameGrabber::setFlags(int vc_flags)
void DispmanxFrameGrabber::setFlags(DISPMANX_TRANSFORM_T vc_flags)
{
_vc_flags = vc_flags;
}
int DispmanxFrameGrabber::grabFrame(Image<ColorRgb> & image)
{
if (!_enabled) return 0;
int ret;
// vc_dispmanx_resource_read_data doesn't seem to work well
// with arbitrary positions so we have to handle cropping by ourselves
unsigned cropLeft = _cropLeft;
unsigned cropRight = _cropRight;
unsigned cropTop = _cropTop;
unsigned cropBottom = _cropBottom;
if (_vc_flags & DISPMANX_SNAPSHOT_FILL)
int rc = 0;
if (_isEnabled && !_isDeviceInError)
{
// disable cropping, we are capturing the video overlay window
cropLeft = cropRight = cropTop = cropBottom = 0;
}
// vc_dispmanx_resource_read_data doesn't seem to work well
// with arbitrary positions so we have to handle cropping by ourselves
int cropLeft = _cropLeft;
int cropRight = _cropRight;
int cropTop = _cropTop;
int cropBottom = _cropBottom;
unsigned imageWidth = _width - cropLeft - cropRight;
unsigned imageHeight = _height - cropTop - cropBottom;
// calculate final image dimensions and adjust top/left cropping in 3D modes
switch (_videoMode)
{
case VideoMode::VIDEO_3DSBS:
imageWidth /= 2;
cropLeft /= 2;
break;
case VideoMode::VIDEO_3DTAB:
imageHeight /= 2;
cropTop /= 2;
break;
case VideoMode::VIDEO_2D:
default:
break;
}
// resize the given image if needed
if (image.width() != imageWidth || image.height() != imageHeight)
{
image.resize(imageWidth, imageHeight);
}
if (_image_rgba.width() != imageWidth || _image_rgba.height() != imageHeight)
{
_image_rgba.resize(imageWidth, imageHeight);
}
// Open the connection to the display
_vc_display = vc_dispmanx_display_open(0);
if (_vc_display < 0)
{
Error(_log, "Cannot open display: %d", _vc_display);
return -1;
}
// Create the snapshot (incl down-scaling)
ret = vc_dispmanx_snapshot(_vc_display, _vc_resource, (DISPMANX_TRANSFORM_T) _vc_flags);
if (ret < 0)
{
Error(_log, "Snapshot failed: %d", ret);
vc_dispmanx_display_close(_vc_display);
return ret;
}
// Read the snapshot into the memory
void* imagePtr = _image_rgba.memptr();
void* capturePtr = imagePtr;
unsigned imagePitch = imageWidth * sizeof(ColorRgba);
// dispmanx seems to require the pitch to be a multiple of 64
unsigned capturePitch = (_rectangle.width * sizeof(ColorRgba) + 63) & (~63);
// grab to temp buffer if image pitch isn't valid or if we are cropping
if (imagePitch != capturePitch
|| (unsigned)_rectangle.width != imageWidth
|| (unsigned)_rectangle.height != imageHeight)
{
// check if we need to resize the capture buffer
unsigned captureSize = capturePitch * _rectangle.height / sizeof(ColorRgba);
if (_captureBufferSize != captureSize)
if (_vc_flags & DISPMANX_SNAPSHOT_FILL)
{
delete[] _captureBuffer;
_captureBuffer = new ColorRgba[captureSize];
_captureBufferSize = captureSize;
// disable cropping, we are capturing the video overlay window
Debug(_log,"Disable cropping, as the video overlay window is captured");
cropLeft = cropRight = cropTop = cropBottom = 0;
}
capturePtr = &_captureBuffer[0];
}
unsigned imageWidth = static_cast<unsigned>(_width - cropLeft - cropRight);
unsigned imageHeight = static_cast<unsigned>(_height - cropTop - cropBottom);
ret = vc_dispmanx_resource_read_data(_vc_resource, &_rectangle, capturePtr, capturePitch);
if (ret < 0)
{
Error(_log, "vc_dispmanx_resource_read_data failed: %d", ret);
vc_dispmanx_display_close(_vc_display);
return ret;
}
// copy capture data to image if we captured to temp buffer
if (imagePtr != capturePtr)
{
// adjust source pointer to top/left cropping
uint8_t* src_ptr = (uint8_t*) capturePtr
+ cropLeft * sizeof(ColorRgba)
+ cropTop * capturePitch;
for (unsigned y = 0; y < imageHeight; y++)
// resize the given image if needed
if (image.width() != imageWidth || image.height() != imageHeight)
{
memcpy((uint8_t*)imagePtr + y * imagePitch,
src_ptr + y * capturePitch,
imagePitch);
image.resize(imageWidth, imageHeight);
}
if (_image_rgba.width() != imageWidth || _image_rgba.height() != imageHeight)
{
_image_rgba.resize(imageWidth, imageHeight);
}
// Open the connection to the display
_vc_display = vc_dispmanx_display_open(DEFAULT_DEVICE);
if (_vc_display < 0)
{
Error(_log, "Cannot open display: %d", DEFAULT_DEVICE);
rc = -1;
}
else {
// Create the snapshot (incl down-scaling)
int ret = vc_dispmanx_snapshot(_vc_display, _vc_resource, _vc_flags);
if (ret < 0)
{
Error(_log, "Snapshot failed: %d", ret);
rc = ret;
}
else
{
// Read the snapshot into the memory
void* imagePtr = _image_rgba.memptr();
void* capturePtr = imagePtr;
unsigned imagePitch = imageWidth * sizeof(ColorRgba);
// dispmanx seems to require the pitch to be a multiple of 64
unsigned capturePitch = (_rectangle.width * sizeof(ColorRgba) + 63) & (~63);
// grab to temp buffer if image pitch isn't valid or if we are cropping
if (imagePitch != capturePitch
|| static_cast<unsigned>(_rectangle.width) != imageWidth
|| static_cast<unsigned>(_rectangle.height) != imageHeight)
{
// check if we need to resize the capture buffer
unsigned captureSize = capturePitch * static_cast<unsigned>(_rectangle.height) / sizeof(ColorRgba);
if (_captureBufferSize != captureSize)
{
delete[] _captureBuffer;
_captureBuffer = new ColorRgba[captureSize];
_captureBufferSize = captureSize;
}
capturePtr = &_captureBuffer[0];
}
ret = vc_dispmanx_resource_read_data(_vc_resource, &_rectangle, capturePtr, capturePitch);
if (ret < 0)
{
Error(_log, "vc_dispmanx_resource_read_data failed: %d", ret);
rc = ret;
}
else
{
_imageResampler.processImage(static_cast<uint8_t*>(capturePtr),
_width,
_height,
static_cast<int>(capturePitch),
PixelFormat::RGB32,
image);
}
}
vc_dispmanx_display_close(_vc_display);
}
}
// Close the display
vc_dispmanx_display_close(_vc_display);
// image to output image
_image_rgba.toRgb(image);
return 0;
return rc;
}
QSize DispmanxFrameGrabber::getScreenSize(int device) const
{
int width (0);
int height(0);
DISPMANX_DISPLAY_HANDLE_T vc_display = vc_dispmanx_display_open(device);
if ( vc_display > 0)
{
// Obtain the display information
DISPMANX_MODEINFO_T vc_info;
int result = vc_dispmanx_display_get_info(vc_display, &vc_info);
(void)result;
if (result == 0)
{
width = vc_info.width;
height = vc_info.height;
DebugIf(verbose, _log, "Display found with resolution: %dx%d", width, height);
}
// Close the display
vc_dispmanx_display_close(vc_display);
}
return QSize(width, height);
}
QJsonObject DispmanxFrameGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
int deviceIdx (DEFAULT_DEVICE);
QJsonArray video_inputs;
QSize screenSize = getScreenSize(deviceIdx);
if ( !screenSize.isEmpty() )
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
QJsonObject in;
QString displayName;
displayName = QString("Screen:%1").arg(deviceIdx);
in["name"] = displayName;
in["inputIdx"] = deviceIdx;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
resolution["width"] = screenSize.width();
resolution["height"] = screenSize.height();
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
if (!video_inputs.isEmpty())
{
inputsDiscovered["device"] = "dispmanx";
inputsDiscovered["device_name"] = "DispmanX";
inputsDiscovered["type"] = "screen";
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = _fps;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
if (inputsDiscovered.isEmpty())
{
DebugIf(verbose, _log, "No displays found to capture from!");
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
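
The capture path above boils down to the standard dispmanx sequence: open the display, create an RGBA32 resource, snapshot into it and read the data back with a 64-byte-aligned pitch. A self-contained sketch of that sequence, assuming the Broadcom userland headers (bcm_host.h) are available; error handling is reduced to the essentials:

#include <bcm_host.h>
#include <cstdint>
#include <vector>

// Take one RGBA32 snapshot of display 0 at the given size; returns true on success.
bool dispmanxSnapshot(int width, int height, std::vector<uint8_t>& rgba)
{
	bcm_host_init();

	uint32_t nativeImageHandle = 0;
	DISPMANX_RESOURCE_HANDLE_T resource =
		vc_dispmanx_resource_create(VC_IMAGE_RGBA32, width, height, &nativeImageHandle);

	VC_RECT_T rectangle;
	vc_dispmanx_rect_set(&rectangle, 0, 0, width, height);

	DISPMANX_DISPLAY_HANDLE_T display = vc_dispmanx_display_open(0);
	bool ok = false;
	if (display != 0 && vc_dispmanx_snapshot(display, resource, DISPMANX_NO_ROTATE) >= 0)
	{
		// dispmanx requires the destination pitch to be a multiple of 64 bytes
		const unsigned pitch = (static_cast<unsigned>(width) * 4 + 63) & ~63u;
		rgba.resize(static_cast<size_t>(pitch) * static_cast<size_t>(height));
		ok = (vc_dispmanx_resource_read_data(resource, &rectangle, rgba.data(), pitch) >= 0);
	}

	if (display != 0)
	{
		vc_dispmanx_display_close(display);
	}
	vc_dispmanx_resource_delete(resource);
	bcm_host_deinit();
	return ok;
}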

View File

@@ -5,6 +5,7 @@
unsigned __bcm_frame_counter = 0;
const int __screenWidth = 800;
const int __screenHeight = 600;
const int __display_num = 0;
void bcm_host_init()
{
@@ -27,6 +28,7 @@ int vc_dispmanx_display_get_info(int, DISPMANX_MODEINFO_T *vc_info)
{
vc_info->width = __screenWidth;
vc_info->height = __screenHeight;
vc_info->display_num = __display_num;
return 0;
}
@@ -54,7 +56,7 @@ void vc_dispmanx_rect_set(VC_RECT_T *rectangle, int left, int top, int width, in
rectangle->top = top;
}
int vc_dispmanx_snapshot(int, DISPMANX_RESOURCE_HANDLE_T resource, int vc_flags)
int vc_dispmanx_snapshot(DISPMANX_DISPLAY_HANDLE_T /*display*/, DISPMANX_RESOURCE_HANDLE_T resource, DISPMANX_TRANSFORM_T /*vc_flags*/)
{
__bcm_frame_counter++;
if (__bcm_frame_counter > 100)
@@ -66,7 +68,7 @@ int vc_dispmanx_snapshot(int, DISPMANX_RESOURCE_HANDLE_T resource, int vc_flags)
if (__bcm_frame_counter < 25)
{
color[0] = ColorRgba::WHITE;
0 color[1] = ColorRgba::RED;
color[1] = ColorRgba::RED;
color[2] = ColorRgba::BLUE;
color[3] = ColorRgba::GREEN;
}

View File

@@ -1,10 +1,12 @@
#include <grabber/DispmanxWrapper.h>
DispmanxWrapper::DispmanxWrapper(unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz)
: GrabberWrapper("Dispmanx", &_grabber, grabWidth, grabHeight, updateRate_Hz)
, _grabber(grabWidth, grabHeight)
DispmanxWrapper::DispmanxWrapper( int updateRate_Hz,
int pixelDecimation
)
: GrabberWrapper("Dispmanx", &_grabber, updateRate_Hz)
, _grabber()
{
_grabber.setPixelDecimation(pixelDecimation);
}
void DispmanxWrapper::action()

View File

@@ -10,102 +10,263 @@
// STL includes
#include <iostream>
//Qt
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
#include <QDir>
#include <QSize>
// Constants
namespace {
const bool verbose = false;
// fb discovery service
const char DISCOVERY_DIRECTORY[] = "/dev/";
const char DISCOVERY_FILEPATTERN[] = "fb?";
} //End of constants
// Local includes
#include <grabber/FramebufferFrameGrabber.h>
FramebufferFrameGrabber::FramebufferFrameGrabber(const QString & device, unsigned width, unsigned height)
: Grabber("FRAMEBUFFERGRABBER", width, height)
, _fbDevice()
FramebufferFrameGrabber::FramebufferFrameGrabber(const QString & device)
: Grabber("FRAMEBUFFERGRABBER")
, _fbDevice(device)
, _fbfd (-1)
{
setDevicePath(device);
_useImageResampler = true;
}
FramebufferFrameGrabber::~FramebufferFrameGrabber()
{
closeDevice();
}
bool FramebufferFrameGrabber::setupScreen()
{
bool rc (false);
if ( _fbfd >= 0 )
{
closeDevice();
}
rc = getScreenInfo();
setEnabled(rc);
return rc;
}
bool FramebufferFrameGrabber::setWidthHeight(int width, int height)
{
bool rc (false);
if(Grabber::setWidthHeight(width, height))
{
rc = setupScreen();
}
return rc;
}
int FramebufferFrameGrabber::grabFrame(Image<ColorRgb> & image)
{
if (!_enabled) return 0;
int rc = 0;
struct fb_var_screeninfo vinfo;
unsigned capSize, bytesPerPixel;
PixelFormat pixelFormat;
/* Open the framebuffer device */
int fbfd = open(QSTRING_CSTR(_fbDevice), O_RDONLY);
if (fbfd == -1)
if (_isEnabled && !_isDeviceInError)
{
Error(_log, "Error opening %s, %s : ", QSTRING_CSTR(_fbDevice), std::strerror(errno));
return -1;
}
/* get variable screen information */
ioctl (fbfd, FBIOGET_VSCREENINFO, &vinfo);
bytesPerPixel = vinfo.bits_per_pixel / 8;
capSize = vinfo.xres * vinfo.yres * bytesPerPixel;
switch (vinfo.bits_per_pixel)
{
case 16: pixelFormat = PixelFormat::BGR16; break;
case 24: pixelFormat = PixelFormat::BGR24; break;
#ifdef ENABLE_AMLOGIC
case 32: pixelFormat = PixelFormat::PIXELFORMAT_RGB32; break;
#else
case 32: pixelFormat = PixelFormat::BGR32; break;
#endif
default:
Error(_log, "Unknown pixel format: %d bits per pixel", vinfo.bits_per_pixel);
close(fbfd);
return -1;
}
/* map the device to memory */
unsigned char * fbp = (unsigned char*)mmap(0, capSize, PROT_READ, MAP_PRIVATE | MAP_NORESERVE, fbfd, 0);
if (fbp == MAP_FAILED) {
Error(_log, "Error mapping %s, %s : ", QSTRING_CSTR(_fbDevice), std::strerror(errno));
return -1;
}
_imageResampler.setHorizontalPixelDecimation(vinfo.xres/_width);
_imageResampler.setVerticalPixelDecimation(vinfo.yres/_height);
_imageResampler.processImage(fbp,
vinfo.xres,
vinfo.yres,
vinfo.xres * bytesPerPixel,
pixelFormat,
image);
munmap(fbp, capSize);
close(fbfd);
return 0;
}
void FramebufferFrameGrabber::setDevicePath(const QString& path)
{
if(_fbDevice != path)
{
_fbDevice = path;
int result;
struct fb_var_screeninfo vinfo;
// Check if the framebuffer device can be opened and display the current resolution
int fbfd = open(QSTRING_CSTR(_fbDevice), O_RDONLY);
if (fbfd == -1)
if ( getScreenInfo() )
{
Error(_log, "Error opening %s, %s : ", QSTRING_CSTR(_fbDevice), std::strerror(errno));
}
else
{
// get variable screen information
result = ioctl (fbfd, FBIOGET_VSCREENINFO, &vinfo);
if (result != 0)
{
Error(_log, "Could not get screen information, %s", std::strerror(errno));
/* map the device to memory */
uint8_t * fbp = static_cast<uint8_t*>(mmap(nullptr, _fixInfo.smem_len, PROT_READ, MAP_PRIVATE | MAP_NORESERVE, _fbfd, 0));
if (fbp == MAP_FAILED) {
QString errorReason = QString ("Error mapping %1, [%2] %3").arg(_fbDevice).arg(errno).arg(std::strerror(errno));
this->setInError ( errorReason );
closeDevice();
rc = -1;
}
else
{
Info(_log, "Display opened with resolution: %dx%d@%dbit", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel);
_imageResampler.processImage(fbp,
static_cast<int>(_varInfo.xres),
static_cast<int>(_varInfo.yres),
static_cast<int>(_fixInfo.line_length),
_pixelFormat,
image);
munmap(fbp, _fixInfo.smem_len);
}
}
closeDevice();
}
return rc;
}
bool FramebufferFrameGrabber::openDevice()
{
bool rc = true;
/* Open the framebuffer device */
_fbfd = ::open(QSTRING_CSTR(_fbDevice), O_RDONLY);
if (_fbfd < 0)
{
QString errorReason = QString ("Error opening %1, [%2] %3").arg(_fbDevice).arg(errno).arg(std::strerror(errno));
this->setInError ( errorReason );
rc = false;
}
return rc;
}
bool FramebufferFrameGrabber::closeDevice()
{
bool rc = false;
if (_fbfd >= 0)
{
if( ::close(_fbfd) == 0) {
rc = true;
}
_fbfd = -1;
}
return rc;
}
QSize FramebufferFrameGrabber::getScreenSize() const
{
return getScreenSize(_fbDevice);
}
QSize FramebufferFrameGrabber::getScreenSize(const QString& device) const
{
int width (0);
int height(0);
int fbfd = ::open(QSTRING_CSTR(device), O_RDONLY);
if (fbfd != -1)
{
struct fb_var_screeninfo vinfo;
int result = ioctl (fbfd, FBIOGET_VSCREENINFO, &vinfo);
if (result == 0)
{
width = static_cast<int>(vinfo.xres);
height = static_cast<int>(vinfo.yres);
DebugIf(verbose, _log, "FB device [%s] found with resolution: %dx%d", QSTRING_CSTR(device), width, height);
}
::close(fbfd);
}
return QSize(width, height);
}
bool FramebufferFrameGrabber::getScreenInfo()
{
bool rc (false);
if ( openDevice() )
{
if (ioctl(_fbfd, FBIOGET_FSCREENINFO, &_fixInfo) < 0 || ioctl (_fbfd, FBIOGET_VSCREENINFO, &_varInfo) < 0)
{
QString errorReason = QString ("Error getting screen information for %1, [%2] %3").arg(_fbDevice).arg(errno).arg(std::strerror(errno));
this->setInError ( errorReason );
closeDevice();
}
else
{
rc = true;
switch (_varInfo.bits_per_pixel)
{
case 16: _pixelFormat = PixelFormat::BGR16;
break;
case 24: _pixelFormat = PixelFormat::BGR24;
break;
case 32: _pixelFormat = PixelFormat::BGR32;
break;
default:
rc= false;
QString errorReason = QString ("Unknown pixel format: %1 bits per pixel").arg(static_cast<int>(_varInfo.bits_per_pixel));
this->setInError ( errorReason );
closeDevice();
}
close(fbfd);
}
}
return rc;
}
QJsonObject FramebufferFrameGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
//Find framebuffer devices 0-9
QDir deviceDirectory (DISCOVERY_DIRECTORY);
QStringList deviceFilter(DISCOVERY_FILEPATTERN);
deviceDirectory.setNameFilters(deviceFilter);
deviceDirectory.setSorting(QDir::Name);
QFileInfoList deviceFiles = deviceDirectory.entryInfoList(QDir::System);
int fbIdx (0);
QJsonArray video_inputs;
QFileInfoList::const_iterator deviceFileIterator;
for (deviceFileIterator = deviceFiles.constBegin(); deviceFileIterator != deviceFiles.constEnd(); ++deviceFileIterator)
{
fbIdx = (*deviceFileIterator).fileName().rightRef(1).toInt();
QString device = (*deviceFileIterator).absoluteFilePath();
DebugIf(verbose, _log, "FB device [%s] found", QSTRING_CSTR(device));
QSize screenSize = getScreenSize(device);
if ( !screenSize.isEmpty() )
{
QJsonArray fps = { "1", "5", "10", "15", "20", "25", "30", "40", "50", "60" };
QJsonObject in;
QString displayName;
displayName = QString("FB%1").arg(fbIdx);
in["name"] = displayName;
in["inputIdx"] = fbIdx;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
resolution["width"] = screenSize.width();
resolution["height"] = screenSize.height();
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
if (!video_inputs.isEmpty())
{
inputsDiscovered["device"] = "framebuffer";
inputsDiscovered["device_name"] = "Framebuffer";
inputsDiscovered["type"] = "screen";
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = _fps;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
}
if (inputsDiscovered.isEmpty())
{
DebugIf(verbose, _log, "No displays found to capture from!");
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
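
The screen probing in getScreenInfo()/getScreenSize() relies only on the standard Linux framebuffer ioctls; the following sketch shows the same queries in isolation, with the device path supplied by the caller:

#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/fb.h>
#include <cstdio>

// Print resolution, colour depth and line length of a framebuffer device such as "/dev/fb0".
void printFramebufferInfo(const char* device)
{
	int fd = ::open(device, O_RDONLY);
	if (fd < 0)
	{
		std::perror(device);
		return;
	}

	fb_var_screeninfo varInfo {};
	fb_fix_screeninfo fixInfo {};
	if (ioctl(fd, FBIOGET_VSCREENINFO, &varInfo) == 0 &&
	    ioctl(fd, FBIOGET_FSCREENINFO, &fixInfo) == 0)
	{
		std::printf("%s: %ux%u@%ubit, line length %u bytes\n",
		            device, varInfo.xres, varInfo.yres,
		            varInfo.bits_per_pixel, fixInfo.line_length);
	}
	::close(fd);
}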

View File

@@ -1,9 +1,13 @@
#include <grabber/FramebufferWrapper.h>
FramebufferWrapper::FramebufferWrapper(const QString & device, unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz)
: GrabberWrapper("FrameBuffer", &_grabber, grabWidth, grabHeight, updateRate_Hz)
, _grabber(device, grabWidth, grabHeight)
{}
FramebufferWrapper::FramebufferWrapper( int updateRate_Hz,
const QString & device,
int pixelDecimation)
: GrabberWrapper("FrameBuffer", &_grabber, updateRate_Hz)
, _grabber(device)
{
_grabber.setPixelDecimation(pixelDecimation);
}
void FramebufferWrapper::action()
{

View File

@@ -5,94 +5,211 @@
// Local includes
#include <grabber/OsxFrameGrabber.h>
OsxFrameGrabber::OsxFrameGrabber(unsigned display, unsigned width, unsigned height)
: Grabber("OSXGRABBER", width, height)
, _screenIndex(100)
//Qt
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
// Constants
namespace {
const bool verbose = false;
} //End of constants
OsxFrameGrabber::OsxFrameGrabber(int display)
: Grabber("OSXGRABBER")
, _screenIndex(display)
{
// check if display is available
setDisplayIndex(display);
_isEnabled = false;
_useImageResampler = true;
}
OsxFrameGrabber::~OsxFrameGrabber()
{
}
int OsxFrameGrabber::grabFrame(Image<ColorRgb> & image)
bool OsxFrameGrabber::setupDisplay()
{
if (!_enabled) return 0;
bool rc (false);
CGImageRef dispImage;
CFDataRef imgData;
unsigned char * pImgData;
unsigned dspWidth, dspHeight;
rc = setDisplayIndex(_screenIndex);
dispImage = CGDisplayCreateImage(_display);
// display lost, use main
if (dispImage == NULL && _display)
{
dispImage = CGDisplayCreateImage(kCGDirectMainDisplay);
// no displays connected, return
if (dispImage == NULL)
{
Error(_log, "No display connected...");
return -1;
}
}
imgData = CGDataProviderCopyData(CGImageGetDataProvider(dispImage));
pImgData = (unsigned char*) CFDataGetBytePtr(imgData);
dspWidth = CGImageGetWidth(dispImage);
dspHeight = CGImageGetHeight(dispImage);
_imageResampler.setHorizontalPixelDecimation(dspWidth/_width);
_imageResampler.setVerticalPixelDecimation(dspHeight/_height);
_imageResampler.processImage( pImgData,
dspWidth,
dspHeight,
CGImageGetBytesPerRow(dispImage),
PixelFormat::BGR32,
image);
CFRelease(imgData);
CGImageRelease(dispImage);
return 0;
return rc;
}
void OsxFrameGrabber::setDisplayIndex(int index)
int OsxFrameGrabber::grabFrame(Image<ColorRgb> & image)
{
if(_screenIndex != index)
int rc = 0;
if (_isEnabled && !_isDeviceInError)
{
CGImageRef dispImage;
CFDataRef imgData;
unsigned char * pImgData;
unsigned dspWidth;
unsigned dspHeight;
dispImage = CGDisplayCreateImage(_display);
// display lost, use main
if (dispImage == nullptr && _display != 0)
{
dispImage = CGDisplayCreateImage(kCGDirectMainDisplay);
// no displays connected, return
if (dispImage == nullptr)
{
Error(_log, "No display connected...");
return -1;
}
}
imgData = CGDataProviderCopyData(CGImageGetDataProvider(dispImage));
pImgData = (unsigned char*) CFDataGetBytePtr(imgData);
dspWidth = CGImageGetWidth(dispImage);
dspHeight = CGImageGetHeight(dispImage);
_imageResampler.processImage( pImgData,
static_cast<int>(dspWidth),
static_cast<int>(dspHeight),
static_cast<int>(CGImageGetBytesPerRow(dispImage)),
PixelFormat::BGR32,
image);
CFRelease(imgData);
CGImageRelease(dispImage);
}
return rc;
}
bool OsxFrameGrabber::setDisplayIndex(int index)
{
bool rc (true);
if(_screenIndex != index || !_isEnabled)
{
_screenIndex = index;
CGImageRef image;
CGDisplayCount displayCount;
CGDirectDisplayID displays[8];
// get list of displays
CGGetActiveDisplayList(8, displays, &displayCount);
if (_screenIndex + 1 > displayCount)
CGDisplayCount dspyCnt = 0 ;
CGDisplayErr err;
err = CGGetActiveDisplayList(0, nullptr, &dspyCnt);
if (err == kCGErrorSuccess && dspyCnt > 0)
{
Error(_log, "Display with index %d is not available. Using main display", _screenIndex);
_display = kCGDirectMainDisplay;
CGDirectDisplayID *activeDspys = new CGDirectDisplayID [dspyCnt] ;
err = CGGetActiveDisplayList(dspyCnt, activeDspys, &dspyCnt) ;
if (err == kCGErrorSuccess)
{
CGImageRef image;
if (_screenIndex + 1 > static_cast<int>(dspyCnt))
{
Error(_log, "Display with index %d is not available.", _screenIndex);
rc = false;
}
else
{
_display = activeDspys[_screenIndex];
image = CGDisplayCreateImage(_display);
if(image == nullptr)
{
setEnabled(false);
Error(_log, "Failed to open main display, disable capture interface");
rc = false;
}
else
{
setEnabled(true);
rc = true;
Info(_log, "Display [%u] opened with resolution: %ux%u@%ubit", _display, CGImageGetWidth(image), CGImageGetHeight(image), CGImageGetBitsPerPixel(image));
}
CGImageRelease(image);
}
}
}
else
{
_display = displays[_screenIndex];
rc=false;
}
image = CGDisplayCreateImage(_display);
if(image == NULL)
{
Error(_log, "Failed to open main display, disable capture interface");
setEnabled(false);
return;
}
else
setEnabled(true);
Info(_log, "Display opened with resolution: %dx%d@%dbit", CGImageGetWidth(image), CGImageGetHeight(image), CGImageGetBitsPerPixel(image));
CGImageRelease(image);
}
return rc;
}
QJsonObject OsxFrameGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
// get list of displays
CGDisplayCount dspyCnt = 0 ;
CGDisplayErr err;
err = CGGetActiveDisplayList(0, nullptr, &dspyCnt);
if (err == kCGErrorSuccess && dspyCnt > 0)
{
CGDirectDisplayID *activeDspys = new CGDirectDisplayID [dspyCnt] ;
err = CGGetActiveDisplayList(dspyCnt, activeDspys, &dspyCnt) ;
if (err == kCGErrorSuccess)
{
inputsDiscovered["device"] = "osx";
inputsDiscovered["device_name"] = "OSX";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
for (int i = 0; i < static_cast<int>(dspyCnt); ++i)
{
QJsonObject in;
CGDirectDisplayID did = activeDspys[i];
QString displayName;
displayName = QString("Display:%1").arg(did);
in["name"] = displayName;
in["inputIdx"] = i;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
CGDisplayModeRef dispMode = CGDisplayCopyDisplayMode(did);
CGRect rect = CGDisplayBounds(did);
resolution["width"] = static_cast<int>(rect.size.width);
resolution["height"] = static_cast<int>(rect.size.height);
CGDisplayModeRelease(dispMode);
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = _fps;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
delete [] activeDspys;
}
if (inputsDiscovered.isEmpty())
{
DebugIf(verbose, _log, "No displays found to capture from!");
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
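
The display discovery above follows the usual two-call CGGetActiveDisplayList pattern: first query the count, then fetch the IDs. A condensed standalone sketch of that pattern, assuming a macOS build with CoreGraphics (the function name listActiveDisplays is illustrative):

#include <CoreGraphics/CoreGraphics.h>
#include <cstdio>
#include <vector>

// List the active displays and their current bounds, mirroring the discovery logic above.
void listActiveDisplays()
{
	uint32_t displayCount = 0;
	if (CGGetActiveDisplayList(0, nullptr, &displayCount) != kCGErrorSuccess || displayCount == 0)
	{
		return;
	}

	std::vector<CGDirectDisplayID> displays(displayCount);
	if (CGGetActiveDisplayList(displayCount, displays.data(), &displayCount) != kCGErrorSuccess)
	{
		return;
	}

	for (uint32_t i = 0; i < displayCount; ++i)
	{
		const CGRect bounds = CGDisplayBounds(displays[i]);
		std::printf("Display %u (id %u): %.0fx%.0f\n",
		            i, displays[i],
		            static_cast<double>(bounds.size.width),
		            static_cast<double>(bounds.size.height));
	}
}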

View File

@@ -5,15 +5,33 @@ unsigned __osx_frame_counter = 0;
const int __screenWidth = 800;
const int __screenHeight = 600;
void CGGetActiveDisplayList(int max, CGDirectDisplayID *displays, CGDisplayCount *displayCount)
CGError CGGetActiveDisplayList(uint32_t maxDisplays, CGDirectDisplayID *activeDisplays, uint32_t *displayCount)
{
*displayCount = 1;
displays[0] = 1;
if (maxDisplays == 0 || activeDisplays == nullptr)
{
*displayCount = 2;
}
else
{
*displayCount = maxDisplays;
if (activeDisplays != nullptr)
{
for (CGDirectDisplayID i = 0; i < maxDisplays; ++i)
{
activeDisplays[i] = i;
}
}
else
{
return kCGErrorFailure;
}
}
return kCGErrorSuccess;
}
CGImageRef CGDisplayCreateImage(CGDirectDisplayID display)
{
CGImageRef image = new CGImage(__screenWidth, __screenHeight);
CGImageRef image = new CGImage(__screenWidth / (display+1), __screenHeight / (display+1));
return image;
}
@@ -123,4 +141,19 @@ void CFRelease(CFDataRef imgData)
delete imgData;
}
CGDisplayModeRef CGDisplayCopyDisplayMode(CGDirectDisplayID display)
{
return nullptr;
}
CGRect CGDisplayBounds(CGDirectDisplayID display)
{
CGRect rect;
rect.size.width = __screenWidth / (display+1);
rect.size.height = __screenHeight / (display+1);
return rect;
}
void CGDisplayModeRelease(CGDisplayModeRef mode)
{
}
#endif

View File

@@ -1,9 +1,14 @@
#include <grabber/OsxWrapper.h>
OsxWrapper::OsxWrapper(unsigned display, unsigned grabWidth, unsigned grabHeight, unsigned updateRate_Hz)
: GrabberWrapper("OSX FrameGrabber", &_grabber, grabWidth, grabHeight, updateRate_Hz)
, _grabber(display, grabWidth, grabHeight)
{}
OsxWrapper::OsxWrapper( int updateRate_Hz,
int display,
int pixelDecimation
)
: GrabberWrapper("OSX", &_grabber, updateRate_Hz)
, _grabber(display)
{
_grabber.setPixelDecimation(pixelDecimation);
}
void OsxWrapper::action()
{

View File

@@ -7,23 +7,34 @@
#include <QGuiApplication>
#include <QWidget>
#include <QScreen>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
QtGrabber::QtGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display)
: Grabber("QTGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
, _display(unsigned(display))
, _pixelDecimation(pixelDecimation)
, _screenWidth(0)
, _screenHeight(0)
, _src_x(0)
, _src_y(0)
, _src_x_max(0)
, _src_y_max(0)
, _screen(nullptr)
#ifdef _WIN32
#include <Windows.h>
#endif
// Constants
namespace {
const bool verbose = false;
} //End of constants
QtGrabber::QtGrabber(int display, int cropLeft, int cropRight, int cropTop, int cropBottom)
: Grabber("QTGRABBER", cropLeft, cropRight, cropTop, cropBottom)
, _display(display)
, _calculatedWidth(0)
, _calculatedHeight(0)
, _src_x(0)
, _src_y(0)
, _src_x_max(0)
, _src_y_max(0)
, _isWayland(false)
, _screen(nullptr)
, _isVirtual(false)
{
_logger = Logger::getInstance("Qt");
_useImageResampler = false;
// init
setupDisplay();
}
QtGrabber::~QtGrabber()
@@ -36,51 +47,111 @@ void QtGrabber::freeResources()
// Qt seems to hold the ownership of the QScreen pointers
}
bool QtGrabber::open()
{
bool rc = false;
#ifndef _WIN32
if (getenv("WAYLAND_DISPLAY") != nullptr)
{
_isWayland = true;
}
else
#endif
{
rc = true;
}
return rc;
}
bool QtGrabber::setupDisplay()
{
// cleanup last screen
freeResources();
QScreen* primary = QGuiApplication::primaryScreen();
QList<QScreen *> screens = QGuiApplication::screens();
// inject main screen at 0, if not nullptr
if(primary != nullptr)
bool result = false;
if ( ! open() )
{
screens.prepend(primary);
// remove last main screen if twice in list
if(screens.lastIndexOf(primary) > 0)
screens.removeAt(screens.lastIndexOf(primary));
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
}
if(screens.isEmpty())
else
{
Error(_log, "No displays found to capture from!");
return false;
// cleanup last screen
freeResources();
_numberOfSDisplays = 0;
QScreen* primary = QGuiApplication::primaryScreen();
QList<QScreen *> screens = QGuiApplication::screens();
// inject main screen at 0, if not nullptr
if(primary != nullptr)
{
screens.prepend(primary);
// remove last main screen if twice in list
if(screens.lastIndexOf(primary) > 0)
{
screens.removeAt(screens.lastIndexOf(primary));
}
}
if(screens.isEmpty())
{
Error(_log, "No displays found to capture from!");
result = false;
}
else
{
_numberOfSDisplays = screens.size();
Info(_log,"Available Displays:");
int index = 0;
for(auto * screen : qAsConst(screens))
{
const QRect geo = screen->geometry();
Info(_log,"Display %d: Name: %s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth());
++index;
}
if (screens.at(0)->size() != screens.at(0)->virtualSize())
{
const QRect vgeo = screens.at(0)->virtualGeometry();
Info(_log,"Display %d: Name: %s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", _numberOfSDisplays, "All Displays", vgeo.left(), vgeo.top() ,vgeo.right(), vgeo.bottom(), screens.at(0)->depth());
}
_isVirtual = false;
// be sure the index is available
if (_display > _numberOfSDisplays - 1 )
{
if ((screens.at(0)->size() != screens.at(0)->virtualSize()) && (_display == _numberOfSDisplays))
{
_isVirtual = true;
_display = 0;
}
else
{
Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display);
_display = 0;
}
}
// init the requested display
_screen = screens.at(_display);
connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged);
updateScreenDimensions(true);
if (_isVirtual)
{
Info(_log, "Using virtual display across all screens");
}
else
{
Info(_log,"Initialized display %d", _display);
}
result = true;
}
}
Info(_log,"Available Displays:");
int index = 0;
for(auto screen : screens)
{
const QRect geo = screen->geometry();
Info(_log,"Display %d: Name:%s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth());
index++;
}
// be sure the index is available
if(_display > unsigned(screens.size()-1))
{
Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display);
_display = 0;
}
// init the requested display
_screen = screens.at(_display);
connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged);
updateScreenDimensions(true);
Info(_log,"Initialized display %d", _display);
return true;
return result;
}
void QtGrabber::geometryChanged(const QRect &geo)
@@ -89,92 +160,167 @@ void QtGrabber::geometryChanged(const QRect &geo)
updateScreenDimensions(true);
}
#ifdef _WIN32
extern QPixmap qt_pixmapFromWinHBITMAP(HBITMAP bitmap, int format = 0);
QPixmap QtGrabber::grabWindow(quintptr window, int xIn, int yIn, int width, int height) const
{
QSize windowSize;
int x = xIn;
int y = yIn;
HWND hwnd = reinterpret_cast<HWND>(window);
if (hwnd)
{
RECT r;
GetClientRect(hwnd, &r);
windowSize = QSize(r.right - r.left, r.bottom - r.top);
}
else
{
hwnd = GetDesktopWindow();
const QRect screenGeometry = _screen->geometry();
windowSize = screenGeometry.size();
x += screenGeometry.x();
y += screenGeometry.y();
}
if (width < 0)
width = windowSize.width() - x;
if (height < 0)
height = windowSize.height() - y;
// Create and setup bitmap
HDC display_dc = GetDC(nullptr);
HDC bitmap_dc = CreateCompatibleDC(display_dc);
HBITMAP bitmap = CreateCompatibleBitmap(display_dc, width, height);
HGDIOBJ null_bitmap = SelectObject(bitmap_dc, bitmap);
// copy data
HDC window_dc = GetDC(hwnd);
BitBlt(bitmap_dc, 0, 0, width, height, window_dc, x, y, SRCCOPY);
// clean up all but bitmap
ReleaseDC(hwnd, window_dc);
SelectObject(bitmap_dc, null_bitmap);
DeleteDC(bitmap_dc);
const QPixmap pixmap = qt_pixmapFromWinHBITMAP(bitmap);
DeleteObject(bitmap);
ReleaseDC(nullptr, display_dc);
return pixmap;
}
#endif
int QtGrabber::grabFrame(Image<ColorRgb> & image)
{
if (!_enabled) return 0;
if(_screen == nullptr)
int rc = 0;
if (_isEnabled && !_isDeviceInError)
{
// reinit, this will disable capture on failure
setEnabled(setupDisplay());
return -1;
}
QPixmap originalPixmap = _screen->grabWindow(0, _src_x, _src_y, _src_x_max, _src_y_max);
QPixmap resizedPixmap = originalPixmap.scaled(_width,_height);
QImage imageFrame = resizedPixmap.toImage().convertToFormat( QImage::Format_RGB888);
image.resize(imageFrame.width(), imageFrame.height());
for (int y=0; y<imageFrame.height(); ++y)
for (int x=0; x<imageFrame.width(); ++x)
if(_screen == nullptr)
{
QColor inPixel(imageFrame.pixel(x,y));
ColorRgb & outPixel = image(x,y);
outPixel.red = inPixel.red();
outPixel.green = inPixel.green();
outPixel.blue = inPixel.blue();
// reinit, this will disable capture on failure
bool result = setupDisplay();
setEnabled(result);
}
return 0;
if (_isEnabled)
{
#ifdef _WIN32
QPixmap originalPixmap = grabWindow(0, _src_x, _src_y, _src_x_max, _src_y_max);
#else
QPixmap originalPixmap = _screen->grabWindow(0, _src_x, _src_y, _src_x_max, _src_y_max);
#endif
if (originalPixmap.isNull())
{
rc = -1;
}
else
{
QImage imageFrame = originalPixmap.toImage().scaled(_calculatedWidth, _calculatedHeight).convertToFormat( QImage::Format_RGB888);
image.resize(static_cast<uint>(_calculatedWidth), static_cast<uint>(_calculatedHeight));
for (int y = 0; y < imageFrame.height(); y++)
{
memcpy((unsigned char*)image.memptr() + y * image.width() * 3, static_cast<unsigned char*>(imageFrame.scanLine(y)), imageFrame.width() * 3);
}
}
}
}
return rc;
}
int QtGrabber::updateScreenDimensions(bool force)
{
if(!_screen)
if(_screen == nullptr)
{
return -1;
}
const QRect& geo = _screen->geometry();
if (!force && _screenWidth == unsigned(geo.right()) && _screenHeight == unsigned(geo.bottom()))
QRect geo;
if (_isVirtual)
{
geo = _screen->virtualGeometry();
}
else
{
geo = _screen->geometry();
}
if (!force && _width == geo.width() && _height == geo.height())
{
// No update required
return 0;
}
Info(_log, "Update of screen resolution: [%dx%d] to [%dx%d]", _screenWidth, _screenHeight, geo.right(), geo.bottom());
_screenWidth = geo.right() - geo.left();
_screenHeight = geo.bottom() - geo.top();
Info(_log, "Update of screen resolution: [%dx%d] to [%dx%d]", _width, _height, geo.width(), geo.height());
_width = geo.width();
_height = geo.height();
int width=0, height=0;
int width=0;
int height=0;
// Image scaling is performed by Qt
width = (_screenWidth > unsigned(_cropLeft + _cropRight))
? ((_screenWidth - _cropLeft - _cropRight) / _pixelDecimation)
: (_screenWidth / _pixelDecimation);
width = (_width > (_cropLeft + _cropRight))
? ((_width - _cropLeft - _cropRight) / _pixelDecimation)
: (_width / _pixelDecimation);
height = (_screenHeight > unsigned(_cropTop + _cropBottom))
? ((_screenHeight - _cropTop - _cropBottom) / _pixelDecimation)
: (_screenHeight / _pixelDecimation);
height = (_height > (_cropTop + _cropBottom))
? ((_height - _cropTop - _cropBottom) / _pixelDecimation)
: (_height / _pixelDecimation);
// calculate final image dimensions and adjust top/left cropping in 3D modes
switch (_videoMode)
{
case VideoMode::VIDEO_3DSBS:
_width = width /2;
_height = height;
_calculatedWidth = width /2;
_calculatedHeight = height;
_src_x = _cropLeft / 2;
_src_y = _cropTop;
_src_x_max = (_screenWidth / 2) - _cropRight;
_src_y_max = _screenHeight - _cropBottom;
_src_x_max = (_width / 2) - _cropRight - _cropLeft;
_src_y_max = _height - _cropBottom - _cropTop;
break;
case VideoMode::VIDEO_3DTAB:
_width = width;
_height = height / 2;
_calculatedWidth = width;
_calculatedHeight = height / 2;
_src_x = _cropLeft;
_src_y = _cropTop / 2;
_src_x_max = _screenWidth - _cropRight;
_src_y_max = (_screenHeight / 2) - _cropBottom;
_src_x_max = _width - _cropRight - _cropLeft;
_src_y_max = (_height / 2) - _cropBottom - _cropTop;
break;
case VideoMode::VIDEO_2D:
default:
_width = width;
_height = height;
_calculatedWidth = width;
_calculatedHeight = height;
_src_x = _cropLeft;
_src_y = _cropTop;
_src_x_max = _screenWidth - _cropRight;
_src_y_max = _screenHeight - _cropBottom;
_src_x_max = _width - _cropRight - _cropLeft;
_src_y_max = _height - _cropBottom - _cropTop;
break;
}
Info(_log, "Update output image resolution to [%dx%d]", _width, _height);
Info(_log, "Update output image resolution to [%dx%d]", _calculatedWidth, _calculatedHeight);
return 1;
}
@@ -184,22 +330,136 @@ void QtGrabber::setVideoMode(VideoMode mode)
updateScreenDimensions(true);
}
void QtGrabber::setPixelDecimation(int pixelDecimation)
bool QtGrabber::setPixelDecimation(int pixelDecimation)
{
_pixelDecimation = pixelDecimation;
bool rc (true);
if(Grabber::setPixelDecimation(pixelDecimation))
{
if ( updateScreenDimensions(true) < 0)
{
rc = false;
}
}
return rc;
}
void QtGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
void QtGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
updateScreenDimensions(true);
}
void QtGrabber::setDisplayIndex(int index)
bool QtGrabber::setDisplayIndex(int index)
{
if(_display != unsigned(index))
bool rc (true);
if (_display != index)
{
_display = unsigned(index);
setupDisplay();
if (index <= _numberOfSDisplays)
{
_display = index;
}
else {
_display = 0;
}
rc = setupDisplay();
}
return rc;
}
QJsonObject QtGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if ( open() )
{
QList<QScreen*> screens = QGuiApplication::screens();
if (!screens.isEmpty())
{
inputsDiscovered["device"] = "qt";
inputsDiscovered["device_name"] = "QT";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
for (int i = 0; i < screens.size(); ++i)
{
QJsonObject in;
QString name = screens.at(i)->name();
int pos = name.lastIndexOf('\\');
if (pos != -1)
{
name = name.right(name.length()-pos-1);
}
in["name"] = name;
in["inputIdx"] = i;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
resolution["width"] = screens.at(i)->size().width();
resolution["height"] = screens.at(i)->size().height();
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
if (screens.at(0)->size() != screens.at(0)->virtualSize())
{
QJsonObject in;
in["name"] = "All Displays";
in["inputIdx"] = screens.size();
in["virtual"] = true;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
resolution["width"] = screens.at(0)->virtualSize().width();
resolution["height"] = screens.at(0)->virtualSize().height();
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = _fps;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
if (inputsDiscovered.isEmpty())
{
DebugIf(verbose, _log, "No displays found to capture from!");
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
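For context, the object assembled above follows the device/video_inputs/default layout used by the other grabbers' discovery replies. A minimal consumer might read it as sketched below (hypothetical helper; it only relies on the field names written above).
// Hypothetical reader of the discovery payload returned by QtGrabber::discover() (not part of this change).
#include <QJsonArray>
#include <QJsonObject>
#include <QDebug>
void printDiscoveredScreens(const QJsonObject& discovered)
{
	const QJsonArray inputs = discovered["video_inputs"].toArray();
	for (int i = 0; i < inputs.size(); ++i)
	{
		const QJsonObject in = inputs.at(i).toObject();
		qDebug() << "input" << in["inputIdx"].toInt()
				 << in["name"].toString()
				 << (in["virtual"].toBool() ? "(virtual)" : "");
	}
}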

View File

@@ -1,9 +1,20 @@
#include <grabber/QtWrapper.h>
QtWrapper::QtWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display, unsigned updateRate_Hz)
: GrabberWrapper("Qt", &_grabber, 0, 0, updateRate_Hz)
, _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation, display)
{}
QtWrapper::QtWrapper( int updateRate_Hz,
int display,
int pixelDecimation,
int cropLeft, int cropRight, int cropTop, int cropBottom
)
: GrabberWrapper("Qt", &_grabber, updateRate_Hz)
, _grabber(display, cropLeft, cropRight, cropTop, cropBottom)
{
_grabber.setPixelDecimation(pixelDecimation);
}
bool QtWrapper::open()
{
return _grabber.open();
}
void QtWrapper::action()
{

View File

@@ -1,18 +0,0 @@
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/v4l2)
FILE ( GLOB V4L2_SOURCES "${CURRENT_HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" )
add_library(v4l2-grabber ${V4L2_SOURCES} )
target_link_libraries(v4l2-grabber
hyperion
${QT_LIBRARIES}
)
if(TURBOJPEG_FOUND)
target_link_libraries(v4l2-grabber ${TurboJPEG_LIBRARY})
elseif (JPEG_FOUND)
target_link_libraries(v4l2-grabber ${JPEG_LIBRARY})
endif(TURBOJPEG_FOUND)

File diff suppressed because it is too large

View File

@@ -1,156 +0,0 @@
#include <QMetaType>
#include <grabber/V4L2Wrapper.h>
// qt
#include <QTimer>
V4L2Wrapper::V4L2Wrapper(const QString &device,
unsigned grabWidth,
unsigned grabHeight,
unsigned fps,
unsigned input,
VideoStandard videoStandard,
PixelFormat pixelFormat,
int pixelDecimation )
: GrabberWrapper("V4L2:"+device, &_grabber, grabWidth, grabHeight, 10)
, _grabber(device,
grabWidth,
grabHeight,
fps,
input,
videoStandard,
pixelFormat,
pixelDecimation)
{
_ggrabber = &_grabber;
// register the image type
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
// Handle the image in the captured thread using a direct connection
connect(&_grabber, &V4L2Grabber::newFrame, this, &V4L2Wrapper::newFrame, Qt::DirectConnection);
connect(&_grabber, &V4L2Grabber::readError, this, &V4L2Wrapper::readError, Qt::DirectConnection);
}
V4L2Wrapper::~V4L2Wrapper()
{
stop();
}
bool V4L2Wrapper::start()
{
return ( _grabber.start() && GrabberWrapper::start());
}
void V4L2Wrapper::stop()
{
_grabber.stop();
GrabberWrapper::stop();
}
void V4L2Wrapper::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold)
{
_grabber.setSignalThreshold( redSignalThreshold, greenSignalThreshold, blueSignalThreshold, 50);
}
void V4L2Wrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
_grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
void V4L2Wrapper::setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax)
{
_grabber.setSignalDetectionOffset(verticalMin, horizontalMin, verticalMax, horizontalMax);
}
void V4L2Wrapper::newFrame(const Image<ColorRgb> &image)
{
emit systemImage(_grabberName, image);
}
void V4L2Wrapper::readError(const char* err)
{
Error(_log, "stop grabber, because reading device failed. (%s)", err);
stop();
}
void V4L2Wrapper::action()
{
// dummy as v4l get notifications from stream
}
void V4L2Wrapper::setSignalDetectionEnable(bool enable)
{
_grabber.setSignalDetectionEnable(enable);
}
bool V4L2Wrapper::getSignalDetectionEnable() const
{
return _grabber.getSignalDetectionEnabled();
}
void V4L2Wrapper::setCecDetectionEnable(bool enable)
{
_grabber.setCecDetectionEnable(enable);
}
bool V4L2Wrapper::getCecDetectionEnable() const
{
return _grabber.getCecDetectionEnabled();
}
void V4L2Wrapper::setDeviceVideoStandard(const QString& device, VideoStandard videoStandard)
{
_grabber.setDeviceVideoStandard(device, videoStandard);
}
void V4L2Wrapper::handleCecEvent(CECEvent event)
{
_grabber.handleCecEvent(event);
}
void V4L2Wrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::V4L2 && _grabberName.startsWith("V4L"))
{
// extract settings
const QJsonObject& obj = config.object();
// pixel decimation for v4l
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
// crop for v4l
_grabber.setCropping(
obj["cropLeft"].toInt(0),
obj["cropRight"].toInt(0),
obj["cropTop"].toInt(0),
obj["cropBottom"].toInt(0));
// device input
_grabber.setInput(obj["input"].toInt(-1));
// device resolution
_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0));
// device framerate
_grabber.setFramerate(obj["fps"].toInt(15));
// CEC Standby
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
_grabber.setSignalDetectionOffset(
obj["sDHOffsetMin"].toDouble(0.25),
obj["sDVOffsetMin"].toDouble(0.25),
obj["sDHOffsetMax"].toDouble(0.75),
obj["sDVOffsetMax"].toDouble(0.75));
_grabber.setSignalThreshold(
obj["redSignalThreshold"].toDouble(0.0)/100.0,
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
obj["blueSignalThreshold"].toDouble(0.0)/100.0);
_grabber.setDeviceVideoStandard(
obj["device"].toString("auto"),
parseVideoStandard(obj["standard"].toString("no-change")));
}
}

View File

@@ -0,0 +1,33 @@
# Common cmake definition for external video grabber
# Add Turbo JPEG library
if (ENABLE_V4L2 OR ENABLE_MF)
find_package(TurboJPEG)
if (TURBOJPEG_FOUND)
add_definitions(-DHAVE_TURBO_JPEG)
message( STATUS "Using Turbo JPEG library: ${TurboJPEG_LIBRARY}")
include_directories(${TurboJPEG_INCLUDE_DIRS})
else ()
message( STATUS "Turbo JPEG library not found, MJPEG camera format won't work.")
endif ()
endif()
# Define the wrapper/header/source locations and collect them
SET(WRAPPER_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video)
SET(HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
if (ENABLE_MF)
project(mf-grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/mediafoundation)
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
elseif(ENABLE_V4L2)
project(v4l2-grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/v4l2)
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${HEADER_DIR}/Encoder*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
endif()
add_library(${PROJECT_NAME} ${SOURCES})
target_link_libraries(${PROJECT_NAME} hyperion ${QT_LIBRARIES})
if(TURBOJPEG_FOUND)
target_link_libraries(${PROJECT_NAME} ${TurboJPEG_LIBRARY})
endif()

View File

@@ -0,0 +1,203 @@
#include "grabber/EncoderThread.h"
EncoderThread::EncoderThread()
: _localData(nullptr)
, _scalingFactorsCount(0)
, _imageResampler()
#ifdef HAVE_TURBO_JPEG
, _transform(nullptr)
, _decompress(nullptr)
, _scalingFactors(nullptr)
, _xform(nullptr)
#endif
{}
EncoderThread::~EncoderThread()
{
#ifdef HAVE_TURBO_JPEG
if (_transform)
tjDestroy(_transform);
if (_decompress)
tjDestroy(_decompress);
#endif
if (_localData)
#ifdef HAVE_TURBO_JPEG
tjFree(_localData);
#else
delete[] _localData;
#endif
}
void EncoderThread::setup(
PixelFormat pixelFormat, uint8_t* sharedData,
int size, int width, int height, int lineLength,
unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
VideoMode videoMode, FlipMode flipMode, int pixelDecimation)
{
_lineLength = lineLength;
_pixelFormat = pixelFormat;
_size = (unsigned long) size;
_width = width;
_height = height;
_cropLeft = cropLeft;
_cropTop = cropTop;
_cropBottom = cropBottom;
_cropRight = cropRight;
_flipMode = flipMode;
_pixelDecimation = pixelDecimation;
_imageResampler.setVideoMode(videoMode);
_imageResampler.setFlipMode(_flipMode);
_imageResampler.setCropping(cropLeft, cropRight, cropTop, cropBottom);
_imageResampler.setHorizontalPixelDecimation(_pixelDecimation);
_imageResampler.setVerticalPixelDecimation(_pixelDecimation);
#ifdef HAVE_TURBO_JPEG
if (_localData)
tjFree(_localData);
_localData = (uint8_t*)tjAlloc(size + 1);
#else
delete[] _localData;
_localData = nullptr;
_localData = new uint8_t[size + 1];
#endif
memcpy(_localData, sharedData, size);
}
void EncoderThread::process()
{
_busy = true;
if (_width > 0 && _height > 0)
{
#ifdef HAVE_TURBO_JPEG
if (_pixelFormat == PixelFormat::MJPEG)
{
processImageMjpeg();
}
else
#endif
{
if (_pixelFormat == PixelFormat::BGR24)
{
if (_flipMode == FlipMode::NO_CHANGE)
_imageResampler.setFlipMode(FlipMode::HORIZONTAL);
else if (_flipMode == FlipMode::HORIZONTAL)
_imageResampler.setFlipMode(FlipMode::NO_CHANGE);
else if (_flipMode == FlipMode::VERTICAL)
_imageResampler.setFlipMode(FlipMode::BOTH);
else if (_flipMode == FlipMode::BOTH)
_imageResampler.setFlipMode(FlipMode::VERTICAL);
}
Image<ColorRgb> image = Image<ColorRgb>();
_imageResampler.processImage(
_localData,
_width,
_height,
_lineLength,
#if defined(ENABLE_V4L2)
_pixelFormat,
#else
PixelFormat::BGR24,
#endif
image
);
emit newFrame(image);
}
}
_busy = false;
}
#ifdef HAVE_TURBO_JPEG
void EncoderThread::processImageMjpeg()
{
if (!_transform && _flipMode != FlipMode::NO_CHANGE)
{
_transform = tjInitTransform();
_xform = new tjtransform();
}
if (_flipMode == FlipMode::BOTH || _flipMode == FlipMode::HORIZONTAL)
{
_xform->op = TJXOP_HFLIP;
tjTransform(_transform, _localData, _size, 1, &_localData, &_size, _xform, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE);
}
if (_flipMode == FlipMode::BOTH || _flipMode == FlipMode::VERTICAL)
{
_xform->op = TJXOP_VFLIP;
tjTransform(_transform, _localData, _size, 1, &_localData, &_size, _xform, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE);
}
if (!_decompress)
{
_decompress = tjInitDecompress();
_scalingFactors = tjGetScalingFactors(&_scalingFactorsCount);
}
int subsamp = 0;
if (tjDecompressHeader2(_decompress, _localData, _size, &_width, &_height, &subsamp) != 0)
return;
int scaledWidth = _width, scaledHeight = _height;
if(_scalingFactors != nullptr && _pixelDecimation > 1)
{
for (int i = 0; i < _scalingFactorsCount ; i++)
{
const int tempWidth = TJSCALED(_width, _scalingFactors[i]);
const int tempHeight = TJSCALED(_height, _scalingFactors[i]);
if (tempWidth <= _width/_pixelDecimation && tempHeight <= _height/_pixelDecimation)
{
scaledWidth = tempWidth;
scaledHeight = tempHeight;
break;
}
}
if (scaledWidth == _width && scaledHeight == _height)
{
scaledWidth = TJSCALED(_width, _scalingFactors[_scalingFactorsCount-1]);
scaledHeight = TJSCALED(_height, _scalingFactors[_scalingFactorsCount-1]);
}
}
Image<ColorRgb> srcImage(scaledWidth, scaledHeight);
if (tjDecompress2(_decompress, _localData , _size, (unsigned char*)srcImage.memptr(), scaledWidth, 0, scaledHeight, TJPF_RGB, TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE) != 0)
return;
// got image, process it
if (!(_cropLeft > 0 || _cropTop > 0 || _cropBottom > 0 || _cropRight > 0))
emit newFrame(srcImage);
else
{
// calculate the output size
int outputWidth = (_width - _cropLeft - _cropRight);
int outputHeight = (_height - _cropTop - _cropBottom);
if (outputWidth <= 0 || outputHeight <= 0)
return;
Image<ColorRgb> destImage(outputWidth, outputHeight);
for (unsigned int y = 0; y < destImage.height(); y++)
{
unsigned char* source = (unsigned char*)srcImage.memptr() + (y + _cropTop)*srcImage.width()*3 + _cropLeft*3;
unsigned char* dest = (unsigned char*)destImage.memptr() + y*destImage.width()*3;
memcpy(dest, source, destImage.width()*3);
}
// emit
emit newFrame(destImage);
}
}
#endif
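The cropping branch above copies each output row out of the decompressed frame with plain pointer arithmetic; both pointers address memory owned by the two Image objects, so nothing is allocated or released per row. A standalone sketch of the same copy on raw buffers (assumed helper, not part of this change):
// Row-wise RGB crop copy, as used above, on plain buffers (illustrative only).
#include <cstring>
#include <vector>
void cropRgb24(const std::vector<unsigned char>& src, int srcWidth,
               std::vector<unsigned char>& dst, int dstWidth, int dstHeight,
               int cropLeft, int cropTop)
{
	for (int y = 0; y < dstHeight; ++y)
	{
		const unsigned char* source = src.data() + ((y + cropTop) * srcWidth + cropLeft) * 3;
		unsigned char* dest = dst.data() + y * dstWidth * 3;
		std::memcpy(dest, source, size_t(dstWidth) * 3); // pointers remain owned by the vectors
	}
}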

View File

@@ -0,0 +1,149 @@
#include <QMetaType>
#include <grabber/VideoWrapper.h>
// qt includes
#include <QTimer>
VideoWrapper::VideoWrapper()
#if defined(ENABLE_V4L2)
: GrabberWrapper("V4L2", &_grabber)
#elif defined(ENABLE_MF)
: GrabberWrapper("V4L2:MEDIA_FOUNDATION", &_grabber)
#endif
, _grabber()
{
// register the image type
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
// Handle the image in the captured thread (Media Foundation/V4L2) using a direct connection
connect(&_grabber, SIGNAL(newFrame(const Image<ColorRgb>&)), this, SLOT(newFrame(const Image<ColorRgb>&)), Qt::DirectConnection);
connect(&_grabber, SIGNAL(readError(const char*)), this, SLOT(readError(const char*)), Qt::DirectConnection);
}
VideoWrapper::~VideoWrapper()
{
stop();
}
bool VideoWrapper::start()
{
return (_grabber.prepare() && _grabber.start() && GrabberWrapper::start());
}
void VideoWrapper::stop()
{
_grabber.stop();
GrabberWrapper::stop();
}
#if defined(ENABLE_CEC) && !defined(ENABLE_MF)
void VideoWrapper::handleCecEvent(CECEvent event)
{
_grabber.handleCecEvent(event);
}
#endif
void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::V4L2 && _grabberName.startsWith("V4L2"))
{
// extract settings
const QJsonObject& obj = config.object();
// set global grabber state
setV4lGrabberState(obj["enable"].toBool(false));
if (getV4lGrabberState())
{
#if defined(ENABLE_MF)
// Device path
_grabber.setDevice(obj["device"].toString("none"));
#endif
#if defined(ENABLE_V4L2)
// Device path and name
_grabber.setDevice(obj["device"].toString("none"), obj["available_devices"].toString("none"));
#endif
// Device input
_grabber.setInput(obj["input"].toInt(0));
// Device resolution
_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0));
// Device framerate
_grabber.setFramerate(obj["fps"].toInt(15));
// Device encoding format
_grabber.setEncoding(obj["encoding"].toString("NO_CHANGE"));
// Video standard
_grabber.setVideoStandard(parseVideoStandard(obj["standard"].toString("NO_CHANGE")));
// Image size decimation
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
// Flip mode
_grabber.setFlipMode(parseFlipMode(obj["flip"].toString("NO_CHANGE")));
// Image cropping
_grabber.setCropping(
obj["cropLeft"].toInt(0),
obj["cropRight"].toInt(0),
obj["cropTop"].toInt(0),
obj["cropBottom"].toInt(0));
// Brightness, Contrast, Saturation, Hue
_grabber.setBrightnessContrastSaturationHue(
obj["hardware_brightness"].toInt(0),
obj["hardware_contrast"].toInt(0),
obj["hardware_saturation"].toInt(0),
obj["hardware_hue"].toInt(0));
#if defined(ENABLE_CEC) && defined(ENABLE_V4L2)
// CEC Standby
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
#endif
// Software frame skipping
_grabber.setFpsSoftwareDecimation(obj["fpsSoftwareDecimation"].toInt(1));
// Signal detection
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
_grabber.setSignalDetectionOffset(
obj["sDHOffsetMin"].toDouble(0.25),
obj["sDVOffsetMin"].toDouble(0.25),
obj["sDHOffsetMax"].toDouble(0.75),
obj["sDVOffsetMax"].toDouble(0.75));
_grabber.setSignalThreshold(
obj["redSignalThreshold"].toDouble(0.0)/100.0,
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
obj["blueSignalThreshold"].toDouble(0.0)/100.0,
obj["noSignalCounterThreshold"].toInt(50));
// Reload the Grabber if any settings have been changed that require it
_grabber.reload(getV4lGrabberState());
}
else
stop();
}
}
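The handler above reads every grabber option from a single JSON object, so a configuration update is one document containing the keys queried above. A minimal example object might be built as follows (sketch only; key names copied from the reads above, values made up for illustration). An update would then be delivered as handleSettingsUpdate(settings::V4L2, makeVideoGrabberSettings()).
// Hypothetical minimal settings document for the handler above (illustrative values).
#include <QJsonDocument>
#include <QJsonObject>
QJsonDocument makeVideoGrabberSettings()
{
	QJsonObject obj;
	obj["enable"] = true;
	obj["device"] = "none";          // placeholder device path
	obj["input"] = 0;
	obj["width"] = 640;
	obj["height"] = 480;
	obj["fps"] = 25;
	obj["encoding"] = "NO_CHANGE";
	obj["sizeDecimation"] = 8;
	obj["signalDetection"] = false;
	return QJsonDocument(obj);
}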
void VideoWrapper::newFrame(const Image<ColorRgb> &image)
{
emit systemImage(_grabberName, image);
}
void VideoWrapper::readError(const char* err)
{
Error(_log, "Stop grabber, because reading device failed. (%s)", err);
stop();
}
void VideoWrapper::action()
{
// dummy as v4l get notifications from stream
}

View File

@@ -0,0 +1,813 @@
#include "MFSourceReaderCB.h"
#include "grabber/MFGrabber.h"
// Constants
namespace { const bool verbose = false; }
// Need more video properties? Visit https://docs.microsoft.com/en-us/windows/win32/api/strmif/ne-strmif-videoprocampproperty
using VideoProcAmpPropertyMap = QMap<VideoProcAmpProperty, QString>;
inline QMap<VideoProcAmpProperty, QString> initVideoProcAmpPropertyMap()
{
QMap<VideoProcAmpProperty, QString> propertyMap
{
{VideoProcAmp_Brightness, "brightness" },
{VideoProcAmp_Contrast , "contrast" },
{VideoProcAmp_Saturation, "saturation" },
{VideoProcAmp_Hue , "hue" }
};
return propertyMap;
};
Q_GLOBAL_STATIC_WITH_ARGS(VideoProcAmpPropertyMap, _videoProcAmpPropertyMap, (initVideoProcAmpPropertyMap()));
MFGrabber::MFGrabber()
: Grabber("V4L2:MEDIA_FOUNDATION")
, _currentDeviceName("none")
, _newDeviceName("none")
, _hr(S_FALSE)
, _sourceReader(nullptr)
, _sourceReaderCB(nullptr)
, _threadManager(nullptr)
, _pixelFormat(PixelFormat::NO_CHANGE)
, _pixelFormatConfig(PixelFormat::NO_CHANGE)
, _lineLength(-1)
, _frameByteSize(-1)
, _noSignalCounterThreshold(40)
, _noSignalCounter(0)
, _brightness(0)
, _contrast(0)
, _saturation(0)
, _hue(0)
, _currentFrame(0)
, _noSignalThresholdColor(ColorRgb{0,0,0})
, _signalDetectionEnabled(true)
, _noSignalDetected(false)
, _initialized(false)
, _reload(false)
, _x_frac_min(0.25)
, _y_frac_min(0.25)
, _x_frac_max(0.75)
, _y_frac_max(0.75)
{
CoInitializeEx(0, COINIT_MULTITHREADED);
_hr = MFStartup(MF_VERSION, MFSTARTUP_NOSOCKET);
if (FAILED(_hr))
CoUninitialize();
}
MFGrabber::~MFGrabber()
{
uninit();
SAFE_RELEASE(_sourceReader);
if (_sourceReaderCB != nullptr)
while (_sourceReaderCB->isBusy()) {}
SAFE_RELEASE(_sourceReaderCB);
if (_threadManager)
delete _threadManager;
_threadManager = nullptr;
if (SUCCEEDED(_hr) && SUCCEEDED(MFShutdown()))
CoUninitialize();
}
bool MFGrabber::prepare()
{
if (SUCCEEDED(_hr))
{
if (!_sourceReaderCB)
_sourceReaderCB = new SourceReaderCB(this);
if (!_threadManager)
_threadManager = new EncoderThreadManager(this);
return (_sourceReaderCB != nullptr && _threadManager != nullptr);
}
return false;
}
bool MFGrabber::start()
{
if (!_initialized)
{
if (init())
{
connect(_threadManager, &EncoderThreadManager::newFrame, this, &MFGrabber::newThreadFrame);
_threadManager->start();
DebugIf(verbose, _log, "Decoding threads: %d", _threadManager->_threadCount);
start_capturing();
Info(_log, "Started");
return true;
}
else
{
Error(_log, "The Media Foundation Grabber could not be started");
return false;
}
}
else
return true;
}
void MFGrabber::stop()
{
if (_initialized)
{
_initialized = false;
_threadManager->stop();
disconnect(_threadManager, nullptr, nullptr, nullptr);
_sourceReader->Flush(MF_SOURCE_READER_FIRST_VIDEO_STREAM);
SAFE_RELEASE(_sourceReader);
_deviceProperties.clear();
_deviceControls.clear();
Info(_log, "Stopped");
}
}
bool MFGrabber::init()
{
// enumerate the video capture devices on the user's system
enumVideoCaptureDevices();
if (!_initialized && SUCCEEDED(_hr))
{
int deviceIndex = -1;
bool noDeviceName = _currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName.compare("auto", Qt::CaseInsensitive) == 0;
if (noDeviceName)
return false;
if (!_deviceProperties.contains(_currentDeviceName))
{
Debug(_log, "Configured device '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
return false;
}
Debug(_log, "Searching for %s %d x %d @ %d fps (%s)", QSTRING_CSTR(_currentDeviceName), _width, _height,_fps, QSTRING_CSTR(pixelFormatToString(_pixelFormat)));
QList<DeviceProperties> dev = _deviceProperties[_currentDeviceName];
for ( int i = 0; i < dev.count() && deviceIndex < 0; ++i )
{
if (dev[i].width != _width || dev[i].height != _height || dev[i].fps != _fps || dev[i].pf != _pixelFormat)
continue;
else
deviceIndex = i;
}
if (deviceIndex >= 0 && SUCCEEDED(init_device(_currentDeviceName, dev[deviceIndex])))
{
_initialized = true;
_newDeviceName = _currentDeviceName;
}
else
{
Debug(_log, "Configured device '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
return false;
}
}
return _initialized;
}
void MFGrabber::uninit()
{
// stop if the grabber was not stopped
if (_initialized)
{
Debug(_log,"Uninit grabber: %s", QSTRING_CSTR(_newDeviceName));
stop();
}
}
HRESULT MFGrabber::init_device(QString deviceName, DeviceProperties props)
{
PixelFormat pixelformat = GetPixelFormatForGuid(props.guid);
QString error;
IMFMediaSource* device = nullptr;
IMFAttributes* deviceAttributes = nullptr, *sourceReaderAttributes = nullptr;
IMFMediaType* type = nullptr;
HRESULT hr = S_OK;
Debug(_log, "Init %s, %d x %d @ %d fps (%s)", QSTRING_CSTR(deviceName), props.width, props.height, props.fps, QSTRING_CSTR(pixelFormatToString(pixelformat)));
DebugIf (verbose, _log, "Symbolic link: %s", QSTRING_CSTR(props.symlink));
hr = MFCreateAttributes(&deviceAttributes, 2);
if (FAILED(hr))
{
error = QString("Could not create device attributes (%1)").arg(hr);
goto done;
}
hr = deviceAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
if (FAILED(hr))
{
error = QString("SetGUID_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE (%1)").arg(hr);
goto done;
}
if (FAILED(deviceAttributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, (LPCWSTR)props.symlink.utf16())))
{
error = QString("IMFAttributes_SetString_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK (%1)").arg(hr);
goto done;
}
hr = MFCreateDeviceSource(deviceAttributes, &device);
if (FAILED(hr))
{
error = QString("MFCreateDeviceSource (%1)").arg(hr);
goto done;
}
if (!device)
{
error = QString("Could not open device (%1)").arg(hr);
goto done;
}
else
Debug(_log, "Device opened");
IAMVideoProcAmp *pProcAmp = nullptr;
if (SUCCEEDED(device->QueryInterface(IID_PPV_ARGS(&pProcAmp))))
{
for (auto control : _deviceControls[deviceName])
{
switch (_videoProcAmpPropertyMap->key(control.property))
{
case VideoProcAmpProperty::VideoProcAmp_Brightness:
if (_brightness >= control.minValue && _brightness <= control.maxValue && _brightness != control.currentValue)
{
Debug(_log,"Set brightness to %i", _brightness);
pProcAmp->Set(VideoProcAmp_Brightness, _brightness, VideoProcAmp_Flags_Manual);
}
break;
case VideoProcAmpProperty::VideoProcAmp_Contrast:
if (_contrast >= control.minValue && _contrast <= control.maxValue && _contrast != control.currentValue)
{
Debug(_log,"Set contrast to %i", _contrast);
pProcAmp->Set(VideoProcAmp_Contrast, _contrast, VideoProcAmp_Flags_Manual);
}
break;
case VideoProcAmpProperty::VideoProcAmp_Saturation:
if (_saturation >= control.minValue && _saturation <= control.maxValue && _saturation != control.currentValue)
{
Debug(_log,"Set saturation to %i", _saturation);
pProcAmp->Set(VideoProcAmp_Saturation, _saturation, VideoProcAmp_Flags_Manual);
}
break;
case VideoProcAmpProperty::VideoProcAmp_Hue:
if (_hue >= control.minValue && _hue <= control.maxValue && _hue != control.currentValue)
{
Debug(_log,"Set hue to %i", _hue);
pProcAmp->Set(VideoProcAmp_Hue, _hue, VideoProcAmp_Flags_Manual);
}
break;
default:
break;
}
}
}
hr = MFCreateAttributes(&sourceReaderAttributes, 1);
if (FAILED(hr))
{
error = QString("Could not create Source Reader attributes (%1)").arg(hr);
goto done;
}
hr = sourceReaderAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, (IMFSourceReaderCallback *)_sourceReaderCB);
if (FAILED(hr))
{
error = QString("Could not set stream parameter: SetUnknown_MF_SOURCE_READER_ASYNC_CALLBACK (%1)").arg(hr);
hr = E_INVALIDARG;
goto done;
}
hr = MFCreateSourceReaderFromMediaSource(device, sourceReaderAttributes, &_sourceReader);
if (FAILED(hr))
{
error = QString("Could not create the Source Reader (%1)").arg(hr);
goto done;
}
hr = MFCreateMediaType(&type);
if (FAILED(hr))
{
error = QString("Could not create an empty media type (%1)").arg(hr);
goto done;
}
hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
if (FAILED(hr))
{
error = QString("Could not set stream parameter: SetGUID_MF_MT_MAJOR_TYPE (%1)").arg(hr);
goto done;
}
hr = type->SetGUID(MF_MT_SUBTYPE, props.guid);
if (FAILED(hr))
{
error = QString("Could not set stream parameter: SetGUID_MF_MT_SUBTYPE (%1)").arg(hr);
goto done;
}
hr = MFSetAttributeSize(type, MF_MT_FRAME_SIZE, props.width, props.height);
if (FAILED(hr))
{
error = QString("Could not set stream parameter: SMFSetAttributeSize_MF_MT_FRAME_SIZE (%1)").arg(hr);
goto done;
}
hr = MFSetAttributeSize(type, MF_MT_FRAME_RATE, props.numerator, props.denominator);
if (FAILED(hr))
{
error = QString("Could not set stream parameter: MFSetAttributeSize_MF_MT_FRAME_RATE (%1)").arg(hr);
goto done;
}
hr = MFSetAttributeRatio(type, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
if (FAILED(hr))
{
error = QString("Could not set stream parameter: MFSetAttributeRatio_MF_MT_PIXEL_ASPECT_RATIO (%1)").arg(hr);
goto done;
}
hr = _sourceReaderCB->InitializeVideoEncoder(type, pixelformat);
if (FAILED(hr))
{
error = QString("Failed to initialize the Video Encoder (%1)").arg(hr);
goto done;
}
hr = _sourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, nullptr, type);
if (FAILED(hr))
{
error = QString("Failed to set media type on Source Reader (%1)").arg(hr);
}
done:
if (FAILED(hr))
{
emit readError(QSTRING_CSTR(error));
SAFE_RELEASE(_sourceReader);
}
else
{
_pixelFormat = props.pf;
_width = props.width;
_height = props.height;
_frameByteSize = _width * _height * 3;
_lineLength = _width * 3;
}
// Cleanup
SAFE_RELEASE(deviceAttributes);
SAFE_RELEASE(device);
SAFE_RELEASE(pProcAmp);
SAFE_RELEASE(type);
SAFE_RELEASE(sourceReaderAttributes);
return hr;
}
void MFGrabber::enumVideoCaptureDevices()
{
_deviceProperties.clear();
_deviceControls.clear();
IMFAttributes* attr;
if (SUCCEEDED(MFCreateAttributes(&attr, 1)))
{
if (SUCCEEDED(attr->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID)))
{
UINT32 count;
IMFActivate** devices;
if (SUCCEEDED(MFEnumDeviceSources(attr, &devices, &count)))
{
DebugIf (verbose, _log, "Detected devices: %u", count);
for (UINT32 i = 0; i < count; i++)
{
UINT32 length;
LPWSTR name;
LPWSTR symlink;
if (SUCCEEDED(devices[i]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &length)))
{
if (SUCCEEDED(devices[i]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &symlink, &length)))
{
QList<DeviceProperties> devicePropertyList;
QString dev = QString::fromUtf16((const ushort*)name);
IMFMediaSource *pSource = nullptr;
if (SUCCEEDED(devices[i]->ActivateObject(IID_PPV_ARGS(&pSource))))
{
DebugIf (verbose, _log, "Found capture device: %s", QSTRING_CSTR(dev));
IMFMediaType *pType = nullptr;
IMFSourceReader* reader;
if (SUCCEEDED(MFCreateSourceReaderFromMediaSource(pSource, NULL, &reader)))
{
for (DWORD i = 0; ; i++)
{
if (FAILED(reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i, &pType)))
break;
GUID format;
UINT32 width = 0, height = 0, numerator = 0, denominator = 0;
if ( SUCCEEDED(pType->GetGUID(MF_MT_SUBTYPE, &format)) &&
SUCCEEDED(MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height)) &&
SUCCEEDED(MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &numerator, &denominator)))
{
PixelFormat pixelformat = GetPixelFormatForGuid(format);
if (pixelformat != PixelFormat::NO_CHANGE)
{
DeviceProperties properties;
properties.symlink = QString::fromUtf16((const ushort*)symlink);
properties.width = width;
properties.height = height;
properties.fps = numerator / denominator;
properties.numerator = numerator;
properties.denominator = denominator;
properties.pf = pixelformat;
properties.guid = format;
devicePropertyList.append(properties);
DebugIf (verbose, _log, "%s %d x %d @ %d fps (%s)", QSTRING_CSTR(dev), properties.width, properties.height, properties.fps, QSTRING_CSTR(pixelFormatToString(properties.pf)));
}
}
SAFE_RELEASE(pType);
}
IAMVideoProcAmp *videoProcAmp = nullptr;
if (SUCCEEDED(pSource->QueryInterface(IID_PPV_ARGS(&videoProcAmp))))
{
QList<DeviceControls> deviceControlList;
for (auto it = _videoProcAmpPropertyMap->begin(); it != _videoProcAmpPropertyMap->end(); it++)
{
long minVal, maxVal, stepVal, defaultVal, flag;
if (SUCCEEDED(videoProcAmp->GetRange(it.key(), &minVal, &maxVal, &stepVal, &defaultVal, &flag)))
{
if (flag & VideoProcAmp_Flags_Manual)
{
DeviceControls control;
control.property = it.value();
control.minValue = minVal;
control.maxValue = maxVal;
control.step = stepVal;
control.defaultValue = defaultVal;
long currentVal;
if (SUCCEEDED(videoProcAmp->Get(it.key(), &currentVal, &flag)))
{
control.currentValue = currentVal;
DebugIf(verbose, _log, "%s: min=%i, max=%i, step=%i, default=%i, current=%i", QSTRING_CSTR(it.value()), minVal, maxVal, stepVal, defaultVal, currentVal);
}
else
break;
deviceControlList.append(control);
}
}
}
if (!deviceControlList.isEmpty())
_deviceControls.insert(dev, deviceControlList);
}
SAFE_RELEASE(videoProcAmp);
SAFE_RELEASE(reader);
}
SAFE_RELEASE(pSource);
}
if (!devicePropertyList.isEmpty())
_deviceProperties.insert(dev, devicePropertyList);
}
CoTaskMemFree(symlink);
}
CoTaskMemFree(name);
SAFE_RELEASE(devices[i]);
}
CoTaskMemFree(devices);
}
SAFE_RELEASE(attr);
}
}
}
void MFGrabber::start_capturing()
{
if (_initialized && _sourceReader && _threadManager)
{
HRESULT hr = _sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
if (!SUCCEEDED(hr))
Error(_log, "ReadSample (%i)", hr);
}
}
void MFGrabber::process_image(const void *frameImageBuffer, int size)
{
int processFrameIndex = _currentFrame++;
// frame skipping
if ((processFrameIndex % (_fpsSoftwareDecimation + 1) != 0) && (_fpsSoftwareDecimation > 0))
return;
// We do want a new frame...
if (size < _frameByteSize && _pixelFormat != PixelFormat::MJPEG)
Error(_log, "Frame too small: %d != %d", size, _frameByteSize);
else if (_threadManager != nullptr)
{
for (int i = 0; i < _threadManager->_threadCount; i++)
{
if (!_threadManager->_threads[i]->isBusy())
{
_threadManager->_threads[i]->setup(_pixelFormat, (uint8_t*)frameImageBuffer, size, _width, _height, _lineLength, _cropLeft, _cropTop, _cropBottom, _cropRight, _videoMode, _flipMode, _pixelDecimation);
_threadManager->_threads[i]->process();
break;
}
}
}
}
void MFGrabber::receive_image(const void *frameImageBuffer, int size)
{
process_image(frameImageBuffer, size);
start_capturing();
}
void MFGrabber::newThreadFrame(Image<ColorRgb> image)
{
if (_signalDetectionEnabled)
{
// check signal (only in center of the resulting image, because some grabbers have noise values along the borders)
bool noSignal = true;
// top left
unsigned xOffset = image.width() * _x_frac_min;
unsigned yOffset = image.height() * _y_frac_min;
// bottom right
unsigned xMax = image.width() * _x_frac_max;
unsigned yMax = image.height() * _y_frac_max;
for (unsigned x = xOffset; noSignal && x < xMax; ++x)
for (unsigned y = yOffset; noSignal && y < yMax; ++y)
noSignal &= (ColorRgb&)image(x, y) <= _noSignalThresholdColor;
if (noSignal)
++_noSignalCounter;
else
{
if (_noSignalCounter >= _noSignalCounterThreshold)
{
_noSignalDetected = true;
Info(_log, "Signal detected");
}
_noSignalCounter = 0;
}
if ( _noSignalCounter < _noSignalCounterThreshold)
{
emit newFrame(image);
}
else if (_noSignalCounter == _noSignalCounterThreshold)
{
_noSignalDetected = false;
Info(_log, "Signal lost");
}
}
else
emit newFrame(image);
}
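The fractional offsets above define the centre window that is compared against the no-signal threshold; with the default 0.25/0.75 values only the middle quarter of the frame is inspected. A small worked example (assumed 640x480 frame, not part of this change):
// Worked example of the detection window computed above (assumed 640x480 frame, default fractions).
#include <cassert>
int main()
{
	const unsigned width = 640, height = 480;
	const double xFracMin = 0.25, yFracMin = 0.25, xFracMax = 0.75, yFracMax = 0.75;
	const unsigned xOffset = unsigned(width * xFracMin);  // 160
	const unsigned yOffset = unsigned(height * yFracMin); // 120
	const unsigned xMax    = unsigned(width * xFracMax);  // 480
	const unsigned yMax    = unsigned(height * yFracMax); // 360
	// Only the central 320x240 region (a quarter of the frame area) is checked.
	assert((xMax - xOffset) * (yMax - yOffset) == 320u * 240u);
	return 0;
}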
void MFGrabber::setDevice(const QString& device)
{
if (_currentDeviceName != device)
{
_currentDeviceName = device;
_reload = true;
}
}
bool MFGrabber::setInput(int input)
{
if (Grabber::setInput(input))
{
_reload = true;
return true;
}
return false;
}
bool MFGrabber::setWidthHeight(int width, int height)
{
if (Grabber::setWidthHeight(width, height))
{
_reload = true;
return true;
}
return false;
}
void MFGrabber::setEncoding(QString enc)
{
if (_pixelFormatConfig != parsePixelFormat(enc))
{
_pixelFormatConfig = parsePixelFormat(enc);
if (_initialized)
{
Debug(_log,"Set hardware encoding to: %s", QSTRING_CSTR(enc.toUpper()));
_reload = true;
}
else
_pixelFormat = _pixelFormatConfig;
}
}
void MFGrabber::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue)
{
if (_brightness != brightness || _contrast != contrast || _saturation != saturation || _hue != hue)
{
_brightness = brightness;
_contrast = contrast;
_saturation = saturation;
_hue = hue;
_reload = true;
}
}
void MFGrabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
{
_noSignalThresholdColor.red = uint8_t(255*redSignalThreshold);
_noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold);
_noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold);
_noSignalCounterThreshold = qMax(1, noSignalCounterThreshold);
if (_signalDetectionEnabled)
Info(_log, "Signal threshold set to: {%d, %d, %d} and frames: %d", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue, _noSignalCounterThreshold );
}
void MFGrabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax)
{
// Typical detection areas: rainbow test pattern (16 stripes): 0.47 0.2 0.49 0.8
// unicolor: 0.25 0.25 0.75 0.75
_x_frac_min = horizontalMin;
_y_frac_min = verticalMin;
_x_frac_max = horizontalMax;
_y_frac_max = verticalMax;
if (_signalDetectionEnabled)
Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max );
}
void MFGrabber::setSignalDetectionEnable(bool enable)
{
if (_signalDetectionEnabled != enable)
{
_signalDetectionEnabled = enable;
if (_initialized)
Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled");
}
}
bool MFGrabber::reload(bool force)
{
if (_reload || force)
{
if (_sourceReader)
{
Info(_log,"Reloading Media Foundation Grabber");
uninit();
_pixelFormat = _pixelFormatConfig;
_newDeviceName = _currentDeviceName;
}
_reload = false;
return prepare() && start();
}
return false;
}
QJsonArray MFGrabber::discover(const QJsonObject& params)
{
DebugIf (verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
enumVideoCaptureDevices();
QJsonArray inputsDiscovered;
for (auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
{
QJsonObject device, in;
QJsonArray video_inputs, formats;
device["device"] = it.key();
device["device_name"] = it.key();
device["type"] = "v4l2";
in["name"] = "";
in["inputIdx"] = 0;
QStringList encodingFormats = QStringList();
for (int i = 0; i < _deviceProperties[it.key()].count(); ++i )
if (!encodingFormats.contains(pixelFormatToString(_deviceProperties[it.key()][i].pf), Qt::CaseInsensitive))
encodingFormats << pixelFormatToString(_deviceProperties[it.key()][i].pf).toLower();
for (auto encodingFormat : encodingFormats)
{
QJsonObject format;
QJsonArray resolutionArray;
format["format"] = encodingFormat;
QMultiMap<int, int> deviceResolutions = QMultiMap<int, int>();
for (int i = 0; i < _deviceProperties[it.key()].count(); ++i )
if (!deviceResolutions.contains(_deviceProperties[it.key()][i].width, _deviceProperties[it.key()][i].height) && _deviceProperties[it.key()][i].pf == parsePixelFormat(encodingFormat))
deviceResolutions.insert(_deviceProperties[it.key()][i].width, _deviceProperties[it.key()][i].height);
for (auto width_height = deviceResolutions.begin(); width_height != deviceResolutions.end(); width_height++)
{
QJsonObject resolution;
QJsonArray fps;
resolution["width"] = width_height.key();
resolution["height"] = width_height.value();
QIntList framerates = QIntList();
for (int i = 0; i < _deviceProperties[it.key()].count(); ++i )
{
int fps = _deviceProperties[it.key()][i].numerator / _deviceProperties[it.key()][i].denominator;
if (!framerates.contains(fps) && _deviceProperties[it.key()][i].pf == parsePixelFormat(encodingFormat) && _deviceProperties[it.key()][i].width == width_height.key() && _deviceProperties[it.key()][i].height == width_height.value())
framerates << fps;
}
for (auto framerate : framerates)
fps.append(framerate);
resolution["fps"] = fps;
resolutionArray.append(resolution);
}
format["resolutions"] = resolutionArray;
formats.append(format);
}
in["formats"] = formats;
video_inputs.append(in);
device["video_inputs"] = video_inputs;
QJsonObject controls, controls_default;
for (auto control : _deviceControls[it.key()])
{
QJsonObject property;
property["minValue"] = control.minValue;
property["maxValue"] = control.maxValue;
property["step"] = control.step;
property["current"] = control.currentValue;
controls[control.property] = property;
controls_default[control.property] = control.defaultValue;
}
device["properties"] = controls;
QJsonObject defaults, video_inputs_default, format_default, resolution_default;
resolution_default["width"] = 640;
resolution_default["height"] = 480;
resolution_default["fps"] = 25;
format_default["format"] = "bgr24";
format_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
video_inputs_default["standards"] = "PAL";
video_inputs_default["formats"] = format_default;
defaults["video_input"] = video_inputs_default;
defaults["properties"] = controls_default;
device["default"] = defaults;
inputsDiscovered.append(device);
}
_deviceProperties.clear();
_deviceControls.clear();
DebugIf (verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}

View File

@@ -0,0 +1,401 @@
#pragma once
#include <mfapi.h>
#include <mftransform.h>
#include <dmo.h>
#include <wmcodecdsp.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <shlwapi.h>
#include <mferror.h>
#include <strmif.h>
#include <comdef.h>
#pragma comment (lib, "ole32.lib")
#pragma comment (lib, "mf.lib")
#pragma comment (lib, "mfplat.lib")
#pragma comment (lib, "mfuuid.lib")
#pragma comment (lib, "mfreadwrite.lib")
#pragma comment (lib, "strmiids.lib")
#pragma comment (lib, "wmcodecdspuuid.lib")
#include <grabber/MFGrabber.h>
#define SAFE_RELEASE(x) if(x) { x->Release(); x = nullptr; }
// Need more supported formats? Visit https://docs.microsoft.com/en-us/windows/win32/medfound/colorconverter
static PixelFormat GetPixelFormatForGuid(const GUID guid)
{
if (IsEqualGUID(guid, MFVideoFormat_RGB32)) return PixelFormat::RGB32;
if (IsEqualGUID(guid, MFVideoFormat_RGB24)) return PixelFormat::BGR24;
if (IsEqualGUID(guid, MFVideoFormat_YUY2)) return PixelFormat::YUYV;
if (IsEqualGUID(guid, MFVideoFormat_UYVY)) return PixelFormat::UYVY;
if (IsEqualGUID(guid, MFVideoFormat_MJPG)) return PixelFormat::MJPEG;
if (IsEqualGUID(guid, MFVideoFormat_NV12)) return PixelFormat::NV12;
if (IsEqualGUID(guid, MFVideoFormat_I420)) return PixelFormat::I420;
return PixelFormat::NO_CHANGE;
};
class SourceReaderCB : public IMFSourceReaderCallback
{
public:
SourceReaderCB(MFGrabber* grabber)
: _nRefCount(1)
, _grabber(grabber)
, _bEOS(FALSE)
, _hrStatus(S_OK)
, _isBusy(false)
, _transform(nullptr)
, _pixelformat(PixelFormat::NO_CHANGE)
{
// Initialize critical section.
InitializeCriticalSection(&_critsec);
}
// IUnknown methods
STDMETHODIMP QueryInterface(REFIID iid, void** ppv)
{
static const QITAB qit[] =
{
QITABENT(SourceReaderCB, IMFSourceReaderCallback),
{ 0 },
};
return QISearch(this, qit, iid, ppv);
}
STDMETHODIMP_(ULONG) AddRef()
{
return InterlockedIncrement(&_nRefCount);
}
STDMETHODIMP_(ULONG) Release()
{
ULONG uCount = InterlockedDecrement(&_nRefCount);
if (uCount == 0)
{
delete this;
}
return uCount;
}
// IMFSourceReaderCallback methods
STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD /*dwStreamIndex*/,
DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample* pSample)
{
EnterCriticalSection(&_critsec);
_isBusy = true;
if (_grabber->_sourceReader == nullptr)
{
_isBusy = false;
LeaveCriticalSection(&_critsec);
return S_OK;
}
if (dwStreamFlags & MF_SOURCE_READERF_STREAMTICK)
{
Debug(_grabber->_log, "Skipping stream gap");
LeaveCriticalSection(&_critsec);
_grabber->_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, nullptr, nullptr, nullptr, nullptr);
return S_OK;
}
if (dwStreamFlags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED)
{
IMFMediaType* type = nullptr;
GUID format;
_grabber->_sourceReader->GetNativeMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, MF_SOURCE_READER_CURRENT_TYPE_INDEX, &type);
type->GetGUID(MF_MT_SUBTYPE, &format);
Debug(_grabber->_log, "Native media type changed");
InitializeVideoEncoder(type, GetPixelFormatForGuid(format));
SAFE_RELEASE(type);
}
if (dwStreamFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
{
IMFMediaType* type = nullptr;
GUID format;
_grabber->_sourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, &type);
type->GetGUID(MF_MT_SUBTYPE, &format);
Debug(_grabber->_log, "Current media type changed");
InitializeVideoEncoder(type, GetPixelFormatForGuid(format));
SAFE_RELEASE(type);
}
// Variables declaration
IMFMediaBuffer* buffer = nullptr;
if (FAILED(hrStatus))
{
_hrStatus = hrStatus;
_com_error error(_hrStatus);
Error(_grabber->_log, "%s", error.ErrorMessage());
goto done;
}
if (!pSample)
{
Error(_grabber->_log, "Media sample is empty");
goto done;
}
if (_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::BGR24 && _pixelformat != PixelFormat::NO_CHANGE)
pSample = TransformSample(_transform, pSample);
_hrStatus = pSample->ConvertToContiguousBuffer(&buffer);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Buffer conversion failed => %s", error.ErrorMessage());
goto done;
}
BYTE* data = nullptr;
DWORD maxLength = 0, currentLength = 0;
_hrStatus = buffer->Lock(&data, &maxLength, &currentLength);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Access to the buffer memory failed => %s", error.ErrorMessage());
goto done;
}
_grabber->receive_image(data, currentLength);
_hrStatus = buffer->Unlock();
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Unlocking the buffer memory failed => %s", error.ErrorMessage());
}
done:
SAFE_RELEASE(buffer);
if (MF_SOURCE_READERF_ENDOFSTREAM & dwStreamFlags)
_bEOS = TRUE; // Reached the end of the stream.
if (_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::BGR24 && _pixelformat != PixelFormat::NO_CHANGE)
SAFE_RELEASE(pSample);
_isBusy = false;
LeaveCriticalSection(&_critsec);
return _hrStatus;
}
HRESULT SourceReaderCB::InitializeVideoEncoder(IMFMediaType* type, PixelFormat format)
{
_pixelformat = format;
if (format == PixelFormat::MJPEG || format == PixelFormat::BGR24 || format == PixelFormat::NO_CHANGE)
return S_OK;
// Variable declaration
IMFMediaType* output = nullptr;
DWORD mftStatus = 0;
QString error = "";
// Create instance of IMFTransform interface pointer as CColorConvertDMO
_hrStatus = CoCreateInstance(CLSID_CColorConvertDMO, nullptr, CLSCTX_INPROC_SERVER, IID_IMFTransform, (void**)&_transform);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Creation of the Color Converter failed => %s", error.ErrorMessage());
goto done;
}
// Set input type as media type of our input stream
_hrStatus = _transform->SetInputType(0, type, 0);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Setting the input media type failed => %s", error.ErrorMessage());
goto done;
}
// Create new media type
_hrStatus = MFCreateMediaType(&output);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Creating a new media type failed => %s", error.ErrorMessage());
goto done;
}
// Copy all attributes from input type to output media type
_hrStatus = type->CopyAllItems(output);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Copying of all attributes from input to output media type failed => %s", error.ErrorMessage());
goto done;
}
UINT32 width, height;
UINT32 numerator, denominator;
// Fill the missing attributes
if (FAILED(output->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) ||
FAILED(output->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24)) ||
FAILED(output->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE)) ||
FAILED(output->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE)) ||
FAILED(output->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)) ||
FAILED(MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height)) ||
FAILED(MFSetAttributeSize(output, MF_MT_FRAME_SIZE, width, height)) ||
FAILED(MFGetAttributeRatio(type, MF_MT_FRAME_RATE, &numerator, &denominator)) ||
FAILED(MFSetAttributeRatio(output, MF_MT_PIXEL_ASPECT_RATIO, 1, 1)))
{
Error(_grabber->_log, "Setting output media type attributes failed");
goto done;
}
// Set transform output type
_hrStatus = _transform->SetOutputType(0, output, 0);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Setting the output media type failed => %s", error.ErrorMessage());
goto done;
}
// Check if encoder parameters set properly
_hrStatus = _transform->GetInputStatus(0, &mftStatus);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Failed to query the input stream for more data => %s", error.ErrorMessage());
goto done;
}
if (MFT_INPUT_STATUS_ACCEPT_DATA == mftStatus)
{
// Notify the transform we are about to begin streaming data
if (FAILED(_transform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0)) ||
FAILED(_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0)) ||
FAILED(_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0)))
{
Error(_grabber->_log, "Failed to begin streaming data");
}
}
done:
SAFE_RELEASE(output);
return _hrStatus;
}
BOOL SourceReaderCB::isBusy()
{
EnterCriticalSection(&_critsec);
BOOL result = _isBusy;
LeaveCriticalSection(&_critsec);
return result;
}
STDMETHODIMP OnEvent(DWORD, IMFMediaEvent*) { return S_OK; }
STDMETHODIMP OnFlush(DWORD) { return S_OK; }
private:
virtual ~SourceReaderCB()
{
if (_transform)
{
_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
}
SAFE_RELEASE(_transform);
// Delete critical section.
DeleteCriticalSection(&_critsec);
}
IMFSample* SourceReaderCB::TransformSample(IMFTransform* transform, IMFSample* in_sample)
{
IMFSample* result = nullptr;
IMFMediaBuffer* out_buffer = nullptr;
MFT_OUTPUT_DATA_BUFFER outputDataBuffer = { 0 };
// Process the input sample
_hrStatus = transform->ProcessInput(0, in_sample, 0);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Failed to process the input sample => %s", error.ErrorMessage());
goto done;
}
// Gets the buffer demand for the output stream
MFT_OUTPUT_STREAM_INFO streamInfo;
_hrStatus = transform->GetOutputStreamInfo(0, &streamInfo);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Failed to retrieve buffer requirement for output current => %s", error.ErrorMessage());
goto done;
}
// Create an output media buffer
_hrStatus = MFCreateMemoryBuffer(streamInfo.cbSize, &out_buffer);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Failed to create an output media buffer => %s", error.ErrorMessage());
goto done;
}
// Create an empty media sample
_hrStatus = MFCreateSample(&result);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Failed to create an empty media sample => %s", error.ErrorMessage());
goto done;
}
// Add the output media buffer to the media sample
_hrStatus = result->AddBuffer(out_buffer);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Failed to add the output media buffer to the media sample => %s", error.ErrorMessage());
goto done;
}
// Create the output buffer structure
memset(&outputDataBuffer, 0, sizeof outputDataBuffer);
outputDataBuffer.dwStreamID = 0;
outputDataBuffer.dwStatus = 0;
outputDataBuffer.pEvents = nullptr;
outputDataBuffer.pSample = result;
DWORD status = 0;
// Generate the output sample
_hrStatus = transform->ProcessOutput(0, 1, &outputDataBuffer, &status);
if (FAILED(_hrStatus))
{
_com_error error(_hrStatus);
Error(_grabber->_log, "Failed to generate the output sample => %s", error.ErrorMessage());
}
else
{
SAFE_RELEASE(out_buffer);
return result;
}
done:
SAFE_RELEASE(out_buffer);
return nullptr;
}
private:
long _nRefCount;
CRITICAL_SECTION _critsec;
MFGrabber* _grabber;
BOOL _bEOS;
HRESULT _hrStatus;
IMFTransform* _transform;
PixelFormat _pixelformat;
std::atomic<bool> _isBusy;
};

File diff suppressed because it is too large

View File

@@ -4,21 +4,33 @@
#include <xcb/randr.h>
#include <xcb/xcb_event.h>
X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation)
: Grabber("X11GRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
// Constants
namespace {
const bool verbose = false;
} //End of constants
X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom)
: Grabber("X11GRABBER", cropLeft, cropRight, cropTop, cropBottom)
, _x11Display(nullptr)
, _xImage(nullptr)
, _pixmap(None)
, _srcFormat(nullptr)
, _dstFormat(nullptr)
, _srcPicture(None)
, _dstPicture(None)
, _pixelDecimation(pixelDecimation)
, _screenWidth(0)
, _screenHeight(0)
, _calculatedWidth(0)
, _calculatedHeight(0)
, _src_x(cropLeft)
, _src_y(cropTop)
, _XShmAvailable(false)
, _XRenderAvailable(false)
, _XRandRAvailable(false)
, _isWayland (false)
, _logger{}
, _image(0,0)
{
_logger = Logger::getInstance("X11");
_useImageResampler = false;
_imageResampler.setCropping(0, 0, 0, 0); // cropping is performed by XRender, XShmGetImage or XGetImage
memset(&_pictAttr, 0, sizeof(_pictAttr));
@@ -37,7 +49,10 @@ X11Grabber::~X11Grabber()
void X11Grabber::freeResources()
{
// Cleanup allocated resources of the X11 grab
XDestroyImage(_xImage);
if (_xImage != nullptr)
{
XDestroyImage(_xImage);
}
if (_XRandRAvailable)
{
qApp->removeNativeEventFilter(this);
@@ -65,7 +80,7 @@ void X11Grabber::setupResources()
if(_XShmAvailable)
{
_xImage = XShmCreateImage(_x11Display, _windowAttr.visual, _windowAttr.depth, ZPixmap, NULL, &_shminfo, _width, _height);
_xImage = XShmCreateImage(_x11Display, _windowAttr.visual, _windowAttr.depth, ZPixmap, NULL, &_shminfo, _calculatedWidth, _calculatedHeight);
_shminfo.shmid = shmget(IPC_PRIVATE, (size_t) _xImage->bytes_per_line * _xImage->height, IPC_CREAT|0777);
_xImage->data = (char*)shmat(_shminfo.shmid,0,0);
_shminfo.shmaddr = _xImage->data;
@@ -75,17 +90,17 @@ void X11Grabber::setupResources()
if (_XRenderAvailable)
{
_useImageResampler = false;
_useImageResampler = false;
_imageResampler.setHorizontalPixelDecimation(1);
_imageResampler.setVerticalPixelDecimation(1);
if(_XShmPixmapAvailable)
{
_pixmap = XShmCreatePixmap(_x11Display, _window, _xImage->data, &_shminfo, _width, _height, _windowAttr.depth);
_pixmap = XShmCreatePixmap(_x11Display, _window, _xImage->data, &_shminfo, _calculatedWidth, _calculatedHeight, _windowAttr.depth);
}
else
{
_pixmap = XCreatePixmap(_x11Display, _window, _width, _height, _windowAttr.depth);
_pixmap = XCreatePixmap(_x11Display, _window, _calculatedWidth, _calculatedHeight, _windowAttr.depth);
}
_srcFormat = XRenderFindVisualFormat(_x11Display, _windowAttr.visual);
_dstFormat = XRenderFindVisualFormat(_x11Display, _windowAttr.visual);
@@ -96,49 +111,82 @@ void X11Grabber::setupResources()
}
else
{
_useImageResampler = true;
_useImageResampler = true;
_imageResampler.setHorizontalPixelDecimation(_pixelDecimation);
_imageResampler.setVerticalPixelDecimation(_pixelDecimation);
}
}
bool X11Grabber::Setup()
bool X11Grabber::open()
{
_x11Display = XOpenDisplay(NULL);
if (_x11Display == nullptr)
bool rc = false;
if (getenv("WAYLAND_DISPLAY") != nullptr)
{
Error(_log, "Unable to open display");
if (getenv("DISPLAY"))
_isWayland = true;
}
else
{
_x11Display = XOpenDisplay(nullptr);
if (_x11Display != nullptr)
{
Error(_log, "%s",getenv("DISPLAY"));
rc = true;
}
}
return rc;
}
bool X11Grabber::setupDisplay()
{
bool result = false;
if ( ! open() )
{
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
else
{
Error(_log, "DISPLAY environment variable not set");
if (getenv("DISPLAY") != nullptr)
{
Error(_log, "Unable to open display [%s]",getenv("DISPLAY"));
}
else
{
Error(_log, "DISPLAY environment variable not set");
}
}
return false;
}
else
{
_window = DefaultRootWindow(_x11Display);
_window = DefaultRootWindow(_x11Display);
int dummy, pixmaps_supported;
int dummy, pixmaps_supported;
_XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy);
_XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy);
_XShmAvailable = XShmQueryExtension(_x11Display);
XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported);
_XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap;
_XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy);
_XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy);
_XShmAvailable = XShmQueryExtension(_x11Display);
XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported);
_XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap;
Info(_log, QString("XRandR=[%1] XRender=[%2] XShm=[%3] XPixmap=[%4]")
.arg(_XRandRAvailable ? "available" : "unavailable")
.arg(_XRenderAvailable ? "available" : "unavailable")
.arg(_XShmAvailable ? "available" : "unavailable")
.arg(_XShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());
bool result = (updateScreenDimensions(true) >=0);
ErrorIf(!result, _log, "X11 Grabber start failed");
setEnabled(result);
result = (updateScreenDimensions(true) >=0);
ErrorIf(!result, _log, "X11 Grabber start failed");
setEnabled(result);
}
return result;
}
int X11Grabber::grabFrame(Image<ColorRgb> & image, bool forceUpdate)
{
if (!_enabled) return 0;
if (!_isEnabled) return 0;
if (forceUpdate)
updateScreenDimensions(forceUpdate);
@@ -176,7 +224,7 @@ int X11Grabber::grabFrame(Image<ColorRgb> & image, bool forceUpdate)
// src_y = cropTop, mask_x, mask_y, dest_x, dest_y, width, height
XRenderComposite(
_x11Display, PictOpSrc, _srcPicture, None, _dstPicture, ( _src_x/_pixelDecimation),
(_src_y/_pixelDecimation), 0, 0, 0, 0, _width, _height);
(_src_y/_pixelDecimation), 0, 0, 0, 0, _calculatedWidth, _calculatedHeight);
XSync(_x11Display, False);
@@ -186,7 +234,7 @@ int X11Grabber::grabFrame(Image<ColorRgb> & image, bool forceUpdate)
}
else
{
_xImage = XGetImage(_x11Display, _pixmap, 0, 0, _width, _height, AllPlanes, ZPixmap);
_xImage = XGetImage(_x11Display, _pixmap, 0, 0, _calculatedWidth, _calculatedHeight, AllPlanes, ZPixmap);
}
}
else if (_XShmAvailable)
@@ -197,7 +245,7 @@ int X11Grabber::grabFrame(Image<ColorRgb> & image, bool forceUpdate)
else
{
// all things done by xgetimage
_xImage = XGetImage(_x11Display, _window, _src_x, _src_y, _width, _height, AllPlanes, ZPixmap);
_xImage = XGetImage(_x11Display, _window, _src_x, _src_y, _calculatedWidth, _calculatedHeight, AllPlanes, ZPixmap);
}
if (_xImage == nullptr)
@@ -220,45 +268,46 @@ int X11Grabber::updateScreenDimensions(bool force)
return -1;
}
if (!force && _screenWidth == unsigned(_windowAttr.width) && _screenHeight == unsigned(_windowAttr.height))
if (!force && _width == _windowAttr.width && _height == _windowAttr.height)
{
// No update required
return 0;
}
if (_screenWidth || _screenHeight)
if (_width || _height)
{
freeResources();
}
Info(_log, "Update of screen resolution: [%dx%d] to [%dx%d]", _screenWidth, _screenHeight, _windowAttr.width, _windowAttr.height);
_screenWidth = _windowAttr.width;
_screenHeight = _windowAttr.height;
Info(_log, "Update of screen resolution: [%dx%d] to [%dx%d]", _width, _height, _windowAttr.width, _windowAttr.height);
_width = _windowAttr.width;
_height = _windowAttr.height;
int width=0, height=0;
int width=0;
int height=0;
// Image scaling is performed by XRender when available, otherwise by ImageResampler
if (_XRenderAvailable)
{
width = (_screenWidth > unsigned(_cropLeft + _cropRight))
? ((_screenWidth - _cropLeft - _cropRight) / _pixelDecimation)
: _screenWidth / _pixelDecimation;
width = (_width > (_cropLeft + _cropRight))
? ((_width - _cropLeft - _cropRight) / _pixelDecimation)
: _width / _pixelDecimation;
height = (_screenHeight > unsigned(_cropTop + _cropBottom))
? ((_screenHeight - _cropTop - _cropBottom) / _pixelDecimation)
: _screenHeight / _pixelDecimation;
height = (_height > (_cropTop + _cropBottom))
? ((_height - _cropTop - _cropBottom) / _pixelDecimation)
: _height / _pixelDecimation;
Info(_log, "Using XRender for grabbing");
}
else
{
width = (_screenWidth > unsigned(_cropLeft + _cropRight))
? (_screenWidth - _cropLeft - _cropRight)
: _screenWidth;
width = (_width > (_cropLeft + _cropRight))
? (_width - _cropLeft - _cropRight)
: _width;
height = (_screenHeight > unsigned(_cropTop + _cropBottom))
? (_screenHeight - _cropTop - _cropBottom)
: _screenHeight;
height = (_height > (_cropTop + _cropBottom))
? (_height - _cropTop - _cropBottom)
: _height;
Info(_log, "Using XGetImage for grabbing");
}
@@ -267,29 +316,29 @@ int X11Grabber::updateScreenDimensions(bool force)
switch (_videoMode)
{
case VideoMode::VIDEO_3DSBS:
_width = width /2;
_height = height;
_calculatedWidth = width /2;
_calculatedHeight = height;
_src_x = _cropLeft / 2;
_src_y = _cropTop;
break;
case VideoMode::VIDEO_3DTAB:
_width = width;
_height = height / 2;
_calculatedWidth = width;
_calculatedHeight = height / 2;
_src_x = _cropLeft;
_src_y = _cropTop / 2;
break;
case VideoMode::VIDEO_2D:
default:
_width = width;
_height = height;
_calculatedWidth = width;
_calculatedHeight = height;
_src_x = _cropLeft;
_src_y = _cropTop;
break;
}
Info(_log, "Update output image resolution: [%dx%d] to [%dx%d]", _image.width(), _image.height(), _width, _height);
Info(_log, "Update output image resolution: [%dx%d] to [%dx%d]", _image.width(), _image.height(), _calculatedWidth, _calculatedHeight);
_image.resize(_width, _height);
_image.resize(_calculatedWidth, _calculatedHeight);
setupResources();
return 1;
@@ -298,22 +347,35 @@ int X11Grabber::updateScreenDimensions(bool force)
void X11Grabber::setVideoMode(VideoMode mode)
{
Grabber::setVideoMode(mode);
updateScreenDimensions(true);
}
void X11Grabber::setPixelDecimation(int pixelDecimation)
{
if(_pixelDecimation != pixelDecimation)
if(_x11Display != nullptr)
{
_pixelDecimation = pixelDecimation;
updateScreenDimensions(true);
}
}
void X11Grabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
bool X11Grabber::setPixelDecimation(int pixelDecimation)
{
bool rc (true);
if (Grabber::setPixelDecimation(pixelDecimation))
{
if(_x11Display != nullptr)
{
if ( updateScreenDimensions(true) < 0 )
{
rc = false;
}
}
}
return rc;
}
void X11Grabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
if(_x11Display != nullptr) updateScreenDimensions(true); // segfault on init
if(_x11Display != nullptr)
{
updateScreenDimensions(true); // segfault on init
}
}
bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message, long int * /*result*/)
@@ -332,3 +394,85 @@ bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message,
return false;
}
QJsonObject X11Grabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if ( open() )
{
inputsDiscovered["device"] = "x11";
inputsDiscovered["device_name"] = "X11";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
if (_x11Display != nullptr)
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
// Iterate through all X screens
for (int i = 0; i < XScreenCount(_x11Display); ++i)
{
_window = DefaultRootWindow(_x11Display);
const Status status = XGetWindowAttributes(_x11Display, _window, &_windowAttr);
if (status == 0)
{
Debug(_log, "Failed to obtain window attributes");
}
else
{
QJsonObject in;
QString displayName;
char* name;
if ( XFetchName(_x11Display, _window, &name) > 0 )
{
displayName = name;
}
else {
displayName = QString("Display:%1").arg(i);
}
in["name"] = displayName;
in["inputIdx"] = i;
QJsonArray formats;
QJsonArray resolutionArray;
QJsonObject format;
QJsonObject resolution;
resolution["width"] = _windowAttr.width;
resolution["height"] = _windowAttr.height;
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
}
if ( !video_inputs.isEmpty() )
{
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = _fps;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
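The discovery payload is plain Qt JSON, so callers can walk it with the usual QJson classes. A minimal sketch of pulling the advertised input names out of the object returned above; the helper name is illustrative and not part of this change.
#include <QJsonArray>
#include <QJsonObject>
#include <QJsonValue>
#include <QStringList>
// Illustrative only: collect the display names advertised by discover().
static QStringList discoveredInputNames(const QJsonObject& inputsDiscovered)
{
	QStringList names;
	const QJsonArray inputs = inputsDiscovered["video_inputs"].toArray();
	for (const QJsonValue& input : inputs)
	{
		names << input.toObject()["name"].toString();
	}
	return names;
}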

View File

@@ -1,10 +1,14 @@
#include <grabber/X11Wrapper.h>
X11Wrapper::X11Wrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, unsigned updateRate_Hz)
: GrabberWrapper("X11", &_grabber, 0, 0, updateRate_Hz)
, _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation)
, _init(false)
{}
X11Wrapper::X11Wrapper( int updateRate_Hz,
int pixelDecimation,
int cropLeft, int cropRight, int cropTop, int cropBottom)
: GrabberWrapper("X11", &_grabber, updateRate_Hz)
, _grabber(cropLeft, cropRight, cropTop, cropBottom)
, _init(false)
{
_grabber.setPixelDecimation(pixelDecimation);
}
X11Wrapper::~X11Wrapper()
{
@@ -19,7 +23,7 @@ void X11Wrapper::action()
if (! _init )
{
_init = true;
if ( ! _grabber.Setup() )
if ( ! _grabber.setupDisplay() )
{
stop();
}

View File

@@ -22,7 +22,7 @@ void check_error(xcb_generic_error_t * error)
// Requests with void response type
template<class Request, class ...Args>
typename std::enable_if<std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value, void>::type
query(xcb_connection_t * connection, Args&& ...args)
static query(xcb_connection_t * connection, Args&& ...args)
{
auto cookie = Request::RequestFunction(connection, std::forward<Args>(args)...);
@@ -33,9 +33,8 @@ template<class Request, class ...Args>
// Requests with non-void response type
template<class Request, class ...Args>
typename std::enable_if<!std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value,
std::unique_ptr<typename Request::ResponseType, decltype(&free)>>::type
query(xcb_connection_t * connection, Args&& ...args)
typename std::enable_if<!std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value, std::unique_ptr<typename Request::ResponseType, decltype(&free)>>::type
static query(xcb_connection_t * connection, Args&& ...args)
{
auto cookie = Request::RequestFunction(connection, std::forward<Args>(args)...);

View File

@@ -21,6 +21,14 @@ struct GetGeometry
static constexpr auto ReplyFunction = xcb_get_geometry_reply;
};
struct GetProperty
{
typedef xcb_get_property_reply_t ResponseType;
static constexpr auto RequestFunction = xcb_get_property;
static constexpr auto ReplyFunction = xcb_get_property_reply;
};
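The new GetProperty request slots into the same query<> helper as the existing requests. A hedged sketch of reading a window title with it; the free function and its arguments are illustrative, while the call pattern mirrors the one used later in XcbGrabber::discover().
#include <QString>
#include <xcb/xcb.h>
#include <xcb/xproto.h>
// Sketch only: fetch WM_NAME for a window via the GetProperty wrapper above.
// Assumes the query<> template shown in the previous file; connection/window come from the caller.
static QString windowName(xcb_connection_t* connection, xcb_window_t window)
{
	auto property = query<GetProperty>(connection, 0, window,
	                                   XCB_ATOM_WM_NAME, XCB_ATOM_STRING, 0, 0);
	if (property != nullptr && xcb_get_property_value_length(property.get()) > 0)
	{
		return QString::fromLatin1(
			static_cast<const char*>(xcb_get_property_value(property.get())),
			xcb_get_property_value_length(property.get()));
	}
	return QString();
}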
struct ShmQueryVersion
{
typedef xcb_shm_query_version_reply_t ResponseType;

View File

@@ -14,10 +14,15 @@
#include <memory>
// Constants
namespace {
const bool verbose = false;
} //End of constants
#define DOUBLE_TO_FIXED(d) ((xcb_render_fixed_t) ((d) * 65536))
XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation)
: Grabber("XCBGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom)
: Grabber("XCBGRABBER", cropLeft, cropRight, cropTop, cropBottom)
, _connection{}
, _screen{}
, _pixmap{}
@@ -27,7 +32,6 @@ XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom,
, _dstPicture{}
, _transform{}
, _shminfo{}
, _pixelDecimation(pixelDecimation)
, _screenWidth{}
, _screenHeight{}
, _src_x(cropLeft)
@@ -36,6 +40,7 @@ XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom,
, _XcbRandRAvailable{}
, _XcbShmAvailable{}
, _XcbShmPixmapAvailable{}
, _isWayland (false)
, _logger{}
, _shmData{}
, _XcbRandREventBase{-1}
@@ -181,54 +186,83 @@ void XcbGrabber::setupShm()
}
}
bool XcbGrabber::Setup()
bool XcbGrabber::open()
{
int screen_num;
_connection = xcb_connect(nullptr, &screen_num);
bool rc = false;
int ret = xcb_connection_has_error(_connection);
if (ret != 0)
if (getenv("WAYLAND_DISPLAY") != nullptr)
{
Error(_logger, "Cannot open display, error %d", ret);
return false;
_isWayland = true;
}
const xcb_setup_t * setup = xcb_get_setup(_connection);
_screen = getScreen(setup, screen_num);
if (!_screen)
else
{
Error(_log, "Unable to open display, screen %d does not exist", screen_num);
_connection = xcb_connect(nullptr, &_screen_num);
if (getenv("DISPLAY"))
Error(_log, "%s", getenv("DISPLAY"));
int ret = xcb_connection_has_error(_connection);
if (ret != 0)
{
Debug(_logger, "Cannot open display, error %d", ret);
}
else
Error(_log, "DISPLAY environment variable not set");
freeResources();
return false;
{
const xcb_setup_t * setup = xcb_get_setup(_connection);
_screen = getScreen(setup, _screen_num);
if ( _screen != nullptr)
{
rc = true;
}
}
}
setupRandr();
setupRender();
setupShm();
return rc;
}
Info(_log, QString("XcbRandR=[%1] XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]")
.arg(_XcbRandRAvailable ? "available" : "unavailable")
.arg(_XcbRenderAvailable ? "available" : "unavailable")
.arg(_XcbShmAvailable ? "available" : "unavailable")
.arg(_XcbShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());
bool XcbGrabber::setupDisplay()
{
bool result = false;
bool result = (updateScreenDimensions(true) >= 0);
ErrorIf(!result, _log, "XCB Grabber start failed");
setEnabled(result);
if ( ! open() )
{
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
else
{
if (getenv("DISPLAY") != nullptr)
{
Error(_log, "Unable to open display [%s], screen %d does not exist", getenv("DISPLAY"), _screen_num);
}
else
{
Error(_log, "DISPLAY environment variable not set");
}
freeResources();
}
}
else
{
setupRandr();
setupRender();
setupShm();
Info(_log, QString("XcbRandR=[%1] XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]")
.arg(_XcbRandRAvailable ? "available" : "unavailable")
.arg(_XcbRenderAvailable ? "available" : "unavailable")
.arg(_XcbShmAvailable ? "available" : "unavailable")
.arg(_XcbShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());
result = (updateScreenDimensions(true) >= 0);
ErrorIf(!result, _log, "XCB Grabber start failed");
setEnabled(result);
}
return result;
}
int XcbGrabber::grabFrame(Image<ColorRgb> & image, bool forceUpdate)
{
if (!_enabled)
if (!_isEnabled)
return 0;
if (forceUpdate)
@@ -316,7 +350,7 @@ int XcbGrabber::updateScreenDimensions(bool force)
return -1;
}
if (!_enabled)
if (!_isEnabled)
setEnabled(true);
if (!force && _screenWidth == unsigned(geometry->width) &&
@@ -391,19 +425,29 @@ int XcbGrabber::updateScreenDimensions(bool force)
void XcbGrabber::setVideoMode(VideoMode mode)
{
Grabber::setVideoMode(mode);
updateScreenDimensions(true);
}
void XcbGrabber::setPixelDecimation(int pixelDecimation)
{
if(_pixelDecimation != pixelDecimation)
if(_connection != nullptr)
{
_pixelDecimation = pixelDecimation;
updateScreenDimensions(true);
}
}
void XcbGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
bool XcbGrabber::setPixelDecimation(int pixelDecimation)
{
bool rc (true);
if (Grabber::setPixelDecimation(pixelDecimation))
{
if(_connection != nullptr)
{
if ( updateScreenDimensions(true) < 0 )
{
rc = false;
}
}
}
return rc;
}
void XcbGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
if(_connection != nullptr)
@@ -459,3 +503,96 @@ xcb_render_pictformat_t XcbGrabber::findFormatForVisual(xcb_visualid_t visual) c
}
return {};
}
QJsonObject XcbGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if ( open() )
{
inputsDiscovered["device"] = "xcb";
inputsDiscovered["device_name"] = "XCB";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
if (_connection != nullptr && _screen != nullptr )
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
const xcb_setup_t * setup = xcb_get_setup(_connection);
xcb_screen_iterator_t it = xcb_setup_roots_iterator(setup);
xcb_screen_t * screen = nullptr;
int i = 0;
// Iterate through all X screens
for (; it.rem > 0; xcb_screen_next(&it))
{
screen = it.data;
auto geometry = query<GetGeometry>(_connection, screen->root);
if (geometry == nullptr)
{
Debug(_log, "Failed to obtain screen geometry for screen [%d]", i);
}
else
{
QJsonObject in;
QString displayName;
auto property = query<GetProperty>(_connection, 0, screen->root, XCB_ATOM_WM_NAME, XCB_ATOM_STRING, 0, 0);
if ( property != nullptr )
{
if ( xcb_get_property_value_length(property.get()) > 0 )
{
displayName = (char *) xcb_get_property_value(property.get());
}
}
if (displayName.isEmpty())
{
displayName = QString("Display:%1").arg(i);
}
in["name"] = displayName;
in["inputIdx"] = i;
QJsonArray formats;
QJsonArray resolutionArray;
QJsonObject format;
QJsonObject resolution;
resolution["width"] = geometry->width;
resolution["height"] = geometry->height;
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
++i;
}
if ( !video_inputs.isEmpty() )
{
inputsDiscovered["video_inputs"] = video_inputs;
QJsonObject defaults, video_inputs_default, resolution_default;
resolution_default["fps"] = _fps;
video_inputs_default["resolution"] = resolution_default;
video_inputs_default["inputIdx"] = 0;
defaults["video_input"] = video_inputs_default;
inputsDiscovered["default"] = defaults;
}
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}

View File

@@ -1,10 +1,14 @@
#include <grabber/XcbWrapper.h>
XcbWrapper::XcbWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, const unsigned updateRate_Hz)
: GrabberWrapper("Xcb", &_grabber, 0, 0, updateRate_Hz)
, _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation)
XcbWrapper::XcbWrapper( int updateRate_Hz,
int pixelDecimation,
int cropLeft, int cropRight, int cropTop, int cropBottom)
: GrabberWrapper("Xcb", &_grabber, updateRate_Hz)
, _grabber(cropLeft, cropRight, cropTop, cropBottom)
, _init(false)
{}
{
_grabber.setPixelDecimation(pixelDecimation);
}
XcbWrapper::~XcbWrapper()
{
@@ -19,7 +23,7 @@ void XcbWrapper::action()
if (! _init )
{
_init = true;
if ( ! _grabber.Setup() )
if ( ! _grabber.setupDisplay() )
{
stop();
}

View File

@@ -22,12 +22,15 @@ target_link_libraries(hyperion
flatbufserver
flatbuffers
leddevice
boblightserver
effectengine
database
${QT_LIBRARIES}
)
if(ENABLE_BOBLIGHT)
target_link_libraries(hyperion boblightserver)
endif()
if (ENABLE_AVAHI)
target_link_libraries(hyperion bonjour)
endif ()

View File

@@ -47,6 +47,7 @@ void CaptureCont::handleV4lImage(const QString& name, const Image<ColorRgb> & im
{
_hyperion->registerInput(_v4lCaptPrio, hyperion::COMP_V4L, "System", name);
_v4lCaptName = name;
emit GlobalSignals::getInstance()->requestSource(hyperion::COMP_V4L, int(_hyperion->getInstanceIndex()), _v4lCaptEnabled);
}
_v4lInactiveTimer->start();
_hyperion->setInputImage(_v4lCaptPrio, image);
@@ -58,6 +59,7 @@ void CaptureCont::handleSystemImage(const QString& name, const Image<ColorRgb>&
{
_hyperion->registerInput(_systemCaptPrio, hyperion::COMP_GRABBER, "System", name);
_systemCaptName = name;
emit GlobalSignals::getInstance()->requestSource(hyperion::COMP_GRABBER, int(_hyperion->getInstanceIndex()), _systemCaptEnabled);
}
_systemInactiveTimer->start();
_hyperion->setInputImage(_systemCaptPrio, image);
@@ -75,7 +77,7 @@ void CaptureCont::setSystemCaptureEnable(bool enable)
}
else
{
disconnect(GlobalSignals::getInstance(), &GlobalSignals::setSystemImage, 0, 0);
disconnect(GlobalSignals::getInstance(), &GlobalSignals::setSystemImage, this, 0);
_hyperion->clear(_systemCaptPrio);
_systemInactiveTimer->stop();
_systemCaptName = "";
@@ -98,7 +100,7 @@ void CaptureCont::setV4LCaptureEnable(bool enable)
}
else
{
disconnect(GlobalSignals::getInstance(), &GlobalSignals::setV4lImage, 0, 0);
disconnect(GlobalSignals::getInstance(), &GlobalSignals::setV4lImage, this, 0);
_hyperion->clear(_v4lCaptPrio);
_v4lInactiveTimer->stop();
_v4lCaptName = "";
@@ -125,8 +127,8 @@ void CaptureCont::handleSettingsUpdate(settings::type type, const QJsonDocument&
_systemCaptPrio = obj["systemPriority"].toInt(250);
}
setV4LCaptureEnable(obj["v4lEnable"].toBool(true));
setSystemCaptureEnable(obj["systemEnable"].toBool(true));
setV4LCaptureEnable(obj["v4lEnable"].toBool(false));
setSystemCaptureEnable(obj["systemEnable"].toBool(false));
}
}

View File

@@ -11,7 +11,12 @@ ComponentRegister::ComponentRegister(Hyperion* hyperion)
{
// init all comps to false
QVector<hyperion::Components> vect;
vect << COMP_ALL << COMP_SMOOTHING << COMP_BLACKBORDER << COMP_FORWARDER << COMP_BOBLIGHTSERVER << COMP_GRABBER << COMP_V4L << COMP_LEDDEVICE;
vect << COMP_ALL << COMP_SMOOTHING << COMP_BLACKBORDER << COMP_FORWARDER << COMP_GRABBER << COMP_V4L << COMP_LEDDEVICE;
#if defined(ENABLE_BOBLIGHT)
vect << COMP_BOBLIGHTSERVER;
#endif
for(auto e : vect)
{
_componentStates.emplace(e, (e == COMP_ALL));

View File

@@ -1,33 +1,46 @@
#include <hyperion/Grabber.h>
#include <hyperion/GrabberWrapper.h>
Grabber::Grabber(const QString& grabberName, int width, int height, int cropLeft, int cropRight, int cropTop, int cropBottom)
: _imageResampler()
Grabber::Grabber(const QString& grabberName, int cropLeft, int cropRight, int cropTop, int cropBottom)
: _grabberName(grabberName)
, _log(Logger::getInstance(_grabberName.toUpper()))
, _useImageResampler(true)
, _videoMode(VideoMode::VIDEO_2D)
, _width(width)
, _height(height)
, _fps(15)
, _videoStandard(VideoStandard::NO_CHANGE)
, _pixelDecimation(GrabberWrapper::DEFAULT_PIXELDECIMATION)
, _flipMode(FlipMode::NO_CHANGE)
, _width(0)
, _height(0)
, _fps(GrabberWrapper::DEFAULT_RATE_HZ)
, _fpsSoftwareDecimation(0)
, _input(-1)
, _cropLeft(0)
, _cropRight(0)
, _cropTop(0)
, _cropBottom(0)
, _enabled(true)
, _log(Logger::getInstance(grabberName.toUpper()))
, _isEnabled(true)
, _isDeviceInError(false)
{
Grabber::setVideoMode(VideoMode::VIDEO_2D);
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
void Grabber::setEnabled(bool enable)
{
Info(_log,"Capture interface is now %s", enable ? "enabled" : "disabled");
_enabled = enable;
_isEnabled = enable;
}
void Grabber::setInError(const QString& errorMsg)
{
_isDeviceInError = true;
_isEnabled = false;
Error(_log, "Grabber disabled, device '%s' signals error: '%s'", QSTRING_CSTR(_grabberName), QSTRING_CSTR(errorMsg));
}
void Grabber::setVideoMode(VideoMode mode)
{
Debug(_log,"Set videomode to %d", mode);
Info(_log,"Set videomode to %s", QSTRING_CSTR(videoMode2String(mode)));
_videoMode = mode;
if ( _useImageResampler )
{
@@ -35,11 +48,46 @@ void Grabber::setVideoMode(VideoMode mode)
}
}
void Grabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
void Grabber::setVideoStandard(VideoStandard videoStandard)
{
if (_videoStandard != videoStandard) {
_videoStandard = videoStandard;
}
}
bool Grabber::setPixelDecimation(int pixelDecimation)
{
if (_pixelDecimation != pixelDecimation)
{
Info(_log,"Set image size decimation to %d", pixelDecimation);
_pixelDecimation = pixelDecimation;
if ( _useImageResampler )
{
_imageResampler.setHorizontalPixelDecimation(pixelDecimation);
_imageResampler.setVerticalPixelDecimation(pixelDecimation);
}
return true;
}
return false;
}
void Grabber::setFlipMode(FlipMode mode)
{
Info(_log,"Set flipmode to %s", QSTRING_CSTR(flipModeToString(mode)));
_flipMode = mode;
if ( _useImageResampler )
{
_imageResampler.setFlipMode(_flipMode);
}
}
void Grabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
if (_width>0 && _height>0)
{
if (cropLeft + cropRight >= (unsigned)_width || cropTop + cropBottom >= (unsigned)_height)
if (cropLeft + cropRight >= _width || cropTop + cropBottom >= _height)
{
Error(_log, "Rejecting invalid crop values: left: %d, right: %d, top: %d, bottom: %d, higher than height/width %d/%d", cropLeft, cropRight, cropTop, cropBottom, _height, _width);
return;
@@ -79,29 +127,45 @@ bool Grabber::setInput(int input)
bool Grabber::setWidthHeight(int width, int height)
{
bool rc (false);
// eval changes with crop
if ( (width>0 && height>0) && (_width != width || _height != height) )
{
if (_cropLeft + _cropRight >= width || _cropTop + _cropBottom >= height)
{
Error(_log, "Rejecting invalid width/height values as it collides with image cropping: width: %d, height: %d", width, height);
return false;
rc = false;
}
else
{
Debug(_log, "Set new width: %d, height: %d for capture", width, height);
_width = width;
_height = height;
rc = true;
}
Debug(_log, "Set new width: %d, height: %d for capture", width, height);
_width = width;
_height = height;
return true;
}
return false;
return rc;
}
bool Grabber::setFramerate(int fps)
{
if((fps > 0) && (_fps != fps))
{
Info(_log,"Set new frames per second to: %i fps", fps);
_fps = fps;
return true;
}
return false;
}
void Grabber::setFpsSoftwareDecimation(int decimation)
{
if((_fpsSoftwareDecimation != decimation))
{
_fpsSoftwareDecimation = decimation;
if(decimation > 0){
Debug(_log,"Skip %i frame per second", decimation);
}
}
}
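Frame size and crop borders validate each other: setCropping() rejects crops that would consume the whole frame, and setWidthHeight() rejects a size that collides with an already configured crop. The shared check reduces to a few comparisons; a standalone sketch under illustrative names:
// Sketch of the mutual validation between frame size and crop borders.
// Returns false when the crop would leave no visible area of a known frame size.
static bool cropFitsFrame(int width, int height,
                          int cropLeft, int cropRight, int cropTop, int cropBottom)
{
	if (width <= 0 || height <= 0)
	{
		return true; // size not known yet; accept and re-check once it is
	}
	return (cropLeft + cropRight) < width && (cropTop + cropBottom) < height;
}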

View File

@@ -10,22 +10,31 @@
#include <QTimer>
GrabberWrapper* GrabberWrapper::instance = nullptr;
const int GrabberWrapper::DEFAULT_RATE_HZ = 25;
const int GrabberWrapper::DEFAULT_MIN_GRAB_RATE_HZ = 1;
const int GrabberWrapper::DEFAULT_MAX_GRAB_RATE_HZ = 30;
const int GrabberWrapper::DEFAULT_PIXELDECIMATION = 8;
GrabberWrapper::GrabberWrapper(const QString& grabberName, Grabber * ggrabber, unsigned width, unsigned height, unsigned updateRate_Hz)
/// Maps of Hyperion instance ids to the grabber name that requested screen capture
QMap<int, QString> GrabberWrapper::GRABBER_SYS_CLIENTS = QMap<int, QString>();
QMap<int, QString> GrabberWrapper::GRABBER_V4L_CLIENTS = QMap<int, QString>();
bool GrabberWrapper::GLOBAL_GRABBER_SYS_ENABLE = false;
bool GrabberWrapper::GLOBAL_GRABBER_V4L_ENABLE = false;
GrabberWrapper::GrabberWrapper(const QString& grabberName, Grabber * ggrabber, int updateRate_Hz)
: _grabberName(grabberName)
, _timer(new QTimer(this))
, _updateInterval_ms(1000/updateRate_Hz)
, _log(Logger::getInstance(grabberName))
, _ggrabber(ggrabber)
, _image(0,0)
, _log(Logger::getInstance(grabberName.toUpper()))
, _timer(new QTimer(this))
, _updateInterval_ms(1000/updateRate_Hz)
, _ggrabber(ggrabber)
, _image(0,0)
{
GrabberWrapper::instance = this;
// Configure the timer to generate events every n milliseconds
_timer->setTimerType(Qt::PreciseTimer);
_timer->setInterval(_updateInterval_ms);
_image.resize(width, height);
connect(_timer, &QTimer::timeout, this, &GrabberWrapper::action);
// connect the image forwarding
@@ -44,17 +53,26 @@ GrabberWrapper::~GrabberWrapper()
bool GrabberWrapper::start()
{
// Start the timer with the pre configured interval
Debug(_log,"Grabber start()");
_timer->start();
return _timer->isActive();
bool rc = false;
if ( open() )
{
if (!_timer->isActive())
{
// Start the timer with the pre configured interval
Debug(_log,"Grabber start()");
_timer->start();
}
rc = _timer->isActive();
}
return rc;
}
void GrabberWrapper::stop()
{
if (_timer->isActive())
{
// Stop the timer, effectivly stopping the process
// Stop the timer, effectively stopping the process
Debug(_log,"Grabber stop()");
_timer->stop();
}
@@ -65,50 +83,58 @@ bool GrabberWrapper::isActive() const
return _timer->isActive();
}
QString GrabberWrapper::getActive() const
QStringList GrabberWrapper::getActive(int inst) const
{
return _grabberName;
QStringList result = QStringList();
if(GRABBER_V4L_CLIENTS.contains(inst))
result << GRABBER_V4L_CLIENTS.value(inst);
if(GRABBER_SYS_CLIENTS.contains(inst))
result << GRABBER_SYS_CLIENTS.value(inst);
return result;
}
QStringList GrabberWrapper::availableGrabbers()
{
QStringList grabbers;
#ifdef ENABLE_DISPMANX
	grabbers << "dispmanx";
#endif
#ifdef ENABLE_V4L2
#if defined(ENABLE_V4L2) || defined(ENABLE_MF)
	grabbers << "v4l2";
#endif
#ifdef ENABLE_FB
	grabbers << "framebuffer";
#endif
#ifdef ENABLE_AMLOGIC
	grabbers << "amlogic";
#endif
#ifdef ENABLE_OSX
	grabbers << "osx";
#endif
#ifdef ENABLE_X11
	grabbers << "x11";
#endif
#ifdef ENABLE_XCB
	grabbers << "xcb";
#endif
#ifdef ENABLE_QT
	grabbers << "qt";
#endif
#ifdef ENABLE_DX
	grabbers << "dx";
#endif
return grabbers;
}
@@ -117,12 +143,17 @@ void GrabberWrapper::setVideoMode(VideoMode mode)
{
if (_ggrabber != nullptr)
{
Info(_log,"setvideomode");
Info(_log,"setVideoMode");
_ggrabber->setVideoMode(mode);
}
}
void GrabberWrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
void GrabberWrapper::setFlipMode(const QString& flipMode)
{
_ggrabber->setFlipMode(parseFlipMode(flipMode));
}
void GrabberWrapper::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
_ggrabber->setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
@@ -143,33 +174,49 @@ void GrabberWrapper::updateTimer(int interval)
}
void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::SYSTEMCAPTURE && !_grabberName.startsWith("V4L"))
{
// extract settings
const QJsonObject& obj = config.object();
// save current state
bool isEnabled = getSysGrabberState();
// width/height
_ggrabber->setWidthHeight(obj["width"].toInt(96), obj["height"].toInt(96));
// set global grabber state
setSysGrabberState(obj["enable"].toBool(false));
// display index for MAC
_ggrabber->setDisplayIndex(obj["display"].toInt(0));
if (getSysGrabberState())
{
// width/height
_ggrabber->setWidthHeight(obj["width"].toInt(96), obj["height"].toInt(96));
// device path for Framebuffer
_ggrabber->setDevicePath(obj["device"].toString("/dev/fb0"));
// display index for MAC
_ggrabber->setDisplayIndex(obj["input"].toInt(0));
// pixel decimation for x11
_ggrabber->setPixelDecimation(obj["pixelDecimation"].toInt(8));
// pixel decimation for x11
_ggrabber->setPixelDecimation(obj["pixelDecimation"].toInt(DEFAULT_PIXELDECIMATION));
// crop for system capture
_ggrabber->setCropping(
obj["cropLeft"].toInt(0),
obj["cropRight"].toInt(0),
obj["cropTop"].toInt(0),
obj["cropBottom"].toInt(0));
// eval new update time
updateTimer(1000/obj["frequency_Hz"].toInt(10));
_ggrabber->setFramerate(obj["fps"].toInt(DEFAULT_RATE_HZ));
// eval new update time
updateTimer(_ggrabber->getUpdateInterval());
// start if current state is not true
if (!isEnabled)
{
start();
}
}
else
{
stop();
}
}
}
@@ -177,24 +224,24 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp
{
if(component == hyperion::Components::COMP_GRABBER && !_grabberName.startsWith("V4L"))
{
if(listen && !GRABBER_SYS_CLIENTS.contains(hyperionInd))
GRABBER_SYS_CLIENTS.append(hyperionInd);
else if (!listen)
GRABBER_SYS_CLIENTS.removeOne(hyperionInd);
if(listen)
GRABBER_SYS_CLIENTS.insert(hyperionInd, _grabberName);
else
GRABBER_SYS_CLIENTS.remove(hyperionInd);
if(GRABBER_SYS_CLIENTS.empty())
if(GRABBER_SYS_CLIENTS.empty() || !getSysGrabberState())
stop();
else
start();
}
else if(component == hyperion::Components::COMP_V4L && _grabberName.startsWith("V4L"))
{
if(listen && !GRABBER_V4L_CLIENTS.contains(hyperionInd))
GRABBER_V4L_CLIENTS.append(hyperionInd);
else if (!listen)
GRABBER_V4L_CLIENTS.removeOne(hyperionInd);
if(listen)
GRABBER_V4L_CLIENTS.insert(hyperionInd, _grabberName);
else
GRABBER_V4L_CLIENTS.remove(hyperionInd);
if(GRABBER_V4L_CLIENTS.empty())
if(GRABBER_V4L_CLIENTS.empty() || !getV4lGrabberState())
stop();
else
start();
@@ -204,48 +251,6 @@ void GrabberWrapper::handleSourceRequest(hyperion::Components component, int hyp
void GrabberWrapper::tryStart()
{
// verify start condition
if((_grabberName.startsWith("V4L") && !GRABBER_V4L_CLIENTS.empty()) || (!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty()))
{
if(!_grabberName.startsWith("V4L") && !GRABBER_SYS_CLIENTS.empty() && getSysGrabberState())
start();
}
}
QStringList GrabberWrapper::getV4L2devices() const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2devices();
return QStringList();
}
QString GrabberWrapper::getV4L2deviceName(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2deviceName(devicePath);
return QString();
}
QMultiMap<QString, int> GrabberWrapper::getV4L2deviceInputs(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getV4L2deviceInputs(devicePath);
return QMultiMap<QString, int>();
}
QStringList GrabberWrapper::getResolutions(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getResolutions(devicePath);
return QStringList();
}
QStringList GrabberWrapper::getFramerates(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getFramerates(devicePath);
return QStringList();
}
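Per-instance capture requests now reduce to a map insert or remove followed by a start/stop decision. A reduced sketch of that pattern; the map and flag mirror the statics above, the helper itself is illustrative:
#include <QMap>
#include <QString>
// Sketch: record which Hyperion instance requested capture and report whether
// the grabber timer should keep running (at least one client and globally enabled).
static bool updateClients(QMap<int, QString>& clients, int instanceId,
                          const QString& grabberName, bool listen, bool grabberEnabled)
{
	if (listen)
	{
		clients.insert(instanceId, grabberName);
	}
	else
	{
		clients.remove(instanceId);
	}
	return !clients.isEmpty() && grabberEnabled;
}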

View File

@@ -37,7 +37,9 @@
#include <hyperion/CaptureCont.h>
// Boblight
#if defined(ENABLE_BOBLIGHT)
#include <boblightserver/BoblightServer.h>
#endif
Hyperion::Hyperion(quint8 instance, bool readonlyMode)
: QObject()
@@ -56,9 +58,11 @@ Hyperion::Hyperion(quint8 instance, bool readonlyMode)
, _hwLedCount()
, _ledGridSize(hyperion::getLedLayoutGridSize(getSetting(settings::LEDS).array()))
, _BGEffectHandler(nullptr)
,_captureCont(nullptr)
, _captureCont(nullptr)
, _ledBuffer(_ledString.leds().size(), ColorRgb::BLACK)
#if defined(ENABLE_BOBLIGHT)
, _boblightServer(nullptr)
#endif
, _readOnlyMode(readonlyMode)
{
@@ -83,7 +87,7 @@ void Hyperion::start()
}
// handle hwLedCount
_hwLedCount = qMax(getSetting(settings::DEVICE).object()["hardwareLedCount"].toInt(getLedCount()), getLedCount());
_hwLedCount = getSetting(settings::DEVICE).object()["hardwareLedCount"].toInt(getLedCount());
// Initialize colororder vector
for (const Led& led : _ledString.leds())
@@ -93,7 +97,7 @@ void Hyperion::start()
// connect Hyperion::update with Muxer visible priority changes as muxer updates independent
connect(&_muxer, &PriorityMuxer::visiblePriorityChanged, this, &Hyperion::update);
connect(&_muxer, &PriorityMuxer::visiblePriorityChanged, this, &Hyperion::handlePriorityChangedLedDevice);
connect(&_muxer, &PriorityMuxer::visiblePriorityChanged, this, &Hyperion::handleSourceAvailability);
connect(&_muxer, &PriorityMuxer::visibleComponentChanged, this, &Hyperion::handleVisibleComponentChanged);
// listens for ComponentRegister changes of COMP_ALL to perform core enable/disable actions
@@ -148,9 +152,11 @@ void Hyperion::start()
// if there is no startup / background effect and no sending capture interface we probably want to push once BLACK (as PrioMuxer won't emit a priority change)
update();
#if defined(ENABLE_BOBLIGHT)
// boblight, can't live in global scope as it depends on layout
_boblightServer = new BoblightServer(this, getSetting(settings::BOBLSERVER));
connect(this, &Hyperion::settingsChanged, _boblightServer, &BoblightServer::handleSettingsUpdate);
#endif
// instance initiated, enter thread event loop
emit started();
@@ -168,7 +174,9 @@ void Hyperion::freeObjects()
clear(-1,true);
// delete components on exit of hyperion core
#if defined(ENABLE_BOBLIGHT)
delete _boblightServer;
#endif
delete _captureCont;
delete _effectEngine;
delete _raw2ledAdjustment;
@@ -217,7 +225,7 @@ void Hyperion::handleSettingsUpdate(settings::type type, const QJsonDocument& co
}
// handle hwLedCount update
_hwLedCount = qMax(getSetting(settings::DEVICE).object()["hardwareLedCount"].toInt(getLedCount()), getLedCount());
_hwLedCount = getSetting(settings::DEVICE).object()["hardwareLedCount"].toInt(getLedCount());
// change in leds are also reflected in adjustment
delete _raw2ledAdjustment;
@@ -231,7 +239,7 @@ void Hyperion::handleSettingsUpdate(settings::type type, const QJsonDocument& co
QJsonObject dev = config.object();
// handle hwLedCount update
_hwLedCount = qMax(dev["hardwareLedCount"].toInt(getLedCount()), getLedCount());
_hwLedCount = dev["hardwareLedCount"].toInt(getLedCount());
// force ledString update, if device ByteOrder changed
if(_ledDeviceWrapper->getColorOrder() != dev["colorOrder"].toString("rgb"))
@@ -552,22 +560,23 @@ void Hyperion::handleVisibleComponentChanged(hyperion::Components comp)
_raw2ledAdjustment->setBacklightEnabled((comp != hyperion::COMP_COLOR && comp != hyperion::COMP_EFFECT));
}
void Hyperion::handlePriorityChangedLedDevice(const quint8& priority)
{
int previousPriority = _muxer.getPreviousPriority();
void Hyperion::handleSourceAvailability(const quint8& priority)
{
int previousPriority = _muxer.getPreviousPriority();
Debug(_log,"priority[%d], previousPriority[%d]", priority, previousPriority);
if ( priority == PriorityMuxer::LOWEST_PRIORITY)
{
Debug(_log,"No source left -> switch LED-Device off");
Debug(_log,"No source left -> Pause output processing and switch LED-Device off");
emit _ledDeviceWrapper->switchOff();
emit _deviceSmooth->setPause(true);
}
else
{
if ( previousPriority == PriorityMuxer::LOWEST_PRIORITY )
{
Debug(_log,"new source available -> switch LED-Device on");
Debug(_log,"new source available -> Resume output processing and switch LED-Device on");
emit _ledDeviceWrapper->switchOn();
emit _deviceSmooth->setPause(false);
}
}
}
@@ -580,7 +589,7 @@ void Hyperion::update()
// copy image & process OR copy ledColors from muxer
Image<ColorRgb> image = priorityInfo.image;
if(image.size() > 3)
if (image.width() > 1 || image.height() > 1)
{
emit currentImage(image);
_ledBuffer = _imageProcessor->process(image);
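update() now distinguishes real capture frames from the 1x1 placeholder image by size rather than by byte count. The test is small enough to state as a generic helper; ImageT stands in for the project's Image template and only needs width()/height():
// Sketch: a 1x1 frame is the placeholder pushed for non-image sources;
// anything larger is treated as real capture data worth processing.
template <typename ImageT>
static bool isRealImage(const ImageT& image)
{
	return image.width() > 1 || image.height() > 1;
}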

View File

@@ -88,7 +88,6 @@ bool HyperionIManager::startInstance(quint8 inst, bool block, QObject* caller, i
// from Hyperion
connect(hyperion, &Hyperion::settingsChanged, this, &HyperionIManager::settingsChanged);
connect(hyperion, &Hyperion::videoMode, this, &HyperionIManager::requestVideoMode);
connect(hyperion, &Hyperion::compStateChangeRequest, this, &HyperionIManager::compStateChangeRequest);
// to Hyperion
connect(this, &HyperionIManager::newVideoMode, hyperion, &Hyperion::newVideoMode);

View File

@@ -28,7 +28,7 @@ QString ImageProcessor::mappingTypeToStr(int mappingType)
ImageProcessor::ImageProcessor(const LedString& ledString, Hyperion* hyperion)
: QObject(hyperion)
, _log(Logger::getInstance("BLACKBORDER"))
, _log(Logger::getInstance("IMAGETOLED"))
, _ledString(ledString)
, _borderProcessor(new BlackBorderProcessor(hyperion, this))
, _imageToLeds(nullptr)

View File

@@ -59,7 +59,7 @@ ImageToLedsMap::ImageToLedsMap(
const auto maxYLedCount = qMin(maxY_idx, yOffset+actualHeight);
const auto maxXLedCount = qMin(maxX_idx, xOffset+actualWidth);
std::vector<unsigned> ledColors;
std::vector<int32_t> ledColors;
ledColors.reserve((size_t) maxXLedCount*maxYLedCount);
for (unsigned y = minY_idx; y < maxYLedCount; ++y)

View File

@@ -50,19 +50,17 @@ const unsigned DEFAUL_OUTPUTDEPLAY = 0; // outputdelay in ms
LinearColorSmoothing::LinearColorSmoothing(const QJsonDocument &config, Hyperion *hyperion)
: QObject(hyperion)
, _log(Logger::getInstance("SMOOTHING"))
, _hyperion(hyperion)
, _updateInterval(DEFAUL_UPDATEINTERVALL.count())
, _settlingTime(DEFAUL_SETTLINGTIME)
, _timer(new QTimer(this))
, _outputDelay(DEFAUL_OUTPUTDEPLAY)
, _smoothingType(SmoothingType::Linear)
, _writeToLedsEnable(false)
, _continuousOutput(false)
, _pause(false)
, _currentConfigId(0)
, _enabled(false)
, tempValues(std::vector<uint64_t>(0, 0L))
, _log(Logger::getInstance("SMOOTHING"))
, _hyperion(hyperion)
, _updateInterval(DEFAUL_UPDATEINTERVALL.count())
, _settlingTime(DEFAUL_SETTLINGTIME)
, _timer(new QTimer(this))
, _outputDelay(DEFAUL_OUTPUTDEPLAY)
, _smoothingType(SmoothingType::Linear)
, _pause(false)
, _currentConfigId(0)
, _enabled(false)
, tempValues(std::vector<uint64_t>(0, 0L))
{
// init cfg 0 (default)
addConfig(DEFAUL_SETTLINGTIME, DEFAUL_UPDATEFREQUENCY, DEFAUL_OUTPUTDEPLAY);
@@ -94,8 +92,6 @@ void LinearColorSmoothing::handleSettingsUpdate(settings::type type, const QJson
setEnable(obj["enable"].toBool(true));
}
_continuousOutput = obj["continuousOutput"].toBool(true);
SMOOTHING_CFG cfg = {SmoothingType::Linear,true, 0, 0, 0, 0, 0, false, 1};
const QString typeString = obj[SETTINGS_KEY_SMOOTHING_TYPE].toString();
@@ -192,7 +188,6 @@ void LinearColorSmoothing::writeDirect()
_previousWriteTime = now;
queueColors(_previousValues);
_writeToLedsEnable = _continuousOutput;
}
@@ -201,7 +196,6 @@ void LinearColorSmoothing::writeFrame()
const int64_t now = micros();
_previousWriteTime = now;
queueColors(_previousValues);
_writeToLedsEnable = _continuousOutput;
}
@@ -402,12 +396,12 @@ void LinearColorSmoothing::performDecay(const int64_t now) {
if ((now > (_renderedStatTime + 30 * 1000000)) && (_renderedCounter > _renderedStatCounter))
{
Debug(_log, "decay - rendered frames [%d] (%f/s), interpolated frames [%d] (%f/s) in [%f ms]"
, _renderedCounter - _renderedStatCounter
, (1.0F * (_renderedCounter - _renderedStatCounter) / ((now - _renderedStatTime) / 1000000.0F))
, _interpolationCounter - _interpolationStatCounter
, (1.0F * (_interpolationCounter - _interpolationStatCounter) / ((now - _renderedStatTime) / 1000000.0F))
, (now - _renderedStatTime) / 1000.0F
);
_renderedStatTime = now;
_renderedStatCounter = _renderedCounter;
_interpolationStatCounter = _interpolationCounter;
@@ -505,30 +499,23 @@ void LinearColorSmoothing::clearRememberedFrames()
void LinearColorSmoothing::queueColors(const std::vector<ColorRgb> &ledColors)
{
//Debug(_log, "queueColors - _outputDelay[%d] _outputQueue.size() [%d], _writeToLedsEnable[%d]", _outputDelay, _outputQueue.size(), _writeToLedsEnable);
if (_outputDelay == 0)
{
// No output delay => immediate write
if (_writeToLedsEnable && !_pause)
if (!_pause)
{
// if ( ledColors.size() == 0 )
// qFatal ("No LedValues! - in LinearColorSmoothing::queueColors() - _outputDelay == 0");
// else
emit _hyperion->ledDeviceData(ledColors);
}
}
else
{
// Push new colors in the delay-buffer
if (_writeToLedsEnable)
{
_outputQueue.push_back(ledColors);
}
_outputQueue.push_back(ledColors);
// If the delay-buffer is filled pop the front and write to device
if (!_outputQueue.empty())
{
if (_outputQueue.size() > _outputDelay || !_writeToLedsEnable)
if (_outputQueue.size() > _outputDelay)
{
if (!_pause)
{
@@ -552,7 +539,6 @@ void LinearColorSmoothing::clearQueuedColors()
void LinearColorSmoothing::componentStateChange(hyperion::Components component, bool state)
{
_writeToLedsEnable = state;
if (component == hyperion::COMP_LEDDEVICE)
{
clearQueuedColors();
@@ -682,7 +668,7 @@ bool LinearColorSmoothing::selectConfig(unsigned cfg, bool force)
QMetaObject::invokeMethod(_timer, "stop", Qt::QueuedConnection);
_updateInterval = _cfgList[cfg].updateInterval;
if (this->enabled() && this->_writeToLedsEnable)
if (this->enabled())
{
//Debug( _log, "_cfgList[cfg].updateInterval != _updateInterval - Restart timer - _updateInterval [%d]", _updateInterval);
QMetaObject::invokeMethod(_timer, "start", Qt::QueuedConnection, Q_ARG(int, _updateInterval));
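Stripped of the Hyperion plumbing, the output-delay path in queueColors() is a bounded FIFO: frames are appended and the oldest one is released once the queue grows past the configured delay. A hedged sketch of that behaviour; std::deque stands in for _outputQueue and the callback for the ledDeviceData signal:
#include <cstddef>
#include <deque>
#include <functional>
// Sketch of the delay buffer: hold "delay" frames back, then emit the oldest
// frame for every new one that arrives; a paused device skips only the output.
template <typename Frame>
static void queueWithDelay(std::deque<Frame>& queue, const Frame& frame,
                           std::size_t delay, bool paused,
                           const std::function<void(const Frame&)>& emitFrame)
{
	if (delay == 0)
	{
		if (!paused)
		{
			emitFrame(frame); // immediate write, no buffering
		}
		return;
	}
	queue.push_back(frame);
	if (queue.size() > delay)
	{
		if (!paused)
		{
			emitFrame(queue.front());
		}
		queue.pop_front();
	}
}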

View File

@@ -222,12 +222,6 @@ private:
/// The queue of temporarily remembered frames
std::deque<REMEMBERED_FRAME> _frameQueue;
/// Prevent sending data to device when no intput data is sent
bool _writeToLedsEnable;
/// Flag for dis/enable continuous output to led device regardless there is new data or not
bool _continuousOutput;
/// Flag for pausing
bool _pause;

View File

@@ -5,7 +5,6 @@
// qt incl
#include <QDateTime>
#include <QTimer>
#include <QDebug>
// Hyperion includes
#include <hyperion/PriorityMuxer.h>
@@ -13,14 +12,19 @@
// utils
#include <utils/Logger.h>
const int PriorityMuxer::FG_PRIORITY = 1;
const int PriorityMuxer::BG_PRIORITY = 254;
const int PriorityMuxer::MANUAL_SELECTED_PRIORITY = 256;
const int PriorityMuxer::LOWEST_PRIORITY = std::numeric_limits<uint8_t>::max();
const int PriorityMuxer::TIMEOUT_NOT_ACTIVE_PRIO = -100;
PriorityMuxer::PriorityMuxer(int ledCount, QObject * parent)
: QObject(parent)
, _log(Logger::getInstance("HYPERION"))
, _currentPriority(PriorityMuxer::LOWEST_PRIORITY)
, _previousPriority(_currentPriority)
, _manualSelectedPriority(256)
, _manualSelectedPriority(MANUAL_SELECTED_PRIORITY)
, _prevVisComp (hyperion::Components::COMP_COLOR)
, _activeInputs()
, _lowestPriorityInfo()
, _sourceAutoSelectEnabled(true)
@@ -100,7 +104,7 @@ void PriorityMuxer::updateLedColorsLength(int ledCount)
{
for (auto infoIt = _activeInputs.begin(); infoIt != _activeInputs.end();)
{
if (infoIt->ledColors.size() >= 1)
if (!infoIt->ledColors.empty())
{
infoIt->ledColors.resize(ledCount, infoIt->ledColors.at(0));
}
@@ -150,7 +154,7 @@ void PriorityMuxer::registerInput(int priority, hyperion::Components component,
InputInfo& input = _activeInputs[priority];
input.priority = priority;
input.timeoutTime_ms = newInput ? -100 : input.timeoutTime_ms;
input.timeoutTime_ms = newInput ? TIMEOUT_NOT_ACTIVE_PRIO : input.timeoutTime_ms;
input.componentId = component;
input.origin = origin;
input.smooth_cfg = smooth_cfg;
@@ -161,7 +165,9 @@ void PriorityMuxer::registerInput(int priority, hyperion::Components component,
Debug(_log,"Register new input '%s/%s' with priority %d as inactive", QSTRING_CSTR(origin), hyperion::componentToIdString(component), priority);
// emit 'prioritiesChanged' only if _sourceAutoSelectEnabled is false
if (!_sourceAutoSelectEnabled)
{
emit prioritiesChanged();
}
return;
}
@@ -179,19 +185,26 @@ bool PriorityMuxer::setInput(int priority, const std::vector<ColorRgb>& ledColor
return false;
}
// calc final timeout
if(timeout_ms > 0)
timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms;
InputInfo& input = _activeInputs[priority];
InputInfo& input = _activeInputs[priority];
// detect active <-> inactive changes
bool activeChange = false;
bool active = true;
if(input.timeoutTime_ms == -100 && timeout_ms != -100)
// calculate final timeout
if (timeout_ms >= 0)
{
timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms;
}
else if (input.timeoutTime_ms >= 0)
{
timeout_ms = QDateTime::currentMSecsSinceEpoch();
}
if(input.timeoutTime_ms == TIMEOUT_NOT_ACTIVE_PRIO && timeout_ms != TIMEOUT_NOT_ACTIVE_PRIO)
{
activeChange = true;
}
else if(timeout_ms == -100 && input.timeoutTime_ms != -100)
else if(timeout_ms == TIMEOUT_NOT_ACTIVE_PRIO && input.timeoutTime_ms != TIMEOUT_NOT_ACTIVE_PRIO)
{
active = false;
activeChange = true;
@@ -223,19 +236,26 @@ bool PriorityMuxer::setInputImage(int priority, const Image<ColorRgb>& image, in
return false;
}
// calculate final timeout
if(timeout_ms > 0)
timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms;
InputInfo& input = _activeInputs[priority];
InputInfo& input = _activeInputs[priority];
// detect active <-> inactive changes
bool activeChange = false;
bool active = true;
if(input.timeoutTime_ms == -100 && timeout_ms != -100)
// calculate final timeout
if (timeout_ms >= 0)
{
timeout_ms = QDateTime::currentMSecsSinceEpoch() + timeout_ms;
}
else if (input.timeoutTime_ms >= 0)
{
timeout_ms = QDateTime::currentMSecsSinceEpoch();
}
if(input.timeoutTime_ms == TIMEOUT_NOT_ACTIVE_PRIO && timeout_ms != TIMEOUT_NOT_ACTIVE_PRIO)
{
activeChange = true;
}
else if(timeout_ms == -100 && input.timeoutTime_ms != -100)
else if(timeout_ms == TIMEOUT_NOT_ACTIVE_PRIO && input.timeoutTime_ms != TIMEOUT_NOT_ACTIVE_PRIO)
{
active = false;
activeChange = true;
@@ -250,7 +270,9 @@ bool PriorityMuxer::setInputImage(int priority, const Image<ColorRgb>& image, in
{
Debug(_log, "Priority %d is now %s", priority, active ? "active" : "inactive");
if (_currentPriority < priority)
{
emit prioritiesChanged();
}
setCurrentTime();
}
@@ -260,18 +282,18 @@ bool PriorityMuxer::setInputImage(int priority, const Image<ColorRgb>& image, in
bool PriorityMuxer::setInputInactive(int priority)
{
Image<ColorRgb> image;
return setInputImage(priority, image, -100);
return setInputImage(priority, image, TIMEOUT_NOT_ACTIVE_PRIO);
}
bool PriorityMuxer::clearInput(int priority)
{
if (priority < PriorityMuxer::LOWEST_PRIORITY && _activeInputs.remove(priority))
if (priority < PriorityMuxer::LOWEST_PRIORITY && (_activeInputs.remove(priority) > 0))
{
Debug(_log,"Removed source priority %d",priority);
// on clear success update _currentPriority
setCurrentTime();
// emit 'prioritiesChanged' only if _sourceAutoSelectEnabled is false
if (!_sourceAutoSelectEnabled || _currentPriority < priority)
if ((!_sourceAutoSelectEnabled && (_currentPriority < priority)) || _currentPriority == BG_PRIORITY)
emit prioritiesChanged();
return true;
}
@@ -317,14 +339,15 @@ void PriorityMuxer::setCurrentTime()
}
else
{
// timeoutTime of -100 is awaiting data (inactive); skip
if(infoIt->timeoutTime_ms > -100)
// timeoutTime of TIMEOUT_NOT_ACTIVE_PRIO is awaiting data (inactive); skip
if(infoIt->timeoutTime_ms > TIMEOUT_NOT_ACTIVE_PRIO)
newPriority = qMin(newPriority, infoIt->priority);
// call timeTrigger when effect or color is running with timeout > 0, blacklist prio 255
if(infoIt->priority < 254 && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR || infoIt->componentId == hyperion::COMP_IMAGE))
if (infoIt->priority < BG_PRIORITY && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR || infoIt->componentId == hyperion::COMP_IMAGE))
{
emit signalTimeTrigger(); // as signal to prevent Threading issues
}
++infoIt;
}
}
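A priority's activity is tracked through its timeout value: a non-negative value is an absolute deadline, while the TIMEOUT_NOT_ACTIVE_PRIO sentinel marks a registered source that is still awaiting data. The active/inactive change detection used in setInput()/setInputImage() boils down to a sentinel comparison; a compact sketch with the constant value taken from the code above:
#include <cstdint>
// Sketch: a priority is inactive while its timeout equals the sentinel;
// any transition into or out of that state is an active<->inactive change.
constexpr int64_t NOT_ACTIVE_SENTINEL = -100; // value of TIMEOUT_NOT_ACTIVE_PRIO above
static bool activeStateChanged(int64_t storedTimeout, int64_t newTimeout)
{
	const bool wasActive = (storedTimeout != NOT_ACTIVE_SENTINEL);
	const bool isActive  = (newTimeout   != NOT_ACTIVE_SENTINEL);
	return wasActive != isActive;
}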

View File

@@ -4,6 +4,7 @@
// util
#include <utils/JsonUtils.h>
#include <db/SettingsTable.h>
#include "HyperionConfig.h"
// json schema process
#include <utils/jsonschema/QJsonFactory.h>
@@ -12,12 +13,23 @@
// write config to filesystem
#include <utils/JsonUtils.h>
#include <utils/version.hpp>
using namespace semver;
// Constants
namespace {
const char DEFAULT_VERSION[] = "2.0.0-alpha.8";
} //End of constants
QJsonObject SettingsManager::schemaJson;
SettingsManager::SettingsManager(quint8 instance, QObject* parent, bool readonlyMode)
: QObject(parent)
, _log(Logger::getInstance("SETTINGSMGR"))
, _instance(instance)
, _sTable(new SettingsTable(instance, this))
, _configVersion(DEFAULT_VERSION)
, _previousVersion(DEFAULT_VERSION)
, _readonlyMode(readonlyMode)
{
_sTable->setReadonlyMode(_readonlyMode);
@@ -38,7 +50,9 @@ SettingsManager::SettingsManager(quint8 instance, QObject* parent, bool readonly
// get default config
QJsonObject defaultConfig;
if(!JsonUtils::readFile(":/hyperion_default.config", defaultConfig, _log))
{
throw std::runtime_error("Failed to read default config");
}
// transform json to string lists
QStringList keyList = defaultConfig.keys();
@@ -64,7 +78,7 @@ SettingsManager::SettingsManager(quint8 instance, QObject* parent, bool readonly
_sTable->createSettingsRecord(key,val);
}
// need to validate all data in database constuct the entire data object
// need to validate all data in database construct the entire data object
// TODO refactor schemaChecker to accept QJsonArray in validate(); QJsonDocument container? To validate them per entry...
QJsonObject dbConfig;
for(const auto & key : keyList)
@@ -76,8 +90,56 @@ SettingsManager::SettingsManager(quint8 instance, QObject* parent, bool readonly
dbConfig[key] = doc.object();
}
//Check, if database requires migration
bool isNewRelease = false;
// Use instance independent SettingsManager to track migration status
if ( instance == GLOABL_INSTANCE_ID)
{
if ( resolveConfigVersion(dbConfig) )
{
QJsonObject newGeneralConfig = dbConfig["general"].toObject();
semver::version BUILD_VERSION(HYPERION_VERSION);
if (!BUILD_VERSION.isValid())
{
Error(_log, "Current Hyperion version [%s] is invalid. Exiting...", BUILD_VERSION.getVersion().c_str());
exit(1);
}
if ( _configVersion > BUILD_VERSION )
{
Error(_log, "Database version [%s] is greater than current Hyperion version [%s]", _configVersion.getVersion().c_str(), BUILD_VERSION.getVersion().c_str());
// TODO: Remove version checking and Settingsmanager from components' constructor to be able to stop hyperion.
}
else
{
if ( _previousVersion < BUILD_VERSION )
{
if ( _configVersion == BUILD_VERSION )
{
newGeneralConfig["previousVersion"] = BUILD_VERSION.getVersion().c_str();
dbConfig["general"] = newGeneralConfig;
isNewRelease = true;
Info(_log, "Migration completed to version [%s]", BUILD_VERSION.getVersion().c_str());
}
else
{
Info(_log, "Migration from current version [%s] to new version [%s] started", _previousVersion.getVersion().c_str(), BUILD_VERSION.getVersion().c_str());
newGeneralConfig["previousVersion"] = _configVersion.getVersion().c_str();
newGeneralConfig["configVersion"] = BUILD_VERSION.getVersion().c_str();
dbConfig["general"] = newGeneralConfig;
isNewRelease = true;
}
}
}
}
}
// possible data upgrade steps to prevent data loss
if(handleConfigUpgrade(dbConfig))
bool migrated = handleConfigUpgrade(dbConfig);
if ( isNewRelease || migrated )
{
saveSettings(dbConfig, true);
}
@@ -114,6 +176,25 @@ QJsonDocument SettingsManager::getSetting(settings::type type) const
return _sTable->getSettingsRecord(settings::typeToString(type));
}
QJsonObject SettingsManager::getSettings() const
{
QJsonObject config;
for(const auto & key : _qconfig.keys())
{
//Read all records from database to ensure that global settings are read across instances
QJsonDocument doc = _sTable->getSettingsRecord(key);
if(doc.isArray())
{
config.insert(key, doc.array());
}
else
{
config.insert(key, doc.object());
}
}
return config;
}
bool SettingsManager::saveSettings(QJsonObject config, bool correct)
{
// optional data upgrades e.g. imported legacy/older configs
@@ -174,65 +255,273 @@ bool SettingsManager::saveSettings(QJsonObject config, bool correct)
return rc;
}
inline QString fixVersion (const QString& version)
{
QString newVersion;
//Try fixing version number, remove dot separated pre-release identifiers not supported
QRegularExpression regEx("(\\d+\\.\\d+\\.\\d+-?[a-zA-Z-\\d]*\\.?[\\d]*)", QRegularExpression::CaseInsensitiveOption | QRegularExpression::MultilineOption);
QRegularExpressionMatch match;
match = regEx.match(version);
if (match.hasMatch())
{
newVersion = match.captured(1);
}
return newVersion;
}
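fixVersion() keeps only the leading semver-shaped part of the stored string, so trailing noise is removed before the value is handed to semver::version. A hedged usage sketch; the inputs are made-up examples, not values taken from any shipped configuration:
// Illustrative inputs only: the regular expression keeps the matched version core
// and drops anything after it; no match yields an empty string.
const QString cleaned = fixVersion("2.0.0-alpha.9 (dirty)"); // -> "2.0.0-alpha.9"
const QString empty   = fixVersion("not-a-version");         // -> ""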
bool SettingsManager::resolveConfigVersion(QJsonObject& config)
{
bool isValid = false;
if (config.contains("general"))
{
QJsonObject generalConfig = config["general"].toObject();
QString configVersion = generalConfig["configVersion"].toString();
QString previousVersion = generalConfig["previousVersion"].toString();
if ( !configVersion.isEmpty() )
{
isValid = _configVersion.setVersion(configVersion.toStdString());
if (!isValid)
{
isValid = _configVersion.setVersion( fixVersion(configVersion).toStdString() );
if (isValid)
{
Info(_log, "Invalid config version [%s] fixed. Updated to [%s]", QSTRING_CSTR(configVersion), _configVersion.getVersion().c_str());
}
}
}
else
{
_configVersion.setVersion(DEFAULT_VERSION);
isValid = true;
}
if ( !previousVersion.isEmpty() && isValid )
{
isValid = _previousVersion.setVersion(previousVersion.toStdString());
if (!isValid)
{
isValid = _previousVersion.setVersion( fixVersion(previousVersion).toStdString() );
if (isValid)
{
Info(_log, "Invalid previous version [%s] fixed. Updated to [%s]", QSTRING_CSTR(previousVersion), _previousVersion.getVersion().c_str());
}
}
}
else
{
_previousVersion.setVersion(DEFAULT_VERSION);
isValid = true;
}
}
return isValid;
}
bool SettingsManager::handleConfigUpgrade(QJsonObject& config)
{
bool migrated = false;
// LED LAYOUT UPGRADE
// from { hscan: { minimum: 0.2, maximum: 0.3 }, vscan: { minimum: 0.2, maximumn: 0.3 } }
// from { h: { min: 0.2, max: 0.3 }, v: { min: 0.2, max: 0.3 } }
// to { hmin: 0.2, hmax: 0.3, vmin: 0.2, vmax: 0.3}
if(config.contains("leds"))
//Only migrate, if valid versions are available
if ( !resolveConfigVersion(config) )
{
const QJsonArray ledarr = config["leds"].toArray();
const QJsonObject led = ledarr[0].toObject();
if(led.contains("hscan") || led.contains("h"))
Warning(_log, "Invalid version information found in configuration. No database migration executed.");
}
else
{
//Do only migrate, if configuration is not up to date
if (_previousVersion < _configVersion)
{
const bool whscan = led.contains("hscan");
QJsonArray newLedarr;
for(const auto & entry : ledarr)
//Migration steps for versions <= alpha 9
semver::version targetVersion {"2.0.0-alpha.9"};
if (_previousVersion <= targetVersion )
{
const QJsonObject led = entry.toObject();
QJsonObject hscan;
QJsonObject vscan;
QJsonValue hmin;
QJsonValue hmax;
QJsonValue vmin;
QJsonValue vmax;
QJsonObject nL;
Info(_log, "Instance [%u]: Migrate LED Layout from current version [%s] to version [%s] or later", _instance, _previousVersion.getVersion().c_str(), targetVersion.getVersion().c_str());
if(whscan)
// LED LAYOUT UPGRADE
// from { hscan: { minimum: 0.2, maximum: 0.3 }, vscan: { minimum: 0.2, maximum: 0.3 } }
// from { h: { min: 0.2, max: 0.3 }, v: { min: 0.2, max: 0.3 } }
// to { hmin: 0.2, hmax: 0.3, vmin: 0.2, vmax: 0.3}
if(config.contains("leds"))
{
hscan = led["hscan"].toObject();
vscan = led["vscan"].toObject();
hmin = hscan["minimum"];
hmax = hscan["maximum"];
vmin = vscan["minimum"];
vmax = vscan["maximum"];
const QJsonArray ledarr = config["leds"].toArray();
const QJsonObject led = ledarr[0].toObject();
if(led.contains("hscan") || led.contains("h"))
{
const bool whscan = led.contains("hscan");
QJsonArray newLedarr;
for(const auto & entry : ledarr)
{
const QJsonObject led = entry.toObject();
QJsonObject hscan;
QJsonObject vscan;
QJsonValue hmin;
QJsonValue hmax;
QJsonValue vmin;
QJsonValue vmax;
QJsonObject nL;
if(whscan)
{
hscan = led["hscan"].toObject();
vscan = led["vscan"].toObject();
hmin = hscan["minimum"];
hmax = hscan["maximum"];
vmin = vscan["minimum"];
vmax = vscan["maximum"];
}
else
{
hscan = led["h"].toObject();
vscan = led["v"].toObject();
hmin = hscan["min"];
hmax = hscan["max"];
vmin = vscan["min"];
vmax = vscan["max"];
}
// append to led object
nL["hmin"] = hmin;
nL["hmax"] = hmax;
nL["vmin"] = vmin;
nL["vmax"] = vmax;
newLedarr.append(nL);
}
// replace
config["leds"] = newLedarr;
migrated = true;
Info(_log,"Instance [%u]: LED Layout migrated", _instance);
}
}
else
if(config.contains("ledConfig"))
{
hscan = led["h"].toObject();
vscan = led["v"].toObject();
hmin = hscan["min"];
hmax = hscan["max"];
vmin = vscan["min"];
vmax = vscan["max"];
QJsonObject oldLedConfig = config["ledConfig"].toObject();
if ( !oldLedConfig.contains("classic"))
{
QJsonObject newLedConfig;
newLedConfig.insert("classic", oldLedConfig );
QJsonObject defaultMatrixConfig {{"ledshoriz", 1}
,{"ledsvert", 1}
,{"cabling","snake"}
,{"start","top-left"}
};
newLedConfig.insert("matrix", defaultMatrixConfig );
config["ledConfig"] = newLedConfig;
migrated = true;
Info(_log,"Instance [%u]: LED-Config migrated", _instance);
}
}
// The hardware LED count is the leading value for versions after alpha 9
// If the layout defines more LEDs than the configured hardware LED count, raise the hardware count to the layout size
if (config.contains("device"))
{
QJsonObject newDeviceConfig = config["device"].toObject();
if (newDeviceConfig.contains("hardwareLedCount"))
{
int hwLedcount = newDeviceConfig["hardwareLedCount"].toInt();
if (config.contains("leds"))
{
const QJsonArray ledarr = config["leds"].toArray();
int layoutLedCount = ledarr.size();
if (hwLedcount < layoutLedCount )
{
Warning(_log, "Instance [%u]: HwLedCount/Layout mismatch! Setting Hardware LED count to number of LEDs configured via layout", _instance);
hwLedcount = layoutLedCount;
newDeviceConfig["hardwareLedCount"] = hwLedcount;
migrated = true;
}
}
}
if (newDeviceConfig.contains("type"))
{
QString type = newDeviceConfig["type"].toString();
if (type == "atmoorb" || type == "fadecandy" || type == "philipshue" )
{
if (newDeviceConfig.contains("output"))
{
newDeviceConfig["host"] = newDeviceConfig["output"].toString();
newDeviceConfig.remove("output");
migrated = true;
}
}
}
if (migrated)
{
config["device"] = newDeviceConfig;
Debug(_log, "LED-Device records migrated");
}
}
if (config.contains("grabberV4L2"))
{
QJsonObject newGrabberV4L2Config = config["grabberV4L2"].toObject();
if (newGrabberV4L2Config.contains("encoding_format"))
{
newGrabberV4L2Config.remove("encoding_format");
newGrabberV4L2Config["grabberV4L2"] = newGrabberV4L2Config;
migrated = true;
}
//Add new element enable
if (!newGrabberV4L2Config.contains("enable"))
{
newGrabberV4L2Config["enable"] = false;
migrated = true;
}
config["grabberV4L2"] = newGrabberV4L2Config;
Debug(_log, "GrabberV4L2 records migrated");
}
if (config.contains("framegrabber"))
{
QJsonObject newFramegrabberConfig = config["framegrabber"].toObject();
//Align element namings with grabberV4L2
//Rename element type -> device
if (newFramegrabberConfig.contains("type"))
{
newFramegrabberConfig["device"] = newFramegrabberConfig["type"].toString();
newFramegrabberConfig.remove("type");
migrated = true;
}
//Rename element frequency_Hz -> fps
if (newFramegrabberConfig.contains("frequency_Hz"))
{
newFramegrabberConfig["fps"] = newFramegrabberConfig["frequency_Hz"].toInt(25);
newFramegrabberConfig.remove("frequency_Hz");
migrated = true;
}
//Rename element display -> input
if (newFramegrabberConfig.contains("display"))
{
newFramegrabberConfig["input"] = newFramegrabberConfig["display"];
newFramegrabberConfig.remove("display");
migrated = true;
}
//Add new element enable
if (!newFramegrabberConfig.contains("enable"))
{
newFramegrabberConfig["enable"] = false;
migrated = true;
}
config["framegrabber"] = newFramegrabberConfig;
Debug(_log, "Framegrabber records migrated");
}
}
}
}
return migrated;
}
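
Note: the per-entry conversion performed by the LED layout migration above boils down to the following stand-alone sketch (the helper name is illustrative and not part of the codebase):

#include <QJsonObject>
#include <QJsonValue>

// Illustrative helper: converts one legacy LED entry (hscan/vscan or h/v form)
// into the flat hmin/hmax/vmin/vmax form expected from alpha 9 onwards.
QJsonObject migrateLedEntry(const QJsonObject& led)
{
	const bool whscan = led.contains("hscan");
	const QJsonObject h = whscan ? led["hscan"].toObject() : led["h"].toObject();
	const QJsonObject v = whscan ? led["vscan"].toObject() : led["v"].toObject();

	QJsonObject nL;
	nL["hmin"] = whscan ? h["minimum"] : h["min"];
	nL["hmax"] = whscan ? h["maximum"] : h["max"];
	nL["vmin"] = whscan ? v["minimum"] : v["min"];
	nL["vmax"] = whscan ? v["maximum"] : v["max"];
	return nL;
}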

View File

@@ -1,7 +1,6 @@
<RCC>
<qresource prefix="/">
<file alias="hyperion-schema">hyperion.schema.json</file>
<file alias="hyperion_default.config">../../config/hyperion.config.json.default</file>
<file alias="schema-general.json">schema/schema-general.json</file>
<file alias="schema-logger.json">schema/schema-logger.json</file>
<file alias="schema-device.json">schema/schema-device.json</file>

View File

@@ -23,8 +23,8 @@
{
"type" : "integer",
"title" : "edt_conf_general_priority_title",
"minimum" : 100,
"maximum" : 254,
"minimum" : 2,
"maximum" : 253,
"default" : 128,
"propertyOrder" : 3
}

View File

@@ -206,7 +206,7 @@
"required" : true,
"minimum" : 0.1,
"maximum": 100.0,
"default" : 1.5,
"default" : 2.2,
"step" : 0.1,
"propertyOrder" : 15
},
@@ -217,7 +217,7 @@
"required" : true,
"minimum" : 0.1,
"maximum": 100.0,
"default" : 1.5,
"default" : 2.2,
"step" : 0.1,
"propertyOrder" : 16
},
@@ -228,7 +228,7 @@
"required" : true,
"minimum" : 0.1,
"maximum": 100.0,
"default" : 1.5,
"default" : 2.2,
"step" : 0.1,
"propertyOrder" : 17
}

View File

@@ -1,49 +1,45 @@
{
"type" : "object",
"title" : "edt_dev_general_heading_title",
"required" : true,
"defaultProperties": ["hardwareLedCount", "colorOrder"],
"properties" :
{
"type" :
{
"type" : "string",
"propertyOrder" : 1
},
"hardwareLedCount" :
{
"type" : "integer",
"title" : "edt_dev_general_hardwareLedCount_title",
"minimum" : 1,
"default" : 1,
"access" : "expert",
"propertyOrder" : 2
},
"colorOrder" :
{
"type" : "string",
"title" : "edt_dev_general_colorOrder_title",
"enum" : ["rgb", "bgr", "rbg", "brg", "gbr", "grb"],
"default" : "rgb",
"options" : {
"enum_titles" : ["edt_conf_enum_rgb", "edt_conf_enum_bgr", "edt_conf_enum_rbg", "edt_conf_enum_brg", "edt_conf_enum_gbr", "edt_conf_enum_grb"]
},
"propertyOrder" : 3
}
},
"dependencies" :
{
"rewriteTime" :
{
"properties" :
{
"type" :
{
"enum" : ["file", "apa102", "apa104", "ws2801", "lpd6803", "lpd8806", "p9813", "sk6812spi", "sk6822spi", "sk9822", "ws2812spi","ws281x", "piblaster", "adalight", "dmx", "atmo", "hyperionusbasp", "lightpack", "multilightpack", "paintpack", "rawhid", "sedu", "tpm2", "karate"]
}
},
"additionalProperties" : true
}
},
"additionalProperties" : true
"type": "object",
"title": " ",
"defaultProperties": [ "hardwareLedCount", "colorOrder" ],
"properties": {
"type": {
"type": "string",
"propertyOrder": 1
},
"hardwareLedCount": {
"type": "integer",
"title": "edt_dev_general_hardwareLedCount_title",
"minimum": 1,
"default": 1,
"options": {
"infoText": "edt_dev_general_hardwareLedCount_title_info"
},
"propertyOrder": 2
},
"colorOrder": {
"type": "string",
"title": "edt_dev_general_colorOrder_title",
"enum": [ "rgb", "bgr", "rbg", "brg", "gbr", "grb" ],
"default": "rgb",
"required": true,
"options": {
"enum_titles": [ "edt_conf_enum_rgb", "edt_conf_enum_bgr", "edt_conf_enum_rbg", "edt_conf_enum_brg", "edt_conf_enum_gbr", "edt_conf_enum_grb" ],
"infoText": "edt_dev_general_colorOrder_title_info"
},
"access": "expert",
"propertyOrder": 3
}
},
"dependencies": {
"rewriteTime": {
"properties": {
"type": {
"enum": [ "file", "apa102", "apa104", "ws2801", "lpd6803", "lpd8806", "p9813", "sk6812spi", "sk6822spi", "sk9822", "ws2812spi", "ws281x", "piblaster", "adalight", "dmx", "atmo", "hyperionusbasp", "lightpack", "multilightpack", "paintpack", "rawhid", "sedu", "tpm2", "karate" ]
}
},
"additionalProperties": true
}
},
"additionalProperties": true
}

View File

@@ -1,100 +1,141 @@
{
"type" : "object",
"title" : "edt_conf_fg_heading_title",
"properties" :
"properties":
{
"type" :
{
"type" : "string",
"title" : "edt_conf_fg_type_title",
"enum" : ["auto","amlogic","dispmanx","dx","framebuffer","osx","qt","x11", "xcb"],
"options":
{
"enum_titles": ["edt_conf_enum_automatic","AMLogic","DispmanX","DirectX9","Framebuffer","OSX","QT","X11","XCB"]
"enable": {
"type": "boolean",
"title": "edt_conf_general_enable_title",
"required": true,
"default": false,
"propertyOrder": 1
},
"available_devices": {
"type": "string",
"title": "edt_conf_grabber_discovered_title",
"default": "edt_conf_grabber_discovery_inprogress",
"options": {
"infoText": "edt_conf_grabber_discovered_title_info"
},
"default" : "auto",
"propertyOrder" : 1
"propertyOrder": 2,
"required": false
},
"width" :
{
"type" : "integer",
"title" : "edt_conf_fg_width_title",
"minimum" : 10,
"default" : 80,
"append" : "edt_append_pixel",
"propertyOrder" : 2
"device": {
"type": "string",
"title": "edt_conf_enum_custom",
"options": {
"hidden": true
},
"required": true,
"comment": "The 'available_devices' settings are dynamically inserted into the WebUI under PropertyOrder '2'.",
"propertyOrder": 3
},
"height" :
{
"type" : "integer",
"title" : "edt_conf_fg_height_title",
"minimum" : 10,
"default" : 45,
"append" : "edt_append_pixel",
"propertyOrder" : 3
"device_inputs": {
"type": "string",
"title": "edt_conf_v4l2_input_title",
"propertyOrder": 4,
"required": false
},
"frequency_Hz" :
{
"type" : "integer",
"title" : "edt_conf_fg_frequency_Hz_title",
"minimum" : 1,
"default" : 10,
"append" : "edt_append_hz",
"propertyOrder" : 4
"input": {
"type": "integer",
"title": "edt_conf_enum_custom",
"minimum": 0,
"default": 0,
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 5,
"comment": "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '4'."
},
"cropLeft" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropLeft_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"propertyOrder" : 5
"resolutions": {
"type": "string",
"title": "edt_conf_v4l2_resolution_title",
"propertyOrder": 6,
"required": false
},
"cropRight" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropRight_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"propertyOrder" : 6
"width": {
"type": "integer",
"title": "edt_conf_enum_custom",
"minimum": 10,
"default": 80,
"append": "edt_append_pixel",
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 9,
"comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'."
},
"cropTop" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropTop_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"propertyOrder" : 7
"height": {
"type": "integer",
"title": "edt_conf_enum_custom",
"append": "edt_append_pixel",
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 10,
"comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'."
},
"cropBottom" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropBottom_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"propertyOrder" : 8
"framerates": {
"type": "string",
"title": "edt_conf_fg_frequency_Hz_title",
"propertyOrder": 11,
"required": false
},
"pixelDecimation" :
{
"type" : "integer",
"title" : "edt_conf_fg_pixelDecimation_title",
"minimum" : 1,
"maximum" : 30,
"default" : 8,
"propertyOrder" : 9
"fps": {
"type": "integer",
"title": "edt_conf_enum_custom",
"default":10,
"append": "fps",
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 12,
"comment": "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '11'."
},
"display" :
{
"type" : "integer",
"title" : "edt_conf_fg_display_title",
"minimum" : 0,
"default" : 0,
"propertyOrder" : 10
"pixelDecimation": {
"type": "integer",
"title": "edt_conf_fg_pixelDecimation_title",
"minimum": 1,
"maximum": 30,
"default": 8,
"required": true,
"propertyOrder": 13
},
"cropLeft": {
"type": "integer",
"title": "edt_conf_v4l2_cropLeft_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 14
},
"cropRight": {
"type": "integer",
"title": "edt_conf_v4l2_cropRight_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 15
},
"cropTop": {
"type": "integer",
"title": "edt_conf_v4l2_cropTop_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 16
},
"cropBottom": {
"type": "integer",
"title": "edt_conf_v4l2_cropBottom_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 17
}
},
"additionalProperties" : false

View File

@@ -34,6 +34,25 @@
"default" : true,
"required" : true,
"propertyOrder" : 3
},
"configVersion" :
{
"type" : "string",
"title" : "edt_conf_gen_configVersion_title",
"options" : {
"hidden":true
},
"access" : "expert",
"propertyOrder" : 4
},
"previousVersion" :
{
"type" : "string",
"options" : {
"hidden":true
},
"access" : "expert",
"propertyOrder" : 5
}
},
"additionalProperties" : false

View File

@@ -2,263 +2,359 @@
"type" : "object",
"required" : true,
"title" : "edt_conf_v4l2_heading_title",
"properties" :
"properties":
{
"device" :
{
"type" : "string",
"title" : "edt_conf_enum_custom",
"default" : "auto",
"options" : {
"hidden":true
"enable": {
"type": "boolean",
"title": "edt_conf_general_enable_title",
"required": true,
"default": false,
"propertyOrder": 1
},
"available_devices": {
"type": "string",
"title": "edt_conf_grabber_discovered_title",
"default": "edt_conf_grabber_discovery_inprogress",
"options": {
"infoText": "edt_conf_grabber_discovered_title_info"
},
"required" : true,
"propertyOrder" : 2,
"comment" : "The 'available_devices' settings are dynamically inserted into the WebUI under PropertyOrder '1'."
"propertyOrder": 2,
"required": false
},
"input" :
{
"type" : "integer",
"title" : "edt_conf_enum_custom",
"default" : 0,
"options" : {
"hidden":true
"device": {
"type": "string",
"title": "edt_conf_enum_custom",
"options": {
"hidden": true
},
"required" : true,
"propertyOrder" : 4,
"comment" : "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '3'."
"required": true,
"comment": "The 'available_devices' settings are dynamically inserted into the WebUI under PropertyOrder '2'.",
"propertyOrder": 3
},
"standard" :
{
"type" : "string",
"title" : "edt_conf_v4l2_standard_title",
"enum" : ["NO_CHANGE", "PAL","NTSC","SECAM"],
"default" : "NO_CHANGE",
"options" : {
"enum_titles" : ["edt_conf_enum_NO_CHANGE", "edt_conf_enum_PAL", "edt_conf_enum_NTSC", "edt_conf_enum_SECAM"]
"device_inputs": {
"type": "string",
"title": "edt_conf_v4l2_input_title",
"propertyOrder": 4,
"required": false
},
"input": {
"type": "integer",
"title": "edt_conf_enum_custom",
"default": 0,
"options": {
"hidden": true
},
"required" : true,
"propertyOrder" : 5
"required": true,
"propertyOrder": 5,
"comment": "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '4'."
},
"width" :
{
"type" : "integer",
"title" : "edt_conf_fg_width_title",
"default" : 0,
"minimum" : 0,
"append" : "edt_append_pixel",
"options" : {
"hidden":true
"standard": {
"type": "string",
"title": "edt_conf_v4l2_standard_title",
"required": false,
"propertyOrder": 6
},
"encoding": {
"type": "string",
"title": "edt_conf_v4l2_encoding_title",
"required": false,
"access": "advanced",
"propertyOrder": 7
},
"resolutions": {
"type": "string",
"title": "edt_conf_v4l2_resolution_title",
"propertyOrder": 8,
"required": false
},
"width": {
"type": "integer",
"title": "edt_conf_fg_width_title",
"default": 0,
"minimum": 0,
"append": "edt_append_pixel",
"options": {
"hidden": true
},
"required" : true,
"propertyOrder" : 7,
"comment" : "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '6'."
"required": true,
"propertyOrder": 9,
"comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '8'."
},
"height" :
{
"type" : "integer",
"title" : "edt_conf_fg_height_title",
"default" : 0,
"minimum" : 0,
"append" : "edt_append_pixel",
"options" : {
"hidden":true
"height": {
"type": "integer",
"title": "edt_conf_fg_height_title",
"default": 0,
"minimum": 0,
"append": "edt_append_pixel",
"options": {
"hidden": true
},
"required" : true,
"propertyOrder" : 8
"required": true,
"propertyOrder": 10,
"comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '8'."
},
"fps" :
{
"type" : "integer",
"title" : "edt_conf_enum_custom",
"default" : 15,
"minimum" : 1,
"append" : "fps",
"options" : {
"hidden":true
"framerates": {
"type": "string",
"title": "edt_conf_v4l2_framerate_title",
"propertyOrder": 11,
"required": false
},
"fps": {
"type": "integer",
"title": "edt_conf_enum_custom",
"default": 15,
"minimum": 0,
"append": "fps",
"options": {
"hidden": true
},
"required" : true,
"propertyOrder" : 10,
"comment" : "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '9'."
"required": true,
"propertyOrder": 12,
"comment": "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '11'."
},
"sizeDecimation" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_sizeDecimation_title",
"minimum" : 1,
"maximum" : 30,
"default" : 6,
"required" : true,
"propertyOrder" : 11
"fpsSoftwareDecimation": {
"type": "integer",
"title": "edt_conf_v4l2_fpsSoftwareDecimation_title",
"minimum": 0,
"maximum": 60,
"default": 0,
"required": true,
"access": "expert",
"propertyOrder": 13
},
"cropLeft" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropLeft_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"required" : true,
"propertyOrder" : 12
"flip": {
"type": "string",
"title": "edt_conf_v4l2_flip_title",
"enum": [ "NO_CHANGE", "HORIZONTAL", "VERTICAL", "BOTH" ],
"default": "NO_CHANGE",
"options": {
"enum_titles": [ "edt_conf_enum_NO_CHANGE", "edt_conf_enum_HORIZONTAL", "edt_conf_enum_VERTICAL", "edt_conf_enum_BOTH" ]
},
"required": true,
"access": "advanced",
"propertyOrder": 14
},
"cropRight" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropRight_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"required" : true,
"propertyOrder" : 13
"sizeDecimation": {
"type": "integer",
"title": "edt_conf_v4l2_sizeDecimation_title",
"minimum": 1,
"maximum": 30,
"default": 8,
"required": true,
"propertyOrder": 15
},
"cropTop" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropTop_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"required" : true,
"propertyOrder" : 14
"hardware_brightness": {
"type": "integer",
"title": "edt_conf_v4l2_hardware_brightness_title",
"default": 0,
"required": true,
"access": "expert",
"propertyOrder": 16
},
"cropBottom" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_cropBottom_title",
"minimum" : 0,
"default" : 0,
"append" : "edt_append_pixel",
"required" : true,
"propertyOrder" : 15
"hardware_contrast": {
"type": "integer",
"title": "edt_conf_v4l2_hardware_contrast_title",
"default": 0,
"required": true,
"access": "expert",
"propertyOrder": 17
},
"cecDetection" :
{
"type" : "boolean",
"title" : "edt_conf_v4l2_cecDetection_title",
"default" : false,
"required" : true,
"propertyOrder" : 16
"hardware_saturation": {
"type": "integer",
"title": "edt_conf_v4l2_hardware_saturation_title",
"default": 0,
"required": true,
"access": "expert",
"propertyOrder": 18
},
"signalDetection" :
{
"type" : "boolean",
"title" : "edt_conf_v4l2_signalDetection_title",
"default" : false,
"required" : true,
"propertyOrder" : 17
"hardware_hue": {
"type": "integer",
"title": "edt_conf_v4l2_hardware_hue_title",
"default": 0,
"required": true,
"access": "expert",
"propertyOrder": 19
},
"redSignalThreshold" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_redSignalThreshold_title",
"minimum" : 0,
"maximum" : 100,
"default" : 5,
"append" : "edt_append_percent",
"cropLeft": {
"type": "integer",
"title": "edt_conf_v4l2_cropLeft_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"required": true,
"propertyOrder": 20
},
"cropRight": {
"type": "integer",
"title": "edt_conf_v4l2_cropRight_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"required": true,
"propertyOrder": 21
},
"cropTop": {
"type": "integer",
"title": "edt_conf_v4l2_cropTop_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"required": true,
"propertyOrder": 22
},
"cropBottom": {
"type": "integer",
"title": "edt_conf_v4l2_cropBottom_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"required": true,
"propertyOrder": 23
},
"cecDetection": {
"type": "boolean",
"title": "edt_conf_v4l2_cecDetection_title",
"default": false,
"required": true,
"access": "advanced",
"propertyOrder": 24
},
"signalDetection": {
"type": "boolean",
"title": "edt_conf_v4l2_signalDetection_title",
"default": false,
"required": true,
"access": "expert",
"propertyOrder": 25
},
"redSignalThreshold": {
"type": "integer",
"title": "edt_conf_v4l2_redSignalThreshold_title",
"minimum": 0,
"maximum": 100,
"default": 0,
"append": "edt_append_percent",
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 18
"access": "expert",
"required": true,
"propertyOrder": 26
},
"greenSignalThreshold" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_greenSignalThreshold_title",
"minimum" : 0,
"maximum" : 100,
"default" : 5,
"append" : "edt_append_percent",
"greenSignalThreshold": {
"type": "integer",
"title": "edt_conf_v4l2_greenSignalThreshold_title",
"minimum": 0,
"maximum": 100,
"default": 100,
"append": "edt_append_percent",
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 19
"required": true,
"access": "expert",
"propertyOrder": 27
},
"blueSignalThreshold" :
{
"type" : "integer",
"title" : "edt_conf_v4l2_blueSignalThreshold_title",
"minimum" : 0,
"maximum" : 100,
"default" : 5,
"append" : "edt_append_percent",
"blueSignalThreshold": {
"type": "integer",
"title": "edt_conf_v4l2_blueSignalThreshold_title",
"minimum": 0,
"maximum": 100,
"default": 0,
"append": "edt_append_percent",
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 20
"required": true,
"access": "expert",
"propertyOrder": 28
},
"sDVOffsetMin" :
{
"type" : "number",
"title" : "edt_conf_v4l2_sDVOffsetMin_title",
"minimum" : 0.0,
"maximum" : 1.0,
"default" : 0.25,
"step" : 0.01,
"noSignalCounterThreshold": {
"type": "integer",
"title": "edt_conf_v4l2_noSignalCounterThreshold_title",
"minimum": 1,
"maximum": 1000,
"default": 200,
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 21
"required": true,
"access": "expert",
"propertyOrder": 29
},
"sDVOffsetMax" :
{
"type" : "number",
"title" : "edt_conf_v4l2_sDVOffsetMax_title",
"minimum" : 0.0,
"maximum" : 1.0,
"default" : 0.75,
"step" : 0.01,
"sDVOffsetMin": {
"type": "number",
"title": "edt_conf_v4l2_sDVOffsetMin_title",
"minimum": 0.0,
"maximum": 1.0,
"default": 0.1,
"step": 0.01,
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 22
"required": true,
"access": "expert",
"propertyOrder": 30
},
"sDHOffsetMin" :
{
"type" : "number",
"title" : "edt_conf_v4l2_sDHOffsetMin_title",
"minimum" : 0.0,
"maximum" : 1.0,
"default" : 0.25,
"step" : 0.01,
"sDVOffsetMax": {
"type": "number",
"title": "edt_conf_v4l2_sDVOffsetMax_title",
"minimum": 0.0,
"maximum": 1.0,
"default": 0.9,
"step": 0.01,
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 23
"required": true,
"access": "expert",
"propertyOrder": 31
},
"sDHOffsetMax" :
{
"type" : "number",
"title" : "edt_conf_v4l2_sDHOffsetMax_title",
"minimum" : 0.0,
"maximum" : 1.0,
"default" : 0.75,
"step" : 0.01,
"sDHOffsetMin": {
"type": "number",
"title": "edt_conf_v4l2_sDHOffsetMin_title",
"minimum": 0.0,
"maximum": 1.0,
"default": 0.4,
"step": 0.01,
"options": {
"dependencies": {
"signalDetection": true
}
},
"required" : true,
"propertyOrder" : 24
"required": true,
"access": "expert",
"propertyOrder": 32
},
"sDHOffsetMax": {
"type": "number",
"title": "edt_conf_v4l2_sDHOffsetMax_title",
"minimum": 0.0,
"maximum": 1.0,
"default": 0.46,
"step": 0.01,
"options": {
"dependencies": {
"signalDetection": true
}
},
"required": true,
"access": "expert",
"propertyOrder": 33
}
},
"additionalProperties" : true
}
"additionalProperties": true
}
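
Note: the sDH/sDV offsets above are fractions of the captured frame; assuming they describe the sub-area used for signal detection, mapping them to pixel coordinates works out to one line per edge (sketch only, not the project's implementation):

// Illustrative only: translate fractional detection-area offsets into a pixel window.
struct DetectionArea { int xMin; int xMax; int yMin; int yMax; };

DetectionArea toPixelArea(int frameWidth, int frameHeight,
                          double sDHOffsetMin, double sDHOffsetMax,
                          double sDVOffsetMin, double sDVOffsetMax)
{
	DetectionArea area;
	area.xMin = static_cast<int>(sDHOffsetMin * (frameWidth - 1));
	area.xMax = static_cast<int>(sDHOffsetMax * (frameWidth - 1));
	area.yMin = static_cast<int>(sDVOffsetMin * (frameHeight - 1));
	area.yMax = static_cast<int>(sDVOffsetMax * (frameHeight - 1));
	return area;
}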

View File

@@ -2,43 +2,52 @@
"type" : "object",
"required" : true,
"title" : "edt_conf_instC_heading_title",
"properties" :
{
"systemEnable" :
{
"type" : "boolean",
"required" : true,
"title" : "edt_conf_instC_systemEnable_title",
"default" : true,
"propertyOrder" : 1
"properties": {
"systemEnable": {
"type": "boolean",
"required": true,
"title": "edt_conf_instC_systemEnable_title",
"default": false,
"propertyOrder": 1
},
"systemPriority" :
{
"type" : "integer",
"required" : true,
"title" : "edt_conf_general_priority_title",
"minimum" : 100,
"maximum" : 253,
"default" : 250,
"propertyOrder" : 2
"systemGrabberDevice": {
"type": "string",
"required": true,
"title": "edt_conf_instC_screen_grabber_device_title",
"default": "NONE",
"propertyOrder": 2
},
"v4lEnable" :
{
"type" : "boolean",
"required" : true,
"title" : "edt_conf_instC_v4lEnable_title",
"default" : false,
"propertyOrder" : 3
"systemPriority": {
"type": "integer",
"required": true,
"title": "edt_conf_general_priority_title",
"minimum": 100,
"maximum": 253,
"default": 250,
"propertyOrder": 3
},
"v4lPriority" :
{
"type" : "integer",
"required" : true,
"title" : "edt_conf_general_priority_title",
"minimum" : 100,
"maximum" : 253,
"default" : 240,
"propertyOrder" : 4
"v4lEnable": {
"type": "boolean",
"required": true,
"title": "edt_conf_instC_v4lEnable_title",
"default": false,
"propertyOrder": 4
},
"v4lGrabberDevice": {
"type": "string",
"required": true,
"title": "edt_conf_instC_video_grabber_device_title",
"default": "NONE",
"propertyOrder": 5
},
"v4lPriority": {
"type": "integer",
"required": true,
"title": "edt_conf_general_priority_title",
"minimum": 100,
"maximum": 253,
"default": 240,
"propertyOrder": 6
}
},
"additionalProperties" : false

View File

@@ -135,15 +135,44 @@
},
"cabling": {
"type": "string",
"enum": ["snake", "parallel"]
"enum": [ "snake", "parallel" ]
},
"start": {
"type": "string",
"enum": ["top-left", "top-right", "bottom-left", "bottom-right"]
"enum": [ "top-left", "top-right", "bottom-left", "bottom-right" ]
}
},
"additionalProperties": false
},
"ledBlacklist": {
"type": "array",
"title": "conf_leds_layout_blacklist_rules_title",
"uniqueItems": true,
"items": {
"type": "object",
"title": "conf_leds_layout_blacklist_rule_title",
"required": true,
"properties": {
"start": {
"type": "integer",
"minimum": 0,
"default": 0,
"title": "conf_leds_layout_blacklist_start_title",
"required": true,
"propertyOrder": 1
},
"num": {
"type": "integer",
"minimum": 1,
"default": 1,
"title": "conf_leds_layout_blacklist_num_title",
"required": true,
"propertyOrder": 2
}
}
},
"propertyOrder": 1
}
},
"additionalProperties": true
}
"additionalProperties": true
}
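
Note: each blacklist rule above marks a run of LEDs starting at 'start' with length 'num'; assuming blacklisted LEDs are simply forced to black, applying a rule could look like this sketch:

#include <vector>
#include <utils/ColorRgb.h>

// Illustrative only: force every LED covered by one blacklist rule to black.
void applyBlacklistRule(std::vector<ColorRgb>& ledValues, int start, int num)
{
	const int end = start + num;
	for (int i = start; i < end && i < static_cast<int>(ledValues.size()); ++i)
	{
		ledValues[i] = ColorRgb::BLACK;
	}
}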

View File

@@ -1,99 +1,102 @@
{
"type" : "object",
"title" : "edt_conf_smooth_heading_title",
"properties" :
{
"enable" :
{
"type" : "boolean",
"title" : "edt_conf_general_enable_title",
"default" : true,
"propertyOrder" : 1
},
"type" :
{
"type" : "string",
"title" : "edt_conf_smooth_type_title",
"enum" : ["linear", "decay"],
"default" : "linear",
"options" : {
"enum_titles" : ["edt_conf_enum_linear", "edt_conf_enum_decay"]
},
"propertyOrder" : 2
},
"time_ms" :
{
"type" : "integer",
"title" : "edt_conf_smooth_time_ms_title",
"minimum" : 25,
"maximum": 5000,
"default" : 200,
"append" : "edt_append_ms",
"propertyOrder" : 3
},
"updateFrequency" :
{
"type" : "number",
"title" : "edt_conf_smooth_updateFrequency_title",
"minimum" : 1.0,
"maximum" : 2000.0,
"default" : 25.0,
"append" : "edt_append_hz",
"propertyOrder" : 4
},
"interpolationRate" :
{
"type" : "number",
"title" : "edt_conf_smooth_interpolationRate_title",
"minimum" : 1.0,
"maximum": 1000.0,
"default" : 1.0,
"append" : "edt_append_hz",
"propertyOrder" : 5
},
"outputRate" :
{
"type" : "number",
"title" : "edt_conf_smooth_outputRate_title",
"minimum" : 1.0,
"maximum": 1000.0,
"default" : 1.0,
"append" : "edt_append_hz",
"propertyOrder" : 6
},
"decay" :
{
"type" : "number",
"title" : "edt_conf_smooth_decay_title",
"default" : 1.0,
"minimum" : 1.0,
"maximum": 20.0,
"propertyOrder" : 7
},
"dithering" :
{
"type" : "boolean",
"title" : "edt_conf_smooth_dithering_title",
"default" : true,
"propertyOrder" : 8
},
"updateDelay" :
{
"type" : "integer",
"title" : "edt_conf_smooth_updateDelay_title",
"minimum" : 0,
"maximum": 2048,
"default" : 0,
"append" : "edt_append_ms",
"propertyOrder" : 9
},
"continuousOutput" :
{
"type" : "boolean",
"title" : "edt_conf_smooth_continuousOutput_title",
"default" : true,
"propertyOrder" : 10
}
},
"additionalProperties" : false
"type": "object",
"title": "edt_conf_smooth_heading_title",
"properties": {
"enable": {
"type": "boolean",
"title": "edt_conf_general_enable_title",
"default": true,
"propertyOrder": 1
},
"type": {
"type": "string",
"title": "edt_conf_smooth_type_title",
"enum": [ "linear", "decay" ],
"default": "linear",
"options": {
"enum_titles": [ "edt_conf_enum_linear", "edt_conf_enum_decay" ]
},
"propertyOrder": 2
},
"time_ms": {
"type": "integer",
"title": "edt_conf_smooth_time_ms_title",
"minimum": 25,
"maximum": 5000,
"default": 200,
"append": "edt_append_ms",
"propertyOrder": 3
},
"updateFrequency": {
"type": "number",
"title": "edt_conf_smooth_updateFrequency_title",
"minimum": 1.0,
"maximum": 2000.0,
"default": 25.0,
"append": "edt_append_hz",
"propertyOrder": 4
},
"interpolationRate": {
"type": "number",
"title": "edt_conf_smooth_interpolationRate_title",
"minimum": 1.0,
"maximum": 1000.0,
"default": 1.0,
"append": "edt_append_hz",
"propertyOrder": 5,
"options": {
"dependencies": {
"type": "decay"
}
}
},
"outputRate": {
"type": "number",
"title": "edt_conf_smooth_outputRate_title",
"minimum": 1.0,
"maximum": 1000.0,
"default": 1.0,
"append": "edt_append_hz",
"propertyOrder": 6,
"options": {
"dependencies": {
"type": "decay"
}
}
},
"decay": {
"type": "number",
"title": "edt_conf_smooth_decay_title",
"default": 1.0,
"minimum": 1.0,
"maximum": 20.0,
"propertyOrder": 7,
"options": {
"dependencies": {
"type": "decay"
}
}
},
"dithering": {
"type": "boolean",
"title": "edt_conf_smooth_dithering_title",
"default": true,
"propertyOrder": 8,
"options": {
"dependencies": {
"type": "decay"
}
}
},
"updateDelay": {
"type": "integer",
"title": "edt_conf_smooth_updateDelay_title",
"minimum": 0,
"maximum": 2048,
"default": 0,
"append": "edt_append_ms",
"propertyOrder": 9
}
},
"additionalProperties": false
}
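
Note: the 'decay' type weights recent target colours more strongly than older ones; a generic exponential-style weighting, shown here only as an illustration and not as the project's exact implementation, could be:

#include <algorithm>
#include <cmath>

// Illustrative only: weight of a sample that is 'age_ms' old within a settling window,
// where a larger 'decay' value makes older samples fade out faster.
double decayWeight(double age_ms, double window_ms, double decay)
{
	const double x = std::max(0.0, 1.0 - age_ms / window_ms); // clamp so expired samples get weight 0
	return std::pow(x, decay);
}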

View File

@@ -57,3 +57,8 @@ void JsonClientConnection::disconnected()
{
emit connectionClosed();
}
QHostAddress JsonClientConnection::getClientAddress()
{
return _socket->peerAddress();
}

View File

@@ -4,6 +4,7 @@
#include <QString>
#include <QByteArray>
#include <QJsonObject>
#include <QHostAddress>
// util includes
#include <utils/Logger.h>
@@ -24,6 +25,7 @@ public:
/// @param socket The Socket object for this connection
///
JsonClientConnection(QTcpSocket * socket, bool localConnection);
QHostAddress getClientAddress();
signals:
void connectionClosed();

View File

@@ -102,7 +102,7 @@ void JsonServer::newConnection()
{
if(_netOrigin->accessAllowed(socket->peerAddress(), socket->localAddress()))
{
Debug(_log, "New connection from: %s ",socket->localAddress().toString().toStdString().c_str());
Debug(_log, "New connection from: %s",QSTRING_CSTR(socket->peerAddress().toString()));
JsonClientConnection * connection = new JsonClientConnection(socket, _netOrigin->isLocalAddress(socket->peerAddress(), socket->localAddress()));
_openConnections.insert(connection);
@@ -118,7 +118,7 @@ void JsonServer::newConnection()
void JsonServer::closedConnection()
{
JsonClientConnection* connection = qobject_cast<JsonClientConnection*>(sender());
Debug(_log, "Connection closed");
Debug(_log, "Connection closed for %s", QSTRING_CSTR(connection->getClientAddress().toString()));
_openConnections.remove(connection);
// schedule to delete the connection object

View File

@@ -234,20 +234,25 @@ int LedDevice::rewriteLEDs()
return retval;
}
int LedDevice::writeBlack(int numberOfBlack)
int LedDevice::writeBlack(int numberOfWrites)
{
return writeColor(ColorRgb::BLACK, numberOfWrites);
}
int LedDevice::writeColor(const ColorRgb& color, int numberOfWrites)
{
int rc = -1;
for (int i = 0; i < numberOfBlack; i++)
for (int i = 0; i < numberOfWrites; i++)
{
if ( _latchTime_ms > 0 )
if (_latchTime_ms > 0)
{
// Wait latch time before writing black
QEventLoop loop;
QTimer::singleShot(_latchTime_ms, &loop, &QEventLoop::quit);
loop.exec();
}
_lastLedValues = std::vector<ColorRgb>(static_cast<unsigned long>(_ledCount), ColorRgb::BLACK );
_lastLedValues = std::vector<ColorRgb>(static_cast<unsigned long>(_ledCount),color);
rc = write(_lastLedValues);
}
return rc;
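
Note: writeBlack(n) is now simply shorthand for writeColor(ColorRgb::BLACK, n), and the repeated writes pause for the latch time between frames with a local event loop so the delay does not freeze the Qt event loop; extracted as a stand-alone sketch for clarity:

#include <QEventLoop>
#include <QTimer>

// Illustrative only: non-blocking wait pattern used between repeated writes.
static void waitMs(int delay_ms)
{
	QEventLoop loop;
	QTimer::singleShot(delay_ms, &loop, &QEventLoop::quit);
	loop.exec();
}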
@@ -265,12 +270,14 @@ bool LedDevice::switchOn()
{
if ( _isEnabled &&_isDeviceInitialised )
{
storeState();
if ( powerOn() )
if ( storeState() )
{
_isOn = true;
rc = true;
if ( powerOn() )
{
_isOn = true;
_isInSwitchOff = false;
rc = true;
}
}
}
}
@@ -410,28 +417,33 @@ void LedDevice::setLatchTime( int latchTime_ms )
void LedDevice::setRewriteTime( int rewriteTime_ms )
{
assert(rewriteTime_ms >= 0);
_refreshTimerInterval_ms = rewriteTime_ms;
if ( _refreshTimerInterval_ms > 0 )
//Check if the refresh timer was not initialised due to getProperties/identify scenarios
if (_refreshTimer != nullptr)
{
_refreshTimerInterval_ms = rewriteTime_ms;
_isRefreshEnabled = true;
if (_refreshTimerInterval_ms <= _latchTime_ms )
if (_refreshTimerInterval_ms > 0)
{
int new_refresh_timer_interval = _latchTime_ms + 10;
Warning(_log, "latchTime(%d) is bigger/equal rewriteTime(%d), set rewriteTime to %dms", _latchTime_ms, _refreshTimerInterval_ms, new_refresh_timer_interval);
_refreshTimerInterval_ms = new_refresh_timer_interval;
_refreshTimer->setInterval( _refreshTimerInterval_ms );
_isRefreshEnabled = true;
if (_refreshTimerInterval_ms <= _latchTime_ms)
{
int new_refresh_timer_interval = _latchTime_ms + 10;
Warning(_log, "latchTime(%d) is bigger/equal rewriteTime(%d), set rewriteTime to %dms", _latchTime_ms, _refreshTimerInterval_ms, new_refresh_timer_interval);
_refreshTimerInterval_ms = new_refresh_timer_interval;
_refreshTimer->setInterval(_refreshTimerInterval_ms);
}
Debug(_log, "Refresh interval = %dms", _refreshTimerInterval_ms);
_refreshTimer->setInterval(_refreshTimerInterval_ms);
_lastWriteTime = QDateTime::currentDateTime();
}
Debug(_log, "Refresh interval = %dms",_refreshTimerInterval_ms );
_refreshTimer->setInterval( _refreshTimerInterval_ms );
_lastWriteTime = QDateTime::currentDateTime();
Debug(_log, "RewriteTime updated to %dms", _refreshTimerInterval_ms);
}
Debug(_log, "RewriteTime updated to %dms", _refreshTimerInterval_ms);
}
void LedDevice::printLedValues(const std::vector<ColorRgb>& ledValues)

View File

@@ -118,11 +118,11 @@ bool LedDeviceLightpack::init(const QJsonObject &deviceConfig)
QString errortext;
if (_serialNumber.isEmpty())
{
errortext = QString ("No Lightpack devices were found");
errortext = QString ("No working Lightpack devices were found");
}
else
{
errortext = QString ("No Lightpack device found with serial %1").arg( _serialNumber);
errortext = QString ("No working Lightpack device found with serial %1").arg( _serialNumber);
}
this->setInError( errortext );
}
@@ -197,9 +197,6 @@ bool LedDeviceLightpack::searchDevice(libusb_device * device, const QString & re
return false;
}
#define UNO_VENDOR_ID 0x2341
#define UNO_PRODUCT_ID 0x43
if ((deviceDescriptor.idVendor == USB_VENDOR_ID && deviceDescriptor.idProduct == USB_PRODUCT_ID) ||
(deviceDescriptor.idVendor == USB_OLD_VENDOR_ID && deviceDescriptor.idProduct == USB_OLD_PRODUCT_ID))
{
@@ -375,28 +372,30 @@ int LedDeviceLightpack::openDevice(libusb_device *device, libusb_device_handle *
Error(_log, "unable to open device(%d): %s", error, libusb_error_name(error));
rc = -1;
}
// detach kernel driver if it is active
if (libusb_kernel_driver_active(handle, LIGHTPACK_INTERFACE) == 1)
else
{
error = libusb_detach_kernel_driver(handle, LIGHTPACK_INTERFACE);
// detach kernel driver if it is active
if (libusb_kernel_driver_active(handle, LIGHTPACK_INTERFACE) == 1)
{
error = libusb_detach_kernel_driver(handle, LIGHTPACK_INTERFACE);
if (error != LIBUSB_SUCCESS)
{
Error(_log, "unable to detach kernel driver(%d): %s", error, libusb_error_name(error));
libusb_close(handle);
rc = -1;
}
}
error = libusb_claim_interface(handle, LIGHTPACK_INTERFACE);
if (error != LIBUSB_SUCCESS)
{
Error(_log, "unable to detach kernel driver(%d): %s", error, libusb_error_name(error));
Error(_log, "unable to claim interface(%d): %s", error, libusb_error_name(error));
libusb_attach_kernel_driver(handle, LIGHTPACK_INTERFACE);
libusb_close(handle);
rc = -1;
}
}
error = libusb_claim_interface(handle, LIGHTPACK_INTERFACE);
if (error != LIBUSB_SUCCESS)
{
Error(_log, "unable to claim interface(%d): %s", error, libusb_error_name(error));
libusb_attach_kernel_driver(handle, LIGHTPACK_INTERFACE);
libusb_close(handle);
rc = -1;
}
*deviceHandle = handle;
return rc;
}

View File

@@ -13,6 +13,8 @@
// Constants
namespace {
const bool verbose = false;
const bool verbose3 = false;
const QString MULTICAST_GROUP_DEFAULT_ADDRESS = "239.255.255.250";
const quint16 MULTICAST_GROUP_DEFAULT_PORT = 49692;
@@ -48,7 +50,7 @@ bool LedDeviceAtmoOrb::init(const QJsonObject &deviceConfig)
if ( LedDevice::init(deviceConfig) )
{
_multicastGroup = deviceConfig["output"].toString(MULTICAST_GROUP_DEFAULT_ADDRESS);
_multicastGroup = deviceConfig["host"].toString(MULTICAST_GROUP_DEFAULT_ADDRESS);
_multiCastGroupPort = static_cast<quint16>(deviceConfig["port"].toInt(MULTICAST_GROUP_DEFAULT_PORT));
_useOrbSmoothing = deviceConfig["useOrbSmoothing"].toBool(false);
_skipSmoothingDiff = deviceConfig["skipSmoothingDiff"].toInt(0);
@@ -272,13 +274,13 @@ void LedDeviceAtmoOrb::setColor(int orbId, const ColorRgb &color, int commandTyp
void LedDeviceAtmoOrb::sendCommand(const QByteArray &bytes)
{
//Debug ( _log, "command: [%s] -> %s:%u", QSTRING_CSTR( QString(bytes.toHex())), QSTRING_CSTR(_groupAddress.toString()), _multiCastGroupPort );
DebugIf(verbose3, _log, "command: [%s] -> %s:%u", QSTRING_CSTR( QString(bytes.toHex())), QSTRING_CSTR(_groupAddress.toString()), _multiCastGroupPort );
_udpSocket->writeDatagram(bytes.data(), bytes.size(), _groupAddress, _multiCastGroupPort);
}
QJsonObject LedDeviceAtmoOrb::discover(const QJsonObject& params)
{
//Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
@@ -353,14 +355,14 @@ QJsonObject LedDeviceAtmoOrb::discover(const QJsonObject& params)
}
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose, _log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
return devicesDiscovered;
}
void LedDeviceAtmoOrb::identify(const QJsonObject& params)
{
//Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
int orbId = 0;
if ( params["id"].isString() )

View File

@@ -1,52 +1,50 @@
#include "LedDeviceCololight.h"
#include <utils/QStringUtils.h>
#include <utils/WaitTime.h>
#include <QUdpSocket>
#include <QHostInfo>
#include <QtEndian>
#include <QEventLoop>
#include <chrono>
// Constants
namespace {
const bool verbose = false;
const bool verbose3 = false;
const bool verbose = false;
const bool verbose3 = false;
// Configuration settings
// Configuration settings
const char CONFIG_HW_LED_COUNT[] = "hardwareLedCount";
const char CONFIG_HW_LED_COUNT[] = "hardwareLedCount";
// Cololight discovery service
const int COLOLIGHT_BEADS_PER_MODULE = 19;
const int API_DEFAULT_PORT = 8900;
// Cololight discovery service
const char DISCOVERY_ADDRESS[] = "255.255.255.255";
const quint16 DISCOVERY_PORT = 12345;
const char DISCOVERY_MESSAGE[] = "Z-SEARCH * \r\n";
constexpr std::chrono::milliseconds DEFAULT_DISCOVERY_TIMEOUT{ 5000 };
constexpr std::chrono::milliseconds DEFAULT_READ_TIMEOUT{ 1000 };
constexpr std::chrono::milliseconds DEFAULT_IDENTIFY_TIME{ 2000 };
const int API_DEFAULT_PORT = 8900;
const char COLOLIGHT_MODEL[] = "mod";
const char COLOLIGHT_MODEL_TYPE[] = "subkey";
const char COLOLIGHT_MAC[] = "sn";
const char COLOLIGHT_NAME[] = "name";
const char DISCOVERY_ADDRESS[] = "255.255.255.255";
const quint16 DISCOVERY_PORT = 12345;
const char DISCOVERY_MESSAGE[] = "Z-SEARCH * \r\n";
constexpr std::chrono::milliseconds DEFAULT_DISCOVERY_TIMEOUT{ 2000 };
constexpr std::chrono::milliseconds DEFAULT_READ_TIMEOUT{ 1000 };
constexpr std::chrono::milliseconds DEFAULT_IDENTIFY_TIME{ 2000 };
const char COLOLIGHT_MODEL_IDENTIFIER[] = "OD_WE_QUAN";
const int COLOLIGHT_BEADS_PER_MODULE = 19;
const int COLOLIGHT_MIN_STRIP_SEGMENT_SIZE = 30;
const char COLOLIGHT_MODEL[] = "mod";
const char COLOLIGHT_MODEL_TYPE[] = "subkey";
const char COLOLIGHT_MAC[] = "sn";
const char COLOLIGHT_NAME[] = "name";
const char COLOLIGHT_MODEL_IDENTIFIER[] = "OD_WE_QUAN";
} //End of constants
LedDeviceCololight::LedDeviceCololight(const QJsonObject& deviceConfig)
: ProviderUdp(deviceConfig)
, _modelType(-1)
, _ledLayoutType(STRIP_LAYOUT)
, _ledBeadCount(0)
, _distance(0)
, _sequenceNumber(1)
, _modelType(-1)
, _ledLayoutType(-1)
, _ledBeadCount(0)
, _distance(0)
, _sequenceNumber(1)
{
_packetFixPart.append(reinterpret_cast<const char*>(PACKET_HEADER), sizeof(PACKET_HEADER));
_packetFixPart.append(reinterpret_cast<const char*>(PACKET_SECU), sizeof(PACKET_SECU));
@@ -94,11 +92,11 @@ bool LedDeviceCololight::initLedsConfiguration()
QString modelTypeText;
switch (_modelType) {
case 0:
case STRIP:
modelTypeText = "Strip";
_ledLayoutType = STRIP_LAYOUT;
break;
case 1:
case PLUS:
_ledLayoutType = MODLUE_LAYOUT;
modelTypeText = "Plus";
break;
@@ -116,33 +114,24 @@ bool LedDeviceCololight::initLedsConfiguration()
setLedCount(_devConfig[CONFIG_HW_LED_COUNT].toInt(0));
}
if (_modelType == STRIP && (getLedCount() % COLOLIGHT_MIN_STRIP_SEGMENT_SIZE != 0))
Debug(_log, "LedCount : %d", getLedCount());
int configuredLedCount = _devConfig["currentLedCount"].toInt(1);
if (getLedCount() < configuredLedCount)
{
QString errorReason = QString("Hardware LED count must be multiple of %1 for Cololight Strip!")
.arg(COLOLIGHT_MIN_STRIP_SEGMENT_SIZE);
QString errorReason = QString("Not enough LEDs [%1] for configured LEDs in layout [%2] found!")
.arg(getLedCount())
.arg(configuredLedCount);
this->setInError(errorReason);
}
else
{
Debug(_log, "LedCount : %d", getLedCount());
int configuredLedCount = _devConfig["currentLedCount"].toInt(1);
if (getLedCount() < configuredLedCount)
if (getLedCount() > configuredLedCount)
{
QString errorReason = QString("Not enough LEDs [%1] for configured LEDs in layout [%2] found!")
.arg(getLedCount())
.arg(configuredLedCount);
this->setInError(errorReason);
}
else
{
if (getLedCount() > configuredLedCount)
{
Info(_log, "%s: More LEDs [%d] than configured LEDs in layout [%d].", QSTRING_CSTR(this->getActiveDeviceType()), getLedCount(), configuredLedCount);
}
isInitOK = true;
Info(_log, "%s: More LEDs [%d] than configured LEDs in layout [%d].", QSTRING_CSTR(this->getActiveDeviceType()), getLedCount(), configuredLedCount);
}
isInitOK = true;
}
}
@@ -197,29 +186,42 @@ bool LedDeviceCololight::getInfo()
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
quint16 ledNum = qFromBigEndian<quint16>(response.data() + 1);
if (ledNum != 0xFFFF)
{
_ledBeadCount = ledNum;
// Cololight types are not identifiable currently
// Work under the assumption that modules (Cololight Plus) report a multiple of COLOLIGHT_BEADS_PER_MODULE beads, while a Cololight Strip does not
// The assumption does not hold if a user cuts the Strip to an exact multiple of beads...
if (ledNum % COLOLIGHT_BEADS_PER_MODULE == 0)
{
_modelType = MODLUE_LAYOUT;
_modelType = PLUS;
_ledLayoutType = MODLUE_LAYOUT;
_distance = ledNum / COLOLIGHT_BEADS_PER_MODULE;
setLedCount(_distance);
}
else
{
_modelType = STRIP;
_ledLayoutType = STRIP_LAYOUT;
_distance = 0;
setLedCount(ledNum);
}
isCmdOK = true;
Debug(_log, "#LEDs found [0x%x], [%u], distance [%d]", _ledBeadCount, _ledBeadCount, _distance);
}
else
{
_modelType = STRIP;
_modelType = -1;
_ledLayoutType = -1;
_distance = 0;
setLedCount(0);
isCmdOK = false;
Error(_log, "Number of LEDs cannot be resolved");
}
Debug(_log, "#LEDs found [0x%x], [%u], distance [%d]", _ledBeadCount, _ledBeadCount, _distance);
isCmdOK = true;
}
}
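
Note: as the comments in getInfo() state, the device classification rests solely on the reported bead count; condensed into a sketch (values taken from the constants above):

#include <QtGlobal>

// Illustrative only: a Cololight Plus is assembled from 19-bead modules
// (COLOLIGHT_BEADS_PER_MODULE), so a count that divides evenly is treated as Plus,
// anything else as a Strip; 0xFFFF means the count could not be resolved.
bool looksLikeCololightPlus(quint16 ledNum)
{
	const int beadsPerModule = 19;
	return ledNum != 0xFFFF && (ledNum % beadsPerModule == 0);
}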
@@ -265,7 +267,7 @@ bool LedDeviceCololight::setColor(const uint32_t color)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -301,7 +303,7 @@ bool LedDeviceCololight::setState(bool isOn)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -325,7 +327,7 @@ bool LedDeviceCololight::setStateDirect(bool isOn)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -379,7 +381,7 @@ bool LedDeviceCololight::setTL1CommandMode(bool isOn)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -496,7 +498,7 @@ bool LedDeviceCololight::readResponse(QByteArray& response)
}
else
{
DebugIf(verbose, _log, "No additional data returned");
DebugIf(verbose,_log, "No additional data returned");
}
}
isRequestOK = true;
@@ -545,20 +547,15 @@ bool LedDeviceCololight::powerOff()
return off;
}
QJsonObject LedDeviceCololight::discover(const QJsonObject& /*params*/)
QJsonArray LedDeviceCololight::discover()
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType);
QJsonArray deviceList;
QUdpSocket udpSocket;
udpSocket.writeDatagram(QString(DISCOVERY_MESSAGE).toUtf8(), QHostAddress(DISCOVERY_ADDRESS), DISCOVERY_PORT);
if (udpSocket.waitForReadyRead(DEFAULT_DISCOVERY_TIMEOUT.count()))
{
while (udpSocket.waitForReadyRead(500))
while (udpSocket.waitForReadyRead(200))
{
QByteArray datagram;
@@ -602,12 +599,13 @@ QJsonObject LedDeviceCololight::discover(const QJsonObject& /*params*/)
}
}
QJsonArray deviceList;
QMap<QString, QMap <QString, QString>>::iterator i;
for (i = _services.begin(); i != _services.end(); ++i)
{
QJsonObject obj;
QString ipAddress = i.key();
const QString& ipAddress = i.key();
obj.insert("ip", ipAddress);
obj.insert("model", i.value().value(COLOLIGHT_MODEL));
obj.insert("type", i.value().value(COLOLIGHT_MODEL_TYPE));
@@ -647,27 +645,43 @@ QJsonObject LedDeviceCololight::discover(const QJsonObject& /*params*/)
deviceList << obj;
}
return deviceList;
}
QJsonObject LedDeviceCololight::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType);
QString discoveryMethod("ssdp");
QJsonArray deviceList;
deviceList = discover();
devicesDiscovered.insert("discoveryMethod", discoveryMethod);
devicesDiscovered.insert("devices", deviceList);
DebugIf(verbose, _log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return devicesDiscovered;
}
QJsonObject LedDeviceCololight::getProperties(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
QString apiHostname = params["host"].toString("");
QString hostName = params["host"].toString("");
quint16 apiPort = static_cast<quint16>(params["port"].toInt(API_DEFAULT_PORT));
if (!apiHostname.isEmpty())
QJsonObject propertiesDetails;
if (!hostName.isEmpty())
{
QJsonObject deviceConfig;
deviceConfig.insert("host", apiHostname);
deviceConfig.insert("host", hostName);
deviceConfig.insert("port", apiPort);
if (ProviderUdp::init(deviceConfig))
{
if (getInfo())
@@ -675,38 +689,43 @@ QJsonObject LedDeviceCololight::getProperties(const QJsonObject& params)
QString modelTypeText;
switch (_modelType) {
case 1:
case STRIP:
modelTypeText = "Strip";
break;
case PLUS:
modelTypeText = "Plus";
break;
default:
modelTypeText = "Strip";
break;
}
properties.insert("modelType", modelTypeText);
properties.insert("ledCount", static_cast<int>(getLedCount()));
properties.insert("ledBeadCount", _ledBeadCount);
properties.insert("distance", _distance);
propertiesDetails.insert("modelType", modelTypeText);
propertiesDetails.insert("ledCount", static_cast<int>(getLedCount()));
propertiesDetails.insert("ledBeadCount", _ledBeadCount);
propertiesDetails.insert("distance", _distance);
}
}
}
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
properties.insert("properties", propertiesDetails);
DebugIf(verbose,_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
return properties;
}
void LedDeviceCololight::identify(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QString apiHostname = params["host"].toString("");
QString hostName = params["host"].toString("");
quint16 apiPort = static_cast<quint16>(params["port"].toInt(API_DEFAULT_PORT));
if (!apiHostname.isEmpty())
if (!hostName.isEmpty())
{
QJsonObject deviceConfig;
deviceConfig.insert("host", apiHostname);
deviceConfig.insert("host", hostName);
deviceConfig.insert("port", apiPort);
if (ProviderUdp::init(deviceConfig))
{
@@ -714,9 +733,7 @@ void LedDeviceCololight::identify(const QJsonObject& params)
{
setEffect(THE_CIRCUS);
QEventLoop loop;
QTimer::singleShot(DEFAULT_IDENTIFY_TIME.count(), &loop, &QEventLoop::quit);
loop.exec();
wait(DEFAULT_IDENTIFY_TIME);
setColor(ColorRgb::BLACK);
}

View File

@@ -284,6 +284,14 @@ private:
///
bool readResponse(QByteArray& response);
///
/// @brief Discover Cololight devices available (for configuration).
/// Cololight specific UDP Broadcast discovery
///
/// @return A JSON structure holding a list of devices found
///
QJsonArray discover();
// Cololight model, e.g. CololightPlus, CololightStrip
int _modelType;

View File

@@ -55,7 +55,7 @@ bool LedDeviceFadeCandy::init(const QJsonObject& deviceConfig)
}
else
{
_host = deviceConfig["output"].toString("127.0.0.1");
_host = deviceConfig["host"].toString("127.0.0.1");
_port = deviceConfig["port"].toInt(STREAM_DEFAULT_PORT);
//If host not configured the init fails

View File

@@ -5,7 +5,6 @@
#include <utils/QStringUtils.h>
// Qt includes
#include <QEventLoop>
#include <QNetworkReply>
#include <QtEndian>
@@ -22,11 +21,18 @@ const bool verbose3 = false;
const char CONFIG_ADDRESS[] = "host";
//const char CONFIG_PORT[] = "port";
const char CONFIG_AUTH_TOKEN[] = "token";
const char CONFIG_RESTORE_STATE[] = "restoreOriginalState";
const char CONFIG_BRIGHTNESS[] = "brightness";
const char CONFIG_BRIGHTNESS_OVERWRITE[] = "overwriteBrightness";
const char CONFIG_PANEL_ORDER_TOP_DOWN[] = "panelOrderTopDown";
const char CONFIG_PANEL_ORDER_LEFT_RIGHT[] = "panelOrderLeftRight";
const char CONFIG_PANEL_START_POS[] = "panelStartPos";
const bool DEFAULT_IS_RESTORE_STATE = true;
const bool DEFAULT_IS_BRIGHTNESS_OVERWRITE = true;
const int BRI_MAX = 100;
// Panel configuration settings
const char PANEL_LAYOUT[] = "layout";
const char PANEL_NUM[] = "numPanels";
@@ -39,9 +45,13 @@ const char PANEL_POS_Y[] = "y";
// List of State Information
const char STATE_ON[] = "on";
const char STATE_ONOFF_VALUE[] = "value";
const char STATE_VALUE_TRUE[] = "true";
const char STATE_VALUE_FALSE[] = "false";
const char STATE_BRI[] = "brightness";
const char STATE_HUE[] = "hue";
const char STATE_SAT[] = "sat";
const char STATE_CT[] = "ct";
const char STATE_COLORMODE[] = "colorMode";
const QStringList COLOR_MODES {"hs", "ct", "effect"};
const char STATE_VALUE[] = "value";
// Device Data elements
const char DEV_DATA_NAME[] = "name";
@@ -50,10 +60,7 @@ const char DEV_DATA_MANUFACTURER[] = "manufacturer";
const char DEV_DATA_FIRMWAREVERSION[] = "firmwareVersion";
// Nanoleaf Stream Control elements
//const char STREAM_CONTROL_IP[] = "streamControlIpAddr";
const char STREAM_CONTROL_PORT[] = "streamControlPort";
//const char STREAM_CONTROL_PROTOCOL[] = "streamControlProtocol";
const quint16 STREAM_CONTROL_DEFAULT_PORT = 60222; //Fixed port for Canvas;
const quint16 STREAM_CONTROL_DEFAULT_PORT = 60222;
// Nanoleaf OpenAPI URLs
const int API_DEFAULT_PORT = 16021;
@@ -65,6 +72,8 @@ const char API_STATE[] = "state";
const char API_PANELLAYOUT[] = "panelLayout";
const char API_EFFECT[] = "effects";
const char API_EFFECT_SELECT[] = "select";
//Nanoleaf Control data stream
const int STREAM_FRAME_PANEL_NUM_SIZE = 2;
const int STREAM_FRAME_PANEL_INFO_SIZE = 8;
@@ -72,19 +81,23 @@ const int STREAM_FRAME_PANEL_INFO_SIZE = 8;
// Nanoleaf ssdp services
const char SSDP_ID[] = "ssdp:all";
const char SSDP_FILTER_HEADER[] = "ST";
const char SSDP_CANVAS[] = "nanoleaf:nl29";
const char SSDP_NANOLEAF[] = "nanoleaf:nl*";
const char SSDP_LIGHTPANELS[] = "nanoleaf_aurora:light";
} //End of constants
// Nanoleaf Panel Shapetypes
enum SHAPETYPES {
TRIANGLE,
RHYTM,
SQUARE,
CONTROL_SQUARE_PRIMARY,
CONTROL_SQUARE_PASSIVE,
POWER_SUPPLY,
};
TRIANGLE = 0,
RHYTM = 1,
SQUARE = 2,
CONTROL_SQUARE_PRIMARY = 3,
CONTROL_SQUARE_PASSIVE = 4,
POWER_SUPPLY= 5,
HEXAGON_SHAPES = 7,
TRIANGE_SHAPES = 8,
MINI_TRIANGE_SHAPES = 8,
SHAPES_CONTROLLER = 12
};
// Nanoleaf external control versions
enum EXTCONTROLVERSIONS {
@@ -100,8 +113,8 @@ LedDeviceNanoleaf::LedDeviceNanoleaf(const QJsonObject& deviceConfig)
, _leftRight(true)
, _startPos(0)
, _endPos(0)
, _extControlVersion(EXTCTRLVER_V2),
_panelLedCount(0)
, _extControlVersion(EXTCTRLVER_V2)
, _panelLedCount(0)
{
}
@@ -127,7 +140,7 @@ bool LedDeviceNanoleaf::init(const QJsonObject& deviceConfig)
Info(_log, "Device Nanoleaf does not require rewrites. Refresh time is ignored.");
}
DebugIf(verbose, _log, "deviceConfig: [%s]", QString(QJsonDocument(_devConfig).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "deviceConfig: [%s]", QString(QJsonDocument(_devConfig).toJson(QJsonDocument::Compact)).toUtf8().constData());
bool isInitOK = false;
@@ -140,6 +153,14 @@ bool LedDeviceNanoleaf::init(const QJsonObject& deviceConfig)
Debug(_log, "RewriteTime : %d", this->getRewriteTime());
Debug(_log, "LatchTime : %d", this->getLatchTime());
_isRestoreOrigState = _devConfig[CONFIG_RESTORE_STATE].toBool(DEFAULT_IS_RESTORE_STATE);
_isBrightnessOverwrite = _devConfig[CONFIG_BRIGHTNESS_OVERWRITE].toBool(DEFAULT_IS_BRIGHTNESS_OVERWRITE);
_brightness = _devConfig[CONFIG_BRIGHTNESS].toInt(BRI_MAX);
Debug(_log, "RestoreOrigState : %d", _isRestoreOrigState);
Debug(_log, "Overwrite Brightn.: %d", _isBrightnessOverwrite);
Debug(_log, "Set Brightness to : %d", _brightness);
// Read panel organisation configuration
if (deviceConfig[CONFIG_PANEL_ORDER_TOP_DOWN].isString())
{
@@ -164,29 +185,29 @@ bool LedDeviceNanoleaf::init(const QJsonObject& deviceConfig)
// TODO: Allow to handle port dynamically
//Set hostname as per configuration and_defaultHost default port
_hostname = deviceConfig[CONFIG_ADDRESS].toString();
_hostName = deviceConfig[CONFIG_ADDRESS].toString();
_apiPort = API_DEFAULT_PORT;
_authToken = deviceConfig[CONFIG_AUTH_TOKEN].toString();
//If host not configured the init failed
if (_hostname.isEmpty())
if (_hostName.isEmpty())
{
this->setInError("No target hostname nor IP defined");
isInitOK = false;
}
else
{
if (initRestAPI(_hostname, _apiPort, _authToken))
if (initRestAPI(_hostName, _apiPort, _authToken))
{
// Read LedDevice configuration and validate against device configuration
if (initLedsConfiguration())
{
// Set UDP streaming host and port
_devConfig["host"] = _hostname;
_devConfig["host"] = _hostName;
_devConfig["port"] = STREAM_CONTROL_DEFAULT_PORT;
isInitOK = ProviderUdp::init(_devConfig);
Debug(_log, "Hostname/IP : %s", QSTRING_CSTR(_hostname));
Debug(_log, "Hostname/IP : %s", QSTRING_CSTR(_hostName));
Debug(_log, "Port : %d", _port);
}
}
@@ -206,7 +227,8 @@ bool LedDeviceNanoleaf::initLedsConfiguration()
httpResponse response = _restApi->get();
if (response.error())
{
this->setInError(response.getErrorReason());
QString errorReason = QString("Getting device details failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
isInitOK = false;
}
else
@@ -243,16 +265,16 @@ bool LedDeviceNanoleaf::initLedsConfiguration()
int panelshapeType = panelObj[PANEL_SHAPE_TYPE].toInt();
//int panelOrientation = panelObj[PANEL_ORIENTATION].toInt();
DebugIf(verbose, _log, "Panel [%d] (%d,%d) - Type: [%d]", panelId, panelX, panelY, panelshapeType);
DebugIf(verbose,_log, "Panel [%d] (%d,%d) - Type: [%d]", panelId, panelX, panelY, panelshapeType);
// Skip Rhythm panels
if (panelshapeType != RHYTM)
// Skip Rhythm and Shapes controller panels
if (panelshapeType != RHYTM && panelshapeType != SHAPES_CONTROLLER)
{
panelMap[panelY][panelX] = panelId;
}
else
{ // Reset non support/required features
Info(_log, "Rhythm panel skipped.");
Info(_log, "Rhythm/Shape Controller panel skipped.");
}
}
@@ -360,36 +382,24 @@ int LedDeviceNanoleaf::open()
int retval = -1;
_isDeviceReady = false;
QJsonDocument responseDoc = changeToExternalControlMode();
// Resolve port for Light Panels
QJsonObject jsonStreamControllInfo = responseDoc.object();
if (!jsonStreamControllInfo.isEmpty())
{
//Set default streaming port
_port = static_cast<uchar>(jsonStreamControllInfo[STREAM_CONTROL_PORT].toInt());
}
if (ProviderUdp::open() == 0)
{
// Everything is OK, device is ready
_isDeviceReady = true;
retval = 0;
}
return retval;
}
QJsonObject LedDeviceNanoleaf::discover(const QJsonObject& /*params*/)
QJsonArray LedDeviceNanoleaf::discover()
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType);
QJsonArray deviceList;
// Discover Nanoleaf Devices
SSDPDiscover discover;
// Search for Canvas and Light-Panels
QString searchTargetFilter = QString("%1|%2").arg(SSDP_CANVAS, SSDP_LIGHTPANELS);
QString searchTargetFilter = QString("%1|%2").arg(SSDP_NANOLEAF, SSDP_LIGHTPANELS);
discover.setSearchFilter(searchTargetFilter, SSDP_FILTER_HEADER);
QString searchTarget = SSDP_ID;
@@ -399,26 +409,41 @@ QJsonObject LedDeviceNanoleaf::discover(const QJsonObject& /*params*/)
deviceList = discover.getServicesDiscoveredJson();
}
return deviceList;
}
QJsonObject LedDeviceNanoleaf::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType);
QString discoveryMethod("ssdp");
QJsonArray deviceList;
deviceList = discover();
devicesDiscovered.insert("discoveryMethod", discoveryMethod);
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return devicesDiscovered;
}
QJsonObject LedDeviceNanoleaf::getProperties(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
// Get Nanoleaf device properties
QString host = params["host"].toString("");
if (!host.isEmpty())
QString hostName = params["host"].toString("");
if (!hostName.isEmpty())
{
QString authToken = params["token"].toString("");
QString filter = params["filter"].toString("");
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
@@ -443,22 +468,22 @@ QJsonObject LedDeviceNanoleaf::getProperties(const QJsonObject& params)
properties.insert("properties", response.getBody().object());
Debug(_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
}
return properties;
}
void LedDeviceNanoleaf::identify(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QString host = params["host"].toString("");
if (!host.isEmpty())
QString hostName = params["host"].toString("");
if (!hostName.isEmpty())
{
QString authToken = params["token"].toString("");
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
@@ -485,44 +510,247 @@ void LedDeviceNanoleaf::identify(const QJsonObject& params)
bool LedDeviceNanoleaf::powerOn()
{
bool on = false;
if (_isDeviceReady)
{
changeToExternalControlMode();
if (changeToExternalControlMode())
{
QJsonObject newState;
//Power-on Nanoleaf device
_restApi->setPath(API_STATE);
_restApi->put(getOnOffRequest(true));
QJsonObject onValue { {STATE_VALUE, true} };
newState.insert(STATE_ON, onValue);
if ( _isBrightnessOverwrite)
{
QJsonObject briValue { {STATE_VALUE, _brightness} };
newState.insert(STATE_BRI, briValue);
}
//Power-on Nanoleaf device
_restApi->setPath(API_STATE);
httpResponse response = _restApi->put(newState);
if (response.error())
{
QString errorReason = QString("Power-on request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
on = false;
} else {
on = true;
}
}
}
return true;
return on;
}
bool LedDeviceNanoleaf::powerOff()
{
bool off = true;
if (_isDeviceReady)
{
QJsonObject newState;
QJsonObject onValue { {STATE_VALUE, false} };
newState.insert(STATE_ON, onValue);
//Power-off the Nanoleaf device physically
_restApi->setPath(API_STATE);
_restApi->put(getOnOffRequest(false));
httpResponse response = _restApi->put(newState);
if (response.error())
{
QString errorReason = QString("Power-off request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
off = false;
}
}
return true;
return off;
}
QString LedDeviceNanoleaf::getOnOffRequest(bool isOn) const
bool LedDeviceNanoleaf::storeState()
{
QString state = isOn ? STATE_VALUE_TRUE : STATE_VALUE_FALSE;
return QString("{\"%1\":{\"%2\":%3}}").arg(STATE_ON, STATE_ONOFF_VALUE, state);
bool rc = true;
if ( _isRestoreOrigState )
{
_restApi->setPath(API_STATE);
httpResponse response = _restApi->get();
if ( response.error() )
{
QString errorReason = QString("Storing device state failed with error: '%1'").arg(response.getErrorReason());
setInError(errorReason);
rc = false;
}
else
{
_originalStateProperties = response.getBody().object();
DebugIf(verbose, _log, "state: [%s]", QString(QJsonDocument(_originalStateProperties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QJsonObject isOn = _originalStateProperties.value(STATE_ON).toObject();
if (!isOn.isEmpty())
{
_originalIsOn = isOn[STATE_VALUE].toBool();
}
QJsonObject bri = _originalStateProperties.value(STATE_BRI).toObject();
if (!bri.isEmpty())
{
_originalBri = bri[STATE_VALUE].toInt();
}
_originalColorMode = _originalStateProperties[STATE_COLORMODE].toString();
switch(COLOR_MODES.indexOf(_originalColorMode)) {
case 0:
{
// hs
QJsonObject hue = _originalStateProperties.value(STATE_HUE).toObject();
if (!hue.isEmpty())
{
_originalHue = hue[STATE_VALUE].toInt();
}
QJsonObject sat = _originalStateProperties.value(STATE_SAT).toObject();
if (!sat.isEmpty())
{
_originalSat = sat[STATE_VALUE].toInt();
}
break;
}
case 1:
{
// ct
QJsonObject ct = _originalStateProperties.value(STATE_CT).toObject();
if (!ct.isEmpty())
{
_originalCt = ct[STATE_VALUE].toInt();
}
break;
}
case 2:
{
// effect
_restApi->setPath(API_EFFECT);
httpResponse response = _restApi->get();
if ( response.error() )
{
QString errorReason = QString("Storing device state failed with error: '%1'").arg(response.getErrorReason());
setInError(errorReason);
rc = false;
}
else
{
QJsonObject effects = response.getBody().object();
DebugIf(verbose, _log, "effects: [%s]", QString(QJsonDocument(_originalStateProperties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
_originalEffect = effects[API_EFFECT_SELECT].toString();
_originalIsDynEffect = _originalEffect == "*Dynamic*" || _originalEffect == "*Solid*";
}
break;
}
default:
QString errorReason = QString("Unknown ColorMode: '%1'").arg(_originalColorMode);
setInError(errorReason);
rc = false;
break;
}
}
}
return rc;
}
QJsonDocument LedDeviceNanoleaf::changeToExternalControlMode()
bool LedDeviceNanoleaf::restoreState()
{
bool rc = true;
if ( _isRestoreOrigState )
{
QJsonObject newState;
switch(COLOR_MODES.indexOf(_originalColorMode)) {
case 0:
{ // hs
QJsonObject hueValue { {STATE_VALUE, _originalHue} };
newState.insert(STATE_HUE, hueValue);
QJsonObject satValue { {STATE_VALUE, _originalSat} };
newState.insert(STATE_SAT, satValue);
break;
}
case 1:
{ // ct
QJsonObject ctValue { {STATE_VALUE, _originalCt} };
newState.insert(STATE_CT, ctValue);
break;
}
case 2:
{ // effect
if (!_originalIsDynEffect)
{
QJsonObject newEffect;
newEffect[API_EFFECT_SELECT] = _originalEffect;
_restApi->setPath(API_EFFECT);
httpResponse response = _restApi->put(newEffect);
if ( response.error() )
{
Warning (_log, "%s restoring effect failed with error: '%s'", QSTRING_CSTR(_activeDeviceType), QSTRING_CSTR(response.getErrorReason()));
}
} else {
Warning (_log, "%s restoring effect failed with error: Cannot restore dynamic or solid effect. Turning device off", QSTRING_CSTR(_activeDeviceType));
_originalIsOn = false;
}
break;
}
default:
Warning (_log, "%s restoring failed with error: Unknown ColorMode", QSTRING_CSTR(_activeDeviceType));
rc = false;
}
if (!_originalIsDynEffect)
{
QJsonObject briValue { {STATE_VALUE, _originalBri} };
newState.insert(STATE_BRI, briValue);
}
QJsonObject onValue { {STATE_VALUE, _originalIsOn} };
newState.insert(STATE_ON, onValue);
_restApi->setPath(API_STATE);
httpResponse response = _restApi->put(newState);
if ( response.error() )
{
Warning (_log, "%s restoring state failed with error: '%s'", QSTRING_CSTR(_activeDeviceType), QSTRING_CSTR(response.getErrorReason()));
rc = false;
}
}
return rc;
}
bool LedDeviceNanoleaf::changeToExternalControlMode()
{
QJsonDocument resp;
return changeToExternalControlMode(resp);
}
bool LedDeviceNanoleaf::changeToExternalControlMode(QJsonDocument& resp)
{
bool success = false;
Debug(_log, "Set Nanoleaf to External Control (UDP) streaming mode");
_extControlVersion = EXTCTRLVER_V2;
//Enable UDP Mode v2
_restApi->setPath(API_EFFECT);
httpResponse response = _restApi->put(API_EXT_MODE_STRING_V2);
return response.getBody();
if (response.error())
{
QString errorReason = QString("Change to external control mode failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
}
else
{
resp = response.getBody();
success = true;
}
return success;
}
int LedDeviceNanoleaf::write(const std::vector<ColorRgb>& ledValues)
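For reference, the body that the reworked powerOn() PUTs to the state endpoint wraps each attribute in a nested value object. A minimal standard-library sketch of composing that body (the project itself builds it with QJsonObject; the helper name below is illustrative):

#include <iostream>
#include <sstream>
#include <string>

// Build a Nanoleaf state body, e.g. {"on":{"value":true},"brightness":{"value":80}}
std::string makeStateBody(bool on, int brightness, bool overwriteBrightness)
{
    std::ostringstream body;
    body << "{\"on\":{\"value\":" << (on ? "true" : "false") << "}";
    if (overwriteBrightness)
    {
        body << ",\"brightness\":{\"value\":" << brightness << "}";
    }
    body << "}";
    return body.str();
}

int main()
{
    std::cout << makeStateBody(true, 80, true) << std::endl;  // power-on with brightness overwrite
    std::cout << makeStateBody(false, 0, false) << std::endl; // power-off
    return 0;
}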

View File

@@ -126,6 +126,25 @@ protected:
///
bool powerOff() override;
///
/// @brief Store the device's original state.
///
/// Save the device's state before hyperion color streaming starts, allowing the state to be restored during switchOff().
///
/// @return True if success
///
bool storeState() override;
///
/// @brief Restore the device's original state.
///
/// Restore the device's state as before hyperion color streaming started.
/// This includes the on/off state of the device.
///
/// @return True, if success
///
bool restoreState() override;
private:
///
@@ -149,22 +168,28 @@ private:
///
/// @brief Change Nanoleaf device to External Control (UDP) mode
///
/// @return Response from device
///
QJsonDocument changeToExternalControlMode();
/// @return True, if success
bool changeToExternalControlMode();
///
/// @brief Change Nanoleaf device to External Control (UDP) mode
///
/// @param[out] response from device
///
/// @return True, if success
bool changeToExternalControlMode(QJsonDocument& resp);
///
/// @brief Get command to power Nanoleaf device on or off
/// @brief Discover Nanoleaf devices available (for configuration).
/// Nanoleaf specific ssdp discovery
///
/// @param isOn True, if to switch on device
/// @return Command to switch device on/off
/// @return A JSON structure holding a list of devices found
///
QString getOnOffRequest(bool isOn) const;
QJsonArray discover();
///REST-API wrapper
ProviderRestApi* _restApi;
QString _hostname;
QString _hostName;
int _apiPort;
QString _authToken;
@@ -183,6 +208,21 @@ private:
/// Array of the panel ids.
QVector<int> _panelIds;
QJsonObject _originalStateProperties;
bool _isBrightnessOverwrite;
int _brightness;
QString _originalColorMode;
bool _originalIsOn;
int _originalHue;
int _originalSat;
int _originalCt;
int _originalBri;
QString _originalEffect;
bool _originalIsDynEffect {false};
};
#endif // LEDEVICENANOLEAF_H
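The original-state members declared just above back the restore logic in the implementation file: which attributes are re-applied depends on the colour mode captured in storeState(). A stripped-down sketch of that decision, in plain standard C++ with illustrative names only:

#include <iostream>
#include <string>

// Captured before streaming started (illustrative subset of the members above)
struct OriginalState {
    std::string colorMode; // "hs", "ct" or "effect"
    int hue = 0, sat = 0, ct = 0, bri = 0;
    bool isDynamicEffect = false;
};

// Decide which attributes a restore request needs to carry
std::string describeRestore(const OriginalState& s)
{
    if (s.colorMode == "hs")     return "restore hue=" + std::to_string(s.hue) + " sat=" + std::to_string(s.sat);
    if (s.colorMode == "ct")     return "restore ct=" + std::to_string(s.ct);
    if (s.colorMode == "effect") return s.isDynamicEffect ? "cannot restore dynamic/solid effect, power off"
                                                          : "re-select stored effect";
    return "unknown colour mode";
}

int main()
{
    std::cout << describeRestore({"hs", 120, 80, 0, 50, false}) << std::endl;
    std::cout << describeRestore({"effect", 0, 0, 0, 0, true}) << std::endl;
    return 0;
}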

View File

@@ -12,7 +12,7 @@ namespace {
bool verbose = false;
// Configuration settings
const char CONFIG_ADDRESS[] = "output";
const char CONFIG_ADDRESS[] = "host";
//const char CONFIG_PORT[] = "port";
const char CONFIG_USERNAME[] = "username";
const char CONFIG_CLIENTKEY[] = "clientkey";
@@ -115,7 +115,7 @@ CiColor CiColor::rgbToCiColor(double red, double green, double blue, const CiCol
double cy;
double bri;
if(red + green + blue > 0)
if( (red + green + blue) > 0)
{
// Apply gamma correction.
double r = (red > 0.04045) ? pow((red + 0.055) / (1.0 + 0.055), 2.4) : (red / 12.92);
@@ -157,7 +157,7 @@ CiColor CiColor::rgbToCiColor(double red, double green, double blue, const CiCol
CiColor xy = { cx, cy, bri };
if(red + green + blue > 0)
if( (red + green + blue) > 0)
{
// Check if the given XY value is within the color reach of our lamps.
if (!isPointInLampsReach(xy, colorSpace))
@@ -387,8 +387,11 @@ void LedDevicePhilipsHueBridge::log(const char* msg, const char* type, ...) cons
vsnprintf(val, max_val_length, type, args);
va_end(args);
std::string s = msg;
int max = 30;
s.append(max - s.length(), ' ');
size_t max = 30;
if (max > s.length())
{
s.append(max - s.length(), ' ');
}
Debug( _log, "%s: %s", s.c_str(), val );
}
@@ -649,7 +652,7 @@ const std::set<QString> PhilipsHueLight::GAMUT_A_MODEL_IDS =
const std::set<QString> PhilipsHueLight::GAMUT_B_MODEL_IDS =
{ "LCT001", "LCT002", "LCT003", "LCT007", "LLM001" };
const std::set<QString> PhilipsHueLight::GAMUT_C_MODEL_IDS =
{ "LCA001", "LCA002", "LCA003", "LCG002", "LCP001", "LCP002", "LCT010", "LCT011", "LCT012", "LCT014", "LCT015", "LCT016", "LCT024", "LLC020", "LST002" };
{ "LCA001", "LCA002", "LCA003", "LCG002", "LCP001", "LCP002", "LCT010", "LCT011", "LCT012", "LCT014", "LCT015", "LCT016", "LCT024", "LCX001", "LLC020", "LST002" };
PhilipsHueLight::PhilipsHueLight(Logger* log, unsigned int id, QJsonObject values, unsigned int ledidx)
: _log(log)
@@ -859,7 +862,7 @@ bool LedDevicePhilipsHue::init(const QJsonObject &deviceConfig)
if( _groupId == 0 )
{
log( "Group-ID is invalid", "%d", _groupId );
Error(_log, "Disabling Entertainment API as Group-ID is invalid" );
_useHueEntertainmentAPI = false;
}
}
@@ -888,7 +891,7 @@ bool LedDevicePhilipsHue::setLights()
if( _useHueEntertainmentAPI )
{
_useHueEntertainmentAPI = false;
Debug(_log, "Group-ID [%u] is not usable - Entertainment API usage was disabled!", _groupId );
Error(_log, "Group-ID [%u] is not usable - Entertainment API usage was disabled!", _groupId );
}
lArray = _devConfig[ CONFIG_LIGHTIDS ].toArray();
}
@@ -1018,7 +1021,7 @@ bool LedDevicePhilipsHue::updateLights(const QMap<quint16, QJsonObject> &map)
if( lightsCount == 0 )
{
Debug(_log, "No usable lights found!" );
Error(_log, "No usable lights found!" );
isInitOK = false;
}
@@ -1073,18 +1076,18 @@ bool LedDevicePhilipsHue::openStream()
if( isInitOK )
{
Info(_log, "Philips Hue Entertaiment API successful connected! Start Streaming." );
Info(_log, "Philips Hue Entertainment API successful connected! Start Streaming." );
_allLightsBlack = true;
noSignalDetection();
}
else
{
Error(_log, "Philips Hue Entertaiment API not connected!" );
Error(_log, "Philips Hue Entertainment API not connected!" );
}
}
else
{
Error(_log, "Philips Hue Entertaiment API could not initialisized!" );
Error(_log, "Philips Hue Entertainment API could not be initialised!" );
}
return isInitOK;
@@ -1235,7 +1238,7 @@ QByteArray LedDevicePhilipsHue::prepareStreamData() const
CiColor lightC = light.getColor();
quint64 R = lightC.x * 0xffff;
quint64 G = lightC.y * 0xffff;
quint64 B = lightC.bri * 0xffff;
quint64 B = (lightC.x || lightC.y) ? lightC.bri * 0xffff : 0;
unsigned int id = light.getId();
const uint8_t payload[] = {
0x00, 0x00, static_cast<uint8_t>(id),
@@ -1315,7 +1318,7 @@ bool LedDevicePhilipsHue::switchOff()
stop_retry_left = 3;
if (_useHueEntertainmentAPI)
{
stopStream();
stopStream();
}
return LedDevicePhilipsHueBridge::switchOff();
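The streaming payload built in prepareStreamData() above carries one record per light: a three-byte address (two zero bytes plus the light id) followed by three 16-bit big-endian words computed from the CIE x, y and brightness; the change zeroes the third word when x and y are both zero so that black actually switches the lamp off. A minimal sketch of that packing, with scaling as in the diff (helper name and exact address semantics are illustrative):

#include <cstdint>
#include <cstdio>
#include <vector>

// Append one light record: address bytes, then x, y, bri scaled to 0..0xffff, high byte first
void appendLightRecord(std::vector<uint8_t>& msg, uint8_t id, double x, double y, double bri)
{
    const uint16_t R = static_cast<uint16_t>(x * 0xffff);
    const uint16_t G = static_cast<uint16_t>(y * 0xffff);
    const uint16_t B = (x > 0.0 || y > 0.0) ? static_cast<uint16_t>(bri * 0xffff) : 0; // black -> off
    msg.push_back(0x00); // address header bytes as in the diff
    msg.push_back(0x00);
    msg.push_back(id);   // light id
    for (uint16_t w : { R, G, B })
    {
        msg.push_back(static_cast<uint8_t>(w >> 8));   // high byte first (big-endian)
        msg.push_back(static_cast<uint8_t>(w & 0xff)); // low byte
    }
}

int main()
{
    std::vector<uint8_t> msg;
    appendLightRecord(msg, 1, 0.4, 0.4, 0.8);
    appendLightRecord(msg, 2, 0.0, 0.0, 0.8); // brightness word forced to 0
    for (uint8_t b : msg) std::printf("%02x ", b);
    std::printf("\n");
    return 0;
}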
@@ -1467,7 +1470,7 @@ void LedDevicePhilipsHue::setColor(PhilipsHueLight& light, CiColor& color)
if( !_useHueEntertainmentAPI )
{
const int bri = qRound(qMin(254.0, _brightnessFactor * qMax(1.0, color.bri * 254.0)));
QString stateCmd = QString("\"%1\":[%2,%3],\"%4\":%5").arg( API_XY_COORDINATES ).arg( color.x, 0, 'd', 4 ).arg( color.y, 0, 'd', 4 ).arg( API_BRIGHTNESS ).arg( bri );
QString stateCmd = QString("{\"%1\":[%2,%3],\"%4\":%5}").arg( API_XY_COORDINATES ).arg( color.x, 0, 'd', 4 ).arg( color.y, 0, 'd', 4 ).arg( API_BRIGHTNESS ).arg( bri );
setLightState( light.getId(), stateCmd );
}
else

View File

@@ -4,9 +4,15 @@ const ushort TPM2_DEFAULT_PORT = 65506;
LedDeviceTpm2net::LedDeviceTpm2net(const QJsonObject &deviceConfig)
: ProviderUdp(deviceConfig)
, _tpm2_buffer(nullptr)
{
}
LedDeviceTpm2net::~LedDeviceTpm2net()
{
free (_tpm2_buffer);
}
LedDevice* LedDeviceTpm2net::construct(const QJsonObject &deviceConfig)
{
return new LedDeviceTpm2net(deviceConfig);
@@ -23,7 +29,9 @@ bool LedDeviceTpm2net::init(const QJsonObject &deviceConfig)
{
_tpm2_max = deviceConfig["max-packet"].toInt(170);
_tpm2ByteCount = 3 * _ledCount;
_tpm2TotalPackets = 1 + _tpm2ByteCount / _tpm2_max;
_tpm2TotalPackets = (_tpm2ByteCount / _tpm2_max) + ((_tpm2ByteCount % _tpm2_max) != 0);
_tpm2_buffer = (uint8_t*) malloc(_tpm2_max+7);
isInitOK = true;
}
@@ -32,8 +40,6 @@ bool LedDeviceTpm2net::init(const QJsonObject &deviceConfig)
int LedDeviceTpm2net::write(const std::vector<ColorRgb> &ledValues)
{
uint8_t * tpm2_buffer = (uint8_t*) malloc(_tpm2_max+7);
int retVal = 0;
int _thisPacketBytes = 0;
@@ -48,23 +54,22 @@ int LedDeviceTpm2net::write(const std::vector<ColorRgb> &ledValues)
_thisPacketBytes = (_tpm2ByteCount - rawIdx < _tpm2_max) ? _tpm2ByteCount % _tpm2_max : _tpm2_max;
// is this the last packet? ? ^^ last packet : ^^ earlier packets
tpm2_buffer[0] = 0x9c; // Packet start byte
tpm2_buffer[1] = 0xda; // Packet type Data frame
tpm2_buffer[2] = (_thisPacketBytes >> 8) & 0xff; // Frame size high
tpm2_buffer[3] = _thisPacketBytes & 0xff; // Frame size low
tpm2_buffer[4] = _tpm2ThisPacket++; // Packet Number
tpm2_buffer[5] = _tpm2TotalPackets; // Number of packets
_tpm2_buffer[0] = 0x9c; // Packet start byte
_tpm2_buffer[1] = 0xda; // Packet type Data frame
_tpm2_buffer[2] = (_thisPacketBytes >> 8) & 0xff; // Frame size high
_tpm2_buffer[3] = _thisPacketBytes & 0xff; // Frame size low
_tpm2_buffer[4] = _tpm2ThisPacket++; // Packet Number
_tpm2_buffer[5] = _tpm2TotalPackets; // Number of packets
}
tpm2_buffer [6 + rawIdx%_tpm2_max] = rawdata[rawIdx];
_tpm2_buffer [6 + rawIdx%_tpm2_max] = rawdata[rawIdx];
// is this the last byte of last packet || last byte of other packets
if ( (rawIdx == _tpm2ByteCount-1) || (rawIdx %_tpm2_max == _tpm2_max-1) )
{
tpm2_buffer [6 + rawIdx%_tpm2_max +1] = 0x36; // Packet end byte
retVal &= writeBytes(_thisPacketBytes+7, tpm2_buffer);
_tpm2_buffer [6 + rawIdx%_tpm2_max +1] = 0x36; // Packet end byte
retVal &= writeBytes(_thisPacketBytes+7, _tpm2_buffer);
}
}
return retVal;
}
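The packet-count change above replaces "1 + byteCount / maxPacket", which over-counts by one whenever the payload is an exact multiple of the packet size, with a ceiling division. A small standalone sketch showing the difference:

#include <cassert>
#include <cstdio>

// Ceiling division as used by the new code: full packets plus one more if there is a remainder
int totalPackets(int byteCount, int maxPacket)
{
    return (byteCount / maxPacket) + ((byteCount % maxPacket) != 0);
}

int main()
{
    assert(totalPackets(340, 170) == 2); // exact multiple: the old formula 1 + 340/170 gives 3
    assert(totalPackets(341, 170) == 3); // one extra byte needs an extra packet
    assert(totalPackets(100, 170) == 1);
    std::printf("all packet-count checks passed\n");
    return 0;
}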

View File

@@ -18,6 +18,11 @@ public:
///
explicit LedDeviceTpm2net(const QJsonObject &deviceConfig);
///
/// @brief Destructor of the TPM2 LED-device
///
~LedDeviceTpm2net() override;
///
/// @brief Constructs the LED-device
///
@@ -48,6 +53,8 @@ private:
int _tpm2ByteCount;
int _tpm2TotalPackets;
int _tpm2ThisPacket;
uint8_t * _tpm2_buffer;
};
#endif // LEDEVICETPM2NET_H
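The new member buffer is allocated once with malloc() in init() and released in the added destructor, avoiding the per-write allocation of the old code. As a hedged design note, not the project's implementation: holding the buffer in a std::vector would give the same one-time allocation without a manual free().

#include <cstdint>
#include <vector>

// Illustrative only: a vector sized once releases its memory automatically
class Tpm2Buffer
{
public:
    explicit Tpm2Buffer(std::size_t maxPacket)
        : _data(maxPacket + 7) // 6 header bytes + payload + end byte
    {
    }
    uint8_t* data() { return _data.data(); }
    std::size_t size() const { return _data.size(); }

private:
    std::vector<uint8_t> _data;
};

int main()
{
    Tpm2Buffer buffer(170);
    buffer.data()[0] = 0x9c; // packet start byte, as in the device code
    return buffer.size() == 177 ? 0 : 1;
}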

View File

@@ -1,6 +1,14 @@
#include "LedDeviceUdpRaw.h"
// Constants
namespace {
const bool verbose = false;
const ushort RAW_DEFAULT_PORT=5568;
const int UDP_MAX_LED_NUM = 490;
} //End of constants
LedDeviceUdpRaw::LedDeviceUdpRaw(const QJsonObject &deviceConfig)
: ProviderUdp(deviceConfig)
@@ -16,8 +24,28 @@ bool LedDeviceUdpRaw::init(const QJsonObject &deviceConfig)
{
_port = RAW_DEFAULT_PORT;
// Initialise sub-class
bool isInitOK = ProviderUdp::init(deviceConfig);
bool isInitOK = false;
if ( LedDevice::init(deviceConfig) )
{
// Initialise LedDevice configuration and execution environment
int configuredLedCount = this->getLedCount();
Debug(_log, "DeviceType : %s", QSTRING_CSTR( this->getActiveDeviceType() ));
Debug(_log, "LedCount : %d", configuredLedCount);
Debug(_log, "ColorOrder : %s", QSTRING_CSTR( this->getColorOrder() ));
Debug(_log, "LatchTime : %d", this->getLatchTime());
if (configuredLedCount > UDP_MAX_LED_NUM)
{
QString errorReason = QString("Device type %1 can only be run with maximum %2 LEDs!").arg(this->getActiveDeviceType()).arg(UDP_MAX_LED_NUM);
this->setInError ( errorReason );
isInitOK = false;
}
else
{
// Initialise sub-class
isInitOK = ProviderUdp::init(deviceConfig);
}
}
return isInitOK;
}
@@ -27,3 +55,18 @@ int LedDeviceUdpRaw::write(const std::vector<ColorRgb> &ledValues)
return writeBytes(_ledRGBCount, dataPtr);
}
QJsonObject LedDeviceUdpRaw::getProperties(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QJsonObject properties;
QJsonObject propertiesDetails;
propertiesDetails.insert("maxLedCount", UDP_MAX_LED_NUM);
properties.insert("properties", propertiesDetails);
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
return properties;
}
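The 490-LED ceiling introduced here caps one RGB frame at 1470 bytes (3 bytes per LED), which still fits into a single unfragmented UDP datagram on a standard 1500-byte MTU link. A short sketch of the same validation in isolation (constant and message as in the diff, function name illustrative):

#include <iostream>
#include <string>

constexpr int UDP_MAX_LED_NUM = 490; // 490 * 3 = 1470 bytes of payload per frame

// Returns an empty string if the count is acceptable, otherwise an error text
std::string validateLedCount(int configuredLedCount)
{
    if (configuredLedCount > UDP_MAX_LED_NUM)
    {
        return "Device can only be run with maximum " + std::to_string(UDP_MAX_LED_NUM) + " LEDs!";
    }
    return {};
}

int main()
{
    std::cout << (validateLedCount(300).empty() ? "300 LEDs ok" : "300 LEDs rejected") << std::endl;
    std::cout << validateLedCount(600) << std::endl;
    return 0;
}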

View File

@@ -26,6 +26,14 @@ public:
///
static LedDevice* construct(const QJsonObject &deviceConfig);
///
/// @brief Get a UDP-Raw device's resource properties
///
/// @param[in] params Parameters to query device
/// @return A JSON structure holding the device's properties
///
QJsonObject getProperties(const QJsonObject& params) override;
protected:
///

View File

@@ -1,17 +1,27 @@
// Local-Hyperion includes
#include "LedDeviceWled.h"
#include <ssdp/SSDPDiscover.h>
#include <utils/QStringUtils.h>
#include <utils/WaitTime.h>
#include <QThread>
#include <chrono>
// Constants
namespace {
const bool verbose = false;
// Configuration settings
const char CONFIG_ADDRESS[] = "host";
const char CONFIG_RESTORE_STATE[] = "restoreOriginalState";
const char CONFIG_BRIGHTNESS[] = "brightness";
const char CONFIG_BRIGHTNESS_OVERWRITE[] = "overwriteBrightness";
const char CONFIG_SYNC_OVERWRITE[] = "overwriteSync";
// UDP elements
const quint16 STREAM_DEFAULT_PORT = 19446;
const int UDP_MAX_LED_NUM = 490;
// WLED JSON-API elements
const int API_DEFAULT_PORT = -1; //Use default port per communication scheme
@@ -24,12 +34,14 @@ const char API_PATH_STATE[] = "state";
const char STATE_ON[] = "on";
const char STATE_VALUE_TRUE[] = "true";
const char STATE_VALUE_FALSE[] = "false";
const char STATE_LIVE[] = "live";
// WLED ssdp services
// TODO: WLED - Update ssdp discovery parameters when available
const char SSDP_ID[] = "ssdp:all";
const char SSDP_FILTER[] = "(.*)";
const char SSDP_FILTER_HEADER[] = "ST";
const bool DEFAULT_IS_RESTORE_STATE = false;
const bool DEFAULT_IS_BRIGHTNESS_OVERWRITE = true;
const int BRI_MAX = 255;
const bool DEFAULT_IS_SYNC_OVERWRITE = true;
constexpr std::chrono::milliseconds DEFAULT_IDENTIFY_TIME{ 2000 };
} //End of constants
@@ -37,6 +49,11 @@ LedDeviceWled::LedDeviceWled(const QJsonObject &deviceConfig)
: ProviderUdp(deviceConfig)
,_restApi(nullptr)
,_apiPort(API_DEFAULT_PORT)
,_isBrightnessOverwrite(DEFAULT_IS_BRIGHTNESS_OVERWRITE)
,_brightness (BRI_MAX)
,_isSyncOverwrite(DEFAULT_IS_SYNC_OVERWRITE)
,_originalStateUdpnSend(false)
,_originalStateUdpnRecv(true)
{
}
@@ -53,7 +70,6 @@ LedDevice* LedDeviceWled::construct(const QJsonObject &deviceConfig)
bool LedDeviceWled::init(const QJsonObject &deviceConfig)
{
Debug(_log, "");
bool isInitOK = false;
// Initialise LedDevice sub-class, ProviderUdp::init will be executed later, if connectivity is defined
@@ -66,18 +82,35 @@ bool LedDeviceWled::init(const QJsonObject &deviceConfig)
Debug(_log, "ColorOrder : %s", QSTRING_CSTR( this->getColorOrder() ));
Debug(_log, "LatchTime : %d", this->getLatchTime());
if (configuredLedCount > UDP_MAX_LED_NUM)
{
QString errorReason = QString("Device type %1 can only be run with maximum %2 LEDs!").arg(this->getActiveDeviceType()).arg(UDP_MAX_LED_NUM);
this->setInError ( errorReason );
return false;
}
_isRestoreOrigState = _devConfig[CONFIG_RESTORE_STATE].toBool(DEFAULT_IS_RESTORE_STATE);
_isSyncOverwrite = _devConfig[CONFIG_SYNC_OVERWRITE].toBool(DEFAULT_IS_SYNC_OVERWRITE);
_isBrightnessOverwrite = _devConfig[CONFIG_BRIGHTNESS_OVERWRITE].toBool(DEFAULT_IS_BRIGHTNESS_OVERWRITE);
_brightness = _devConfig[CONFIG_BRIGHTNESS].toInt(BRI_MAX);
Debug(_log, "RestoreOrigState : %d", _isRestoreOrigState);
Debug(_log, "Overwrite Sync. : %d", _isSyncOverwrite);
Debug(_log, "Overwrite Brightn.: %d", _isBrightnessOverwrite);
Debug(_log, "Set Brightness to : %d", _brightness);
//Set hostname as per configuration
QString address = deviceConfig[ CONFIG_ADDRESS ].toString();
QString hostName = deviceConfig[ CONFIG_ADDRESS ].toString();
//If host not configured the init fails
if ( address.isEmpty() )
if ( hostName.isEmpty() )
{
this->setInError("No target hostname nor IP defined");
return false;
}
else
{
QStringList addressparts = QStringUtils::split(address,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
_hostname = addressparts[0];
if ( addressparts.size() > 1 )
{
@@ -100,13 +133,11 @@ bool LedDeviceWled::init(const QJsonObject &deviceConfig)
}
}
}
Debug(_log, "[%d]", isInitOK);
return isInitOK;
}
bool LedDeviceWled::initRestAPI(const QString &hostname, int port)
{
Debug(_log, "");
bool isInitOK = false;
if ( _restApi == nullptr )
@@ -116,38 +147,88 @@ bool LedDeviceWled::initRestAPI(const QString &hostname, int port)
isInitOK = true;
}
Debug(_log, "[%d]", isInitOK);
return isInitOK;
}
QString LedDeviceWled::getOnOffRequest(bool isOn) const
{
QString state = isOn ? STATE_VALUE_TRUE : STATE_VALUE_FALSE;
return QString( "{\"%1\":%2}" ).arg( STATE_ON, state);
return QString( "\"%1\":%2,\"%3\":%4" ).arg( STATE_ON, state).arg( STATE_LIVE, state);
}
QString LedDeviceWled::getBrightnessRequest(int bri) const
{
return QString( "\"bri\":%1" ).arg(bri);
}
QString LedDeviceWled::getEffectRequest(int effect, int speed) const
{
return QString( "\"seg\":{\"fx\":%1,\"sx\":%2}" ).arg(effect).arg(speed);
}
QString LedDeviceWled::getLorRequest(int lor) const
{
return QString( "\"lor\":%1" ).arg(lor);
}
QString LedDeviceWled::getUdpnRequest(bool isSendOn, bool isRecvOn) const
{
QString send = isSendOn ? STATE_VALUE_TRUE : STATE_VALUE_FALSE;
QString recv = isRecvOn ? STATE_VALUE_TRUE : STATE_VALUE_FALSE;
return QString( "\"udpn\":{\"send\":%1,\"recv\":%2}" ).arg(send, recv);
}
bool LedDeviceWled::sendStateUpdateRequest(const QString &request)
{
bool rc = true;
_restApi->setPath(API_PATH_STATE);
httpResponse response1 = _restApi->put(QString("{%1}").arg(request));
if ( response1.error() )
{
rc = false;
}
return rc;
}
bool LedDeviceWled::powerOn()
{
Debug(_log, "");
bool on = true;
bool on = false;
if ( _isDeviceReady)
{
//Power-on WLED device
_restApi->setPath(API_PATH_STATE);
httpResponse response = _restApi->put(getOnOffRequest(true));
QString cmd = getOnOffRequest(true);
if ( _isBrightnessOverwrite)
{
cmd += "," + getBrightnessRequest(_brightness);
}
if (_isSyncOverwrite)
{
Debug( _log, "Disable synchronisation with other WLED devices");
cmd += "," + getUdpnRequest(false, false);
}
httpResponse response = _restApi->put(QString("{%1}").arg(cmd));
if ( response.error() )
{
this->setInError ( response.getErrorReason() );
QString errorReason = QString("Power-on request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
on = false;
}
else
{
on = true;
}
}
return on;
}
bool LedDeviceWled::powerOff()
{
Debug(_log, "");
bool off = true;
if ( _isDeviceReady)
{
@@ -156,53 +237,104 @@ bool LedDeviceWled::powerOff()
//Power-off the WLED device physically
_restApi->setPath(API_PATH_STATE);
httpResponse response = _restApi->put(getOnOffRequest(false));
QString cmd = getOnOffRequest(false);
if (_isSyncOverwrite)
{
Debug( _log, "Restore synchronisation with other WLED devices");
cmd += "," + getUdpnRequest(_originalStateUdpnSend, _originalStateUdpnRecv);
}
httpResponse response = _restApi->put(QString("{%1}").arg(cmd));
if ( response.error() )
{
this->setInError ( response.getErrorReason() );
QString errorReason = QString("Power-off request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
off = false;
}
}
return off;
}
bool LedDeviceWled::storeState()
{
bool rc = true;
if ( _isRestoreOrigState || _isSyncOverwrite )
{
_restApi->setPath(API_PATH_STATE);
httpResponse response = _restApi->get();
if ( response.error() )
{
QString errorReason = QString("Storing device state failed with error: '%1'").arg(response.getErrorReason());
setInError(errorReason);
rc = false;
}
else
{
_originalStateProperties = response.getBody().object();
DebugIf(verbose, _log, "state: [%s]", QString(QJsonDocument(_originalStateProperties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QJsonObject udpn = _originalStateProperties.value("udpn").toObject();
if (!udpn.isEmpty())
{
_originalStateUdpnSend = udpn["send"].toBool(false);
_originalStateUdpnRecv = udpn["recv"].toBool(true);
}
}
}
return rc;
}
bool LedDeviceWled::restoreState()
{
bool rc = true;
if ( _isRestoreOrigState )
{
_restApi->setPath(API_PATH_STATE);
_originalStateProperties[STATE_LIVE] = false;
httpResponse response = _restApi->put(QString(QJsonDocument(_originalStateProperties).toJson(QJsonDocument::Compact)).toUtf8().constData());
if ( response.error() )
{
Warning (_log, "%s restoring state failed with error: '%s'", QSTRING_CSTR(_activeDeviceType), QSTRING_CSTR(response.getErrorReason()));
}
}
return rc;
}
QJsonObject LedDeviceWled::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
QJsonArray deviceList;
// Discover WLED Devices
SSDPDiscover discover;
discover.skipDuplicateKeys(true);
discover.setSearchFilter(SSDP_FILTER, SSDP_FILTER_HEADER);
QString searchTarget = SSDP_ID;
if ( discover.discoverServices(searchTarget) > 0 )
{
deviceList = discover.getServicesDiscoveredJson();
}
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose, _log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
return devicesDiscovered;
}
QJsonObject LedDeviceWled::getProperties(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QJsonObject properties;
// Get WLED device properties
QString host = params["host"].toString("");
if ( !host.isEmpty() )
QString hostName = params["host"].toString("");
if ( !hostName.isEmpty() )
{
QString filter = params["filter"].toString("");
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
@@ -224,51 +356,50 @@ QJsonObject LedDeviceWled::getProperties(const QJsonObject& params)
Warning (_log, "%s get properties failed with error: '%s'", QSTRING_CSTR(_activeDeviceType), QSTRING_CSTR(response.getErrorReason()));
}
properties.insert("properties", response.getBody().object());
QJsonObject propertiesDetails = response.getBody().object();
propertiesDetails.insert("maxLedCount", UDP_MAX_LED_NUM);
Debug(_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
properties.insert("properties", propertiesDetails);
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
}
return properties;
}
void LedDeviceWled::identify(const QJsonObject& /*params*/)
void LedDeviceWled::identify(const QJsonObject& params)
{
#if 0
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QString host = params["host"].toString("");
if ( !host.isEmpty() )
QString hostName = params["host"].toString("");
if ( !hostName.isEmpty() )
{
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
if ( addressparts.size() > 1)
{
apiPort = addressparts[1].toInt();
}
else
{
apiPort = API_DEFAULT_PORT;
}
// TODO: WLED::identify - Replace with valid identification code
initRestAPI(apiHost, apiPort);
// initRestAPI(apiHost, apiPort);
_isRestoreOrigState = true;
storeState();
// QString resource = QString("%1/%2/%3").arg( API_LIGHTS ).arg( lightId ).arg( API_STATE);
// _restApi->setPath(resource);
QString request = getOnOffRequest(true) + "," + getLorRequest(1) + "," + getEffectRequest(25);
sendStateUpdateRequest(request);
// QString stateCmd;
// stateCmd += QString("\"%1\":%2,").arg( API_STATE_ON ).arg( API_STATE_VALUE_TRUE );
// stateCmd += QString("\"%1\":\"%2\"").arg( "alert" ).arg( "select" );
// stateCmd = "{" + stateCmd + "}";
wait(DEFAULT_IDENTIFY_TIME);
// // Perform request
// httpResponse response = _restApi->put(stateCmd);
// if ( response.error() )
// {
// Warning (_log, "%s identification failed with error: '%s'", QSTRING_CSTR(_activeDeviceType), QSTRING_CSTR(response.getErrorReason()));
// }
restoreState();
}
#endif
}
int LedDeviceWled::write(const std::vector<ColorRgb> &ledValues)
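The WLED requests above are assembled as comma-joined fragments ("on"/"live", optional "bri", optional "udpn") and only wrapped in braces when sent, which is why getOnOffRequest() no longer returns a complete JSON object. A standard-library sketch of that composition (the project builds the same strings through QString):

#include <iostream>
#include <string>

std::string onOffFragment(bool isOn)
{
    const std::string v = isOn ? "true" : "false";
    return "\"on\":" + v + ",\"live\":" + v; // switch power and live (streaming) mode together
}

std::string brightnessFragment(int bri)
{
    return "\"bri\":" + std::to_string(bri);
}

std::string udpnFragment(bool send, bool recv)
{
    return std::string("\"udpn\":{\"send\":") + (send ? "true" : "false") +
           ",\"recv\":" + (recv ? "true" : "false") + "}";
}

int main()
{
    // Power-on with brightness overwrite and UDP sync disabled, wrapped for the PUT request
    std::string cmd = onOffFragment(true) + "," + brightnessFragment(128) + "," + udpnFragment(false, false);
    std::cout << "{" << cmd << "}" << std::endl;
    return 0;
}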

View File

@@ -8,8 +8,6 @@
///
/// Implementation of a WLED-device
/// ...
///
///
class LedDeviceWled : public ProviderUdp
{
@@ -105,6 +103,25 @@ protected:
///
bool powerOff() override;
///
/// @brief Store the device's original state.
///
/// Save the device's state before hyperion color streaming starts, allowing the state to be restored during switchOff().
///
/// @return True if success
///
bool storeState() override;
///
/// @brief Restore the device's original state.
///
/// Restore the device's state as before hyperion color streaming started.
/// This includes the on/off state of the device.
///
/// @return True, if success
///
bool restoreState() override;
private:
///
@@ -124,11 +141,27 @@ private:
///
QString getOnOffRequest (bool isOn ) const;
QString getBrightnessRequest (int bri ) const;
QString getEffectRequest(int effect, int speed=128) const;
QString getLorRequest(int lor) const;
QString getUdpnRequest(bool send, bool recv) const;
bool sendStateUpdateRequest(const QString &request);
///REST-API wrapper
ProviderRestApi* _restApi;
QString _hostname;
int _apiPort;
QJsonObject _originalStateProperties;
bool _isBrightnessOverwrite;
int _brightness;
bool _isSyncOverwrite;
bool _originalStateUdpnSend;
bool _originalStateUdpnRecv;
};
#endif // LEDDEVICEWLED_H

View File

@@ -1,4 +1,4 @@
#include "LedDeviceYeelight.h"
#include "LedDeviceYeelight.h"
#include <ssdp/SSDPDiscover.h>
#include <utils/QStringUtils.h>
@@ -234,12 +234,12 @@ int YeelightLight::writeCommand( const QJsonDocument &command, QJsonArray &resul
if ( ! _tcpSocket->waitForBytesWritten(WRITE_TIMEOUT.count()) )
{
QString errorReason = QString ("(%1) %2").arg(_tcpSocket->error()).arg( _tcpSocket->errorString());
log ( 2, "Error:", "bytesWritten: [%ll], %s", bytesWritten, QSTRING_CSTR(errorReason));
log ( 2, "Error:", "bytesWritten: [%lld], %s", bytesWritten, QSTRING_CSTR(errorReason));
this->setInError ( errorReason );
}
else
{
log ( 3, "Success:", "Bytes written [%ll]", bytesWritten );
log ( 3, "Success:", "Bytes written [%lld]", bytesWritten );
// Avoid to overrun the Yeelight Command Quota
qint64 elapsedTime = QDateTime::currentMSecsSinceEpoch() - _lastWriteTime;
@@ -258,7 +258,7 @@ int YeelightLight::writeCommand( const QJsonDocument &command, QJsonArray &resul
{
do
{
log ( 3, "Reading:", "Bytes available [%ll]", _tcpSocket->bytesAvailable() );
log ( 3, "Reading:", "Bytes available [%lld]", _tcpSocket->bytesAvailable() );
while ( _tcpSocket->canReadLine() )
{
QByteArray response = _tcpSocket->readLine();
@@ -338,7 +338,7 @@ bool YeelightLight::streamCommand( const QJsonDocument &command )
{
int error = _tcpStreamSocket->error();
QString errorReason = QString ("(%1) %2").arg(error).arg( _tcpStreamSocket->errorString());
log ( 1, "Error:", "bytesWritten: [%ll], %s", bytesWritten, QSTRING_CSTR(errorReason));
log ( 1, "Error:", "bytesWritten: [%lld], %s", bytesWritten, QSTRING_CSTR(errorReason));
if ( error == QAbstractSocket::RemoteHostClosedError )
{
@@ -353,7 +353,7 @@ bool YeelightLight::streamCommand( const QJsonDocument &command )
}
else
{
log ( 3, "Success:", "Bytes written [%ll]", bytesWritten );
log ( 3, "Success:", "Bytes written [%lld]", bytesWritten );
rc = true;
}
}
@@ -956,7 +956,10 @@ void YeelightLight::log(int logLevel, const char* msg, const char* type, ...)
va_end(args);
std::string s = msg;
uint max = 20;
s.append(max - s.length(), ' ');
if (max > s.length())
{
s.append(max - s.length(), ' ');
}
Debug( _log, "%d|%15.15s| %s: %s", logLevel, QSTRING_CSTR(_name), s.c_str(), val);
}
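The %ll to %lld corrections above matter because "%ll" on its own is not a complete printf conversion, so the logged byte counts were undefined. qint64 is typically a long long, for which %lld is correct; the portable standard spelling uses the PRId64 macro. A tiny Qt-independent sketch:

#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main()
{
    const int64_t bytesWritten = 1470;
    std::printf("bytesWritten: [%lld]\n", static_cast<long long>(bytesWritten)); // matches the fixed format
    std::printf("bytesWritten: [%" PRId64 "]\n", bytesWritten);                  // portable alternative
    return 0;
}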
@@ -1015,10 +1018,9 @@ bool LedDeviceYeelight::init(const QJsonObject &deviceConfig)
//Get device specific configuration
bool ok;
if ( deviceConfig[ CONFIG_COLOR_MODEL ].isString() )
{
_outputColorModel = deviceConfig[ CONFIG_COLOR_MODEL ].toString().toInt(&ok,MODEL_RGB);
_outputColorModel = deviceConfig[ CONFIG_COLOR_MODEL ].toString(QString(MODEL_RGB)).toInt();
}
else
{
@@ -1027,7 +1029,7 @@ bool LedDeviceYeelight::init(const QJsonObject &deviceConfig)
if ( deviceConfig[ CONFIG_TRANS_EFFECT ].isString() )
{
_transitionEffect = static_cast<YeelightLight::API_EFFECT>( deviceConfig[ CONFIG_TRANS_EFFECT ].toString().toInt(&ok, YeelightLight::API_EFFECT_SMOOTH) );
_transitionEffect = static_cast<YeelightLight::API_EFFECT>( deviceConfig[ CONFIG_TRANS_EFFECT ].toString(QString(YeelightLight::API_EFFECT_SMOOTH)).toInt() );
}
else
{
@@ -1044,7 +1046,7 @@ bool LedDeviceYeelight::init(const QJsonObject &deviceConfig)
if ( deviceConfig[ CONFIG_DEBUGLEVEL ].isString() )
{
_debuglevel = deviceConfig[ CONFIG_DEBUGLEVEL ].toString().toInt();
_debuglevel = deviceConfig[ CONFIG_DEBUGLEVEL ].toString(QString("0")).toInt();
}
else
{
@@ -1076,12 +1078,12 @@ bool LedDeviceYeelight::init(const QJsonObject &deviceConfig)
int configuredYeelightsCount = 0;
for (const QJsonValueRef light : configuredYeelightLights)
{
QString host = light.toObject().value("host").toString();
QString hostName = light.toObject().value("host").toString();
int port = light.toObject().value("port").toInt(API_DEFAULT_PORT);
if ( !host.isEmpty() )
if ( !hostName.isEmpty() )
{
QString name = light.toObject().value("name").toString();
Debug(_log, "Light [%u] - %s (%s:%d)", configuredYeelightsCount, QSTRING_CSTR(name), QSTRING_CSTR(host), port );
Debug(_log, "Light [%u] - %s (%s:%d)", configuredYeelightsCount, QSTRING_CSTR(name), QSTRING_CSTR(hostName), port );
++configuredYeelightsCount;
}
}
@@ -1107,10 +1109,10 @@ bool LedDeviceYeelight::init(const QJsonObject &deviceConfig)
_lightsAddressList.clear();
for (int j = 0; j < static_cast<int>( configuredLedCount ); ++j)
{
QString address = configuredYeelightLights[j].toObject().value("host").toString();
QString hostName = configuredYeelightLights[j].toObject().value("host").toString();
int port = configuredYeelightLights[j].toObject().value("port").toInt(API_DEFAULT_PORT);
QStringList addressparts = QStringUtils::split(address,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort = port;
@@ -1347,14 +1349,10 @@ bool LedDeviceYeelight::restoreState()
return rc;
}
QJsonObject LedDeviceYeelight::discover(const QJsonObject& /*params*/)
QJsonArray LedDeviceYeelight::discover()
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
QJsonArray deviceList;
// Discover Yeelight Devices
SSDPDiscover discover;
discover.setPort(SSDP_PORT);
discover.skipDuplicateKeys(true);
@@ -1365,25 +1363,36 @@ QJsonObject LedDeviceYeelight::discover(const QJsonObject& /*params*/)
{
deviceList = discover.getServicesDiscoveredJson();
}
return deviceList;
}
QJsonObject LedDeviceYeelight::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
QString discoveryMethod("ssdp");
QJsonArray deviceList;
deviceList = discover();
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
return devicesDiscovered;
}
QJsonObject LedDeviceYeelight::getProperties(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QJsonObject properties;
QString apiHostname = params["hostname"].toString("");
QString hostName = params["hostname"].toString("");
quint16 apiPort = static_cast<quint16>( params["port"].toInt(API_DEFAULT_PORT) );
Debug (_log, "apiHost [%s], apiPort [%d]", QSTRING_CSTR(apiHostname), apiPort);
if ( !apiHostname.isEmpty() )
if ( !hostName.isEmpty() )
{
YeelightLight yeelight(_log, apiHostname, apiPort);
YeelightLight yeelight(_log, hostName, apiPort);
//yeelight.setDebuglevel(3);
if ( yeelight.open() )
@@ -1399,15 +1408,15 @@ QJsonObject LedDeviceYeelight::getProperties(const QJsonObject& params)
void LedDeviceYeelight::identify(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QString apiHostname = params["hostname"].toString("");
QString hostName = params["hostname"].toString("");
quint16 apiPort = static_cast<quint16>( params["port"].toInt(API_DEFAULT_PORT) );
Debug (_log, "apiHost [%s], apiPort [%d]", QSTRING_CSTR(apiHostname), apiPort);
Debug (_log, "apiHost [%s], apiPort [%d]", QSTRING_CSTR(hostName), apiPort);
if ( !apiHostname.isEmpty() )
if ( !hostName.isEmpty() )
{
YeelightLight yeelight(_log, apiHostname, apiPort);
YeelightLight yeelight(_log, hostName, apiPort);
//yeelight.setDebuglevel(3);
if ( yeelight.open() )

View File

@@ -591,6 +591,14 @@ private:
///
uint getLightsCount() const { return _lightsCount; }
///
/// @brief Discover Yeelight devices available (for configuration).
/// Yeelight specific UDP Broadcast discovery
///
/// @return A JSON structure holding a list of devices found
///
QJsonArray discover();
/// Array of the Yeelight addresses handled by the LED-device
QVector<yeelightAddress> _lightsAddressList;

View File

@@ -5,9 +5,11 @@
#include <QEventLoop>
#include <QNetworkReply>
#include <QByteArray>
#include <QJsonObject>
//std includes
#include <iostream>
#include <chrono>
// Constants
namespace {
@@ -16,6 +18,13 @@ bool verbose = false;
const QChar ONE_SLASH = '/';
const int HTTP_STATUS_NO_CONTENT = 204;
const int HTTP_STATUS_BAD_REQUEST = 400;
const int HTTP_STATUS_UNAUTHORIZED = 401;
const int HTTP_STATUS_NOT_FOUND = 404;
constexpr std::chrono::milliseconds DEFAULT_REST_TIMEOUT{ 400 };
} //End of constants
ProviderRestApi::ProviderRestApi(const QString& host, int port, const QString& basePath)
@@ -64,7 +73,7 @@ void ProviderRestApi::appendPath(const QString& path)
appendPath(_path, path);
}
void ProviderRestApi::appendPath(QString& path, const QString& appendPath) const
void ProviderRestApi::appendPath ( QString& path, const QString &appendPath)
{
if (!appendPath.isEmpty() && appendPath != ONE_SLASH)
{
@@ -123,21 +132,27 @@ httpResponse ProviderRestApi::get()
httpResponse ProviderRestApi::get(const QUrl& url)
{
DebugIf(verbose,_log, "GET: [%s]", QSTRING_CSTR(url.toString()));
// Perform request
QNetworkRequest request(url);
QNetworkReply* reply = _networkManager->get(request);
// Connect requestFinished signal to quit slot of the loop.
QEventLoop loop;
QEventLoop::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
ReplyTimeout::set(reply, DEFAULT_REST_TIMEOUT.count());
// Go into the loop until the request is finished.
loop.exec();
httpResponse response;
if (reply->operation() == QNetworkAccessManager::GetOperation)
{
response = getResponse(reply);
if(reply->error() != QNetworkReply::NoError)
{
Debug(_log, "GET: [%s]", QSTRING_CSTR( url.toString() ));
}
response = getResponse(reply );
}
// Free space.
reply->deleteLater();
@@ -145,28 +160,37 @@ httpResponse ProviderRestApi::get(const QUrl& url)
return response;
}
httpResponse ProviderRestApi::put(const QString& body)
httpResponse ProviderRestApi::put(const QJsonObject &body)
{
return put(getUrl(), body);
return put( getUrl(), QJsonDocument(body).toJson(QJsonDocument::Compact));
}
httpResponse ProviderRestApi::put(const QUrl& url, const QString& body)
httpResponse ProviderRestApi::put(const QString &body)
{
DebugIf(verbose, _log, "PUT: [%s] [%s]", QSTRING_CSTR(url.toString()), QSTRING_CSTR(body));
// Perform request
QNetworkRequest request(_networkRequestHeaders);
request.setUrl(url);
return put( getUrl(), body.toUtf8() );
}
QNetworkReply* reply = _networkManager->put(request, body.toUtf8());
httpResponse ProviderRestApi::put(const QUrl &url, const QByteArray &body)
{
// Perform request
QNetworkRequest request(url);
QNetworkReply* reply = _networkManager->put(request, body);
// Connect requestFinished signal to quit slot of the loop.
QEventLoop loop;
QEventLoop::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
ReplyTimeout::set(reply, DEFAULT_REST_TIMEOUT.count());
// Go into the loop until the request is finished.
loop.exec();
httpResponse response;
if (reply->operation() == QNetworkAccessManager::PutOperation)
{
if(reply->error() != QNetworkReply::NoError)
{
Debug(_log, "PUT: [%s] [%s]", QSTRING_CSTR( url.toString() ),body.constData() );
}
response = getResponse(reply);
}
// Free space.
@@ -239,14 +263,11 @@ httpResponse ProviderRestApi::getResponse(QNetworkReply* const& reply)
int httpStatusCode = reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt();
response.setHttpStatusCode(httpStatusCode);
DebugIf(verbose, _log, "Reply.error [%d], Reply.httpStatusCode [%d]", reply->error(), httpStatusCode);
response.setNetworkReplyError(reply->error());
if (reply->error() == QNetworkReply::NoError)
{
if (httpStatusCode != 204) {
if ( httpStatusCode != HTTP_STATUS_NO_CONTENT ){
QByteArray replyData = reply->readAll();
if (!replyData.isEmpty())
@@ -275,18 +296,19 @@ httpResponse ProviderRestApi::getResponse(QNetworkReply* const& reply)
}
else
{
Debug(_log, "Reply.httpStatusCode [%d]", httpStatusCode );
QString errorReason;
if (httpStatusCode > 0) {
QString httpReason = reply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
QString advise;
switch (httpStatusCode) {
case 400:
switch ( httpStatusCode ) {
case HTTP_STATUS_BAD_REQUEST:
advise = "Check Request Body";
break;
case 401:
case HTTP_STATUS_UNAUTHORIZED:
advise = "Check Authentication Token (API Key)";
break;
case 404:
case HTTP_STATUS_NOT_FOUND:
advise = "Check Resource given";
break;
default:
@@ -295,10 +317,20 @@ httpResponse ProviderRestApi::getResponse(QNetworkReply* const& reply)
errorReason = QString("[%3 %4] - %5").arg(QString(httpStatusCode), httpReason, advise);
}
else {
errorReason = reply->errorString();
if ( reply->error() == QNetworkReply::OperationCanceledError )
{
//Do not report errors caused by request cancellation because of timeouts
Debug(_log, "Reply: [%s]", QSTRING_CSTR(errorReason) );
}
else
{
response.setError(true);
response.setErrorReason(errorReason);
}
}
response.setError(true);
response.setErrorReason(errorReason);
// Create valid body which is empty
response.setBody(QJsonDocument());
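The error handling above maps well-known HTTP status codes to a short hint for the user and, with the added timeout support, deliberately does not flag request cancellations (QNetworkReply::OperationCanceledError) as device errors. A plain sketch of the status-to-advice mapping, with codes and texts as in the diff:

#include <iostream>
#include <string>

std::string adviceForStatus(int httpStatusCode)
{
    switch (httpStatusCode)
    {
    case 400: return "Check Request Body";
    case 401: return "Check Authentication Token (API Key)";
    case 404: return "Check Resource given";
    default:  return {};
    }
}

int main()
{
    for (int code : {400, 401, 404, 500})
    {
        const std::string advice = adviceForStatus(code);
        std::cout << code << ": " << (advice.empty() ? "no specific advice" : advice) << std::endl;
    }
    return 0;
}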

View File

@@ -10,6 +10,48 @@
#include <QUrlQuery>
#include <QJsonDocument>
#include <QBasicTimer>
#include <QTimerEvent>
//Set QNetworkReply timeout without external timer
//https://stackoverflow.com/questions/37444539/how-to-set-qnetworkreply-timeout-without-external-timer
class ReplyTimeout : public QObject {
Q_OBJECT
public:
enum HandleMethod { Abort, Close };
ReplyTimeout(QNetworkReply* reply, const int timeout, HandleMethod method = Abort) :
QObject(reply), m_method(method)
{
Q_ASSERT(reply);
if (reply && reply->isRunning()) {
m_timer.start(timeout, this);
connect(reply, &QNetworkReply::finished, this, &QObject::deleteLater);
}
}
static void set(QNetworkReply* reply, const int timeout, HandleMethod method = Abort)
{
new ReplyTimeout(reply, timeout, method);
}
protected:
QBasicTimer m_timer;
HandleMethod m_method;
void timerEvent(QTimerEvent * ev) override {
if (!m_timer.isActive() || ev->timerId() != m_timer.timerId())
return;
auto reply = static_cast<QNetworkReply*>(parent());
if (reply->isRunning())
{
if (m_method == Close)
reply->close();
else if (m_method == Abort)
reply->abort();
m_timer.stop();
}
}
};
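ReplyTimeout parents itself to the reply, arms a QBasicTimer, and aborts (or closes) the reply if it is still running when the timer fires; because the reply is its QObject parent, the helper is destroyed together with the reply. A hedged usage sketch, assuming Qt and the class above are available (URL and timeout value are placeholders):

#include <QCoreApplication>
#include <QEventLoop>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QUrl>

int main(int argc, char* argv[])
{
    QCoreApplication app(argc, argv);

    QNetworkAccessManager manager;
    QNetworkReply* reply = manager.get(QNetworkRequest(QUrl("http://192.168.0.2/json/state")));

    ReplyTimeout::set(reply, 400); // abort the request if no answer within 400 ms

    QEventLoop loop; // wait for finished(), like ProviderRestApi::get() does
    QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
    loop.exec();

    const bool timedOut = (reply->error() == QNetworkReply::OperationCanceledError);
    reply->deleteLater();
    return timedOut ? 1 : 0;
}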
///
/// Response object for REST-API calls and JSON-responses
///
@@ -171,6 +213,13 @@ public:
///
httpResponse get(const QUrl& url);
/// @brief Execute PUT request
///
/// @param[in] body The body of the request in JSON
/// @return Response The body of the response in JSON
///
httpResponse put(const QJsonObject &body);
///
/// @brief Execute PUT request
///
@@ -186,15 +235,7 @@ public:
/// @param[in] body The body of the request in JSON
/// @return Response The body of the response in JSON
///
httpResponse put(const QUrl& url, const QString& body = "");
///
/// @brief Execute POST request
///
/// @param[in] body The body of the request in JSON
/// @return Response The body of the response in JSON
///
httpResponse post(const QString& body = "");
httpResponse put(const QUrl &url, const QByteArray &body);
///
/// @brief Execute POST request
@@ -243,7 +284,7 @@ private:
/// @param[in/out] path to be updated
/// @param[in] path, element to be appended
///
void appendPath(QString& path, const QString& appendPath) const;
static void appendPath (QString &path, const QString &appendPath) ;
Logger* _log;

View File

@@ -11,6 +11,7 @@
// Local Hyperion includes
#include "ProviderUdpSSL.h"
#include <utils/QStringUtils.h>
const int MAX_RETRY = 5;
const ushort MAX_PORT_SSL = 65535;
@@ -73,6 +74,10 @@ bool ProviderUdpSSL::init(const QJsonObject &deviceConfig)
if( deviceConfig.contains("hs_attempts") ) _handshake_attempts = deviceConfig["hs_attempts"].toInt(5);
QString host = deviceConfig["host"].toString(_defaultHost);
//Split hostname from API-port in case given
QStringList addressparts = QStringUtils::split(host, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString udpHost = addressparts[0];
QStringList debugLevels = QStringList() << "No Debug" << "Error" << "State Change" << "Informational" << "Verbose";
configLog( "SSL Streamer Debug", "%s", ( _debugStreamer ) ? "yes" : "no" );
@@ -91,24 +96,24 @@ bool ProviderUdpSSL::init(const QJsonObject &deviceConfig)
configLog( "SSL Handshake Timeout max", "%d", _handshake_timeout_max );
configLog( "SSL Handshake attempts", "%d", _handshake_attempts );
if ( _address.setAddress(host) )
if ( _address.setAddress(udpHost) )
{
Debug( _log, "Successfully parsed %s as an ip address.", QSTRING_CSTR( host ) );
Debug( _log, "Successfully parsed %s as an ip address.", QSTRING_CSTR(udpHost) );
}
else
{
Debug( _log, "Failed to parse [%s] as an ip address.", QSTRING_CSTR( host ) );
QHostInfo info = QHostInfo::fromName(host);
Debug( _log, "Failed to parse [%s] as an ip address.", QSTRING_CSTR(udpHost) );
QHostInfo info = QHostInfo::fromName(udpHost);
if ( info.addresses().isEmpty() )
{
Debug( _log, "Failed to parse [%s] as a hostname.", QSTRING_CSTR( host ) );
Debug( _log, "Failed to parse [%s] as a hostname.", QSTRING_CSTR(udpHost) );
QString errortext = QString("Invalid target address [%1]!").arg(host);
this->setInError( errortext );
isInitOK = false;
}
else
{
Debug( _log, "Successfully parsed %s as a hostname.", QSTRING_CSTR( host ) );
Debug( _log, "Successfully parsed %s as a hostname.", QSTRING_CSTR(udpHost) );
_address = info.addresses().first();
}
}

View File

@@ -114,7 +114,7 @@ int LedDeviceFile::write(const std::vector<ColorRgb> & ledValues)
}
#if (QT_VERSION >= QT_VERSION_CHECK(5, 14, 0))
out << "]" << Qt::endl;
out << QString("]") << Qt::endl;
#else
out << "]" << endl;
#endif

View File

@@ -4,11 +4,22 @@
#include <csignal>
// QT includes
#include <QDir>
#include <QFile>
// Local LedDevice includes
#include "LedDevicePiBlaster.h"
// Constants
namespace {
const bool verbose = false;
// Pi-Blaster discovery service
const char DISCOVERY_DIRECTORY[] = "/dev/";
const char DISCOVERY_FILEPATTERN[] = "pi-blaster";
} //End of constants
LedDevicePiBlaster::LedDevicePiBlaster(const QJsonObject &deviceConfig)
: LedDevice(deviceConfig)
, _fid(nullptr)
@@ -184,3 +195,31 @@ int LedDevicePiBlaster::write(const std::vector<ColorRgb> & ledValues)
return 0;
}
QJsonObject LedDevicePiBlaster::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
QJsonArray deviceList;
QDir deviceDirectory (DISCOVERY_DIRECTORY);
QStringList deviceFilter(DISCOVERY_FILEPATTERN);
deviceDirectory.setNameFilters(deviceFilter);
deviceDirectory.setSorting(QDir::Name);
QFileInfoList deviceFiles = deviceDirectory.entryInfoList(QDir::System);
QFileInfoList::const_iterator deviceFileIterator;
for (deviceFileIterator = deviceFiles.constBegin(); deviceFileIterator != deviceFiles.constEnd(); ++deviceFileIterator)
{
QJsonObject deviceInfo;
deviceInfo.insert("deviceName", (*deviceFileIterator).fileName());
deviceInfo.insert("systemLocation", (*deviceFileIterator).absoluteFilePath());
deviceList.append(deviceInfo);
}
devicesDiscovered.insert("devices", deviceList);
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return devicesDiscovered;
}
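The discovery added here simply lists /dev entries matching the pi-blaster name and reports each as a device candidate. An equivalent standalone sketch with std::filesystem (C++17), independent of Qt:

#include <filesystem>
#include <iostream>
#include <string>

int main()
{
    namespace fs = std::filesystem;
    const std::string pattern = "pi-blaster";

    std::error_code ec;
    for (const auto& entry : fs::directory_iterator("/dev", ec))
    {
        const std::string name = entry.path().filename().string();
        if (name.find(pattern) != std::string::npos)
        {
            std::cout << "deviceName: " << name
                      << ", systemLocation: " << entry.path().string() << std::endl;
        }
    }
    if (ec)
    {
        std::cerr << "Could not read /dev: " << ec.message() << std::endl;
    }
    return 0;
}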

View File

@@ -29,6 +29,12 @@ public:
/// @return LedDevice constructed
static LedDevice* construct(const QJsonObject &deviceConfig);
/// @param[in] params Parameters used to overwrite discovery default behaviour
///
/// @return A JSON structure holding a list of devices found
///
QJsonObject discover(const QJsonObject& params) override;
protected:
///

View File

@@ -1,4 +1,10 @@
#include "LedDeviceWS281x.h"
#include <utils/SysInfo.h>
// Constants
namespace {
const bool verbose = false;
} //End of constants
LedDeviceWS281x::LedDeviceWS281x(const QJsonObject &deviceConfig)
: LedDevice(deviceConfig)
@@ -75,19 +81,26 @@ int LedDeviceWS281x::open()
int retval = -1;
_isDeviceReady = false;
// Try to open the LedDevice
ws2811_return_t rc = ws2811_init(&_led_string);
if ( rc != WS2811_SUCCESS )
if (!SysInfo::isUserAdmin())
{
QString errortext = QString ("Failed to open. Error message: %1").arg( ws2811_get_return_t_str(rc) );
QString errortext = QString ("Hyperion must run with \"root\" privileges for this device. Current user is: \"%1\"").arg(SysInfo::userName());
this->setInError( errortext );
}
else
{
// Everything is OK, device is ready
_isDeviceReady = true;
retval = 0;
// Try to open the LedDevice
ws2811_return_t rc = ws2811_init(&_led_string);
if ( rc != WS2811_SUCCESS )
{
QString errortext = QString ("Failed to open. Error message: %1").arg( ws2811_get_return_t_str(rc) );
this->setInError( errortext );
}
else
{
// Everything is OK, device is ready
_isDeviceReady = true;
retval = 0;
}
}
return retval;
}
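Opening the ws281x device now fails early with a clear message when Hyperion is not running as root, instead of letting ws2811_init() fail more cryptically. On POSIX systems such a check commonly boils down to the effective user id; a minimal sketch of that idea (the project's SysInfo::isUserAdmin() may be implemented differently):

#include <iostream>
#include <unistd.h> // POSIX only: geteuid()

// Returns true when the process runs with root privileges (effective uid 0)
bool isRunningAsRoot()
{
    return geteuid() == 0;
}

int main()
{
    if (!isRunningAsRoot())
    {
        std::cerr << "This device requires root privileges." << std::endl;
        return 1;
    }
    std::cout << "Running as root, device can be initialised." << std::endl;
    return 0;
}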
@@ -138,3 +151,22 @@ int LedDeviceWS281x::write(const std::vector<ColorRgb> &ledValues)
return ws2811_render(&_led_string) ? -1 : 0;
}
QJsonObject LedDeviceWS281x::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType);
QJsonArray deviceList;
if (SysInfo::isUserAdmin())
{
// Indicate the general availability of the device if Hyperion runs as root
deviceList << QJsonObject ({{"found",true}});
devicesDiscovered.insert("devices", deviceList);
}
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return devicesDiscovered;
}
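
Discovery for ws281x hardware cannot probe a bus, so the code above only reports a generic "found" entry, and only when Hyperion runs with root privileges. A minimal sketch of how a caller might interpret that response, assuming the object layout produced above:

// Sketch (assumption: 'response' has the shape returned by
// LedDeviceWS281x::discover() above). True only when discovery reported
// availability, i.e. the process ran as root.
#include <QJsonArray>
#include <QJsonObject>

bool ws281xAvailable(const QJsonObject& response)
{
	const QJsonArray devices = response["devices"].toArray();
	if (devices.isEmpty())
	{
		return false; // discovery ran without root privileges
	}
	return devices.first().toObject()["found"].toBool(false);
}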

View File

@@ -29,6 +29,15 @@ public:
///
static LedDevice* construct(const QJsonObject &deviceConfig);
///
/// @brief Discover WS281x devices available (for configuration).
///
/// @param[in] params Parameters used to overwrite discovery default behaviour
///
/// @return A JSON structure holding a list of devices found
///
QJsonObject discover(const QJsonObject& params) override;
protected:
///

View File

@@ -1,6 +1,10 @@
// hyperion local includes
#include "LedDeviceAtmo.h"
namespace {
const bool verbose = false;
} //End of constants
LedDeviceAtmo::LedDeviceAtmo(const QJsonObject &deviceConfig)
: ProviderRs232(deviceConfig)
{
@@ -43,3 +47,20 @@ int LedDeviceAtmo::write(const std::vector<ColorRgb> &ledValues)
memcpy(4 + _ledBuffer.data(), ledValues.data(), _ledCount * sizeof(ColorRgb));
return writeBytes(_ledBuffer.size(), _ledBuffer.data());
}
QJsonObject LedDeviceAtmo::getProperties(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
QString serialPort = params["serialPort"].toString("");
QJsonObject propertiesDetails;
QJsonArray possibleLedCounts = { 5 };
propertiesDetails.insert("ledCount", possibleLedCounts);
properties.insert("properties", propertiesDetails);
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
return properties;
}
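
getProperties() above advertises the only channel count the Atmo protocol supports rather than probing the serial port. A hedged sketch of how a configuration front end might read that list, assuming the "properties"/"ledCount" layout shown above:

// Sketch (assumption: 'props' is the object returned by getProperties(),
// i.e. { "properties": { "ledCount": [ 5 ] } }).
#include <QJsonArray>
#include <QJsonObject>
#include <QJsonValue>
#include <QList>

QList<int> supportedLedCounts(const QJsonObject& props)
{
	QList<int> counts;
	const QJsonArray ledCounts = props["properties"].toObject()["ledCount"].toArray();
	for (const QJsonValue& value : ledCounts)
	{
		counts.append(value.toInt());
	}
	return counts;
}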

View File

@@ -23,6 +23,14 @@ public:
///
static LedDevice* construct(const QJsonObject &deviceConfig);
///
/// @brief Get an Atmo device's resource properties
///
/// @param[in] params Parameters to query device
/// @return A JSON structure holding the device's properties
///
QJsonObject getProperties(const QJsonObject& params) override;
private:
///

View File

@@ -1,27 +1,31 @@
// hyperion local includes
#include "LedDeviceKarate.h"
LedDeviceKarate::LedDeviceKarate(const QJsonObject &deviceConfig)
namespace {
const bool verbose = false;
} //End of constants
LedDeviceKarate::LedDeviceKarate(const QJsonObject& deviceConfig)
: ProviderRs232(deviceConfig)
{
}
LedDevice* LedDeviceKarate::construct(const QJsonObject &deviceConfig)
LedDevice* LedDeviceKarate::construct(const QJsonObject& deviceConfig)
{
return new LedDeviceKarate(deviceConfig);
}
bool LedDeviceKarate::init(const QJsonObject &deviceConfig)
bool LedDeviceKarate::init(const QJsonObject& deviceConfig)
{
bool isInitOK = false;
// Initialise sub-class
if ( ProviderRs232::init(deviceConfig) )
if (ProviderRs232::init(deviceConfig))
{
if (_ledCount != 8 && _ledCount != 16)
{
//Error( _log, "%d channels configured. This should always be 16!", _ledCount);
QString errortext = QString ("%1 channels configured. This should always be 8 or 16!").arg(_ledCount);
QString errortext = QString("%1 channels configured. This should always be 8 or 16!").arg(_ledCount);
this->setInError(errortext);
isInitOK = false;
}
@@ -33,8 +37,8 @@ bool LedDeviceKarate::init(const QJsonObject &deviceConfig)
_ledBuffer[2] = 0x00; // Checksum
_ledBuffer[3] = _ledCount * 3; // Number of Databytes send
Debug( _log, "Karatelight header for %d leds: 0x%02x 0x%02x 0x%02x 0x%02x", _ledCount,
_ledBuffer[0], _ledBuffer[1], _ledBuffer[2], _ledBuffer[3] );
Debug(_log, "Karatelight header for %d leds: 0x%02x 0x%02x 0x%02x 0x%02x", _ledCount,
_ledBuffer[0], _ledBuffer[1], _ledBuffer[2], _ledBuffer[3]);
isInitOK = true;
}
@@ -42,20 +46,37 @@ bool LedDeviceKarate::init(const QJsonObject &deviceConfig)
return isInitOK;
}
int LedDeviceKarate::write(const std::vector<ColorRgb> &ledValues)
int LedDeviceKarate::write(const std::vector<ColorRgb>& ledValues)
{
for (signed iLed=0; iLed< static_cast<int>(_ledCount); iLed++)
{
const ColorRgb& rgb = ledValues[iLed];
_ledBuffer[iLed*3+4] = rgb.green;
_ledBuffer[iLed*3+5] = rgb.blue;
_ledBuffer[iLed*3+6] = rgb.red;
}
for (signed iLed = 0; iLed < static_cast<int>(_ledCount); iLed++)
{
const ColorRgb& rgb = ledValues[iLed];
_ledBuffer[iLed * 3 + 4] = rgb.green;
_ledBuffer[iLed * 3 + 5] = rgb.blue;
_ledBuffer[iLed * 3 + 6] = rgb.red;
}
// Calc Checksum
_ledBuffer[2] = _ledBuffer[0] ^ _ledBuffer[1];
for (unsigned int i = 3; i < _ledBuffer.size(); i++)
_ledBuffer[2] ^= _ledBuffer[i];
_ledBuffer[2] = _ledBuffer[0] ^ _ledBuffer[1];
for (unsigned int i = 3; i < _ledBuffer.size(); i++)
_ledBuffer[2] ^= _ledBuffer[i];
return writeBytes(_ledBuffer.size(), _ledBuffer.data());
}
QJsonObject LedDeviceKarate::getProperties(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
QString serialPort = params["serialPort"].toString("");
QJsonObject propertiesDetails;
QJsonArray possibleLedCounts = { 16, 8 };
propertiesDetails.insert("ledCount", possibleLedCounts);
properties.insert("properties", propertiesDetails);
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
return properties;
}
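
Besides the new getProperties(), the reformatted write() above recomputes the frame checksum on every update as the XOR of the two header bytes and every byte from offset 3 onward. A standalone sketch of that calculation; the frame values in the usage comment are made-up illustrations, not a captured KarateLight frame:

// Sketch: XOR checksum as computed in LedDeviceKarate::write() above.
// Index 2 is the checksum slot; it is seeded from bytes 0 and 1 and then
// XORed with every byte from index 3 to the end of the buffer.
#include <cstdint>
#include <vector>

uint8_t karateChecksum(const std::vector<uint8_t>& frame)
{
	uint8_t checksum = frame[0] ^ frame[1];
	for (size_t i = 3; i < frame.size(); ++i)
	{
		checksum ^= frame[i];
	}
	return checksum;
}

// Hypothetical usage (header bytes and payload are examples only):
//   std::vector<uint8_t> frame = { 0xAA, 0x12, 0x00, 0x06, /* payload */ 1, 2, 3, 4, 5, 6 };
//   frame[2] = karateChecksum(frame);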

View File

@@ -26,6 +26,14 @@ public:
/// @return LedDevice constructed
static LedDevice* construct(const QJsonObject &deviceConfig);
///
/// @brief Get a Karate device's resource properties
///
/// @param[in] params Parameters to query device
/// @return A JSON structure holding the device's properties
///
QJsonObject getProperties(const QJsonObject& params) override;
private:
///

View File

@@ -2,18 +2,30 @@
// LedDevice includes
#include <leddevice/LedDevice.h>
#include "ProviderRs232.h"
#include <utils/WaitTime.h>
// qt includes
#include <QSerialPortInfo>
#include <QEventLoop>
#include <QDir>
#include <chrono>
// Constants
constexpr std::chrono::milliseconds WRITE_TIMEOUT{1000}; // device write timeout in ms
constexpr std::chrono::milliseconds OPEN_TIMEOUT{5000}; // device open timeout in ms
const int MAX_WRITE_TIMEOUTS = 5; // Maximum number of allowed timeouts
const int NUM_POWEROFF_WRITE_BLACK = 2; // Number of write "BLACK" during powering off
namespace {
const bool verbose = false;
constexpr std::chrono::milliseconds WRITE_TIMEOUT{ 1000 }; // device write timeout in ms
constexpr std::chrono::milliseconds OPEN_TIMEOUT{ 5000 }; // device open timeout in ms
const int MAX_WRITE_TIMEOUTS = 5; // Maximum number of allowed timeouts
const int NUM_POWEROFF_WRITE_BLACK = 2; // Number of write "BLACK" during powering off
constexpr std::chrono::milliseconds DEFAULT_IDENTIFY_TIME{ 500 };
// tty discovery service
const char DISCOVERY_DIRECTORY[] = "/dev/";
const char DISCOVERY_FILEPATTERN[] = "tty*";
} //End of constants
ProviderRs232::ProviderRs232(const QJsonObject &deviceConfig)
: LedDevice(deviceConfig)
@@ -40,16 +52,26 @@ bool ProviderRs232::init(const QJsonObject &deviceConfig)
Debug(_log, "LatchTime : %d", this->getLatchTime());
_deviceName = deviceConfig["output"].toString("auto");
_isAutoDeviceName = _deviceName.toLower() == "auto";
// If the device name was given as a Unix /dev/ system location, derive the port name
if ( _deviceName.startsWith(QLatin1String("/dev/")) )
{
_location = _deviceName;
//Handle udev devices
QFileInfo file_info(_deviceName);
if (file_info.isSymLink())
{
_deviceName = file_info.symLinkTarget();
}
_deviceName = _deviceName.mid(5);
}
_isAutoDeviceName = _deviceName.toLower() == "auto";
_baudRate_Hz = deviceConfig["rate"].toInt();
_delayAfterConnect_ms = deviceConfig["delayAfterConnect"].toInt(1500);
Debug(_log, "deviceName : %s", QSTRING_CSTR(_deviceName));
Debug(_log, "DeviceName : %s", QSTRING_CSTR(_deviceName));
DebugIf(!_location.isEmpty(), _log, "Location : %s", QSTRING_CSTR(_location));
Debug(_log, "AutoDevice : %d", _isAutoDeviceName);
Debug(_log, "baudRate_Hz : %d", _baudRate_Hz);
Debug(_log, "delayAfCon ms: %d", _delayAfterConnect_ms);
@@ -132,7 +154,14 @@ bool ProviderRs232::tryOpen(int delayAfterConnect_ms)
if (!_rs232Port.isOpen())
{
Info(_log, "Opening UART: %s", QSTRING_CSTR(_deviceName));
if (!_location.isEmpty())
{
Info(_log, "Opening UART: %s (%s)", QSTRING_CSTR(_deviceName), QSTRING_CSTR(_location));
}
else
{
Info(_log, "Opening UART: %s", QSTRING_CSTR(_deviceName));
}
_frameDropCounter = 0;
@@ -141,18 +170,16 @@ bool ProviderRs232::tryOpen(int delayAfterConnect_ms)
Debug(_log, "_rs232Port.open(QIODevice::ReadWrite): %s, Baud rate [%d]bps", QSTRING_CSTR(_deviceName), _baudRate_Hz);
QSerialPortInfo serialPortInfo(_deviceName);
QJsonObject portInfo;
Debug(_log, "portName: %s", QSTRING_CSTR(serialPortInfo.portName()));
Debug(_log, "systemLocation: %s", QSTRING_CSTR(serialPortInfo.systemLocation()));
Debug(_log, "description: %s", QSTRING_CSTR(serialPortInfo.description()));
Debug(_log, "manufacturer: %s", QSTRING_CSTR(serialPortInfo.manufacturer()));
Debug(_log, "productIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.productIdentifier(), 0, 16)));
Debug(_log, "vendorIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.vendorIdentifier(), 0, 16)));
Debug(_log, "serialNumber: %s", QSTRING_CSTR(serialPortInfo.serialNumber()));
if (!serialPortInfo.isNull() )
{
Debug(_log, "portName: %s", QSTRING_CSTR(serialPortInfo.portName()));
Debug(_log, "systemLocation: %s", QSTRING_CSTR(serialPortInfo.systemLocation()));
Debug(_log, "description: %s", QSTRING_CSTR(serialPortInfo.description()));
Debug(_log, "manufacturer: %s", QSTRING_CSTR(serialPortInfo.manufacturer()));
Debug(_log, "vendorIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.vendorIdentifier(), 0, 16)));
Debug(_log, "productIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.productIdentifier(), 0, 16)));
Debug(_log, "serialNumber: %s", QSTRING_CSTR(serialPortInfo.serialNumber()));
if ( !_rs232Port.open(QIODevice::ReadWrite) )
{
this->setInError(_rs232Port.errorString());
@@ -161,8 +188,20 @@ bool ProviderRs232::tryOpen(int delayAfterConnect_ms)
}
else
{
QString errortext = QString("Invalid serial device name: [%1]!").arg(_deviceName);
QString errortext = QString("Invalid serial device name: %1 %2!").arg(_deviceName, _location);
this->setInError( errortext );
// List available devices
for (auto &port : QSerialPortInfo::availablePorts() ) {
Debug(_log, "Avail. serial device: [%s]-(%s|%s), Manufacturer: %s, Description: %s",
QSTRING_CSTR(port.portName()),
QSTRING_CSTR(QString("0x%1").arg(port.vendorIdentifier(), 0, 16)),
QSTRING_CSTR(QString("0x%1").arg(port.productIdentifier(), 0, 16)),
QSTRING_CSTR(port.manufacturer()),
QSTRING_CSTR(port.description())
);
}
return false;
}
}
@@ -215,7 +254,7 @@ int ProviderRs232::writeBytes(const qint64 size, const uint8_t *data)
{
if ( _rs232Port.error() == QSerialPort::TimeoutError )
{
Debug(_log, "Timeout after %dms: %d frames already dropped", WRITE_TIMEOUT, _frameDropCounter);
Debug(_log, "Timeout after %dms: %d frames already dropped", WRITE_TIMEOUT.count(), _frameDropCounter);
++_frameDropCounter;
@@ -245,7 +284,7 @@ int ProviderRs232::writeBytes(const qint64 size, const uint8_t *data)
QString ProviderRs232::discoverFirst()
{
// take first available USB serial port - currently no probing!
for (auto const & port : QSerialPortInfo::availablePorts())
for (auto & port : QSerialPortInfo::availablePorts())
{
if (!port.isNull() && !port.isBusy())
{
@@ -266,7 +305,7 @@ QJsonObject ProviderRs232::discover(const QJsonObject& /*params*/)
// Discover serial Devices
for (auto &port : QSerialPortInfo::availablePorts() )
{
if ( !port.isNull() )
if ( !port.isNull() && port.vendorIdentifier() != 0)
{
QJsonObject portInfo;
portInfo.insert("description", port.description());
@@ -281,6 +320,71 @@ QJsonObject ProviderRs232::discover(const QJsonObject& /*params*/)
}
}
#ifndef _WIN32
// Check all /dev/tty* files to see whether they are udev serial devices
QDir deviceDirectory (DISCOVERY_DIRECTORY);
QStringList deviceFilter(DISCOVERY_FILEPATTERN);
deviceDirectory.setNameFilters(deviceFilter);
deviceDirectory.setSorting(QDir::Name);
QFileInfoList deviceFiles = deviceDirectory.entryInfoList(QDir::AllEntries);
QFileInfoList::const_iterator deviceFileIterator;
for (deviceFileIterator = deviceFiles.constBegin(); deviceFileIterator != deviceFiles.constEnd(); ++deviceFileIterator)
{
if ((*deviceFileIterator).isSymLink())
{
QSerialPortInfo port = QSerialPortInfo(QSerialPort((*deviceFileIterator).symLinkTarget()));
QJsonObject portInfo;
portInfo.insert("portName", (*deviceFileIterator).fileName());
portInfo.insert("systemLocation", (*deviceFileIterator).absoluteFilePath());
portInfo.insert("udev", true);
portInfo.insert("description", port.description());
portInfo.insert("manufacturer", port.manufacturer());
portInfo.insert("productIdentifier", QString("0x%1").arg(port.productIdentifier(), 0, 16));
portInfo.insert("serialNumber", port.serialNumber());
portInfo.insert("vendorIdentifier", QString("0x%1").arg(port.vendorIdentifier(), 0, 16));
deviceList.append(portInfo);
}
}
#endif
devicesDiscovered.insert("devices", deviceList);
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return devicesDiscovered;
}
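
The non-Windows branch above walks /dev/tty*, keeps only symlinks (typically created by udev rules) and resolves each alias so the report carries both the friendly name and the underlying port's metadata. A rough standalone sketch of that resolution step; /dev/ttyAMBILIGHT is a hypothetical udev alias, not a device from the repository:

// Sketch: resolve a udev alias the way the discovery loop above does.
#include <QFileInfo>
#include <QSerialPort>
#include <QSerialPortInfo>
#include <QString>

QString describeAlias(const QString& aliasPath) // e.g. "/dev/ttyAMBILIGHT" (hypothetical)
{
	QFileInfo fileInfo(aliasPath);
	if (!fileInfo.isSymLink())
	{
		return QString(); // not a udev alias
	}

	const QSerialPort serialPort(fileInfo.symLinkTarget());
	const QSerialPortInfo portInfo(serialPort);
	return QString("%1 -> %2 (%3)")
			.arg(fileInfo.fileName(), fileInfo.absoluteFilePath(), portInfo.description());
}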
void ProviderRs232::identify(const QJsonObject& params)
{
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QString deviceName = params["output"].toString("");
if (!deviceName.isEmpty())
{
_devConfig = params;
init(_devConfig);
{
if ( open() == 0 )
{
for (int i = 0; i < 2; ++i)
{
if (writeColor(ColorRgb::RED) == 0)
{
wait(DEFAULT_IDENTIFY_TIME);
writeColor(ColorRgb::BLACK);
wait(DEFAULT_IDENTIFY_TIME);
}
else
{
break;
}
}
close();
}
}
}
}
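
identify() above briefly opens the configured port and flashes it red twice so a user can tell physically connected devices apart. Only the "output" key is strictly required by the code; everything else in the params object is forwarded to init() as the device configuration. A hedged example of building such a params object; the port path and baud rate are placeholders:

// Sketch: parameters as consumed by ProviderRs232::identify() above.
#include <QJsonObject>

QJsonObject makeIdentifyParams()
{
	QJsonObject params;
	params.insert("output", "/dev/ttyUSB0"); // placeholder; use a port reported by discover()
	params.insert("rate", 115200);           // forwarded to init() as the baud rate
	return params;
}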

View File

@@ -26,6 +26,20 @@ public:
///
~ProviderRs232() override;
///
/// @brief Send an update to the RS232 device to identify it.
///
/// Following parameters are required
/// @code
/// {
/// "deviceConfig" :
/// }
///@endcode
///
/// @param[in] params Parameters to configure device
///
void identify(const QJsonObject& params) override;
protected:
///
@@ -83,6 +97,8 @@ protected:
/// The name of the output device
QString _deviceName;
/// The system location of the output device
QString _location;
/// The RS232 serial-device
QSerialPort _rs232Port;
/// The used baud-rate of the output device

Some files were not shown because too many files have changed in this diff.