Merge remote-tracking branch 'origin/grabberDiscovery' into mediafoundation

Paulchen Panther
2021-04-04 12:43:29 +02:00
committed by LordGrey
187 changed files with 3716 additions and 15430 deletions


@@ -16,7 +16,24 @@
#include <leddevice/LedDevice.h>
#include <leddevice/LedDeviceFactory.h>
#include <HyperionConfig.h> // Required to determine the cmake options
#include <hyperion/GrabberWrapper.h>
#include <grabber/QtGrabber.h>
#if defined(ENABLE_MF)
#include <grabber/MFGrabber.h>
#elif defined(ENABLE_V4L2)
#include <grabber/V4L2Grabber.h>
#endif
#if defined(ENABLE_X11)
#include <grabber/X11Grabber.h>
#endif
#if defined(ENABLE_XCB)
#include <grabber/XcbGrabber.h>
#endif
#include <utils/jsonschema/QJsonFactory.h>
#include <utils/jsonschema/QJsonSchemaChecker.h>
#include <HyperionConfig.h>
@@ -41,6 +58,9 @@
using namespace hyperion;
// Constants
namespace { const bool verbose = false; }
JsonAPI::JsonAPI(QString peerAddress, Logger *log, bool localConnection, QObject *parent, bool noListener)
: API(log, localConnection, parent)
{
@@ -964,13 +984,13 @@ void JsonAPI::handleLedColorsCommand(const QJsonObject &message, const QString &
_ledStreamConnection = connect(_ledStreamTimer, &QTimer::timeout, this, [=]() {
emit streamLedcolorsUpdate(_currentLedValues);
},
Qt::UniqueConnection);
Qt::UniqueConnection);
// start the timer
if (!_ledStreamTimer->isActive() || _ledStreamTimer->interval() != streaming_interval)
_ledStreamTimer->start(streaming_interval);
},
Qt::UniqueConnection);
Qt::UniqueConnection);
// push once
_hyperion->update();
}
@@ -1420,7 +1440,7 @@ void JsonAPI::handleLedDeviceCommand(const QJsonObject &message, const QString &
void JsonAPI::handleInputSourceCommand(const QJsonObject& message, const QString& command, int tan)
{
Debug(_log, "message: [%s]", QString(QJsonDocument(message).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "message: [%s]", QString(QJsonDocument(message).toJson(QJsonDocument::Compact)).toUtf8().constData());
const QString& subc = message["subcommand"].toString().trimmed();
const QString& sourceType = message["sourceType"].toString().trimmed();
@@ -1438,86 +1458,66 @@ void JsonAPI::handleInputSourceCommand(const QJsonObject& message, const QString
{
QJsonObject inputSourcesDiscovered;
inputSourcesDiscovered.insert("sourceType", sourceType);
QJsonArray videoInputs;
#if defined(ENABLE_V4L2) || defined(ENABLE_MF)
if (sourceType == "video" )
{
//for (const auto& instance : GrabberWrapper::getInstance()->getDevices())
//{
#if defined(ENABLE_MF)
MFGrabber* grabber = new MFGrabber();
#elif defined(ENABLE_V4L2)
V4L2Grabber* grabber = new V4L2Grabber();
#endif
QJsonObject params;
videoInputs = grabber->discover(params);
delete grabber;
}
else
#endif
{
DebugIf(verbose, _log, "sourceType: [%s]", QSTRING_CSTR(sourceType));
for (const auto& devicePath : GrabberWrapper::getInstance()->getDevices())
if (sourceType == "screen")
{
QJsonObject params;
QJsonObject device;
device["device"] = devicePath;
device["device_name"] = GrabberWrapper::getInstance()->getDeviceName(devicePath);
device["type"] = "v4l2";
QJsonArray video_inputs;
QMultiMap<QString, int> inputs = GrabberWrapper::getInstance()->getDeviceInputs(devicePath);
for (auto input = inputs.begin(); input != inputs.end(); input++)
#ifdef ENABLE_QT
QtGrabber* qtgrabber = new QtGrabber();
device = qtgrabber->discover(params);
if (!device.isEmpty() )
{
QJsonObject in;
in["name"] = input.key();
in["inputIdx"] = input.value();
QJsonArray standards;
QList<VideoStandard> videoStandards = GrabberWrapper::getInstance()->getAvailableDeviceStandards(devicePath, input.value());
for (auto standard : videoStandards)
{
standards.append(VideoStandard2String(standard));
}
if (!standards.isEmpty())
{
in["standards"] = standards;
}
QJsonArray formats;
QStringList encodingFormats = GrabberWrapper::getInstance()->getAvailableEncodingFormats(devicePath, input.value());
for (auto encodingFormat : encodingFormats)
{
QJsonObject format;
format["format"] = encodingFormat;
QJsonArray resolutionArray;
QMultiMap<int, int> deviceResolutions = GrabberWrapper::getInstance()->getAvailableDeviceResolutions(devicePath, input.value(), parsePixelFormat(encodingFormat));
for (auto width_height = deviceResolutions.begin(); width_height != deviceResolutions.end(); width_height++)
{
QJsonObject resolution;
resolution["width"] = width_height.key();
resolution["height"] = width_height.value();
QJsonArray fps;
QIntList framerates = GrabberWrapper::getInstance()->getAvailableDeviceFramerates(devicePath, input.value(), parsePixelFormat(encodingFormat), width_height.key(), width_height.value());
for (auto framerate : framerates)
{
fps.append(framerate);
}
resolution["fps"] = fps;
resolutionArray.append(resolution);
}
format["resolutions"] = resolutionArray;
formats.append(format);
}
in["formats"] = formats;
video_inputs.append(in);
videoInputs.append(device);
}
delete qtgrabber;
#endif
device["video_inputs"] = video_inputs;
videoInputs.append(device);
#ifdef ENABLE_X11
X11Grabber* x11Grabber = new X11Grabber();
device = x11Grabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete x11Grabber;
#endif
#ifdef ENABLE_XCB
XcbGrabber* xcbGrabber = new XcbGrabber();
device = xcbGrabber->discover(params);
if (!device.isEmpty() )
{
videoInputs.append(device);
}
delete xcbGrabber;
#endif
}
}
#endif
inputSourcesDiscovered["video_sources"] = videoInputs;
Debug(_log, "response: [%s]", QString(QJsonDocument(inputSourcesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "response: [%s]", QString(QJsonDocument(inputSourcesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
sendSuccessDataReply(QJsonDocument(inputSourcesDiscovered), full_command, tan);
}
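For orientation, a hedged sketch of what one entry in the "video_sources" array looks like for sourceType "screen" — the key names match those QtGrabber::discover() sets further down in this commit; the display name is hypothetical:
// sketch only: one discovered screen device, as appended to videoInputs above
QJsonObject device;
device["device"]      = "qt";
device["device_name"] = "QT";
device["type"]        = "screen";
QJsonObject input;
input["name"]     = "DISPLAY1";   // hypothetical monitor name
input["inputIdx"] = 0;
QJsonArray inputsOfDevice;
inputsOfDevice.append(input);
device["video_inputs"] = inputsOfDevice;
videoInputs.append(device);       // ends up under "video_sources" in the JSON reply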
@@ -1528,7 +1528,6 @@ void JsonAPI::handleInputSourceCommand(const QJsonObject& message, const QString
}
}
void JsonAPI::handleNotImplemented(const QString &command, int tan)
{
sendErrorReply("Command not implemented", command, tan);


@@ -12,6 +12,8 @@
// python utils
#include <python/PythonProgram.h>
const int Effect::ENDLESS = -1;
Effect::Effect(Hyperion *hyperion, int priority, int timeout, const QString &script, const QString &name, const QJsonObject &args, const QString &imageData)
: QThread()
, _hyperion(hyperion)
@@ -22,7 +24,7 @@ Effect::Effect(Hyperion *hyperion, int priority, int timeout, const QString &scr
, _args(args)
, _imageData(imageData)
, _endTime(-1)
, _colors()
, _interupt(false)
, _imageSize(hyperion->getLedGridSize())
, _image(_imageSize,QImage::Format_ARGB32_Premultiplied)
{
@@ -47,6 +49,24 @@ Effect::~Effect()
_imageStack.clear();
}
bool Effect::isInterruptionRequested()
{
return _interupt || getRemaining() < ENDLESS;
}
int Effect::getRemaining() const
{
// determine the timeout
int timeout = _timeout;
if (timeout > 0)
{
timeout = static_cast<int>( _endTime - QDateTime::currentMSecsSinceEpoch());
return timeout;
}
return ENDLESS;
}
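getRemaining() centralises the timeout computation that the EffectModule wrappers below previously repeated inline; each wrapper now reduces to a pattern like this hedged sketch (colors stands in for the prepared LED vector):
// sketch only: remaining runtime is resolved once per call instead of being recomputed from _endTime
int remaining = getEffect()->getRemaining();   // ENDLESS (-1) for effects without a timeout
emit getEffect()->setInput(getEffect()->_priority, colors, remaining, false);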
void Effect::setModuleParameters()
{
// import the builtin Hyperion module


@@ -19,8 +19,6 @@
EffectEngine::EffectEngine(Hyperion * hyperion)
: _hyperion(hyperion)
, _availableEffects()
, _activeEffects()
, _log(Logger::getInstance("EFFECTENGINE"))
, _effectFileHandler(EffectFileHandler::getInstance())
{
@@ -202,7 +200,7 @@ void EffectEngine::allChannelsCleared()
{
for (Effect * effect : _activeEffects)
{
if (effect->getPriority() != 254 && !effect->isInterruptionRequested())
if (effect->getPriority() != PriorityMuxer::BG_PRIORITY && !effect->isInterruptionRequested())
{
effect->requestInterruption();
}
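Replacing the magic number with the named constant makes the intent explicit: interrupt every active effect except the background effect. A minimal sketch of the assumed definition — the real one lives in PriorityMuxer, which is not part of this diff:
// assumption: BG_PRIORITY is the background-effect priority that was previously hard-coded as 254
static const int BG_PRIORITY = 254;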


@@ -11,10 +11,10 @@
#include <QByteArray>
// createEffect helper
struct find_schema: std::unary_function<EffectSchema, bool>
struct find_schema : std::unary_function<EffectSchema, bool>
{
QString pyFile;
find_schema(QString pyFile):pyFile(pyFile) { }
find_schema(QString pyFile) :pyFile(std::move(pyFile)) { }
bool operator()(EffectSchema const& schema) const
{
return schema.pyFile == pyFile;
@@ -22,10 +22,10 @@ struct find_schema: std::unary_function<EffectSchema, bool>
};
// deleteEffect helper
struct find_effect: std::unary_function<EffectDefinition, bool>
struct find_effect : std::unary_function<EffectDefinition, bool>
{
QString effectName;
find_effect(QString effectName) :effectName(effectName) { }
find_effect(QString effectName) :effectName(std::move(effectName)) { }
bool operator()(EffectDefinition const& effectDefinition) const
{
return effectDefinition.name == effectName;
@@ -36,7 +36,6 @@ EffectFileHandler* EffectFileHandler::efhInstance;
EffectFileHandler::EffectFileHandler(const QString& rootPath, const QJsonDocument& effectConfig, QObject* parent)
: QObject(parent)
, _effectConfig()
, _log(Logger::getInstance("EFFECTFILES"))
, _rootPath(rootPath)
{
@@ -50,7 +49,7 @@ EffectFileHandler::EffectFileHandler(const QString& rootPath, const QJsonDocumen
void EffectFileHandler::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::EFFECTS)
if (type == settings::EFFECTS)
{
_effectConfig = config.object();
// update effects and schemas
@@ -67,15 +66,17 @@ QString EffectFileHandler::deleteEffect(const QString& effectName)
if (it != effectsDefinition.end())
{
QFileInfo effectConfigurationFile(it->file);
if (effectConfigurationFile.absoluteFilePath().mid(0, 1) != ":" )
if (!effectConfigurationFile.absoluteFilePath().startsWith(':'))
{
if (effectConfigurationFile.exists())
{
if ( (it->script == ":/effects/gif.py") && !it->args.value("image").toString("").isEmpty())
if ((it->script == ":/effects/gif.py") && !it->args.value("image").toString("").isEmpty())
{
QFileInfo effectImageFile(effectConfigurationFile.absolutePath() + "/" + it->args.value("image").toString());
if (effectImageFile.exists())
QFile::remove(effectImageFile.absoluteFilePath());
QFileInfo effectImageFile(it->args.value("image").toString());
if (effectImageFile.exists())
{
QFile::remove(effectImageFile.absoluteFilePath());
}
}
bool result = QFile::remove(effectConfigurationFile.absoluteFilePath());
@@ -83,15 +84,27 @@ QString EffectFileHandler::deleteEffect(const QString& effectName)
if (result)
{
updateEffects();
return "";
} else
resultMsg = "";
}
else
{
resultMsg = "Can't delete effect configuration file: " + effectConfigurationFile.absoluteFilePath() + ". Please check permissions";
} else
}
}
else
{
resultMsg = "Can't find effect configuration file: " + effectConfigurationFile.absoluteFilePath();
} else
}
}
else
{
resultMsg = "Can't delete internal effect: " + effectName;
} else
}
}
else
{
resultMsg = "Effect " + effectName + " not found";
}
return resultMsg;
}
@@ -101,17 +114,14 @@ QString EffectFileHandler::saveEffect(const QJsonObject& message)
QString resultMsg;
if (!message["args"].toObject().isEmpty())
{
QString scriptName;
(message["script"].toString().mid(0, 1) == ":" )
? scriptName = ":/effects//" + message["script"].toString().mid(1)
: scriptName = message["script"].toString();
QString scriptName = message["script"].toString();
std::list<EffectSchema> effectsSchemas = getEffectSchemas();
std::list<EffectSchema>::iterator it = std::find_if(effectsSchemas.begin(), effectsSchemas.end(), find_schema(scriptName));
if (it != effectsSchemas.end())
{
if(!JsonUtils::validate("EffectFileHandler", message["args"].toObject(), it->schemaFile, _log))
if (!JsonUtils::validate("EffectFileHandler", message["args"].toObject(), it->schemaFile, _log))
{
return "Error during arg validation against schema, please consult the Hyperion Log";
}
@@ -120,9 +130,9 @@ QString EffectFileHandler::saveEffect(const QJsonObject& message)
QJsonArray effectArray;
effectArray = _effectConfig["paths"].toArray();
if (effectArray.size() > 0)
if (!effectArray.empty())
{
if (message["name"].toString().trimmed().isEmpty() || message["name"].toString().trimmed().startsWith("."))
if (message["name"].toString().trimmed().isEmpty() || message["name"].toString().trimmed().startsWith(":"))
{
return "Can't save new effect. Effect name is empty or begins with a dot.";
}
@@ -138,41 +148,56 @@ QString EffectFileHandler::saveEffect(const QJsonObject& message)
if (iter != availableEffects.end())
{
newFileName.setFile(iter->file);
if (newFileName.absoluteFilePath().mid(0, 1) == ":")
if (newFileName.absoluteFilePath().startsWith(':'))
{
return "The effect name '" + message["name"].toString() + "' is assigned to an internal effect. Please rename your effekt.";
return "The effect name '" + message["name"].toString() + "' is assigned to an internal effect. Please rename your effect.";
}
} else
}
else
{
// TODO global special keyword handling
QString f = effectArray[0].toString().replace("$ROOT",_rootPath) + "/" + message["name"].toString().replace(QString(" "), QString("")) + QString(".json");
QString f = effectArray[0].toString().replace("$ROOT", _rootPath) + '/' + message["name"].toString().replace(QString(" "), QString("")) + QString(".json");
newFileName.setFile(f);
}
//TODO check if filename exist
if (!message["imageData"].toString("").isEmpty() && !message["args"].toObject().value("image").toString("").isEmpty())
{
QFileInfo imageFileName(effectArray[0].toString().replace("$ROOT",_rootPath) + "/" + message["args"].toObject().value("image").toString());
if(!FileUtils::writeFile(imageFileName.absoluteFilePath(), QByteArray::fromBase64(message["imageData"].toString("").toUtf8()), _log))
QJsonObject args = message["args"].toObject();
QString imageFilePath = effectArray[0].toString().replace("$ROOT", _rootPath) + '/' + args.value("image").toString();
QFileInfo imageFileName(imageFilePath);
if (!FileUtils::writeFile(imageFileName.absoluteFilePath(), QByteArray::fromBase64(message["imageData"].toString("").toUtf8()), _log))
{
return "Error while saving image file '" + message["args"].toObject().value("image").toString() + ", please check the Hyperion Log";
}
//Update json with image file location
args["image"] = imageFilePath;
effectJson["args"] = args;
}
if(!JsonUtils::write(newFileName.absoluteFilePath(), effectJson, _log))
if (!JsonUtils::write(newFileName.absoluteFilePath(), effectJson, _log))
{
return "Error while saving effect, please check the Hyperion Log";
}
Info(_log, "Reload effect list");
updateEffects();
return "";
} else
resultMsg = "";
}
else
{
resultMsg = "Can't save new effect. Effect path empty";
} else
}
}
else
{
resultMsg = "Missing schema file for Python script " + message["script"].toString();
} else
}
}
else
{
resultMsg = "Missing or empty Object 'args'";
}
return resultMsg;
}
@@ -184,50 +209,56 @@ void EffectFileHandler::updateEffects()
_effectSchemas.clear();
// read all effects
const QJsonArray & paths = _effectConfig["paths"].toArray();
const QJsonArray & disabledEfx = _effectConfig["disable"].toArray();
const QJsonArray& paths = _effectConfig["paths"].toArray();
const QJsonArray& disabledEfx = _effectConfig["disable"].toArray();
QStringList efxPathList;
efxPathList << ":/effects/";
QStringList disableList;
for(auto p : paths)
for (const auto& p : paths)
{
efxPathList << p.toString().replace("$ROOT",_rootPath);
QString effectPath = p.toString();
if (!effectPath.endsWith('/'))
{
effectPath.append('/');
}
efxPathList << effectPath.replace("$ROOT", _rootPath);
}
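Guaranteeing the trailing slash here is what lets loadEffectDefinition() and loadEffectSchema() further down drop QDir::separator() and build file names by plain concatenation; a small illustration with a hypothetical root path:
// illustration only (hypothetical paths)
QString effectPath = QString("$ROOT/custom-effects").replace("$ROOT", "/var/lib/hyperion");
if (!effectPath.endsWith('/'))
{
	effectPath.append('/');
}
QString fileName = effectPath + "myeffect.json";   // -> "/var/lib/hyperion/custom-effects/myeffect.json"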
for(auto efx : disabledEfx)
for (const auto& efx : disabledEfx)
{
disableList << efx.toString();
}
QMap<QString, EffectDefinition> availableEffects;
for (const QString & path : efxPathList )
for (const QString& path : qAsConst(efxPathList))
{
QDir directory(path);
if (!directory.exists())
{
if(directory.mkpath(path))
if (directory.mkpath(path))
{
Info(_log, "New Effect path \"%s\" created successfully", QSTRING_CSTR(path) );
Info(_log, "New Effect path \"%s\" created successfully", QSTRING_CSTR(path));
}
else
{
Warning(_log, "Failed to create Effect path \"%s\", please check permissions", QSTRING_CSTR(path) );
Warning(_log, "Failed to create Effect path \"%s\", please check permissions", QSTRING_CSTR(path));
}
}
else
{
int efxCount = 0;
QStringList filenames = directory.entryList(QStringList() << "*.json", QDir::Files, QDir::Name | QDir::IgnoreCase);
for (const QString & filename : filenames)
for (const QString& filename : qAsConst(filenames))
{
EffectDefinition def;
if (loadEffectDefinition(path, filename, def))
{
InfoIf(availableEffects.find(def.name) != availableEffects.end(), _log,
"effect overload effect '%s' is now taken from '%s'", QSTRING_CSTR(def.name), QSTRING_CSTR(path) );
"effect overload effect '%s' is now taken from '%s'", QSTRING_CSTR(def.name), QSTRING_CSTR(path));
if ( disableList.contains(def.name) )
if (disableList.contains(def.name))
{
Info(_log, "effect '%s' not loaded, because it is disabled in hyperion config", QSTRING_CSTR(def.name));
}
@@ -242,64 +273,65 @@ void EffectFileHandler::updateEffects()
// collect effect schemas
efxCount = 0;
directory.setPath(path.endsWith("/") ? (path + "schema/") : (path + "/schema/"));
QStringList pynames = directory.entryList(QStringList() << "*.json", QDir::Files, QDir::Name | QDir::IgnoreCase);
for (const QString & pyname : pynames)
QString schemaPath = path + "schema" + '/';
directory.setPath(schemaPath);
QStringList schemaFileNames = directory.entryList(QStringList() << "*.json", QDir::Files, QDir::Name | QDir::IgnoreCase);
for (const QString& schemaFileName : qAsConst(schemaFileNames))
{
EffectSchema pyEffect;
if (loadEffectSchema(path, pyname, pyEffect))
if (loadEffectSchema(path, directory.filePath(schemaFileName), pyEffect))
{
_effectSchemas.push_back(pyEffect);
efxCount++;
}
}
InfoIf(efxCount > 0, _log, "%d effect schemas loaded from directory %s", efxCount, QSTRING_CSTR((path + "schema/")));
InfoIf(efxCount > 0, _log, "%d effect schemas loaded from directory %s", efxCount, QSTRING_CSTR(schemaPath));
}
}
for(auto item : availableEffects)
for (const auto& item : qAsConst(availableEffects))
{
_availableEffects.push_back(item);
}
ErrorIf(_availableEffects.size()==0, _log, "no effects found, check your effect directories");
ErrorIf(_availableEffects.empty(), _log, "no effects found, check your effect directories");
emit effectListChanged();
}
bool EffectFileHandler::loadEffectDefinition(const QString &path, const QString &effectConfigFile, EffectDefinition & effectDefinition)
bool EffectFileHandler::loadEffectDefinition(const QString& path, const QString& effectConfigFile, EffectDefinition& effectDefinition)
{
QString fileName = path + QDir::separator() + effectConfigFile;
QString fileName = path + effectConfigFile;
// Read and parse the effect json config file
QJsonObject configEffect;
if(!JsonUtils::readFile(fileName, configEffect, _log))
if (!JsonUtils::readFile(fileName, configEffect, _log)) {
return false;
}
// validate effect config with effect schema(path)
if(!JsonUtils::validate(fileName, configEffect, ":effect-schema", _log))
if (!JsonUtils::validate(fileName, configEffect, ":effect-schema", _log)) {
return false;
}
// setup the definition
effectDefinition.file = fileName;
QJsonObject config = configEffect;
QString scriptName = config["script"].toString();
effectDefinition.name = config["name"].toString();
if (scriptName.isEmpty())
if (scriptName.isEmpty()) {
return false;
}
QFile fileInfo(scriptName);
if (scriptName.mid(0, 1) == ":" )
if (!fileInfo.exists())
{
(!fileInfo.exists())
? effectDefinition.script = ":/effects/"+scriptName.mid(1)
: effectDefinition.script = scriptName;
} else
effectDefinition.script = path + scriptName;
}
else
{
(!fileInfo.exists())
? effectDefinition.script = path + QDir::separator() + scriptName
: effectDefinition.script = scriptName;
effectDefinition.script = scriptName;
}
effectDefinition.args = config["args"].toObject();
@@ -307,31 +339,31 @@ bool EffectFileHandler::loadEffectDefinition(const QString &path, const QString
return true;
}
bool EffectFileHandler::loadEffectSchema(const QString &path, const QString &effectSchemaFile, EffectSchema & effectSchema)
bool EffectFileHandler::loadEffectSchema(const QString& path, const QString& schemaFilePath, EffectSchema& effectSchema)
{
QString fileName = path + "schema/" + QDir::separator() + effectSchemaFile;
// Read and parse the effect schema file
QJsonObject schemaEffect;
if(!JsonUtils::readFile(fileName, schemaEffect, _log))
return false;
// setup the definition
QString scriptName = schemaEffect["script"].toString();
effectSchema.schemaFile = fileName;
fileName = path + QDir::separator() + scriptName;
QFile pyFile(fileName);
if (scriptName.isEmpty() || !pyFile.open(QIODevice::ReadOnly))
if (!JsonUtils::readFile(schemaFilePath, schemaEffect, _log))
{
fileName = path + "schema/" + QDir::separator() + effectSchemaFile;
Error( _log, "Python script '%s' in effect schema '%s' could not be loaded", QSTRING_CSTR(scriptName), QSTRING_CSTR(fileName));
return false;
}
pyFile.close();
// setup the definition
QString scriptName = schemaEffect["script"].toString();
effectSchema.schemaFile = schemaFilePath;
effectSchema.pyFile = (scriptName.mid(0, 1) == ":" ) ? ":/effects/"+scriptName.mid(1) : path + QDir::separator() + scriptName;
QString scriptFilePath = path + scriptName;
QFile pyScriptFile(scriptFilePath);
if (scriptName.isEmpty() || !pyScriptFile.open(QIODevice::ReadOnly))
{
Error(_log, "Python script '%s' in effect schema '%s' could not be loaded", QSTRING_CSTR(scriptName), QSTRING_CSTR(schemaFilePath));
return false;
}
pyScriptFile.close();
effectSchema.pyFile = scriptFilePath;
effectSchema.pySchema = schemaEffect;
return true;


@@ -121,19 +121,6 @@ PyMethodDef EffectModule::effectMethods[] = {
PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
// determine the timeout
int timeout = getEffect()->_timeout;
if (timeout > 0)
{
timeout = getEffect()->_endTime - QDateTime::currentMSecsSinceEpoch();
// we are done if the time has passed
if (timeout <= 0) Py_RETURN_NONE;
}
// check the number of arguments
int argCount = PyTuple_Size(args);
if (argCount == 3)
@@ -144,7 +131,7 @@ PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
{
getEffect()->_colors.fill(color);
QVector<ColorRgb> _cQV = getEffect()->_colors;
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), timeout, false);
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), getEffect()->getRemaining(), false);
Py_RETURN_NONE;
}
return nullptr;
@@ -163,7 +150,7 @@ PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
char * data = PyByteArray_AS_STRING(bytearray);
memcpy(getEffect()->_colors.data(), data, length);
QVector<ColorRgb> _cQV = getEffect()->_colors;
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), timeout, false);
emit getEffect()->setInput(getEffect()->_priority, std::vector<ColorRgb>( _cQV.begin(), _cQV.end() ), getEffect()->getRemaining(), false);
Py_RETURN_NONE;
}
else
@@ -192,19 +179,6 @@ PyObject* EffectModule::wrapSetColor(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapSetImage(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
// determine the timeout
int timeout = getEffect()->_timeout;
if (timeout > 0)
{
timeout = getEffect()->_endTime - QDateTime::currentMSecsSinceEpoch();
// we are done if the time has passed
if (timeout <= 0) Py_RETURN_NONE;
}
// bytearray of values
int width, height;
PyObject * bytearray = nullptr;
@@ -218,7 +192,7 @@ PyObject* EffectModule::wrapSetImage(PyObject *self, PyObject *args)
Image<ColorRgb> image(width, height);
char * data = PyByteArray_AS_STRING(bytearray);
memcpy(image.memptr(), data, length);
emit getEffect()->setInputImage(getEffect()->_priority, image, timeout, false);
emit getEffect()->setInputImage(getEffect()->_priority, image, getEffect()->getRemaining(), false);
Py_RETURN_NONE;
}
else
@@ -245,9 +219,6 @@ PyObject* EffectModule::wrapSetImage(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapGetImage(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
QString file;
QBuffer buffer;
QImageReader reader;
@@ -329,19 +300,6 @@ PyObject* EffectModule::wrapAbort(PyObject *self, PyObject *)
PyObject* EffectModule::wrapImageShow(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
// determine the timeout
int timeout = getEffect()->_timeout;
if (timeout > 0)
{
timeout = getEffect()->_endTime - QDateTime::currentMSecsSinceEpoch();
// we are done if the time has passed
if (timeout <= 0) Py_RETURN_NONE;
}
int argCount = PyTuple_Size(args);
int imgId = -1;
bool argsOk = (argCount == 0);
@@ -375,16 +333,13 @@ PyObject* EffectModule::wrapImageShow(PyObject *self, PyObject *args)
}
memcpy(image.memptr(), binaryImage.data(), binaryImage.size());
emit getEffect()->setInputImage(getEffect()->_priority, image, timeout, false);
emit getEffect()->setInputImage(getEffect()->_priority, image, getEffect()->getRemaining(), false);
return Py_BuildValue("");
}
PyObject* EffectModule::wrapImageLinearGradient(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
PyObject * bytearray = nullptr;
int startRX = 0;
@@ -452,9 +407,6 @@ PyObject* EffectModule::wrapImageLinearGradient(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageConicalGradient(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
PyObject * bytearray = nullptr;
int centerX, centerY, angle;
@@ -521,9 +473,6 @@ PyObject* EffectModule::wrapImageConicalGradient(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageRadialGradient(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
PyObject * bytearray = nullptr;
int centerX, centerY, radius, focalX, focalY, focalRadius, spread;
@@ -602,9 +551,6 @@ PyObject* EffectModule::wrapImageRadialGradient(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawPolygon(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
PyObject * bytearray = nullptr;
int argCount = PyTuple_Size(args);
@@ -663,9 +609,6 @@ PyObject* EffectModule::wrapImageDrawPolygon(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawPie(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
PyObject * bytearray = nullptr;
QString brush;
@@ -760,9 +703,6 @@ PyObject* EffectModule::wrapImageDrawPie(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageSolidFill(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b;
int a = 255;
@@ -802,9 +742,6 @@ PyObject* EffectModule::wrapImageSolidFill(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawLine(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b;
int a = 255;
@@ -843,9 +780,6 @@ PyObject* EffectModule::wrapImageDrawLine(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawPoint(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b, x, y;
int a = 255;
@@ -879,9 +813,6 @@ PyObject* EffectModule::wrapImageDrawPoint(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageDrawRect(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b;
int a = 255;
@@ -921,9 +852,6 @@ PyObject* EffectModule::wrapImageDrawRect(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageSetPixel(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int r, g, b, x, y;
@@ -939,9 +867,6 @@ PyObject* EffectModule::wrapImageSetPixel(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageGetPixel(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int x, y;
@@ -955,9 +880,6 @@ PyObject* EffectModule::wrapImageGetPixel(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageSave(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
QImage img(getEffect()->_image.copy());
getEffect()->_imageStack.append(img);
@@ -966,9 +888,6 @@ PyObject* EffectModule::wrapImageSave(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageMinSize(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int w, h;
int width = getEffect()->_imageSize.width();
@@ -991,25 +910,16 @@ PyObject* EffectModule::wrapImageMinSize(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageWidth(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
return Py_BuildValue("i", getEffect()->_imageSize.width());
}
PyObject* EffectModule::wrapImageHeight(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
return Py_BuildValue("i", getEffect()->_imageSize.height());
}
PyObject* EffectModule::wrapImageCRotate(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int argCount = PyTuple_Size(args);
int angle;
@@ -1024,9 +934,6 @@ PyObject* EffectModule::wrapImageCRotate(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageCOffset(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int offsetX = 0;
int offsetY = 0;
int argCount = PyTuple_Size(args);
@@ -1042,9 +949,6 @@ PyObject* EffectModule::wrapImageCOffset(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageCShear(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
int sh,sv;
int argCount = PyTuple_Size(args);
@@ -1058,9 +962,6 @@ PyObject* EffectModule::wrapImageCShear(PyObject *self, PyObject *args)
PyObject* EffectModule::wrapImageResetT(PyObject *self, PyObject *args)
{
// check if we have aborted already
if (getEffect()->isInterruptionRequested()) Py_RETURN_NONE;
getEffect()->_painter->resetTransform();
Py_RETURN_NONE;
}


@@ -14,13 +14,13 @@ if (ENABLE_OSX)
add_subdirectory(osx)
endif(ENABLE_OSX)
if (ENABLE_V4L2)
add_subdirectory(v4l2)
endif (ENABLE_V4L2)
# if (ENABLE_V4L2)
# add_subdirectory(v4l2)
# endif (ENABLE_V4L2)
if (ENABLE_MF)
add_subdirectory(mediafoundation)
endif (ENABLE_MF)
if (ENABLE_V4L2 OR ENABLE_MF)
add_subdirectory(video)
endif ()
if (ENABLE_X11)
add_subdirectory(x11)


@@ -1,12 +1,13 @@
#include <windows.h>
#include <grabber/DirectXGrabber.h>
#include <QImage>
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib,"d3dx9.lib")
DirectXGrabber::DirectXGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display)
: Grabber("DXGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
, _pixelDecimation(pixelDecimation)
, _display(unsigned(display))
, _displayWidth(0)
, _displayHeight(0)
, _srcRect(0)
@@ -43,6 +44,8 @@ bool DirectXGrabber::setupDisplay()
D3DDISPLAYMODE ddm;
D3DPRESENT_PARAMETERS d3dpp;
HMONITOR hMonitor = nullptr;
MONITORINFO monitorInfo = { 0 };
if ((_d3d9 = Direct3DCreate9(D3D_SDK_VERSION)) == nullptr)
{
@@ -50,7 +53,17 @@ bool DirectXGrabber::setupDisplay()
return false;
}
if (FAILED(_d3d9->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &ddm)))
SecureZeroMemory(&monitorInfo, sizeof(monitorInfo));
monitorInfo.cbSize = sizeof(MONITORINFO);
hMonitor = _d3d9->GetAdapterMonitor(_display);
if (hMonitor == nullptr || GetMonitorInfo(hMonitor, &monitorInfo) == FALSE)
{
Info(_log, "Specified display %d is not available. Primary display %d is used", _display, D3DADAPTER_DEFAULT);
_display = D3DADAPTER_DEFAULT;
}
if (FAILED(_d3d9->GetAdapterDisplayMode(_display, &ddm)))
{
Error(_log, "Failed to get current display mode");
return false;
@@ -69,7 +82,7 @@ bool DirectXGrabber::setupDisplay()
d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;
d3dpp.FullScreen_RefreshRateInHz = D3DPRESENT_RATE_DEFAULT;
if (FAILED(_d3d9->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, nullptr, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &_device)))
if (FAILED(_d3d9->CreateDevice(_display, D3DDEVTYPE_HAL, nullptr, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &_device)))
{
Error(_log, "CreateDevice failed");
return false;
@@ -147,12 +160,11 @@ int DirectXGrabber::grabFrame(Image<ColorRgb> & image)
return 0;
}
memcpy(image.memptr(), lockedRect.pBits, _width * _height * 3);
for(int i=0 ; i < _height ; i++)
memcpy((unsigned char*)image.memptr() + i * _width * 3, (unsigned char*)lockedRect.pBits + i * lockedRect.Pitch, _width * 3);
for (int idx = 0; idx < _width * _height; idx++)
{
const ColorRgb & color = image.memptr()[idx];
image.memptr()[idx] = ColorRgb{color.blue, color.green, color.red};
}
image.memptr()[idx] = ColorRgb{image.memptr()[idx].blue, image.memptr()[idx].green, image.memptr()[idx].red};
if (FAILED(_surfaceDest->UnlockRect()))
{
@@ -179,3 +191,12 @@ void DirectXGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
setupDisplay();
}
void DirectXGrabber::setDisplayIndex(int index)
{
if(_display != unsigned(index))
{
_display = unsigned(index);
setupDisplay();
}
}
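For reference, the per-row copy now used in grabFrame() above respects the locked surface's pitch (row stride), which can exceed width * 3 bytes due to row padding; a generic sketch of the same idea with placeholder buffers:
// sketch only: pitch-aware copy into a tightly packed destination
// dst rows are width * 3 bytes apart, src rows are 'pitch' bytes apart
for (int y = 0; y < height; ++y)
{
	memcpy(dst + y * width * 3, src + y * pitch, width * 3);
}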


@@ -1,16 +0,0 @@
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/mediafoundation)
FILE ( GLOB MF_SOURCES "${CURRENT_HEADER_DIR}/MF*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" )
add_library(mf-grabber ${MF_SOURCES} )
target_link_libraries(mf-grabber
hyperion
${QT_LIBRARIES}
)
if(TURBOJPEG_FOUND)
target_link_libraries(mf-grabber ${TurboJPEG_LIBRARY})
endif(TURBOJPEG_FOUND)


@@ -1,170 +0,0 @@
#include <QMetaType>
#include <grabber/MFWrapper.h>
// qt
#include <QTimer>
MFWrapper::MFWrapper(const QString &device, unsigned grabWidth, unsigned grabHeight, unsigned fps, int pixelDecimation, QString flipMode)
: GrabberWrapper("V4L2:MEDIA_FOUNDATION", &_grabber, grabWidth, grabHeight, 10)
, _grabber(device, grabWidth, grabHeight, fps, pixelDecimation, flipMode)
{
_ggrabber = &_grabber;
// register the image type
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
// Handle the image in the captured thread using a direct connection
connect(&_grabber, &MFGrabber::newFrame, this, &MFWrapper::newFrame, Qt::DirectConnection);
}
MFWrapper::~MFWrapper()
{
stop();
}
bool MFWrapper::start()
{
return ( _grabber.start() && GrabberWrapper::start());
}
void MFWrapper::stop()
{
_grabber.stop();
GrabberWrapper::stop();
}
void MFWrapper::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
{
_grabber.setSignalThreshold( redSignalThreshold, greenSignalThreshold, blueSignalThreshold, noSignalCounterThreshold);
}
void MFWrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
_grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
void MFWrapper::setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax)
{
_grabber.setSignalDetectionOffset(verticalMin, horizontalMin, verticalMax, horizontalMax);
}
void MFWrapper::newFrame(const Image<ColorRgb> &image)
{
emit systemImage(_grabberName, image);
}
void MFWrapper::action()
{
// dummy
}
void MFWrapper::setSignalDetectionEnable(bool enable)
{
_grabber.setSignalDetectionEnable(enable);
}
bool MFWrapper::getSignalDetectionEnable() const
{
return _grabber.getSignalDetectionEnabled();
}
void MFWrapper::setCecDetectionEnable(bool enable)
{
_grabber.setCecDetectionEnable(enable);
}
bool MFWrapper::getCecDetectionEnable() const
{
return _grabber.getCecDetectionEnabled();
}
bool MFWrapper::setDevice(const QString& device)
{
return _grabber.setDevice(device);
}
void MFWrapper::setFpsSoftwareDecimation(int decimation)
{
_grabber.setFpsSoftwareDecimation(decimation);
}
bool MFWrapper::setEncoding(QString enc)
{
return _grabber.setEncoding(enc);
}
bool MFWrapper::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue)
{
return _grabber.setBrightnessContrastSaturationHue(brightness, contrast, saturation, hue);
}
void MFWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::V4L2 && _grabberName.startsWith("V4L2"))
{
// extract settings
const QJsonObject& obj = config.object();
// reload state
bool reload = false;
// device name, video standard
if (_grabber.setDevice(obj["device"].toString("auto")))
reload = true;
// device input
_grabber.setInput(obj["input"].toInt(-1));
// device resolution
if (_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0)))
reload = true;
// device framerate
if (_grabber.setFramerate(obj["fps"].toInt(15)))
reload = true;
// image size decimation
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
// flip mode
_grabber.setFlipMode(obj["flip"].toString("NO_CHANGE"));
// image cropping
_grabber.setCropping(
obj["cropLeft"].toInt(0),
obj["cropRight"].toInt(0),
obj["cropTop"].toInt(0),
obj["cropBottom"].toInt(0));
// Brightness, Contrast, Saturation, Hue
if (_grabber.setBrightnessContrastSaturationHue(obj["hardware_brightness"].toInt(0), obj["hardware_contrast"].toInt(0), obj["hardware_saturation"].toInt(0), obj["hardware_hue"].toInt(0)))
reload = true;
// CEC Standby
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
// software frame skipping
_grabber.setFpsSoftwareDecimation(obj["fpsSoftwareDecimation"].toInt(1));
// Signal detection
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
_grabber.setSignalDetectionOffset(
obj["sDHOffsetMin"].toDouble(0.25),
obj["sDVOffsetMin"].toDouble(0.25),
obj["sDHOffsetMax"].toDouble(0.75),
obj["sDVOffsetMax"].toDouble(0.75));
_grabber.setSignalThreshold(
obj["redSignalThreshold"].toDouble(0.0)/100.0,
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
obj["blueSignalThreshold"].toDouble(0.0)/100.0,
obj["noSignalCounterThreshold"].toInt(50) );
// Hardware encoding format
if (_grabber.setEncoding(obj["encoding"].toString("NO_CHANGE")))
reload = true;
// Reload the Grabber if any settings have been changed that require it
if (reload)
_grabber.reloadGrabber();
}
}


@@ -7,10 +7,18 @@
#include <QGuiApplication>
#include <QWidget>
#include <QScreen>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
// Constants
namespace {
const bool verbose = false;
} //End of constants
QtGrabber::QtGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation, int display)
: Grabber("QTGRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
, _display(unsigned(display))
, _display(display)
, _pixelDecimation(pixelDecimation)
, _calculatedWidth(0)
, _calculatedHeight(0)
@@ -18,12 +26,12 @@ QtGrabber::QtGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, i
, _src_y(0)
, _src_x_max(0)
, _src_y_max(0)
, _isWayland(false)
, _screen(nullptr)
, _isVirtual(false)
{
_logger = Logger::getInstance("Qt");
_useImageResampler = false;
// init
setupDisplay();
}
QtGrabber::~QtGrabber()
@@ -36,51 +44,97 @@ void QtGrabber::freeResources()
// Qt seems to hold the ownership of the QScreen pointers
}
bool QtGrabber::open()
{
bool rc = false;
if (getenv("WAYLAND_DISPLAY") != nullptr)
{
_isWayland = true;
}
else
{
rc = true;
}
return rc;
}
bool QtGrabber::setupDisplay()
{
// cleanup last screen
freeResources();
QScreen* primary = QGuiApplication::primaryScreen();
QList<QScreen *> screens = QGuiApplication::screens();
// inject main screen at 0, if not nullptr
if(primary != nullptr)
bool result = false;
if ( ! open() )
{
screens.prepend(primary);
// remove last main screen if twice in list
if(screens.lastIndexOf(primary) > 0)
screens.removeAt(screens.lastIndexOf(primary));
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
}
if(screens.isEmpty())
else
{
Error(_log, "No displays found to capture from!");
return false;
// cleanup last screen
freeResources();
_numberOfSDisplays = 0;
QScreen* primary = QGuiApplication::primaryScreen();
QList<QScreen *> screens = QGuiApplication::screens();
// inject main screen at 0, if not nullptr
if(primary != nullptr)
{
screens.prepend(primary);
// remove last main screen if twice in list
if(screens.lastIndexOf(primary) > 0)
{
screens.removeAt(screens.lastIndexOf(primary));
}
}
if(screens.isEmpty())
{
Error(_log, "No displays found to capture from!");
result = false;
}
else
{
_numberOfSDisplays = screens.size();
Info(_log,"Available Displays:");
int index = 0;
for(auto * screen : qAsConst(screens))
{
const QRect geo = screen->geometry();
Info(_log,"Display %d: Name:%s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth());
++index;
}
_isVirtual = false;
// be sure the index is available
if (_display > _numberOfSDisplays - 1 )
{
if (screens.at(0)->size() != screens.at(0)->virtualSize())
{
Info(_log, "Using virtual display across all screens");
_isVirtual = true;
_display = 0;
}
else
{
Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display);
_display = 0;
}
}
// init the requested display
_screen = screens.at(_display);
connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged);
updateScreenDimensions(true);
Info(_log,"Initialized display %d", _display);
result = true;
}
}
Info(_log,"Available Displays:");
int index = 0;
for(auto screen : screens)
{
const QRect geo = screen->geometry();
Info(_log,"Display %d: Name:%s Geometry: (L,T,R,B) %d,%d,%d,%d Depth:%dbit", index, QSTRING_CSTR(screen->name()), geo.left(), geo.top() ,geo.right(), geo.bottom(), screen->depth());
index++;
}
// be sure the index is available
if(_display > unsigned(screens.size()-1))
{
Info(_log, "The requested display index '%d' is not available, falling back to display 0", _display);
_display = 0;
}
// init the requested display
_screen = screens.at(_display);
connect(_screen, &QScreen::geometryChanged, this, &QtGrabber::geometryChanged);
updateScreenDimensions(true);
Info(_log,"Initialized display %d", _display);
return true;
return result;
}
void QtGrabber::geometryChanged(const QRect &geo)
@@ -91,30 +145,48 @@ void QtGrabber::geometryChanged(const QRect &geo)
int QtGrabber::grabFrame(Image<ColorRgb> & image)
{
if (!_enabled) return 0;
if (!_enabled)
{
return 0;
}
if(_screen == nullptr)
{
// reinit, this will disable capture on failure
setEnabled(setupDisplay());
bool result = setupDisplay();
setEnabled(result);
return -1;
}
QPixmap originalPixmap = _screen->grabWindow(0, _src_x, _src_y, _src_x_max, _src_y_max);
QImage imageFrame = originalPixmap.toImage().scaled(_calculatedWidth, _calculatedHeight).convertToFormat( QImage::Format_RGB888);
image.resize(_calculatedWidth, _calculatedHeight);
image.resize(static_cast<uint>(_calculatedWidth), static_cast<uint>(_calculatedHeight));
for (int y = 0; y < imageFrame.height(); y++)
memcpy((unsigned char*)image.memptr() + y * image.width() * 3, (unsigned char*)imageFrame.scanLine(y), imageFrame.width() * 3);
{
memcpy((unsigned char*)image.memptr() + y * image.width() * 3, static_cast<unsigned char*>(imageFrame.scanLine(y)), imageFrame.width() * 3);
}
return 0;
}
int QtGrabber::updateScreenDimensions(bool force)
{
if(!_screen)
if(_screen == nullptr)
{
return -1;
}
const QRect& geo = _screen->geometry();
QRect geo;
if (_isVirtual)
{
geo = _screen->virtualGeometry();
}
else
{
geo = _screen->geometry();
}
if (!force && _width == geo.width() && _height == geo.height())
{
// No update required
@@ -125,7 +197,8 @@ int QtGrabber::updateScreenDimensions(bool force)
_width = geo.width();
_height = geo.height();
int width=0, height=0;
int width=0;
int height=0;
// Image scaling is performed by Qt
width = (_width > (_cropLeft + _cropRight))
@@ -177,11 +250,6 @@ void QtGrabber::setVideoMode(VideoMode mode)
updateScreenDimensions(true);
}
void QtGrabber::setPixelDecimation(int pixelDecimation)
{
_pixelDecimation = pixelDecimation;
}
void QtGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
@@ -190,9 +258,108 @@ void QtGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned crop
void QtGrabber::setDisplayIndex(int index)
{
if(_display != unsigned(index))
if (_display != index)
{
_display = unsigned(index);
if (index <= _numberOfSDisplays)
{
_display = index;
}
else {
_display = 0;
}
setupDisplay();
}
}
QJsonObject QtGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if ( open() )
{
QList<QScreen*> screens = QGuiApplication::screens();
inputsDiscovered["device"] = "qt";
inputsDiscovered["device_name"] = "QT";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
if (!screens.isEmpty())
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
for (int i = 0; i < screens.size(); ++i)
{
QJsonObject in;
QString name = screens.at(i)->name();
int pos = name.lastIndexOf('\\');
if (pos != -1)
{
name = name.right(name.length()-pos-1);
}
in["name"] = name;
in["inputIdx"] = i;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
resolution["width"] = screens.at(i)->size().width();
resolution["height"] = screens.at(i)->size().height();
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
if (screens.at(0)->size() != screens.at(0)->virtualSize())
{
QJsonObject in;
in["name"] = "All Displays";
in["inputIdx"] = screens.size();
in["virtual"] = true;
QJsonArray formats;
QJsonObject format;
QJsonArray resolutionArray;
QJsonObject resolution;
resolution["width"] = screens.at(0)->virtualSize().width();
resolution["height"] = screens.at(0)->virtualSize().height();
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
inputsDiscovered["video_inputs"] = video_inputs;
}
else
{
DebugIf(verbose, _log, "No displays found to capture from!");
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
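A hedged sketch of how a caller walks the structure built above (key names match those set in this function; constructor arguments and the display name are placeholders):
// sketch only: crop 0, pixel decimation 8, display 0
QtGrabber grabber(0, 0, 0, 0, 8, 0);
QJsonObject discovered = grabber.discover(QJsonObject());
for (const QJsonValue& value : discovered["video_inputs"].toArray())
{
	const QJsonObject input = value.toObject();
	int idx      = input["inputIdx"].toInt();   // equals screens.size() for the virtual "All Displays" entry
	QString name = input["name"].toString();    // e.g. "DISPLAY1" after the backslash strip above
}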


@@ -5,6 +5,11 @@ QtWrapper::QtWrapper(int cropLeft, int cropRight, int cropTop, int cropBottom, i
, _grabber(cropLeft, cropRight, cropTop, cropBottom, pixelDecimation, display)
{}
bool QtWrapper::open()
{
return _grabber.open();
}
void QtWrapper::action()
{
transferFrame(_grabber);


@@ -1,18 +0,0 @@
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/v4l2)
FILE ( GLOB V4L2_SOURCES "${CURRENT_HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" )
add_library(v4l2-grabber ${V4L2_SOURCES} )
target_link_libraries(v4l2-grabber
hyperion
${QT_LIBRARIES}
)
if(TURBOJPEG_FOUND)
target_link_libraries(v4l2-grabber ${TurboJPEG_LIBRARY})
elseif (JPEG_FOUND)
target_link_libraries(v4l2-grabber ${JPEG_LIBRARY})
endif(TURBOJPEG_FOUND)


@@ -1,156 +0,0 @@
#include <QMetaType>
#include <grabber/V4L2Wrapper.h>
// qt
#include <QTimer>
V4L2Wrapper::V4L2Wrapper(const QString &device,
unsigned grabWidth,
unsigned grabHeight,
unsigned fps,
unsigned input,
VideoStandard videoStandard,
PixelFormat pixelFormat,
int pixelDecimation)
: GrabberWrapper("V4L2:"+device, &_grabber, grabWidth, grabHeight, 10)
, _grabber(device,
grabWidth,
grabHeight,
fps,
input,
videoStandard,
pixelFormat,
pixelDecimation)
{
_ggrabber = &_grabber;
// register the image type
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
// Handle the image in the captured thread using a direct connection
connect(&_grabber, &V4L2Grabber::newFrame, this, &V4L2Wrapper::newFrame, Qt::DirectConnection);
connect(&_grabber, &V4L2Grabber::readError, this, &V4L2Wrapper::readError, Qt::DirectConnection);
}
V4L2Wrapper::~V4L2Wrapper()
{
stop();
}
bool V4L2Wrapper::start()
{
return ( _grabber.start() && GrabberWrapper::start());
}
void V4L2Wrapper::stop()
{
_grabber.stop();
GrabberWrapper::stop();
}
void V4L2Wrapper::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold)
{
_grabber.setSignalThreshold( redSignalThreshold, greenSignalThreshold, blueSignalThreshold, 50);
}
void V4L2Wrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
_grabber.setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
void V4L2Wrapper::setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax)
{
_grabber.setSignalDetectionOffset(verticalMin, horizontalMin, verticalMax, horizontalMax);
}
void V4L2Wrapper::newFrame(const Image<ColorRgb> &image)
{
emit systemImage(_grabberName, image);
}
void V4L2Wrapper::readError(const char* err)
{
Error(_log, "stop grabber, because reading device failed. (%s)", err);
stop();
}
void V4L2Wrapper::action()
{
// dummy as v4l get notifications from stream
}
void V4L2Wrapper::setSignalDetectionEnable(bool enable)
{
_grabber.setSignalDetectionEnable(enable);
}
bool V4L2Wrapper::getSignalDetectionEnable() const
{
return _grabber.getSignalDetectionEnabled();
}
void V4L2Wrapper::setCecDetectionEnable(bool enable)
{
_grabber.setCecDetectionEnable(enable);
}
bool V4L2Wrapper::getCecDetectionEnable() const
{
return _grabber.getCecDetectionEnabled();
}
void V4L2Wrapper::setDeviceVideoStandard(const QString& device, VideoStandard videoStandard)
{
_grabber.setDeviceVideoStandard(device, videoStandard);
}
void V4L2Wrapper::handleCecEvent(CECEvent event)
{
_grabber.handleCecEvent(event);
}
void V4L2Wrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::V4L2 && _grabberName.startsWith("V4L"))
{
// extract settings
const QJsonObject& obj = config.object();
// pixel decimation for v4l
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
// crop for v4l
_grabber.setCropping(
obj["cropLeft"].toInt(0),
obj["cropRight"].toInt(0),
obj["cropTop"].toInt(0),
obj["cropBottom"].toInt(0));
// device input
_grabber.setInput(obj["input"].toInt(0));
// device resolution
_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0));
// device framerate
_grabber.setFramerate(obj["fps"].toInt(15));
// CEC Standby
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
_grabber.setSignalDetectionOffset(
obj["sDHOffsetMin"].toDouble(0.25),
obj["sDVOffsetMin"].toDouble(0.25),
obj["sDHOffsetMax"].toDouble(0.75),
obj["sDVOffsetMax"].toDouble(0.75));
_grabber.setSignalThreshold(
obj["redSignalThreshold"].toDouble(0.0)/100.0,
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
obj["blueSignalThreshold"].toDouble(0.0)/100.0);
_grabber.setDeviceVideoStandard(
obj["device"].toString("auto"),
parseVideoStandard(obj["standard"].toString("NO_CHANGE")));
}
}


@@ -0,0 +1,23 @@
# Common cmake definition for external video grabber
# Define the wrapper/header/source locations and collect them
SET(WRAPPER_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video)
SET(HEADER_DIR ${CMAKE_SOURCE_DIR}/include/grabber)
if (ENABLE_MF)
project(mf-grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/mediafoundation)
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/MF*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
elseif(ENABLE_V4L2)
project(v4l2-grabber)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/grabber/video/v4l2)
FILE (GLOB SOURCES "${WRAPPER_DIR}/*.cpp" "${HEADER_DIR}/Video*.h" "${HEADER_DIR}/V4L2*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp")
endif()
add_library(${PROJECT_NAME} ${SOURCES})
target_link_libraries(${PROJECT_NAME} hyperion ${QT_LIBRARIES})
if(TURBOJPEG_FOUND)
target_link_libraries(${PROJECT_NAME} ${TurboJPEG_LIBRARY})
elseif (JPEG_FOUND)
target_link_libraries(${PROJECT_NAME} ${JPEG_LIBRARY})
endif()


@@ -0,0 +1,134 @@
#include <QMetaType>
#include <grabber/VideoWrapper.h>
// qt
#include <QTimer>
VideoWrapper::VideoWrapper()
#if defined(ENABLE_V4L2)
: GrabberWrapper("V4L2", &_grabber, 0, 0, 10)
#elif defined(ENABLE_MF)
: GrabberWrapper("V4L2:MEDIA_FOUNDATION", &_grabber, 0, 0, 10)
#endif
, _grabber()
{
// register the image type
qRegisterMetaType<Image<ColorRgb>>("Image<ColorRgb>");
// Handle the image in the captured thread (Media Foundation/V4L2) using a direct connection
connect(&_grabber, SIGNAL(newFrame(const Image<ColorRgb>&)), this, SLOT(newFrame(const Image<ColorRgb>&)), Qt::DirectConnection);
connect(&_grabber, SIGNAL(readError(const char*)), this, SLOT(readError(const char*)), Qt::DirectConnection);
}
VideoWrapper::~VideoWrapper()
{
stop();
}
bool VideoWrapper::start()
{
return (_grabber.prepare() && _grabber.start() && GrabberWrapper::start());
}
void VideoWrapper::stop()
{
_grabber.stop();
GrabberWrapper::stop();
}
#if defined(ENABLE_CEC) && !defined(ENABLE_MF)
void VideoWrapper::handleCecEvent(CECEvent event)
{
_grabber.handleCecEvent(event);
}
#endif
void VideoWrapper::handleSettingsUpdate(settings::type type, const QJsonDocument& config)
{
if(type == settings::V4L2 && _grabberName.startsWith("V4L2"))
{
// extract settings
const QJsonObject& obj = config.object();
// Device
_grabber.setDevice(obj["device"].toString("auto"));
// Device input
_grabber.setInput(obj["input"].toInt(0));
// Device resolution
_grabber.setWidthHeight(obj["width"].toInt(0), obj["height"].toInt(0));
// Device framerate
_grabber.setFramerate(obj["fps"].toInt(15));
// Device encoding format
_grabber.setEncoding(obj["encoding"].toString("NO_CHANGE"));
// Video standard
_grabber.setVideoStandard(parseVideoStandard(obj["standard"].toString("NO_CHANGE")));
// Image size decimation
_grabber.setPixelDecimation(obj["sizeDecimation"].toInt(8));
// Flip mode
_grabber.setFlipMode(parseFlipMode(obj["flip"].toString("NO_CHANGE")));
// Image cropping
_grabber.setCropping(
obj["cropLeft"].toInt(0),
obj["cropRight"].toInt(0),
obj["cropTop"].toInt(0),
obj["cropBottom"].toInt(0));
// Brightness, Contrast, Saturation, Hue
_grabber.setBrightnessContrastSaturationHue(
obj["hardware_brightness"].toInt(0),
obj["hardware_contrast"].toInt(0),
obj["hardware_saturation"].toInt(0),
obj["hardware_hue"].toInt(0));
#if defined(ENABLE_CEC) && defined(ENABLE_V4L2)
// CEC Standby
_grabber.setCecDetectionEnable(obj["cecDetection"].toBool(true));
#endif
// Software frame skipping
_grabber.setFpsSoftwareDecimation(obj["fpsSoftwareDecimation"].toInt(1));
// Signal detection
_grabber.setSignalDetectionEnable(obj["signalDetection"].toBool(true));
_grabber.setSignalDetectionOffset(
obj["sDHOffsetMin"].toDouble(0.25),
obj["sDVOffsetMin"].toDouble(0.25),
obj["sDHOffsetMax"].toDouble(0.75),
obj["sDVOffsetMax"].toDouble(0.75));
_grabber.setSignalThreshold(
obj["redSignalThreshold"].toDouble(0.0)/100.0,
obj["greenSignalThreshold"].toDouble(0.0)/100.0,
obj["blueSignalThreshold"].toDouble(0.0)/100.0,
obj["noSignalCounterThreshold"].toInt(50));
// Reload the Grabber if any settings have been changed that require it
_grabber.reload();
}
}
void VideoWrapper::newFrame(const Image<ColorRgb> &image)
{
emit systemImage(_grabberName, image);
}
void VideoWrapper::readError(const char* err)
{
Error(_log, "Stop grabber, because reading device failed. (%s)", err);
stop();
}
void VideoWrapper::action()
{
// dummy; the grabber receives its notifications directly from the stream
}
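A rough usage sketch for orientation (the daemon normally owns this wiring; the JSON keys mirror handleSettingsUpdate() above, while the device, fps and decimation values are placeholders and the direct slot call is an assumption for the example):
// Illustrative only - not part of this change.
VideoWrapper videoWrapper;
QJsonObject cfg;
cfg["device"] = "auto";            // or an explicit path such as "/dev/video0"
cfg["fps"] = 25;                   // capture framerate
cfg["sizeDecimation"] = 8;         // image size decimation
videoWrapper.handleSettingsUpdate(settings::V4L2, QJsonDocument(cfg));
if (!videoWrapper.start())
{
	// start() returns false when prepare()/start() of the underlying grabber fails
}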

View File

@@ -44,9 +44,11 @@ public:
, _grabber(grabber)
, _bEOS(FALSE)
, _hrStatus(S_OK)
, _isBusy(false)
, _transform(nullptr)
, _pixelformat(PixelFormat::NO_CHANGE)
{
// Initialize critical section.
InitializeCriticalSection(&_critsec);
}
@@ -78,21 +80,29 @@ public:
// IMFSourceReaderCallback methods
STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD /*dwStreamIndex*/,
DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample)
DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample* pSample)
{
EnterCriticalSection(&_critsec);
_isBusy = true;
if(dwStreamFlags & MF_SOURCE_READERF_STREAMTICK)
if (_grabber->_sourceReader == nullptr)
{
_isBusy = false;
LeaveCriticalSection(&_critsec);
return S_OK;
}
if (dwStreamFlags & MF_SOURCE_READERF_STREAMTICK)
{
Debug(_grabber->_log, "Skipping stream gap");
LeaveCriticalSection(&_critsec);
_grabber->_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
_grabber->_sourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, nullptr, nullptr, nullptr, nullptr);
return S_OK;
}
}
if (dwStreamFlags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED)
{
IMFMediaType *type = nullptr;
IMFMediaType* type = nullptr;
GUID format;
_grabber->_sourceReader->GetNativeMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, MF_SOURCE_READER_CURRENT_TYPE_INDEX, &type);
type->GetGUID(MF_MT_SUBTYPE, &format);
@@ -103,7 +113,7 @@ public:
if (dwStreamFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
{
IMFMediaType *type = nullptr;
IMFMediaType* type = nullptr;
GUID format;
_grabber->_sourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, &type);
type->GetGUID(MF_MT_SUBTYPE, &format);
@@ -119,7 +129,7 @@ public:
{
_hrStatus = hrStatus;
_com_error error(_hrStatus);
Error(_grabber->_log, "Source Reader error => %s", error.ErrorMessage());
Error(_grabber->_log, "%s", error.ErrorMessage());
goto done;
}
@@ -129,7 +139,7 @@ public:
goto done;
}
if(_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::NO_CHANGE)
if (_pixelformat != PixelFormat::MJPEG && _pixelformat != PixelFormat::BGR24 && _pixelformat != PixelFormat::NO_CHANGE)
pSample = TransformSample(_transform, pSample);
_hrStatus = pSample->ConvertToContiguousBuffer(&buffer);
@@ -150,7 +160,7 @@ public:
goto done;
}
_grabber->receive_image(data,currentLength);
_grabber->receive_image(data, currentLength);
_hrStatus = buffer->Unlock();
if (FAILED(_hrStatus))
@@ -165,6 +175,7 @@ public:
if (MF_SOURCE_READERF_ENDOFSTREAM & dwStreamFlags)
_bEOS = TRUE; // Reached the end of the stream.
_isBusy = false;
LeaveCriticalSection(&_critsec);
return _hrStatus;
}
@@ -172,11 +183,11 @@ public:
HRESULT SourceReaderCB::InitializeVideoEncoder(IMFMediaType* type, PixelFormat format)
{
_pixelformat = format;
if (format == PixelFormat::MJPEG || format == PixelFormat::NO_CHANGE)
if (format == PixelFormat::MJPEG || format == PixelFormat::BGR24 || format == PixelFormat::NO_CHANGE)
return S_OK;
// Variable declaration
IMFMediaType *output = nullptr;
IMFMediaType* output = nullptr;
DWORD mftStatus = 0;
QString error = "";
@@ -269,7 +280,16 @@ public:
return _hrStatus;
}
STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) { return S_OK; }
BOOL SourceReaderCB::isBusy()
{
EnterCriticalSection(&_critsec);
BOOL result = _isBusy;
LeaveCriticalSection(&_critsec);
return result;
}
STDMETHODIMP OnEvent(DWORD, IMFMediaEvent*) { return S_OK; }
STDMETHODIMP OnFlush(DWORD) { return S_OK; }
private:
@@ -282,13 +302,16 @@ private:
}
SAFE_RELEASE(_transform);
// Delete critical section.
DeleteCriticalSection(&_critsec);
}
IMFSample* SourceReaderCB::TransformSample(IMFTransform* transform, IMFSample* in_sample)
{
IMFSample* result = nullptr;
IMFMediaBuffer* out_buffer = nullptr;
MFT_OUTPUT_DATA_BUFFER outputDataBuffer = {0};
MFT_OUTPUT_DATA_BUFFER outputDataBuffer = { 0 };
// Process the input sample
_hrStatus = transform->ProcessInput(0, in_sample, 0);
@@ -371,4 +394,5 @@ private:
HRESULT _hrStatus;
IMFTransform* _transform;
PixelFormat _pixelformat;
std::atomic<bool> _isBusy;
};

View File

@@ -1,19 +1,14 @@
#include "grabber/MFThread.h"
volatile bool MFThread::_isActive = false;
MFThread::MFThread()
: _isBusy(false)
, _semaphore(1)
, _localData(nullptr)
: _localData(nullptr)
, _scalingFactorsCount(0)
, _scalingFactors(nullptr)
, _transform(nullptr)
, _decompress(nullptr)
, _xform(nullptr)
, _imageResampler()
{
}
{}
MFThread::~MFThread()
{
@@ -28,12 +23,11 @@ MFThread::~MFThread()
}
void MFThread::setup(
unsigned int threadIndex, PixelFormat pixelFormat, uint8_t* sharedData,
PixelFormat pixelFormat, uint8_t* sharedData,
int size, int width, int height, int lineLength,
int subsamp, unsigned cropLeft, unsigned cropTop, unsigned cropBottom, unsigned cropRight,
VideoMode videoMode, FlipMode flipMode, int currentFrame, int pixelDecimation)
VideoMode videoMode, FlipMode flipMode, int pixelDecimation)
{
_threadIndex = threadIndex;
_lineLength = lineLength;
_pixelFormat = pixelFormat;
_size = (unsigned long) size;
@@ -45,7 +39,6 @@ void MFThread::setup(
_cropBottom = cropBottom;
_cropRight = cropRight;
_flipMode = flipMode;
_currentFrame = currentFrame;
_pixelDecimation = pixelDecimation;
_imageResampler.setVideoMode(videoMode);
@@ -61,9 +54,10 @@ void MFThread::setup(
memcpy(_localData, sharedData, size);
}
void MFThread::run()
void MFThread::process()
{
if (_isActive && _width > 0 && _height > 0)
_busy = true;
if (_width > 0 && _height > 0)
{
if (_pixelFormat == PixelFormat::MJPEG)
{
@@ -85,31 +79,10 @@ void MFThread::run()
Image<ColorRgb> image = Image<ColorRgb>();
_imageResampler.processImage(_localData, _width, _height, _lineLength, PixelFormat::BGR24, image);
emit newFrame(_threadIndex, image, _currentFrame);
emit newFrame(image);
}
}
}
bool MFThread::isBusy()
{
bool temp;
_semaphore.acquire();
if (_isBusy)
temp = true;
else
{
temp = false;
_isBusy = true;
}
_semaphore.release();
return temp;
}
void MFThread::noBusy()
{
_semaphore.acquire();
_isBusy = false;
_semaphore.release();
_busy = false;
}
void MFThread::processImageMjpeg()
@@ -176,7 +149,7 @@ void MFThread::processImageMjpeg()
// got image, process it
if ( !(_cropLeft > 0 || _cropTop > 0 || _cropBottom > 0 || _cropRight > 0))
emit newFrame(_threadIndex, srcImage, _currentFrame);
emit newFrame(srcImage);
else
{
// calculate the output size
@@ -200,6 +173,6 @@ void MFThread::processImageMjpeg()
}
// emit
emit newFrame(_threadIndex, destImage, _currentFrame);
emit newFrame(destImage);
}
}

View File

@@ -27,37 +27,41 @@
#define CLEAR(x) memset(&(x), 0, sizeof(x))
#ifndef V4L2_CAP_META_CAPTURE
#define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility.
#define V4L2_CAP_META_CAPTURE 0x00800000 // Specified in kernel header v4.16. Required for backward compatibility.
#endif
// Constants
namespace { const bool verbose = false; }
static PixelFormat GetPixelFormat(const unsigned int format)
{
if (format == V4L2_PIX_FMT_RGB32) return PixelFormat::RGB32;
if (format == V4L2_PIX_FMT_RGB24) return PixelFormat::BGR24;
if (format == V4L2_PIX_FMT_YUYV) return PixelFormat::YUYV;
if (format == V4L2_PIX_FMT_UYVY) return PixelFormat::UYVY;
if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG;
if (format == V4L2_PIX_FMT_NV12) return PixelFormat::NV12;
if (format == V4L2_PIX_FMT_YUV420) return PixelFormat::I420;
#ifdef HAVE_JPEG_DECODER
if (format == V4L2_PIX_FMT_MJPEG) return PixelFormat::MJPEG;
#endif
return PixelFormat::NO_CHANGE;
};
V4L2Grabber::V4L2Grabber(const QString & device, unsigned width, unsigned height, unsigned fps, unsigned input, VideoStandard videoStandard, PixelFormat pixelFormat, int pixelDecimation)
: Grabber("V4L2:"+device)
, _deviceName()
, _videoStandard(videoStandard)
V4L2Grabber::V4L2Grabber()
: Grabber("V4L2")
, _currentDeviceName("none")
, _newDeviceName("none")
, _ioMethod(IO_METHOD_MMAP)
, _fileDescriptor(-1)
, _buffers()
, _pixelFormat(pixelFormat)
, _pixelDecimation(pixelDecimation)
, _pixelFormat(PixelFormat::NO_CHANGE)
, _pixelFormatConfig(PixelFormat::NO_CHANGE)
, _lineLength(-1)
, _frameByteSize(-1)
, _noSignalCounterThreshold(40)
, _noSignalThresholdColor(ColorRgb{0,0,0})
, _signalDetectionEnabled(true)
, _cecDetectionEnabled(true)
, _cecStandbyActivated(false)
, _signalDetectionEnabled(true)
, _noSignalDetected(false)
, _noSignalCounter(0)
, _x_frac_min(0.25)
@@ -66,17 +70,8 @@ V4L2Grabber::V4L2Grabber(const QString & device, unsigned width, unsigned height
, _y_frac_max(0.75)
, _streamNotifier(nullptr)
, _initialized(false)
, _deviceAutoDiscoverEnabled(false)
, _reload(false)
{
setPixelDecimation(pixelDecimation);
getV4Ldevices();
// init
setInput(input);
setWidthHeight(width, height);
setFramerate(fps);
setDeviceVideoStandard(device, videoStandard);
Debug(_log,"Init pixel format: %i", static_cast<int>(_pixelFormat));
}
V4L2Grabber::~V4L2Grabber()
@@ -89,7 +84,7 @@ void V4L2Grabber::uninit()
// stop if the grabber was not stopped
if (_initialized)
{
Debug(_log,"uninit grabber: %s", QSTRING_CSTR(_deviceName));
Debug(_log,"Uninit grabber: %s", QSTRING_CSTR(_newDeviceName));
stop();
}
}
@@ -98,66 +93,47 @@ bool V4L2Grabber::init()
{
if (!_initialized)
{
getV4Ldevices();
QString v4lDevices_str;
bool noDeviceName = _currentDeviceName.compare("none", Qt::CaseInsensitive) == 0 || _currentDeviceName.compare("auto", Qt::CaseInsensitive) == 0;
// show list only once
if (!_deviceName.startsWith("/dev/"))
// enumerate the video capture devices on the user's system
enumVideoCaptureDevices();
if(noDeviceName)
return false;
if(!_deviceProperties.contains(_currentDeviceName))
{
for (auto& dev: _v4lDevices)
{
v4lDevices_str += "\t"+ dev.first + "\t" + dev.second + "\n";
}
if (!v4lDevices_str.isEmpty())
Info(_log, "available V4L2 devices:\n%s", QSTRING_CSTR(v4lDevices_str));
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
_currentDeviceName = "none";
return false;
}
if (_deviceName == "auto")
bool valid = false;
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == _currentDeviceName && valid == false)
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == _input && valid == false)
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if(enc.key() == _pixelFormat && enc.value().width == _width && enc.value().height == _height && valid == false)
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
if(*fps == _fps && valid == false)
valid = true;
if (!valid)
{
_deviceAutoDiscoverEnabled = true;
_deviceName = "unknown";
Info( _log, "search for usable video devices" );
for (auto& dev: _v4lDevices)
{
_deviceName = dev.first;
if (init())
{
Info(_log, "found usable v4l2 device: %s (%s)",QSTRING_CSTR(dev.first), QSTRING_CSTR(dev.second));
_deviceAutoDiscoverEnabled = false;
return _initialized;
}
}
Info(_log, "no usable device found");
}
else if (!_deviceName.startsWith("/dev/"))
{
for (auto& dev: _v4lDevices)
{
if (_deviceName.toLower() == dev.second.toLower())
{
_deviceName = dev.first;
Info(_log, "found v4l2 device with configured name: %s (%s)", QSTRING_CSTR(dev.second), QSTRING_CSTR(dev.first) );
break;
}
}
}
else
{
Info(_log, "%s v4l device: %s", (_deviceAutoDiscoverEnabled? "test" : "configured"), QSTRING_CSTR(_deviceName));
Debug(_log, "Configured device at '%s' is not available.", QSTRING_CSTR(_currentDeviceName));
_currentDeviceName = "none";
return false;
}
bool opened = false;
try
{
// do not init with unknown device
if (_deviceName != "unknown")
if (open_device())
{
if (open_device())
{
opened = true;
init_device(_videoStandard);
_initialized = true;
}
opened = true;
init_device(_videoStandard);
_initialized = true;
}
}
catch(std::exception& e)
@@ -167,14 +143,15 @@ bool V4L2Grabber::init()
uninit_device();
close_device();
}
ErrorIf( !_deviceAutoDiscoverEnabled, _log, "V4l2 init failed (%s)", e.what());
Error(_log, "V4l2 init failed (%s)", e.what());
}
}
return _initialized;
}
void V4L2Grabber::getV4Ldevices()
void V4L2Grabber::enumVideoCaptureDevices()
{
QDirIterator it("/sys/class/video4linux/", QDirIterator::NoIteratorFlags);
_deviceProperties.clear();
@@ -319,35 +296,12 @@ void V4L2Grabber::getV4Ldevices()
properties.name = devName;
devNameFile.close();
}
_v4lDevices.emplace("/dev/"+it.fileName(), devName);
_deviceProperties.insert("/dev/"+it.fileName(), properties);
}
}
}
void V4L2Grabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
{
_noSignalThresholdColor.red = uint8_t(255*redSignalThreshold);
_noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold);
_noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold);
_noSignalCounterThreshold = qMax(1, noSignalCounterThreshold);
Info(_log, "Signal threshold set to: {%d, %d, %d}", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue );
}
void V4L2Grabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax)
{
// rainbow 16 stripes 0.47 0.2 0.49 0.8
// unicolor: 0.25 0.25 0.75 0.75
_x_frac_min = horizontalMin;
_y_frac_min = verticalMin;
_x_frac_max = horizontalMax;
_y_frac_max = verticalMax;
Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max );
}
bool V4L2Grabber::start()
{
try
@@ -386,23 +340,23 @@ bool V4L2Grabber::open_device()
{
struct stat st;
if (-1 == stat(QSTRING_CSTR(_deviceName), &st))
if (-1 == stat(QSTRING_CSTR(_currentDeviceName), &st))
{
throw_errno_exception("Cannot identify '" + _deviceName + "'");
throw_errno_exception("Cannot identify '" + _currentDeviceName + "'");
return false;
}
if (!S_ISCHR(st.st_mode))
{
throw_exception("'" + _deviceName + "' is no device");
throw_exception("'" + _currentDeviceName + "' is no device");
return false;
}
_fileDescriptor = open(QSTRING_CSTR(_deviceName), O_RDWR | O_NONBLOCK, 0);
_fileDescriptor = open(QSTRING_CSTR(_currentDeviceName), O_RDWR | O_NONBLOCK, 0);
if (-1 == _fileDescriptor)
{
throw_errno_exception("Cannot open '" + _deviceName + "'");
throw_errno_exception("Cannot open '" + _currentDeviceName + "'");
return false;
}
@@ -455,7 +409,7 @@ void V4L2Grabber::init_mmap()
{
if (EINVAL == errno)
{
throw_exception("'" + _deviceName + "' does not support memory mapping");
throw_exception("'" + _currentDeviceName + "' does not support memory mapping");
return;
}
else
@@ -467,7 +421,7 @@ void V4L2Grabber::init_mmap()
if (req.count < 2)
{
throw_exception("Insufficient buffer memory on " + _deviceName);
throw_exception("Insufficient buffer memory on " + _currentDeviceName);
return;
}
@@ -519,7 +473,7 @@ void V4L2Grabber::init_userp(unsigned int buffer_size)
{
if (EINVAL == errno)
{
throw_exception("'" + _deviceName + "' does not support user pointer");
throw_exception("'" + _currentDeviceName + "' does not support user pointer");
return;
}
else
@@ -553,7 +507,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (EINVAL == errno)
{
throw_exception("'" + _deviceName + "' is no V4L2 device");
throw_exception("'" + _currentDeviceName + "' is no V4L2 device");
return;
}
else
@@ -565,7 +519,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
throw_exception("'" + _deviceName + "' is no video capture device");
throw_exception("'" + _currentDeviceName + "' is no video capture device");
return;
}
@@ -575,7 +529,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (!(cap.capabilities & V4L2_CAP_READWRITE))
{
throw_exception("'" + _deviceName + "' does not support read i/o");
throw_exception("'" + _currentDeviceName + "' does not support read i/o");
return;
}
}
@@ -586,7 +540,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
{
if (!(cap.capabilities & V4L2_CAP_STREAMING))
{
throw_exception("'" + _deviceName + "' does not support streaming i/o");
throw_exception("'" + _currentDeviceName + "' does not support streaming i/o");
return;
}
}
@@ -699,16 +653,28 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
// set the requested pixel format
switch (_pixelFormat)
{
case PixelFormat::UYVY:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
case PixelFormat::RGB32:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;
break;
case PixelFormat::BGR24:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
break;
case PixelFormat::YUYV:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
break;
case PixelFormat::RGB32:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;
case PixelFormat::UYVY:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
break;
case PixelFormat::NV12:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
break;
case PixelFormat::I420:
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
break;
#ifdef HAVE_JPEG_DECODER
@@ -727,7 +693,7 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
}
// set custom resolution for width and height if they are not zero
if(_width && _height)
if(_width != 0 && _height != 0)
{
fmt.fmt.pix.width = _width;
fmt.fmt.pix.height = _height;
@@ -772,14 +738,23 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
// check pixel format and frame size
switch (fmt.fmt.pix.pixelformat)
{
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_RGB32:
{
_pixelFormat = PixelFormat::UYVY;
_frameByteSize = _width * _height * 2;
Debug(_log, "Pixel format=UYVY");
_pixelFormat = PixelFormat::RGB32;
_frameByteSize = _width * _height * 4;
Debug(_log, "Pixel format=RGB32");
}
break;
case V4L2_PIX_FMT_RGB24:
{
_pixelFormat = PixelFormat::BGR24;
_frameByteSize = _width * _height * 3;
Debug(_log, "Pixel format=BGR24");
}
break;
case V4L2_PIX_FMT_YUYV:
{
_pixelFormat = PixelFormat::YUYV;
@@ -788,11 +763,27 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
}
break;
case V4L2_PIX_FMT_RGB32:
case V4L2_PIX_FMT_UYVY:
{
_pixelFormat = PixelFormat::RGB32;
_frameByteSize = _width * _height * 4;
Debug(_log, "Pixel format=RGB32");
_pixelFormat = PixelFormat::UYVY;
_frameByteSize = _width * _height * 2;
Debug(_log, "Pixel format=UYVY");
}
break;
case V4L2_PIX_FMT_NV12:
{
_pixelFormat = PixelFormat::NV12;
_frameByteSize = (_width * _height * 6) / 4;
Debug(_log, "Pixel format=NV12");
}
break;
case V4L2_PIX_FMT_YUV420:
{
_pixelFormat = PixelFormat::I420;
_frameByteSize = (_width * _height * 6) / 4;
Debug(_log, "Pixel format=I420");
}
break;
@@ -807,9 +798,9 @@ void V4L2Grabber::init_device(VideoStandard videoStandard)
default:
#ifdef HAVE_JPEG_DECODER
throw_exception("Only pixel formats UYVY, YUYV, RGB32 and MJPEG are supported");
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12, I420 and MJPEG are supported");
#else
throw_exception("Only pixel formats UYVY, YUYV, and RGB32 are supported");
throw_exception("Only pixel formats RGB32, BGR24, YUYV, UYVY, NV12 and I420 are supported");
#endif
return;
}
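For reference, the frame sizes computed above follow directly from the per-pixel byte counts: a hypothetical 1280x720 frame occupies 1280*720*2 = 1,843,200 bytes as YUYV/UYVY, 1280*720*3 = 2,764,800 bytes as BGR24, 1280*720*6/4 = 1,382,400 bytes as NV12/I420 and 1280*720*4 = 3,686,400 bytes as RGB32.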
@@ -992,7 +983,7 @@ int V4L2Grabber::read_frame()
{
throw_errno_exception("VIDIOC_DQBUF");
stop();
getV4Ldevices();
enumVideoCaptureDevices();
}
return 0;
}
@@ -1029,7 +1020,7 @@ int V4L2Grabber::read_frame()
{
throw_errno_exception("VIDIOC_DQBUF");
stop();
getV4Ldevices();
enumVideoCaptureDevices();
}
return 0;
}
@@ -1298,6 +1289,254 @@ int V4L2Grabber::xioctl(int fileDescriptor, int request, void *arg)
return r;
}
void V4L2Grabber::setDevice(const QString& device)
{
if (_currentDeviceName != device)
{
(_initialized)
? _newDeviceName = device
: _currentDeviceName = _newDeviceName = device;
_reload = true;
}
}
bool V4L2Grabber::setInput(int input)
{
if(Grabber::setInput(input))
{
_reload = true;
return true;
}
return false;
}
bool V4L2Grabber::setWidthHeight(int width, int height)
{
if(Grabber::setWidthHeight(width, height))
{
_reload = true;
return true;
}
return false;
}
void V4L2Grabber::setEncoding(QString enc)
{
if(_pixelFormatConfig != parsePixelFormat(enc))
{
_pixelFormatConfig = parsePixelFormat(enc);
if(_initialized)
{
Debug(_log,"Set hardware encoding to: %s", QSTRING_CSTR(enc.toUpper()));
_reload = true;
}
else
_pixelFormat = _pixelFormatConfig;
}
}
void V4L2Grabber::setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue)
{
if(_initialized)
DebugIf(verbose, _log,"TODO: Set brightness to %i, contrast to %i, saturation to %i, hue to %i", brightness, contrast, saturation, hue);
}
void V4L2Grabber::setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold)
{
_noSignalThresholdColor.red = uint8_t(255*redSignalThreshold);
_noSignalThresholdColor.green = uint8_t(255*greenSignalThreshold);
_noSignalThresholdColor.blue = uint8_t(255*blueSignalThreshold);
_noSignalCounterThreshold = qMax(1, noSignalCounterThreshold);
if(_signalDetectionEnabled)
Info(_log, "Signal threshold set to: {%d, %d, %d}", _noSignalThresholdColor.red, _noSignalThresholdColor.green, _noSignalThresholdColor.blue );
}
void V4L2Grabber::setSignalDetectionOffset(double horizontalMin, double verticalMin, double horizontalMax, double verticalMax)
{
// rainbow 16 stripes 0.47 0.2 0.49 0.8
// unicolor: 0.25 0.25 0.75 0.75
_x_frac_min = horizontalMin;
_y_frac_min = verticalMin;
_x_frac_max = horizontalMax;
_y_frac_max = verticalMax;
if(_signalDetectionEnabled)
Info(_log, "Signal detection area set to: %f,%f x %f,%f", _x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max );
}
void V4L2Grabber::setSignalDetectionEnable(bool enable)
{
if (_signalDetectionEnabled != enable)
{
_signalDetectionEnabled = enable;
if(_initialized)
Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled");
}
}
void V4L2Grabber::setCecDetectionEnable(bool enable)
{
if (_cecDetectionEnabled != enable)
{
_cecDetectionEnabled = enable;
if(_initialized)
Info(_log, QString("CEC detection is now %1").arg(enable ? "enabled" : "disabled").toLocal8Bit());
}
}
bool V4L2Grabber::reload(bool force)
{
if (_streamNotifier != nullptr && _streamNotifier->isEnabled() && (_reload || force))
{
Info(_log,"Reloading V4L2 Grabber");
uninit();
_pixelFormat = _pixelFormatConfig;
_newDeviceName = _currentDeviceName;
_reload = false;
return start();
}
return false;
}
#if defined(ENABLE_CEC)
void V4L2Grabber::handleCecEvent(CECEvent event)
{
switch (event)
{
case CECEvent::On :
Debug(_log,"CEC on event received");
_cecStandbyActivated = false;
return;
case CECEvent::Off :
Debug(_log,"CEC off event received");
_cecStandbyActivated = true;
return;
default: break;
}
}
#endif
QJsonArray V4L2Grabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
enumVideoCaptureDevices();
QJsonArray inputsDiscovered;
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
{
QJsonObject device, in;
QJsonArray video_inputs, formats;
device["device"] = it.key();
device["device_name"] = _deviceProperties.value(it.key()).name;
device["type"] = "v4l2";
QMultiMap<QString, int> inputs = QMultiMap<QString, int>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (!inputs.contains(y.value().inputName, y.key()))
inputs.insert(y.value().inputName, y.key());
for (auto input = inputs.begin(); input != inputs.end(); input++)
{
in["name"] = input.key();
in["inputIdx"] = input.value();
QJsonArray standards;
QList<VideoStandard> videoStandards = QList<VideoStandard>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto std = y.value().standards.begin(); std != y.value().standards.end(); std++)
if(!videoStandards.contains(*std))
videoStandards << *std;
for (auto standard : videoStandards)
standards.append(VideoStandard2String(standard));
if (!standards.isEmpty())
in["standards"] = standards;
QList<PixelFormat> encodingFormats = QList<PixelFormat>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if (!encodingFormats.contains(enc.key()))
encodingFormats << enc.key();
for (auto encodingFormat : encodingFormats)
{
QJsonObject format;
QJsonArray resolutionArray;
format["format"] = pixelFormatToString(encodingFormat);
QMultiMap<int, int> deviceResolutions = QMultiMap<int, int>();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if (enc.key() == encodingFormat && !deviceResolutions.contains(enc.value().width, enc.value().height))
deviceResolutions.insert(enc.value().width, enc.value().height);
for (auto width_height = deviceResolutions.begin(); width_height != deviceResolutions.end(); width_height++)
{
QJsonObject resolution;
QJsonArray fps;
resolution["width"] = int(width_height.key());
resolution["height"] = int(width_height.value());
QIntList framerates = QIntList();
for(auto i = _deviceProperties.begin(); i != _deviceProperties.end(); ++i)
if (i.key() == it.key())
for (auto y = i.value().inputs.begin(); y != i.value().inputs.end(); y++)
if (y.key() == input.value())
for (auto enc = y.value().encodingFormats.begin(); enc != y.value().encodingFormats.end(); enc++)
if(enc.key() == encodingFormat && enc.value().width == width_height.key() && enc.value().height == width_height.value())
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
if(!framerates.contains(*fps))
framerates << *fps;
for (auto framerate : framerates)
fps.append(framerate);
resolution["fps"] = fps;
resolutionArray.append(resolution);
}
format["resolutions"] = resolutionArray;
formats.append(format);
}
in["formats"] = formats;
video_inputs.append(in);
}
device["video_inputs"] = video_inputs;
inputsDiscovered.append(device);
}
_deviceProperties.clear();
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}
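The array assembled above nests inputs, formats, resolutions and framerates per device. One discovered entry could look roughly like this (keys as used above; every value is invented for illustration):
// [ { "device": "/dev/video0",
//     "device_name": "Example USB capture card",
//     "type": "v4l2",
//     "video_inputs": [ { "name": "Input 1", "inputIdx": 0,
//                         "formats": [ { "format": "YUYV",
//                                        "resolutions": [ { "width": 1280, "height": 720,
//                                                           "fps": [ 30, 60 ] } ] } ] } ] } ]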
void V4L2Grabber::enumFrameIntervals(QList<int> &framerates, int fileDescriptor, int pixelformat, int width, int height)
{
// collect available frame rates
@@ -1349,188 +1588,3 @@ void V4L2Grabber::enumFrameIntervals(QList<int> &framerates, int fileDescriptor,
framerates.append(streamparms.parm.capture.timeperframe.denominator / streamparms.parm.capture.timeperframe.numerator);
}
}
void V4L2Grabber::setSignalDetectionEnable(bool enable)
{
if (_signalDetectionEnabled != enable)
{
_signalDetectionEnabled = enable;
Info(_log, "Signal detection is now %s", enable ? "enabled" : "disabled");
}
}
void V4L2Grabber::setCecDetectionEnable(bool enable)
{
if (_cecDetectionEnabled != enable)
{
_cecDetectionEnabled = enable;
Info(_log, QString("CEC detection is now %1").arg(enable ? "enabled" : "disabled").toLocal8Bit());
}
}
void V4L2Grabber::setPixelDecimation(int pixelDecimation)
{
if (_pixelDecimation != pixelDecimation)
{
_pixelDecimation = pixelDecimation;
_imageResampler.setHorizontalPixelDecimation(pixelDecimation);
_imageResampler.setVerticalPixelDecimation(pixelDecimation);
}
}
void V4L2Grabber::setDeviceVideoStandard(QString device, VideoStandard videoStandard)
{
if (_deviceName != device || _videoStandard != videoStandard)
{
// extract input of device
QChar input = device.at(device.size() - 1);
_input = input.isNumber() ? input.digitValue() : -1;
bool started = _initialized;
uninit();
_deviceName = device;
_videoStandard = videoStandard;
if(started) start();
}
}
bool V4L2Grabber::setInput(int input)
{
if(Grabber::setInput(input))
{
bool started = _initialized;
uninit();
if(started) start();
return true;
}
return false;
}
bool V4L2Grabber::setWidthHeight(int width, int height)
{
if(Grabber::setWidthHeight(width,height))
{
bool started = _initialized;
uninit();
if(started) start();
return true;
}
return false;
}
bool V4L2Grabber::setFramerate(int fps)
{
if(Grabber::setFramerate(fps))
{
bool started = _initialized;
uninit();
if(started) start();
return true;
}
return false;
}
QStringList V4L2Grabber::getDevices() const
{
QStringList result = QStringList();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
result << it.key();
return result;
}
QString V4L2Grabber::getDeviceName(const QString& devicePath) const
{
return _deviceProperties.value(devicePath).name;
}
QMultiMap<QString, int> V4L2Grabber::getDeviceInputs(const QString& devicePath) const
{
QMultiMap<QString, int> result = QMultiMap<QString, int>();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (!result.contains(input.value().inputName, input.key()))
result.insert(input.value().inputName, input.key());
return result;
}
QList<VideoStandard> V4L2Grabber::getAvailableDeviceStandards(const QString& devicePath, const int& deviceInput) const
{
QList<VideoStandard> result =QList<VideoStandard>();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto standard = input.value().standards.begin(); standard != input.value().standards.end(); standard++)
if(!result.contains(*standard))
result << *standard;
return result;
}
QStringList V4L2Grabber::getAvailableEncodingFormats(const QString& devicePath, const int& deviceInput) const
{
QStringList result = QStringList();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto enc = input.value().encodingFormats.begin(); enc != input.value().encodingFormats.end(); enc++)
if (!result.contains(pixelFormatToString(enc.key()).toLower(), Qt::CaseInsensitive))
result << pixelFormatToString(enc.key()).toLower();
return result;
}
QMultiMap<int, int> V4L2Grabber::getAvailableDeviceResolutions(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat) const
{
QMultiMap<int, int> result = QMultiMap<int, int>();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto enc = input.value().encodingFormats.begin(); enc != input.value().encodingFormats.end(); enc++)
if (!result.contains(enc.value().width, enc.value().height))
result.insert(enc.value().width, enc.value().height);
return result;
}
QIntList V4L2Grabber::getAvailableDeviceFramerates(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat, const unsigned width, const unsigned height) const
{
QIntList result = QIntList();
for(auto it = _deviceProperties.begin(); it != _deviceProperties.end(); ++it)
if (it.key() == devicePath)
for (auto input = it.value().inputs.begin(); input != it.value().inputs.end(); input++)
if (input.key() == deviceInput)
for (auto enc = input.value().encodingFormats.begin(); enc != input.value().encodingFormats.end(); enc++)
if(enc.key() == encFormat && enc.value().width == width && enc.value().height == height)
for (auto fps = enc.value().framerates.begin(); fps != enc.value().framerates.end(); fps++)
if(!result.contains(*fps))
result << *fps;
return result;
}
void V4L2Grabber::handleCecEvent(CECEvent event)
{
switch (event)
{
case CECEvent::On :
Debug(_log,"CEC on event received");
_cecStandbyActivated = false;
return;
case CECEvent::Off :
Debug(_log,"CEC off event received");
_cecStandbyActivated = true;
return;
default: break;
}
}

View File

@@ -4,9 +4,15 @@
#include <xcb/randr.h>
#include <xcb/xcb_event.h>
// Constants
namespace {
const bool verbose = false;
} //End of constants
X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation)
: Grabber("X11GRABBER", 0, 0, cropLeft, cropRight, cropTop, cropBottom)
, _x11Display(nullptr)
, _xImage(nullptr)
, _pixmap(None)
, _srcFormat(nullptr)
, _dstFormat(nullptr)
@@ -17,8 +23,15 @@ X11Grabber::X11Grabber(int cropLeft, int cropRight, int cropTop, int cropBottom,
, _calculatedHeight(0)
, _src_x(cropLeft)
, _src_y(cropTop)
, _XShmAvailable(false)
, _XRenderAvailable(false)
, _XRandRAvailable(false)
, _isWayland (false)
, _logger{}
, _image(0,0)
{
_logger = Logger::getInstance("X11");
_useImageResampler = false;
_imageResampler.setCropping(0, 0, 0, 0); // cropping is performed by XRender, XShmGetImage or XGetImage
memset(&_pictAttr, 0, sizeof(_pictAttr));
@@ -37,7 +50,10 @@ X11Grabber::~X11Grabber()
void X11Grabber::freeResources()
{
// Cleanup allocated resources of the X11 grab
XDestroyImage(_xImage);
if (_xImage != nullptr)
{
XDestroyImage(_xImage);
}
if (_XRandRAvailable)
{
qApp->removeNativeEventFilter(this);
@@ -100,39 +116,72 @@ void X11Grabber::setupResources()
_imageResampler.setHorizontalPixelDecimation(_pixelDecimation);
_imageResampler.setVerticalPixelDecimation(_pixelDecimation);
}
}
bool X11Grabber::Setup()
bool X11Grabber::open()
{
_x11Display = XOpenDisplay(NULL);
if (_x11Display == nullptr)
bool rc = false;
if (getenv("WAYLAND_DISPLAY") != nullptr)
{
Error(_log, "Unable to open display");
if (getenv("DISPLAY"))
_isWayland = true;
}
else
{
_x11Display = XOpenDisplay(nullptr);
if (_x11Display != nullptr)
{
Error(_log, "%s",getenv("DISPLAY"));
rc = true;
}
}
return rc;
}
bool X11Grabber::setupDisplay()
{
bool result = false;
if ( ! open() )
{
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
else
{
Error(_log, "DISPLAY environment variable not set");
if (getenv("DISPLAY") != nullptr)
{
Error(_log, "Unable to open display [%s]",getenv("DISPLAY"));
}
else
{
Error(_log, "DISPLAY environment variable not set");
}
}
return false;
}
else
{
_window = DefaultRootWindow(_x11Display);
_window = DefaultRootWindow(_x11Display);
int dummy, pixmaps_supported;
int dummy, pixmaps_supported;
_XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy);
_XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy);
_XShmAvailable = XShmQueryExtension(_x11Display);
XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported);
_XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap;
_XRandRAvailable = XRRQueryExtension(_x11Display, &_XRandREventBase, &dummy);
_XRenderAvailable = XRenderQueryExtension(_x11Display, &dummy, &dummy);
_XShmAvailable = XShmQueryExtension(_x11Display);
XShmQueryVersion(_x11Display, &dummy, &dummy, &pixmaps_supported);
_XShmPixmapAvailable = pixmaps_supported && XShmPixmapFormat(_x11Display) == ZPixmap;
Info(_log, QString("XRandR=[%1] XRender=[%2] XShm=[%3] XPixmap=[%4]")
.arg(_XRandRAvailable ? "available" : "unavailable")
.arg(_XRenderAvailable ? "available" : "unavailable")
.arg(_XShmAvailable ? "available" : "unavailable")
.arg(_XShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());
bool result = (updateScreenDimensions(true) >=0);
ErrorIf(!result, _log, "X11 Grabber start failed");
setEnabled(result);
result = (updateScreenDimensions(true) >=0);
ErrorIf(!result, _log, "X11 Grabber start failed");
setEnabled(result);
}
return result;
}
@@ -235,7 +284,8 @@ int X11Grabber::updateScreenDimensions(bool force)
_width = _windowAttr.width;
_height = _windowAttr.height;
int width=0, height=0;
int width=0;
int height=0;
// Image scaling is performed by XRender when available, otherwise by ImageResampler
if (_XRenderAvailable)
@@ -301,19 +351,24 @@ void X11Grabber::setVideoMode(VideoMode mode)
updateScreenDimensions(true);
}
void X11Grabber::setPixelDecimation(int pixelDecimation)
bool X11Grabber::setPixelDecimation(int pixelDecimation)
{
if(_pixelDecimation != pixelDecimation)
if(Grabber::setPixelDecimation(pixelDecimation))
{
_pixelDecimation = pixelDecimation;
updateScreenDimensions(true);
return true;
}
return false;
}
void X11Grabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
if(_x11Display != nullptr) updateScreenDimensions(true); // segfault on init
if(_x11Display != nullptr)
{
updateScreenDimensions(true); // segfault on init
}
}
bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message, long int * /*result*/)
@@ -332,3 +387,78 @@ bool X11Grabber::nativeEventFilter(const QByteArray & eventType, void * message,
return false;
}
QJsonObject X11Grabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if ( open() )
{
inputsDiscovered["device"] = "x11";
inputsDiscovered["device_name"] = "X11";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
if (_x11Display != nullptr)
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
// Iterate through all X screens
for (int i = 0; i < XScreenCount(_x11Display); ++i)
{
_window = DefaultRootWindow(_x11Display);
const Status status = XGetWindowAttributes(_x11Display, _window, &_windowAttr);
if (status == 0)
{
Debug(_log, "Failed to obtain window attributes");
}
else
{
QJsonObject in;
QString displayName;
char* name;
if ( XFetchName(_x11Display, _window, &name) > 0 )
{
displayName = name;
}
else {
displayName = QString("Display:%1").arg(i);
}
in["name"] = displayName;
in["inputIdx"] = i;
QJsonArray formats;
QJsonArray resolutionArray;
QJsonObject format;
QJsonObject resolution;
resolution["width"] = _windowAttr.width;
resolution["height"] = _windowAttr.height;
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
}
if ( !video_inputs.isEmpty() )
{
inputsDiscovered["video_inputs"] = video_inputs;
}
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}

View File

@@ -19,7 +19,7 @@ void X11Wrapper::action()
if (! _init )
{
_init = true;
if ( ! _grabber.Setup() )
if ( ! _grabber.setupDisplay() )
{
stop();
}

View File

@@ -22,7 +22,7 @@ void check_error(xcb_generic_error_t * error)
// Requests with void response type
template<class Request, class ...Args>
typename std::enable_if<std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value, void>::type
query(xcb_connection_t * connection, Args&& ...args)
static query(xcb_connection_t * connection, Args&& ...args)
{
auto cookie = Request::RequestFunction(connection, std::forward<Args>(args)...);
@@ -33,9 +33,8 @@ template<class Request, class ...Args>
// Requests with non-void response type
template<class Request, class ...Args>
typename std::enable_if<!std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value,
std::unique_ptr<typename Request::ResponseType, decltype(&free)>>::type
query(xcb_connection_t * connection, Args&& ...args)
typename std::enable_if<!std::is_same<typename Request::ResponseType, xcb_void_cookie_t>::value, std::unique_ptr<typename Request::ResponseType, decltype(&free)>>::type
static query(xcb_connection_t * connection, Args&& ...args)
{
auto cookie = Request::RequestFunction(connection, std::forward<Args>(args)...);

View File

@@ -21,6 +21,14 @@ struct GetGeometry
static constexpr auto ReplyFunction = xcb_get_geometry_reply;
};
struct GetProperty
{
typedef xcb_get_property_reply_t ResponseType;
static constexpr auto RequestFunction = xcb_get_property;
static constexpr auto ReplyFunction = xcb_get_property_reply;
};
struct ShmQueryVersion
{
typedef xcb_shm_query_version_reply_t ResponseType;

View File

@@ -14,6 +14,11 @@
#include <memory>
// Constants
namespace {
const bool verbose = false;
} //End of constants
#define DOUBLE_TO_FIXED(d) ((xcb_render_fixed_t) ((d) * 65536))
XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom, int pixelDecimation)
@@ -36,6 +41,7 @@ XcbGrabber::XcbGrabber(int cropLeft, int cropRight, int cropTop, int cropBottom,
, _XcbRandRAvailable{}
, _XcbShmAvailable{}
, _XcbShmPixmapAvailable{}
, _isWayland (false)
, _logger{}
, _shmData{}
, _XcbRandREventBase{-1}
@@ -181,48 +187,77 @@ void XcbGrabber::setupShm()
}
}
bool XcbGrabber::Setup()
bool XcbGrabber::open()
{
int screen_num;
_connection = xcb_connect(nullptr, &screen_num);
bool rc = false;
int ret = xcb_connection_has_error(_connection);
if (ret != 0)
if (getenv("WAYLAND_DISPLAY") != nullptr)
{
Error(_logger, "Cannot open display, error %d", ret);
return false;
_isWayland = true;
}
const xcb_setup_t * setup = xcb_get_setup(_connection);
_screen = getScreen(setup, screen_num);
if (!_screen)
else
{
Error(_log, "Unable to open display, screen %d does not exist", screen_num);
_connection = xcb_connect(nullptr, &_screen_num);
if (getenv("DISPLAY"))
Error(_log, "%s", getenv("DISPLAY"));
int ret = xcb_connection_has_error(_connection);
if (ret != 0)
{
Debug(_logger, "Cannot open display, error %d", ret);
}
else
Error(_log, "DISPLAY environment variable not set");
freeResources();
return false;
{
const xcb_setup_t * setup = xcb_get_setup(_connection);
_screen = getScreen(setup, _screen_num);
if ( _screen != nullptr)
{
rc = true;
}
}
}
setupRandr();
setupRender();
setupShm();
return rc;
}
Info(_log, QString("XcbRandR=[%1] XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]")
.arg(_XcbRandRAvailable ? "available" : "unavailable")
.arg(_XcbRenderAvailable ? "available" : "unavailable")
.arg(_XcbShmAvailable ? "available" : "unavailable")
.arg(_XcbShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());
bool XcbGrabber::setupDisplay()
{
bool result = false;
bool result = (updateScreenDimensions(true) >= 0);
ErrorIf(!result, _log, "XCB Grabber start failed");
setEnabled(result);
if ( ! open() )
{
if ( _isWayland )
{
Error(_log, "Grabber does not work under Wayland!");
}
else
{
if (getenv("DISPLAY") != nullptr)
{
Error(_log, "Unable to open display [%s], screen %d does not exist", getenv("DISPLAY"), _screen_num);
}
else
{
Error(_log, "DISPLAY environment variable not set");
}
freeResources();
}
}
else
{
setupRandr();
setupRender();
setupShm();
Info(_log, QString("XcbRandR=[%1] XcbRender=[%2] XcbShm=[%3] XcbPixmap=[%4]")
.arg(_XcbRandRAvailable ? "available" : "unavailable")
.arg(_XcbRenderAvailable ? "available" : "unavailable")
.arg(_XcbShmAvailable ? "available" : "unavailable")
.arg(_XcbShmPixmapAvailable ? "available" : "unavailable")
.toStdString().c_str());
result = (updateScreenDimensions(true) >= 0);
ErrorIf(!result, _log, "XCB Grabber start failed");
setEnabled(result);
}
return result;
}
@@ -394,13 +429,15 @@ void XcbGrabber::setVideoMode(VideoMode mode)
updateScreenDimensions(true);
}
void XcbGrabber::setPixelDecimation(int pixelDecimation)
bool XcbGrabber::setPixelDecimation(int pixelDecimation)
{
if(_pixelDecimation != pixelDecimation)
if(Grabber::setPixelDecimation(pixelDecimation))
{
_pixelDecimation = pixelDecimation;
updateScreenDimensions(true);
return true;
}
return false;
}
void XcbGrabber::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
@@ -459,3 +496,89 @@ xcb_render_pictformat_t XcbGrabber::findFormatForVisual(xcb_visualid_t visual) c
}
return {};
}
QJsonObject XcbGrabber::discover(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject inputsDiscovered;
if ( open() )
{
inputsDiscovered["device"] = "xcb";
inputsDiscovered["device_name"] = "XCB";
inputsDiscovered["type"] = "screen";
QJsonArray video_inputs;
if (_connection != nullptr && _screen != nullptr )
{
QJsonArray fps = { 1, 5, 10, 15, 20, 25, 30, 40, 50, 60 };
const xcb_setup_t * setup = xcb_get_setup(_connection);
xcb_screen_iterator_t it = xcb_setup_roots_iterator(setup);
xcb_screen_t * screen = nullptr;
int i = 0;
// Iterate through all X screens
for (; it.rem > 0; xcb_screen_next(&it))
{
screen = it.data;
auto geometry = query<GetGeometry>(_connection, screen->root);
if (geometry == nullptr)
{
Debug(_log, "Failed to obtain screen geometry for screen [%d]", i);
}
else
{
QJsonObject in;
QString displayName;
auto property = query<GetProperty>(_connection, 0, screen->root, XCB_ATOM_WM_NAME, XCB_ATOM_STRING, 0, 0);
if ( property != nullptr )
{
if ( xcb_get_property_value_length(property.get()) > 0 )
{
displayName = (char *) xcb_get_property_value(property.get());
}
}
if (displayName.isEmpty())
{
displayName = QString("Display:%1").arg(i);
}
in["name"] = displayName;
in["inputIdx"] = i;
QJsonArray formats;
QJsonArray resolutionArray;
QJsonObject format;
QJsonObject resolution;
resolution["width"] = geometry->width;
resolution["height"] = geometry->height;
resolution["fps"] = fps;
resolutionArray.append(resolution);
format["resolutions"] = resolutionArray;
formats.append(format);
in["formats"] = formats;
video_inputs.append(in);
}
++i;
}
if ( !video_inputs.isEmpty() )
{
inputsDiscovered["video_inputs"] = video_inputs;
}
}
}
DebugIf(verbose, _log, "device: [%s]", QString(QJsonDocument(inputsDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return inputsDiscovered;
}

View File

@@ -19,7 +19,7 @@ void XcbWrapper::action()
if (! _init )
{
_init = true;
if ( ! _grabber.Setup() )
if ( ! _grabber.setupDisplay() )
{
stop();
}

View File

@@ -1,22 +1,25 @@
#include <hyperion/Grabber.h>
Grabber::Grabber(const QString& grabberName, int width, int height, int cropLeft, int cropRight, int cropTop, int cropBottom)
: _imageResampler()
: _grabberName(grabberName)
, _imageResampler()
, _useImageResampler(true)
, _videoMode(VideoMode::VIDEO_2D)
, _videoStandard(VideoStandard::NO_CHANGE)
, _pixelDecimation(8)
, _flipMode(FlipMode::NO_CHANGE)
, _width(width)
, _height(height)
, _fps(15)
, _fpsSoftwareDecimation(0)
, _input(-1)
, _cropLeft(0)
, _cropRight(0)
, _cropTop(0)
, _cropBottom(0)
, _enabled(true)
, _log(Logger::getInstance(grabberName.toUpper()))
, _log(Logger::getInstance(_grabberName.toUpper()))
{
Grabber::setVideoMode(VideoMode::VIDEO_2D);
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
}
@@ -36,6 +39,27 @@ void Grabber::setVideoMode(VideoMode mode)
}
}
void Grabber::setVideoStandard(VideoStandard videoStandard)
{
if (_videoStandard != videoStandard)
_videoStandard = videoStandard;
}
bool Grabber::setPixelDecimation(int pixelDecimation)
{
if (_pixelDecimation != pixelDecimation)
{
Debug(_log,"Set image size decimation to %d", pixelDecimation);
_pixelDecimation = pixelDecimation;
_imageResampler.setHorizontalPixelDecimation(pixelDecimation);
_imageResampler.setVerticalPixelDecimation(pixelDecimation);
return true;
}
return false;
}
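As a worked example of the decimation above: a hypothetical 1920x1080 source with the default decimation of 8 is sampled down per axis to 240x135 before the LED colours are derived.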
void Grabber::setFlipMode(FlipMode mode)
{
Debug(_log,"Set flipmode to %s", QSTRING_CSTR(flipModeToString(mode)));
@@ -111,9 +135,20 @@ bool Grabber::setFramerate(int fps)
{
if((fps > 0) && (_fps != fps))
{
Debug(_log,"Set new frames per second to: %i fps", fps);
_fps = fps;
return true;
}
return false;
}
void Grabber::setFpsSoftwareDecimation(int decimation)
{
if((_fpsSoftwareDecimation != decimation))
{
_fpsSoftwareDecimation = decimation;
if(decimation > 0)
Debug(_log,"Skip %i frame per second", decimation);
}
}

View File

@@ -44,14 +44,19 @@ GrabberWrapper::~GrabberWrapper()
bool GrabberWrapper::start()
{
if (!_timer->isActive())
bool rc = false;
if ( open() )
{
// Start the timer with the pre configured interval
Debug(_log,"Grabber start()");
_timer->start();
}
if (!_timer->isActive())
{
// Start the timer with the pre configured interval
Debug(_log,"Grabber start()");
_timer->start();
}
return _timer->isActive();
rc = _timer->isActive();
}
return rc;
}
void GrabberWrapper::stop()
@@ -129,11 +134,16 @@ void GrabberWrapper::setVideoMode(VideoMode mode)
{
if (_ggrabber != nullptr)
{
Info(_log,"setvideomode");
Info(_log,"setVideoMode");
_ggrabber->setVideoMode(mode);
}
}
void GrabberWrapper::setFlipMode(QString flipMode)
{
_ggrabber->setFlipMode(parseFlipMode(flipMode));
}
void GrabberWrapper::setCropping(unsigned cropLeft, unsigned cropRight, unsigned cropTop, unsigned cropBottom)
{
_ggrabber->setCropping(cropLeft, cropRight, cropTop, cropBottom);
@@ -165,7 +175,7 @@ void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocume
_ggrabber->setWidthHeight(obj["width"].toInt(96), obj["height"].toInt(96));
// display index for MAC
_ggrabber->setDisplayIndex(obj["display"].toInt(0));
_ggrabber->setDisplayIndex(obj["input"].toInt(0));
// device path for Framebuffer
_ggrabber->setDevicePath(obj["device"].toString("/dev/fb0"));
@@ -181,7 +191,7 @@ void GrabberWrapper::handleSettingsUpdate(settings::type type, const QJsonDocume
obj["cropBottom"].toInt(0));
// eval new update time
updateTimer(1000/obj["frequency_Hz"].toInt(10));
updateTimer(1000/obj["fps"].toInt(10));
}
}
@@ -221,59 +231,3 @@ void GrabberWrapper::tryStart()
start();
}
}
QStringList GrabberWrapper::getDevices() const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getDevices();
return QStringList();
}
QString GrabberWrapper::getDeviceName(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getDeviceName(devicePath);
return QString();
}
QMultiMap<QString, int> GrabberWrapper::getDeviceInputs(const QString& devicePath) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getDeviceInputs(devicePath);
return QMultiMap<QString, int>();
}
QList<VideoStandard> GrabberWrapper::getAvailableDeviceStandards(const QString& devicePath, const int& deviceInput) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getAvailableDeviceStandards(devicePath, deviceInput);
return QList<VideoStandard>();
}
QStringList GrabberWrapper::getAvailableEncodingFormats(const QString& devicePath, const int& deviceInput) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getAvailableEncodingFormats(devicePath, deviceInput);
return QStringList();
}
QMultiMap<int, int> GrabberWrapper::getAvailableDeviceResolutions(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getAvailableDeviceResolutions(devicePath, deviceInput, encFormat);
return QMultiMap<int, int>();
}
QIntList GrabberWrapper::getAvailableDeviceFramerates(const QString& devicePath, const int& deviceInput, const PixelFormat& encFormat, const unsigned width, const unsigned height) const
{
if(_grabberName.startsWith("V4L"))
return _ggrabber->getAvailableDeviceFramerates(devicePath, deviceInput, encFormat, width, height);
return QIntList();
}

View File

@@ -5,7 +5,6 @@
// qt incl
#include <QDateTime>
#include <QTimer>
#include <QDebug>
// Hyperion includes
#include <hyperion/PriorityMuxer.h>
@@ -13,6 +12,8 @@
// utils
#include <utils/Logger.h>
const int PriorityMuxer::FG_PRIORITY = 1;
const int PriorityMuxer::BG_PRIORITY = 254;
const int PriorityMuxer::LOWEST_PRIORITY = std::numeric_limits<uint8_t>::max();
PriorityMuxer::PriorityMuxer(int ledCount, QObject * parent)
@@ -322,7 +323,7 @@ void PriorityMuxer::setCurrentTime()
newPriority = qMin(newPriority, infoIt->priority);
// call timeTrigger when an effect, color or image is running with timeout > 0; background priority (BG_PRIORITY) and above is excluded
if(infoIt->priority < 254 && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR || infoIt->componentId == hyperion::COMP_IMAGE))
if(infoIt->priority < BG_PRIORITY && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR || infoIt->componentId == hyperion::COMP_IMAGE))
emit signalTimeTrigger(); // as signal to prevent Threading issues
++infoIt;

View File

@@ -16,7 +16,6 @@
"title" : "edt_dev_general_hardwareLedCount_title",
"minimum" : 1,
"default" : 1,
"access" : "expert",
"propertyOrder" : 2
},
"colorOrder" :
@@ -25,9 +24,11 @@
"title" : "edt_dev_general_colorOrder_title",
"enum" : ["rgb", "bgr", "rbg", "brg", "gbr", "grb"],
"default" : "rgb",
"options" : {
"enum_titles" : ["edt_conf_enum_rgb", "edt_conf_enum_bgr", "edt_conf_enum_rbg", "edt_conf_enum_brg", "edt_conf_enum_gbr", "edt_conf_enum_grb"]
"required" : true,
"options": {
"enum_titles": [ "edt_conf_enum_rgb", "edt_conf_enum_bgr", "edt_conf_enum_rbg", "edt_conf_enum_brg", "edt_conf_enum_gbr", "edt_conf_enum_grb" ]
},
"access" : "expert",
"propertyOrder" : 3
}
},

View File

@@ -2,79 +2,87 @@
"type" : "object",
"title" : "edt_conf_fg_heading_title",
"properties": {
"type": {
"available_devices": {
"type": "string",
"title": "edt_conf_fg_type_title",
"enum": [ "auto", "amlogic", "dispmanx", "dx", "framebuffer", "osx", "qt", "x11", "xcb" ],
"options": {
"enum_titles": [ "edt_conf_enum_automatic", "AMLogic", "DispmanX", "DirectX9", "Framebuffer", "OSX", "QT", "X11", "XCB" ]
},
"default": "auto",
"propertyOrder": 1
"title": "edt_conf_v4l2_device_title",
"propertyOrder": 1,
"required": false
},
"display": {
"device": {
"type": "string",
"title": "edt_conf_enum_custom",
"options": {
"hidden": true
},
"required": true,
"comment": "The 'available_devices' settings are dynamically inserted into the WebUI under PropertyOrder '1'.",
"propertyOrder": 2
},
"device_inputs": {
"type": "string",
"title": "edt_conf_v4l2_input_title",
"propertyOrder": 3,
"required": false
},
"input": {
"type": "integer",
"title": "edt_conf_fg_display_title",
"title": "edt_conf_enum_custom",
"minimum": 0,
"default": 0,
"propertyOrder": 2
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 4,
"comment": "The 'device_inputs' settings are dynamically inserted into the WebUI under PropertyOrder '3'."
},
"resolutions": {
"type": "string",
"title": "edt_conf_v4l2_resolution_title",
"propertyOrder": 5,
"required": false
},
"width": {
"type": "integer",
"title": "edt_conf_fg_width_title",
"title": "edt_conf_enum_custom",
"minimum": 10,
"default": 80,
"append": "edt_append_pixel",
"propertyOrder": 3
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 8,
"comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '5'."
},
"height": {
"type": "integer",
"title": "edt_conf_fg_height_title",
"minimum": 10,
"default": 45,
"title": "edt_conf_enum_custom",
"append": "edt_append_pixel",
"propertyOrder": 4
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 9,
"comment": "The 'resolutions' settings are dynamically inserted into the WebUI under PropertyOrder '5'."
},
"frequency_Hz": {
"type": "integer",
"framerates": {
"type": "string",
"title": "edt_conf_fg_frequency_Hz_title",
"propertyOrder": 10,
"required": false
},
"fps": {
"type": "integer",
"title": "edt_conf_enum_custom",
"minimum": 1,
"default": 10,
"append": "edt_append_hz",
"propertyOrder": 5
},
"cropLeft": {
"type": "integer",
"title": "edt_conf_v4l2_cropLeft_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 6
},
"cropRight": {
"type": "integer",
"title": "edt_conf_v4l2_cropRight_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 7
},
"cropTop": {
"type": "integer",
"title": "edt_conf_v4l2_cropTop_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 8
},
"cropBottom": {
"type": "integer",
"title": "edt_conf_v4l2_cropBottom_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 9
"append": "fps",
"options": {
"hidden": true
},
"required": true,
"propertyOrder": 11,
"comment": "The 'framerates' setting is dynamically inserted into the WebUI under PropertyOrder '10'."
},
"pixelDecimation": {
"type": "integer",
@@ -82,9 +90,41 @@
"minimum": 1,
"maximum": 30,
"default": 8,
"propertyOrder": 10
"required": true,
"propertyOrder": 12
},
"cropLeft": {
"type": "integer",
"title": "edt_conf_v4l2_cropLeft_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 13
},
"cropRight": {
"type": "integer",
"title": "edt_conf_v4l2_cropRight_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 14
},
"cropTop": {
"type": "integer",
"title": "edt_conf_v4l2_cropTop_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 15
},
"cropBottom": {
"type": "integer",
"title": "edt_conf_v4l2_cropBottom_title",
"minimum": 0,
"default": 0,
"append": "edt_append_pixel",
"propertyOrder": 16
}
},
"additionalProperties" : false
}

View File

@@ -265,12 +265,13 @@ bool LedDevice::switchOn()
{
if ( _isEnabled &&_isDeviceInitialised )
{
storeState();
if ( powerOn() )
if ( storeState() )
{
_isOn = true;
rc = true;
if ( powerOn() )
{
_isOn = true;
rc = true;
}
}
}
}
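Consolidated, the reworked switchOn() now only powers the device on after its original state was stored successfully; a sketch of the resulting control flow (not the verbatim source):

    bool rc = false;
    if ( _isEnabled && _isDeviceInitialised )
    {
        if ( storeState() )        // capture the current device state first ...
        {
            if ( powerOn() )       // ... and only then switch the device on
            {
                _isOn = true;
                rc = true;
            }
        }
    }
    return rc;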

View File

@@ -13,6 +13,8 @@
// Constants
namespace {
const bool verbose = false;
const bool verbose3 = false;
const QString MULTICAST_GROUP_DEFAULT_ADDRESS = "239.255.255.250";
const quint16 MULTICAST_GROUP_DEFAULT_PORT = 49692;
@@ -272,13 +274,13 @@ void LedDeviceAtmoOrb::setColor(int orbId, const ColorRgb &color, int commandTyp
void LedDeviceAtmoOrb::sendCommand(const QByteArray &bytes)
{
//Debug ( _log, "command: [%s] -> %s:%u", QSTRING_CSTR( QString(bytes.toHex())), QSTRING_CSTR(_groupAddress.toString()), _multiCastGroupPort );
DebugIf(verbose3, _log, "command: [%s] -> %s:%u", QSTRING_CSTR( QString(bytes.toHex())), QSTRING_CSTR(_groupAddress.toString()), _multiCastGroupPort );
_udpSocket->writeDatagram(bytes.data(), bytes.size(), _groupAddress, _multiCastGroupPort);
}
QJsonObject LedDeviceAtmoOrb::discover(const QJsonObject& params)
{
//Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
@@ -353,14 +355,14 @@ QJsonObject LedDeviceAtmoOrb::discover(const QJsonObject& params)
}
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose, _log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
return devicesDiscovered;
}
void LedDeviceAtmoOrb::identify(const QJsonObject& params)
{
//Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
int orbId = 0;
if ( params["id"].isString() )

View File

@@ -1,50 +1,50 @@
#include "LedDeviceCololight.h"
#include <utils/QStringUtils.h>
#include <utils/WaitTime.h>
#include <QUdpSocket>
#include <QHostInfo>
#include <QtEndian>
#include <QEventLoop>
#include <chrono>
// Constants
namespace {
const bool verbose = false;
const bool verbose3 = false;
// Configuration settings
const char CONFIG_HW_LED_COUNT[] = "hardwareLedCount";
const int COLOLIGHT_BEADS_PER_MODULE = 19;
// Cololight discovery service
const int API_DEFAULT_PORT = 8900;
const char DISCOVERY_ADDRESS[] = "255.255.255.255";
const quint16 DISCOVERY_PORT = 12345;
const char DISCOVERY_MESSAGE[] = "Z-SEARCH * \r\n";
constexpr std::chrono::milliseconds DEFAULT_DISCOVERY_TIMEOUT{ 2000 };
constexpr std::chrono::milliseconds DEFAULT_READ_TIMEOUT{ 1000 };
constexpr std::chrono::milliseconds DEFAULT_IDENTIFY_TIME{ 2000 };
const char COLOLIGHT_MODEL[] = "mod";
const char COLOLIGHT_MODEL_TYPE[] = "subkey";
const char COLOLIGHT_MAC[] = "sn";
const char COLOLIGHT_NAME[] = "name";
const char COLOLIGHT_MODEL_IDENTIFIER[] = "OD_WE_QUAN";
} //End of constants
LedDeviceCololight::LedDeviceCololight(const QJsonObject& deviceConfig)
: ProviderUdp(deviceConfig)
, _modelType(-1)
, _ledLayoutType(-1)
, _ledBeadCount(0)
, _distance(0)
, _sequenceNumber(1)
{
_packetFixPart.append(reinterpret_cast<const char*>(PACKET_HEADER), sizeof(PACKET_HEADER));
_packetFixPart.append(reinterpret_cast<const char*>(PACKET_SECU), sizeof(PACKET_SECU));
@@ -186,7 +186,7 @@ bool LedDeviceCololight::getInfo()
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
quint16 ledNum = qFromBigEndian<quint16>(response.data() + 1);
@@ -267,7 +267,7 @@ bool LedDeviceCololight::setColor(const uint32_t color)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -303,7 +303,7 @@ bool LedDeviceCololight::setState(bool isOn)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -327,7 +327,7 @@ bool LedDeviceCololight::setStateDirect(bool isOn)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -381,7 +381,7 @@ bool LedDeviceCololight::setTL1CommandMode(bool isOn)
QByteArray response;
if (readResponse(response))
{
DebugIf(verbose, _log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
DebugIf(verbose,_log, "#[0x%x], Data returned: [%s]", _sequenceNumber, QSTRING_CSTR(toHex(response)));
isCmdOK = true;
}
}
@@ -498,7 +498,7 @@ bool LedDeviceCololight::readResponse(QByteArray& response)
}
else
{
DebugIf(verbose, _log, "No additional data returned");
DebugIf(verbose,_log, "No additional data returned");
}
}
isRequestOK = true;
@@ -605,7 +605,7 @@ QJsonArray LedDeviceCololight::discover()
{
QJsonObject obj;
QString ipAddress = i.key();
const QString& ipAddress = i.key();
obj.insert("ip", ipAddress);
obj.insert("model", i.value().value(COLOLIGHT_MODEL));
obj.insert("type", i.value().value(COLOLIGHT_MODEL_TYPE));
@@ -661,26 +661,27 @@ QJsonObject LedDeviceCololight::discover(const QJsonObject& /*params*/)
devicesDiscovered.insert("discoveryMethod", discoveryMethod);
devicesDiscovered.insert("devices", deviceList);
//Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return devicesDiscovered;
}
QJsonObject LedDeviceCololight::getProperties(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
QString apiHostname = params["host"].toString("");
QString hostName = params["host"].toString("");
quint16 apiPort = static_cast<quint16>(params["port"].toInt(API_DEFAULT_PORT));
QJsonObject propertiesDetails;
if (!apiHostname.isEmpty())
if (!hostName.isEmpty())
{
QJsonObject deviceConfig;
deviceConfig.insert("host", apiHostname);
deviceConfig.insert("host", hostName);
deviceConfig.insert("port", apiPort);
if (ProviderUdp::init(deviceConfig))
{
if (getInfo())
@@ -708,23 +709,23 @@ QJsonObject LedDeviceCololight::getProperties(const QJsonObject& params)
properties.insert("properties", propertiesDetails);
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
return properties;
}
void LedDeviceCololight::identify(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QString apiHostname = params["host"].toString("");
QString hostName = params["host"].toString("");
quint16 apiPort = static_cast<quint16>(params["port"].toInt(API_DEFAULT_PORT));
if (!apiHostname.isEmpty())
if (!hostName.isEmpty())
{
QJsonObject deviceConfig;
deviceConfig.insert("host", apiHostname);
deviceConfig.insert("host", hostName);
deviceConfig.insert("port", apiPort);
if (ProviderUdp::init(deviceConfig))
{
@@ -732,9 +733,7 @@ void LedDeviceCololight::identify(const QJsonObject& params)
{
setEffect(THE_CIRCUS);
QEventLoop loop;
QTimer::singleShot(DEFAULT_IDENTIFY_TIME.count(), &loop, &QEventLoop::quit);
loop.exec();
wait(DEFAULT_IDENTIFY_TIME);
setColor(ColorRgb::BLACK);
}
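A note on the identify() change just above: the explicit QEventLoop/QTimer pair is replaced by the wait() helper from the newly included utils/WaitTime.h. The helper presumably wraps the same pattern it replaces; purely as an assumption, something along these lines:

    // Hypothetical sketch of a wait() helper equivalent to the removed lines;
    // the actual utils/WaitTime.h implementation may differ.
    inline void wait(std::chrono::milliseconds time)
    {
        QEventLoop loop;
        QTimer::singleShot(time.count(), &loop, &QEventLoop::quit);
        loop.exec();
    }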

View File

@@ -5,7 +5,6 @@
#include <utils/QStringUtils.h>
// Qt includes
#include <QEventLoop>
#include <QNetworkReply>
#include <QtEndian>
@@ -78,12 +77,16 @@ const char SSDP_LIGHTPANELS[] = "nanoleaf_aurora:light";
// Nanoleaf Panel Shapetypes
enum SHAPETYPES {
TRIANGLE,
RHYTM,
SQUARE,
CONTROL_SQUARE_PRIMARY,
CONTROL_SQUARE_PASSIVE,
POWER_SUPPLY,
TRIANGLE = 0,
RHYTM = 1,
SQUARE = 2,
CONTROL_SQUARE_PRIMARY = 3,
CONTROL_SQUARE_PASSIVE = 4,
POWER_SUPPLY= 5,
HEXAGON_SHAPES = 7,
TRIANGE_SHAPES = 8,
MINI_TRIANGE_SHAPES = 8,
SHAPES_CONTROLLER = 12
};
// Nanoleaf external control versions
@@ -100,8 +103,8 @@ LedDeviceNanoleaf::LedDeviceNanoleaf(const QJsonObject& deviceConfig)
, _leftRight(true)
, _startPos(0)
, _endPos(0)
, _extControlVersion(EXTCTRLVER_V2),
_panelLedCount(0)
, _extControlVersion(EXTCTRLVER_V2)
, _panelLedCount(0)
{
}
@@ -127,7 +130,7 @@ bool LedDeviceNanoleaf::init(const QJsonObject& deviceConfig)
Info(_log, "Device Nanoleaf does not require rewrites. Refresh time is ignored.");
}
DebugIf(verbose, _log, "deviceConfig: [%s]", QString(QJsonDocument(_devConfig).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "deviceConfig: [%s]", QString(QJsonDocument(_devConfig).toJson(QJsonDocument::Compact)).toUtf8().constData());
bool isInitOK = false;
@@ -164,29 +167,29 @@ bool LedDeviceNanoleaf::init(const QJsonObject& deviceConfig)
// TODO: Allow to handle port dynamically
//Set hostname as per configuration and_defaultHost default port
_hostname = deviceConfig[CONFIG_ADDRESS].toString();
_hostName = deviceConfig[CONFIG_ADDRESS].toString();
_apiPort = API_DEFAULT_PORT;
_authToken = deviceConfig[CONFIG_AUTH_TOKEN].toString();
//If host not configured the init failed
if (_hostname.isEmpty())
if (_hostName.isEmpty())
{
this->setInError("No target hostname nor IP defined");
isInitOK = false;
}
else
{
if (initRestAPI(_hostname, _apiPort, _authToken))
if (initRestAPI(_hostName, _apiPort, _authToken))
{
// Read LedDevice configuration and validate against device configuration
if (initLedsConfiguration())
{
// Set UDP streaming host and port
_devConfig["host"] = _hostname;
_devConfig["host"] = _hostName;
_devConfig["port"] = STREAM_CONTROL_DEFAULT_PORT;
isInitOK = ProviderUdp::init(_devConfig);
Debug(_log, "Hostname/IP : %s", QSTRING_CSTR(_hostname));
Debug(_log, "Hostname/IP : %s", QSTRING_CSTR(_hostName));
Debug(_log, "Port : %d", _port);
}
}
@@ -206,7 +209,8 @@ bool LedDeviceNanoleaf::initLedsConfiguration()
httpResponse response = _restApi->get();
if (response.error())
{
this->setInError(response.getErrorReason());
QString errorReason = QString("Getting device details failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
isInitOK = false;
}
else
@@ -243,16 +247,16 @@ bool LedDeviceNanoleaf::initLedsConfiguration()
int panelshapeType = panelObj[PANEL_SHAPE_TYPE].toInt();
//int panelOrientation = panelObj[PANEL_ORIENTATION].toInt();
DebugIf(verbose, _log, "Panel [%d] (%d,%d) - Type: [%d]", panelId, panelX, panelY, panelshapeType);
DebugIf(verbose,_log, "Panel [%d] (%d,%d) - Type: [%d]", panelId, panelX, panelY, panelshapeType);
// Skip Rhythm panels
if (panelshapeType != RHYTM)
// Skip Rhythm and Shapes controller panels
if (panelshapeType != RHYTM && panelshapeType != SHAPES_CONTROLLER)
{
panelMap[panelY][panelX] = panelId;
}
else
{ // Reset non support/required features
Info(_log, "Rhythm panel skipped.");
Info(_log, "Rhythm/Shape Controller panel skipped.");
}
}
@@ -360,32 +364,31 @@ int LedDeviceNanoleaf::open()
int retval = -1;
_isDeviceReady = false;
QJsonDocument responseDoc = changeToExternalControlMode();
// Resolve port for Light Panels
QJsonObject jsonStreamControllInfo = responseDoc.object();
if (!jsonStreamControllInfo.isEmpty())
QJsonDocument responseDoc;
if (changeToExternalControlMode(responseDoc))
{
//Set default streaming port
_port = static_cast<uchar>(jsonStreamControllInfo[STREAM_CONTROL_PORT].toInt());
}
// Resolve port for Light Panels
QJsonObject jsonStreamControllInfo = responseDoc.object();
if (!jsonStreamControllInfo.isEmpty())
{
//Set default streaming port
_port = static_cast<uchar>(jsonStreamControllInfo[STREAM_CONTROL_PORT].toInt());
}
if (ProviderUdp::open() == 0)
{
// Everything is OK, device is ready
_isDeviceReady = true;
retval = 0;
if (ProviderUdp::open() == 0)
{
// Everything is OK, device is ready
_isDeviceReady = true;
retval = 0;
}
}
return retval;
}
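Consolidated, the reworked open() only falls through to the UDP provider once the switch to external control mode succeeded; a sketch of the resulting flow (not the verbatim source):

    int retval = -1;
    _isDeviceReady = false;

    QJsonDocument responseDoc;
    if ( changeToExternalControlMode(responseDoc) )
    {
        // Light Panels report the streaming port in the mode-change response
        QJsonObject info = responseDoc.object();
        if ( !info.isEmpty() )
        {
            _port = static_cast<uchar>(info[STREAM_CONTROL_PORT].toInt());
        }

        if ( ProviderUdp::open() == 0 )
        {
            _isDeviceReady = true;
            retval = 0;
        }
    }
    return retval;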
QJsonObject LedDeviceNanoleaf::discover(const QJsonObject& /*params*/)
QJsonArray LedDeviceNanoleaf::discover()
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType);
QJsonArray deviceList;
// Discover Nanoleaf Devices
SSDPDiscover discover;
// Search for Canvas and Light-Panels
@@ -399,26 +402,41 @@ QJsonObject LedDeviceNanoleaf::discover(const QJsonObject& /*params*/)
deviceList = discover.getServicesDiscoveredJson();
}
return deviceList;
}
QJsonObject LedDeviceNanoleaf::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType);
QString discoveryMethod("ssdp");
QJsonArray deviceList;
deviceList = discover();
devicesDiscovered.insert("discoveryMethod", discoveryMethod);
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData());
return devicesDiscovered;
}
QJsonObject LedDeviceNanoleaf::getProperties(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
// Get Nanoleaf device properties
QString host = params["host"].toString("");
if (!host.isEmpty())
QString hostName = params["host"].toString("");
if (!hostName.isEmpty())
{
QString authToken = params["token"].toString("");
QString filter = params["filter"].toString("");
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
@@ -443,22 +461,22 @@ QJsonObject LedDeviceNanoleaf::getProperties(const QJsonObject& params)
properties.insert("properties", response.getBody().object());
Debug(_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
}
return properties;
}
void LedDeviceNanoleaf::identify(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QString host = params["host"].toString("");
if (!host.isEmpty())
QString hostName = params["host"].toString("");
if (!hostName.isEmpty())
{
QString authToken = params["token"].toString("");
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName, ":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
@@ -485,26 +503,41 @@ void LedDeviceNanoleaf::identify(const QJsonObject& params)
bool LedDeviceNanoleaf::powerOn()
{
bool on = false;
if (_isDeviceReady)
{
changeToExternalControlMode();
//Power-on Nanoleaf device
_restApi->setPath(API_STATE);
_restApi->put(getOnOffRequest(true));
if (changeToExternalControlMode())
{
//Power-on Nanoleaf device
_restApi->setPath(API_STATE);
httpResponse response = _restApi->put(getOnOffRequest(true));
if (response.error())
{
QString errorReason = QString("Power-on request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
on = false;
}
}
}
return true;
return on;
}
bool LedDeviceNanoleaf::powerOff()
{
bool off = true;
if (_isDeviceReady)
{
//Power-off the Nanoleaf device physically
_restApi->setPath(API_STATE);
_restApi->put(getOnOffRequest(false));
httpResponse response = _restApi->put(getOnOffRequest(false));
if (response.error())
{
QString errorReason = QString("Power-off request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
off = false;
}
}
return true;
return off;
}
QString LedDeviceNanoleaf::getOnOffRequest(bool isOn) const
@@ -513,16 +546,33 @@ QString LedDeviceNanoleaf::getOnOffRequest(bool isOn) const
return QString("{\"%1\":{\"%2\":%3}}").arg(STATE_ON, STATE_ONOFF_VALUE, state);
}
QJsonDocument LedDeviceNanoleaf::changeToExternalControlMode()
bool LedDeviceNanoleaf::changeToExternalControlMode()
{
QJsonDocument resp;
return changeToExternalControlMode(resp);
}
bool LedDeviceNanoleaf::changeToExternalControlMode(QJsonDocument& resp)
{
bool success = false;
Debug(_log, "Set Nanoleaf to External Control (UDP) streaming mode");
_extControlVersion = EXTCTRLVER_V2;
//Enable UDP Mode v2
_restApi->setPath(API_EFFECT);
httpResponse response = _restApi->put(API_EXT_MODE_STRING_V2);
if (response.error())
{
QString errorReason = QString("Change to external control mode failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
}
else
{
resp = response.getBody();
success = true;
}
return response.getBody();
return success;
}
int LedDeviceNanoleaf::write(const std::vector<ColorRgb>& ledValues)

View File

@@ -149,9 +149,15 @@ private:
///
/// @brief Change Nanoleaf device to External Control (UDP) mode
///
/// @return Response from device
///@brief
QJsonDocument changeToExternalControlMode();
/// @return True, if success
bool changeToExternalControlMode();
///
/// @brief Change Nanoleaf device to External Control (UDP) mode
///
/// @param[out] response from device
///
/// @return True, if success
bool changeToExternalControlMode(QJsonDocument& resp);
///
/// @brief Get command to power Nanoleaf device on or off
@@ -161,10 +167,18 @@ private:
///
QString getOnOffRequest(bool isOn) const;
///
/// @brief Discover Nanoleaf devices available (for configuration).
/// Nanoleaf specific ssdp discovery
///
/// @return A JSON structure holding a list of devices found
///
QJsonArray discover();
///REST-API wrapper
ProviderRestApi* _restApi;
QString _hostname;
QString _hostName;
int _apiPort;
QString _authToken;

View File

@@ -115,7 +115,7 @@ CiColor CiColor::rgbToCiColor(double red, double green, double blue, const CiCol
double cy;
double bri;
if(red + green + blue > 0)
if( (red + green + blue) > 0)
{
// Apply gamma correction.
double r = (red > 0.04045) ? pow((red + 0.055) / (1.0 + 0.055), 2.4) : (red / 12.92);
@@ -157,7 +157,7 @@ CiColor CiColor::rgbToCiColor(double red, double green, double blue, const CiCol
CiColor xy = { cx, cy, bri };
if(red + green + blue > 0)
if( (red + green + blue) > 0)
{
// Check if the given XY value is within the color reach of our lamps.
if (!isPointInLampsReach(xy, colorSpace))
@@ -387,8 +387,11 @@ void LedDevicePhilipsHueBridge::log(const char* msg, const char* type, ...) cons
vsnprintf(val, max_val_length, type, args);
va_end(args);
std::string s = msg;
int max = 30;
s.append(max - s.length(), ' ');
size_t max = 30;
if (max > s.length())
{
s.append(max - s.length(), ' ');
}
Debug( _log, "%s: %s", s.c_str(), val );
}
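The guard around the padding above is what actually fixes the call: s.length() is unsigned, so the old unconditional s.append(max - s.length(), ' ') wraps around to a huge count once the message is longer than the pad width, and std::string::append throws std::length_error. A minimal illustration (values assumed):

    std::string s = "a message longer than the thirty character pad width";
    size_t max = 30;
    if (max > s.length())
    {
        s.append(max - s.length(), ' ');   // pad only when the message is shorter than the column
    }
    // without the guard, max - s.length() underflows and append() throws std::length_error

The same guard is applied again to YeelightLight::log() further down.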
@@ -859,7 +862,7 @@ bool LedDevicePhilipsHue::init(const QJsonObject &deviceConfig)
if( _groupId == 0 )
{
log( "Group-ID is invalid", "%d", _groupId );
Error(_log, "Disabling Entertainment API as Group-ID is invalid" );
_useHueEntertainmentAPI = false;
}
}
@@ -888,7 +891,7 @@ bool LedDevicePhilipsHue::setLights()
if( _useHueEntertainmentAPI )
{
_useHueEntertainmentAPI = false;
Debug(_log, "Group-ID [%u] is not usable - Entertainment API usage was disabled!", _groupId );
Error(_log, "Group-ID [%u] is not usable - Entertainment API usage was disabled!", _groupId );
}
lArray = _devConfig[ CONFIG_LIGHTIDS ].toArray();
}
@@ -1018,7 +1021,7 @@ bool LedDevicePhilipsHue::updateLights(const QMap<quint16, QJsonObject> &map)
if( lightsCount == 0 )
{
Debug(_log, "No usable lights found!" );
Error(_log, "No usable lights found!" );
isInitOK = false;
}
@@ -1073,18 +1076,18 @@ bool LedDevicePhilipsHue::openStream()
if( isInitOK )
{
Info(_log, "Philips Hue Entertaiment API successful connected! Start Streaming." );
Info(_log, "Philips Hue Entertainment API successful connected! Start Streaming." );
_allLightsBlack = true;
noSignalDetection();
}
else
{
Error(_log, "Philips Hue Entertaiment API not connected!" );
Error(_log, "Philips Hue Entertainment API not connected!" );
}
}
else
{
Error(_log, "Philips Hue Entertaiment API could not initialisized!" );
Error(_log, "Philips Hue Entertainment API could not be initialised!" );
}
return isInitOK;
@@ -1235,7 +1238,7 @@ QByteArray LedDevicePhilipsHue::prepareStreamData() const
CiColor lightC = light.getColor();
quint64 R = lightC.x * 0xffff;
quint64 G = lightC.y * 0xffff;
quint64 B = lightC.bri * 0xffff;
quint64 B = (lightC.x || lightC.y) ? lightC.bri * 0xffff : 0;
unsigned int id = light.getId();
const uint8_t payload[] = {
0x00, 0x00, static_cast<uint8_t>(id),
@@ -1315,7 +1318,7 @@ bool LedDevicePhilipsHue::switchOff()
stop_retry_left = 3;
if (_useHueEntertainmentAPI)
{
stopStream();
stopStream();
}
return LedDevicePhilipsHueBridge::switchOff();
@@ -1467,7 +1470,7 @@ void LedDevicePhilipsHue::setColor(PhilipsHueLight& light, CiColor& color)
if( !_useHueEntertainmentAPI )
{
const int bri = qRound(qMin(254.0, _brightnessFactor * qMax(1.0, color.bri * 254.0)));
QString stateCmd = QString("\"%1\":[%2,%3],\"%4\":%5").arg( API_XY_COORDINATES ).arg( color.x, 0, 'd', 4 ).arg( color.y, 0, 'd', 4 ).arg( API_BRIGHTNESS ).arg( bri );
QString stateCmd = QString("{\"%1\":[%2,%3],\"%4\":%5}").arg( API_XY_COORDINATES ).arg( color.x, 0, 'd', 4 ).arg( color.y, 0, 'd', 4 ).arg( API_BRIGHTNESS ).arg( bri );
setLightState( light.getId(), stateCmd );
}
else

View File

@@ -23,7 +23,7 @@ bool LedDeviceTpm2net::init(const QJsonObject &deviceConfig)
{
_tpm2_max = deviceConfig["max-packet"].toInt(170);
_tpm2ByteCount = 3 * _ledCount;
_tpm2TotalPackets = 1 + _tpm2ByteCount / _tpm2_max;
_tpm2TotalPackets = (_tpm2ByteCount / _tpm2_max) + ((_tpm2ByteCount % _tpm2_max) != 0);
isInitOK = true;
}
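The new packet-count expression is a plain integer ceiling division; the old 1 + byteCount / max form over-counted by one packet whenever the payload was an exact multiple of the packet size. For example (illustrative values):

    int tpm2_max  = 170;        // max payload bytes per TPM2.net frame
    int byteCount = 340;        // exactly two full frames of LED data

    int oldCount = 1 + byteCount / tpm2_max;                                 // == 3, one frame too many
    int newCount = (byteCount / tpm2_max) + ((byteCount % tpm2_max) != 0);   // == 2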

View File

@@ -1,14 +1,20 @@
// Local-Hyperion includes
#include "LedDeviceWled.h"
#include <ssdp/SSDPDiscover.h>
#include <utils/QStringUtils.h>
#include <utils/WaitTime.h>
#include <QThread>
#include <chrono>
// Constants
namespace {
const bool verbose = false;
// Configuration settings
const char CONFIG_ADDRESS[] = "host";
const char CONFIG_RESTORE_STATE[] = "restoreOriginalState";
// UDP elements
const quint16 STREAM_DEFAULT_PORT = 19446;
@@ -24,12 +30,11 @@ const char API_PATH_STATE[] = "state";
const char STATE_ON[] = "on";
const char STATE_VALUE_TRUE[] = "true";
const char STATE_VALUE_FALSE[] = "false";
const char STATE_LIVE[] = "live";
// WLED ssdp services
// TODO: WLED - Update ssdp discovery parameters when available
const char SSDP_ID[] = "ssdp:all";
const char SSDP_FILTER[] = "(.*)";
const char SSDP_FILTER_HEADER[] = "ST";
const int BRI_MAX = 255;
constexpr std::chrono::milliseconds DEFAULT_IDENTIFY_TIME{ 2000 };
} //End of constants
@@ -53,7 +58,6 @@ LedDevice* LedDeviceWled::construct(const QJsonObject &deviceConfig)
bool LedDeviceWled::init(const QJsonObject &deviceConfig)
{
Debug(_log, "");
bool isInitOK = false;
// Initialise LedDevice sub-class, ProviderUdp::init will be executed later, if connectivity is defined
@@ -66,18 +70,21 @@ bool LedDeviceWled::init(const QJsonObject &deviceConfig)
Debug(_log, "ColorOrder : %s", QSTRING_CSTR( this->getColorOrder() ));
Debug(_log, "LatchTime : %d", this->getLatchTime());
_isRestoreOrigState = _devConfig[CONFIG_RESTORE_STATE].toBool(false);
Debug(_log, "RestoreOrigState : %d", _isRestoreOrigState);
//Set hostname as per configuration
QString address = deviceConfig[ CONFIG_ADDRESS ].toString();
QString hostName = deviceConfig[ CONFIG_ADDRESS ].toString();
//If host not configured the init fails
if ( address.isEmpty() )
if ( hostName.isEmpty() )
{
this->setInError("No target hostname nor IP defined");
return false;
}
else
{
QStringList addressparts = QStringUtils::split(address,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
_hostname = addressparts[0];
if ( addressparts.size() > 1 )
{
@@ -100,13 +107,11 @@ bool LedDeviceWled::init(const QJsonObject &deviceConfig)
}
}
}
Debug(_log, "[%d]", isInitOK);
return isInitOK;
}
bool LedDeviceWled::initRestAPI(const QString &hostname, int port)
{
Debug(_log, "");
bool isInitOK = false;
if ( _restApi == nullptr )
@@ -116,38 +121,68 @@ bool LedDeviceWled::initRestAPI(const QString &hostname, int port)
isInitOK = true;
}
Debug(_log, "[%d]", isInitOK);
return isInitOK;
}
QString LedDeviceWled::getOnOffRequest(bool isOn) const
{
QString state = isOn ? STATE_VALUE_TRUE : STATE_VALUE_FALSE;
return QString( "{\"%1\":%2}" ).arg( STATE_ON, state);
return QString( "\"%1\":%2,\"%3\":%4" ).arg( STATE_ON, state).arg( STATE_LIVE, state);
}
QString LedDeviceWled::getBrightnessRequest(int bri) const
{
return QString( "\"bri\":%1" ).arg(bri);
}
QString LedDeviceWled::getEffectRequest(int effect, int speed) const
{
return QString( "\"seg\":{\"fx\":%1,\"sx\":%2}" ).arg(effect).arg(speed);
}
QString LedDeviceWled::getLorRequest(int lor) const
{
return QString( "\"lor\":%1" ).arg(lor);
}
bool LedDeviceWled::sendStateUpdateRequest(const QString &request)
{
bool rc = true;
_restApi->setPath(API_PATH_STATE);
httpResponse response1 = _restApi->put(QString("{%1}").arg(request));
if ( response1.error() )
{
rc = false;
}
return rc;
}
bool LedDeviceWled::powerOn()
{
Debug(_log, "");
bool on = true;
bool on = false;
if ( _isDeviceReady)
{
//Power-on WLED device
_restApi->setPath(API_PATH_STATE);
httpResponse response = _restApi->put(getOnOffRequest(true));
httpResponse response = _restApi->put(QString("{%1,%2}").arg(getOnOffRequest(true)).arg(getBrightnessRequest(BRI_MAX)));
if ( response.error() )
{
this->setInError ( response.getErrorReason() );
QString errorReason = QString("Power-on request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
on = false;
}
else
{
on = true;
}
}
return on;
}
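Putting the request builders above together, the body that powerOn() now sends to the WLED state path should come out roughly as below (reconstructed from the helpers, not a captured request; BRI_MAX is 255 per the constants block):

    QString body = QString("{%1,%2}")
        .arg(QString("\"%1\":%2,\"%3\":%4").arg("on", "true").arg("live", "true"))
        .arg(QString("\"bri\":%1").arg(255));
    // body == "{\"on\":true,\"live\":true,\"bri\":255}"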
bool LedDeviceWled::powerOff()
{
Debug(_log, "");
bool off = true;
if ( _isDeviceReady)
{
@@ -156,53 +191,89 @@ bool LedDeviceWled::powerOff()
//Power-off the WLED device physically
_restApi->setPath(API_PATH_STATE);
httpResponse response = _restApi->put(getOnOffRequest(false));
httpResponse response = _restApi->put(QString("{%1}").arg(getOnOffRequest(false)));
if ( response.error() )
{
this->setInError ( response.getErrorReason() );
QString errorReason = QString("Power-off request failed with error: '%1'").arg(response.getErrorReason());
this->setInError ( errorReason );
off = false;
}
}
return off;
}
bool LedDeviceWled::storeState()
{
bool rc = true;
if ( _isRestoreOrigState )
{
_restApi->setPath(API_PATH_STATE);
httpResponse response = _restApi->get();
if ( response.error() )
{
QString errorReason = QString("Storing device state failed with error: '%1'").arg(response.getErrorReason());
setInError(errorReason);
rc = false;
}
else
{
_originalStateProperties = response.getBody().object();
DebugIf(verbose, _log, "state: [%s]", QString(QJsonDocument(_originalStateProperties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
}
}
return rc;
}
bool LedDeviceWled::restoreState()
{
bool rc = true;
if ( _isRestoreOrigState )
{
//powerOff();
_restApi->setPath(API_PATH_STATE);
_originalStateProperties[STATE_LIVE] = false;
httpResponse response = _restApi->put(QString(QJsonDocument(_originalStateProperties).toJson(QJsonDocument::Compact)).toUtf8().constData());
if ( response.error() )
{
Warning (_log, "%s restoring state failed with error: '%s'", QSTRING_CSTR(_activeDeviceType), QSTRING_CSTR(response.getErrorReason()));
}
}
return rc;
}
QJsonObject LedDeviceWled::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
QJsonArray deviceList;
// Discover WLED Devices
SSDPDiscover discover;
discover.skipDuplicateKeys(true);
discover.setSearchFilter(SSDP_FILTER, SSDP_FILTER_HEADER);
QString searchTarget = SSDP_ID;
if ( discover.discoverServices(searchTarget) > 0 )
{
deviceList = discover.getServicesDiscoveredJson();
}
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose, _log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
return devicesDiscovered;
}
QJsonObject LedDeviceWled::getProperties(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QJsonObject properties;
// Get Nanoleaf device properties
QString host = params["host"].toString("");
if ( !host.isEmpty() )
QString hostName = params["host"].toString("");
if ( !hostName.isEmpty() )
{
QString filter = params["filter"].toString("");
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
@@ -226,49 +297,45 @@ QJsonObject LedDeviceWled::getProperties(const QJsonObject& params)
properties.insert("properties", response.getBody().object());
Debug(_log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData() );
}
return properties;
}
void LedDeviceWled::identify(const QJsonObject& /*params*/)
void LedDeviceWled::identify(const QJsonObject& params)
{
#if 0
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QString host = params["host"].toString("");
if ( !host.isEmpty() )
QString hostName = params["host"].toString("");
if ( !hostName.isEmpty() )
{
// Resolve hostname and port (or use default API port)
QStringList addressparts = QStringUtils::split(host,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort;
if ( addressparts.size() > 1)
{
apiPort = addressparts[1].toInt();
}
else
{
apiPort = API_DEFAULT_PORT;
}
// TODO: WLED::identify - Replace with valid identification code
initRestAPI(apiHost, apiPort);
// initRestAPI(apiHost, apiPort);
_isRestoreOrigState = true;
storeState();
// QString resource = QString("%1/%2/%3").arg( API_LIGHTS ).arg( lightId ).arg( API_STATE);
// _restApi->setPath(resource);
QString request = getOnOffRequest(true) + "," + getLorRequest(1) + "," + getEffectRequest(25);
sendStateUpdateRequest(request);
// QString stateCmd;
// stateCmd += QString("\"%1\":%2,").arg( API_STATE_ON ).arg( API_STATE_VALUE_TRUE );
// stateCmd += QString("\"%1\":\"%2\"").arg( "alert" ).arg( "select" );
// stateCmd = "{" + stateCmd + "}";
wait(DEFAULT_IDENTIFY_TIME);
// // Perform request
// httpResponse response = _restApi->put(stateCmd);
// if ( response.error() )
// {
// Warning (_log, "%s identification failed with error: '%s'", QSTRING_CSTR(_activeDeviceType), QSTRING_CSTR(response.getErrorReason()));
// }
restoreState();
}
#endif
}
int LedDeviceWled::write(const std::vector<ColorRgb> &ledValues)

View File

@@ -8,8 +8,6 @@
///
/// Implementation of a WLED-device
/// ...
///
///
class LedDeviceWled : public ProviderUdp
{
@@ -105,6 +103,25 @@ protected:
///
bool powerOff() override;
///
/// @brief Store the device's original state.
///
/// Save the device's state before hyperion color streaming starts allowing to restore state during switchOff().
///
/// @return True if success
///
bool storeState() override;
///
/// @brief Restore the device's original state.
///
/// Restore the device's state as before hyperion color streaming started.
/// This includes the on/off state of the device.
///
/// @return True, if success
///
bool restoreState() override;
private:
///
@@ -123,12 +140,20 @@ private:
/// @return Command to switch device on/off
///
QString getOnOffRequest (bool isOn ) const;
QString getBrightnessRequest (int bri ) const;
QString getEffectRequest(int effect, int speed=128) const;
QString getLorRequest(int lor) const;
bool sendStateUpdateRequest(const QString &request);
///REST-API wrapper
ProviderRestApi* _restApi;
QString _hostname;
int _apiPort;
QJsonObject _originalStateProperties;
};
#endif // LEDDEVICEWLED_H

View File

@@ -234,12 +234,12 @@ int YeelightLight::writeCommand( const QJsonDocument &command, QJsonArray &resul
if ( ! _tcpSocket->waitForBytesWritten(WRITE_TIMEOUT.count()) )
{
QString errorReason = QString ("(%1) %2").arg(_tcpSocket->error()).arg( _tcpSocket->errorString());
log ( 2, "Error:", "bytesWritten: [%ll], %s", bytesWritten, QSTRING_CSTR(errorReason));
log ( 2, "Error:", "bytesWritten: [%d], %s", bytesWritten, QSTRING_CSTR(errorReason));
this->setInError ( errorReason );
}
else
{
log ( 3, "Success:", "Bytes written [%ll]", bytesWritten );
log ( 3, "Success:", "Bytes written [%d]", bytesWritten );
// Avoid to overrun the Yeelight Command Quota
qint64 elapsedTime = QDateTime::currentMSecsSinceEpoch() - _lastWriteTime;
@@ -258,7 +258,7 @@ int YeelightLight::writeCommand( const QJsonDocument &command, QJsonArray &resul
{
do
{
log ( 3, "Reading:", "Bytes available [%ll]", _tcpSocket->bytesAvailable() );
log ( 3, "Reading:", "Bytes available [%d]", _tcpSocket->bytesAvailable() );
while ( _tcpSocket->canReadLine() )
{
QByteArray response = _tcpSocket->readLine();
@@ -338,7 +338,7 @@ bool YeelightLight::streamCommand( const QJsonDocument &command )
{
int error = _tcpStreamSocket->error();
QString errorReason = QString ("(%1) %2").arg(error).arg( _tcpStreamSocket->errorString());
log ( 1, "Error:", "bytesWritten: [%ll], %s", bytesWritten, QSTRING_CSTR(errorReason));
log ( 1, "Error:", "bytesWritten: [%d], %s", bytesWritten, QSTRING_CSTR(errorReason));
if ( error == QAbstractSocket::RemoteHostClosedError )
{
@@ -353,7 +353,7 @@ bool YeelightLight::streamCommand( const QJsonDocument &command )
}
else
{
log ( 3, "Success:", "Bytes written [%ll]", bytesWritten );
log ( 3, "Success:", "Bytes written [%d]", bytesWritten );
rc = true;
}
}
@@ -956,7 +956,10 @@ void YeelightLight::log(int logLevel, const char* msg, const char* type, ...)
va_end(args);
std::string s = msg;
uint max = 20;
s.append(max - s.length(), ' ');
if (max > s.length())
{
s.append(max - s.length(), ' ');
}
Debug( _log, "%d|%15.15s| %s: %s", logLevel, QSTRING_CSTR(_name), s.c_str(), val);
}
@@ -1076,12 +1079,12 @@ bool LedDeviceYeelight::init(const QJsonObject &deviceConfig)
int configuredYeelightsCount = 0;
for (const QJsonValueRef light : configuredYeelightLights)
{
QString host = light.toObject().value("host").toString();
QString hostName = light.toObject().value("host").toString();
int port = light.toObject().value("port").toInt(API_DEFAULT_PORT);
if ( !host.isEmpty() )
if ( !hostName.isEmpty() )
{
QString name = light.toObject().value("name").toString();
Debug(_log, "Light [%u] - %s (%s:%d)", configuredYeelightsCount, QSTRING_CSTR(name), QSTRING_CSTR(host), port );
Debug(_log, "Light [%u] - %s (%s:%d)", configuredYeelightsCount, QSTRING_CSTR(name), QSTRING_CSTR(hostName), port );
++configuredYeelightsCount;
}
}
@@ -1107,10 +1110,10 @@ bool LedDeviceYeelight::init(const QJsonObject &deviceConfig)
_lightsAddressList.clear();
for (int j = 0; j < static_cast<int>( configuredLedCount ); ++j)
{
QString address = configuredYeelightLights[j].toObject().value("host").toString();
QString hostName = configuredYeelightLights[j].toObject().value("host").toString();
int port = configuredYeelightLights[j].toObject().value("port").toInt(API_DEFAULT_PORT);
QStringList addressparts = QStringUtils::split(address,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QStringList addressparts = QStringUtils::split(hostName,":", QStringUtils::SplitBehavior::SkipEmptyParts);
QString apiHost = addressparts[0];
int apiPort = port;
@@ -1347,14 +1350,10 @@ bool LedDeviceYeelight::restoreState()
return rc;
}
QJsonObject LedDeviceYeelight::discover(const QJsonObject& /*params*/)
QJsonArray LedDeviceYeelight::discover()
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
QJsonArray deviceList;
// Discover Yeelight Devices
SSDPDiscover discover;
discover.setPort(SSDP_PORT);
discover.skipDuplicateKeys(true);
@@ -1365,25 +1364,36 @@ QJsonObject LedDeviceYeelight::discover(const QJsonObject& /*params*/)
{
deviceList = discover.getServicesDiscoveredJson();
}
return deviceList;
}
QJsonObject LedDeviceYeelight::discover(const QJsonObject& /*params*/)
{
QJsonObject devicesDiscovered;
devicesDiscovered.insert("ledDeviceType", _activeDeviceType );
QString discoveryMethod("ssdp");
QJsonArray deviceList;
deviceList = discover();
devicesDiscovered.insert("devices", deviceList);
Debug(_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose,_log, "devicesDiscovered: [%s]", QString(QJsonDocument(devicesDiscovered).toJson(QJsonDocument::Compact)).toUtf8().constData() );
return devicesDiscovered;
}
QJsonObject LedDeviceYeelight::getProperties(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QJsonObject properties;
QString apiHostname = params["hostname"].toString("");
QString hostName = params["hostname"].toString("");
quint16 apiPort = static_cast<quint16>( params["port"].toInt(API_DEFAULT_PORT) );
Debug (_log, "apiHost [%s], apiPort [%d]", QSTRING_CSTR(apiHostname), apiPort);
if ( !apiHostname.isEmpty() )
if ( !hostName.isEmpty() )
{
YeelightLight yeelight(_log, apiHostname, apiPort);
YeelightLight yeelight(_log, hostName, apiPort);
//yeelight.setDebuglevel(3);
if ( yeelight.open() )
@@ -1399,15 +1409,15 @@ QJsonObject LedDeviceYeelight::getProperties(const QJsonObject& params)
void LedDeviceYeelight::identify(const QJsonObject& params)
{
Debug(_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
DebugIf(verbose,_log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData() );
QString apiHostname = params["hostname"].toString("");
QString hostName = params["hostname"].toString("");
quint16 apiPort = static_cast<quint16>( params["port"].toInt(API_DEFAULT_PORT) );
Debug (_log, "apiHost [%s], apiPort [%d]", QSTRING_CSTR(apiHostname), apiPort);
Debug (_log, "apiHost [%s], apiPort [%d]", QSTRING_CSTR(hostName), apiPort);
if ( !apiHostname.isEmpty() )
if ( !hostName.isEmpty() )
{
YeelightLight yeelight(_log, apiHostname, apiPort);
YeelightLight yeelight(_log, hostName, apiPort);
//yeelight.setDebuglevel(3);
if ( yeelight.open() )

View File

@@ -591,6 +591,14 @@ private:
///
uint getLightsCount() const { return _lightsCount; }
///
/// @brief Discover Yeelight devices available (for configuration).
/// Yeelight specific UDP Broadcast discovery
///
/// @return A JSON structure holding a list of devices found
///
QJsonArray discover();
/// Array of the Yeelight addresses handled by the LED-device
QVector<yeelightAddress> _lightsAddressList;

View File

@@ -8,12 +8,20 @@
//std includes
#include <iostream>
#include <chrono>
// Constants
namespace {
const QChar ONE_SLASH = '/';
const int HTTP_STATUS_NO_CONTENT = 204;
const int HTTP_STATUS_BAD_REQUEST = 400;
const int HTTP_STATUS_UNAUTHORIZED = 401;
const int HTTP_STATUS_NOT_FOUND = 404;
constexpr std::chrono::milliseconds DEFAULT_REST_TIMEOUT{ 400 };
} //End of constants
ProviderRestApi::ProviderRestApi(const QString &host, int port, const QString &basePath)
@@ -59,7 +67,7 @@ void ProviderRestApi::appendPath ( const QString &path )
appendPath (_path, path );
}
void ProviderRestApi::appendPath ( QString& path, const QString &appendPath) const
void ProviderRestApi::appendPath ( QString& path, const QString &appendPath)
{
if ( !appendPath.isEmpty() && appendPath != ONE_SLASH )
{
@@ -118,20 +126,26 @@ httpResponse ProviderRestApi::get()
httpResponse ProviderRestApi::get(const QUrl &url)
{
Debug(_log, "GET: [%s]", QSTRING_CSTR( url.toString() ));
// Perform request
QNetworkRequest request(url);
QNetworkReply* reply = _networkManager->get(request);
// Connect requestFinished signal to quit slot of the loop.
QEventLoop loop;
loop.connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
QEventLoop::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
ReplyTimeout::set(reply, DEFAULT_REST_TIMEOUT.count());
// Go into the loop until the request is finished.
loop.exec();
httpResponse response;
if(reply->operation() == QNetworkAccessManager::GetOperation)
{
if(reply->error() != QNetworkReply::NoError)
{
Debug(_log, "GET: [%s]", QSTRING_CSTR( url.toString() ));
}
response = getResponse(reply );
}
// Free space.
@@ -147,19 +161,25 @@ httpResponse ProviderRestApi::put(const QString &body)
httpResponse ProviderRestApi::put(const QUrl &url, const QString &body)
{
Debug(_log, "PUT: [%s] [%s]", QSTRING_CSTR( url.toString() ), QSTRING_CSTR( body ) );
// Perform request
QNetworkRequest request(url);
QNetworkReply* reply = _networkManager->put(request, body.toUtf8());
// Connect requestFinished signal to quit slot of the loop.
QEventLoop loop;
loop.connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
QEventLoop::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
ReplyTimeout::set(reply, DEFAULT_REST_TIMEOUT.count());
// Go into the loop until the request is finished.
loop.exec();
httpResponse response;
if(reply->operation() == QNetworkAccessManager::PutOperation)
{
if(reply->error() != QNetworkReply::NoError)
{
Debug(_log, "PUT: [%s] [%s]", QSTRING_CSTR( url.toString() ), QSTRING_CSTR( body ) );
}
response = getResponse(reply);
}
// Free space.
@@ -175,14 +195,11 @@ httpResponse ProviderRestApi::getResponse(QNetworkReply* const &reply)
int httpStatusCode = reply->attribute( QNetworkRequest::HttpStatusCodeAttribute ).toInt();
response.setHttpStatusCode(httpStatusCode);
Debug(_log, "Reply.httpStatusCode [%d]", httpStatusCode );
response.setNetworkReplyError(reply->error());
if(reply->error() == QNetworkReply::NoError)
{
if ( httpStatusCode != 204 ){
if ( httpStatusCode != HTTP_STATUS_NO_CONTENT ){
QByteArray replyData = reply->readAll();
if ( !replyData.isEmpty())
@@ -211,18 +228,19 @@ httpResponse ProviderRestApi::getResponse(QNetworkReply* const &reply)
}
else
{
Debug(_log, "Reply.httpStatusCode [%d]", httpStatusCode );
QString errorReason;
if ( httpStatusCode > 0 ) {
QString httpReason = reply->attribute( QNetworkRequest::HttpReasonPhraseAttribute ).toString();
QString advise;
switch ( httpStatusCode ) {
case 400:
case HTTP_STATUS_BAD_REQUEST:
advise = "Check Request Body";
break;
case 401:
case HTTP_STATUS_UNAUTHORIZED:
advise = "Check Authentication Token (API Key)";
break;
case 404:
case HTTP_STATUS_NOT_FOUND:
advise = "Check Resource given";
break;
default:
@@ -231,10 +249,20 @@ httpResponse ProviderRestApi::getResponse(QNetworkReply* const &reply)
errorReason = QString ("[%3 %4] - %5").arg(QString(httpStatusCode) , httpReason, advise);
}
else {
errorReason = reply->errorString();
if ( reply->error() == QNetworkReply::OperationCanceledError )
{
//Do not report errors caused by request cancellation because of timeouts
Debug(_log, "Reply: [%s]", QSTRING_CSTR(errorReason) );
}
else
{
response.setError(true);
response.setErrorReason(errorReason);
}
}
response.setError(true);
response.setErrorReason(errorReason);
// Create valid body which is empty
response.setBody( QJsonDocument() );

View File

@@ -10,6 +10,48 @@
#include <QUrlQuery>
#include <QJsonDocument>
#include <QBasicTimer>
#include <QTimerEvent>
//Set QNetworkReply timeout without external timer
//https://stackoverflow.com/questions/37444539/how-to-set-qnetworkreply-timeout-without-external-timer
class ReplyTimeout : public QObject {
Q_OBJECT
public:
enum HandleMethod { Abort, Close };
ReplyTimeout(QNetworkReply* reply, const int timeout, HandleMethod method = Abort) :
QObject(reply), m_method(method)
{
Q_ASSERT(reply);
if (reply && reply->isRunning()) {
m_timer.start(timeout, this);
connect(reply, &QNetworkReply::finished, this, &QObject::deleteLater);
}
}
static void set(QNetworkReply* reply, const int timeout, HandleMethod method = Abort)
{
new ReplyTimeout(reply, timeout, method);
}
protected:
QBasicTimer m_timer;
HandleMethod m_method;
void timerEvent(QTimerEvent * ev) override {
if (!m_timer.isActive() || ev->timerId() != m_timer.timerId())
return;
auto reply = static_cast<QNetworkReply*>(parent());
if (reply->isRunning())
{
if (m_method == Close)
reply->close();
else if (m_method == Abort)
reply->abort();
m_timer.stop();
}
}
};
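Usage of the helper is a one-liner, as the ProviderRestApi changes above show; a minimal sketch with a placeholder URL and manager (assumes a running event loop):

    QNetworkAccessManager manager;
    QNetworkReply* reply = manager.get(QNetworkRequest(QUrl("http://example.local/api")));

    // Parents itself to the reply and aborts the request if it is still running
    // after 400 ms; the watcher deletes itself once the reply finishes.
    ReplyTimeout::set(reply, 400);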
///
/// Response object for REST-API calls and JSON-responses
///
@@ -191,7 +233,7 @@ private:
/// @param[in/out] path to be updated
/// @param[in] path, element to be appended
///
void appendPath (QString &path, const QString &appendPath) const;
static void appendPath (QString &path, const QString &appendPath) ;
Logger* _log;

View File

@@ -1,6 +1,10 @@
// hyperion local includes
#include "LedDeviceAtmo.h"
namespace {
const bool verbose = false;
} //End of constants
LedDeviceAtmo::LedDeviceAtmo(const QJsonObject &deviceConfig)
: ProviderRs232(deviceConfig)
{
@@ -43,3 +47,20 @@ int LedDeviceAtmo::write(const std::vector<ColorRgb> &ledValues)
memcpy(4 + _ledBuffer.data(), ledValues.data(), _ledCount * sizeof(ColorRgb));
return writeBytes(_ledBuffer.size(), _ledBuffer.data());
}
QJsonObject LedDeviceAtmo::getProperties(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
QString serialPort = params["serialPort"].toString("");
QJsonObject propertiesDetails;
QJsonArray possibleLedCounts = { 5 };
propertiesDetails.insert("ledCount", possibleLedCounts);
properties.insert("properties", propertiesDetails);
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
return properties;
}

View File

@@ -23,6 +23,14 @@ public:
///
static LedDevice* construct(const QJsonObject &deviceConfig);
///
/// @brief Get a Atmo device's resource properties
///
/// @param[in] params Parameters to query device
/// @return A JSON structure holding the device's properties
///
QJsonObject getProperties(const QJsonObject& params) override;
private:
///

View File

@@ -1,27 +1,31 @@
// hyperion local includes
#include "LedDeviceKarate.h"
LedDeviceKarate::LedDeviceKarate(const QJsonObject &deviceConfig)
namespace {
const bool verbose = false;
} //End of constants
LedDeviceKarate::LedDeviceKarate(const QJsonObject& deviceConfig)
: ProviderRs232(deviceConfig)
{
}
LedDevice* LedDeviceKarate::construct(const QJsonObject &deviceConfig)
LedDevice* LedDeviceKarate::construct(const QJsonObject& deviceConfig)
{
return new LedDeviceKarate(deviceConfig);
}
bool LedDeviceKarate::init(const QJsonObject &deviceConfig)
bool LedDeviceKarate::init(const QJsonObject& deviceConfig)
{
bool isInitOK = false;
// Initialise sub-class
if ( ProviderRs232::init(deviceConfig) )
if (ProviderRs232::init(deviceConfig))
{
if (_ledCount != 8 && _ledCount != 16)
{
//Error( _log, "%d channels configured. This should always be 16!", _ledCount);
QString errortext = QString ("%1 channels configured. This should always be 8 or 16!").arg(_ledCount);
QString errortext = QString("%1 channels configured. This should always be 8 or 16!").arg(_ledCount);
this->setInError(errortext);
isInitOK = false;
}
@@ -33,8 +37,8 @@ bool LedDeviceKarate::init(const QJsonObject &deviceConfig)
_ledBuffer[2] = 0x00; // Checksum
_ledBuffer[3] = _ledCount * 3; // Number of Databytes send
Debug( _log, "Karatelight header for %d leds: 0x%02x 0x%02x 0x%02x 0x%02x", _ledCount,
_ledBuffer[0], _ledBuffer[1], _ledBuffer[2], _ledBuffer[3] );
Debug(_log, "Karatelight header for %d leds: 0x%02x 0x%02x 0x%02x 0x%02x", _ledCount,
_ledBuffer[0], _ledBuffer[1], _ledBuffer[2], _ledBuffer[3]);
isInitOK = true;
}
@@ -42,20 +46,37 @@ bool LedDeviceKarate::init(const QJsonObject &deviceConfig)
return isInitOK;
}
int LedDeviceKarate::write(const std::vector<ColorRgb> &ledValues)
int LedDeviceKarate::write(const std::vector<ColorRgb>& ledValues)
{
for (signed iLed=0; iLed< static_cast<int>(_ledCount); iLed++)
{
const ColorRgb& rgb = ledValues[iLed];
_ledBuffer[iLed*3+4] = rgb.green;
_ledBuffer[iLed*3+5] = rgb.blue;
_ledBuffer[iLed*3+6] = rgb.red;
}
for (signed iLed = 0; iLed < static_cast<int>(_ledCount); iLed++)
{
const ColorRgb& rgb = ledValues[iLed];
_ledBuffer[iLed * 3 + 4] = rgb.green;
_ledBuffer[iLed * 3 + 5] = rgb.blue;
_ledBuffer[iLed * 3 + 6] = rgb.red;
}
// Calc Checksum
_ledBuffer[2] = _ledBuffer[0] ^ _ledBuffer[1];
for (unsigned int i = 3; i < _ledBuffer.size(); i++)
_ledBuffer[2] ^= _ledBuffer[i];
_ledBuffer[2] = _ledBuffer[0] ^ _ledBuffer[1];
for (unsigned int i = 3; i < _ledBuffer.size(); i++)
_ledBuffer[2] ^= _ledBuffer[i];
return writeBytes(_ledBuffer.size(), _ledBuffer.data());
}
QJsonObject LedDeviceKarate::getProperties(const QJsonObject& params)
{
DebugIf(verbose, _log, "params: [%s]", QString(QJsonDocument(params).toJson(QJsonDocument::Compact)).toUtf8().constData());
QJsonObject properties;
QString serialPort = params["serialPort"].toString("");
QJsonObject propertiesDetails;
QJsonArray possibleLedCounts = { 16, 8 };
propertiesDetails.insert("ledCount", possibleLedCounts);
properties.insert("properties", propertiesDetails);
DebugIf(verbose, _log, "properties: [%s]", QString(QJsonDocument(properties).toJson(QJsonDocument::Compact)).toUtf8().constData());
return properties;
}

View File

@@ -26,6 +26,14 @@ public:
/// @return LedDevice constructed
static LedDevice* construct(const QJsonObject &deviceConfig);
///
/// @brief Get a Karate device's resource properties
///
/// @param[in] params Parameters to query device
/// @return A JSON structure holding the device's properties
///
QJsonObject getProperties(const QJsonObject& params) override;
private:
///

View File

@@ -43,7 +43,9 @@ bool ProviderRs232::init(const QJsonObject &deviceConfig)
// If device name was given as unix /dev/ system-location, get port name
if ( _deviceName.startsWith(QLatin1String("/dev/")) )
{
_deviceName = _deviceName.mid(5);
}
_isAutoDeviceName = _deviceName.toLower() == "auto";
_baudRate_Hz = deviceConfig["rate"].toInt();
@@ -141,18 +143,16 @@ bool ProviderRs232::tryOpen(int delayAfterConnect_ms)
Debug(_log, "_rs232Port.open(QIODevice::ReadWrite): %s, Baud rate [%d]bps", QSTRING_CSTR(_deviceName), _baudRate_Hz);
QSerialPortInfo serialPortInfo(_deviceName);
QJsonObject portInfo;
Debug(_log, "portName: %s", QSTRING_CSTR(serialPortInfo.portName()));
Debug(_log, "systemLocation: %s", QSTRING_CSTR(serialPortInfo.systemLocation()));
Debug(_log, "description: %s", QSTRING_CSTR(serialPortInfo.description()));
Debug(_log, "manufacturer: %s", QSTRING_CSTR(serialPortInfo.manufacturer()));
Debug(_log, "productIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.productIdentifier(), 0, 16)));
Debug(_log, "vendorIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.vendorIdentifier(), 0, 16)));
Debug(_log, "serialNumber: %s", QSTRING_CSTR(serialPortInfo.serialNumber()));
if (!serialPortInfo.isNull() )
{
Debug(_log, "portName: %s", QSTRING_CSTR(serialPortInfo.portName()));
Debug(_log, "systemLocation: %s", QSTRING_CSTR(serialPortInfo.systemLocation()));
Debug(_log, "description: %s", QSTRING_CSTR(serialPortInfo.description()));
Debug(_log, "manufacturer: %s", QSTRING_CSTR(serialPortInfo.manufacturer()));
Debug(_log, "vendorIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.vendorIdentifier(), 0, 16)));
Debug(_log, "productIdentifier: %s", QSTRING_CSTR(QString("0x%1").arg(serialPortInfo.productIdentifier(), 0, 16)));
Debug(_log, "serialNumber: %s", QSTRING_CSTR(serialPortInfo.serialNumber()));
if ( !_rs232Port.open(QIODevice::ReadWrite) )
{
this->setInError(_rs232Port.errorString());
@@ -163,6 +163,18 @@ bool ProviderRs232::tryOpen(int delayAfterConnect_ms)
{
QString errortext = QString("Invalid serial device name: [%1]!").arg(_deviceName);
this->setInError( errortext );
// List available device
for (auto &port : QSerialPortInfo::availablePorts() ) {
Debug(_log, "Avail. serial device: [%s]-(%s|%s), Manufacturer: %s, Description: %s",
QSTRING_CSTR(port.portName()),
QSTRING_CSTR(QString("0x%1").arg(port.vendorIdentifier(), 0, 16)),
QSTRING_CSTR(QString("0x%1").arg(port.productIdentifier(), 0, 16)),
QSTRING_CSTR(port.manufacturer()),
QSTRING_CSTR(port.description())
);
}
return false;
}
}
@@ -215,7 +227,7 @@ int ProviderRs232::writeBytes(const qint64 size, const uint8_t *data)
{
if ( _rs232Port.error() == QSerialPort::TimeoutError )
{
Debug(_log, "Timeout after %dms: %d frames already dropped", WRITE_TIMEOUT, _frameDropCounter);
Debug(_log, "Timeout after %dms: %d frames already dropped", WRITE_TIMEOUT.count(), _frameDropCounter);
++_frameDropCounter;
@@ -245,7 +257,7 @@ int ProviderRs232::writeBytes(const qint64 size, const uint8_t *data)
QString ProviderRs232::discoverFirst()
{
// take first available USB serial port - currently no probing!
for (auto const & port : QSerialPortInfo::availablePorts())
for (auto & port : QSerialPortInfo::availablePorts())
{
if (!port.isNull() && !port.isBusy())
{
@@ -266,7 +278,7 @@ QJsonObject ProviderRs232::discover(const QJsonObject& /*params*/)
// Discover serial Devices
for (auto &port : QSerialPortInfo::availablePorts() )
{
if ( !port.isNull() )
if ( !port.isNull() && !port.portName().startsWith("ttyS"))
{
QJsonObject portInfo;
portInfo.insert("description", port.description());

View File

@@ -4,8 +4,16 @@
"properties":{
"host" : {
"type": "string",
"title":"edt_dev_spec_targetIpHost_title",
"propertyOrder" : 1
"title": "edt_dev_spec_targetIpHost_title",
"required": true,
"propertyOrder": 1
},
"restoreOriginalState": {
"type": "boolean",
"title": "edt_dev_spec_restoreOriginalState_title",
"default": false,
"required": true,
"propertyOrder": 2
},
"latchTime": {
"type": "integer",
@@ -15,7 +23,7 @@
"minimum": 0,
"maximum": 1000,
"access" : "expert",
"propertyOrder" : 2
"propertyOrder" : 3
}
},
"additionalProperties": true

View File

@@ -1,12 +1,13 @@
// project includes
#include "ProtoClientConnection.h"
// qt
#include <QTcpSocket>
#include <QHostAddress>
#include <QTimer>
#include <QRgb>
// project includes
#include "ProtoClientConnection.h"
// TODO: Remove this class once third-party apps have been migrated (e.g. Hyperion Android Grabber, Windows Screen grabber, etc.)
ProtoClientConnection::ProtoClientConnection(QTcpSocket* socket, int timeout, QObject *parent)

View File

@@ -1,14 +1,16 @@
#pragma once
// protobuffer PROTO
// protobuf defines an Error() function itself, so undef it here
#undef Error
#include "message.pb.h"
// util
#include <utils/Logger.h>
#include <utils/Image.h>
#include <utils/ColorRgb.h>
#include <utils/Components.h>
// protobuffer PROTO
#include "message.pb.h"
class QTcpSocket;
class QTimer;

View File

@@ -1,5 +1,5 @@
#include <protoserver/ProtoServer.h>
#include "ProtoClientConnection.h"
#include <protoserver/ProtoServer.h>
// util
#include <utils/NetOrigin.h>

View File

@@ -3,7 +3,6 @@
// qt incl
#include <QDir>
#include <QFileInfo>
#include <QDebug>
// hyperion include
#include <hyperion/Hyperion.h>

View File

@@ -3,8 +3,8 @@
#include <utils/Logger.h>
ImageResampler::ImageResampler()
: _horizontalDecimation(1)
, _verticalDecimation(1)
: _horizontalDecimation(8)
, _verticalDecimation(8)
, _cropLeft(0)
, _cropRight(0)
, _cropTop(0)
@@ -27,7 +27,7 @@ void ImageResampler::processImage(const uint8_t * data, int width, int height, i
int cropRight = _cropRight;
int cropBottom = _cropBottom;
int xDestFlip = 0, yDestFlip = 0;
int uOffset, vOffset;
int uOffset = 0, vOffset = 0;
// handle 3D mode
switch (_videoMode)

View File

@@ -5,9 +5,10 @@ set(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/webserver)
FILE ( GLOB WebConfig_SOURCES "${CURRENT_HEADER_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.h" "${CURRENT_SOURCE_DIR}/*.cpp" )
FILE ( GLOB_RECURSE webFiles RELATIVE ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR}/assets/webconfig/* )
FILE ( RELATIVE_PATH webConfigPath ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR}/assets/webconfig)
FOREACH( f ${webFiles} )
STRING ( REPLACE "../assets/webconfig/" "" fname ${f})
STRING ( REPLACE "${webConfigPath}/" "" fname ${f})
SET(HYPERION_WEBCONFIG_RES "${HYPERION_WEBCONFIG_RES}\n\t\t<file alias=\"/webconfig/${fname}\">${f}</file>")
ENDFOREACH()
CONFIGURE_FILE(${CURRENT_SOURCE_DIR}/WebConfig.qrc.in ${CMAKE_BINARY_DIR}/WebConfig.qrc )

View File

@@ -85,6 +85,17 @@ void WebSocketClient::handleWebSocketFrame()
case OPCODE::BINARY:
case OPCODE::TEXT:
{
// A fragmented message consists of a single frame with the FIN bit
// clear and an opcode other than 0, followed by zero or more frames
// with the FIN bit clear and the opcode set to 0, and terminated by
// a single frame with the FIN bit set and an opcode of 0.
//
// Store frame type given by first frame
if (_wsh.opCode != OPCODE::CONTINUATION )
{
_frameOpCode = _wsh.opCode;
}
// check for protocol violations
if (_onContinuation && !isContinuation)
{
@@ -117,15 +128,15 @@ void WebSocketClient::handleWebSocketFrame()
if (_wsh.fin)
{
_onContinuation = false;
if (_wsh.opCode == OPCODE::TEXT)
{
if (_frameOpCode == OPCODE::TEXT)
{
_jsonAPI->handleMessage(QString(_wsReceiveBuffer));
}
else
{
handleBinaryMessage(_wsReceiveBuffer);
}
}
else
{
handleBinaryMessage(_wsReceiveBuffer);
}
_wsReceiveBuffer.clear();
}
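
Per RFC 6455 only the first frame of a fragmented message carries the real opcode; continuation frames and the final frame use opcode 0. The change therefore latches the first frame's opcode in _frameOpCode and dispatches on it once the FIN frame arrives, instead of inspecting the (possibly CONTINUATION) opcode of the last frame. A self-contained sketch of that reassembly pattern; the struct and handler names are illustrative, only the latching/dispatch logic mirrors the code above:

#include <QtGlobal>
#include <QByteArray>
#include <QString>

enum OpCode : quint8 { CONTINUATION = 0x0, TEXT = 0x1, BINARY = 0x2 };

struct FrameAssembler
{
	quint8     frameOpCode = TEXT; // opcode of the first frame of the current message
	QByteArray buffer;             // accumulated payload across fragments

	template<typename TextHandler, typename BinaryHandler>
	void onFrame(quint8 opCode, bool fin, const QByteArray &payload,
	             TextHandler onText, BinaryHandler onBinary)
	{
		if (opCode != CONTINUATION)
			frameOpCode = opCode;  // only the first frame carries the message type

		buffer += payload;

		if (fin)                   // last frame of the message -> dispatch and reset
		{
			if (frameOpCode == TEXT)
				onText(QString::fromUtf8(buffer));
			else
				onBinary(buffer);
			buffer.clear();
		}
	}
};
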

View File

@@ -52,6 +52,9 @@ private:
// websocket header store
WebSocketHeader _wsh;
// opCode of the first frame (in case of fragmented messages)
quint8 _frameOpCode;
// masks for fields in the basic header
static uint8_t const BHB0_OPCODE = 0x0F;
static uint8_t const BHB0_RSV3 = 0x10;
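
The BHB0_* constants are bit masks for byte 0 of the basic WebSocket frame header: the low nibble holds the opcode, bit 4 is RSV3 and, per RFC 6455, the most significant bit is FIN. A small sketch of decoding that byte; the FIN mask is an assumption here since its declaration is not shown in this hunk:

#include <cstdint>

static uint8_t const BHB0_OPCODE = 0x0F; // as declared above
static uint8_t const BHB0_RSV3   = 0x10; // as declared above
static uint8_t const BHB0_FIN    = 0x80; // assumed - standard RFC 6455 layout

void parseHeaderByte0(uint8_t byte0)
{
	uint8_t opCode = byte0 & BHB0_OPCODE;       // 0x0 continuation, 0x1 text, 0x2 binary, ...
	bool    fin    = (byte0 & BHB0_FIN)  != 0;  // true on the last frame of a message
	bool    rsv3   = (byte0 & BHB0_RSV3) != 0;  // must be clear unless an extension uses it
	(void)opCode; (void)fin; (void)rsv3;        // sketch only
}
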