Refactor/Create APT/DNF Repository (#1648)

Paulchen-Panther
2023-11-16 21:05:56 +01:00
committed by GitHub
parent c9518db597
commit 91270966f9
165 changed files with 1918 additions and 2924 deletions


@@ -0,0 +1,186 @@
#ifndef ENCODERTHREAD_H
#define ENCODERTHREAD_H

// Qt includes
#include <QThread>

// util includes
#include <utils/PixelFormat.h>
#include <utils/ImageResampler.h>

// Determine the cmake options
#include <HyperionConfig.h>

// Turbo JPEG decoder
#ifdef HAVE_TURBO_JPEG
	#include <turbojpeg.h>
	#include <jconfig.h>
#endif

constexpr int DEFAULT_THREAD_COUNT {1};

/// Encoder thread for USB devices
class EncoderThread : public QObject
{
	Q_OBJECT
public:
	explicit EncoderThread();
	~EncoderThread() override;

	void setup(
		PixelFormat pixelFormat, uint8_t* sharedData,
		int size, int width, int height, int lineLength,
		int cropLeft, int cropTop, int cropBottom, int cropRight,
		VideoMode videoMode, FlipMode flipMode, int pixelDecimation);

	void process();

	bool isBusy() { return _busy; }
	QAtomicInt _busy = false;

signals:
	void newFrame(const Image<ColorRgb>& data);

private:
	PixelFormat _pixelFormat;
	uint8_t* _localData;
	int _scalingFactorsCount;
	int _width;
	int _height;
	int _lineLength;
	int _currentFrame;
	int _pixelDecimation;
	unsigned long _size;
	int _cropLeft;
	int _cropTop;
	int _cropBottom;
	int _cropRight;
	FlipMode _flipMode;
	VideoMode _videoMode;
	bool _doTransform;

	ImageResampler _imageResampler;

#ifdef HAVE_TURBO_JPEG
	tjhandle _tjInstance;
	tjscalingfactor* _scalingFactors;
	tjtransform* _xform;

	void processImageMjpeg();
	bool onError(const QString context) const;
#endif
};

template <typename TThread> class Thread : public QThread
{
public:
	TThread *_thread;

	explicit Thread(TThread *thread, QObject *parent = nullptr)
		: QThread(parent)
		, _thread(thread)
	{
		_thread->moveToThread(this);
		start();
	}

	~Thread() override
	{
		quit();
		wait();
	}

	EncoderThread* thread() const { return qobject_cast<EncoderThread*>(_thread); }

	void setup(
		PixelFormat pixelFormat, uint8_t* sharedData,
		int size, int width, int height, int lineLength,
		int cropLeft, int cropTop, int cropBottom, int cropRight,
		VideoMode videoMode, FlipMode flipMode, int pixelDecimation)
	{
		auto encThread = qobject_cast<EncoderThread*>(_thread);
		if (encThread != nullptr)
			encThread->setup(pixelFormat, sharedData,
				size, width, height, lineLength,
				cropLeft, cropTop, cropBottom, cropRight,
				videoMode, flipMode, pixelDecimation);
	}

	bool isBusy()
	{
		auto encThread = qobject_cast<EncoderThread*>(_thread);
		if (encThread != nullptr)
			return encThread->isBusy();

		return true;
	}

	void process()
	{
		auto encThread = qobject_cast<EncoderThread*>(_thread);
		if (encThread != nullptr)
			encThread->process();
	}

protected:
	void run() override
	{
		QThread::run();
		delete _thread;
	}
};

class EncoderThreadManager : public QObject
{
	Q_OBJECT
public:
	explicit EncoderThreadManager(QObject *parent = nullptr)
		: QObject(parent)
		, _threadCount(qMax(QThread::idealThreadCount(), DEFAULT_THREAD_COUNT))
		, _threads(nullptr)
	{
		_threads = new Thread<EncoderThread>*[_threadCount];
		for (int i = 0; i < _threadCount; i++)
		{
			_threads[i] = new Thread<EncoderThread>(new EncoderThread, this);
			_threads[i]->setObjectName("Encoder " + QString::number(i));
		}
	}

	~EncoderThreadManager() override
	{
		if (_threads != nullptr)
		{
			for (int i = 0; i < _threadCount; i++)
			{
				_threads[i]->deleteLater();
				_threads[i] = nullptr;
			}

			delete[] _threads;
			_threads = nullptr;
		}
	}

	void start()
	{
		if (_threads != nullptr)
			for (int i = 0; i < _threadCount; i++)
				connect(_threads[i]->thread(), &EncoderThread::newFrame, this, &EncoderThreadManager::newFrame);
	}

	void stop()
	{
		if (_threads != nullptr)
			for (int i = 0; i < _threadCount; i++)
				disconnect(_threads[i]->thread(), nullptr, nullptr, nullptr);
	}

	int _threadCount;
	Thread<EncoderThread>** _threads;

signals:
	void newFrame(const Image<ColorRgb>& data);
};

#endif // ENCODERTHREAD_H
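
For context on how this API fits together: EncoderThreadManager allocates one Thread<EncoderThread> per available hardware thread, its start() routes every worker's newFrame signal to the manager's own newFrame signal, and each Thread quits and joins its event loop in the destructor while run() deletes the wrapped worker once the loop exits. The sketch below is a hypothetical consumer, not part of this commit, showing how a grabber might hand a captured frame to the first idle worker; the function name, frame geometry and enum values (PixelFormat::YUYV, VideoMode::VIDEO_2D, FlipMode::NO_CHANGE, decimation 8) are assumptions.

// Hypothetical consumer sketch, not part of this commit.
#include <grabber/video/EncoderThread.h>

void dispatchFrame(EncoderThreadManager* threadManager, uint8_t* frameData, int frameSize)
{
	for (int i = 0; i < threadManager->_threadCount; ++i)
	{
		Thread<EncoderThread>* worker = threadManager->_threads[i];
		if (!worker->isBusy())
		{
			// Assumed frame: 1280x720 YUYV (2 bytes per pixel), no cropping,
			// no flip or 3D handling, pixel decimation of 8.
			worker->setup(PixelFormat::YUYV, frameData, frameSize,
				1280, 720, 1280 * 2,
				0, 0, 0, 0,
				VideoMode::VIDEO_2D, FlipMode::NO_CHANGE, 8);
			worker->process();
			break;
		}
	}
}

Once the manager's start() has been called, the decoded result would then surface through EncoderThreadManager::newFrame.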


@@ -0,0 +1,47 @@
#pragma once

#include <HyperionConfig.h> // Required to determine the cmake options

#include <hyperion/GrabberWrapper.h>

#if defined(ENABLE_MF)
	#include <grabber/video/mediafoundation/MFGrabber.h>
#elif defined(ENABLE_V4L2)
	#include <grabber/video/v4l2/V4L2Grabber.h>
#endif

#if defined(ENABLE_CEC)
	#include <cec/CECEvent.h>
#endif

class VideoWrapper : public GrabberWrapper
{
	Q_OBJECT

public:
	VideoWrapper();
	~VideoWrapper() override;

public slots:
	bool start() override;
	void stop() override;

#if defined(ENABLE_CEC)
	void handleCecEvent(CECEvent event);
#endif

	void handleSettingsUpdate(settings::type type, const QJsonDocument& config) override;

private slots:
	void newFrame(const Image<ColorRgb> & image);
	void readError(const char* err);
	void action() override;

private:
	/// The Media Foundation or V4L2 grabber
#if defined(ENABLE_MF)
	MFGrabber _grabber;
#elif defined(ENABLE_V4L2)
	V4L2Grabber _grabber;
#endif
};
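
VideoWrapper exposes a single grabber member whose concrete type is chosen at compile time (Media Foundation or V4L2), so the rest of the application only talks to the GrabberWrapper interface. A minimal wiring sketch follows; it assumes a SettingsManager with a settingsChanged(settings::type, const QJsonDocument&) signal exists in the surrounding code, which this commit does not define.

// Hypothetical wiring sketch, not part of this commit.
#include <grabber/video/VideoWrapper.h>
#include <hyperion/SettingsManager.h> // assumed location of the settings manager

void startVideoCapture(SettingsManager* settingsManager)
{
	auto* videoWrapper = new VideoWrapper();

	// Forward configuration changes to the wrapper (assumed signal name/signature).
	QObject::connect(settingsManager, &SettingsManager::settingsChanged,
	                 videoWrapper, &VideoWrapper::handleSettingsUpdate);

	// Starts the compile-time-selected grabber (MFGrabber or V4L2Grabber).
	videoWrapper->start();
}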


@@ -0,0 +1,129 @@
#pragma once

// Windows include
#include <Windows.h>

// COM includes
#include <Guiddef.h>

// Qt includes
#include <QObject>
#include <QRectF>
#include <QMap>
#include <QMultiMap>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>

// utils includes
#include <utils/PixelFormat.h>
#include <utils/Components.h>
#include <hyperion/Grabber.h>

// decoder thread includes
#include <grabber/video/EncoderThread.h>

/// Forward class declaration
class SourceReaderCB;
/// Forward struct declaration
struct IMFSourceReader;

///
/// Media Foundation capture class
///
class MFGrabber : public Grabber
{
	Q_OBJECT
	friend class SourceReaderCB;

public:
	struct DeviceProperties
	{
		QString symlink = QString();
		int width = 0;
		int height = 0;
		int fps = 0;
		int numerator = 0;
		int denominator = 0;
		PixelFormat pf = PixelFormat::NO_CHANGE;
		GUID guid = GUID_NULL;
	};

	struct DeviceControls
	{
		QString property = QString();
		int minValue = 0;
		int maxValue = 0;
		int step = 0;
		int def = 0;
		int currentValue = 0;
	};

	MFGrabber();
	~MFGrabber() override;

	void receive_image(const void *frameImageBuffer, int size);
	void setDevice(const QString& device);
	bool setInput(int input) override;
	bool setWidthHeight(int width, int height) override;
	void setEncoding(QString enc);
	void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue);
	void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold);
	void setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax);
	void setSignalDetectionEnable(bool enable);
	bool reload(bool force = false);

	///
	/// @brief Discover available Media Foundation USB devices (for configuration).
	/// @param[in] params Parameters used to overwrite discovery default behaviour
	/// @return A JSON structure holding a list of USB devices found
	///
	QJsonArray discover(const QJsonObject& params);

public slots:
	bool prepare();
	bool start();
	void stop();
	void newThreadFrame(Image<ColorRgb> image);

signals:
	void newFrame(const Image<ColorRgb> & image);
	void readError(const char* err);

private:
	bool init();
	void uninit();
	HRESULT init_device(QString device, DeviceProperties props);
	void enumVideoCaptureDevices();
	void start_capturing();
	void process_image(const void *frameImageBuffer, int size);

	QString _currentDeviceName,
		_newDeviceName;
	QMap<QString, QList<DeviceProperties>> _deviceProperties;
	QMap<QString, QList<DeviceControls>> _deviceControls;
	HRESULT _hr;
	IMFSourceReader* _sourceReader;
	SourceReaderCB* _sourceReaderCB;
	EncoderThreadManager* _threadManager;
	PixelFormat _pixelFormat,
		_pixelFormatConfig;
	int _lineLength,
		_frameByteSize,
		_noSignalCounterThreshold,
		_noSignalCounter,
		_brightness,
		_contrast,
		_saturation,
		_hue;
	QAtomicInt _currentFrame;
	ColorRgb _noSignalThresholdColor;
	bool _signalDetectionEnabled,
		_noSignalDetected,
		_initialized,
		_reload;
	double _x_frac_min,
		_y_frac_min,
		_x_frac_max,
		_y_frac_max;
};
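
The grabber is driven through its slots: prepare() and start() bring up the Media Foundation pipeline, frames are decoded on the EncoderThreadManager workers and surface through the newFrame signal, and discover() reports available devices as JSON for the configuration UI. Below is a rough driver sketch, not taken from this commit; the device name, resolution and encoding string are placeholders.

// Hypothetical driver sketch, not part of this commit.
#include <grabber/video/mediafoundation/MFGrabber.h>

void startMfCapture(MFGrabber& grabber)
{
	grabber.setDevice("USB Video Device");   // placeholder device name
	grabber.setWidthHeight(1280, 720);       // placeholder resolution
	grabber.setEncoding("MJPEG");            // assumed encoding string

	QObject::connect(&grabber, &MFGrabber::newFrame,
		[](const Image<ColorRgb>& /*image*/)
		{
			// Consume the decoded frame, e.g. hand it to the image processing chain.
		});

	if (grabber.prepare() && grabber.start())
	{
		// Frames now arrive asynchronously via the newFrame signal.
	}
}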


@@ -0,0 +1,185 @@
#pragma once

// stl includes
#include <vector>
#include <map>

// Qt includes
#include <QObject>
#include <QSocketNotifier>
#include <QRectF>
#include <QMap>
#include <QMultiMap>

// util includes
#include <utils/PixelFormat.h>
#include <hyperion/Grabber.h>
#include <hyperion/GrabberWrapper.h>
#include <utils/VideoStandard.h>
#include <utils/Components.h>

// decoder thread includes
#include <grabber/video/EncoderThread.h>

// Determine the cmake options
#include <HyperionConfig.h>

#if defined(ENABLE_CEC)
	#include <cec/CECEvent.h>
#endif

///
/// Capture class for V4L2 devices
///
class V4L2Grabber : public Grabber
{
	Q_OBJECT

public:
	struct DeviceProperties
	{
		QString name = QString();

		struct InputProperties
		{
			QString inputName = QString();
			QList<VideoStandard> standards = QList<VideoStandard>();

			struct EncodingProperties
			{
				int width = 0;
				int height = 0;
				QList<int> framerates = QList<int>();
			};
			QMultiMap<PixelFormat, EncodingProperties> encodingFormats = QMultiMap<PixelFormat, EncodingProperties>();
		};
		QMap<int, InputProperties> inputs = QMap<int, InputProperties>();
	};

	struct DeviceControls
	{
		QString property = QString();
		int minValue = 0;
		int maxValue = 0;
		int step = 0;
		int defaultValue = 0;
		int currentValue = 0;
	};

	V4L2Grabber();
	~V4L2Grabber() override;

	int grabFrame(Image<ColorRgb> &);

	void setDevice(const QString& devicePath, const QString& deviceName);
	bool setInput(int input) override;
	bool setWidthHeight(int width, int height) override;
	void setEncoding(QString enc);
	void setBrightnessContrastSaturationHue(int brightness, int contrast, int saturation, int hue);
	void setSignalThreshold(double redSignalThreshold, double greenSignalThreshold, double blueSignalThreshold, int noSignalCounterThreshold = 50);
	void setSignalDetectionOffset(double verticalMin, double horizontalMin, double verticalMax, double horizontalMax);
	void setSignalDetectionEnable(bool enable);
	void setCecDetectionEnable(bool enable);
	bool reload(bool force = false);

	QRectF getSignalDetectionOffset() const { return QRectF(_x_frac_min, _y_frac_min, _x_frac_max, _y_frac_max); } // used from hyperion-v4l2

	///
	/// @brief Discover available V4L2 USB devices (for configuration).
	/// @param[in] params Parameters used to overwrite discovery default behaviour
	/// @return A JSON structure holding a list of USB devices found
	///
	QJsonArray discover(const QJsonObject& params);

public slots:
	bool prepare();
	bool start();
	void stop();
	void newThreadFrame(Image<ColorRgb> image);

#if defined(ENABLE_CEC)
	void handleCecEvent(CECEvent event);
#endif

signals:
	void newFrame(const Image<ColorRgb> & image);
	void readError(const char* err);

private slots:
	int read_frame();

private:
	bool init();
	void uninit();
	bool open_device();
	void close_device();
	void init_read(unsigned int buffer_size);
	void init_mmap();
	void init_userp(unsigned int buffer_size);
	void init_device(VideoStandard videoStandard);
	void uninit_device();
	void start_capturing();
	void stop_capturing();
	bool process_image(const void *p, int size);
	int xioctl(int request, void *arg);
	int xioctl(int fileDescriptor, int request, void *arg);

	void throw_exception(const QString & error)
	{
		Error(_log, "Throws error: %s", QSTRING_CSTR(error));
	}

	void throw_errno_exception(const QString & error)
	{
		Error(_log, "Throws error nr: %s", QSTRING_CSTR(QString(error + " error code " + QString::number(errno) + ", " + strerror(errno))));
	}

private:
	enum io_method
	{
		IO_METHOD_READ,
		IO_METHOD_MMAP,
		IO_METHOD_USERPTR
	};

	struct buffer
	{
		void *start;
		size_t length;
	};

private:
	QString _currentDevicePath, _currentDeviceName;
	EncoderThreadManager* _threadManager;
	QMap<QString, V4L2Grabber::DeviceProperties> _deviceProperties;
	QMap<QString, QList<DeviceControls>> _deviceControls;
	io_method _ioMethod;
	int _fileDescriptor;
	std::vector<buffer> _buffers;
	PixelFormat _pixelFormat, _pixelFormatConfig;
	int _lineLength;
	int _frameByteSize;
	QAtomicInt _currentFrame;

	// signal detection
	int _noSignalCounterThreshold;
	ColorRgb _noSignalThresholdColor;
	bool _cecDetectionEnabled, _cecStandbyActivated, _signalDetectionEnabled, _noSignalDetected;
	int _noSignalCounter;
	int _brightness, _contrast, _saturation, _hue;
	double _x_frac_min;
	double _y_frac_min;
	double _x_frac_max;
	double _y_frac_max;

	QSocketNotifier *_streamNotifier;
	bool _initialized, _reload;

protected:
	void enumVideoCaptureDevices();
	void enumFrameIntervals(QList<int> &framerates, int fileDescriptor, int pixelformat, int width, int height);
};
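
The private xioctl() helpers declared above follow the classic V4L2 capture-example convention of retrying an ioctl that was interrupted by a signal. The sketch below shows that conventional pattern; it is not the commit's actual implementation, and the function name is hypothetical.

// Conventional EINTR-retry pattern sketch; not the commit's implementation.
#include <cerrno>
#include <sys/ioctl.h>

static int xioctl_sketch(int fileDescriptor, int request, void* arg)
{
	int rc;
	do
	{
		rc = ioctl(fileDescriptor, request, arg);
	}
	while (rc == -1 && errno == EINTR);

	return rc; // -1 on a non-EINTR failure, with errno describing the error
}

Callers such as read_frame() would typically check the return value and errno (for example EAGAIN on a dequeue with no buffer ready) rather than treating every -1 as fatal.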