Former-commit-id: e21a530324abb6af92f4b34abdfbf23313775f4a
This commit is contained in:
T. van der Zwan 2014-01-27 19:16:45 +00:00
commit b4c49c9ec4
36 changed files with 1559 additions and 77 deletions

View File

@ -13,6 +13,9 @@ option (ENABLE_SPIDEV "Enable the SPIDEV device" ON)
message(STATUS "ENABLE_DISPMANX = " ${ENABLE_DISPMANX})
message(STATUS "ENABLE_SPIDEV = " ${ENABLE_SPIDEV})
option (ENABLE_V4L2 "Enable the V4L2 grabber" ON)
message(STATUS "ENABLE_V4L2 = " ${ENABLE_V4L2})
# Create the configuration file
# configure a header file to pass some of the CMake settings
# to the source code

View File

@ -3,5 +3,8 @@
// Define to enable the dispmanx grabber
#cmakedefine ENABLE_DISPMANX
// Define to enable the v4l2 grabber
#cmakedefine ENABLE_V4L2
// Define to enable the spi-device
#cmakedefine ENABLE_SPIDEV

View File

@ -22,6 +22,7 @@ tar --create --verbose --gzip --absolute-names --show-transformed-names \
--transform "s://:/:g" \
"$builddir/bin/hyperiond" \
"$builddir/bin/hyperion-remote" \
"$builddir/bin/hyperion-v4l2" \
"$builddir/bin/gpio2spi" \
"$builddir/bin/dispmanx2png" \
"$repodir/effects/"* \

View File

@ -47,6 +47,7 @@ fi
if [ $IS_OPENELEC -ne 1 ]; then
ln -fs /opt/hyperion/bin/hyperiond /usr/bin/hyperiond
ln -fs /opt/hyperion/bin/hyperion-remote /usr/bin/hyperion-remote
ln -fs /opt/hyperion/bin/hyperion-v4l2 /usr/bin/hyperion-v4l2
fi
# create link to the gpio changer (gpio->spi)
@ -63,7 +64,6 @@ else
ln -s /opt/hyperion/config/hyperion.config.json /etc/hyperion.config.json
fi
# Copy the service control configuration to /etc/int
if [ $USE_INITCTL -eq 1 ]; then
echo 'Installing initctl script'

View File

@ -56,21 +56,21 @@
"red" :
{
"threshold" : 0.0000,
"gamma" : 2.0000,
"gamma" : 1.0000,
"blacklevel" : 0.0000,
"whitelevel" : 1.0000
},
"green" :
{
"threshold" : 0.0000,
"gamma" : 2.0000,
"gamma" : 1.0000,
"blacklevel" : 0.0000,
"whitelevel" : 1.0000
},
"blue" :
{
"threshold" : 0.0000,
"gamma" : 2.0000,
"gamma" : 1.0000,
"blacklevel" : 0.0000,
"whitelevel" : 1.0000
}
@ -349,9 +349,11 @@
/// The black border configuration, contains the following items:
/// * enable : true if the detector should be activated
/// * threshold : Value below which a pixel is regarded as black (value between 0.0 and 1.0)
"blackborderdetector" :
{
"enable" : true
"enable" : true,
"threshold" : 0.01
},
/// The configuration of the effect engine, contains the following items:

View File

@ -13,9 +13,9 @@
"device" :
{
"name" : "MyPi",
"type" : "test",
"output" : "~/hyperion.test.out",
"rate" : 250000,
"type" : "adalight",
"output" : "/dev/ttyUSB0",
"rate" : 115200,
"colorOrder" : "rgb"
},
@ -349,9 +349,11 @@
/// The black border configuration, contains the following items:
/// * enable : true if the detector should be activated
/// * threshold : Value below which a pixel is regarded as black (value between 0.0 and 1.0)
"blackborderdetector" :
{
"enable" : true
"enable" : true,
"threshold" : 0.01
},
/// The configuration of the effect engine, contains the following items:
@ -371,6 +373,38 @@
"duration_ms" : 3000
},
/// The configuration for the frame-grabber, contains the following items:
/// * width : The width of the grabbed frames [pixels]
/// * height : The height of the grabbed frames [pixels]
/// * frequency_Hz : The frequency of the frame grab [Hz]
// "framegrabber" :
// {
// "width" : 64,
// "height" : 64,
// "frequency_Hz" : 10.0
// },
/// The configuration of the XBMC connection used to enable and disable the frame-grabber. Contains the following fields:
/// * xbmcAddress : The IP address of the XBMC-host
/// * xbmcTcpPort : The TCP-port of the XBMC-server
/// * grabVideo : Flag indicating that the frame-grabber is on(true) during video playback
/// * grabPictures : Flag indicating that the frame-grabber is on(true) during picture show
/// * grabAudio : Flag indicating that the frame-grabber is on(true) during audio playback
/// * grabMenu : Flag indicating that the frame-grabber is on(true) in the XBMC menu
/// * grabScreensaver : Flag indicating that the frame-grabber is on(true) when XBMC is on screensaver
/// * enable3DDetection : Flag indicating that the frame-grabber should switch to a 3D compatible modus if a 3D video is playing
// "xbmcVideoChecker" :
// {
// "xbmcAddress" : "127.0.0.1",
// "xbmcTcpPort" : 9090,
// "grabVideo" : true,
// "grabPictures" : true,
// "grabAudio" : true,
// "grabMenu" : false,
// "grabScreensaver" : true,
// "enable3DDetection" : true
// },
/// The configuration of the Json server which enables the json remote interface
/// * port : Port at which the json server is started
"jsonServer" :

View File

@ -120,7 +120,7 @@ void OptionsParser::usage() const {
for(i = parameters.parameters.begin();
i != parameters.parameters.end(); i++)
{
cerr.width(30);
cerr.width(33);
cerr << std::left << " " + (*i)->usageLine();
cerr.width(40);

View File

@ -1 +1 @@
4d50c38a61c9f32a15b29ef3b3953c2835fa9cac
5e8ca7ba33eb38d828b50971ec94b045025caa78

View File

@ -1 +1 @@
96b8f6c7e0241930c944a7358af473dabecf1dae
08d42deff1de4c4296e4c6e22c783a0096ed3396

View File

@ -48,8 +48,9 @@ namespace hyperion
public:
///
/// Constructs a black-border detector
/// @param[in] blackborderThreshold The threshold which the blackborder detector should use
///
BlackBorderDetector();
BlackBorderDetector(uint8_t blackborderThreshold);
///
/// Performs the actual black-border detection on the given image
@ -125,7 +126,11 @@ namespace hyperion
inline bool isBlack(const Pixel_T & color)
{
// Return the simple compare of the color against black
return color.red < 3 && color.green < 3 && color.green < 3;
return color.red < _blackborderThreshold && color.green < _blackborderThreshold && color.green < _blackborderThreshold;
}
private:
/// Threshold for the blackborder detector [0 .. 255]
const uint8_t _blackborderThreshold;
};
} // end namespace hyperion

View File

@ -21,11 +21,13 @@ namespace hyperion
/// horizontal border becomes the current border
/// @param blurRemoveCnt The size to add to a horizontal or vertical border (because the
/// outer pixels are blurred (black and color combined due to image scaling))
/// @param[in] blackborderThreshold The threshold which the blackborder detector should use
///
BlackBorderProcessor(
const unsigned unknownFrameCnt,
const unsigned borderFrameCnt,
const unsigned blurRemoveCnt);
const unsigned blurRemoveCnt,
uint8_t blackborderThreshold);
///
/// Return the current (detected) border

View File

@ -8,7 +8,9 @@
#include <hyperion/ImageProcessorFactory.h>
#include <hyperion/LedString.h>
#include <hyperion/ImageToLedsMap.h>
#include <hyperion/BlackBorderProcessor.h>
// Black border includes
#include <blackborder/BlackBorderProcessor.h>
///
/// The ImageProcessor translates an RGB-image to RGB-values for the leds. The processing is
@ -53,7 +55,7 @@ public:
verifyBorder(image);
// Create a result vector and call the 'in place' functionl
std::vector<ColorRgb> colors = mImageToLeds->getMeanLedColor(image);
std::vector<ColorRgb> colors = _imageToLeds->getMeanLedColor(image);
// return the computed colors
return colors;
@ -75,7 +77,7 @@ public:
verifyBorder(image);
// Determine the mean-colors of each led (using the existing mapping)
mImageToLeds->getMeanLedColor(image, ledColors);
_imageToLeds->getMeanLedColor(image, ledColors);
}
///
@ -98,8 +100,10 @@ private:
/// given led-string specification
///
/// @param[in] ledString The led-string specification
/// @param[in] enableBlackBorderDetector Flag indicating if the blackborder detector should be enabled
/// @param[in] blackborderThreshold The threshold which the blackborder detector should use
///
ImageProcessor(const LedString &ledString, bool enableBlackBorderDetector);
ImageProcessor(const LedString &ledString, bool enableBlackBorderDetector, uint8_t blackborderThreshold);
///
/// Performs black-border detection (if enabled) on the given image
@ -116,17 +120,17 @@ private:
const hyperion::BlackBorder border = _borderProcessor->getCurrentBorder();
// Clean up the old mapping
delete mImageToLeds;
delete _imageToLeds;
if (border.unknown)
{
// Construct a new buffer and mapping
mImageToLeds = new hyperion::ImageToLedsMap(image.width(), image.height(), 0, 0, mLedString.leds());
_imageToLeds = new hyperion::ImageToLedsMap(image.width(), image.height(), 0, 0, _ledString.leds());
}
else
{
// Construct a new buffer and mapping
mImageToLeds = new hyperion::ImageToLedsMap(image.width(), image.height(), border.horizontalSize, border.verticalSize, mLedString.leds());
_imageToLeds = new hyperion::ImageToLedsMap(image.width(), image.height(), border.horizontalSize, border.verticalSize, _ledString.leds());
}
std::cout << "CURRENT BORDER TYPE: unknown=" << border.unknown << " hor.size=" << border.horizontalSize << " vert.size=" << border.verticalSize << std::endl;
@ -135,14 +139,14 @@ private:
private:
/// The Led-string specification
const LedString mLedString;
const LedString _ledString;
/// Flag the enables(true)/disabled(false) blackborder detector
bool _enableBlackBorderRemoval;
const bool _enableBlackBorderRemoval;
/// The processor for black border detection
hyperion::BlackBorderProcessor * _borderProcessor;
/// The mapping of image-pixels to leds
hyperion::ImageToLedsMap* mImageToLeds;
hyperion::ImageToLedsMap* _imageToLeds;
};

View File

@ -30,8 +30,10 @@ public:
/// Initialises this factory with the given led-configuration
///
/// @param[in] ledString The led configuration
/// @param[in] enableBlackBorderDetector Flag indicating if the blackborder detector should be enabled
/// @param[in] blackborderThreshold The threshold which the blackborder detector should use
///
void init(const LedString& ledString, bool enableBlackBorderDetector);
void init(const LedString& ledString, bool enableBlackBorderDetector, double blackborderThreshold);
///
/// Creates a new ImageProcessor. The onwership of the processor is transferred to the caller.
@ -46,4 +48,7 @@ private:
/// Flag indicating if the black border detector should be used
bool _enableBlackBorderDetector;
/// Threshold for the blackborder detector [0 .. 255]
uint8_t _blackborderThreshold;
};

View File

@ -4,6 +4,7 @@ SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc)
add_subdirectory(hyperion)
add_subdirectory(blackborder)
add_subdirectory(jsonserver)
add_subdirectory(protoserver)
add_subdirectory(boblightserver)

View File

@ -0,0 +1,11 @@
// BlackBorders includes
#include <blackborder/BlackBorderDetector.h>
using namespace hyperion;
BlackBorderDetector::BlackBorderDetector(uint8_t blackborderThreshold) :
_blackborderThreshold(blackborderThreshold)
{
// empty
}

View File

@ -1,17 +1,17 @@
// Local-Hyperion includes
#include <hyperion/BlackBorderProcessor.h>
// Blackborder includes
#include <blackborder/BlackBorderProcessor.h>
using namespace hyperion;
BlackBorderProcessor::BlackBorderProcessor(
const unsigned unknownFrameCnt,
BlackBorderProcessor::BlackBorderProcessor(const unsigned unknownFrameCnt,
const unsigned borderFrameCnt,
const unsigned blurRemoveCnt) :
const unsigned blurRemoveCnt,
uint8_t blackborderThreshold) :
_unknownSwitchCnt(unknownFrameCnt),
_borderSwitchCnt(borderFrameCnt),
_blurRemoveCnt(blurRemoveCnt),
_detector(),
_detector(blackborderThreshold),
_currentBorder({true, -1, -1}),
_previousDetectedBorder({true, -1, -1}),
_consistentCnt(0)

View File

@ -0,0 +1,23 @@
# Define the current source locations
SET(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/blackborder)
SET(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/blackborder)
SET(Blackborder_HEADERS
${CURRENT_HEADER_DIR}/BlackBorderDetector.h
${CURRENT_HEADER_DIR}/BlackBorderProcessor.h
)
SET(Blackborder_SOURCES
${CURRENT_SOURCE_DIR}/BlackBorderDetector.cpp
${CURRENT_SOURCE_DIR}/BlackBorderProcessor.cpp
)
add_library(blackborder
${Blackborder_HEADERS}
${Blackborder_SOURCES}
)
target_link_libraries(blackborder
hyperion-utils
)

View File

@ -1,10 +0,0 @@
// Local-Hyperion includes
#include <hyperion/BlackBorderDetector.h>
using namespace hyperion;
BlackBorderDetector::BlackBorderDetector()
{
// empty
}

View File

@ -17,9 +17,6 @@ SET(Hyperion_HEADERS
${CURRENT_HEADER_DIR}/LedString.h
${CURRENT_HEADER_DIR}/PriorityMuxer.h
${CURRENT_HEADER_DIR}/BlackBorderDetector.h
${CURRENT_HEADER_DIR}/BlackBorderProcessor.h
${CURRENT_SOURCE_DIR}/MultiColorTransform.h
)
@ -30,8 +27,6 @@ SET(Hyperion_SOURCES
${CURRENT_SOURCE_DIR}/LedString.cpp
${CURRENT_SOURCE_DIR}/PriorityMuxer.cpp
${CURRENT_SOURCE_DIR}/BlackBorderDetector.cpp
${CURRENT_SOURCE_DIR}/BlackBorderProcessor.cpp
${CURRENT_SOURCE_DIR}/ImageToLedsMap.cpp
${CURRENT_SOURCE_DIR}/MultiColorTransform.cpp
${CURRENT_SOURCE_DIR}/LinearColorSmoothing.cpp
@ -54,6 +49,7 @@ add_library(hyperion
)
target_link_libraries(hyperion
blackborder
hyperion-utils
leddevice
effectengine

View File

@ -270,7 +270,10 @@ Hyperion::Hyperion(const Json::Value &jsonConfig) :
throw std::runtime_error("Color transformation incorrectly set");
}
// initialize the image processor factory
ImageProcessorFactory::getInstance().init(_ledString, jsonConfig["blackborderdetector"].get("enable", true).asBool());
ImageProcessorFactory::getInstance().init(
_ledString,
jsonConfig["blackborderdetector"].get("enable", true).asBool(),
jsonConfig["blackborderdetector"].get("threshold", 0.01).asDouble());
// initialize the color smoothing filter
_device = createColorSmoothing(jsonConfig["color"]["smoothing"], _device);

View File

@ -2,50 +2,52 @@
// Hyperion includes
#include <hyperion/ImageProcessor.h>
#include <hyperion/ImageToLedsMap.h>
#include <hyperion/BlackBorderProcessor.h>
// Blackborder includes
#include <blackborder/BlackBorderProcessor.h>
using namespace hyperion;
ImageProcessor::ImageProcessor(const LedString& ledString, bool enableBlackBorderDetector) :
mLedString(ledString),
ImageProcessor::ImageProcessor(const LedString& ledString, bool enableBlackBorderDetector, uint8_t blackborderThreshold) :
_ledString(ledString),
_enableBlackBorderRemoval(enableBlackBorderDetector),
_borderProcessor(new BlackBorderProcessor(600, 50, 1)),
mImageToLeds(nullptr)
_borderProcessor(new BlackBorderProcessor(600, 50, 1, blackborderThreshold)),
_imageToLeds(nullptr)
{
// empty
}
ImageProcessor::~ImageProcessor()
{
delete mImageToLeds;
delete _imageToLeds;
delete _borderProcessor;
}
unsigned ImageProcessor::getLedCount() const
{
return mLedString.leds().size();
return _ledString.leds().size();
}
void ImageProcessor::setSize(const unsigned width, const unsigned height)
{
// Check if the existing buffer-image is already the correct dimensions
if (mImageToLeds && mImageToLeds->width() == width && mImageToLeds->height() == height)
if (_imageToLeds && _imageToLeds->width() == width && _imageToLeds->height() == height)
{
return;
}
// Clean up the old buffer and mapping
delete mImageToLeds;
delete _imageToLeds;
// Construct a new buffer and mapping
mImageToLeds = new ImageToLedsMap(width, height, 0, 0, mLedString.leds());
_imageToLeds = new ImageToLedsMap(width, height, 0, 0, _ledString.leds());
}
bool ImageProcessor::getScanParameters(size_t led, double &hscanBegin, double &hscanEnd, double &vscanBegin, double &vscanEnd) const
{
if (led < mLedString.leds().size())
if (led < _ledString.leds().size())
{
const Led & l = mLedString.leds()[led];
const Led & l = _ledString.leds()[led];
hscanBegin = l.minX_frac;
hscanEnd = l.maxX_frac;
vscanBegin = l.minY_frac;

View File

@ -10,13 +10,25 @@ ImageProcessorFactory& ImageProcessorFactory::getInstance()
return instance;
}
void ImageProcessorFactory::init(const LedString& ledString, bool enableBlackBorderDetector)
void ImageProcessorFactory::init(const LedString& ledString, bool enableBlackBorderDetector, double blackborderThreshold)
{
_ledString = ledString;
_enableBlackBorderDetector = enableBlackBorderDetector;
int threshold = int(std::ceil(blackborderThreshold * 255));
if (threshold < 0)
threshold = 0;
else if (threshold > 255)
threshold = 255;
_blackborderThreshold = uint8_t(threshold);
if (_enableBlackBorderDetector)
{
std::cout << "Black border threshold set to " << blackborderThreshold << " (" << int(_blackborderThreshold) << ")" << std::endl;
}
}
ImageProcessor* ImageProcessorFactory::newImageProcessor() const
{
return new ImageProcessor(_ledString, _enableBlackBorderDetector);
return new ImageProcessor(_ledString, _enableBlackBorderDetector, _blackborderThreshold);
}

View File

@ -223,6 +223,12 @@
"enable" : {
"type" : "boolean",
"required" : true
},
"threshold" : {
"type" : "number",
"required" : false,
"minimum" : 0.0,
"maximum" : 1.0
}
},
"additionalProperties" : false

View File

@ -3,9 +3,6 @@
set(CURRENT_HEADER_DIR ${CMAKE_SOURCE_DIR}/include/protoserver)
set(CURRENT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/libsrc/protoserver)
# add protocol buffers
find_package(Protobuf REQUIRED)
include_directories(
${CMAKE_CURRENT_BINARY_DIR}
${PROTOBUF_INCLUDE_DIRS})

View File

@ -1,2 +1,5 @@
add_subdirectory(hyperiond)
add_subdirectory(hyperion-remote)
if (ENABLE_V4L2)
add_subdirectory(hyperion-v4l2)
endif (ENABLE_V4L2)

View File

@ -0,0 +1,57 @@
cmake_minimum_required(VERSION 2.8)
project(hyperion-v4l2)
# add protocol buffers
find_package(Protobuf REQUIRED)
# find Qt4
find_package(Qt4 REQUIRED QtCore QtGui QtNetwork)
include_directories(
${CMAKE_CURRENT_BINARY_DIR}
${PROTOBUF_INCLUDE_DIRS}
${QT_INCLUDES}
)
set(Hyperion_V4L2_HEADERS
V4L2Grabber.h
ProtoConnection.h
ImageHandler.h
VideoStandardParameter.h
)
set(Hyperion_V4L2_SOURCES
hyperion-v4l2.cpp
V4L2Grabber.cpp
ProtoConnection.cpp
ImageHandler.cpp
)
set(Hyperion_V4L2_PROTOS
${CMAKE_CURRENT_SOURCE_DIR}/../../libsrc/protoserver/message.proto
)
protobuf_generate_cpp(Hyperion_V4L2_PROTO_SRCS Hyperion_V4L2_PROTO_HDRS
${Hyperion_V4L2_PROTOS}
)
add_executable(hyperion-v4l2
${Hyperion_V4L2_HEADERS}
${Hyperion_V4L2_SOURCES}
${Hyperion_V4L2_PROTO_SRCS}
${Hyperion_V4L2_PROTO_HDRS}
)
target_link_libraries(hyperion-v4l2
getoptPlusPlus
blackborder
hyperion-utils
${PROTOBUF_LIBRARIES}
pthread
)
qt4_use_modules(hyperion-v4l2
Core
Gui
Network)

View File

@ -0,0 +1,41 @@
// hyperion-v4l2 includes
#include "ImageHandler.h"
// Create the handler: open the proto connection and configure the
// blackborder processor that doubles as a signal detector. The double
// threshold [0.0 .. 1.0] is scaled and clamped into the 8-bit range the
// processor expects.
ImageHandler::ImageHandler(const std::string &address, int priority, double signalThreshold, bool skipProtoReply) :
	_priority(priority),
	_connection(address),
	_signalThreshold(signalThreshold),
	_signalProcessor(100, 50, 0, uint8_t(std::max(0, std::min(255, int(255 * signalThreshold)))))
{
	_connection.setSkipReply(skipProtoReply);
}
// Handle one grabbed frame: forward it to Hyperion, optionally gated by
// signal detection (a frame that is entirely "border" means no signal).
void ImageHandler::receiveImage(const Image<ColorRgb> &image)
{
	// signal detection disabled: forward every frame unconditionally
	if (_signalThreshold < 0)
	{
		_connection.setImage(image, _priority, 200);
		return;
	}

	// feed the frame to the border processor acting as signal detector;
	// process() reporting a change is logged as an on/off transition
	if (_signalProcessor.process(image))
	{
		std::cout << "Signal state = " << (_signalProcessor.getCurrentBorder().unknown ? "off" : "on") << std::endl;
	}

	// an unknown border is treated as "no signal": only forward the frame
	// to Hyperion when a signal is present
	if (!_signalProcessor.getCurrentBorder().unknown)
	{
		_connection.setImage(image, _priority, 200);
	}
}
void ImageHandler::imageCallback(void *arg, const Image<ColorRgb> &image)
{
ImageHandler * handler = static_cast<ImageHandler *>(arg);
handler->receiveImage(image);
}

View File

@ -0,0 +1,34 @@
// blackborder includes
#include <blackborder/BlackBorderProcessor.h>
// hyperion-v4l2 includes
#include "ProtoConnection.h"
/// This class handles callbacks from the V4L2 grabber: each grabbed frame
/// is (optionally) checked for the presence of a video signal and then
/// forwarded to the Hyperion server over the proto connection.
class ImageHandler
{
public:
	/// Construct the handler
	/// @param address Hyperion server address as "<host>:<port>"
	/// @param priority Priority channel used for calls to Hyperion
	/// @param signalThreshold Signal-detection threshold [0.0 .. 1.0];
	///        a negative value disables signal detection entirely
	/// @param skipProtoReply Do not wait for/parse Hyperion's reply messages
	ImageHandler(const std::string & address, int priority, double signalThreshold, bool skipProtoReply);

	/// Handle a single image
	/// @param image The image to process
	void receiveImage(const Image<ColorRgb> & image);

	/// static function used to direct callbacks to a ImageHandler object
	/// @param arg This should be an ImageHandler instance
	/// @param image The image to process
	static void imageCallback(void * arg, const Image<ColorRgb> & image);

private:
	/// Priority for calls to Hyperion
	const int _priority;

	/// Hyperion proto connection object
	ProtoConnection _connection;

	/// Threshold used for signal detection (negative = detection disabled)
	double _signalThreshold;

	/// Blackborder detector which is used as a signal detector (unknown border = no signal)
	hyperion::BlackBorderProcessor _signalProcessor;
};

View File

@ -0,0 +1,188 @@
// stl includes
#include <stdexcept>
// Qt includes
#include <QRgb>
// hyperion-v4l2 includes
#include "ProtoConnection.h"
// Construct the connection from an "<host>:<port>" string and immediately
// try to connect to the Hyperion server. Throws std::runtime_error when
// the address string cannot be parsed.
ProtoConnection::ProtoConnection(const std::string & a) :
	_socket(),
	_skipReply(false)
{
	const QString address(a.c_str());
	const QStringList parts = address.split(":");
	if (parts.size() != 2)
	{
		throw std::runtime_error(QString("Wrong address: unable to parse address (%1)").arg(address).toStdString());
	}

	_host = parts[0];

	bool ok = false;
	_port = parts[1].toUShort(&ok);
	if (!ok)
	{
		throw std::runtime_error(QString("Wrong address: Unable to parse the port number (%1)").arg(parts[1]).toStdString());
	}

	// try to connect to host
	std::cout << "Connecting to Hyperion: " << _host.toStdString() << ":" << _port << std::endl;
	connectToHost();
}
ProtoConnection::~ProtoConnection()
{
	// close the TCP connection when the object is destroyed
	_socket.close();
}
void ProtoConnection::setSkipReply(bool skip)
{
	// when set, sendMessage() will not wait for nor parse Hyperion's reply
	_skipReply = skip;
}
// Send a COLOR command: set all leds to the given color on the given
// priority channel for 'duration' milliseconds.
void ProtoConnection::setColor(const ColorRgb & color, int priority, int duration)
{
	proto::HyperionRequest request;
	request.set_command(proto::HyperionRequest::COLOR);

	// pack the 8-bit channels into a single 0xRRGGBB integer
	const int rgb = (color.red << 16) | (color.green << 8) | color.blue;

	proto::ColorRequest * colorRequest = request.MutableExtension(proto::ColorRequest::colorRequest);
	colorRequest->set_rgbcolor(rgb);
	colorRequest->set_priority(priority);
	colorRequest->set_duration(duration);

	// send command message
	sendMessage(request);
}
// Send an IMAGE command: transmit the raw RGB888 pixel data together with
// its dimensions, priority and duration.
void ProtoConnection::setImage(const Image<ColorRgb> &image, int priority, int duration)
{
	proto::HyperionRequest request;
	request.set_command(proto::HyperionRequest::IMAGE);

	proto::ImageRequest * imageRequest = request.MutableExtension(proto::ImageRequest::imageRequest);
	// 3 bytes per pixel (RGB)
	const int pixelDataSize = image.width() * image.height() * 3;
	imageRequest->set_imagedata(image.memptr(), pixelDataSize);
	imageRequest->set_imagewidth(image.width());
	imageRequest->set_imageheight(image.height());
	imageRequest->set_priority(priority);
	imageRequest->set_duration(duration);

	// send command message
	sendMessage(request);
}
// Send a CLEAR command for a single priority channel.
void ProtoConnection::clear(int priority)
{
	proto::HyperionRequest request;
	request.set_command(proto::HyperionRequest::CLEAR);
	request.MutableExtension(proto::ClearRequest::clearRequest)->set_priority(priority);

	// send command message
	sendMessage(request);
}
// Send a CLEARALL command: wipe every priority channel on the server.
void ProtoConnection::clearAll()
{
	proto::HyperionRequest request;
	request.set_command(proto::HyperionRequest::CLEARALL);

	// send command message
	sendMessage(request);
}
// Try to (re)connect the TCP socket to the configured host/port.
// A failed attempt is reported but not fatal: sendMessage() retries the
// connection on its next call.
void ProtoConnection::connectToHost()
{
	_socket.connectToHost(_host, _port);
	if (_socket.waitForConnected())
	{
		std::cout << "Connected to Hyperion host" << std::endl;
	}
	else
	{
		// previously a failure was silent, which made a wrong address or a
		// stopped server hard to diagnose
		std::cerr << "Failed to connect to Hyperion host " << _host.toStdString() << ":" << _port << std::endl;
	}
}
// Send a single proto request to Hyperion. The wire format is a 4-byte
// big-endian length header followed by the serialized message. Unless
// _skipReply is set, the (identically framed) reply is read back and parsed.
void ProtoConnection::sendMessage(const proto::HyperionRequest &message)
{
	// (re)establish the connection when needed
	if (_socket.state() == QAbstractSocket::UnconnectedState)
	{
		std::cout << "Currently disconnected: trying to connect to host" << std::endl;
		connectToHost();
	}
	if (_socket.state() != QAbstractSocket::ConnectedState)
	{
		return;
	}

	// We only get here if we are connected

	// serialize the message and build the 4-byte big-endian length header
	const std::string serializedMessage = message.SerializeAsString();
	int length = serializedMessage.size();
	const uint8_t header[] = {
		uint8_t((length >> 24) & 0xFF),
		uint8_t((length >> 16) & 0xFF),
		uint8_t((length >>  8) & 0xFF),
		uint8_t((length      ) & 0xFF)};

	// write the framed message
	_socket.write(reinterpret_cast<const char *>(header), 4);
	_socket.write(reinterpret_cast<const char *>(serializedMessage.data()), length);
	if (!_socket.waitForBytesWritten())
	{
		std::cerr << "Error while writing data to host" << std::endl;
		return;
	}

	if (!_skipReply)
	{
		// read the reply: 4-byte length header first, then the payload
		QByteArray serializedReply;
		length = -1;
		// BUGFIX: this loop previously used '&&', which exited as soon as the
		// header had been parsed -- possibly before the full payload arrived,
		// handing ParseFromArray a truncated buffer. Keep reading until the
		// length is known AND all length+4 bytes have been received.
		while (length < 0 || serializedReply.size() < length + 4)
		{
			// receive reply
			if (!_socket.waitForReadyRead())
			{
				std::cerr << "Error while reading data from host" << std::endl;
				return;
			}
			serializedReply += _socket.readAll();

			if (length < 0 && serializedReply.size() >= 4)
			{
				// decode the message size from the big-endian header
				length =
					((serializedReply[0]<<24) & 0xFF000000) |
					((serializedReply[1]<<16) & 0x00FF0000) |
					((serializedReply[2]<< 8) & 0x0000FF00) |
					((serializedReply[3]    ) & 0x000000FF);
			}
		}

		// parse the reply data and handle the result
		proto::HyperionReply reply;
		reply.ParseFromArray(serializedReply.constData()+4, length);
		parseReply(reply);
	}
}
// Check a reply received from Hyperion.
//
// @param reply The received reply message
// @return true when the reply indicates success
// @throws std::runtime_error when the reply reports an error
bool ProtoConnection::parseReply(const proto::HyperionReply &reply)
{
	if (!reply.success())
	{
		if (reply.has_error())
		{
			throw std::runtime_error("Error: " + reply.error());
		}
		throw std::runtime_error("Error: No error info");
	}

	// BUGFIX: the original initialized a local 'success' to false and never
	// updated it, so the function returned false even for successful replies,
	// contradicting its documented contract.
	return true;
}

View File

@ -0,0 +1,102 @@
#pragma once
// stl includes
#include <string>
// Qt includes
#include <QColor>
#include <QImage>
#include <QTcpSocket>
#include <QMap>
// hyperion util
#include <utils/Image.h>
#include <utils/ColorRgb.h>
// protobuf includes (message.pb.h is generated by protoc from message.proto)
#include <message.pb.h>
///
/// Connection class to set up a connection to the hyperion server and execute commands
///
class ProtoConnection
{
public:
	///
	/// Constructor
	///
	/// @param address The address of the Hyperion server (for example "192.168.0.32:19444")
	///
	ProtoConnection(const std::string & address);

	///
	/// Destructor (closes the underlying TCP socket)
	///
	~ProtoConnection();

	/// Do not read reply messages from Hyperion if set to true
	void setSkipReply(bool skip);

	///
	/// Set all leds to the specified color
	///
	/// @param color The color
	/// @param priority The priority
	/// @param duration The duration in milliseconds (defaults to 1)
	///
	void setColor(const ColorRgb & color, int priority, int duration = 1);

	///
	/// Set the leds according to the given image (assume the image is stretched to the display size)
	///
	/// @param image The image
	/// @param priority The priority
	/// @param duration The duration in milliseconds (-1 = no timeout)
	///
	void setImage(const Image<ColorRgb> & image, int priority, int duration = -1);

	///
	/// Clear the given priority channel
	///
	/// @param priority The priority
	///
	void clear(int priority);

	///
	/// Clear all priority channels
	///
	void clearAll();

private:
	/// Try to connect to the Hyperion host
	void connectToHost();

	///
	/// Send a command message and receive its reply
	///
	/// @param message The message to send
	///
	void sendMessage(const proto::HyperionRequest & message);

	///
	/// Parse a reply message
	///
	/// @param reply The received reply
	///
	/// @return true if the reply indicates success
	///
	bool parseReply(const proto::HyperionReply & reply);

private:
	/// The TCP-Socket with the connection to the server
	QTcpSocket _socket;

	/// Host address
	QString _host;

	/// Host port
	uint16_t _port;

	/// Skip receiving reply messages from Hyperion if set
	bool _skipReply;
};

View File

@ -0,0 +1,711 @@
#include <iostream>
#include <sstream>
#include <stdexcept>
#include <cstdio>
#include <cassert>
#include <cstdlib>
#include <fcntl.h>
#include <unistd.h>
#include <cstring>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include "V4L2Grabber.h"
#define CLEAR(x) memset(&(x), 0, sizeof(x))
// Clamp an integer into the 0..255 range of one 8-bit color channel.
static inline uint8_t clamp(int x)
{
	if (x < 0)
		return 0;
	if (x > 255)
		return 255;
	return uint8_t(x);
}
// Convert one YUV444 pixel to RGB888 with the integer approximation of the
// BT.601 conversion, see:
// http://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion
static void yuv2rgb(uint8_t y, uint8_t u, uint8_t v, uint8_t & r, uint8_t & g, uint8_t & b)
{
	// common luma term (298*c + 128 rounding bias folded in)
	const int luma = 298 * (y - 16) + 128;
	const int d = u - 128;
	const int e = v - 128;

	r = clamp((luma + 409 * e) >> 8);
	g = clamp((luma - 100 * d - 208 * e) >> 8);
	b = clamp((luma + 516 * d) >> 8);
}
// Construct the grabber for the given V4L2 device node and configure it.
// cropHorizontal/cropVertical remove border pixels; frameDecimation and
// pixelDecimation are clamped to at least 1 so decimation never divides by 0.
V4L2Grabber::V4L2Grabber(const std::string &device, int input, VideoStandard videoStandard, int width, int height, int cropHorizontal, int cropVertical, int frameDecimation, int pixelDecimation) :
	_deviceName(device),
	_ioMethod(IO_METHOD_MMAP),
	_fileDescriptor(-1),
	_buffers(),
	_pixelFormat(0),
	_width(width),
	_height(height),
	_cropWidth(cropHorizontal),
	_cropHeight(cropVertical),
	_frameDecimation(std::max(1, frameDecimation)),
	_pixelDecimation(std::max(1, pixelDecimation)),
	_currentFrame(0),
	_callback(nullptr),
	_callbackArg(nullptr)
{
	// open the device node and apply input/standard settings immediately
	open_device();
	init_device(videoStandard, input);
}
V4L2Grabber::~V4L2Grabber()
{
	// release capture buffers first, then close the device file descriptor
	uninit_device();
	close_device();
}
// Register the callback (and its opaque argument) that is invoked for
// every processed frame.
void V4L2Grabber::setCallback(V4L2Grabber::ImageCallback callback, void *arg)
{
	_callback = callback;
	_callbackArg = arg;
}
// Start video streaming on the device.
void V4L2Grabber::start()
{
	start_capturing();
}
// Grab 'frameCount' frames; a negative count captures indefinitely. Each
// iteration blocks in select() (2 second timeout) until the device signals
// data, then reads one frame; EAGAIN from read_frame() retries the select.
void V4L2Grabber::capture(int frameCount)
{
	for (int count = 0; count < frameCount || frameCount < 0; ++count)
	{
		for (;;)
		{
			// the set of file descriptors for select
			fd_set fds;
			FD_ZERO(&fds);
			FD_SET(_fileDescriptor, &fds);

			// timeout
			struct timeval tv;
			tv.tv_sec = 2;
			tv.tv_usec = 0;

			// block until data is available
			int r = select(_fileDescriptor + 1, &fds, NULL, NULL, &tv);

			if (-1 == r)
			{
				// interrupted by a signal: retry the select
				if (EINTR == errno)
					continue;
				throw_errno_exception("select");
			}
			if (0 == r)
			{
				throw_exception("select timeout");
			}

			// a complete frame was read: move on to the next iteration
			if (read_frame())
			{
				break;
			}

			/* EAGAIN - continue select loop. */
		}
	}
}
// Stop video streaming on the device.
void V4L2Grabber::stop()
{
	stop_capturing();
}
// Open the configured device node in non-blocking read/write mode, after
// verifying that it exists and is a character device.
void V4L2Grabber::open_device()
{
	struct stat st;

	if (-1 == stat(_deviceName.c_str(), &st))
	{
		std::ostringstream oss;
		oss << "Cannot identify '" << _deviceName << "'";
		throw_errno_exception(oss.str());
	}

	// V4L2 devices are character devices; anything else is not usable
	if (!S_ISCHR(st.st_mode))
	{
		std::ostringstream oss;
		oss << "'" << _deviceName << "' is no device";
		throw_exception(oss.str());
	}

	// O_NONBLOCK so reads never block; capture() waits via select() instead
	_fileDescriptor = open(_deviceName.c_str(), O_RDWR /* required */ | O_NONBLOCK, 0);

	if (-1 == _fileDescriptor)
	{
		std::ostringstream oss;
		oss << "Cannot open '" << _deviceName << "'";
		throw_errno_exception(oss.str());
	}
}
// Close the device file descriptor; throws when close() fails.
void V4L2Grabber::close_device()
{
	if (-1 == close(_fileDescriptor))
		throw_errno_exception("close");

	_fileDescriptor = -1;
}
// Allocate the single user-space buffer used by the IO_METHOD_READ path.
void V4L2Grabber::init_read(unsigned int buffer_size)
{
	_buffers.resize(1);
	_buffers[0].length = buffer_size;
	_buffers[0].start = malloc(buffer_size);

	if (!_buffers[0].start) {
		throw_exception("Out of memory");
	}
}
// Request and memory-map the driver-allocated capture buffers used by the
// IO_METHOD_MMAP path. Requires the device to support memory mapping and
// the driver to grant at least 2 buffers.
void V4L2Grabber::init_mmap()
{
	struct v4l2_requestbuffers req;

	CLEAR(req);

	req.count = 4;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;

	if (-1 == xioctl(VIDIOC_REQBUFS, &req)) {
		// EINVAL specifically means mmap streaming is unsupported
		if (EINVAL == errno) {
			std::ostringstream oss;
			oss << "'" << _deviceName << "' does not support memory mapping";
			throw_exception(oss.str());
		} else {
			throw_errno_exception("VIDIOC_REQBUFS");
		}
	}

	// the driver may grant fewer buffers than the 4 requested
	if (req.count < 2) {
		std::ostringstream oss;
		oss << "Insufficient buffer memory on " << _deviceName;
		throw_exception(oss.str());
	}

	_buffers.resize(req.count);

	for (size_t n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		struct v4l2_buffer buf;

		CLEAR(buf);

		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;

		// query each buffer's offset/length, then map it into our address space
		if (-1 == xioctl(VIDIOC_QUERYBUF, &buf))
			throw_errno_exception("VIDIOC_QUERYBUF");

		_buffers[n_buffers].length = buf.length;
		_buffers[n_buffers].start =
			mmap(NULL /* start anywhere */,
				buf.length,
				PROT_READ | PROT_WRITE /* required */,
				MAP_SHARED /* recommended */,
				_fileDescriptor, buf.m.offset);

		if (MAP_FAILED == _buffers[n_buffers].start)
			throw_errno_exception("mmap");
	}
}
// Register user-pointer streaming mode with the driver and allocate four
// user-space buffers for the IO_METHOD_USERPTR path.
void V4L2Grabber::init_userp(unsigned int buffer_size)
{
	struct v4l2_requestbuffers req;

	CLEAR(req);

	req.count = 4;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_USERPTR;

	if (-1 == xioctl(VIDIOC_REQBUFS, &req)) {
		// EINVAL specifically means user-pointer i/o is unsupported
		if (EINVAL == errno)
		{
			std::ostringstream oss;
			oss << "'" << _deviceName << "' does not support user pointer";
			throw_exception(oss.str());
		} else {
			throw_errno_exception("VIDIOC_REQBUFS");
		}
	}

	_buffers.resize(4);

	for (size_t n_buffers = 0; n_buffers < 4; ++n_buffers) {
		_buffers[n_buffers].length = buffer_size;
		_buffers[n_buffers].start = malloc(buffer_size);

		if (!_buffers[n_buffers].start) {
			throw_exception("Out of memory");
		}
	}
}
/// Initializes the opened device: verifies capabilities, selects input
/// and video standard, negotiates the capture format and allocates the
/// capture buffers for the configured i/o method.
///
/// @param videoStandard  PAL, NTSC or NO_CHANGE
/// @param input          Input channel to select, or a negative value to
///                       leave the current input unchanged
/// @throws std::runtime_error on any fatal capability/format mismatch or
///         ioctl failure
void V4L2Grabber::init_device(VideoStandard videoStandard, int input)
{
	// Verify the handle really is a V4L2 video-capture device
	struct v4l2_capability cap;
	if (-1 == xioctl(VIDIOC_QUERYCAP, &cap))
	{
		if (EINVAL == errno)
		{
			std::ostringstream oss;
			oss << "'" << _deviceName << "' is no V4L2 device";
			throw_exception(oss.str());
		}
		else
		{
			throw_errno_exception("VIDIOC_QUERYCAP");
		}
	}

	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
	{
		std::ostringstream oss;
		oss << "'" << _deviceName << "' is no video capture device";
		throw_exception(oss.str());
	}

	// Check that the device supports the selected i/o method
	switch (_ioMethod)
	{
	case IO_METHOD_READ:
		if (!(cap.capabilities & V4L2_CAP_READWRITE))
		{
			std::ostringstream oss;
			oss << "'" << _deviceName << "' does not support read i/o";
			throw_exception(oss.str());
		}
		break;

	case IO_METHOD_MMAP:
	case IO_METHOD_USERPTR:
		if (!(cap.capabilities & V4L2_CAP_STREAMING))
		{
			std::ostringstream oss;
			oss << "'" << _deviceName << "' does not support streaming i/o";
			throw_exception(oss.str());
		}
		break;
	}

	/* Select video input, video standard and tune here. */
	// Reset the crop rectangle to the driver default. Failures are ignored
	// on purpose: cropping is optional and many drivers do not implement it.
	struct v4l2_cropcap cropcap;
	CLEAR(cropcap);
	cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (0 == xioctl(VIDIOC_CROPCAP, &cropcap))
	{
		struct v4l2_crop crop;
		crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		crop.c = cropcap.defrect; /* reset to default */
		if (-1 == xioctl(VIDIOC_S_CROP, &crop))
		{
			switch (errno)
			{
			case EINVAL:
				/* Cropping not supported. */
				break;
			default:
				/* Errors ignored. */
				break;
			}
		}
	}
	else
	{
		/* Errors ignored. */
	}

	// set input if needed
	if (input >= 0)
	{
		if (-1 == xioctl(VIDIOC_S_INPUT, &input))
		{
			throw_errno_exception("VIDIOC_S_INPUT");
		}
	}

	// set the video standard if needed
	switch (videoStandard)
	{
	case PAL:
	{
		v4l2_std_id std_id = V4L2_STD_PAL;
		if (-1 == xioctl(VIDIOC_S_STD, &std_id))
		{
			throw_errno_exception("VIDIOC_S_STD");
		}
	}
		break;
	case NTSC:
	{
		v4l2_std_id std_id = V4L2_STD_NTSC;
		if (-1 == xioctl(VIDIOC_S_STD, &std_id))
		{
			throw_errno_exception("VIDIOC_S_STD");
		}
	}
		break;
	case NO_CHANGE:
	default:
		// No change to device settings
		break;
	}

	// get the current format settings
	struct v4l2_format fmt;
	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (-1 == xioctl(VIDIOC_G_FMT, &fmt))
	{
		throw_errno_exception("VIDIOC_G_FMT");
	}

	// check pixel format (process_image only decodes packed YUV 4:2:2)
	switch (fmt.fmt.pix.pixelformat)
	{
	case V4L2_PIX_FMT_UYVY:
	case V4L2_PIX_FMT_YUYV:
		_pixelFormat = fmt.fmt.pix.pixelformat;
		break;
	default:
		throw_exception("Only pixel formats UYVY and YUYV are supported");
	}

	// apply a user-requested capture size, if any
	if (_width > 0 || _height > 0)
	{
		if (_width > 0)
		{
			fmt.fmt.pix.width = _width;
		}
		// BUGFIX: this used to test 'fmt.fmt.pix.height > 0' (almost always
		// true), so a requested height of -1 (= "don't change") was written
		// into the unsigned format field as a huge value.
		if (_height > 0)
		{
			fmt.fmt.pix.height = _height;
		}

		// set the settings
		if (-1 == xioctl(VIDIOC_S_FMT, &fmt))
		{
			throw_errno_exception("VIDIOC_S_FMT");
		}

		// get the format settings again
		// (the size may not have been accepted without an error)
		if (-1 == xioctl(VIDIOC_G_FMT, &fmt))
		{
			throw_errno_exception("VIDIOC_G_FMT");
		}
	}

	// store the width & height actually granted by the driver
	_width = fmt.fmt.pix.width;
	_height = fmt.fmt.pix.height;

	// print the eventually used width and height
	std::cout << "V4L2 width=" << _width << " height=" << _height << std::endl;

	// allocate the capture buffers for the selected i/o method
	switch (_ioMethod)
	{
	case IO_METHOD_READ:
		init_read(fmt.fmt.pix.sizeimage);
		break;
	case IO_METHOD_MMAP:
		init_mmap();
		break;
	case IO_METHOD_USERPTR:
		init_userp(fmt.fmt.pix.sizeimage);
		break;
	}
}
void V4L2Grabber::uninit_device()
{
switch (_ioMethod) {
case IO_METHOD_READ:
free(_buffers[0].start);
break;
case IO_METHOD_MMAP:
for (size_t i = 0; i < _buffers.size(); ++i)
if (-1 == munmap(_buffers[i].start, _buffers[i].length))
throw_errno_exception("munmap");
break;
case IO_METHOD_USERPTR:
for (size_t i = 0; i < _buffers.size(); ++i)
free(_buffers[i].start);
break;
}
_buffers.resize(0);
}
/// Starts the video stream.
///
/// For the streaming i/o methods every buffer is queued (VIDIOC_QBUF)
/// before streaming is switched on (VIDIOC_STREAMON); plain read() i/o
/// needs no start step.
///
/// @throws std::runtime_error if queueing a buffer or starting the
///         stream fails
void V4L2Grabber::start_capturing()
{
switch (_ioMethod) {
case IO_METHOD_READ:
/* Nothing to do. */
break;
case IO_METHOD_MMAP:
{
// queue all mmap'ed buffers so the driver can fill them
for (size_t i = 0; i < _buffers.size(); ++i) {
struct v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == xioctl(VIDIOC_QBUF, &buf))
throw_errno_exception("VIDIOC_QBUF");
}
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == xioctl(VIDIOC_STREAMON, &type))
throw_errno_exception("VIDIOC_STREAMON");
break;
}
case IO_METHOD_USERPTR:
{
// queue all user-space buffers (identified by pointer and length)
for (size_t i = 0; i < _buffers.size(); ++i) {
struct v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_USERPTR;
buf.index = i;
buf.m.userptr = (unsigned long)_buffers[i].start;
buf.length = _buffers[i].length;
if (-1 == xioctl(VIDIOC_QBUF, &buf))
throw_errno_exception("VIDIOC_QBUF");
}
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == xioctl(VIDIOC_STREAMON, &type))
throw_errno_exception("VIDIOC_STREAMON");
break;
}
}
}
/// Stops the video stream (VIDIOC_STREAMOFF).
///
/// For plain read() i/o there is no stream to stop.
///
/// @throws std::runtime_error if stopping the stream fails
void V4L2Grabber::stop_capturing()
{
	switch (_ioMethod)
	{
	case IO_METHOD_READ:
		/* Nothing to do. */
		break;

	case IO_METHOD_MMAP:
	case IO_METHOD_USERPTR:
	{
		enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		if (xioctl(VIDIOC_STREAMOFF, &type) == -1)
			throw_errno_exception("VIDIOC_STREAMOFF");
		break;
	}
	}
}
/// Reads or dequeues a single frame and forwards it to process_image().
///
/// @return 1 if a frame was processed, 0 if no frame was available
///         (EAGAIN) or the frame was skipped by process_image()
/// @throws std::runtime_error on fatal read/ioctl errors
int V4L2Grabber::read_frame()
{
bool rc = false;
struct v4l2_buffer buf;
switch (_ioMethod) {
case IO_METHOD_READ:
// plain read() i/o: read one frame into the single pre-allocated buffer
int size;
if ((size = read(_fileDescriptor, _buffers[0].start, _buffers[0].length)) == -1)
{
switch (errno)
{
case EAGAIN:
// non-blocking read and no frame available yet
return 0;
case EIO:
/* Could ignore EIO, see spec. */
/* fall through */
default:
throw_errno_exception("read");
}
}
rc = process_image(_buffers[0].start, size);
break;
case IO_METHOD_MMAP:
// dequeue a filled driver buffer, process it, then re-queue it
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl(VIDIOC_DQBUF, &buf))
{
switch (errno)
{
case EAGAIN:
return 0;
case EIO:
/* Could ignore EIO, see spec. */
/* fall through */
default:
throw_errno_exception("VIDIOC_DQBUF");
}
}
assert(buf.index < _buffers.size());
rc = process_image(_buffers[buf.index].start, buf.bytesused);
if (-1 == xioctl(VIDIOC_QBUF, &buf))
{
throw_errno_exception("VIDIOC_QBUF");
}
break;
case IO_METHOD_USERPTR:
// dequeue a filled user-space buffer, process it, then re-queue it
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_USERPTR;
if (-1 == xioctl(VIDIOC_DQBUF, &buf))
{
switch (errno)
{
case EAGAIN:
return 0;
case EIO:
/* Could ignore EIO, see spec. */
/* fall through */
default:
throw_errno_exception("VIDIOC_DQBUF");
}
}
// NOTE(review): this loop looks like a sanity check that the dequeued
// pointer matches a known buffer, but its result is discarded, so an
// unknown pointer is silently accepted -- confirm intent.
for (size_t i = 0; i < _buffers.size(); ++i)
{
if (buf.m.userptr == (unsigned long)_buffers[i].start && buf.length == _buffers[i].length)
{
break;
}
}
rc = process_image((void *)buf.m.userptr, buf.bytesused);
if (-1 == xioctl(VIDIOC_QBUF, &buf))
{
throw_errno_exception("VIDIOC_QBUF");
}
break;
}
return rc ? 1 : 0;
}
/// Applies frame decimation and size validation before decoding a frame.
///
/// Only every _frameDecimation-th frame is decoded; a frame whose size
/// does not match the expected 2 bytes/pixel is reported and dropped.
///
/// @param p     Raw frame data
/// @param size  Number of bytes in the frame
/// @return true if the frame was decoded and forwarded, false otherwise
bool V4L2Grabber::process_image(const void *p, int size)
{
	// skip frames until the decimation counter is reached
	if (++_currentFrame < _frameDecimation)
	{
		return false;
	}

	// We do want a new frame...
	const int expectedSize = 2 * _width * _height;
	if (size != expectedSize)
	{
		std::cout << "Frame too small: " << size << " != " << expectedSize << std::endl;
		return false;
	}

	process_image(reinterpret_cast<const uint8_t *>(p));
	_currentFrame = 0; // restart counting
	return true;
}
/// Converts one packed YUV 4:2:2 frame to an RGB image, applying the
/// configured cropping and pixel decimation, and hands the result to the
/// registered callback (if any).
///
/// @param data  Frame data, 2 bytes per pixel (UYVY or YUYV per _pixelFormat)
void V4L2Grabber::process_image(const uint8_t * data)
{
// output size after cropping both sides and keeping every
// _pixelDecimation-th pixel (+_pixelDecimation/2 centers the sampling grid)
int width = (_width - 2 * _cropWidth + _pixelDecimation/2) / _pixelDecimation;
int height = (_height - 2 * _cropHeight + _pixelDecimation/2) / _pixelDecimation;
Image<ColorRgb> image(width, height);
for (int ySource = _cropHeight + _pixelDecimation/2, yDest = 0; ySource < _height - _cropHeight; ySource += _pixelDecimation, ++yDest)
{
for (int xSource = _cropWidth + _pixelDecimation/2, xDest = 0; xSource < _width - _cropWidth; xSource += _pixelDecimation, ++xDest)
{
// 2 bytes per pixel; U and V are shared by each horizontal pixel pair,
// so the chroma byte offsets depend on whether xSource is even or odd
int index = (_width * ySource + xSource) * 2;
uint8_t y = 0;
uint8_t u = 0;
uint8_t v = 0;
switch (_pixelFormat)
{
case V4L2_PIX_FMT_UYVY:
// byte layout per pixel pair: U0 Y0 V0 Y1
y = data[index+1];
u = (xSource%2 == 0) ? data[index ] : data[index-2];
v = (xSource%2 == 0) ? data[index+2] : data[index ];
break;
case V4L2_PIX_FMT_YUYV:
// byte layout per pixel pair: Y0 U0 Y1 V0
y = data[index];
u = (xSource%2 == 0) ? data[index+1] : data[index-1];
v = (xSource%2 == 0) ? data[index+3] : data[index+1];
break;
}
ColorRgb & rgb = image(xDest, yDest);
yuv2rgb(y, u, v, rgb.red, rgb.green, rgb.blue);
}
}
// forward the decoded image to the consumer
if (_callback != nullptr)
{
(*_callback)(_callbackArg, image);
}
}
/// ioctl() wrapper that transparently retries when the call is
/// interrupted by a signal (EINTR).
///
/// @param request  The ioctl request code
/// @param arg      The ioctl argument
/// @return the ioctl result (-1 on error, with errno set)
int V4L2Grabber::xioctl(int request, void *arg)
{
	int result = ioctl(_fileDescriptor, request, arg);
	while (result == -1 && errno == EINTR)
	{
		result = ioctl(_fileDescriptor, request, arg);
	}
	return result;
}
/// Raises a std::runtime_error with the given description
/// (the literal " error" is appended, as callers expect).
///
/// @param error  Description of the failure
void V4L2Grabber::throw_exception(const std::string & error)
{
	throw std::runtime_error(error + " error");
}
/// Raises a std::runtime_error carrying the current errno value and its
/// human-readable strerror() description.
///
/// @param error  Name of the failed operation (e.g. the ioctl request)
void V4L2Grabber::throw_errno_exception(const std::string & error)
{
	const int err = errno; // snapshot before any further library calls
	throw std::runtime_error(error + " error " + std::to_string(err) + ", " + strerror(err));
}

View File

@ -0,0 +1,96 @@
#pragma once
// stl includes
#include <string>
#include <vector>
// util includes
#include <utils/Image.h>
#include <utils/ColorRgb.h>
/// Capture class for V4L2 devices
///
/// @see http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html
class V4L2Grabber
{
public:
/// Callback invoked for every decoded frame (arg is the user pointer
/// given to setCallback)
typedef void (*ImageCallback)(void * arg, const Image<ColorRgb> & image);
/// Analog video standard to configure on the device (NO_CHANGE leaves
/// the current setting untouched)
enum VideoStandard {
PAL, NTSC, NO_CHANGE
};
public:
/// @param device          Path of the video device (e.g. /dev/video0)
/// @param input           Input channel (negative = no change)
/// @param videoStandard   PAL, NTSC or NO_CHANGE
/// @param width           Requested capture width (<=0 = no change)
/// @param height          Requested capture height (<=0 = no change)
/// @param cropHorizontal  Pixels cropped from left and right
/// @param cropVertical    Pixels cropped from top and bottom
/// @param frameDecimation Keep every n-th frame
/// @param pixelDecimation Keep every n-th pixel in both directions
V4L2Grabber(const std::string & device, int input, VideoStandard videoStandard, int width, int height, int cropHorizontal, int cropVertical, int frameDecimation, int pixelDecimation);
virtual ~V4L2Grabber();
/// Registers the callback that receives every decoded frame
void setCallback(ImageCallback callback, void * arg);
/// Opens and initializes the device and starts streaming
void start();
/// Captures frames; a negative frameCount captures indefinitely
void capture(int frameCount = -1);
/// Stops streaming and releases the device
void stop();
private:
// device open/close and buffer setup for the three V4L2 i/o methods
void open_device();
void close_device();
void init_read(unsigned int buffer_size);
void init_mmap();
void init_userp(unsigned int buffer_size);
void init_device(VideoStandard videoStandard, int input);
void uninit_device();
void start_capturing();
void stop_capturing();
// frame acquisition and YUV422->RGB decoding
int read_frame();
bool process_image(const void *p, int size);
void process_image(const uint8_t *p);
// ioctl wrapper retrying on EINTR
int xioctl(int request, void *arg);
// helpers that raise std::runtime_error (with/without errno details)
void throw_exception(const std::string &error);
void throw_errno_exception(const std::string &error);
private:
/// The three supported V4L2 i/o methods
enum io_method {
IO_METHOD_READ,
IO_METHOD_MMAP,
IO_METHOD_USERPTR
};
/// One capture buffer (malloc'ed or mmap'ed, depending on io_method)
struct buffer {
void *start;
size_t length;
};
private:
const std::string _deviceName;       // path of the video device
const io_method _ioMethod;           // selected i/o method
int _fileDescriptor;                 // open handle of the device
std::vector<buffer> _buffers;        // capture buffers
uint32_t _pixelFormat;               // V4L2_PIX_FMT_UYVY or _YUYV
int _width;                          // capture width granted by the driver
int _height;                         // capture height granted by the driver
const int _cropWidth;                // pixels cropped left/right
const int _cropHeight;               // pixels cropped top/bottom
const int _frameDecimation;          // keep every n-th frame
const int _pixelDecimation;          // keep every n-th pixel
int _currentFrame;                   // frame counter for decimation
ImageCallback _callback;             // consumer of decoded images (may be null)
void * _callbackArg;                 // user pointer passed to the callback
};

View File

@ -0,0 +1,36 @@
// getoptPlusPLus includes
#include <getoptPlusPlus/getoptpp.h>
using namespace vlofgren;
/// Data parameter for the video standard
typedef vlofgren::PODParameter<V4L2Grabber::VideoStandard> VideoStandardParameter;

namespace vlofgren {
/// Translates a string (as passed on the commandline) to a video standard
///
/// @param[in] s The string (as passed on the commandline)
/// @return The video standard
/// @throws Parameter::ParameterRejected If the string did not result in a video standard
template<>
V4L2Grabber::VideoStandard VideoStandardParameter::validate(const std::string& s) throw (Parameter::ParameterRejected)
{
	// case-insensitive match against the supported standards
	QString input = QString::fromStdString(s).toLower();

	if (input == "pal")
	{
		return V4L2Grabber::PAL;
	}
	else if (input == "ntsc")
	{
		return V4L2Grabber::NTSC;
	}
	else if (input == "no-change")
	{
		return V4L2Grabber::NO_CHANGE;
	}

	// no match: reject the value (the return statement that previously
	// followed this throw was unreachable and has been removed)
	throw Parameter::ParameterRejected("Invalid value for video standard. Valid values are: PAL, NTSC, and NO-CHANGE");
}
}

View File

@ -0,0 +1,112 @@
// STL includes
#include <csignal>
#include <iomanip>
// QT includes
#include <QImage>
// getoptPlusPLus includes
#include <getoptPlusPlus/getoptpp.h>
// blackborder includes
#include <blackborder/BlackBorderProcessor.h>
// hyperion-v4l2 includes
#include "V4L2Grabber.h"
#include "ProtoConnection.h"
#include "VideoStandardParameter.h"
#include "ImageHandler.h"
using namespace vlofgren;
// save the image as screenshot
void saveScreenshot(void *, const Image<ColorRgb> & image)
{
// store as PNG
QImage pngImage((const uint8_t *) image.memptr(), image.width(), image.height(), 3*image.width(), QImage::Format_RGB888);
pngImage.save("screenshot.png");
}
/// Entry point of the hyperion-v4l2 capture application: parses the
/// commandline, configures the V4L2 grabber and either saves a single
/// screenshot or streams captured frames to a Hyperion server.
int main(int argc, char** argv)
{
try
{
// create the option parser and initialize all parameters
OptionsParser optionParser("V4L capture application for Hyperion");
ParameterSet & parameters = optionParser.getParameters();
StringParameter & argDevice = parameters.add<StringParameter> ('d', "device", "The device to use [default=/dev/video0]");
VideoStandardParameter & argVideoStandard = parameters.add<VideoStandardParameter>('v', "video-standard", "The used video standard. Valid values are PAL or NTSC (optional)");
IntParameter & argInput = parameters.add<IntParameter> (0x0, "input", "Input channel (optional)");
IntParameter & argWidth = parameters.add<IntParameter> (0x0, "width", "Try to set the width of the video input (optional)");
IntParameter & argHeight = parameters.add<IntParameter> (0x0, "height", "Try to set the height of the video input (optional)");
IntParameter & argCropWidth = parameters.add<IntParameter> (0x0, "crop-width", "Number of pixels to crop from the left and right sides in the picture before decimation [default=0]");
IntParameter & argCropHeight = parameters.add<IntParameter> (0x0, "crop-height", "Number of pixels to crop from the top and the bottom in the picture before decimation [default=0]");
IntParameter & argSizeDecimation = parameters.add<IntParameter> ('s', "size-decimator", "Decimation factor for the output size [default=1]");
IntParameter & argFrameDecimation = parameters.add<IntParameter> ('f', "frame-decimator", "Decimation factor for the video frames [default=1]");
SwitchParameter<> & argScreenshot = parameters.add<SwitchParameter<>> (0x0, "screenshot", "Take a single screenshot, save it to file and quit");
DoubleParameter & argSignalThreshold = parameters.add<DoubleParameter> ('t', "signal-threshold", "The signal threshold for detecting the presence of a signal. Value should be between 0.0 and 1.0.");
StringParameter & argAddress = parameters.add<StringParameter> ('a', "address", "Set the address of the hyperion server [default: 127.0.0.1:19445]");
IntParameter & argPriority = parameters.add<IntParameter> ('p', "priority", "Use the provided priority channel (the lower the number, the higher the priority) [default: 800]");
SwitchParameter<> & argSkipReply = parameters.add<SwitchParameter<>> (0x0, "skip-reply", "Do not receive and check reply messages from Hyperion");
SwitchParameter<> & argHelp = parameters.add<SwitchParameter<>> ('h', "help", "Show this help message and exit");
// set defaults (-1 acts as "not set"/"no change" for input, width,
// height and signal-threshold)
argDevice.setDefault("/dev/video0");
argVideoStandard.setDefault(V4L2Grabber::NO_CHANGE);
argInput.setDefault(-1);
argWidth.setDefault(-1);
argHeight.setDefault(-1);
argCropWidth.setDefault(0);
argCropHeight.setDefault(0);
argSizeDecimation.setDefault(1);
argFrameDecimation.setDefault(1);
argAddress.setDefault("127.0.0.1:19445");
argPriority.setDefault(800);
argSignalThreshold.setDefault(-1);
// parse all options
optionParser.parse(argc, const_cast<const char **>(argv));
// check if we need to display the usage. exit if we do.
if (argHelp.isSet())
{
optionParser.usage();
return 0;
}
// construct the grabber (crops clamped to >= 0, decimations to >= 1)
V4L2Grabber grabber(
argDevice.getValue(),
argInput.getValue(),
argVideoStandard.getValue(),
argWidth.getValue(),
argHeight.getValue(),
std::max(0, argCropWidth.getValue()),
std::max(0, argCropHeight.getValue()),
std::max(1, argFrameDecimation.getValue()),
std::max(1, argSizeDecimation.getValue()));
grabber.start();
if (argScreenshot.isSet())
{
// one-shot mode: save a single frame to disk and exit
grabber.setCallback(&saveScreenshot, nullptr);
grabber.capture(1);
}
else
{
// streaming mode: forward every frame to the Hyperion server
ImageHandler handler(argAddress.getValue(), argPriority.getValue(), argSignalThreshold.getValue(), argSkipReply.isSet());
grabber.setCallback(&ImageHandler::imageCallback, &handler);
grabber.capture();
}
grabber.stop();
}
catch (const std::runtime_error & e)
{
// An error occured. Display error and quit
std::cerr << e.what() << std::endl;
return 1;
}
return 0;
}

View File

@ -3,9 +3,11 @@
#include <random>
// Hyperion includes
#include <hyperion/BlackBorderDetector.h>
#include <utils/ColorRgb.h>
// Blackborder includes
#include <blackborder/BlackBorderDetector.h>
using namespace hyperion;
ColorRgb randomColor()
@ -41,7 +43,7 @@ int TC_NO_BORDER()
{
int result = 0;
BlackBorderDetector detector;
BlackBorderDetector detector(3);
{
Image<ColorRgb> image = createImage(64, 64, 0, 0);
@ -60,7 +62,7 @@ int TC_TOP_BORDER()
{
int result = 0;
BlackBorderDetector detector;
BlackBorderDetector detector(3);
{
Image<ColorRgb> image = createImage(64, 64, 12, 0);
@ -79,7 +81,7 @@ int TC_LEFT_BORDER()
{
int result = 0;
BlackBorderDetector detector;
BlackBorderDetector detector(3);
{
Image<ColorRgb> image = createImage(64, 64, 0, 12);
@ -98,7 +100,7 @@ int TC_DUAL_BORDER()
{
int result = 0;
BlackBorderDetector detector;
BlackBorderDetector detector(3);
{
Image<ColorRgb> image = createImage(64, 64, 12, 12);
@ -116,7 +118,7 @@ int TC_UNKNOWN_BORDER()
{
int result = 0;
BlackBorderDetector detector;
BlackBorderDetector detector(3);
{
Image<ColorRgb> image = createImage(64, 64, 30, 30);

View File

@ -8,8 +8,8 @@
#include <utils/Image.h>
#include <utils/ColorRgb.h>
// Local-Hyperion includes
#include "hyperion/BlackBorderProcessor.h"
// Blackborder includes
#include "blackborder/BlackBorderProcessor.h"
using namespace hyperion;
@ -48,7 +48,7 @@ int main()
unsigned borderCnt = 50;
unsigned blurCnt = 0;
BlackBorderProcessor processor(unknownCnt, borderCnt, blurCnt);
BlackBorderProcessor processor(unknownCnt, borderCnt, blurCnt, 3);
// Start with 'no border' detection
Image<ColorRgb> noBorderImage = createImage(64, 64, 0, 0);