Merge remote-tracking branch 'origin/master' into temperture

LordGrey
2024-05-30 19:42:16 +02:00
25 changed files with 723 additions and 111 deletions

View File

@@ -594,6 +594,10 @@ QJsonArray JsonInfo::discoverScreenInputs(const QJsonObject& params) const
discoverGrabber<DirectXGrabber>(screenInputs, params);
#endif
#ifdef ENABLE_DDA
discoverGrabber<DDAGrabber>(screenInputs, params);
#endif
#ifdef ENABLE_X11
discoverGrabber<X11Grabber>(screenInputs, params);
#endif

View File

@@ -34,6 +34,10 @@ if(ENABLE_DX)
add_subdirectory(directx)
endif(ENABLE_DX)
if(ENABLE_DDA)
add_subdirectory(dda)
endif(ENABLE_DDA)
if(ENABLE_AUDIO)
add_subdirectory(audio)
endif()

View File

@@ -0,0 +1,12 @@
add_library(dda-grabber
${CMAKE_SOURCE_DIR}/include/grabber/dda/DDAGrabber.h
${CMAKE_SOURCE_DIR}/include/grabber/dda/DDAWrapper.h
${CMAKE_SOURCE_DIR}/libsrc/grabber/dda/DDAGrabber.cpp
${CMAKE_SOURCE_DIR}/libsrc/grabber/dda/DDAWrapper.cpp
)
target_link_libraries(dda-grabber
hyperion
d3d11.lib
dxgi.lib
)

View File

@@ -0,0 +1,357 @@
#include "grabber/dda/DDAGrabber.h"
#include <atlbase.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#include <physicalmonitorenumerationapi.h>
#include <windows.h>
#pragma comment(lib, "d3d9.lib")
#pragma comment(lib, "dxva2.lib")
namespace
{
// Driver types supported.
constexpr D3D_DRIVER_TYPE kDriverTypes[] = {
D3D_DRIVER_TYPE_HARDWARE,
D3D_DRIVER_TYPE_WARP,
D3D_DRIVER_TYPE_REFERENCE,
};
// Feature levels supported.
D3D_FEATURE_LEVEL kFeatureLevels[] = {D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_1};
// Returns true if the two texture descriptors are compatible for copying.
bool areTextureDescriptionsCompatible(D3D11_TEXTURE2D_DESC a, D3D11_TEXTURE2D_DESC b)
{
return a.Width == b.Width && a.Height == b.Height && a.MipLevels == b.MipLevels && a.ArraySize == b.ArraySize &&
a.Format == b.Format;
}
} // namespace
// Logs a message along with the hex error HRESULT.
#define LOG_ERROR(hr, msg) Error(_log, msg ": 0x%x", hr)
// Checks if the HRESULT is an error, and if so, logs it and returns from the
// current function.
#define RETURN_IF_ERROR(hr, msg, returnValue) \
if (FAILED(hr)) \
{ \
LOG_ERROR(hr, msg); \
return returnValue; \
}
// Checks if the condition is false, and if so, logs an error and returns from
// the current function.
#define RET_CHECK(cond, returnValue) \
if (!(cond)) \
{ \
Error(_log, "Assertion failed: " #cond); \
return returnValue; \
}
// Private implementation. These member variables are here and not in the .h
// so we don't have to include <atlbase.h> in the header and pollute everything
// else that includes it.
class DDAGrabberImpl
{
public:
int display = 0;
int desktopWidth = 0;
int desktopHeight = 0;
// Created in the constructor.
CComPtr<ID3D11Device> device;
CComPtr<ID3D11DeviceContext> deviceContext;
CComPtr<IDXGIDevice> dxgiDevice;
CComPtr<IDXGIAdapter> dxgiAdapter;
// Created in restartCapture - only valid while desktop capture is in
// progress.
CComPtr<IDXGIOutputDuplication> desktopDuplication;
CComPtr<ID3D11Texture2D> intermediateTexture;
D3D11_TEXTURE2D_DESC intermediateTextureDesc;
};
DDAGrabber::DDAGrabber(int display, int cropLeft, int cropRight, int cropTop, int cropBottom)
: Grabber("GRABBER-DDA", cropLeft, cropRight, cropTop, cropBottom), d(new DDAGrabberImpl)
{
d->display = display;
HRESULT hr = S_OK;
// Iterate through driver types until we find one that succeeds.
D3D_FEATURE_LEVEL featureLevel;
for (D3D_DRIVER_TYPE driverType : kDriverTypes)
{
hr = D3D11CreateDevice(nullptr, driverType, nullptr, 0, kFeatureLevels, std::size(kFeatureLevels),
D3D11_SDK_VERSION, &d->device, &featureLevel, &d->deviceContext);
if (SUCCEEDED(hr))
{
break;
}
}
RETURN_IF_ERROR(hr, "CreateDevice failed", );
// Get the DXGI device.
hr = d->device.QueryInterface(&d->dxgiDevice);
RETURN_IF_ERROR(hr, "Failed to get DXGI device", );
// Get the DXGI device's adapter.
hr = d->dxgiDevice->GetAdapter(&d->dxgiAdapter);
RETURN_IF_ERROR(hr, "Failed to get DXGI Adapter", );
}
DDAGrabber::~DDAGrabber()
{
}
bool DDAGrabber::restartCapture()
{
if (!d->dxgiAdapter)
{
return false;
}
HRESULT hr = S_OK;
d->desktopDuplication.Release();
// Get the output that was selected.
CComPtr<IDXGIOutput> output;
hr = d->dxgiAdapter->EnumOutputs(d->display, &output);
RETURN_IF_ERROR(hr, "Failed to get output", false);
// Get the descriptor which has the size of the display.
DXGI_OUTPUT_DESC desc;
hr = output->GetDesc(&desc);
RETURN_IF_ERROR(hr, "Failed to get output description", false);
d->desktopWidth = desc.DesktopCoordinates.right - desc.DesktopCoordinates.left;
d->desktopHeight = desc.DesktopCoordinates.bottom - desc.DesktopCoordinates.top;
_width = (d->desktopWidth - _cropLeft - _cropRight) / _pixelDecimation;
_height = (d->desktopHeight - _cropTop - _cropBottom) / _pixelDecimation;
Info(_log, "Desktop size: %dx%d, cropping=%d,%d,%d,%d, decimation=%d, final image size=%dx%d", d->desktopWidth,
d->desktopHeight, _cropLeft, _cropTop, _cropRight, _cropBottom, _pixelDecimation, _width, _height);
// Get the DXGIOutput1 interface.
CComPtr<IDXGIOutput1> output1;
hr = output.QueryInterface(&output1);
RETURN_IF_ERROR(hr, "Failed to get output1", false);
// Create the desktop duplication interface.
hr = output1->DuplicateOutput(d->device, &d->desktopDuplication);
RETURN_IF_ERROR(hr, "Failed to create desktop duplication interface", false);
return true;
}
int DDAGrabber::grabFrame(Image<ColorRgb> &image)
{
// Do nothing if we're disabled.
if (!_isEnabled)
{
return 0;
}
// Start the capture if it's not already running.
if (!d->desktopDuplication && !restartCapture())
{
return -1;
}
HRESULT hr = S_OK;
// Release the last frame, if any.
hr = d->desktopDuplication->ReleaseFrame();
if (FAILED(hr) && hr != DXGI_ERROR_INVALID_CALL)
{
LOG_ERROR(hr, "Failed to release frame");
}
// Acquire the next frame.
CComPtr<IDXGIResource> desktopResource;
DXGI_OUTDUPL_FRAME_INFO frameInfo;
hr = d->desktopDuplication->AcquireNextFrame(INFINITE, &frameInfo, &desktopResource);
if (hr == DXGI_ERROR_ACCESS_LOST || hr == DXGI_ERROR_INVALID_CALL)
{
if (!restartCapture())
{
return -1;
}
return 0;
}
if (hr == DXGI_ERROR_WAIT_TIMEOUT)
{
// This shouldn't happen since we specified an INFINITE timeout.
return 0;
}
RETURN_IF_ERROR(hr, "Failed to acquire next frame", 0);
// Get the 2D texture.
CComPtr<ID3D11Texture2D> texture;
hr = desktopResource.QueryInterface(&texture);
RETURN_IF_ERROR(hr, "Failed to get 2D texture", 0);
// The texture we acquired is on the GPU and can't be accessed from the CPU,
// so we have to copy it into another texture that can.
D3D11_TEXTURE2D_DESC textureDesc;
texture->GetDesc(&textureDesc);
// Create a new intermediate texture if we haven't done so already, or the
// existing one is incompatible with the acquired texture (i.e. it has
// different dimensions).
if (!d->intermediateTexture || !areTextureDescriptionsCompatible(d->intermediateTextureDesc, textureDesc))
{
Info(_log, "Creating intermediate texture");
d->intermediateTexture.Release();
d->intermediateTextureDesc = textureDesc;
d->intermediateTextureDesc.Usage = D3D11_USAGE_STAGING;
d->intermediateTextureDesc.BindFlags = 0;
d->intermediateTextureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
d->intermediateTextureDesc.MiscFlags = 0;
hr = d->device->CreateTexture2D(&d->intermediateTextureDesc, nullptr, &d->intermediateTexture);
RETURN_IF_ERROR(hr, "Failed to create intermediate texture", 0);
}
// Copy the texture to the intermediate texture.
d->deviceContext->CopyResource(d->intermediateTexture, texture);
RETURN_IF_ERROR(hr, "Failed to copy texture", 0);
// Map the texture so we can access its pixels.
D3D11_MAPPED_SUBRESOURCE resource;
hr = d->deviceContext->Map(d->intermediateTexture, 0, D3D11_MAP_READ, 0, &resource);
RETURN_IF_ERROR(hr, "Failed to map texture", 0);
// Copy the texture to the output image.
RET_CHECK(textureDesc.Format == DXGI_FORMAT_B8G8R8A8_UNORM, 0);
ColorRgb *dest = image.memptr();
for (size_t destY = 0, srcY = _cropTop; destY < image.height(); destY++, srcY += _pixelDecimation)
{
uint32_t *src =
reinterpret_cast<uint32_t *>(reinterpret_cast<unsigned char *>(resource.pData) + srcY * resource.RowPitch) +
_cropLeft;
for (size_t destX = 0; destX < image.width(); destX++, src += _pixelDecimation, dest++)
{
*dest = ColorRgb{static_cast<uint8_t>(((*src) >> 16) & 0xff), static_cast<uint8_t>(((*src) >> 8) & 0xff),
static_cast<uint8_t>(((*src) >> 0) & 0xff)};
}
}
return 0;
}
void DDAGrabber::setVideoMode(VideoMode mode)
{
Grabber::setVideoMode(mode);
restartCapture();
}
bool DDAGrabber::setPixelDecimation(int pixelDecimation)
{
if (Grabber::setPixelDecimation(pixelDecimation))
return restartCapture();
return false;
}
void DDAGrabber::setCropping(int cropLeft, int cropRight, int cropTop, int cropBottom)
{
// Grabber::setCropping rejects the cropped size if it is larger than _width
// and _height, so temporarily set those back to the original pre-cropped full
// desktop sizes first. They'll be set back to the cropped sizes by
// restartCapture.
_width = d->desktopWidth;
_height = d->desktopHeight;
Grabber::setCropping(cropLeft, cropRight, cropTop, cropBottom);
restartCapture();
}
bool DDAGrabber::setDisplayIndex(int index)
{
bool rc = true;
if (d->display != index)
{
d->display = index;
rc = restartCapture();
}
return rc;
}
QJsonObject DDAGrabber::discover(const QJsonObject &params)
{
QJsonObject ret;
if (!d->dxgiAdapter)
{
return ret;
}
HRESULT hr = S_OK;
// Enumerate through the outputs.
QJsonArray videoInputs;
for (int i = 0;; ++i)
{
CComPtr<IDXGIOutput> output;
hr = d->dxgiAdapter->EnumOutputs(i, &output);
if (!output || !SUCCEEDED(hr))
{
break;
}
// Get the output description.
DXGI_OUTPUT_DESC desc;
hr = output->GetDesc(&desc);
if (FAILED(hr))
{
Error(_log, "Failed to get output description");
continue;
}
// Add it to the JSON.
const int width = desc.DesktopCoordinates.right - desc.DesktopCoordinates.left;
const int height = desc.DesktopCoordinates.bottom - desc.DesktopCoordinates.top;
videoInputs.append(QJsonObject{
{"inputIdx", i},
{"name", QString::fromWCharArray(desc.DeviceName)},
{"formats",
QJsonArray{
QJsonObject{
{"resolutions",
QJsonArray{
QJsonObject{
{"width", width},
{"height", height},
{"fps", QJsonArray{1, 5, 10, 15, 20, 25, 30, 40, 50, 60, 120, 144}},
},
}},
},
}},
});
}
ret["video_inputs"] = videoInputs;
if (!videoInputs.isEmpty())
{
ret["device"] = "dda";
ret["device_name"] = "DXGI DDA";
ret["type"] = "screen";
ret["default"] = QJsonObject{
{"video_input",
QJsonObject{
{"inputIdx", 0},
{"resolution",
QJsonObject{
{"fps", 60},
}},
}},
};
}
return ret;
}
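
As orientation for the new grabber, here is a hypothetical, minimal usage sketch of the API introduced above (constructor, setPixelDecimation, grabFrame). The utils/ include paths and the idea that the caller pre-sizes the image are assumptions of mine; in this commit the grabber is actually driven by the DDAWrapper shown below.

// Hypothetical usage sketch, not part of this commit: it only exercises the
// DDAGrabber API added above. The utils/ include paths and the pre-sized
// image are assumptions; the GrabberWrapper machinery normally drives this.
#include "grabber/dda/DDAGrabber.h"

#include <utils/ColorRgb.h>
#include <utils/Image.h>

void grabDesktopOnce()
{
    // Display 0, no cropping (left, right, top, bottom).
    DDAGrabber grabber(0, 0, 0, 0, 0);
    grabber.setPixelDecimation(8); // also restarts the capture; returns false on failure

    // Assumed: the caller sizes the target image before grabbing.
    Image<ColorRgb> image;
    image.resize(240, 135);

    if (grabber.grabFrame(image) == 0)
    {
        // On success, image holds the decimated desktop frame converted to RGB.
    }
}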

View File

@@ -0,0 +1,20 @@
#include "grabber/dda/DDAWrapper.h"
DDAWrapper::DDAWrapper(int updateRate_Hz, int display, int pixelDecimation, int cropLeft, int cropRight, int cropTop,
int cropBottom)
: GrabberWrapper(GRABBERTYPE, &_grabber, updateRate_Hz), _grabber(display, cropLeft, cropRight, cropTop, cropBottom)
{
_grabber.setPixelDecimation(pixelDecimation);
}
DDAWrapper::DDAWrapper(const QJsonDocument &grabberConfig)
: DDAWrapper(GrabberWrapper::DEFAULT_RATE_HZ, 0, GrabberWrapper::DEFAULT_PIXELDECIMATION, 0, 0, 0, 0)
{
this->handleSettingsUpdate(settings::SYSTEMCAPTURE, grabberConfig);
}
void DDAWrapper::action()
{
transferFrame(_grabber);
}

View File

@@ -122,18 +122,6 @@ void EncoderThread::process()
else
#endif
{
if (_pixelFormat == PixelFormat::BGR24)
{
if (_flipMode == FlipMode::NO_CHANGE)
_imageResampler.setFlipMode(FlipMode::HORIZONTAL);
else if (_flipMode == FlipMode::HORIZONTAL)
_imageResampler.setFlipMode(FlipMode::NO_CHANGE);
else if (_flipMode == FlipMode::VERTICAL)
_imageResampler.setFlipMode(FlipMode::BOTH);
else if (_flipMode == FlipMode::BOTH)
_imageResampler.setFlipMode(FlipMode::VERTICAL);
}
Image<ColorRgb> image = Image<ColorRgb>();
_imageResampler.processImage(
_localData,

View File

@@ -1079,6 +1079,22 @@ void V4L2Grabber::newThreadFrame(Image<ColorRgb> image)
}
else
emit newFrame(image);
#ifdef FRAME_BENCH
// calculate average frametime
if (_currentFrame > 1)
{
if (_currentFrame % 100 == 0)
{
Debug(_log, "%d: avg. frametime=%.02fms / %.02fms", int(_currentFrame), _frameTimer.restart()/100.0, 1000.0/_fps);
}
}
else
{
Debug(_log, "%d: frametimer started", int(_currentFrame));
_frameTimer.start();
}
#endif
}
int V4L2Grabber::xioctl(int request, void *arg)
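
For reference on the FRAME_BENCH arithmetic added above: the timer is restarted every 100 frames, so the elapsed milliseconds divided by 100 give the average frame time, logged next to the 1000/_fps target. A toy, self-contained illustration, assuming _frameTimer is a QElapsedTimer (which the start()/restart() calls suggest):

// Toy illustration of the benchmark math only; all values are examples.
#include <QElapsedTimer>
#include <cstdio>

int main()
{
    const int fps = 60; // example source rate -> target of 1000/60 = 16.67 ms per frame
    QElapsedTimer frameTimer;
    frameTimer.start();
    // ... capture 100 frames here ...
    const double avgFrameTimeMs = frameTimer.restart() / 100.0; // e.g. 1650 ms elapsed -> 16.50 ms
    std::printf("avg. frametime=%.02fms / %.02fms\n", avgFrameTimeMs, 1000.0 / fps);
    return 0;
}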

View File

@@ -173,6 +173,10 @@ QStringList GrabberWrapper::availableGrabbers(GrabberTypeFilter type)
#ifdef ENABLE_DX
grabbers << "dx";
#endif
#ifdef ENABLE_DDA
grabbers << "dda";
#endif
}
if (type == GrabberTypeFilter::VIDEO || type == GrabberTypeFilter::ALL)

View File

@@ -65,7 +65,7 @@ void LedDeviceWrapper::createLedDevice(const QJsonObject& config)
connect(thread, &QThread::started, _ledDevice, &LedDevice::start);
// further signals
connect(this, &LedDeviceWrapper::updateLeds, _ledDevice, &LedDevice::updateLeds, Qt::QueuedConnection);
connect(this, &LedDeviceWrapper::updateLeds, _ledDevice, &LedDevice::updateLeds, Qt::BlockingQueuedConnection);
connect(this, &LedDeviceWrapper::switchOn, _ledDevice, &LedDevice::switchOn, Qt::BlockingQueuedConnection);
connect(this, &LedDeviceWrapper::switchOff, _ledDevice, &LedDevice::switchOff, Qt::BlockingQueuedConnection);

View File

@@ -895,6 +895,7 @@ void LedDevicePhilipsHueBridge::setBridgeDetails(const QJsonDocument &doc, bool
log( "API-Version", "%u.%u.%u", _api_major, _api_minor, _api_patch );
log( "API v2 ready", "%s", _isAPIv2Ready ? "Yes" : "No" );
log( "Entertainment ready", "%s", _isHueEntertainmentReady ? "Yes" : "No" );
log( "Use Entertainment API", "%s", _useEntertainmentAPI ? "Yes" : "No" );
log( "DIYHue", "%s", _isDiyHue ? "Yes" : "No" );
}
}
@@ -1799,11 +1800,11 @@ bool LedDevicePhilipsHue::init(const QJsonObject &deviceConfig)
if (LedDevicePhilipsHueBridge::init(_devConfig))
{
log( "Off on Black", "%s", _switchOffOnBlack ? "Yes" : "No" );
log( "Brightness Factor", "%f", _brightnessFactor );
log( "Transition Time", "%d", _transitionTime );
log( "Restore Original State", "%s", _isRestoreOrigState ? "Yes" : "No" );
log( "Use Hue Entertainment API", "%s", _useEntertainmentAPI ? "Yes" : "No" );
log("Off on Black", "%s", _switchOffOnBlack ? "Yes" : "No" );
log("Brightness Factor", "%f", _brightnessFactor );
log("Transition Time", "%d", _transitionTime );
log("Restore Original State", "%s", _isRestoreOrigState ? "Yes" : "No" );
log("Use Hue Entertainment API", "%s", _useEntertainmentAPI ? "Yes" : "No" );
log("Brightness Threshold", "%f", _blackLevel);
log("CandyGamma", "%s", _candyGamma ? "Yes" : "No" );
log("Time powering off when black", "%s", _onBlackTimeToPowerOff ? "Yes" : "No" );
@@ -1864,7 +1865,7 @@ bool LedDevicePhilipsHue::setLights()
Debug(_log, "Lights configured: %d", configuredLightsCount );
if (updateLights( getLightMap()))
{
if (_useApiV2)
if (_useApiV2 && _useEntertainmentAPI)
{
_channelsCount = getGroupChannelsCount (_groupId);
@@ -2208,15 +2209,14 @@ int LedDevicePhilipsHue::write(const std::vector<ColorRgb> & ledValues)
int rc {0};
if (_isOn)
{
if (!_useApiV2)
{
rc = writeSingleLights( ledValues );
}
if (_useEntertainmentAPI && _isInitLeds)
{
rc= writeStreamData(ledValues);
}
else
{
rc = writeSingleLights( ledValues );
}
}
return rc;
}
@@ -2482,7 +2482,7 @@ void LedDevicePhilipsHue::setColor(PhilipsHueLight& light, CiColor& color)
QJsonObject colorXY;
colorXY[API_X_COORDINATE] = color.x;
colorXY[API_Y_COORDINATE] = color.y;
cmd.insert(API_COLOR, QJsonObject {{API_DURATION, colorXY }});
cmd.insert(API_COLOR, QJsonObject {{API_XY_COORDINATES, colorXY }});
cmd.insert(API_DIMMING, QJsonObject {{API_BRIGHTNESS, bri }});
}
else
@@ -2556,7 +2556,7 @@ void LedDevicePhilipsHue::setState(PhilipsHueLight& light, bool on, const CiColo
QJsonObject colorXY;
colorXY[API_X_COORDINATE] = color.x;
colorXY[API_Y_COORDINATE] = color.y;
cmd.insert(API_COLOR, QJsonObject {{API_DURATION, colorXY }});
cmd.insert(API_COLOR, QJsonObject {{API_XY_COORDINATES, colorXY }});
cmd.insert(API_DIMMING, QJsonObject {{API_BRIGHTNESS, bri }});
}
else
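
To make the API_DURATION -> API_XY_COORDINATES correction above concrete, the sketch below builds the colour command the fixed code sends to the CLIP v2 API. The literal key names and numbers are assumptions of mine; the diff only shows the API_* constants:

// Hypothetical sketch of the corrected command shape; key strings are assumed.
#include <QJsonDocument>
#include <QJsonObject>
#include <cstdio>

int main()
{
    QJsonObject colorXY;
    colorXY["x"] = 0.3127; // example CIE xy coordinates
    colorXY["y"] = 0.3290;

    QJsonObject cmd;
    cmd.insert("color", QJsonObject{{"xy", colorXY}});        // previously nested under the duration key
    cmd.insert("dimming", QJsonObject{{"brightness", 80.0}}); // percent

    std::printf("%s\n", QJsonDocument(cmd).toJson(QJsonDocument::Compact).constData());
    return 0;
}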

View File

@@ -30,7 +30,8 @@ enum HttpStatusCode {
BadRequest = 400,
UnAuthorized = 401,
Forbidden = 403,
NotFound = 404
NotFound = 404,
TooManyRequests = 429
};
} //End of constants
@@ -336,6 +337,15 @@ httpResponse ProviderRestApi::getResponse(QNetworkReply* const& reply)
case HttpStatusCode::NotFound:
advise = "Check Resource given";
break;
case HttpStatusCode::TooManyRequests:
{
QString retryAfterTime = response.getHeader("Retry-After");
if (!retryAfterTime.isEmpty())
{
advise = "Retry-After: " + response.getHeader("Retry-After");
}
}
break;
default:
advise = httpReason;
break;
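
The new 429 branch above only surfaces the Retry-After header in the advise text. If a caller wanted to honour it, the header carries either a delay in seconds or an HTTP date; a hypothetical helper for the seconds form (not part of this commit) might look like:

#include <QString>

// Hypothetical helper, not in this commit: returns the Retry-After delay in
// seconds, or -1 if the header is empty or uses the HTTP-date form.
int parseRetryAfterSeconds(const QString &retryAfter)
{
    bool ok = false;
    const int seconds = retryAfter.trimmed().toInt(&ok);
    return ok ? seconds : -1;
}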

View File

@@ -29,9 +29,6 @@ void ImageResampler::processImage(const uint8_t * data, int width, int height, i
int cropTop = _cropTop;
int cropBottom = _cropBottom;
int xDestFlip = 0, yDestFlip = 0;
int uOffset = 0, vOffset = 0;
// handle 3D mode
switch (_videoMode)
{
@@ -53,118 +50,175 @@ void ImageResampler::processImage(const uint8_t * data, int width, int height, i
outputImage.resize(outputWidth, outputHeight);
for (int yDest = 0, ySource = cropTop + (_verticalDecimation >> 1); yDest < outputHeight; ySource += _verticalDecimation, ++yDest)
int xDestStart, xDestEnd;
int yDestStart, yDestEnd;
switch (_flipMode)
{
int yOffset = lineLength * ySource;
if (pixelFormat == PixelFormat::NV12)
{
uOffset = (height + ySource / 2) * lineLength;
}
else if (pixelFormat == PixelFormat::I420)
{
uOffset = width * height + (ySource/2) * width/2;
vOffset = width * height * 1.25 + (ySource/2) * width/2;
}
case FlipMode::NO_CHANGE:
xDestStart = 0;
xDestEnd = outputWidth-1;
yDestStart = 0;
yDestEnd = outputHeight-1;
break;
case FlipMode::HORIZONTAL:
xDestStart = 0;
xDestEnd = outputWidth-1;
yDestStart = -(outputHeight-1);
yDestEnd = 0;
break;
case FlipMode::VERTICAL:
xDestStart = -(outputWidth-1);
xDestEnd = 0;
yDestStart = 0;
yDestEnd = outputHeight-1;
break;
case FlipMode::BOTH:
xDestStart = -(outputWidth-1);
xDestEnd = 0;
yDestStart = -(outputHeight-1);
yDestEnd = 0;
break;
}
for (int xDest = 0, xSource = cropLeft + (_horizontalDecimation >> 1); xDest < outputWidth; xSource += _horizontalDecimation, ++xDest)
switch (pixelFormat)
{
case PixelFormat::UYVY:
{
switch (_flipMode)
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
case FlipMode::HORIZONTAL:
xDestFlip = xDest;
yDestFlip = outputHeight-yDest-1;
break;
case FlipMode::VERTICAL:
xDestFlip = outputWidth-xDest-1;
yDestFlip = yDest;
break;
case FlipMode::BOTH:
xDestFlip = outputWidth-xDest-1;
yDestFlip = outputHeight-yDest-1;
break;
case FlipMode::NO_CHANGE:
xDestFlip = xDest;
yDestFlip = yDest;
break;
}
ColorRgb &rgb = outputImage(xDestFlip, yDestFlip);
switch (pixelFormat)
{
case PixelFormat::UYVY:
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
int index = yOffset + (xSource << 1);
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
int index = lineLength * ySource + (xSource << 1);
uint8_t y = data[index+1];
uint8_t u = ((xSource&1) == 0) ? data[index ] : data[index-2];
uint8_t v = ((xSource&1) == 0) ? data[index+2] : data[index ];
ColorSys::yuv2rgb(y, u, v, rgb.red, rgb.green, rgb.blue);
}
break;
case PixelFormat::YUYV:
}
break;
}
case PixelFormat::YUYV:
{
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
int index = yOffset + (xSource << 1);
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
int index = lineLength * ySource + (xSource << 1);
uint8_t y = data[index];
uint8_t u = ((xSource&1) == 0) ? data[index+1] : data[index-1];
uint8_t v = ((xSource&1) == 0) ? data[index+3] : data[index+1];
ColorSys::yuv2rgb(y, u, v, rgb.red, rgb.green, rgb.blue);
}
break;
case PixelFormat::BGR16:
}
break;
}
case PixelFormat::BGR16:
{
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
int index = yOffset + (xSource << 1);
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
int index = lineLength * ySource + (xSource << 1);
rgb.blue = (data[index] & 0x1f) << 3;
rgb.green = (((data[index+1] & 0x7) << 3) | (data[index] & 0xE0) >> 5) << 2;
rgb.red = (data[index+1] & 0xF8);
}
break;
case PixelFormat::BGR24:
}
break;
}
case PixelFormat::BGR24:
{
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
int index = yOffset + (xSource << 1) + xSource;
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
int index = lineLength * ySource + (xSource << 1) + xSource;
rgb.blue = data[index ];
rgb.green = data[index+1];
rgb.red = data[index+2];
}
break;
case PixelFormat::RGB32:
}
break;
}
case PixelFormat::RGB32:
{
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
int index = yOffset + (xSource << 2);
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
int index = lineLength * ySource + (xSource << 2);
rgb.red = data[index ];
rgb.green = data[index+1];
rgb.blue = data[index+2];
}
break;
case PixelFormat::BGR32:
}
break;
}
case PixelFormat::BGR32:
{
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
int index = yOffset + (xSource << 2);
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
int index = lineLength * ySource + (xSource << 2);
rgb.blue = data[index ];
rgb.green = data[index+1];
rgb.red = data[index+2];
}
break;
case PixelFormat::NV12:
}
break;
}
case PixelFormat::NV12:
{
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
int uOffset = (height + ySource / 2) * lineLength;
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
uint8_t y = data[yOffset + xSource];
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
uint8_t y = data[lineLength * ySource + xSource];
uint8_t u = data[uOffset + ((xSource >> 1) << 1)];
uint8_t v = data[uOffset + ((xSource >> 1) << 1) + 1];
ColorSys::yuv2rgb(y, u, v, rgb.red, rgb.green, rgb.blue);
}
break;
case PixelFormat::I420:
}
break;
}
case PixelFormat::I420:
{
for (int yDest = yDestStart, ySource = cropTop + (_verticalDecimation >> 1); yDest <= yDestEnd; ySource += _verticalDecimation, ++yDest)
{
int uOffset = width * height + (ySource/2) * width/2;
int vOffset = width * height * 1.25 + (ySource/2) * width/2;
for (int xDest = xDestStart, xSource = cropLeft + (_horizontalDecimation >> 1); xDest <= xDestEnd; xSource += _horizontalDecimation, ++xDest)
{
int y = data[yOffset + xSource];
ColorRgb & rgb = outputImage(abs(xDest), abs(yDest));
int y = data[lineLength * ySource + xSource];
int u = data[uOffset + (xSource >> 1)];
int v = data[vOffset + (xSource >> 1)];
ColorSys::yuv2rgb(y, u, v, rgb.red, rgb.green, rgb.blue);
break;
}
break;
case PixelFormat::MJPEG:
break;
case PixelFormat::NO_CHANGE:
Error(Logger::getInstance("ImageResampler"), "Invalid pixel format given");
break;
}
break;
}
case PixelFormat::MJPEG:
break;
case PixelFormat::NO_CHANGE:
Error(Logger::getInstance("ImageResampler"), "Invalid pixel format given");
break;
}
}
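
The heart of the resampler rewrite above is that flipping is now encoded purely in the destination loop bounds: a flipped axis iterates the destination index over a negative range and writes through abs(), so the source index still advances monotonically and the old per-pixel flip switch disappears. A standalone toy sketch of just that indexing trick (not Hyperion code):

#include <cstdio>
#include <cstdlib> // std::abs

// Copies src[0..n-1] into dst, optionally mirrored, using the same
// "negative destination range + abs()" indexing as the refactor above.
void copyRow(const int *src, int *dst, int n, bool flip)
{
    const int destStart = flip ? -(n - 1) : 0;
    const int destEnd = flip ? 0 : n - 1;

    int s = 0; // the source index always walks forward
    for (int d = destStart; d <= destEnd; ++d, ++s)
    {
        dst[std::abs(d)] = src[s]; // abs() mirrors the write order when flipped
    }
}

int main()
{
    const int src[4] = {1, 2, 3, 4};
    int dst[4] = {0, 0, 0, 0};
    copyRow(src, dst, 4, /*flip=*/true);
    std::printf("%d %d %d %d\n", dst[0], dst[1], dst[2], dst[3]); // prints: 4 3 2 1
    return 0;
}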