Feat: Add image sender to webui + Browser screen capture (#611)

* Feat: Add image sender to webui

This PR adds a new image sending feature to the webui and extends the API accordingly. Processing images in JavaScript is horribly slow (without WASM), so the processing happens on the API side using Qt's out-of-the-box image support.

- The image command now accepts a raw image encoded as base64. All image formats that Qt supports are accepted (which is plenty). See the sketch after this list.
- There is no hard size limit: images are automatically scaled down to a maximum of 2000px width or height while preserving the aspect ratio.
- Developers can additionally request downscaling through a new "scale" property.
- Tests were successful with pictures up to 3MP; 4K+ images close the WebSocket on the browser side, so 2000px is a reasonable limit.
- Adds a new image streaming feature from the browser (tabs/applications/the complete desktop as source). This only works together with HTTPS (PR #612) AND a recent version of Firefox/Chrome.
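
For illustration, a minimal sketch (not part of this commit) of how a client could use the extended "image" command from plain JavaScript. The function name, host/port, priority, scale and origin values below are illustrative assumptions; see the schema diff below for the accepted fields.

```js
// Minimal sketch: send a picture through the extended JSON API "image" command.
// Assumes the WebSocket is already open when sendImageFile() is called.
const ws = new WebSocket("ws://hyperion-host:8090"); // adjust host/port to your setup

function sendImageFile(file, durationMs) {
  const reader = new FileReader();
  reader.onload = (e) => {
    // e.target.result is a data URL ("data:image/png;base64,....");
    // only the base64 part after the comma is sent.
    const base64 = e.target.result.split(",")[1];
    ws.send(JSON.stringify({
      command: "image",
      imagedata: base64,        // any image format Qt can decode
      format: "auto",           // let the server detect the format and decode it
      scale: 800,               // optional (25..2000): server-side downscale
      name: file.name,          // display name, truncated to 16 characters server-side
      priority: 50,             // example priority
      duration: durationMs,
      origin: "Example client"
    }));
  };
  reader.readAsDataURL(file);
}

// e.g. sendImageFile(document.getElementById("remote_input_img").files[0], 5000);
```

The browser capture feature in streamer.js uses the same path: each captured frame is drawn to a hidden canvas, exported as a base64 PNG and pushed through requestSetImage().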
brindosch 2019-08-21 16:10:35 +02:00 committed by GitHub
parent 8e5f3251b5
commit 09ee8f26ee
11 changed files with 245 additions and 28 deletions


@ -43,9 +43,12 @@
<span class="input-group-addon" id="remote_input_reseff" title="Repeat Effect" style="cursor:pointer"><i class="fa fa-repeat"></i></span>
</td>
</tr>
<tr style="display:none">
<td style="vertical-align:middle"><label for="remote_input_img" >Picture:</label></td>
<td><input id="remote_input_img" type="file" accept="image/*" /></td>
<tr>
<td style="vertical-align:middle"><label for="remote_input_img" data-i18n="remote_effects_label_picture" >Picture:</label></td>
<td class="input-group custom-file">
<input class="form-control" id="remote_input_img" type="file" accept="image/*" />
<span class="input-group-addon" id="remote_input_repimg" title="Repeat Image" style="cursor:pointer"><i class="fa fa-repeat"></i></span>
</td>
</tr>
<tr>
<td style="vertical-align:middle"><label for="remote_duration" data-i18n="remote_input_duration"></label></td>


@ -215,6 +215,7 @@
"remote_color_button_reset": "Farbe/Effekt zurücksetzen",
"remote_color_label_color": "Farbe:",
"remote_effects_label_effects": "Effekt:",
"remote_effects_label_picture" : "Bild:",
"remote_adjustment_label": "Farbanpassung",
"remote_adjustment_intro": "Verändere live Farbe/Helligkeit/Kompensation. $1",
"remote_videoMode_label": "Video Modus",


@ -214,6 +214,7 @@
"remote_color_button_reset" : "Reset Color/Effect",
"remote_color_label_color" : "Color:",
"remote_effects_label_effects" : "Effect:",
"remote_effects_label_picture" : "Picture:",
"remote_adjustment_label" : "Color adjustment",
"remote_adjustment_intro" : "Modifiy color/brightness/compensation during runtime. $1",
"remote_videoMode_label" : "Video mode",


@ -93,6 +93,16 @@
<!-- /.navbar-header -->
<ul class="nav navbar-top-links navbar-right">
<!-- Browser built in capture stream - streamer.js -->
<li class="dropdown" id="btn_streamer" style="display:none">
<!-- Hidden helpers -->
<canvas style="display:none" id="streamcanvas"></canvas>
<video style="display:none" id="streamvideo" autoplay></video>
<a>
<i id="btn_streamer_icon" class="fa fa-video-camera fa-fw"></i>
</a>
</li>
<!-- instance switcher -->
<li class="dropdown" id="btn_hypinstanceswitch" style="display:none">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">
<i class="fa fa-exchange fa-fw"></i> <i class="fa fa-caret-down"></i>
@ -311,6 +321,7 @@
<script src="js/content_index.js"></script>
<script src="js/settings.js"></script>
<script src="js/streamer.js"></script>
<script src="js/wizard.js"></script>
<!--gijgo dialog-->


@ -6,6 +6,8 @@ $(document).ready(function() {
var mappingList = window.serverSchema.properties.color.properties.imageToLedMappingType.enum;
var duration = 0;
var rgb = {r:255,g:0,b:0};
var lastImgData = "";
var lastFileName= "";
//create html
createTable('ssthead', 'sstbody', 'sstcont');
@ -115,7 +117,7 @@ $(document).ready(function() {
if(priority > 254)
continue;
if(priority < 254 && (compId == "EFFECT" || compId == "COLOR") )
if(priority < 254 && (compId == "EFFECT" || compId == "COLOR" || compId == "IMAGE") )
clearAll = true;
if (visible)
@ -139,6 +141,9 @@ $(document).ready(function() {
case "COLOR":
owner = $.i18n('remote_color_label_color')+' '+'<div style="width:18px; height:18px; border-radius:20px; margin-bottom:-4px; border:1px grey solid; background-color: rgb('+value+'); display:inline-block" title="RGB: ('+value+')"></div>';
break;
case "IMAGE":
owner = $.i18n('remote_effects_label_picture')+' '+owner;
break;
case "GRABBER":
owner = $.i18n('general_comp_GRABBER')+': ('+owner+')';
break;
@ -161,7 +166,7 @@ $(document).ready(function() {
var btn = '<button id="srcBtn'+i+'" type="button" '+btn_state+' class="btn btn-'+btn_type+' btn_input_selection" onclick="requestSetSource('+priority+');">'+btn_text+'</button>';
if((compId == "EFFECT" || compId == "COLOR") && priority < 254)
if((compId == "EFFECT" || compId == "COLOR" || compId == "IMAGE") && priority < 254)
btn += '<button type="button" class="btn btn-sm btn-danger" style="margin-left:10px;" onclick="requestPriorityClear('+priority+');"><i class="fa fa-close"></button>';
if(btn_type != 'default')
@ -301,7 +306,9 @@ $(document).ready(function() {
$("#reset_color").off().on("click", function(){
requestPriorityClear();
lastImgData = "";
$("#effect_select").val("__none__");
$("#remote_input_img").val("");
});
$("#remote_duration").off().on("change", function(){
@ -320,10 +327,20 @@ $(document).ready(function() {
sendEffect();
});
$("#remote_input_repimg").off().on("click", function(){
if(lastImgData != "")
requestSetImage(lastImgData, duration, lastFileName);
});
$("#remote_input_img").change(function(){
readImg(this, function(src,width,height){
console.log(src,width,height)
requestSetImage(src,width,height,duration)
readImg(this, function(src,fileName){
lastFileName = fileName;
if(src.includes(","))
lastImgData = src.split(",")[1];
else
lastImgData = src;
requestSetImage(lastImgData, duration, lastFileName);
});
});


@ -123,19 +123,19 @@ function initWebSocket()
{
var error = response.hasOwnProperty("error")? response.error : "unknown";
$(window.hyperion).trigger({type:"error",reason:error});
console.log("[window.websocket::onmessage] "+error)
console.log("[window.websocket::onmessage] ",error)
}
}
catch(exception_error)
{
$(window.hyperion).trigger({type:"error",reason:exception_error});
console.log("[window.websocket::onmessage] "+exception_error)
console.log("[window.websocket::onmessage] ",exception_error)
}
};
window.websocket.onerror = function (error) {
$(window.hyperion).trigger({type:"error",reason:error});
console.log("[window.websocket::onerror] "+error)
console.log("[window.websocket::onerror] ",error)
};
}
}
@ -290,9 +290,9 @@ function requestSetColor(r,g,b,duration)
sendToHyperion("color", "", '"color":['+r+','+g+','+b+'], "priority":'+window.webPrio+',"duration":'+validateDuration(duration)+',"origin":"'+window.webOrigin+'"');
}
function requestSetImage(data,width,height,duration)
function requestSetImage(data,duration,name)
{
sendToHyperion("image", "", '"imagedata":"'+data+'", "imagewidth":'+width+',"imageheight":'+height+', "priority":'+window.webPrio+',"duration":'+validateDuration(duration)+'');
sendToHyperion("image", "", '"imagedata":"'+data+'", "priority":'+window.webPrio+',"duration":'+validateDuration(duration)+', "format":"auto", "origin":"'+window.webOrigin+'", "name":"'+name+'"');
}
function requestSetComponentState(comp, state)


@ -0,0 +1,111 @@
$(document).ready( function() {
// check if browser supports streaming
if(window.navigator.mediaDevices && window.navigator.mediaDevices.getDisplayMedia){
$("#btn_streamer").toggle()
}
// variables
var streamActive = false;
var screenshotTimer = "";
var screenshotIntervalTimeMs = 100;
var streamImageHeight = 0;
var streamImageWidth = 0;
const videoElem = document.getElementById("streamvideo");
const canvasElem = document.getElementById("streamcanvas");
// Options for getDisplayMedia()
var displayMediaOptions = {
video: {
cursor: "never",
width: 170,
height: 100,
frameRate: 15
},
audio: false
};
async function startCapture() {
streamActive = true;
try {
var stream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
videoElem.srcObject = stream;
// get the active track of the stream
const track = stream.getVideoTracks()[0];
// listen for track ending, fires when user aborts through browser
track.onended = function(event) {
stopCapture();
};
// wait for video ready
videoElem.addEventListener('loadedmetadata', (e) => {
window.setTimeout(() => (
onCapabilitiesReady(track.getSettings())
), 500);
});
} catch(err) {
stopCapture();
console.error("Error: " + err);
}
}
function onCapabilitiesReady(settings) {
// extract real width/height
streamImageWidth = settings.width;
streamImageHeight = settings.height;
// start screenshotTimer
updateScrTimer(false);
// we are sending
$("#btn_streamer_icon").addClass("text-danger");
}
function stopCapture(evt) {
streamActive = false;
$("#btn_streamer_icon").removeClass("text-danger");
updateScrTimer(true);
// sometimes it's null on abort
if(videoElem.srcObject){
let tracks = videoElem.srcObject.getTracks();
tracks.forEach(track => track.stop());
videoElem.srcObject = null;
}
}
function takePicture(){
var context = canvasElem.getContext('2d');
canvasElem.width = streamImageWidth;
canvasElem.height = streamImageHeight;
context.drawImage(videoElem, 0, 0, streamImageWidth, streamImageHeight);
var data = canvasElem.toDataURL('image/png').split(",")[1];
requestSetImage(data, 2, "Streaming");
}
// start or update screenshot timer
function updateScrTimer(stop){
clearInterval(screenshotTimer)
if(stop === false){
screenshotTimer = setInterval(() => (
takePicture()
), screenshotIntervalTimeMs);
}
}
$("#btn_streamer").off().on("click",function(e){
if(!$("#btn_streamer_icon").hasClass("text-danger") && !streamActive){
startCapture();
} else {
stopCapture();
}
});
});


@ -392,11 +392,11 @@ function readImg(input,cb)
{
if (input.files && input.files[0]) {
var reader = new FileReader();
// inject fileName property
reader.fileName = input.files[0].name
reader.onload = function (e) {
var i = new Image();
i.src = e.target.result;
cb(i.src,i.width,i.height);
cb(e.target.result, e.target.fileName);
}
reader.readAsDataURL(input.files[0]);
}


@ -28,17 +28,27 @@
},
"imagewidth": {
"type" : "integer",
"required": true,
"minimum": 0
},
"imageheight": {
"type" : "integer",
"required": true,
"minimum": 0
},
"imagedata": {
"type": "string",
"required": true
},
"format": {
"type": "string",
"enum" : ["auto"]
},
"scale": {
"type": "integer",
"minimum" : 25,
"maximum" : 2000
},
"name": {
"type": "string"
}
},
"additionalProperties": false


@ -112,7 +112,7 @@ bool JsonAPI::handleInstanceSwitch(const quint8& inst, const bool& forced)
// // imageStream last state
// if(_ledcolorsImageActive)
// connect(_hyperion, &Hyperion::currentImage, this, &JsonAPI::setImage, Qt::UniqueConnection);
//
//
// //ledColor stream last state
// if(_ledcolorsLedsActive)
// connect(_hyperion, &Hyperion::rawLedColors, this, &JsonAPI::streamLedcolorsUpdate, Qt::UniqueConnection);
@ -172,7 +172,7 @@ void JsonAPI::handleMessage(const QString& messageString, const QString& httpAut
sendErrorReply("No Authorization", command, tan);
return;
}
// switch over all possible commands and handle them
if (command == "color") handleColorCommand (message, command, tan);
else if (command == "image") handleImageCommand (message, command, tan);
@ -232,20 +232,83 @@ void JsonAPI::handleImageCommand(const QJsonObject& message, const QString& comm
int duration = message["duration"].toInt(-1);
int width = message["imagewidth"].toInt();
int height = message["imageheight"].toInt();
int scale = message["scale"].toInt(-1);
QString format = message["format"].toString();
QString imgName = message["name"].toString("");
QByteArray data = QByteArray::fromBase64(QByteArray(message["imagedata"].toString().toUtf8()));
// check consistency of the size of the received data
if (data.size() != width*height*3)
// truncate name length
imgName.truncate(16);
if(format == "auto")
{
sendErrorReply("Size of image data does not match with the width and height", command, tan);
return;
QImage img = QImage::fromData(data);
if(img.isNull())
{
sendErrorReply("Failed to parse picture, the file might be corrupted", command, tan);
return;
}
// check for requested scale
if(scale > 24)
{
if(img.height() > scale)
{
img = img.scaledToHeight(scale);
}
if(img.width() > scale)
{
img = img.scaledToWidth(scale);
}
}
// check if we need to force a scale
if(img.width() > 2000 || img.height() > 2000)
{
scale = 2000;
if(img.height() > scale)
{
img = img.scaledToHeight(scale);
}
if(img.width() > scale)
{
img = img.scaledToWidth(scale);
}
}
width = img.width();
height = img.height();
// extract image
img = img.convertToFormat(QImage::Format_ARGB32_Premultiplied);
data.clear();
data.reserve(img.width() * img.height() * 3);
for (int i = 0; i < img.height(); ++i)
{
const QRgb * scanline = reinterpret_cast<const QRgb *>(img.scanLine(i));
for (int j = 0; j < img.width(); ++j)
{
data.append((char) qRed(scanline[j]));
data.append((char) qGreen(scanline[j]));
data.append((char) qBlue(scanline[j]));
}
}
}
else
{
// check consistency of the size of the received data
if (data.size() != width*height*3)
{
sendErrorReply("Size of image data does not match with the width and height", command, tan);
return;
}
}
// create ImageRgb
// copy image
Image<ColorRgb> image(width, height);
memcpy(image.memptr(), data.data(), data.size());
_hyperion->registerInput(priority, hyperion::COMP_IMAGE, origin);
_hyperion->registerInput(priority, hyperion::COMP_IMAGE, origin, imgName);
_hyperion->setInputImage(priority, image, duration);
// send reply


@ -268,7 +268,7 @@ void PriorityMuxer::clearAll(bool forceClearAll)
for(auto key : _activeInputs.keys())
{
const InputInfo info = getInputInfo(key);
if ((info.componentId == hyperion::COMP_COLOR || info.componentId == hyperion::COMP_EFFECT) && key < PriorityMuxer::LOWEST_PRIORITY-1)
if ((info.componentId == hyperion::COMP_COLOR || info.componentId == hyperion::COMP_EFFECT || info.componentId == hyperion::COMP_IMAGE) && key < PriorityMuxer::LOWEST_PRIORITY-1)
{
clearInput(key);
}
@ -299,7 +299,7 @@ void PriorityMuxer::setCurrentTime(void)
newPriority = qMin(newPriority, infoIt->priority);
// call timeTrigger when effect or color is running with timeout > 0, blacklist prio 255
if(infoIt->priority < 254 && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR))
if(infoIt->priority < 254 && infoIt->timeoutTime_ms > 0 && (infoIt->componentId == hyperion::COMP_EFFECT || infoIt->componentId == hyperion::COMP_COLOR || infoIt->componentId == hyperion::COMP_IMAGE))
emit signalTimeTrigger(); // as signal to prevent Threading issues
++infoIt;