Merge branch 'dev' into pr_4387

Nick O'Leary
2024-03-21 16:41:24 +00:00
committed by GitHub
349 changed files with 53003 additions and 4221 deletions


@@ -68,6 +68,7 @@ var api = module.exports = {
* @param {String} opts.store - the context store
* @param {String} opts.key - the context key
* @param {Object} opts.req - the request to log (optional)
* @param {Boolean} opts.keysOnly - whether to return keys only
* @return {Promise} - the node information
* @memberof @node-red/runtime_context
*/
@@ -102,6 +103,15 @@ var api = module.exports = {
if (key) {
store = store || availableStores.default;
ctx.get(key,store,function(err, v) {
if (opts.keysOnly) {
if (Array.isArray(v)) {
resolve({ [store]: { format: `array[${v.length}]`}})
} else if (typeof v === 'object') {
resolve({ [store]: { keys: Object.keys(v), format: 'Object' } })
} else {
resolve({ [store]: { keys: [] }})
}
}
var encoded = util.encodeObject({msg:v});
if (store !== availableStores.default) {
encoded.store = store;
@@ -118,32 +128,58 @@ var api = module.exports = {
stores = [store];
}
var result = {};
var c = stores.length;
var errorReported = false;
stores.forEach(function(store) {
-exportContextStore(scope,ctx,store,result,function(err) {
-if (err) {
-// TODO: proper error reporting
-if (!errorReported) {
-errorReported = true;
-runtime.log.audit({event: "context.get",scope:scope,id:id,store:store,key:key,error:"unexpected_error"}, opts.req);
-var err = new Error();
-err.code = "unexpected_error";
-err.status = 400;
-return reject(err);
+if (opts.keysOnly) {
+ctx.keys(store,function(err, keys) {
+if (err) {
+// TODO: proper error reporting
+if (!errorReported) {
+errorReported = true;
+runtime.log.audit({event: "context.get",scope:scope,id:id,store:store,key:key,error:"unexpected_error"}, opts.req);
+var err = new Error();
+err.code = "unexpected_error";
+err.status = 400;
+return reject(err);
+}
+return
+}
+result[store] = { keys }
+c--;
+if (c === 0) {
+if (!errorReported) {
+runtime.log.audit({event: "context.get",scope:scope,id:id,store:store,key:key},opts.req);
+resolve(result);
+}
+}
+})
+} else {
+exportContextStore(scope,ctx,store,result,function(err) {
+if (err) {
+// TODO: proper error reporting
+if (!errorReported) {
+errorReported = true;
+runtime.log.audit({event: "context.get",scope:scope,id:id,store:store,key:key,error:"unexpected_error"}, opts.req);
+var err = new Error();
+err.code = "unexpected_error";
+err.status = 400;
+return reject(err);
+}
+return;
+}
+c--;
+if (c === 0) {
+if (!errorReported) {
+runtime.log.audit({event: "context.get",scope:scope,id:id,store:store,key:key},opts.req);
+resolve(result);
+return;
+}
+}
+});
-c--;
-if (c === 0) {
-if (!errorReported) {
-runtime.log.audit({event: "context.get",scope:scope,id:id,store:store,key:key},opts.req);
-resolve(result);
-}
-}
-});
+}
})
}
} else {

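Note: with the keysOnly option added above, the promise resolves with per-store key listings instead of the encoded value. A minimal sketch of the resolved shapes implied by the branches above (the "memory" store name is illustrative, not part of this change):

    // key set and the value is an array:     { memory: { format: "array[3]" } }
    // key set and the value is an object:    { memory: { keys: ["broker", "topic"], format: "Object" } }
    // key set and the value is a primitive:  { memory: { keys: [] } }
    // no key: ctx.keys() is used per store:  { memory: { keys: ["config", "state"] } }
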

@@ -99,6 +99,9 @@ var api = module.exports = {
safeSettings.markdownEditor = runtime.settings.editorTheme.markdownEditor || {};
safeSettings.markdownEditor.mermaid = safeSettings.markdownEditor.mermaid || { enabled: true };
}
if (runtime.settings.editorTheme.mermaid) {
safeSettings.mermaid = runtime.settings.editorTheme.mermaid
}
}
safeSettings.libraries = runtime.library.getLibraries();
if (util.isArray(runtime.settings.paletteCategories)) {

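Note: the hunk above forwards any editorTheme.mermaid object from the runtime settings to the safe settings sent to the editor. A hypothetical settings.js fragment that would flow through this path (the "theme" option is an example of a mermaid setting, not something defined by this change):

    // settings.js (sketch)
    editorTheme: {
        mermaid: { theme: "dark" }
    }
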

@@ -161,7 +161,8 @@ class Flow {
for (let i = 0; i < configNodes.length; i++) {
const node = this.flow.configs[configNodes[i]]
if (node.type === 'global-config' && node.env) {
-const nodeEnv = await flowUtil.evaluateEnvProperties(this, node.env, credentials.get(node.id))
+const globalCreds = credentials.get(node.id)?.map || {}
+const nodeEnv = await flowUtil.evaluateEnvProperties(this, node.env, globalCreds)
this._env = { ...this._env, ...nodeEnv }
}
}
@@ -484,7 +485,7 @@ class Flow {
}
if (!key.startsWith("$parent.")) {
if (this._env.hasOwnProperty(key)) {
-return this._env[key]
+return (this._env[key] && Object.hasOwn(this._env[key], 'value') && this._env[key].__clone__) ? clone(this._env[key].value) : this._env[key]
}
} else {
key = key.substring(8);

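Note: getSetting now unwraps and clones values that evaluateEnvProperties (changed later in this commit) stored as { value, __clone__: true } wrappers, so each reader of an object-typed env var gets its own copy rather than a shared mutable reference. A minimal sketch of the behaviour, using the same clone helper as the code above:

    const stored = { value: { retries: 3 }, __clone__: true }  // as written by evaluateEnvProperties
    const copy = clone(stored.value)                           // what getSetting now returns
    copy.retries = 99                                          // mutating the copy...
    // ...does not affect the stored value seen by the next reader
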

@@ -41,7 +41,7 @@ class Group {
}
if (!key.startsWith("$parent.")) {
if (this._env.hasOwnProperty(key)) {
-return this._env[key]
+return (this._env[key] && Object.hasOwn(this._env[key], 'value') && this._env[key].__clone__) ? clone(this._env[key].value) : this._env[key]
}
} else {
key = key.substring(8);


@@ -73,9 +73,20 @@ class Subflow extends Flow {
id: subflowInstance.id,
configs: {},
nodes: {},
groups: {},
subflows: {}
}
if (subflowDef.groups) {
// Clone all of the subflow group definitions and give them new IDs
for (i in subflowDef.groups) {
if (subflowDef.groups.hasOwnProperty(i)) {
node = createNodeInSubflow(subflowInstance.id,subflowDef.groups[i]);
node_map[node._alias] = node;
subflowInternalFlowConfig.groups[node.id] = node;
}
}
}
if (subflowDef.configs) {
// Clone all of the subflow config node definitions and give them new IDs
for (i in subflowDef.configs) {
@@ -101,6 +112,7 @@ class Subflow extends Flow {
remapSubflowNodes(subflowInternalFlowConfig.configs,node_map);
remapSubflowNodes(subflowInternalFlowConfig.nodes,node_map);
remapSubflowNodes(subflowInternalFlowConfig.groups,node_map);
// console.log("Instance config\n",JSON.stringify(subflowInternalFlowConfig,"",2));
@@ -200,6 +212,7 @@ class Subflow extends Flow {
var subflowInstanceConfig = {
id: this.subflowInstance.id,
type: this.subflowInstance.type,
g: this.subflowInstance.g,
z: this.subflowInstance.z,
name: this.subflowInstance.name,
wires: [],
@@ -237,7 +250,7 @@ class Subflow extends Flow {
for (j=0;j<wires.length;j++) {
if (wires[j].id != self.subflowDef.id) {
node = self.node_map[wires[j].id];
-if (node._originalWires) {
+if (node && node._originalWires) {
node.wires = clone(node._originalWires);
}
}
@@ -254,8 +267,10 @@ class Subflow extends Flow {
subflowInstanceModified = true;
} else {
node = self.node_map[wires[j].id];
-node.wires[wires[j].port] = node.wires[wires[j].port].concat(newWires[i]);
-modifiedNodes[node.id] = node;
+if (node) {
+node.wires[wires[j].port] = node.wires[wires[j].port].concat(newWires[i]);
+modifiedNodes[node.id] = node;
+}
}
}
}
@@ -283,10 +298,14 @@ class Subflow extends Flow {
this.node._updateWires(subflowInstanceConfig.wires);
} else {
var node = self.node_map[wires[j].id];
-if (!node._originalWires) {
-node._originalWires = clone(node.wires);
+if (node) {
+if (!node._originalWires) {
+node._originalWires = clone(node.wires);
+}
+node.wires[wires[j].port] = (node.wires[wires[j].port]||[]).concat(this.subflowInstance.wires[i]);
+} else {
+this.error("Unknown node referenced inside subflow: " + wires[j].id)
+}
-node.wires[wires[j].port] = (node.wires[wires[j].port]||[]).concat(this.subflowInstance.wires[i]);
}
}
}
@@ -302,11 +321,15 @@ class Subflow extends Flow {
this.node._updateWires(subflowInstanceConfig.wires);
} else {
var node = self.node_map[wires[j].id];
-if (!node._originalWires) {
-node._originalWires = clone(node.wires);
+if (node) {
+if (!node._originalWires) {
+node._originalWires = clone(node.wires);
+}
+node.wires[wires[j].port] = (node.wires[wires[j].port]||[]);
+node.wires[wires[j].port].push(subflowStatusId);
+} else {
+this.error("Unknown node referenced inside subflow: " + wires[j].id)
+}
-node.wires[wires[j].port] = (node.wires[wires[j].port]||[]);
-node.wires[wires[j].port].push(subflowStatusId);
}
}
}
@@ -353,7 +376,7 @@ class Subflow extends Flow {
}
if (!key.startsWith("$parent.")) {
if (this._env.hasOwnProperty(key)) {
-return this._env[key]
+return (this._env[key] && Object.hasOwn(this._env[key], 'value') && this._env[key].__clone__) ? clone(this._env[key].value) : this._env[key]
}
} else {
key = key.substring(8);


@@ -374,7 +374,12 @@ async function start(type,diff,muteLog,isDeploy) {
// A modified-type deploy means restarting things that have changed
// Update the global flow
-activeFlows['global'].update(activeFlowConfig,activeFlowConfig);
+if (activeFlows['global']) {
+activeFlows['global'].update(activeFlowConfig,activeFlowConfig);
+} else {
+log.debug("red/nodes/flows.start : starting flow : global");
+activeFlows['global'] = Flow.create(flowAPI,activeFlowConfig);
+}
for (id in activeFlowConfig.flows) {
if (activeFlowConfig.flows.hasOwnProperty(id)) {
if (!activeFlowConfig.flows[id].disabled) {


@@ -57,18 +57,20 @@ var EnvVarPropertyRE = /^\${(\S+)}$/;
function mapEnvVarProperties(obj,prop,flow,config) {
-var v = obj[prop];
+const v = obj[prop];
if (Buffer.isBuffer(v)) {
return;
} else if (Array.isArray(v)) {
-for (var i=0;i<v.length;i++) {
+for (let i=0;i<v.length;i++) {
mapEnvVarProperties(v,i,flow,config);
}
} else if (typeof obj[prop] === 'string') {
if (obj[prop][0] === "$" && (EnvVarPropertyRE_old.test(v) || EnvVarPropertyRE.test(v)) ) {
-var envVar = v.substring(2,v.length-1);
-var r = redUtil.getSetting(config, envVar, flow);
-obj[prop] = r ? r : obj[prop];
+const envVar = v.substring(2,v.length-1);
+const r = redUtil.getSetting(config, envVar, flow);
+if (r !== undefined && r !== '') {
+obj[prop] = r
+}
}
} else {
for (var p in v) {
@@ -80,6 +82,7 @@ function mapEnvVarProperties(obj,prop,flow,config) {
}
async function evaluateEnvProperties(flow, env, credentials) {
credentials = credentials || {}
const pendingEvaluations = []
const evaluatedEnv = {}
const envTypes = []
@@ -99,6 +102,9 @@ async function evaluateEnvProperties(flow, env, credentials) {
pendingEvaluations.push(new Promise((resolve, _) => {
redUtil.evaluateNodeProperty(value, 'jsonata', {_flow: flow}, null, (err, result) => {
if (!err) {
if (typeof result === 'object') {
result = { value: result, __clone__: true}
}
evaluatedEnv[name] = result
}
resolve()
@@ -106,12 +112,16 @@ async function evaluateEnvProperties(flow, env, credentials) {
}))
} else {
value = redUtil.evaluateNodeProperty(value, type, {_flow: flow}, null, null);
if (typeof value === 'object') {
value = { value: value, __clone__: true}
}
}
evaluatedEnv[name] = value
}
if (pendingEvaluations.length > 0) {
await Promise.all(pendingEvaluations)
}
// Now loop over the env types and evaluate them properly
for (let i = 0; i < envTypes.length; i++) {
let { name, value, type } = envTypes[i]
// If an env-var wants to lookup itself, delegate straight to the parent
@@ -122,10 +132,25 @@ async function evaluateEnvProperties(flow, env, credentials) {
if (evaluatedEnv.hasOwnProperty(value)) {
value = evaluatedEnv[value]
} else {
-value = redUtil.evaluateNodeProperty(value, type, {_flow: flow}, null, null);
+value = redUtil.evaluateNodeProperty(value, type, {_flow: {
+// Provide a hook so when it tries to look up a flow setting,
+// we can insert the just-evaluated value which hasn't yet
+// been set on the flow object - otherwise delegate up to the flow
+getSetting: function(name) {
+if (evaluatedEnv.hasOwnProperty(name)){
+return evaluatedEnv[name]
+}
+return flow.getSetting(name)
+}
+}}, null, null);
}
if (typeof value === 'object' && !value.__clone__) {
value = { value: value, __clone__: true}
}
evaluatedEnv[name] = value
}
// console.log(evaluatedEnv)
return evaluatedEnv
}

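Note: mapEnvVarProperties above now replaces a ${...} property whenever the lookup yields anything other than undefined or an empty string, so falsy-but-defined results such as 0 or false are no longer discarded. Sketch of the rule (setting names hypothetical):

    // getSetting returns "mqtt.local"     -> obj[prop] becomes "mqtt.local"
    // getSetting returns 0 or false       -> obj[prop] becomes 0 / false (previously left as "${NAME}")
    // getSetting returns undefined or ""  -> obj[prop] is left unchanged
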

@@ -27,6 +27,7 @@ var express = require("express");
var path = require('path');
var fs = require("fs");
var os = require("os");
const crypto = require("crypto")
const {log,i18n,events,exec,util,hooks} = require("@node-red/util");
@@ -51,7 +52,7 @@ var adminApi = {
var nodeApp;
var adminApp;
var server;
let userSettings;
/**
* Initialise the runtime module.
@@ -61,8 +62,9 @@ var server;
* better abstracted.
* @memberof @node-red/runtime
*/
-function init(userSettings,httpServer,_adminApi) {
+function init(_userSettings,httpServer,_adminApi) {
server = httpServer;
userSettings = _userSettings
if (server && server.on) {
// Add a listener to the upgrade event so that we can properly timeout connection
@@ -134,7 +136,12 @@ function start() {
.then(function() { return settings.load(storage)})
.then(function() { return library.init(runtime)})
.then(function() {
if (settings.available()) {
if (settings.get('instanceId') === undefined) {
settings.set('instanceId', crypto.randomBytes(8).toString('hex'))
}
userSettings.instanceId = settings.get('instanceId') || ''
}
if (log.metric()) {
runtimeMetricInterval = setInterval(function() {
reportMetrics();
@@ -147,7 +154,7 @@ function start() {
log.info(log._("runtime.version",{component:"Node.js ",version:process.version}));
if (settings.UNSUPPORTED_VERSION) {
log.error("*****************************************************************");
log.error("* "+log._("runtime.unsupported_version",{component:"Node.js",version:process.version,requires: ">=8.9.0"})+" *");
log.error("* "+log._("runtime.unsupported_version",{component:"Node.js",version:process.version,requires: ">=18"})+" *");
log.error("*****************************************************************");
events.emit("runtime-event",{id:"runtime-unsupported-version",payload:{type:"error",text:"notification.errors.unsupportedVersion"},retain:true});
}

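Note: the start sequence above generates a persistent instanceId on first run and exposes it on the user settings. A minimal sketch of the id being produced, using Node's built-in crypto module as in the change above:

    const crypto = require("crypto")
    const instanceId = crypto.randomBytes(8).toString("hex")
    // e.g. "9f2c41a07b3d5e18" - 8 random bytes rendered as 16 hex characters
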

@@ -42,6 +42,7 @@ function Node(n) {
this._closeCallbacks = [];
this._inputCallback = null;
this._inputCallbacks = null;
this._expectedDoneCount = 0;
if (n.name) {
this.name = n.name;
@@ -159,6 +160,9 @@ Node.prototype.on = function(event, callback) {
if (event == "close") {
this._closeCallbacks.push(callback);
} else if (event === "input") {
if (callback.length === 3) {
this._expectedDoneCount++
}
if (this._inputCallback) {
this._inputCallbacks = [this._inputCallback, callback];
this._inputCallback = null;
@@ -218,19 +222,17 @@ Node.prototype._emitInput = function(arg) {
} else if (node._inputCallbacks) {
// Multiple callbacks registered. Call each one, tracking eventual completion
var c = node._inputCallbacks.length;
+let doneCount = 0
for (var i=0;i<c;i++) {
var cb = node._inputCallbacks[i];
-if (cb.length === 2) {
-c++;
-}
try {
cb.call(
node,
arg,
function() { node.send.apply(node,arguments) },
function(err) {
-c--;
-if (c === 0) {
+doneCount++;
+if (doneCount === node._expectedDoneCount) {
node._complete(arg,err);
}
}
@@ -257,6 +259,9 @@ Node.prototype._removeListener = Node.prototype.removeListener;
Node.prototype.removeListener = function(name, listener) {
var index;
if (name === "input") {
if (listener.length === 3) {
this._expectedDoneCount--
}
if (this._inputCallback && this._inputCallback === listener) {
// Removing the only callback
this._inputCallback = null;

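Note: _expectedDoneCount above counts input listeners that declare a third done argument; _emitInput now completes the message once that many listeners have called done, rather than decrementing a shared counter. A sketch of the two listener shapes being distinguished (handler bodies are illustrative):

    // counted towards _expectedDoneCount (callback.length === 3)
    node.on("input", function(msg, send, done) { send(msg); done(); })
    // not counted (no done parameter)
    node.on("input", function(msg, send) { send(msg); })
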

@@ -384,10 +384,28 @@ var api = module.exports = {
}
}
} else if (nodeType === "global-config") {
-if (JSON.stringify(savedCredentials.map) !== JSON.stringify(newCreds.map)) {
-savedCredentials.map = newCreds.map;
-dirty = true;
-}
+savedCredentials.map = savedCredentials.map || {}
+const existingCredentialKeys = Object.keys(savedCredentials.map)
+const newCredentialKeys = Object.keys(newCreds?.map || [])
+existingCredentialKeys.forEach(key => {
+if (!newCreds.map?.[key]) {
+// This key doesn't exist in the new credentials list - remove
+delete savedCredentials.map[key]
+delete savedCredentials.map[`has_${key}`]
+dirty = true
+}
+})
+newCredentialKeys.forEach(key => {
+if (!/^has_/.test(key)) {
+if (!savedCredentials.map[key] || newCreds.map[key] !== '__PWRD__') {
+// This key either doesn't exist in current saved, or the
+// value has been changed
+savedCredentials.map[key] = newCreds.map[key]
+savedCredentials.map[`has_${key}`] = newCreds.map[`has_${key}`]
+dirty = true
+}
+}
+})
} else {
var dashedType = nodeType.replace(/\s+/g, '-');
var definition = credentialsDef[dashedType];

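Note: the global-config branch above now merges credential maps key by key: keys absent from the incoming map are deleted (along with their has_ flags), and the placeholder value '__PWRD__' means "unchanged", so the stored secret is preserved. An illustrative before/after (key names hypothetical):

    // saved:    { apiKey: "abc", has_apiKey: true, token: "xyz", has_token: true }
    // incoming: { apiKey: "__PWRD__", has_apiKey: true }
    // result:   { apiKey: "abc", has_apiKey: true }   // apiKey kept, token (and has_token) removed
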

@@ -77,7 +77,7 @@ var storageModuleInterface = {
flows: flows,
credentials: creds
};
-result.rev = crypto.createHash('md5').update(JSON.stringify(result.flows)).digest("hex");
+result.rev = crypto.createHash('sha256').update(JSON.stringify(result.flows)).digest("hex");
return result;
})
});
@@ -95,7 +95,7 @@ var storageModuleInterface = {
return credentialSavePromise.then(function() {
return storageModule.saveFlows(flows, user).then(function() {
-return crypto.createHash('md5').update(JSON.stringify(config.flows)).digest("hex");
+return crypto.createHash('sha256').update(JSON.stringify(config.flows)).digest("hex");
})
});
},
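Note: both revision hashes above move from md5 to sha256; the rev is still a hex digest of the serialised flow configuration, now 64 characters long instead of 32. Sketch, using Node's built-in crypto module as in the code above:

    const crypto = require("crypto")
    const rev = crypto.createHash("sha256").update(JSON.stringify(flows)).digest("hex")
    // 64-character hex string identifying this revision of the flows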