Merge branch 'dev' into 3843-alternative-impl
@@ -100,9 +100,13 @@ function buildDiagnosticReport(scope, callback) {
             version: os.version(),
         },
         runtime: {
-            isStarted: runtime.isStarted(),
-            modules: modules,
             version: runtime.settings.version,
+            isStarted: runtime.isStarted(),
+            flows: {
+                state: runtime.flows && runtime.flows.state(),
+                started: runtime.flows && runtime.flows.started,
+            },
+            modules: modules,
             settings: {
                 available: runtime.settings.available(),
                 apiMaxLength: runtime.settings.apiMaxLength || "UNSET",
@@ -114,6 +118,11 @@ function buildDiagnosticReport(scope, callback) {
                 flowFile: runtime.settings.flowFile || "UNSET",
                 mqttReconnectTime: runtime.settings.mqttReconnectTime || "UNSET",
                 serialReconnectTime: runtime.settings.serialReconnectTime || "UNSET",
+                socketReconnectTime: runtime.settings.socketReconnectTime || "UNSET",
+                socketTimeout: runtime.settings.socketTimeout || "UNSET",
+                tcpMsgQueueSize: runtime.settings.tcpMsgQueueSize || "UNSET",
+                inboundWebSocketTimeout: runtime.settings.inboundWebSocketTimeout || "UNSET",
+                runtimeState: runtime.settings.runtimeState || "UNSET",

                 adminAuth: runtime.settings.adminAuth ? "SET" : "UNSET",
@@ -131,6 +140,7 @@ function buildDiagnosticReport(scope, callback) {
                 uiHost: runtime.settings.uiHost ? "SET" : "UNSET",
                 uiPort: runtime.settings.uiPort ? "SET" : "UNSET",
                 userDir: runtime.settings.userDir ? "SET" : "UNSET",
+                nodesDir: runtime.settings.nodesDir && runtime.settings.nodesDir.length ? "SET" : "UNSET",
             }
         }
     }
@@ -89,10 +89,16 @@ var api = module.exports = {
     if (!runtime.settings.disableEditor) {
         safeSettings.context = runtime.nodes.listContextStores();
-        if (runtime.settings.editorTheme && runtime.settings.editorTheme.codeEditor) {
-            safeSettings.codeEditor = runtime.settings.editorTheme.codeEditor || {};
-            safeSettings.codeEditor.lib = safeSettings.codeEditor.lib || "monaco";
-            safeSettings.codeEditor.options = safeSettings.codeEditor.options || {};
+        if (runtime.settings.editorTheme) {
+            if (runtime.settings.editorTheme.codeEditor) {
+                safeSettings.codeEditor = runtime.settings.editorTheme.codeEditor || {};
+                safeSettings.codeEditor.lib = safeSettings.codeEditor.lib || "monaco";
+                safeSettings.codeEditor.options = safeSettings.codeEditor.options || {};
+            }
+            if (runtime.settings.editorTheme.markdownEditor) {
+                safeSettings.markdownEditor = runtime.settings.editorTheme.markdownEditor || {};
+                safeSettings.markdownEditor.mermaid = safeSettings.markdownEditor.mermaid || { enabled: true };
+            }
         }
         safeSettings.libraries = runtime.library.getLibraries();
         if (util.isArray(runtime.settings.paletteCategories)) {
@@ -889,6 +889,16 @@ function handlePreRoute(flow, sendEvent, reportError) {
     })
 }

+function deliverMessageToDestination(sendEvent) {
+    if (sendEvent?.destination?.node) {
+        try {
+            sendEvent.destination.node.receive(sendEvent.msg);
+        } catch(err) {
+            Log.error(`Error delivering message to node:${sendEvent.destination.node._path} [${sendEvent.destination.node.type}]`)
+            Log.error(err.stack)
+        }
+    }
+}
 function handlePreDeliver(flow,sendEvent, reportError) {
     // preDeliver - the local router has identified the node it is going to send to. At this point, the message has been cloned if needed.
     hooks.trigger("preDeliver",sendEvent,(err) => {
@@ -898,15 +908,10 @@ function handlePreDeliver(flow,sendEvent, reportError) {
         } else if (err !== false) {
             if (asyncMessageDelivery) {
                 setImmediate(function() {
-                    if (sendEvent.destination.node) {
-                        sendEvent.destination.node.receive(sendEvent.msg);
-                    }
+                    deliverMessageToDestination(sendEvent)
                 })
             } else {
-                if (sendEvent.destination.node) {
-                    sendEvent.destination.node.receive(sendEvent.msg);
-
-                }
+                deliverMessageToDestination(sendEvent)
             }
             // postDeliver - the message has been dispatched to be delivered asynchronously (unless the sync delivery flag is set, in which case it would be continue as synchronous delivery)
             hooks.trigger("postDeliver", sendEvent, function(err) {
@@ -474,7 +474,7 @@ class Subflow extends Flow {
  */
 function createNodeInSubflow(subflowInstanceId, def) {
     let node = clone(def);
-    let nid = redUtil.generateId();
+    let nid = `${subflowInstanceId}-${node.id}` //redUtil.generateId();
     // console.log("Create Node In subflow",node._alias, "--->",nid, "(",node.type,")")
     // node_map[node.id] = node;
     node._alias = node.id;
@@ -641,6 +641,7 @@ function getFlow(id) {
             if (node.type === 'link out') {
                 delete node.wires;
             }
+            delete node.credentials;
             return node;
         })
     }
@@ -648,7 +649,10 @@ function getFlow(id) {
     if (flow.configs) {
         var configIds = Object.keys(flow.configs);
         result.configs = configIds.map(function(configId) {
-            return clone(flow.configs[configId]);
+            const node = clone(flow.configs[configId]);
+            delete node.credentials;
+            return node
+
         })
         if (result.configs.length === 0) {
             delete result.configs;
@@ -660,12 +664,16 @@ function getFlow(id) {
         var subflow = clone(flow.subflows[subflowId]);
         var nodeIds = Object.keys(subflow.nodes);
         subflow.nodes = nodeIds.map(function(id) {
-            return subflow.nodes[id];
+            const node = clone(subflow.nodes[id])
+            delete node.credentials
+            return node
         });
         if (subflow.configs) {
             var configIds = Object.keys(subflow.configs);
             subflow.configs = configIds.map(function(id) {
-                return subflow.configs[id];
+                const node = clone(subflow.configs[id])
+                delete node.credentials
+                return node
             })
         }
         delete subflow.instances;
@@ -777,6 +785,16 @@ const flowAPI = {
 }


+function getGlobalConfig() {
+    let gconf = null;
+    eachNode((n) => {
+        if (n.type === "global-config") {
+            gconf = n;
+        }
+    });
+    return gconf;
+}
+
 module.exports = {
     init: init,
@@ -790,6 +808,9 @@ module.exports = {
     get:getNode,
     eachNode: eachNode,

+
+    getGlobalConfig: getGlobalConfig,
+
     /**
      * Gets the current flow configuration
      */
@@ -18,7 +18,9 @@ var redUtil = require("@node-red/util").util;
 var Log = require("@node-red/util").log;
 var subflowInstanceRE = /^subflow:(.+)$/;
 var typeRegistry = require("@node-red/registry");
+const credentials = require("../nodes/credentials");

+let _runtime = null;

 var envVarExcludes = {};
@@ -134,10 +136,12 @@ function createNode(flow,config) {
                 subflowInstanceConfig,
                 instanceConfig
             );
+            // Register this subflow as an instance node of the parent flow.
+            // This allows nodes inside the subflow to get ahold of each other
+            // such as a node accessing its config node
+            flow.subflowInstanceNodes[config.id] = subflow
             subflow.start();
             return subflow.node;

             Log.error(Log._("nodes.flow.unknown-type", {type:type}));
         }
     } catch(err) {
         Log.error(err);
@@ -197,7 +201,9 @@ function parseConfig(config) {
         if (subflowDetails) {
             var subflowType = subflowDetails[1]
             n.subflow = subflowType;
-            flow.subflows[subflowType].instances.push(n)
+            if (flow.subflows[subflowType]) {
+                flow.subflows[subflowType].instances.push(n)
+            }
         }
         if (container) {
             container.nodes[n.id] = n;
@@ -263,15 +269,55 @@ function parseConfig(config) {
     return flow;
 }

+function getGlobalEnv(name) {
+    const nodes = _runtime.nodes;
+    if (!nodes) {
+        return null;
+    }
+    const gconf = nodes.getGlobalConfig();
+    const env = gconf ? gconf.env : null;
+
+    if (env) {
+        const cred = (gconf ? credentials.get(gconf.id) : null) || {
+            map: {}
+        };
+        const map = cred.map;
+
+        for (let i = 0; i < env.length; i++) {
+            const item = env[i];
+            if (item.name === name) {
+                if (item.type === "cred") {
+                    return {
+                        name: name,
+                        value: map[name],
+                        type: "cred"
+                    };
+                }
+                return item;
+            }
+        }
+    }
+    return null;
+}
+
 module.exports = {
     init: function(runtime) {
+        _runtime = runtime;
         envVarExcludes = {};
         if (runtime.settings.hasOwnProperty('envVarExcludes') && Array.isArray(runtime.settings.envVarExcludes)) {
             runtime.settings.envVarExcludes.forEach(v => envVarExcludes[v] = true);
         }
     },
     getEnvVar: function(k) {
-        return !envVarExcludes[k]?process.env[k]:undefined
+        if (!envVarExcludes[k]) {
+            const item = getGlobalEnv(k);
+            if (item) {
+                const val = redUtil.evaluateNodeProperty(item.value, item.type, null, null, null);
+                return val;
+            }
+            return process.env[k];
+        }
+        return undefined;
     },
     diffNodes: diffNodes,
     mapEnvVarProperties: mapEnvVarProperties,
@@ -89,6 +89,15 @@ function init(userSettings,httpServer,_adminApi) {
     nodeApp = express();
     adminApp = express();
+    const defaultServerSettings = {
+        "x-powered-by": false
+    }
+    const serverSettings = Object.assign({},defaultServerSettings,userSettings.httpServerOptions||{});
+    for (let eOption in serverSettings) {
+        nodeApp.set(eOption, serverSettings[eOption]);
+        adminApp.set(eOption, serverSettings[eOption]);
+    }
+
     if (_adminApi) {
         adminApi = _adminApi;
@@ -161,6 +170,8 @@ function start() {
     for (i=0;i<nodeErrors.length;i+=1) {
         if (nodeErrors[i].err.code === "type_already_registered") {
             log.warn("["+nodeErrors[i].id+"] "+log._("server.type-already-registered",{type:nodeErrors[i].err.details.type,module: nodeErrors[i].err.details.moduleA}));
+        } else if (nodeErrors[i].err.code === "set_has_no_types") {
+            log.warn("["+nodeErrors[i].id+"] "+log._("server.set-has-no-types", nodeErrors[i].err.details));
         } else {
             log.warn("["+nodeErrors[i].id+"] "+nodeErrors[i].err);
         }
@@ -373,6 +373,11 @@ Node.prototype.send = function(msg) {
     if (msg === null || typeof msg === "undefined") {
         return;
     } else if (!util.isArray(msg)) {
+        // A single message has been passed in
+        if (typeof msg !== 'object') {
+            this.error(Log._("nodes.flow.non-message-returned", { type: typeof msg }));
+            return
+        }
         if (this._wire) {
             // A single message and a single wire on output 0
             // TODO: pre-load flows.get calls - cannot do in constructor
@@ -425,27 +430,31 @@ Node.prototype.send = function(msg) {
             for (k = 0; k < msgs.length; k++) {
                 var m = msgs[k];
                 if (m !== null && m !== undefined) {
-                    if (!m._msgid) {
-                        hasMissingIds = true;
+                    if (typeof m !== 'object') {
+                        this.error(Log._("nodes.flow.non-message-returned", { type: typeof m }));
+                    } else {
+                        if (!m._msgid) {
+                            hasMissingIds = true;
+                        }
+                        /* istanbul ignore else */
+                        if (!sentMessageId) {
+                            sentMessageId = m._msgid;
+                        }
+                        sendEvents.push({
+                            msg: m,
+                            source: {
+                                id: this.id,
+                                node: this,
+                                port: i
+                            },
+                            destination: {
+                                id: wires[j],
+                                node: undefined
+                            },
+                            cloneMessage: msgSent
+                        });
+                        msgSent = true;
                     }
-                    /* istanbul ignore else */
-                    if (!sentMessageId) {
-                        sentMessageId = m._msgid;
-                    }
-                    sendEvents.push({
-                        msg: m,
-                        source: {
-                            id: this.id,
-                            node: this,
-                            port: i
-                        },
-                        destination: {
-                            id: wires[j],
-                            node: undefined
-                        },
-                        cloneMessage: msgSent
-                    });
-                    msgSent = true;
                 }
             }
         }
@@ -589,17 +589,28 @@ function deleteContext(id,flowId) {
  * If flowConfig is undefined, all flow/node contexts will be removed
  **/
 function clean(flowConfig) {
-    flowConfig = flowConfig || { allNodes: {} };
-    var promises = [];
-    for(var plugin in stores){
-        if(stores.hasOwnProperty(plugin)){
-            promises.push(stores[plugin].clean(Object.keys(flowConfig.allNodes)));
-        }
+    flowConfig = flowConfig || { allNodes: {}, subflows: {} };
+    const knownNodes = new Set(Object.keys(flowConfig.allNodes))
+
+    // We need to alias all of the subflow instance contents
+    for (const subflow of Object.values(flowConfig.subflows || {})) {
+        subflow.instances.forEach(instance => {
+            for (const nodeId of Object.keys(subflow.nodes || {})) {
+                knownNodes.add(`${instance.id}-${nodeId}`)
+            }
+            for (const nodeId of Object.keys(subflow.configs || {})) {
+                knownNodes.add(`${instance.id}-${nodeId}`)
+            }
+        })
     }
-    for (var id in contexts) {
-        if (contexts.hasOwnProperty(id) && id !== "global") {
+    var promises = [];
+    for (const store of Object.values(stores)){
+        promises.push(store.clean(Array.from(knownNodes)));
+    }
+    for (const id of Object.keys(contexts)) {
+        if (id !== "global") {
             var idParts = id.split(":");
-            if (!flowConfig.allNodes.hasOwnProperty(idParts[0])) {
+            if (!knownNodes.has(idParts[0])) {
                 delete contexts[id];
             }
         }
@@ -383,6 +383,11 @@ var api = module.exports = {
                     }
                 }
             }
+        } else if (nodeType === "global-config") {
+            if (JSON.stringify(savedCredentials.map) !== JSON.stringify(newCreds.map)) {
+                savedCredentials.map = newCreds.map;
+                dirty = true;
+            }
         } else {
             var dashedType = nodeType.replace(/\s+/g, '-');
             var definition = credentialsDef[dashedType];
@@ -205,6 +205,7 @@ module.exports = {
     getNode: flows.get,
     eachNode: flows.eachNode,
     getContext: context.get,
+    getGlobalConfig: flows.getGlobalConfig,

     clearContext: context.clear,
@@ -18,7 +18,7 @@ var i18n = require("@node-red/util").i18n;
 module.exports = {
     "package.json": function(project) {
-        var package = {
+        var packageDetails = {
             "name": project.name,
             "description": project.summary||i18n._("storage.localfilesystem.projects.summary"),
             "version": "0.0.1",
@@ -30,11 +30,11 @@ module.exports = {
         };
         if (project.files) {
             if (project.files.flow) {
-                package['node-red'].settings.flowFile = project.files.flow;
-                package['node-red'].settings.credentialsFile = project.files.credentials;
+                packageDetails['node-red'].settings.flowFile = project.files.flow;
+                packageDetails['node-red'].settings.credentialsFile = project.files.credentials;
             }
         }
-        return JSON.stringify(package,"",4);
+        return JSON.stringify(packageDetails,"",4);
     },
     "README.md": function(project) {
         var content = project.name+"\n"+("=".repeat(project.name.length))+"\n\n";
@@ -71,6 +71,8 @@ function runGitCommand(args,cwd,env,emit) {
                     err.code = "git_missing_user";
+                } else if (/name consists only of disallowed characters/i.test(stderr)) {
+                    err.code = "git_missing_user";
                 } else if (/nothing (add )?to commit/i.test(stdout)) {
                     return stdout;
                 }
                 throw err;
             })
@@ -106,7 +108,7 @@ function runGitCommandWithSSHCommand(args,cwd,auth,emit) {
         commandEnv.GIT_SSH = path.join(__dirname,"node-red-ssh.sh");
         commandEnv.NODE_RED_KEY_FILE=auth.key_path;
         // GIT_SSH_COMMAND - added in git 2.3.0
-        commandEnv.GIT_SSH_COMMAND = "ssh -i " + auth.key_path + " -F /dev/null";
+        commandEnv.GIT_SSH_COMMAND = "ssh -i \"" + auth.key_path + "\" -F /dev/null";
         // console.log('commandEnv:', commandEnv);
         return runGitCommand(args,cwd,commandEnv,emit).then( result => {
             rs.close();
@@ -419,7 +421,10 @@ module.exports = {
         });
     },
     initRepo: function(cwd) {
-        return runGitCommand(["init"],cwd);
+        var args = ["init", "--initial-branch", "main"];
+        return runGitCommand(args, cwd).catch(function () {
+            return runGitCommand(["init"], cwd);
+        });
     },
     setUpstream: function(cwd,remoteBranch) {
         var args = ["branch","--set-upstream-to",remoteBranch];