/**
 * Copyright 2014 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **/

var util = require("util");
var clone = require("clone");
var when = require("when");

var typeRegistry = require("./registry");
var credentials = require("./credentials");
var log = require("../log");
var events = require("../events");

var storage = null;

var nodes = {};
var subflows = {};
var activeConfig = [];
var missingTypes = [];

events.on('type-registered',function(type) {
    if (missingTypes.length > 0) {
        var i = missingTypes.indexOf(type);
        if (i != -1) {
            missingTypes.splice(i,1);
            util.log("[red] Missing type registered: "+type);
            if (missingTypes.length === 0) {
                parseConfig();
            }
        }
    }
});
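
/**
 * Generates a pseudo-random hex id, used below to give cloned subflow nodes
 * fresh ids.
 */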
function getID() {
    return (1+Math.random()*4294967295).toString(16);
}
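
/**
 * Creates a running instance of a subflow.
 * @param sf the subflow definition, including its interior node configs
 * @param sfn the config of the node that represents this instance in the parent flow
 */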
function createSubflow(sf,sfn) {
    var node_map = {};
    var newNodes = [];
    var node;
    var wires;
    var i,j,k;

    // Clone all of the subflow node definitions and give them new IDs
    for (i=0;i<sf.nodes.length;i++) {
        node = clone(sf.nodes[i]);
        var nid = getID();
        node_map[node.id] = node;
        node.id = nid;
        newNodes.push(node);
    }

    // Update all subflow interior wiring to reflect new node IDs
    for (i=0;i<newNodes.length;i++) {
        node = newNodes[i];
        var outputs = node.wires;

        for (j=0;j<outputs.length;j++) {
            wires = outputs[j];
            for (k=0;k<wires.length;k++) {
                outputs[j][k] = node_map[outputs[j][k]].id;
            }
        }
    }

    // Create a subflow node to accept inbound messages and route appropriately
    var Node = require("./Node");
    var subflowInstance = {
        id: sfn.id,
        type: sfn.type,
        name: sfn.name,
        wires: []
    };
    if (sf.in) {
        subflowInstance.wires = sf.in.map(function(n) { return n.wires.map(function(w) { return node_map[w.id].id; }); });
    }
    var subflowNode = new Node(subflowInstance);
    subflowNode.on("input", function(msg) { this.send(msg); });

    // Wire the subflow outputs
    if (sf.out) {
        for (i=0;i<sf.out.length;i++) {
            wires = sf.out[i].wires;
            for (j=0;j<wires.length;j++) {
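                // A wire whose source id is the subflow definition itself is a
                // direct input-to-output connection, so it attaches to the subflow
                // node; otherwise it attaches to the corresponding cloned interior node.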
                if (wires[j].id === sf.id) {
                    node = subflowNode;
                    delete subflowNode._wire;
                } else {
                    node = node_map[wires[j].id];
                }
                node.wires[wires[j].port] = node.wires[wires[j].port].concat(sfn.wires[i]);
            }
        }
    }

    // Instantiate the nodes
    for (i=0;i<newNodes.length;i++) {
        node = newNodes[i];
        var nn = null;
        var type = node.type;

        var m = /^subflow:(.+)$/.exec(type);
        if (!m) {
            var nt = typeRegistry.get(type);
            if (nt) {
                try {
                    nn = new nt(node);
                }
                catch (err) {
                    util.log("[red] "+type+" : "+err);
                }
            }
            if (nn === null) {
                util.log("[red] unknown type: "+type);
            }
        } else {
            var subflowId = m[1];
            createSubflow(subflows[subflowId],node);
        }
    }

}

/**
 * Parses the current activeConfig and creates the required node instances
 */
function parseConfig() {
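    // activeConfig is the flow config loaded from storage: roughly an array of node
    // configs such as {id:"...", type:"...", z:"<tab id>", wires:[["<node id>"]]},
    // plus "tab"/"workspace" and "subflow" entries (illustrative shape only).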
    var i;
    var nt;
    var type;
    var subflow;
    missingTypes = [];

    // Scan the configuration for any unknown node types
    for (i=0;i<activeConfig.length;i++) {
        type = activeConfig[i].type;
        // TODO: remove workspace in next release+1
        if (type != "workspace" && type != "tab" && !/^subflow($|:.+$)/.test(type)) {
            nt = typeRegistry.get(type);
            if (!nt && missingTypes.indexOf(type) == -1) {
                missingTypes.push(type);
            }
        }
    }
    // Abort if there are any missing types
    if (missingTypes.length > 0) {
        util.log("[red] Waiting for missing types to be registered:");
        for (i=0;i<missingTypes.length;i++) {
            util.log("[red] - "+missingTypes[i]);
        }
        return;
    }

    util.log("[red] Starting flows");
    events.emit("nodes-starting");
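
    // Gather the subflow definitions so that their instances can be expanded below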
    for (i=0;i<activeConfig.length;i++) {
        type = activeConfig[i].type;
        if (type === "subflow") {
            subflow = activeConfig[i];
            subflow.nodes = [];
            subflow.instances = [];
            subflows[subflow.id] = subflow;
        }
    }
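
    // Attach each interior node to the subflow definition it belongs to (matched by z)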
    for (i=0;i<activeConfig.length;i++) {
        if (subflows[activeConfig[i].z]) {
            subflow = subflows[activeConfig[i].z];
            subflow.nodes.push(activeConfig[i]);
        }
    }

    // Instantiate each node in the flow
    for (i=0;i<activeConfig.length;i++) {
        var nn = null;
        type = activeConfig[i].type;

        var m = /^subflow:(.+)$/.exec(type);
        if (!m) {
            // TODO: remove workspace in next release+1
            if (type != "workspace" && type != "tab" && type != "subflow" && !subflows[activeConfig[i].z]) {
                nt = typeRegistry.get(type);
                if (nt) {
                    try {
                        nn = new nt(activeConfig[i]);
                    }
                    catch (err) {
                        util.log("[red] "+type+" : "+err);
                    }
                }
                if (nn === null) {
                    util.log("[red] unknown type: "+type);
                }
            }
        } else {
            var subflowId = m[1];
            createSubflow(subflows[subflowId],activeConfig[i]);
        }
    }

    // Clean up any orphaned credentials
    credentials.clean(flowNodes.get);
    events.emit("nodes-started");
}

/**
 * Stops the current activeConfig
 */
function stopFlows() {
    if (activeConfig && activeConfig.length > 0) {
        util.log("[red] Stopping flows");
    }
    return flowNodes.clear();
}
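
/*
 * The exported flow manager. A minimal usage sketch (assuming the runtime
 * requires this module as `flows`; the actual call sites live elsewhere):
 *
 *   flows.init(storage);
 *   flows.load();              // load and start the stored flows
 *   flows.setFlows(newConfig); // persist, stop and restart with a new config
 */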

var flowNodes = module.exports = {
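    /**
     * Initialise the flow manager with the storage implementation to use
     * @param _storage the storage module used to load and save flows
     */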
    init: function(_storage) {
        storage = _storage;
    },

    /**
     * Load the current activeConfig from storage and start it running
     * @return a promise for the loading of the config
     */
    load: function() {
        return storage.getFlows().then(function(flows) {
            return credentials.load().then(function() {
                activeConfig = flows;
                if (activeConfig && activeConfig.length > 0) {
                    parseConfig();
                }
            });
        }).otherwise(function(err) {
            util.log("[red] Error loading flows : "+err);
        });
    },

    /**
     * Add a node to the current active set
     * @param n the node to add
     */
    add: function(n) {
        nodes[n.id] = n;
        n.on("log",log.log);
    },

    /**
     * Get a node
     * @param i the node id
     * @return the node
     */
    get: function(i) {
        return nodes[i];
    },

    /**
     * Stops all active nodes and clears the active set
     * @return a promise for the stopping of all active nodes
     */
    clear: function() {
        return when.promise(function(resolve) {
            events.emit("nodes-stopping");
            var promises = [];
            for (var n in nodes) {
                if (nodes.hasOwnProperty(n)) {
                    try {
                        var p = nodes[n].close();
                        if (p) {
                            promises.push(p);
                        }
                    } catch(err) {
                        nodes[n].error(err);
                    }
                }
            }
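            // Wait for any close handlers that returned a promise to settle
            // before declaring the nodes stopped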
            when.settle(promises).then(function() {
                events.emit("nodes-stopped");
                nodes = {};
                resolve();
            });
        });
    },

    /**
     * Provides an iterator over the active set of nodes
     * @param cb a function to be called for each node in the active set
     */
    each: function(cb) {
        for (var n in nodes) {
            if (nodes.hasOwnProperty(n)) {
                cb(nodes[n]);
            }
        }
    },

    /**
     * @return the active configuration
     */
    getFlows: function() {
        return activeConfig;
    },

    /**
     * Sets the current active config.
     * @param config the configuration to enable
     * @return a promise for the starting of the new flow
     */
    setFlows: function (config) {
        // Extract any credential updates
        for (var i=0; i<config.length; i++) {
            var node = config[i];
            if (node.credentials) {
                credentials.extract(node);
                delete node.credentials;
            }
        }
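        // Persist the credentials and the new config, stop the currently
        // running flows, then make the new config active and start it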
        return credentials.save()
            .then(function() { return storage.saveFlows(config);})
            .then(function() { return stopFlows();})
            .then(function () {
                activeConfig = config;
                parseConfig();
            });
    },

    stopFlows: stopFlows
};