Filter out duplicate nodes when importing a flow

Nick O'Leary 2019-03-15 19:02:24 +00:00
parent afe89c3621
commit 8aa00b0cfc
GPG Key ID: 4F2157149161A6C9
2 changed files with 14 additions and 1 deletion


@@ -766,6 +766,20 @@ RED.nodes = (function() {
         if (!$.isArray(newNodes)) {
             newNodes = [newNodes];
         }
+        // Scan for any duplicate nodes and remove them. This is a temporary
+        // fix to help resolve corrupted flows caused by 0.20.0 where multiple
+        // copies of the flow would get loaded at the same time.
+        // If the user hit deploy they would have saved those duplicates.
+        var seenIds = {};
+        newNodes = newNodes.filter(function(n) {
+            if (seenIds[n.id]) {
+                return false;
+            }
+            seenIds[n.id] = true;
+            return true;
+        })
         var isInitialLoad = false;
         if (!initialLoad) {
             isInitialLoad = true;

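For context, the filter added above deduplicates purely on node id: the first node with a given id is kept and any later copy is dropped. A minimal standalone sketch of that behaviour follows, using made-up node objects (the ids and types are illustrative only, not taken from the commit):

// Standalone sketch of the dedupe-by-id filter added in this commit.
// The node objects here are invented for illustration; real imported
// nodes carry many more properties.
var nodes = [
    { id: "a1b2c3", type: "inject" },
    { id: "d4e5f6", type: "debug" },
    { id: "a1b2c3", type: "inject" }   // duplicate caused by a double load
];

var seenIds = {};
var deduped = nodes.filter(function(n) {
    if (seenIds[n.id]) {
        return false;          // id already seen - drop this copy
    }
    seenIds[n.id] = true;      // remember the id so later copies are skipped
    return true;
});

console.log(deduped.length);   // 2 - only the first "a1b2c3" node survives

Because the filter keeps the first occurrence, a flow that was loaded twice collapses back to a single copy, while genuinely distinct nodes are untouched.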

@@ -34,7 +34,6 @@ var RED = (function() {
             var srcUrl = $(el).attr('src');
             if (srcUrl && !/^\s*(https?:|\/|\.)/.test(srcUrl)) {
                 $(el).remove();
-                console.log("Appending script for",moduleId)
                 var newScript = document.createElement("script");
                 newScript.onload = function() {
                     scriptCount--;
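The change in this second hunk only removes a leftover debug log; the surrounding context loads additional editor scripts by creating script elements and counting their onload callbacks. A rough, self-contained sketch of that load-counter pattern is below, with hypothetical script URLs and a hypothetical allScriptsLoaded() callback (neither appears in the commit):

// Rough sketch of the script load-counter pattern visible in the context
// lines above. The URLs and allScriptsLoaded() are hypothetical.
var scriptSrcs = ["plugin-a.js", "plugin-b.js"];
var scriptCount = scriptSrcs.length;

function allScriptsLoaded() {
    console.log("all scripts loaded");
}

scriptSrcs.forEach(function(src) {
    var newScript = document.createElement("script");
    newScript.onload = function() {
        scriptCount--;                 // one more script has finished loading
        if (scriptCount === 0) {
            allScriptsLoaded();        // everything requested is now loaded
        }
    };
    newScript.src = src;
    document.body.appendChild(newScript);
});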