Normalization and Clustering Flow

A Node-RED flow for data normalization and clustering. It accepts input data over an HTTP endpoint, applies the requested normalization technique (Min-Max or mean normalization), clusters the data with the K-means algorithm, and returns the cluster centroids together with statistical properties of the input data (minimum, maximum, mean, and standard deviation per resource) as JSON. The payload's mode field controls whether each resource is clustered on its own ("multiple") or whether all resources are combined into multi-dimensional points and clustered together ("single").
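An example request body, mirroring the flow's built-in Test input node (field names, defaults, and the /run endpoint all come from the flow export below):

// POST this JSON body to the flow's /run endpoint; inside the flow it arrives
// as msg.payload and is read by the Clustering function node.
msg.payload = {
    "value": {
        "mode": "single",              // "single" or "multiple"
        "normalization": "mean",       // "minmax", "mean"; any other value leaves the data unnormalized
        "data": [
            { "name": "Resource1", "value": [1, 2, 3, 4, 5] },
            { "name": "Resource2", "value": [6, 7, 8, 9, 15] }
        ],
        "numClusters": 3,              // optional, defaults to 3
        "numIterations": 1000          // optional, defaults to 750
    }
};

The HTTP response payload contains three fields: centroids, normalizationType, and data_statistical_properties (min, max, mean, and stdDev for each resource).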

[{"id":"0bdfbac1f0c218e0","type":"tab","label":"Normalization Extension","disabled":false,"info":"","env":[]},{"id":"60e707bae46e2331","type":"http in","z":"0bdfbac1f0c218e0","name":"","url":"/run","method":"post","upload":false,"swaggerDoc":"","x":400,"y":220,"wires":[["61b34aabae37d071"]]},{"id":"22fcc7626accc6f6","type":"http response","z":"0bdfbac1f0c218e0","name":"","statusCode":"","headers":{},"x":950,"y":220,"wires":[]},{"id":"49d8a6eef4cb19ab","type":"debug","z":"0bdfbac1f0c218e0","name":"debug 5","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"true","targetType":"full","statusVal":"","statusType":"auto","x":940,"y":140,"wires":[]},{"id":"61b34aabae37d071","type":"function","z":"0bdfbac1f0c218e0","name":"Clustering function","func":"const clusterMaker = global.get('clusters');\nvar centroids = [];\nlet normalizationType = \"None\";\nlet data_statistical_properties; \n\n// Retrieve the parameters from the input message or use default values\nconst numClusters = msg.payload.value.numClusters || 3;\nconst numIterations = msg.payload.value.numIterations || 750;\n\nclusterMaker.k(numClusters);\nclusterMaker.iterations(numIterations);\n\n// Function to perform Min-Max normalization\nfunction minMaxNormalization(data, clustering_type) {\n    if (clustering_type == \"multiple\"){\n        const min = Math.min(...data);\n        const max = Math.max(...data);\n        const scaledData = data.map(value => [(value[0] - min) / (max - min)]);\n        return scaledData;\n    } \n    else if (clustering_type == \"single\"){\n        const min = Math.min(...data);\n        const max = Math.max(...data);\n        const scaledData = data.map(value => (value - min) / (max - min));\n        return scaledData;\n    } \n}\n\n// Function to perform mean normalization\nfunction meanNormalization(data, clustering_type) {\n    if (clustering_type == \"multiple\") {\n        const values = data.map(value => value[0]);\n        const mean = values.reduce((sum, value) => sum + value, 0) / values.length;\n        const stdDev = Math.sqrt(\n            values.reduce((sum, value) => sum + Math.pow(value - mean, 2), 0) / values.length\n        );\n        const scaledData = data.map(value => [(value[0] - mean) / stdDev]);\n        return scaledData;\n    }\n    else if (clustering_type == \"single\") {\n        const sum = data.reduce((acc, value) => acc + value, 0);\n        const mean = sum / data.length;\n        const stdDev = Math.sqrt(\n            data.reduce((sum, value) => sum + Math.pow(value - mean, 2), 0) / data.length\n        );\n        const scaledData = data.map(value => (value - mean) / stdDev);\n        return scaledData;\n    } \n}\n\nfunction get_data_statistical_properties(data){\n\n    const stat_properties = [];\n\n    for (const key in data) {\n        const resource = data[key];\n        const valueArray = resource.value;\n        const min = Math.min(...valueArray);\n        const max = Math.max(...valueArray);\n        const sum = valueArray.reduce((acc, value) => acc + value, 0);\n        const mean = sum / valueArray.length;\n        const stdDev = Math.sqrt(\n            valueArray.reduce((sum, value) => sum + Math.pow(value - mean, 2), 0) / valueArray.length\n        );\n        stat_properties.push({\n            name: resource.name,\n            value: {\"min\": min, \"max\":max, \"mean\":mean, \"stdDev\":stdDev}\n        });\n    }\n\n    return stat_properties;\n}\n\n\nif (msg.payload.value.mode === \"multiple\") {\n    // For each resource\n    
msg.payload.value.data.forEach(element => {\n        // Creating clusterMakerInput\n        let clusterMakerInput = [];\n        element.value.forEach(value => {\n            clusterMakerInput.push([value]);\n        });\n\n        // Print the unnormalized data\n        //node.warn(\"Unnormalized data:\");\n        //node.warn(clusterMakerInput);\n\n        // Apply feature scaling based on the user-specified normalization technique\n        if (msg.payload.value.normalization === \"minmax\") {\n            //clusterMakerInput = clusterMakerInput.map(data => minMaxNormalization(data));\n            clusterMakerInput = minMaxNormalization(clusterMakerInput, msg.payload.value.mode)\n            normalizationType = \"MinMax\";\n        } else if (msg.payload.value.normalization === \"mean\") {\n            //clusterMakerInput = clusterMakerInput.map(data => meanNormalization(data));\n            clusterMakerInput = meanNormalization(clusterMakerInput, msg.payload.value.mode)\n            normalizationType = \"Mean\";\n        }\n\n        // Print the normalized data\n        //node.warn(\"Normalized data:\");\n        //node.warn(clusterMakerInput);\n\n        clusterMaker.data(clusterMakerInput);\n\n        let temp = clusterMaker.clusters();\n\n        // Creating the output\n        let clusterMakerOutput = [];\n        temp.forEach(element => {\n            element.centroid.forEach(element2 => {\n                clusterMakerOutput.push(element2);\n            });\n        });\n        clusterMakerOutput.sort();\n        centroids.push({ [element.name]: clusterMakerOutput });\n    });\n} else if (msg.payload.value.mode === \"single\") {\n\n    let input_values;\n    const normalizedData = [];\n\n    // Apply feature scaling based on the user-specified normalization technique\n    if (msg.payload.value.normalization === \"minmax\") {\n        for (const key in msg.payload.value.data) {\n            const resource = msg.payload.value.data[key];\n            const valueArray = resource.value;\n            const normalizedArray = minMaxNormalization(valueArray, \"single\");\n            normalizedData.push({\n                name: resource.name,\n                value: normalizedArray\n            });\n        }\n        input_values = normalizedData;\n        normalizationType = \"MinMax\";\n    } else if (msg.payload.value.normalization === \"mean\") {\n        for (const key in msg.payload.value.data) {\n            const resource = msg.payload.value.data[key];\n            const valueArray = resource.value;\n            const normalizedArray = meanNormalization(valueArray, \"single\");\n            normalizedData[key] = {\n                name: resource.name,\n                value: normalizedArray\n            };\n        }\n\n        input_values = normalizedData;\n        //node.warn(input_values);\n        normalizationType = \"Mean\";\n    }\n    else {\n        input_values = msg.payload.value.data;\n        //node.warn(input_values);\n        normalizationType = \"None\";\n    }\n\n    // data.length = m\n    // data.element1.length = n\n    // Finding the maximum length of all data arrays\n    let n = [];\n\n    for (let m = 0; m < input_values.length; m++) {\n        n.push(input_values[m].value.length);\n    }\n\n    const nMax = Math.max(...n);\n\n    // Creating the array for clusterMakerInput\n    let clusterMakerInput = [];\n    for (let n = 0; n < nMax; n++) {\n        let arrTemp = [];\n        input_values.forEach(m => {\n            arrTemp.push(m.value[n]);\n        
});\n        node.warn(arrTemp);\n        clusterMakerInput.push(arrTemp);\n    }\n\n    clusterMaker.data(clusterMakerInput);\n    centroids = clusterMaker.clusters();\n\n}\n\ndata_statistical_properties = get_data_statistical_properties(msg.payload.value.data)\n\nmsg.payload = { centroids, normalizationType, data_statistical_properties};\nreturn msg;","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":730,"y":220,"wires":[["22fcc7626accc6f6","49d8a6eef4cb19ab"]]},{"id":"3bb826503eae074c","type":"http in","z":"0bdfbac1f0c218e0","name":"","url":"/init","method":"post","upload":false,"swaggerDoc":"","x":540,"y":400,"wires":[["386d1d1a05338043"]]},{"id":"386d1d1a05338043","type":"http response","z":"0bdfbac1f0c218e0","name":"","statusCode":"","headers":{},"x":710,"y":400,"wires":[]},{"id":"326aa812e0eb4ed0","type":"catch","z":"0bdfbac1f0c218e0","name":"","scope":null,"uncaught":false,"x":540,"y":320,"wires":[["e8e606dc288a1422"]]},{"id":"e8e606dc288a1422","type":"function","z":"0bdfbac1f0c218e0","name":"ADD ERROR INFO","func":"var payload=msg.payload;\nmsg.payload={};\n\nmsg.payload.error=msg.error;\nmsg.payload.error.payload=payload;\nreturn msg;","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":750,"y":320,"wires":[["22fcc7626accc6f6"]]},{"id":"dee65f181f179fbf","type":"inject","z":"0bdfbac1f0c218e0","name":"Inject","props":[{"p":"payload"},{"p":"topic","vt":"str"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"","payloadType":"date","x":390,"y":140,"wires":[["a3c8e2c4420b78c8"]]},{"id":"a3c8e2c4420b78c8","type":"function","z":"0bdfbac1f0c218e0","name":"Test input","func":"msg.payload = {\n    \"value\": {\n        \"mode\": \"single\",\n        \"normalization\": \"mean\",\n        \"data\": [\n            {\n                \"name\": \"Resource1\",\n                \"value\": [1, 2, 3, 4, 5]\n            },\n            {\n                \"name\": \"Resource2\",\n                \"value\": [6, 7, 8, 9, 15]\n            }\n        ],\n        \"numClusters\": 3,\n        \"numIterations\": 1000\n    }\n};\nreturn msg;\n","outputs":1,"noerr":0,"initialize":"","finalize":"","libs":[],"x":540,"y":140,"wires":[["61b34aabae37d071"]]}]
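Because the function body is hard to read inside the exported JSON, here is a plain-JavaScript restatement of the two normalization steps the Clustering function applies to a one-dimensional array of values. The formulas are the same as in the flow; this sketch is illustrative only and is not part of the export. Note that what the flow calls "mean" normalization divides by the standard deviation, i.e. it is z-score standardization.

// Min-Max scaling: maps values into [0, 1] using the array's own min and max.
function minMaxNormalization(data) {
    const min = Math.min(...data);
    const max = Math.max(...data);
    return data.map(value => (value - min) / (max - min));
}

// "Mean" normalization as implemented in the flow: subtract the mean and
// divide by the (population) standard deviation.
function meanNormalization(data) {
    const mean = data.reduce((sum, value) => sum + value, 0) / data.length;
    const stdDev = Math.sqrt(
        data.reduce((sum, value) => sum + Math.pow(value - mean, 2), 0) / data.length
    );
    return data.map(value => (value - mean) / stdDev);
}

// Example: meanNormalization([1, 2, 3, 4, 5]) returns an array with mean 0 and unit variance.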

Flow Info

Created 2 years, 2 months ago

Node Types

Core
  • catch (x1)
  • debug (x1)
  • function (x3)
  • http in (x2)
  • http response (x2)
  • inject (x1)
Other
  • tab (x1)

Tags

  • Normalization
  • Clustering
  • K-Means
  • KMeans
  • ML
  • AI
Copy the flow JSON above to your clipboard and then import it into Node-RED using the Import From > Clipboard (Ctrl-I) menu option.
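Note that the Clustering function node obtains its K-means implementation from global context via global.get('clusters'). Assuming the clusters npm package is used (its k(), iterations(), data(), and clusters() calls match the flow), it must be exposed to function nodes in your Node-RED settings.js before the flow will run, for example:

// settings.js – make the "clusters" module available to function nodes
// (assumed setup; adjust the module name to whatever K-means library you install)
functionGlobalContext: {
    clusters: require('clusters')
}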