Node-RED x OpenAI Realtime API Demo
This flow shows how to connect Node-RED to OpenAI's Realtime API for live audio streaming and AI-powered responses. It takes audio from your mic, sends it to OpenAI for real-time processing, and returns AI-generated replies—all within the Node-RED dashboard.
Along the way, you'll see a couple of cool pulsating orb visualizations that react to both the input and output audio. It’s a fun little visual touch as you chat with the real-time model.
This one's for anyone curious about how to use Node-RED's dashboard with OpenAI's latest Realtime API. Just pop open the dashboard, allow browser permission for your mic, and enjoy!
[{"id":"9283e5e4e8905e17","type":"tab","label":"🗣️ OpenAI Realtime API Demo","disabled":false,"info":""},{"id":"54e31aa438e8aac7","type":"ui_template","z":"9283e5e4e8905e17","group":"abddc29c4bf79647","name":"Dashboard Mic In","order":1,"width":0,"height":0,"format":"<style>\n body {\n background-color: black !important; /* Sets the entire dashboard background to black */\n }\n\n .nr-dashboard-cardpanel,\n .nr-dashboard-template,\n md-card {\n background-color: black !important; /* Sets panels and cards background to black */\n color: white !important; /* Ensures text is readable against dark background */\n border: none !important; /* Removes any borders around the widgets */\n box-shadow: none !important; /* Removes any shadow effect around the widget (optional) */\n }\n\n /* Specifically targets the card panel to remove its border */\n .nr-dashboard-theme ui-card-panel {\n background-color: black !important; /* Matches the background with black */\n border: none !important; /* Removes the white border */\n }\n</style>\n\n<div>\n <canvas id=\"audioInputOrb\" width=\"400\" height=\"400\" style=\"display: block; margin: 0 auto;\"></canvas>\n <script>\n (async function(scope) {\n try {\n // Request microphone access\n const stream = await navigator.mediaDevices.getUserMedia({\n audio: {\n echoCancellation: true,\n noiseSuppression: true,\n autoGainControl: true\n }\n });\n\n const audioContext = new AudioContext({ sampleRate: 24000 });\n\n // PCM Processor Script for handling audio chunks\n const processorScript = `\n class PCMProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.chunkSize = 16384; // Set chunk size to 16,384 samples (32KB for PCM16 data)\n this.buffer = []; // Buffer to accumulate frames\n }\n\n process(inputs, outputs, parameters) {\n const input = inputs[0]; // Get input audio data\n if (input.length > 0) {\n const channelData = input[0]; // Get data from the first input channel\n // Accumulate audio frames in the buffer\n this.buffer.push(...channelData);\n // If we have enough frames to match the chunk size, send the audio data\n if (this.buffer.length >= this.chunkSize) {\n const audioChunk = this.buffer.slice(0, this.chunkSize); // Take a chunk\n this.port.postMessage(audioChunk); // Send the chunk to the main thread\n // Remove the sent frames from the buffer\n this.buffer = this.buffer.slice(this.chunkSize);\n }\n }\n return true; // Keep the processor alive\n }\n }\n registerProcessor('pcm-processor', PCMProcessor);\n `;\n\n // Load the processor\n const blob = new Blob([processorScript], { type: 'application/javascript' });\n const url = URL.createObjectURL(blob);\n await audioContext.audioWorklet.addModule(url);\n\n // Setup analyzer and processor nodes\n const analyzer = audioContext.createAnalyser(); // Corrected line\n analyzer.fftSize = 256;\n const mediaStreamSource = audioContext.createMediaStreamSource(stream);\n const processorNode = new AudioWorkletNode(audioContext, 'pcm-processor');\n\n mediaStreamSource.connect(analyzer);\n mediaStreamSource.connect(processorNode);\n processorNode.connect(audioContext.destination);\n\n // Emit audio data to Node-RED\n processorNode.port.onmessage = (event) => {\n const audioData = event.data;\n scope.send({ payload: audioData });\n };\n\n // Orb Visualization Setup\n const canvas = document.getElementById('audioInputOrb');\n const ctx = canvas.getContext('2d');\n const centerX = canvas.width / 2;\n const centerY = canvas.height / 2;\n const bufferLength = analyzer.frequencyBinCount;\n const dataArray = new Uint8Array(bufferLength);\n\n // Amorphic edge animation parameters\n const noiseFrequency = 0.002; // Slow edge motion\n const noiseAmplitude = 10; // Subtle deformation of the edges\n let time = 0; // Time variable for slow, local deformation\n\n // Smooth color transition setup for the orb\n let color = [getRandomInt(255), getRandomInt(255), getRandomInt(255)];\n let targetColor = [getRandomInt(255), getRandomInt(255), getRandomInt(255)];\n const transitionSpeed = 0.01; // Adjust this to control the speed of transition\n\n // Helper function for Perlin-like organic motion\n function noise(t) {\n return Math.sin(t) * Math.cos(t * 1.3) * 0.7;\n }\n\n function drawOrb() {\n requestAnimationFrame(drawOrb);\n\n analyzer.getByteFrequencyData(dataArray);\n const averageLevel = dataArray.reduce((sum, value) => sum + value, 0) / bufferLength;\n const baseRadius = Math.max(50, (averageLevel / 255) * 150 + 50); // Orb size grows with audio level\n\n // Smooth transition for color\n color = smoothColorTransition(color, targetColor, transitionSpeed);\n if (colorDistance(color, targetColor) < 1) {\n targetColor = [getRandomInt(255), getRandomInt(255), getRandomInt(255)];\n }\n\n // Clear canvas\n ctx.clearRect(0, 0, canvas.width, canvas.height);\n\n // Create a gradient for the orb with a slight glowing effect\n const gradient = ctx.createRadialGradient(centerX, centerY, baseRadius * 0.1, centerX, centerY, baseRadius);\n gradient.addColorStop(0, `rgb(${color[0]}, ${color[1]}, ${color[2]})`);\n gradient.addColorStop(1, 'rgba(0, 0, 0, 0.5)');\n\n // Start drawing the amorphic orb with dynamic radius variation, but no spinning\n ctx.beginPath();\n const totalVertices = 100; // More vertices create smoother motion\n\n for (let i = 0; i <= totalVertices; i++) {\n const angle = (i / totalVertices) * Math.PI * 2;\n // Each vertex has its own small random offset to create local deformation\n const localNoise = noise(time + i * 0.1) * noiseAmplitude;\n const dynamicRadius = baseRadius + localNoise;\n\n const x = centerX + Math.cos(angle) * dynamicRadius;\n const y = centerY + Math.sin(angle) * dynamicRadius;\n\n if (i === 0) {\n ctx.moveTo(x, y);\n } else {\n ctx.lineTo(x, y);\n }\n }\n ctx.closePath();\n\n // Apply gradient and fill the orb\n ctx.fillStyle = gradient;\n ctx.shadowBlur = 30;\n ctx.shadowColor = `rgb(${color[0]}, ${color[1]}, ${color[2]})`;\n ctx.fill();\n\n time += 0.01; // Increment time for smooth animation of vertices\n }\n\n function smoothColorTransition(current, target, speed) {\n return current.map((c, i) => c + (target[i] - c) * speed);\n }\n\n function colorDistance(color1, color2) {\n return Math.sqrt(\n Math.pow(color1[0] - color2[0], 2) +\n Math.pow(color1[1] - color2[1], 2) +\n Math.pow(color1[2] - color2[2], 2)\n );\n }\n\n function getRandomInt(max) {\n return Math.floor(Math.random() * max);\n }\n\n drawOrb();\n\n } catch (err) {\n console.error('Error accessing microphone:', err);\n alert('Could not access microphone. Please check permissions.');\n }\n })(scope);\n </script>\n</div>","storeOutMessages":false,"fwdInMessages":false,"resendOnRefresh":false,"templateScope":"local","className":"","x":210,"y":260,"wires":[["8440299d91742fca"]]},{"id":"710e19fe934f09e1","type":"ui_template","z":"9283e5e4e8905e17","group":"abddc29c4bf79647","name":"Dashboard Audio Out","order":4,"width":0,"height":0,"format":"<div>\n <canvas id=\"audioOutputOrb\" width=\"400\" height=\"400\" style=\"display: block; margin: 0 auto;\"></canvas>\n <script>\n console.log(\"Initializing WebSocket connection...\");\n\n const socketOut = new WebSocket('ws://' + window.location.hostname + ':1880/audio_out');\n const audioContext = new (window.AudioContext || window.webkitAudioContext)({ sampleRate: 24000 });\n socketOut.binaryType = 'arraybuffer'; // Expect binary data\n\n let audioQueueTime = audioContext.currentTime; // Track when to play the next buffer\n const pcmQueue = []; // Buffer to store incoming audio chunks for queuing\n\n // Create an analyzerNode for visualizing output audio levels\n const analyzer = audioContext.createAnalyser(); // Corrected line\n analyzer.fftSize = 256; // FFT size for audio analysis\n const bufferLength = analyzer.frequencyBinCount;\n const dataArray = new Uint8Array(bufferLength); // Array to hold frequency data\n\n const canvas = document.getElementById('audioOutputOrb');\n const ctx = canvas.getContext('2d');\n const centerX = canvas.width / 2;\n const centerY = canvas.height / 2;\n\n // Amorphic edge animation parameters\n const noiseFrequency = 0.002; // Slow edge motion\n const noiseAmplitude = 10; // Subtle deformation of the edges\n let time = 0; // Time variable for slow, local deformation\n\n // Smooth color transition setup for the orb\n let color = [getRandomInt(255), getRandomInt(255), getRandomInt(255)];\n let targetColor = [getRandomInt(255), getRandomInt(255), getRandomInt(255)];\n const transitionSpeed = 0.01; // Adjust this to control the speed of transition\n\n // Helper function for Perlin-like organic motion\n function noise(t) {\n return Math.sin(t) * Math.cos(t * 1.3) * 0.7;\n }\n\n socketOut.onopen = () => {\n console.log(\"WebSocket connection established.\");\n };\n\n socketOut.onmessage = (event) => {\n const arrayBuffer = event.data;\n const pcmData = new DataView(arrayBuffer);\n addPCMToQueue(pcmData);\n };\n\n socketOut.onerror = (error) => {\n console.error(\"WebSocket error:\", error);\n };\n\n socketOut.onclose = () => {\n console.log(\"WebSocket connection closed.\");\n };\n\n function addPCMToQueue(pcmData) {\n const float32Array = convertPCM16ToFloat32(pcmData);\n\n const audioBuffer = audioContext.createBuffer(1, float32Array.length, 24000);\n audioBuffer.copyToChannel(float32Array, 0);\n\n const source = audioContext.createBufferSource();\n source.buffer = audioBuffer;\n source.connect(analyzer);\n analyzer.connect(audioContext.destination);\n\n const bufferDuration = audioBuffer.duration;\n\n if (audioContext.currentTime < audioQueueTime) {\n source.start(audioQueueTime);\n } else {\n source.start(audioContext.currentTime);\n audioQueueTime = audioContext.currentTime + bufferDuration;\n }\n\n audioQueueTime += bufferDuration;\n }\n\n function convertPCM16ToFloat32(pcmData) {\n const numSamples = pcmData.byteLength / 2;\n const float32Array = new Float32Array(numSamples);\n\n for (let i = 0; i < numSamples; i++) {\n const sample = pcmData.getInt16(i * 2, true);\n float32Array[i] = sample / 0x8000;\n }\n\n return float32Array;\n }\n\n function drawOrb() {\n requestAnimationFrame(drawOrb);\n\n analyzer.getByteFrequencyData(dataArray);\n const averageLevel = dataArray.reduce((sum, value) => sum + value, 0) / bufferLength;\n const baseRadius = Math.max(50, (averageLevel / 255) * 150 + 50); // Orb size grows with audio level\n\n // Smooth transition for color\n color = smoothColorTransition(color, targetColor, transitionSpeed);\n if (colorDistance(color, targetColor) < 1) {\n targetColor = [getRandomInt(255), getRandomInt(255), getRandomInt(255)];\n }\n\n // Clear canvas\n ctx.clearRect(0, 0, canvas.width, canvas.height);\n\n // Create a gradient for the orb with a slight glowing effect\n const gradient = ctx.createRadialGradient(centerX, centerY, baseRadius * 0.1, centerX, centerY, baseRadius);\n gradient.addColorStop(0, `rgb(${color[0]}, ${color[1]}, ${color[2]})`);\n gradient.addColorStop(1, 'rgba(0, 0, 0, 0.5)');\n\n // Start drawing the amorphic orb with dynamic radius variation, but no spinning\n ctx.beginPath();\n const totalVertices = 100; // More vertices create smoother motion\n\n for (let i = 0; i <= totalVertices; i++) {\n const angle = (i / totalVertices) * Math.PI * 2;\n // Each vertex has its own small random offset to create local deformation\n const localNoise = noise(time + i * 0.1) * noiseAmplitude;\n const dynamicRadius = baseRadius + localNoise;\n\n const x = centerX + Math.cos(angle) * dynamicRadius;\n const y = centerY + Math.sin(angle) * dynamicRadius;\n\n if (i === 0) {\n ctx.moveTo(x, y);\n } else {\n ctx.lineTo(x, y);\n }\n }\n ctx.closePath();\n\n // Apply gradient and fill the orb\n ctx.fillStyle = gradient;\n ctx.shadowBlur = 30;\n ctx.shadowColor = `rgb(${color[0]}, ${color[1]}, ${color[2]})`;\n ctx.fill();\n\n time += 0.01; // Increment time for smooth animation of vertices\n }\n\n function smoothColorTransition(current, target, speed) {\n return current.map((c, i) => c + (target[i] - c) * speed);\n }\n\n function colorDistance(color1, color2) {\n return Math.sqrt(\n Math.pow(color1[0] - color2[0], 2) +\n Math.pow(color1[1] - color2[1], 2) +\n Math.pow(color1[2] - color2[2], 2)\n );\n }\n\n function getRandomInt(max) {\n return Math.floor(Math.random() * max);\n }\n\n drawOrb();\n\n </script>\n</div>","storeOutMessages":false,"fwdInMessages":false,"resendOnRefresh":false,"templateScope":"local","className":"","x":220,"y":300,"wires":[[]]},{"id":"50bd762fa90f0d24","type":"websocket out","z":"9283e5e4e8905e17","name":"audio to browser","server":"e84623b91094cf29","client":"","x":1070,"y":500,"wires":[]},{"id":"153ac2f02dcbfac5","type":"websocket in","z":"9283e5e4e8905e17","name":"In from OAI","server":"","client":"8f3680c761efb573","x":210,"y":460,"wires":[["61e503d7a678ac53"]]},{"id":"bbac3b7479cc7b1d","type":"base64","z":"9283e5e4e8905e17","name":"buffer to base64","action":"","property":"payload.delta","x":680,"y":400,"wires":[["3a59879ac3df6b7a"]]},{"id":"309d7137a3562b76","type":"mqtt out","z":"9283e5e4e8905e17","name":"","topic":"/audio/response","qos":"2","retain":"false","respTopic":"","contentType":"","userProps":"","correl":"","expiry":"","broker":"296894e123bdad90","x":1080,"y":400,"wires":[]},{"id":"0080e28f407e2692","type":"mqtt in","z":"9283e5e4e8905e17","name":"","topic":"/audio/response","qos":"2","datatype":"auto-detect","broker":"296894e123bdad90","nl":false,"rap":true,"rh":"0","inputs":0,"x":860,"y":500,"wires":[["50bd762fa90f0d24"]]},{"id":"8440299d91742fca","type":"function","z":"9283e5e4e8905e17","name":"Signal Processing","func":"// msg.payload contains the Float32Array audio data from the Template node\nlet float32Array = msg.payload;\n\n// Convert Float32 to PCM16 using the provided logic\nlet pcm16Buffer = convertToPCM16(float32Array);\n\n// Base64 encode the PCM16 audio chunk using Buffer\nmsg.payload = base64EncodeAudio(pcm16Buffer);\n\nreturn msg;\n\n// Function to convert Float32 to PCM16 (as in the provider's example)\nfunction convertToPCM16(float32Array) {\n const buffer = new ArrayBuffer(float32Array.length * 2); // 2 bytes per PCM16 sample\n const view = new DataView(buffer);\n let offset = 0;\n\n for (let i = 0; i < float32Array.length; i++, offset += 2) {\n let s = Math.max(-1, Math.min(1, float32Array[i]));\n view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);\n }\n\n return buffer;\n}\n\n// Function to Base64 encode the PCM16 buffer using Buffer\nfunction base64EncodeAudio(buffer) {\n // Convert the ArrayBuffer to a Node.js Buffer\n const nodeBuffer = Buffer.from(buffer);\n \n // Encode the Buffer into a Base64 string\n return nodeBuffer.toString('base64');\n}","outputs":1,"timeout":0,"noerr":0,"initialize":"","finalize":"","libs":[],"x":450,"y":260,"wires":[["4bcf1f5b35606baf"]]},{"id":"4bcf1f5b35606baf","type":"change","z":"9283e5e4e8905e17","name":"transform payload","rules":[{"t":"move","p":"payload","pt":"msg","to":"_payload","tot":"msg"},{"t":"set","p":"payload","pt":"msg","to":"{}","tot":"json"},{"t":"set","p":"payload.type","pt":"msg","to":"input_audio_buffer.append","tot":"str"},{"t":"move","p":"_payload","pt":"msg","to":"payload.audio","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":670,"y":260,"wires":[["de27cbd1166279da"]]},{"id":"de27cbd1166279da","type":"websocket out","z":"9283e5e4e8905e17","name":"Out to OAI","server":"","client":"8f3680c761efb573","x":870,"y":280,"wires":[]},{"id":"5d4cd78a14749a1c","type":"inject","z":"9283e5e4e8905e17","name":"update session","props":[{"p":"payload.type","v":"session.update","vt":"str"},{"p":"payload.session.voice","v":"shimmer","vt":"str"},{"p":"payload.session.turn_detection.type","v":"server_vad","vt":"str"},{"p":"payload.session.tool_choice","v":"auto","vt":"str"},{"p":"payload.session.tools","v":"[]","vt":"json"}],"repeat":"","crontab":"","once":true,"onceDelay":"0.1","topic":"","x":680,"y":300,"wires":[["de27cbd1166279da"]]},{"id":"bda801ad02c8f446","type":"debug","z":"9283e5e4e8905e17","name":"error log","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","statusVal":"","statusType":"auto","x":440,"y":360,"wires":[]},{"id":"9ae1e57e199263b6","type":"switch","z":"9283e5e4e8905e17","name":"response router","property":"payload.type","propertyType":"msg","rules":[{"t":"eq","v":"error","vt":"str"},{"t":"eq","v":"response.audio.delta","vt":"str"},{"t":"eq","v":"response.output_item.done","vt":"str"},{"t":"else"}],"checkall":"true","repair":false,"outputs":4,"x":420,"y":420,"wires":[["bda801ad02c8f446"],["bbac3b7479cc7b1d"],["04c4c83668045a8d"],["87afe7ba1d5c7b9e"]]},{"id":"61e503d7a678ac53","type":"json","z":"9283e5e4e8905e17","name":"to JSON","property":"payload","action":"obj","pretty":false,"x":220,"y":420,"wires":[["9ae1e57e199263b6"]]},{"id":"87afe7ba1d5c7b9e","type":"debug","z":"9283e5e4e8905e17","name":"extra log","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","statusVal":"","statusType":"auto","x":440,"y":480,"wires":[]},{"id":"04c4c83668045a8d","type":"switch","z":"9283e5e4e8905e17","name":"function call router","property":"payload.item.type","propertyType":"msg","rules":[{"t":"eq","v":"function_call","vt":"str"}],"checkall":"true","repair":false,"outputs":1,"x":690,"y":440,"wires":[["c50a152da792062f"]],"inputLabels":["response.output_item.done"],"outputLabels":["function calls"]},{"id":"c50a152da792062f","type":"debug","z":"9283e5e4e8905e17","name":"debug 239","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","statusVal":"","statusType":"auto","x":890,"y":440,"wires":[]},{"id":"3a59879ac3df6b7a","type":"change","z":"9283e5e4e8905e17","name":"","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.delta","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":880,"y":400,"wires":[["309d7137a3562b76"]]},{"id":"efdc08dca7c88e88","type":"comment","z":"9283e5e4e8905e17","name":"✨ Instructions 👀","info":"# Instructions\n\n## Setup\n\nTo use this flow as is, be sure to:\n\n1. Create a Node-RED environment variable named:\n\nBEARER_OPENAI_API_KEY\n\n... with the value set to:\n\nBearer sk-your-openai-api-key\n\n** Note be sure to include the word \"Bearer\" before your API key.\n\n## Notes\n\n- No external or 3rd-party libraries are used in this demo. We're using the \npower of Node-RED to interact with OpenAI's Realtime API directly via websockets.\n\n- Interestingly, I was able to get proper websocket messaging out to OpenAI after\ndopping in the MQTT proxy. Not quite sure why yet, but I'm guessing it has \nsomething to do with message timing. I'm actually ok with this for now. I\nlike having the advanced features of MQTT 5 to play around with here.\n\n- The _function call router_ node passes along any function call objects returned\nin the OpenAI response. You can drop in a downstream __switch__ node to route your\nfunction call objects to their corresponding processing nodes.","x":210,"y":180,"wires":[],"icon":"font-awesome/fa-sticky-note-o"},{"id":"abddc29c4bf79647","type":"ui_group","name":"Node-RED x OpenAI Realtime API","tab":"173f5e1e39e73ebe","order":1,"disp":true,"width":"16","collapse":false,"className":""},{"id":"e84623b91094cf29","type":"websocket-listener","path":"/audio_out","wholemsg":"false"},{"id":"8f3680c761efb573","type":"websocket-client","path":"wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview","tls":"","wholemsg":"false","hb":"0","subprotocol":"","headers":[{"keyType":"Authorization","keyValue":"","valueType":"env","valueValue":"BEARER_OPENAI_API_KEY"},{"keyType":"other","keyValue":"OpenAI-Beta","valueType":"other","valueValue":"realtime=v1"}]},{"id":"296894e123bdad90","type":"mqtt-broker","name":"","broker":"localhost","port":"1883","clientid":"","autoConnect":true,"usetls":false,"protocolVersion":"5","keepalive":"60","cleansession":true,"autoUnsubscribe":true,"birthTopic":"","birthQos":"0","birthRetain":"false","birthPayload":"","birthMsg":{},"closeTopic":"","closeQos":"0","closeRetain":"false","closePayload":"","closeMsg":{},"willTopic":"","willQos":"0","willRetain":"false","willPayload":"","willMsg":{},"userProps":"","sessionExpiry":""},{"id":"173f5e1e39e73ebe","type":"ui_tab","name":"AI Chat","icon":"dashboard","disabled":false,"hidden":false}]