Created by leostera, August 23, 2022.
import React, { useState, useCallback, useMemo } from 'react';
import * as ReactDOM from 'react-dom';
import ReactFlow, {
  ReactFlowProvider,
  useNodesState,
  useEdgesState,
  addEdge,
  useReactFlow,
  Handle,
  Position,
} from 'react-flow-renderer';
import './main.css';

const AudioContext = window.AudioContext || window.webkitAudioContext;
window.audioCtx = null;

// Audio graph registries, keyed by React Flow node id.
const NODES = {};
const EDGES = {};

// Draws the analyser's frequency data onto a canvas, redrawing on every animation frame.
function Oscilloscope({ width, height, audioAnalyser }) {
  let canvasRef = React.useRef(null);
  let [tick, setTick] = React.useState(0);

  // Drive re-renders with requestAnimationFrame; the tick counter invalidates the draw effect below.
  React.useLayoutEffect(() => {
    let timerId;
    const animate = () => {
      setTick((t) => t + 1);
      timerId = requestAnimationFrame(animate);
    };
    timerId = requestAnimationFrame(animate);
    return () => cancelAnimationFrame(timerId);
  }, []);

  React.useEffect(() => {
    const bufferLength = audioAnalyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    audioAnalyser.getByteFrequencyData(dataArray);

    const canvas = canvasRef.current;
    const canvasCtx = canvas.getContext('2d');

    // Clear the canvas, then draw one bar per frequency bin.
    canvasCtx.fillStyle = 'rgb(0, 0, 0)';
    canvasCtx.fillRect(0, 0, width, height);

    const barWidth = (width / bufferLength) * 2.5;
    let barHeight;
    let x = 0;

    for (let i = 0; i < bufferLength; i++) {
      barHeight = dataArray[i];
      canvasCtx.fillStyle = 'rgb(' + (barHeight + 100) + ',50,50)';
      canvasCtx.fillRect(x, height - barHeight / 2, barWidth, barHeight / 2);
      x += barWidth + 1;
    }
  }, [tick]);

  return <canvas ref={canvasRef} width={width} height={height} />;
}

// Custom React Flow node: pick an audio input device and visualise it through the shared analyser.
function AudioSourceNode(props) {
  NODES[props.id] = NODES[props.id] || {};
  let node = NODES[props.id];

  let [devices, setDevices] = React.useState({});

  // The analyser is created on the window when the app boots (see the click handler at the bottom).
  const audioAnalyser = window.audioAnalyser;
  audioAnalyser.minDecibels = -90;
  audioAnalyser.maxDecibels = 10;
  audioAnalyser.smoothingTimeConstant = 0.85;
  audioAnalyser.fftSize = 256;

  React.useEffect(() => {
    if (Object.keys(devices).length === 0) {
      navigator.mediaDevices.enumerateDevices().then((allDevices) => {
        let audioDevices = allDevices
          .filter((d) => d.kind === 'audioinput')
          .reduce((acc, d) => {
            acc[d.deviceId] = d;
            return acc;
          }, {});
        setDevices((_) => audioDevices);
      });
    }
  }, [setDevices]);

  const onChange = useCallback(
    (evt) => {
      let deviceId = evt.target.value;

      navigator.mediaDevices
        .getUserMedia({ audio: { deviceId } })
        .then((stream) => {
          let tracks = stream.getAudioTracks();
          let source = window.audioCtx.createMediaStreamSource(stream);
          let gainNode = window.audioCtx.createGain(); // created but not yet wired into the chain

          source.connect(audioAnalyser);
          audioAnalyser.connect(window.audioCtx.destination);

          console.log('stream', tracks[0].enabled);
          console.log('source', source);
        });

      node.deviceId = deviceId;
      console.log(node);
    },
    [devices]
  );

  return (
    <div>
      <label htmlFor="device">Device: </label>
      <select id="device" name="device" onChange={onChange}>
        {Object.entries(devices).map(([id, dev]) => {
          return (
            <option key={id} value={id}>
              {dev.label}
            </option>
          );
        })}
      </select>
      <br />
      <Oscilloscope width={400} height={120} audioAnalyser={audioAnalyser} />
      <Handle type="source" position={Position.Bottom} id="a" />
      <Handle type="source" position={Position.Bottom} id="b" />
    </div>
  );
}

const flowKey = 'example-flow';

const getNodeId = () => `randomnode_${+new Date()}`;

const initialNodes = [
  { id: '1', type: 'AudioSourceNode', data: { label: 'Node 1' }, position: { x: 100, y: 100 } },
  { id: '2', data: { label: 'Node 2' }, position: { x: 100, y: 500 } },
];

const initialEdges = [{ id: 'e1-2', source: '1', target: '2' }];

const SaveRestore = () => {
  const [nodes, setNodes, onNodesChange] = useNodesState(initialNodes);
  const [edges, setEdges, onEdgesChange] = useEdgesState(initialEdges);
  const [rfInstance, setRfInstance] = useState(null);
  const { setViewport } = useReactFlow();

  const onConnect = useCallback((params) => setEdges((eds) => addEdge(params, eds)), [setEdges]);

  // Persist the current flow (nodes, edges, viewport) to localStorage.
  const onSave = useCallback(() => {
    if (rfInstance) {
      const flow = rfInstance.toObject();
      localStorage.setItem(flowKey, JSON.stringify(flow));
    }
  }, [rfInstance]);

  // Restore a previously saved flow from localStorage.
  const onRestore = useCallback(() => {
    const restoreFlow = async () => {
      const flow = JSON.parse(localStorage.getItem(flowKey));

      if (flow) {
        const { x = 0, y = 0, zoom = 1 } = flow.viewport;
        setNodes(flow.nodes || []);
        setEdges(flow.edges || []);
        setViewport({ x, y, zoom });
      }
    };

    restoreFlow();
  }, [setNodes, setViewport]);

  const onAdd = useCallback(() => {
    const newNode = {
      id: getNodeId(),
      data: { label: 'Added node' },
      position: {
        x: Math.random() * window.innerWidth - 100,
        y: Math.random() * window.innerHeight,
      },
    };
    setNodes((nds) => nds.concat(newNode));
  }, [setNodes]);

  const nodeTypes = useMemo(() => ({ AudioSourceNode }), []);

  return (
    <ReactFlow
      nodes={nodes}
      edges={edges}
      onNodesChange={onNodesChange}
      onEdgesChange={onEdgesChange}
      onConnect={onConnect}
      onInit={setRfInstance}
      nodeTypes={nodeTypes}
    >
      <div className="save__controls">
        <button onClick={onSave}>save</button>
        <button onClick={onRestore}>restore</button>
        <button onClick={onAdd}>add node</button>
      </div>
    </ReactFlow>
  );
};

// The AudioContext can only start after a user gesture, so the app mounts on the first click.
let started = false;
window.addEventListener('click', () => {
  if (started) return;
  started = true;

  window.audioCtx = window.audioCtx || new AudioContext();
  window.audioAnalyser = window.audioCtx.createAnalyser();

  ReactDOM.render(
    <ReactFlowProvider>
      <SaveRestore />
    </ReactFlowProvider>,
    document.getElementById('app')
  );
});
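
Note that NODES and EDGES are declared as the audio-graph registries, but onConnect above only updates the React Flow edge state; nothing connects Web Audio nodes when an edge is drawn. Below is a minimal sketch of how that wiring could look, assuming each NODES entry were extended with a hypothetical audioNode field holding the AudioNode created for that flow node. This is not part of the gist, only an illustration of where the audio-side connection could live.

// Hypothetical replacement for onConnect inside SaveRestore: wire the audio graph
// alongside the visual edge. Assumes NODES[id].audioNode exists (the gist does not set it yet).
const onConnect = useCallback((params) => {
  const sourceAudio = NODES[params.source]?.audioNode;
  const targetAudio = NODES[params.target]?.audioNode;
  if (sourceAudio && targetAudio) {
    sourceAudio.connect(targetAudio); // audible connection
    EDGES[`${params.source}-${params.target}`] = params; // remember it in the registry
  }
  setEdges((eds) => addEdge(params, eds)); // visual connection in React Flow
}, [setEdges]);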