RNBO patch as speaker with 6 inputs (working example)
index.html
@@ -4,7 +4,7 @@
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>9 x single-band rnbo patches</title>
    <title>channel pass through rnbo patches</title>
  </head>
  <body>
    <div id="app"></div>
BIN   public/audio/LMusik_RSprache.mp3   Normal file
Binary file not shown.

BIN   public/audio/stereo-test1.mp3   Normal file
Binary file not shown.

BIN   public/audio/stereo-test2.wav   Normal file
Binary file not shown.

156   public/patches/passthrough_6Kanal_Test.rnbopat.export.json   Normal file
File diff suppressed because one or more lines are too long

77    public/patches/passthrough_Stereo.rnbopat.export.json   Normal file
File diff suppressed because one or more lines are too long
src/composables/Player.ts
@@ -1,43 +1,34 @@
let CONNECTED = false
export function setupPlayButton(
  sources: Map<number, AudioBufferSourceNode>,
  gains: Map<number, GainNode>,
  source: AudioNode[],
  gain: GainNode,
  context: AudioContext
) {
  const button = document.createElement('button')
  button.textContent = '▶ Play All'
  button.textContent = '▶ Connect Source'
  button.style.fontSize = '1.5rem'
  button.style.padding = '10px'
  button.style.marginTop = '20px'

  button.onclick = () => {
    const startTime = context.currentTime + 0.1 // 100 ms in the future
    const rampTime = 2
    // 1️⃣ Ramp the master gain up to 0 dB
    master.gain.setValueAtTime(0.001, startTime)
    master.gain.exponentialRampToValueAtTime(1.0, startTime + rampTime)

    sources.forEach((source, freq) => {
      const gainNode = gains.get(freq)
      if (!gainNode) return

      // Ramp up the GainNode
      const gain = gainNode.gain
      gain.setValueAtTime(0.001, startTime)
      gain.exponentialRampToValueAtTime(1.0, startTime + rampTime)

      // Start the BufferSource
      try {
        source.start(startTime)
        console.log(`Started ${freq}Hz at ${startTime.toFixed(2)}`)
      } catch (err) {
        console.warn(`Start error for ${freq}Hz`, err)
      }
    })

    button.disabled = true
    button.textContent = '🔊 Playing...'
    if (CONNECTED) {
      source.forEach((n) => {
        if (n instanceof AudioBufferSourceNode) {
          n.disconnect()
          button.textContent = 'Disconnected...'
          CONNECTED = false
        }
      })
    } else {
      source.forEach(n => n.start())
      gain.gain.setValueAtTime(0.001, startTime)
      if (source instanceof AudioBufferSourceNode) source.start(startTime)
      gain.gain.linearRampToValueAtTime(1, startTime + rampTime)
      button.textContent = '🔊 Connected and Playing...'
      CONNECTED = true
    }

    document.body.appendChild(button)
  }

  document.body.appendChild(button)
}
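
A minimal usage sketch for the new setupPlayButton signature (an array of source nodes, a shared input gain, and the AudioContext); the node names below are illustrative assumptions, not part of this commit:

import { setupPlayButton } from './composables/Player'

const ctx = new AudioContext()
const inputGain = ctx.createGain()
inputGain.connect(ctx.destination)
const src1 = ctx.createBufferSource() // assumes a decoded stereo AudioBuffer is assigned elsewhere
const src2 = ctx.createBufferSource()
setupPlayButton([src1, src2], inputGain, ctx)
// the context still needs a user gesture before audio can run
document.addEventListener('click', () => ctx.resume(), { once: true })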

429   src/main.ts
@@ -1,129 +1,291 @@
import { createDevice, type Device } from '@rnbo/js'
import { attachDBValueListener } from './dbValueListener'
import { fetchOrLoadBuffer, createBufferSource } from './composables/BufferStore'
import RNBO, { createDevice, TimeNow } from '@rnbo/js'
import { fetchOrLoadBuffer } from './composables/BufferStore'
import { setupPlayButton } from './composables/Player';
import { getPreferredDevice } from './lib/microphone';
import { getAudioContext } from './audio/context'

import patcherUrl from '../public/patches/controlvalue/LAF-Controll-Values_Simple_Band1000.rnbopat.export.json?url'
import patcherUrl from '../public/patches/passthrough_6Kanal_Test.rnbopat.export.json?url'

interface DeviceInfo {
  id: number;
  name: string;
  device: Device;
  audioNode: AudioNode;
}

// RNBO
let patcherPromise: Promise<any>

// create context & master gain
const mobile = /Mobi|Android|iPhone|iPad|iPod/i.test(navigator.userAgent)

//
// Gains to control the audio
//

const bands = mobile ? [150, 1500, 8000] : [63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000]
const devices = new Map<number, DeviceInfo>()
const sources = new Map<number, AudioBufferSourceNode>()
const gains = new Map<number, GainNode>()

// Audio files
const urlsWebM = mobile ? [
  { name: bands[0], url: 'audio/masking/3bands/low_band_256kbps.webm' },
  { name: bands[1], url: 'audio/masking/3bands/mid_band_256kbps.webm' },
  { name: bands[2], url: 'audio/masking/3bands/high_band_256kbps.webm' },
] :
[
  { name: bands[0], url: 'audio/masking/9bands/Quellrauschen63.webm' },
  { name: bands[1], url: 'audio/masking/9bands/Quellrauschen125.webm' },
  { name: bands[2], url: 'audio/masking/9bands/Quellrauschen250.webm' },
  { name: bands[3], url: 'audio/masking/9bands/Quellrauschen500.webm' },
  { name: bands[4], url: 'audio/masking/9bands/Quellrauschen1000.webm' },
  { name: bands[5], url: 'audio/masking/9bands/Quellrauschen2000.webm' },
  { name: bands[6], url: 'audio/masking/9bands/Quellrauschen4000.webm' },
  { name: bands[7], url: 'audio/masking/9bands/Quellrauschen8000.webm' },
  { name: bands[8], url: 'audio/masking/9bands/Quellrauschen16000.webm' },
]

async function getPatcher() {
  if (!patcherPromise) patcherPromise = fetch(patcherUrl).then(r => r.json())
  return patcherPromise
}

async function init() {
  // Step 0: microphone permission
  await initMicrophone()

  // Step 1: context
  const ctx = getAudioContext()
  await ctx.suspend()
  initMicrophone(ctx)
  // Step 2: fetch the patcher
  await getPatcher()
  // Step 3: create all devices
  const newDevices = await Promise.all(
    bands.map(async (freq, index) => {
      const device = await createBandDevice(freq, ctx) as DeviceInfo
      const patcher = await getPatcher()
      const rnboDevice = await createDevice({
        context: ctx,
        patcher
      })

      // Initial values
      const events = {
        /**
         * Sends a numeric event input to the device.
         * @param {string} name – named exactly as in the Max patch
         * @param {number} val
         * @param {number} [when=context.currentTime] – seconds relative to the audio clock
         */
        send(name: any, val: any, when = TimeNow) {
          const event = new RNBO.MessageEvent(when, name, val)
          rnboDevice.scheduleEvent(event)
          console.log(`Input event ${name} scheduled.`)
        },
      };
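
      // Illustrative sketch, not part of this patch: the six initial mute values below
      // could equally be sent in one loop over the message inlets in7..in12, e.g.
      //   for (let i = 7; i <= 12; i++) events.send(`in${i}`, 0)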
      // PARAMETER

      let in7 = 0 // should eventually come from the corresponding text field
      let in8 = 0
      let in9 = 0
      let in10 = 0
      let in11 = 0
      let in12 = 0

      events.send("in7", in7)   // channel 1 mute
      events.send("in8", in8)   // channel 2 mute
      events.send("in9", in9)   // channel 3 mute
      events.send("in10", in10) // channel 4 mute
      events.send("in11", in11) // channel 5 mute
      events.send("in12", in12) // channel 6 mute
      console.log("All ins set")

      // Step 4: fetch all AudioBuffers
      await fetchOrLoadBuffer(freq.toString(),
        urlsWebM[index].url,
        ctx
      )
      const sourceNode = await createBufferSource(freq.toString(), ctx)
      const bandGainNode = ctx.createGain()
      const buffer1 = await fetchOrLoadBuffer("100000",
        'audio/LMusik_RSprache.mp3',
        ctx
      )

      const buffer2 = await fetchOrLoadBuffer("100001",
        'audio/stereo-test1.mp3',
        ctx
      )

      const buffer3 = await fetchOrLoadBuffer("100002",
        'audio/stereo-test2.wav',
        ctx
      )
      const sourceNode1 = ctx.createBufferSource() // stereo
      const sourceNode2 = ctx.createBufferSource() // stereo
      const sourceNode3 = ctx.createBufferSource() // stereo

      sourceNode1.buffer = buffer1
      sourceNode2.buffer = buffer2
      sourceNode3.buffer = buffer3
      sourceNode1.loop = true
      sourceNode2.loop = true
      sourceNode3.loop = true

      const channelMergerL = ctx.createChannelMerger()
      const channelMergerR = ctx.createChannelMerger()

      // CHANNEL INTERPRETATION AND COUNT MODE
      channelMergerL.channelInterpretation = 'speakers'
      channelMergerR.channelInterpretation = 'speakers'
      rnboDevice.node.channelCountMode = 'max'
      rnboDevice.node.channelInterpretation = 'speakers'

      const inputGainNode = ctx.createGain()

      // Connect the audio sources to a merger

      const { left: sourceNode1L, right: sourceNode1R } = splitStereoIntoMonoChannels(sourceNode1)
      const { left: sourceNode2L, right: sourceNode2R } = splitStereoIntoMonoChannels(sourceNode2)
      const { left: sourceNode3L, right: sourceNode3R } = splitStereoIntoMonoChannels(sourceNode3)
      console.log({ sourceNode1L, sourceNode1R }, { sourceNode2L, sourceNode2R }, { sourceNode3L, sourceNode3R })
      console.log({ channelMergerL, channelMergerR })

      // sourceNode1L.connect(ctx.destination, 0, 0)

      sourceNode1L.connect(rnboDevice.node, 0, 0)
      sourceNode1R.connect(rnboDevice.node, 0, 1)

      sourceNode2L.connect(rnboDevice.node, 0, 2)
      sourceNode2R.connect(rnboDevice.node, 0, 3)
      sourceNode3L.connect(rnboDevice.node, 0, 4)
      sourceNode3R.connect(rnboDevice.node, 0, 5)
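
      // Routing summary (descriptive note, not in the original patch): each stereo file
      // contributes one mono L and one mono R feed, so the RNBO node's single input
      // carries six channels (0..5). Assuming the patch maps its inlets 1:1 to channels,
      // the mute messages line up as:
      //   source 1  L -> channel 0 (in7),  R -> channel 1 (in8)
      //   source 2  L -> channel 2 (in9),  R -> channel 3 (in10)
      //   source 3  L -> channel 4 (in11), R -> channel 5 (in12)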
      const channelSplitter = ctx.createChannelSplitter(6)

      channelMergerL.connect(channelSplitter)

      // Step 5: connect the AudioNodes
      sourceNode.node?.connect(bandGainNode)
      bandGainNode.connect(masterGain)
      // update the source list
      if (sourceNode.node instanceof AudioBufferSourceNode && bandGainNode instanceof GainNode) {
        sources.set(freq, sourceNode.node)
        gains.set(freq, bandGainNode)
      }
      return { freq, device }
    }
  ))
  registerDevicesWithListeners(newDevices)
  console.log("RNBO Devices created")

  devices.forEach((devices, freq) => {
    try {
      if (devices && devices.device) {
        attachDBValueListener(devices.device, freq)
      }
    } catch {
      console.warn("couldn't attach rnbo listener")
    }
  })
  // channelMerger.connect(ctx.destination) // 6

  rnboDevice.node.connect(ctx.destination)
  console.log("RNBO Device created and connected with the input source")
  console.log("Everything is set up, display a play button")
  setupPlayButton(sources, gains, ctx)
  setupPlayButton([sourceNode1, sourceNode2, sourceNode3], inputGainNode, ctx)

  masterGain.connect(ctx.destination)
  // UI

  const ui = document.createElement('div')
  ui.style.padding = '20px'

  // Append the report info
  ui.appendChild(createNodeInspector('RNBO Device', rnboDevice.node))

  ui.appendChild(createNodeInspectorAndToggle('Scape Source LEFT 1', sourceNode1L, 0, events))
  ui.appendChild(createNodeInspectorAndToggle('Scape Source 2', sourceNode2, 1, events))
  ui.appendChild(createNodeInspectorAndToggle('Scape Source 3', sourceNode3, 2, events))
  ui.appendChild(createNodeInspector('ChannelMerger', channelMergerL))
  ui.appendChild(createNodeInspector('ChannelMerger', channelMergerR))
  ui.appendChild(createNodeInspector('channelSplitter', channelSplitter))

  console.log("rnboDevice.numInputChannels = " + rnboDevice.numInputChannels)
  console.log("rnboDevice.numOutputChannels = " + rnboDevice.numOutputChannels)
  console.log("rnboDevice.node.numberOfInputs = " + rnboDevice.node.numberOfInputs)
  console.log("rnboDevice.node.numberOfOutputs = " + rnboDevice.node.numberOfOutputs)

  document.body.appendChild(ui)
}

init() // hey ho lets go

/**
 * Enables the microphone so it can be used.
 */

function registerDevicesWithListeners(newDevices: { freq: number, device: any }[]) {
  const devices = new Map<number, any>()
  // resume only after interaction inside the iframe (mobile safe)
  window.addEventListener('click', () => getAudioContext().resume(), { once: true })
  console.log("Await unlock context")

  newDevices.forEach(({ freq, device }) => {
    devices.set(freq, device)

    try {
      if (device) {
        attachDBValueListener(device, freq)
      }
    } catch (err) {
      console.warn(`Could not attach RNBO listener for ${freq}Hz`, err)
    }
function createNodeInspector(label: string, node: AudioNode): HTMLElement {
  const container = document.createElement('div')
  container.style.marginTop = '10px'
  container.style.padding = '8px'
  container.style.border = '1px solid #ccc'
  container.style.borderRadius = '6px'
  container.style.fontFamily = 'monospace'
  container.style.background = '#f9f9f9'

  const title = document.createElement('strong')
  title.textContent = `🎧 ${label}`
  container.appendChild(title)

  const list = document.createElement('ul')
  list.style.listStyle = 'none'
  list.style.padding = '4px'
  list.style.margin = '4px 0'

  const addRow = (key: string, value: string | number) => {
    const li = document.createElement('li')
    li.textContent = `${key}: ${value}`
    list.appendChild(li)
  }

  addRow('channelCount', node.channelCount)
  addRow('channelCountMode', node.channelCountMode)
  addRow('channelInterpretation', node.channelInterpretation)
  addRow('numberOfInputs', node.numberOfInputs)
  addRow('numberOfOutputs', node.numberOfOutputs)

  container.appendChild(list)
  return container
}

function createNodeInspectorAndToggle(label: string, node: AudioNode, sourceIndex: number, events: { send: (tag: string, value: number) => void }): HTMLElement {
  const container = document.createElement('div')
  container.style.marginTop = '10px'
  container.style.padding = '8px'
  container.style.border = '1px solid #ccc'
  container.style.borderRadius = '6px'
  container.style.fontFamily = 'monospace'
  container.style.background = '#f9f9f9'

  const title = document.createElement('strong')
  title.textContent = `🎧 ${label}`
  container.appendChild(title)

  const list = document.createElement('ul')
  list.style.listStyle = 'none'
  list.style.padding = '4px'
  list.style.margin = '4px 0'

  const addRow = (key: string, value: string | number) => {
    const li = document.createElement('li')
    li.textContent = `${key}: ${value}`
    list.appendChild(li)
  }

  addRow('channelCount', node.channelCount)
  addRow('channelCountMode', node.channelCountMode)
  addRow('channelInterpretation', node.channelInterpretation)
  addRow('numberOfInputs', node.numberOfInputs)
  addRow('numberOfOutputs', node.numberOfOutputs)

  container.appendChild(list)
  if (node instanceof AudioBufferSourceNode || node instanceof GainNode) {
    // Button UI for L/R channel control (toggle 0–1–2)
    const buttonContainer = document.createElement('div')
    buttonContainer.style.marginTop = '8px'

    ;['L', 'R'].forEach((side, i) => {
      const modeBtn = document.createElement('button')
      modeBtn.textContent = `Mute ${side}: 0`
      modeBtn.style.marginRight = '8px'

      let currentMode = 0
      const paramTag = `in${7 + sourceIndex * 2 + i}` // e.g. in7, in8, ..., in12

      modeBtn.addEventListener('click', () => {
        currentMode = (currentMode + 1) % 3
        modeBtn.textContent = `Mute ${side}: ${currentMode}`
        events.send(paramTag, currentMode)
        console.log(`🔁 Sent ${paramTag} = ${currentMode}`)
      })

      buttonContainer.appendChild(modeBtn)
    })

    console.log("✅ RNBO Devices registered and listeners attached")
    return devices
    container.appendChild(buttonContainer)
  }
  return container
}

function splitStereoIntoMonoChannels(node: AudioNode): { left: GainNode, right: GainNode } {
  console.log("BEFORE SPLIT: channelCount -> " + node.channelCount)
  const ctx = getAudioContext()
  const splitter = ctx.createChannelSplitter(2)

  node.connect(splitter)

  const leftGain = ctx.createGain()
  leftGain.channelCount = 1
  leftGain.channelCountMode = 'explicit'
  leftGain.channelInterpretation = 'discrete'
  leftGain.gain.setValueAtTime(1, leftGain.context.currentTime)

  const rightGain = ctx.createGain()
  rightGain.channelCount = 1
  rightGain.channelCountMode = 'explicit'
  rightGain.channelInterpretation = 'discrete'
  rightGain.gain.setValueAtTime(1, rightGain.context.currentTime)
  splitter.connect(leftGain, 0)  // channel 0 = left
  splitter.connect(rightGain, 1) // channel 1 = right

  console.log("AFTER SPLIT: channelCount -> ", { left: leftGain, right: rightGain })
  return { left: leftGain, right: rightGain }
}
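
// Illustrative sketch, not part of this patch: splitStereoIntoMonoChannels can be
// checked in isolation by auditioning only one side of a stereo source; the names
// below are assumptions for demonstration.
//   const demoSource = getAudioContext().createBufferSource() // assumes a decoded stereo buffer
//   const { left } = splitStereoIntoMonoChannels(demoSource)
//   left.connect(getAudioContext().destination)               // left channel only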

@@ -131,15 +293,21 @@ function registerDevicesWithListeners(newDevices: { freq: number, device: any }[
 * Enables the microphone so it can be used.
 */

async function initMicrophone() {

async function initMicrophone(ctx: AudioContext) {
  try {
    await navigator.mediaDevices.getUserMedia({ audio: true })
  } catch {
    window.alert("No access to the microphone.")
  }
  const availableDevices = await updateAvailableDevices()
  console.log("MICROPHONES ", { availableDevices })
  const preferredDevice = availableDevices[3]
  const deviceJabra = "fdab2256f4654ac7c3fb7c7f334d88a6e468cf155fb00bb4c33d0f2f2479f653"
  const externesMic = "276fc63a34034d08b7d5957cda539d268d20598e1708932112de199d85cfed1f"
  try {
    const preferredDevice = await getPreferredDevice()
    console.log("INIT MIC", { preferredDevice })
    if (preferredDevice) {
      console.log("preferred device !!!")
      const constraints: MediaStreamConstraints = {
        audio: { deviceId: { exact: preferredDevice.deviceId }, echoCancellation: false, noiseSuppression: false,
        audio: { deviceId: { exact: "276fc63a34034d08b7d5957cda539d268d20598e1708932112de199d85cfed1f" }, echoCancellation: false, noiseSuppression: false,
          autoGainControl: false },
        video: false
      }
@@ -147,53 +315,32 @@ async function initMicrophone() {
        constraints
      )
      const source = ctx.createMediaStreamSource(stream)
      console.log("MICROPHONE SOURCE", { source })
      const splitter = ctx.createChannelSplitter(source.channelCount)

      try {
        source.connect(splitter)
        devices.forEach((device, freq) => {
          splitter.connect(device.audioNode, 0, 0)
          console.log('🎤 Microphone mono signal was routed to the RNBO device with freq: ' + freq + '.')
        })
        createNodeInspector("MICROPHONE", source)
        console.log("MICROPHONE SOURCE", { source })

      } catch (error) {
        console.error("Error connecting the microphone to the devices", error)
      }

    } else {
      console.log("The device was not found. Better re-check the microphone access.")

    }

  } catch (err) {
    console.error('❌ Error accessing the microphone:', err)
  }
}

/**
 * This function creates an RNBO device with a given frequency. If the microphone is connected,
 * this device sends out control values as messages.
 * @param freq
 * @param ctx
 * @returns RNBO Device
 */
async function createBandDevice(freq: number, ctx: AudioContext): Promise<DeviceInfo> {
  const patcher = await getPatcher()
  const rnboDevice = await createDevice({
    context: ctx,
    patcher
  })
  const param = rnboDevice.parametersById.get('centerFrequency')
  param.value = freq
  console.log("Device created for " + freq)
  return { id: freq, name: "" + freq, device: rnboDevice, audioNode: rnboDevice.node }
}

init() // hey ho lets go

// resume only after interaction inside the iframe (mobile safe)
window.addEventListener('click', () => ctx.resume(), { once: true })
console.log("Await unlock context")
/**
 * This function updates the list of possible microphone sources.
 */
async function updateAvailableDevices(): Promise<MediaDeviceInfo[]> {
  try {
    const devices = await navigator.mediaDevices.enumerateDevices()
    const newDevices = devices.filter(
      device => device.kind === 'audioinput'
    )
    if (newDevices.length === 0) {
      console.warn(
        'No audio input devices found. This might be due to missing permissions or no connected devices.'
      )
    }
    return newDevices
  } catch (error) {
    console.error('Error enumerating devices:', error)
  }
  return []
}
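
// Illustrative sketch, not part of this patch: instead of hard-coding deviceId hashes
// (as in initMicrophone above), a preferred input could be picked from
// updateAvailableDevices() by label; the 'Jabra' substring is an assumption.
//   async function pickInputByLabel(substring: string): Promise<MediaDeviceInfo | undefined> {
//     const inputs = await updateAvailableDevices()
//     return inputs.find(d => d.label.toLowerCase().includes(substring.toLowerCase()))
//   }
//   e.g. const jabraMic = await pickInputByLabel('Jabra')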