Initial commit

This commit is contained in:
Mindboost
2025-07-01 10:53:26 +00:00
commit 38050e5c69
416 changed files with 48708 additions and 0 deletions

View File

@@ -0,0 +1,66 @@
<template>
<div v-if="false">AudioStateHandler</div>
</template>
<script>
import { mapState, mapActions } from 'pinia'
import { useAudioStore } from '~/stores/audio'
// Invisible global handler: mirrors the Pinia audio store's `playing` flag,
// logs transitions, and wires the OS MediaSession play/pause actions to the
// store. Renders nothing visible (template is v-if="false").
export default {
name: 'AudioStateHandler',
computed: {
// Mirrors the audio store's `playing` flag.
...mapState(useAudioStore, ['playing'])
},
watch: {
// Log every global play/pause transition for debugging.
playing (newValue) {
this.$logger.log('Global playing state changed', newValue)
}
},
mounted () {
// NOTE(review): `handleKeyDown` is commented out in `methods` below, so this
// registers a nullish listener (which addEventListener silently ignores).
// Either re-enable the method or drop this registration — confirm intent.
window.addEventListener('keydown', this.handleKeyDown)
this.$logger.log('Global audio state handler mounted')
// Set up MediaSession API so hardware/OS media keys drive the store.
if ('mediaSession' in navigator) {
navigator.mediaSession.setActionHandler('play', () => this.setPlaying(true))
navigator.mediaSession.setActionHandler('pause', () => this.setPlaying(false))
}
},
beforeUnmount () {
// Symmetric cleanup for the (currently inert) keydown registration above.
window.removeEventListener('keydown', this.handleKeyDown)
},
methods: {
...mapActions(useAudioStore, ['setPlaying'])
/** handleKeyDown (e) {
const activeElement = document.activeElement
const tagName = activeElement.tagName.toLowerCase()
// List of elements where spacebar interaction should be preserved
const interactiveElements = [
'input', 'textarea', 'button', 'select', 'option',
'video', 'audio', 'a', 'summary'
]
// Check for contenteditable attribute
const isContentEditable = activeElement.getAttribute('contenteditable') === 'true'
// Check for custom data attribute that might indicate spacebar interaction
const usesSpacebar = activeElement.getAttribute('data-uses-spacebar') === 'true'
if (e.code === 'Space' &&
!interactiveElements.includes(tagName) &&
!isContentEditable &&
!usesSpacebar) {
this.$logger.log('Space key pressed in AudioStateHandler')
e.preventDefault() // Prevent the default action (scrolling)
this.setPlaying(!this.playing)
this.$logger.log('New playing state:', this.playing)
}
} */
}
}
</script>

View File

@@ -0,0 +1,10 @@
<!-- eslint-disable vue/multi-word-component-names -->
<template>
<div>
<RNBOPlayer />
</div>
</template>
<script setup>
import RNBOPlayer from '~/archive/components/tests/RNBOPlayer.vue'
</script>

View File

@@ -0,0 +1,24 @@
<!-- eslint-disable vue/valid-template-root -->
<template></template>
<script>
import { usePlayerControls } from '@/composables/usePlayerControls'
// Stripped-down AudioStateHandler variant intended to delegate to the
// usePlayerControls composable; all hook-ups are currently commented out.
export default {
name: 'AudioStateHandler',
watch: {
// NOTE(review): no `playing` prop/data/computed is defined on this component
// (unlike the mapState variant), so this watcher has nothing to observe and
// will never fire — confirm whether store state should be mapped in here.
playing (newValue) {
this.$logger.log('Global playing state changed', newValue)
}
},
mounted () {
// usePlayerControls().addSpaceListener()
// Set up MediaSession API
if ('mediaSession' in navigator) {
// usePlayerControls().addMediaControls()
}
}
}
</script>

View File

@@ -0,0 +1,158 @@
<template>
<div class="player">
<div class="slider-wrapper">
<div v-if="muted" @click="toggleMute">
<img style="width: 25px" src="~/assets/image/noiseicon_muted.svg">
</div>
<div v-else @click="toggleMute">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
<div class="slider">
<input
id="gain-control-noise"
v-model="volume"
type="range"
min="0"
max="1"
step="0.005"
@wheel.prevent="changeVolumeOnWheel"
>
<span class="slider-progress-bar" :style="{ width: `${volume * 100}%` }" />
</div>
</div>
<NoiseControlledWebAudio3Band
v-for="(frequency, index) in frequencies"
ref="Player"
:key="frequency"
:master-attack="masterAttack"
:master-release="masterRelease"
:center-frequency="frequency"
:master-gain="masterGain"
:q-factor="qFactors[index]"
@ready="onBandReady"
@update:mid-volume="controlMusicGain"
/>
</div>
</template>
<script lang="ts">
import { useAudioStore } from '../../stores/audio'
import { useMicStore } from '~/stores/microphone'
import type { Microphone } from '~/stores/interfaces/Microphone'
import NoiseControlledWebAudio3Band from '~/components/experiments/tests/ControlValues/NoiseControlledWebAudio3Band'
export default {
  name: 'AdaptiveNoiseGain',
  components: {
    NoiseControlledWebAudio3Band
  },
  emits: ['musicGain'],
  setup () {
    // Master gain node for the noise chain, shared through the audio store.
    const masterGain = ref(useAudioStore().getMasterGainNoise())
    const player = ref(null)
    const { t } = useI18n()
    // One band component per centre frequency; qFactors is index-aligned.
    const frequencies = ref([150, 1500, 8000])
    const qFactors = ref([0.8, 0.9, 0.6])
    const loadedBands = ref(0)
    // Treat anything below 1% store volume as muted.
    const muted = computed(() => useAudioStore().getNoiseVolume < 0.01)
    // Last non-zero gain value, restored on unmute.
    let oldVolume = 0
    const route = useRoute()
    const isExperimentsRoute = computed(() => route.path.match(/\/[a-z]{2}\/experiments/))
    const masterAttack = ref(120000 * 2) // example value in samples
    const masterRelease = ref(144000 * 2)
    // Loading until every band has emitted @ready.
    const loading = computed(() => loadedBands.value < frequencies.value.length)
    const onBandReady = () => {
      loadedBands.value++
    }
    // Toggle between silence and the previous (or full) noise volume
    // with a 0.4 s linear ramp, keeping the store in sync.
    const toggleMute = () => {
      if (!muted.value) {
        oldVolume = masterGain.value.gain.value
        masterGain.value.gain.linearRampToValueAtTime(0, masterGain.value.context.currentTime + 0.4)
        useAudioStore().setNoiseVolume(0)
      } else if (oldVolume > 0) {
        masterGain.value.gain.linearRampToValueAtTime(oldVolume, masterGain.value.context.currentTime + 0.4)
        useAudioStore().setNoiseVolume(oldVolume)
      } else {
        masterGain.value.gain.linearRampToValueAtTime(1, masterGain.value.context.currentTime + 0.4)
        useAudioStore().setNoiseVolume(1)
      }
    }
    return {
      frequencies,
      loading,
      onBandReady,
      t,
      loadedBands,
      masterAttack,
      masterRelease,
      isExperimentsRoute,
      qFactors,
      masterGain,
      toggleMute,
      muted,
      player
    }
  },
  data () {
    return {
      audioContext: useAudioStore().getContext(),
      musicReady: false,
      // NOTE(review): named "tropics" but points at the masking track — confirm.
      tropics_src: window.location.origin + useRuntimeConfig().public.tracks.masking_src as string,
      fading: false,
      connected: false,
      volume: useAudioStore().noiseVolume,
      previousVolume: useAudioStore().noiseVolume
    }
  },
  // NOTE: removed a stray empty `onMounted () {}` option — that name is the
  // Composition API hook; the Options API ignores it, so it was dead code.
  watch: {
    // Keep the store and the master noise gain in sync with the slider.
    volume (newVolume: number) {
      const audioStore = useAudioStore()
      audioStore.setNoiseVolume(newVolume)
      if (!isNaN(newVolume)) {
        const gainNode = audioStore.getMasterGainNoise()
        // BUG FIX: linearRampToValueAtTime takes an ABSOLUTE context time.
        // The original passed the constant 0.125 — a moment in the past once
        // the context has been running — which made the change effectively
        // instantaneous instead of a 125 ms ramp.
        gainNode.gain.linearRampToValueAtTime(newVolume, gainNode.context.currentTime + 0.125)
      }
    }
  },
  beforeUnmount () {
    // Stop all mic tracks so the browser releases the microphone indicator.
    const micro = useMicStore().getMicrophone() as Microphone
    micro.microphoneStream?.getTracks().forEach(m => m.stop())
  },
  methods: {
    // Adjust volume in 0.02 steps on mouse wheel, clamped to [0, 1].
    changeVolumeOnWheel (event: WheelEvent) {
      const gainValue = this.volume
      if (event.deltaY < 0) {
        this.volume = Math.min(1, gainValue + 0.02)
      } else {
        this.volume = Math.max(0, gainValue - 0.02)
      }
    },
    // Re-emit the mid-band volume so the parent can drive the music gain.
    controlMusicGain (value: string) {
      this.$emit('musicGain', value)
    },
    // Marks the noise element as ready for the Web Audio hookup.
    handleCanPlayNoise () {
      // useNuxtApp().$logger.log('NoiseElemeint has now playingstate: ' + state)
      this.musicReady = true
    },
    // True once the underlying media element reported it can play.
    readyForWebaudio () {
      if (!this.musicReady) {
        // useNuxtApp().$logger.log('music not ready')
        return false
      }
      return true
    }
  }
}
</script>

View File

@@ -0,0 +1,128 @@
<template>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
tooltip-title="Click, scroll or touch to change volume of noise"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="lagoon_src"
tooltip-title="Click, scroll or touch to change volume of music"
title="Lagoon"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
@update:fadeout="fadeOutMusic"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import AudioElement from '~/components/experiments/AudioElement.vue'
import { useAudioStore } from '~/stores/audio'
export default {
  name: 'NoiseMusicGainLagoon',
  components: { AudioElement },
  data () {
    return {
      // Shared AudioContext from the Pinia audio store.
      audioContext: useAudioStore().getContext(),
      playing: false,
      paused: false,
      // Web Audio nodes created lazily on first play (gain + media-source nodes).
      createdNodes: {} as any,
      lagoon_src: window.location.origin + useRuntimeConfig().public.tracks.lagoon_src as string,
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string
    }
  },
  mounted () {
    if (this.audioContext) {
      // useNuxtApp().$logger.log('Audiocontext available ', this.audioContext)
    }
    // useNuxtApp().$logger.log('I created two AudioElements')
  },
  methods: {
    // This method helps to free the resources when we stop playing the audio;
    // without it, playback would be louder each time we start playing.
    refreshAudioContext () {
      const newAudioContext = new AudioContext()
      this.audioContext.close()
      useAudioStore().audioContext = newAudioContext
      this.audioContext = useAudioStore().getContext()
    },
    // Fade both the noise and music gains down to silence over ~1.3 s.
    fadeOutMusic () {
      const now = this.audioContext.currentTime
      if (this.createdNodes.noiseGain) {
        const noiseGain = this.createdNodes.noiseGain.gain
        // Anchor the ramp at the current value so the fade starts smoothly.
        noiseGain.linearRampToValueAtTime(noiseGain.value, now)
        noiseGain.linearRampToValueAtTime(0, now + 1.3)
      }
      if (this.createdNodes.musicGain) {
        // BUG FIX: the original read `noiseGain.gain.value` and anchored
        // `noiseGain` in this branch (copy-paste error), so the music gain
        // was never anchored before its own fade-out.
        const musicGain = this.createdNodes.musicGain.gain
        musicGain.linearRampToValueAtTime(musicGain.value, now)
        musicGain.linearRampToValueAtTime(0, now + 1.3)
      }
    },
    // Build (or reuse) the Web Audio graph when playback starts; tear it all
    // down and refresh the context when playback stops.
    handlePlayingUpdate (state: boolean) {
      if (state) {
        const noiseElement = this.$refs.Noise as typeof AudioElement
        const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
        const musicElement = this.$refs.Music as typeof AudioElement
        const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
        const audioContext = this.audioContext
        const destination = this.audioContext.destination
        this.createdNodes.musicGain ||= audioContext.createGain()
        this.createdNodes.noiseGain ||= audioContext.createGain()
        // Start silent, then ramp both gains up below.
        this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
        // NOTE(review): createMediaElementSource throws if called twice for the
        // same element on one context; this relies on refreshAudioContext()
        // replacing the context on every stop — confirm.
        this.createdNodes.noiseSource = audioContext.createMediaElementSource(noiseAudioElement)
        this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
        this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
        this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
        this.createdNodes.noiseGain.connect(destination)
        this.createdNodes.musicGain.connect(destination)
        musicAudioElement.muted = false
        noiseAudioElement.muted = false
        this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        useAudioStore().playing = true
      } else {
        // Music has just stopped: drop the node graph and refresh the context
        // so the next start rebuilds from scratch.
        // CONSISTENCY FIX: reset with an object literal (was `[]`), matching
        // the declared shape in data().
        this.createdNodes = {}
        this.refreshAudioContext()
      }
    },
    // Smoothly follow the noise slider with a 0.3 s ramp.
    updateNoiseGain (volume: number) {
      if (this.createdNodes.noiseGain) {
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
      }
    },
    // Smoothly follow the music slider with a 0.3 s ramp.
    updateMusicGain (volume: number) {
      if (this.createdNodes.musicGain) {
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
      }
    }
  }
}
</script>

View File

@@ -0,0 +1,123 @@
<template>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="meadow_src"
title="Meadow"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
@update:fadeout="fadeOutMusic"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import AudioElement from '~/components/experiments/AudioElement.vue'
import { useAudioStore } from '~/stores/audio'
export default {
  name: 'NoiseMusicGainMeadow',
  components: { AudioElement },
  data () {
    return {
      // Shared AudioContext from the Pinia audio store.
      audioContext: useAudioStore().getContext(),
      playing: false,
      paused: false,
      // Web Audio nodes created lazily on first play (gain + media-source nodes).
      createdNodes: {} as any,
      meadow_src: window.location.origin + useRuntimeConfig().public.tracks.meadow_src as string,
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string
    }
  },
  mounted () {
  },
  methods: {
    // This method helps to free the resources when we stop playing the audio;
    // without it, playback would be louder each time we start playing.
    refreshAudioContext () {
      const newAudioContext = new AudioContext()
      this.audioContext.close()
      useAudioStore().audioContext = newAudioContext
      this.audioContext = useAudioStore().getContext()
    },
    // Fade both the noise and music gains down to silence over ~1.3 s.
    fadeOutMusic () {
      const now = this.audioContext.currentTime
      if (this.createdNodes.noiseGain) {
        const noiseGain = this.createdNodes.noiseGain.gain
        // Anchor the ramp at the current value so the fade starts smoothly.
        noiseGain.linearRampToValueAtTime(noiseGain.value, now)
        noiseGain.linearRampToValueAtTime(0, now + 1.3)
      }
      if (this.createdNodes.musicGain) {
        // BUG FIX: the original read `noiseGain.gain.value` and anchored
        // `noiseGain` in this branch (copy-paste error), so the music gain
        // was never anchored before its own fade-out.
        const musicGain = this.createdNodes.musicGain.gain
        musicGain.linearRampToValueAtTime(musicGain.value, now)
        musicGain.linearRampToValueAtTime(0, now + 1.3)
      }
    },
    // Build (or reuse) the Web Audio graph when playback starts; tear it all
    // down and refresh the context when playback stops.
    handlePlayingUpdate (state: boolean) {
      if (state) {
        const noiseElement = this.$refs.Noise as typeof AudioElement
        const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
        const musicElement = this.$refs.Music as typeof AudioElement
        const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
        const audioContext = this.audioContext
        const destination = this.audioContext.destination
        this.createdNodes.musicGain ||= audioContext.createGain()
        this.createdNodes.noiseGain ||= audioContext.createGain()
        // Start silent, then ramp both gains up below.
        this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
        // NOTE(review): createMediaElementSource throws if called twice for the
        // same element on one context; this relies on refreshAudioContext()
        // replacing the context on every stop — confirm.
        this.createdNodes.noiseSource = audioContext.createMediaElementSource(noiseAudioElement)
        this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
        this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
        this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
        this.createdNodes.noiseGain.connect(destination)
        this.createdNodes.musicGain.connect(destination)
        musicAudioElement.muted = false
        noiseAudioElement.muted = false
        this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        useAudioStore().playing = true
      } else {
        // Music has just stopped: drop the node graph and refresh the context
        // so the next start rebuilds from scratch.
        // CONSISTENCY FIX: reset with an object literal (was `[]`), matching
        // the declared shape in data().
        this.createdNodes = {}
        this.refreshAudioContext()
      }
    },
    // Smoothly follow the noise slider with a 0.3 s ramp.
    updateNoiseGain (volume: number) {
      if (this.createdNodes.noiseGain) {
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
      }
    },
    // Smoothly follow the music slider with a 0.3 s ramp.
    updateMusicGain (volume: number) {
      if (this.createdNodes.musicGain) {
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
      }
    }
  }
}
</script>

View File

@@ -0,0 +1,117 @@
<template>
<AudioElement2
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement2>
<AudioElement2
ref="Music"
key="1"
:src="meadow_src"
title="Tropics"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement2>
</template>
<script lang="ts">
import AudioElement2 from '~/components/experiments/AudioElement2.vue'
import { useAudioStore } from '~/stores/audio'
export default {
  name: 'NoiseMusicGainTropics',
  components: { AudioElement2 },
  data () {
    return {
      // Shared AudioContext from the Pinia audio store.
      audioContext: useAudioStore().getContext(),
      playing: false,
      paused: false,
      // Web Audio nodes created lazily on first play (gain + media-source nodes).
      createdNodes: {} as any,
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src,
      meadow_src: window.location.origin + useRuntimeConfig().public.tracks.meadow_src
    }
  },
  mounted () {
  },
  methods: {
    // This method helps to free the resources when we stop playing the audio;
    // without it, playback would be louder each time we start playing.
    refreshAudioContext () {
      const newAudioContext = new AudioContext()
      this.audioContext.close()
      useAudioStore().audioContext = newAudioContext
      this.audioContext = useAudioStore().getContext()
    },
    // Fade both the noise and music gains down to silence over ~1.3 s.
    fadeOutMusic () {
      const now = this.audioContext.currentTime
      if (this.createdNodes.noiseGain) {
        const noiseGain = this.createdNodes.noiseGain.gain
        // Anchor the ramp at the current value so the fade starts smoothly.
        noiseGain.linearRampToValueAtTime(noiseGain.value, now)
        noiseGain.linearRampToValueAtTime(0, now + 1.3)
      }
      if (this.createdNodes.musicGain) {
        // BUG FIX: the original read `noiseGain.gain.value` and anchored
        // `noiseGain` in this branch (copy-paste error), so the music gain
        // was never anchored before its own fade-out.
        const musicGain = this.createdNodes.musicGain.gain
        musicGain.linearRampToValueAtTime(musicGain.value, now)
        musicGain.linearRampToValueAtTime(0, now + 1.3)
      }
    },
    // Build (or reuse) the Web Audio graph when playback starts; tear it all
    // down and refresh the context when playback stops.
    handlePlayingUpdate (state: boolean) {
      if (state) {
        const noiseElement = this.$refs.Noise as typeof AudioElement2
        const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
        const musicElement = this.$refs.Music as typeof AudioElement2
        const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
        const audioContext = this.audioContext
        const destination = this.audioContext.destination
        this.createdNodes.musicGain ||= audioContext.createGain()
        this.createdNodes.noiseGain ||= audioContext.createGain()
        // Start silent, then ramp both gains up below.
        this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
        // NOTE(review): createMediaElementSource throws if called twice for the
        // same element on one context; this relies on refreshAudioContext()
        // replacing the context on every stop — confirm.
        this.createdNodes.noiseSource = audioContext.createMediaElementSource(noiseAudioElement)
        this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
        this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
        this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
        this.createdNodes.noiseGain.connect(destination)
        this.createdNodes.musicGain.connect(destination)
        musicAudioElement.muted = false
        noiseAudioElement.muted = false
        this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        useAudioStore().playing = true
      } else {
        // Music has just stopped: drop the node graph and refresh the context
        // so the next start rebuilds from scratch.
        // CONSISTENCY FIX: reset with an object literal (was `[]`), matching
        // the declared shape in data().
        this.createdNodes = {}
        this.refreshAudioContext()
      }
    },
    // Smoothly follow the noise slider with a 0.3 s ramp.
    updateNoiseGain (volume: number) {
      if (this.createdNodes.noiseGain) {
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
      }
    },
    // Smoothly follow the music slider with a 0.3 s ramp.
    updateMusicGain (volume: number) {
      if (this.createdNodes.musicGain) {
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
      }
    }
  }
}
</script>

View File

@@ -0,0 +1,122 @@
<template>
<div>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="tropics_src"
title="Tropics"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../stores/audio'
export default {
  name: 'NoiseMusicGainTropics',
  components: { AudioElement },
  data () {
    return {
      // Shared AudioContext from the Pinia audio store.
      audioContext: useAudioStore().getContext(),
      playing: false,
      paused: false,
      // Web Audio nodes created lazily on first play (gain + media-source nodes).
      createdNodes: {} as any,
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src,
      tropics_src: window.location.origin + useRuntimeConfig().public.tracks.tropics_src
    }
  },
  mounted () {
    if (this.audioContext) {
      // useNuxtApp().$logger.log('Audiocontext available ', this.audioContext)
    }
    // useNuxtApp().$logger.log('I created two AudioElements')
  },
  methods: {
    // This method helps to free the resources when we stop playing the audio;
    // without it, playback would be louder each time we start playing.
    refreshAudioContext () {
      const newAudioContext = new AudioContext()
      this.audioContext.close()
      useAudioStore().audioContext = newAudioContext
      this.audioContext = useAudioStore().getContext()
    },
    // Fade both the noise and music gains down to silence over ~1.3 s.
    fadeOutMusic () {
      const now = this.audioContext.currentTime
      if (this.createdNodes.noiseGain) {
        const noiseGain = this.createdNodes.noiseGain.gain
        // Anchor the ramp at the current value so the fade starts smoothly.
        noiseGain.linearRampToValueAtTime(noiseGain.value, now)
        noiseGain.linearRampToValueAtTime(0, now + 1.3)
      }
      if (this.createdNodes.musicGain) {
        // BUG FIX: the original read `noiseGain.gain.value` and anchored
        // `noiseGain` in this branch (copy-paste error), so the music gain
        // was never anchored before its own fade-out.
        const musicGain = this.createdNodes.musicGain.gain
        musicGain.linearRampToValueAtTime(musicGain.value, now)
        musicGain.linearRampToValueAtTime(0, now + 1.3)
      }
    },
    // Build (or reuse) the Web Audio graph when playback starts; tear it all
    // down and refresh the context when playback stops.
    handlePlayingUpdate (state: boolean) {
      if (state) {
        const noiseElement = this.$refs.Noise as typeof AudioElement
        const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
        const musicElement = this.$refs.Music as typeof AudioElement
        const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
        const audioContext = this.audioContext
        const destination = this.audioContext.destination
        this.createdNodes.musicGain ||= audioContext.createGain()
        this.createdNodes.noiseGain ||= audioContext.createGain()
        // Start silent, then ramp both gains up below.
        this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
        // NOTE(review): createMediaElementSource throws if called twice for the
        // same element on one context; this relies on refreshAudioContext()
        // replacing the context on every stop — confirm.
        this.createdNodes.noiseSource = audioContext.createMediaElementSource(noiseAudioElement)
        this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
        this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
        this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
        this.createdNodes.noiseGain.connect(destination)
        this.createdNodes.musicGain.connect(destination)
        musicAudioElement.muted = false
        noiseAudioElement.muted = false
        this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(1, this.audioContext.currentTime + 1.3)
        useAudioStore().playing = true
      } else {
        // Music has just stopped: drop the node graph and refresh the context
        // so the next start rebuilds from scratch.
        // CONSISTENCY FIX: reset with an object literal (was `[]`), matching
        // the declared shape in data().
        this.createdNodes = {}
        this.refreshAudioContext()
      }
    },
    // Smoothly follow the noise slider with a 0.3 s ramp.
    updateNoiseGain (volume: number) {
      if (this.createdNodes.noiseGain) {
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
      }
    },
    // Smoothly follow the music slider with a 0.3 s ramp.
    updateMusicGain (volume: number) {
      if (this.createdNodes.musicGain) {
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
      }
    }
  }
}
</script>

View File

@@ -0,0 +1,375 @@
<template>
AudioElementManager
<div class="rnboplayer">
<AudioElement
:ref="noise.title"
:key="noise.id"
:src="noise.src"
:title="noise.title"
@update:volume="updateNoiseGain"
@update:loaded="noiseReady=true"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
:ref="selectedAudio.title"
:key="selectedAudio.id"
:src="selectedAudio.src"
:title="selectedAudio.title"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
@update:fadeout="fadeOutGains"
@update:loaded="musicReady=true"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
</AudioElement>
</div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
import { useAudioStore } from '~/stores/audio'
// import setupNodes from '@/components/Player/Nodes'
// import { useAudioStore } from '~/stores/audio'
export default {
components: {
AudioElement
},
props: {
soundscape: {
type: String,
default: 'Lagoon'
}
},
data () {
return {
audioList: [
{ id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
{ id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
{ id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
{ id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
],
noise: { id: 5, title: 'Noise', src: window.location.origin + '/masking/noise.flac' },
selectedAudioTitle: '',
currentElement: {},
createdNodes: {},
musicPatch: importedMusicPatcher,
noisePatch: importedNoisePatcher,
audioContext: useAudioStore().getContext()
}
},
computed: {
selectedAudio () {
return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
}
},
beforeUnmount () {
this.disconnectNodes()
if (this.audioContext) { this.audioContext.close() }
this.audioContext = null
},
beforeMount () {
this.audioContext = new AudioContext()
},
mounted () {
if (this.soundscape) {
this.audioList = this.audioList.filter(audio => audio.title.toLowerCase() === this.soundscape.toLowerCase())
}
this.selectAudioByTitle(this.audioList[0].title)
this.currentElement = this.audioList[0]
this.addUserNavigationHandling()
this.musicPatch = importedMusicPatcher
this.noisePatch = importedNoisePatcher
},
methods: {
fadeOutGains () {
// Define the duration of the fade out
const fadeDuration = 2.0 // 2 seconds for fade out
const currentTime = this.audioContext.currentTime
const fadeEndTime = currentTime + fadeDuration
this.fading = true
if (this.createdNodes.noiseGain) {
// Cancel scheduled values to clear any previous scheduled changes
this.createdNodes.noiseGain.gain.cancelScheduledValues(currentTime)
// Set the current value
this.createdNodes.noiseGain.gain.setValueAtTime(this.createdNodes.noiseGain.gain.value, currentTime)
// Schedule the fade out
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, fadeEndTime)
}
if (this.createdNodes.musicGain) {
// Cancel scheduled values to clear any previous scheduled changes
this.createdNodes.musicGain.gain.cancelScheduledValues(currentTime)
// Set the current value
this.createdNodes.musicGain.gain.setValueAtTime(this.createdNodes.musicGain.gain.value, currentTime)
// Schedule the fade out
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, fadeEndTime)
}
setTimeout(() => {
this.fading = false
}, fadeDuration * 1000)
},
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
changeMusicVolume (newValue) {
if (!this.createdNodes.musicGain.gain) { return }
// useNuxtApp().$logger.log(this.createdNodes.musicGain.gain)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.createdNodes.musicGain.context.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(newValue, this.createdNodes.musicGain.context.currentTime + 0.01)
},
changeNoiseVolume (newValue) {
if (!this.createdNodes.noiseGain.gain) { return }
this.createdNodes.noiseGain.gain.setValueAtTime(newValue, this.createdNodes.noiseGain.context.currentTime + 0.01)
// this.createdNodes.noiseGain.gain.value = newValue / 100
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
skip (direction) {
const nextSong = this.getSong(this.selectedAudioTitle, direction)
this.selectAudioByTitle(nextSong.title)
},
updateCurrentElement (title) {
this.currentElement = this.audioList.find(e => e.title === title)
},
selectAudioByTitle (title) {
this.selectedAudioTitle = title
// const audioElement = this.audioList.find(e => e.title === this.selectedAudioTitle)
this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
// useNuxtApp().$logger.log('currentElement: ', this.selectedAudioTitle)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
connectNodes () {
  // Wire the complete processing graph. Channel layout matters: each RNBO
  // device takes the microphone on input 0 and a stereo pair on inputs 1/2;
  // the noise device additionally receives the processed music on inputs 3/4.
  // Both gain stages meet in a 4-input merger that feeds the destination.
  const {
    musicSource, musicSplitter, microphoneSource,
    musicDevice, outputSplitter, musicGain,
    noiseSource, noiseSplitter, noiseDevice,
    noiseGain, merger
  } = this.createdNodes
  // Music path: source -> splitter -> music RNBO device
  musicSource.connect(musicSplitter) // 2 channels: L, R
  microphoneSource.connect(musicDevice, 0, 0) // 1 channel: Microphone
  musicSplitter.connect(musicDevice, 0, 1) // 1 channel: Left
  musicSplitter.connect(musicDevice, 1, 2) // 1 channel: Right
  // Music device output feeds both the output splitter and the music gain
  musicDevice.connect(outputSplitter)
  musicDevice.connect(musicGain)
  // Noise path: source -> splitter -> noise RNBO device
  noiseSource.connect(noiseSplitter) // 2 channels: L, R
  microphoneSource.connect(noiseDevice, 0, 0) // 1 channel: Microphone
  noiseSplitter.connect(noiseDevice, 0, 1) // 1 channel: Left
  noiseSplitter.connect(noiseDevice, 1, 2) // 1 channel: Right
  // Processed music is fed into the noise device on inputs 3/4
  outputSplitter.connect(noiseDevice, 0, 3) // 1 channel: Left from musicDevice
  outputSplitter.connect(noiseDevice, 1, 4) // 1 channel: Right from musicDevice
  noiseDevice.connect(noiseGain)
  // Merge both gain stages and play the result out
  noiseGain.connect(merger, 0, 0) // 1 channel: Left
  noiseGain.connect(merger, 0, 1) // 1 channel: Right
  musicGain.connect(merger, 0, 2) // 1 channel: Left
  musicGain.connect(merger, 0, 3) // 1 channel: Right
  merger.connect(merger.context.destination)
},
handlePlayingUpdate (isPlaying) {
// useNuxtApp().$logger.log('handling Playing update: ' + isPlaying + ' <-- isplaying' + this.audioContext)
const ContextClass = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext)
let audioContext = this.audioContext ? this.audioContext : new ContextClass()
let microphoneSource = null
if (isPlaying) {
// Web Audio API is available.
// const musicAudioComponent = this.$refs[this.currentElement.title].audioElement
if (this.areAllNodesAvailable()) {
this.disconnectNodes()
// reconnecting because all Nodes are there
this.connectNodes()
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(1.0, this.audioContext.currentTime + 2)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(1.0, this.audioContext.currentTime + 3)
// useNuxtApp().$logger.log('Connected everything because it was there already')
} else {
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false,
noiseSuppression: false,
autoGainControl: false
},
video: false
})
.then((micStream) => {
audioContext = this.audioContext
microphoneSource = audioContext.createMediaStreamSource(micStream)
this.createdNodes.microphoneSource ||= microphoneSource
// Return both audioContext and microphoneSource to the next link in the chain
return { audioContext, microphoneSource }
})
.then(({ audioContext, microphoneSource }) => {
// First RNBO device creation
return createRNBODevice(audioContext, this.noisePatch).then((noiseRNBODevice) => {
// useNuxtApp().$logger.log('Noise RNBODEVICE created: with ' + noiseRNBODevice.node.numInputChannels + ' inputs and ' + noiseRNBODevice.node.numOutputChannels + ' outputs')
this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
// Return al.then(() => {hen(() => necessary objects for the next step
return { audioContext, microphoneSource, noiseRNBODevice }
})
})
.then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
// Second RNBO device creation
return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
// useNuxtApp().$logger.log('Music RNBODEVICE created: with ' + musicRNBODevice.node.numInputChannels + ' inputs and ' + musicRNBODevice.node.numOutputChannels + ' outputs')
// Return all necessary objects for the final setup
this.createdNodes.musicRNBODevice ||= musicRNBODevice
return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
})
}).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
// Ensure this.createdNodes is initialized
this.createdNodes ||= {}
// Assign nodes if they don't exist
this.createdNodes.microphoneSource ||= microphoneSource
this.createdNodes.musicDevice ||= musicRNBODevice.node
this.createdNodes.noiseDevice ||= noiseRNBODevice.node
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(this.$refs.Noise.$refs.audioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(this.$refs[this.currentElement.title].$refs.audioElement)
this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.merger ||= audioContext.createChannelMerger(4)
this.createdNodes.musicGain.gain.value = 0.001
this.createdNodes.noiseGain.gain.value = 0.001 // macht nichts
this.connectNodes()
setTimeout(() => {
this.createdNodes.musicGain.gain.exponentialRampToValueAtTime(0.5, audioContext.currentTime + 3)
this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(0.5, audioContext.currentTime + 4)
}, 150)
})
.catch((_error) => {
this.disconnectNodes()
this.refreshAudioContext()
if (this.audioContext) {
this.audioContext.destination.disconnect()
// audioContext.destination = null
}
// useNuxtApp().$logger.error('Error setting up audio:', error)
})
}
} else {
this.disconnectNodes()
this.refreshAudioContext()
}
},
disconnectNodes () {
// Ensure this.createdNodes is defined and is an object
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
}
}
}
}
</script>
<style scoped>
.rnboplayer {
position: fixed;
width: 220px; /* Or specify a fixed width like 220px if you prefer */
max-width: 220px; /* This line might be redundant depending on your width strategy */
height: 100px;
display: inline-grid;
z-index: 2;
bottom: 11%;
left: 0;
right: 0;
margin-left: auto;
margin-right: auto;
margin-bottom: 1em;
}
</style>

View File

@@ -0,0 +1,498 @@
<template>
<div class="rnboplayer">
<button v-if="preparedForWebAudio" @click="skip('next')">
skip sound
</button>
<button v-if="preparedForWebAudio" @click="skip('previous')">
previous sound
</button>
<button v-if="preparedForWebAudio" @click="mute">
mute AudioElements
</button>
<button v-if="preparedForWebAudio" @click="connectMusicDevice">
connect Music
</button>
<button v-if="preparedForWebAudio" @click="connectNoiseNode">
connect Noise & Music
</button>
<button v-if="preparedForWebAudio" @click="disconnectNodes">
disconnect Nodes
</button>
<button v-if="preparedForWebAudio" @click="disconnectMusicNodes">
disconnect Music Nodes
</button>
<AudioElement
ref="Noise"
key="5"
:src="sourceNoiseURL"
title="Noise"
@volume:update="changeNoiseVolume"
>
<template #default="{}">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
<AudioElement
v-if="selectedAudioTitle"
:ref="selectedAudio.title"
:key="selectedAudio.id"
:src="selectedAudio.src"
:title="selectedAudio.title"
@update:playing="handlePlayingUpdate"
@update:paused="handlePausedUpdate"
@volume:update="changeMusicVolume"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import importedTestPatcher from '@/assets/patch/test_patch.export.json'
import { createRNBODevice, getAttenuationFactor } from '@/lib/AudioFunctions'
import { useAudioStore } from '~/stores/audio'
import { useUserStore } from '~/stores/user'
export default {
components: {
AudioElement
},
props: {
soundscape: {
type: String,
default: 'Lagoon'
}
},
data () {
  return {
    // Selectable soundscapes; files are served from the app origin.
    audioList: [
      { id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
      { id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
      { id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
      { id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
    ],
    // The masking-noise track rendered by the fixed "Noise" AudioElement.
    noise: { id: 5, title: 'Noise', src: window.location.origin + '/masking/noise.flac' },
    selectedAudioTitle: '',
    // Playlist entry currently backing the music AudioElement.
    currentElement: {},
    // All WebAudio nodes built so far, keyed by role (see areAllNodesAvailable).
    createdNodes: {},
    // Mirror of Object.keys(createdNodes).length; watched to trigger wiring.
    createdNodesCount: 0,
    noiseRNBODevice: {},
    musicPatch: importedMusicPatcher,
    // NOTE(review): initialised with the *test* patch; mounted() replaces it
    // with the real noise patch — confirm which one is intended.
    noisePatch: importedTestPatcher,
    audioContext: null,
    toggleMute: false,
    readyPlayer: false,
    createMusicNodeCounter: 0
  }
},
computed: {
  // Playlist entry matching the selected title, or null when none selected.
  selectedAudio () {
    return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
  },
  // Mirrors the global audio store's playing state.
  isPlaying () {
    return useAudioStore().isPlaying()
  },
  // True once every node of the WebAudio graph has been created.
  preparedForWebAudio () {
    return this.areAllNodesAvailable()
  },
  // URL of the masking-noise file, served from the app origin.
  sourceNoiseURL () {
    return window.location.origin + '/masking/noise.flac'
  }
},
onRenderTriggered () {
// useNuxtApp().$logger.log('render AudioNoiseTest-' + props.title)
},
watch: {
  // Re-check graph completeness whenever the node count changes; once all
  // required nodes exist, wire the noise path (which also wires the music
  // path and unmutes). NOTE(review): createdNodesCount is a number, so the
  // `deep` flag has no effect here.
  createdNodesCount: {
    handler () {
      if (this.areAllNodesAvailable()) {
        this.connectNoiseNode()
      }
    },
    deep: true
  }
},
beforeUnmount () {
  // Tear the graph down and release the shared AudioContext.
  this.disconnectNodes()
  if (this.audioContext) { this.audioContext.close() }
  this.audioContext = null
},
beforeMount () {
  // One AudioContext is shared app-wide via the audio store.
  this.audioContext = useAudioStore().getContext()
},
mounted () {
  // Start on the first playlist entry and hook up media-key navigation.
  this.selectAudioByTitle(this.audioList[0].title)
  this.currentElement = this.audioList[0]
  this.addUserNavigationHandling()
  // NOTE(review): overwrites data()'s noisePatch (test patch) with the real
  // noise patch — confirm which patch should win.
  this.musicPatch = importedMusicPatcher
  this.noisePatch = importedNoisePatcher
  this.testPatch = importedTestPatcher
},
methods: {
unmute () {
const musicAudioElement = this.$refs[this.currentElement.title].audioElement
const noiseAudioElement = this.$refs.Noise.audioElement
noiseAudioElement.volume = 1
musicAudioElement.volume = 1
musicAudioElement.muted = false
noiseAudioElement.muted = false
const musicGain = this.createdNodes.musicGain
const audioContext = this.createdNodes.musicGain.context
const noiseGain = this.createdNodes.musicGain
if (musicGain.gain.value === 0) { musicGain.gain.linearRampToValueAtTime(1, audioContext.currentTime + 10) }
noiseGain.gain.linearRampToValueAtTime(1, audioContext.currentTime + 10)
useAudioStore().setPlaying(false)
},
changeMusicVolume (newValue) {
if (!this.createdNodes.musicGain.gain) { return }
// useNuxtApp().$logger.log(this.createdNodes.musicGain.gain)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.createdNodes.musicGain.context.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(newValue / 100, this.createdNodes.musicGain.context.currentTime + 0.30)
},
changeNoiseVolume (newValue) {
const noiseGain = this.createdNodes.noiseGain
if (!noiseGain) { return }
noiseGain.gain.cancelScheduledValues(noiseGain.context.currentTime)
noiseGain.gain.linearRampToValueAtTime(newValue / 100, noiseGain.context.currentTime + 0.30)
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
skip (_direction) {
  // Fades the music gain to 0 over 2 s. NOTE(review): the actual track
  // switch is commented out below, so this currently only fades — the
  // `_direction` argument is unused and the empty setTimeout is a leftover
  // debug hook. Confirm intent before re-enabling.
  const musicGain = this.createdNodes.musicGain
  if (musicGain instanceof GainNode) {
    // Pin the current value first so the ramp starts from it.
    musicGain.gain.setValueAtTime(musicGain.gain.value, this.createdNodes.musicGain.context.currentTime)
    musicGain.gain.linearRampToValueAtTime(0, musicGain.context.currentTime + 2)
  }
  setTimeout(() => {
    // (leftover debug hook — intentionally empty)
  }, 6000)
  /*
  setTimeout(() => {
    this.selectAudioByTitle(nextSong.title)
  }, 2000) */
},
selectAudioByTitle (title) {
  // Mark the chosen track as selected; this is reactive and re-renders the
  // music AudioElement.
  this.selectedAudioTitle = title
  // Defer so the new AudioElement exists before we swap the source node.
  setTimeout(() => {
    this.replaceMusicNode()
    // NOTE(review): updating currentElement here also re-renders the noise
    // audio element, which is not desired — known issue.
    this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
  }, 100)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
connectMusicDevice () {
  // Wire only the music half of the graph, routed straight to the output:
  // source -> splitter -> RNBO music device -> gain -> destination.
  // The mic feeds device input 0; the stereo music feeds inputs 1/2.
  const { musicDevice, microphoneSource, musicSplitter, musicSource, musicGain, outputSplitter } = this.createdNodes
  musicSource.connect(musicSplitter, 0, 0)
  // musicSource.connect(musicSource.context.destination)
  microphoneSource.connect(musicDevice, 0, 0) // mic on input 0
  musicSplitter.connect(musicDevice, 0, 1) // left
  musicSplitter.connect(musicDevice, 1, 2) // right
  musicDevice.connect(musicGain)
  // outputSplitter taps the processed music for the noise device.
  musicDevice.connect(outputSplitter)
  musicGain.connect(this.audioContext.destination)
  musicGain.gain.value = 1
},
//
// hier Version 2 testen Noise und Music rein, dann Gain
//
//
// hier Version 1 testen Noise und Music Mergen dann Gain
//
connectNoiseNode () {
  // Wire the noise half on top of the music graph. The noise RNBO device
  // receives: mic (input 0), noise L/R (1/2) and *raw* music L/R (3/4,
  // taken from musicSplitter — connectNoiseNodeOrginal is the variant that
  // feeds it the processed music from outputSplitter instead).
  const {
    noiseSplitter, microphoneSource, noiseDevice,
    musicSplitter, noiseSource, noiseGain
  } = this.createdNodes
  this.connectMusicDevice()
  noiseSource.connect(noiseSplitter, 0, 0)
  microphoneSource.connect(noiseDevice, 0, 0)
  noiseSplitter.connect(noiseDevice, 0, 1)
  noiseSplitter.connect(noiseDevice, 1, 2) // 2 channels
  musicSplitter.connect(noiseDevice, 0, 3)
  musicSplitter.connect(noiseDevice, 1, 4)
  // Masking strength depends on the user's headset/ANC configuration.
  const attenuation = this.noiseRNBODevice.parametersById.get('attenuation')
  attenuation.value = getAttenuationFactor()
  noiseDevice.connect(noiseGain)
  noiseGain.connect(noiseGain.context.destination)
  // Give the graph a moment to settle before unmuting.
  setTimeout(() => {
    this.unmute()
  }, 300)
},
connectNoiseNodeOrginal () {
  // Original wiring variant (name typo kept for any existing callers):
  // identical to connectNoiseNode except the noise device's inputs 3/4 are
  // fed the *processed* music from outputSplitter rather than the raw music.
  const {
    noiseSplitter, microphoneSource, noiseDevice,
    outputSplitter, noiseSource, noiseGain
  } = this.createdNodes
  this.connectMusicDevice()
  noiseSource.connect(noiseSplitter, 0, 0)
  microphoneSource.connect(noiseDevice, 0, 0)
  noiseSplitter.connect(noiseDevice, 0, 1)
  noiseSplitter.connect(noiseDevice, 1, 2) // 2 channels
  outputSplitter.connect(noiseDevice, 0, 3)
  outputSplitter.connect(noiseDevice, 1, 4)
  // Masking strength depends on the user's headset/ANC configuration.
  const attenuation = this.noiseRNBODevice.parametersById.get('attenuation')
  attenuation.value = getAttenuationFactor()
  noiseDevice.connect(noiseGain)
  noiseGain.connect(noiseGain.context.destination)
  // Give the graph a moment to settle before unmuting.
  setTimeout(() => {
    this.unmute()
  }, 300)
},
handleLoadingUpdate () {
// useNuxtApp().$logger.log('Loaded Audio File we are ready to connect everything')
},
handlePlayingUpdate (isPlaying) {
// useNuxtApp().$logger.log('prepared AudioNodes ' + this.createMusicNodeCounter)
if (!isPlaying) { this.disconnectNodes() } else if (this.areAllNodesAvailable()) {
this.replaceMusicNode()
} else {
// Whatever was before, reset
if (this.createdNodes.length > 0) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 2)
this.disconnectNodes()
}
this.prepareWebAudioNodes()
}
},
prepareWebAudioNodes () {
this.createMusicNodeCounter++
const userStore = useUserStore()
const isAuth = userStore.isAuthenticated
// Create new Context there might be some nodes missing.
// const ContextClass = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext)
// useNuxtApp().$logger.log('user', { userStore }, { isAuth })
if (!isAuth) {
// useNuxtApp().$logger.error('User no logged in ')
return
}
// Take the microphone and all the other nodes and make them available for mindbooost
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false,
noiseSuppression: false,
autoGainControl: false
},
video: false
}).then((micStream) => {
const audioContext = this.audioContext ? this.audioContext : useAudioStore().getContext()
const microphoneSource = audioContext.createMediaStreamSource(micStream)
this.createdNodes.microphoneSource ||= microphoneSource
// Return both audioContext and microphoneSource to the next link in the chain
return { audioContext, microphoneSource }
})
.then(({ audioContext, microphoneSource }) => {
// First RNBO device creation
return createRNBODevice(audioContext, this.testPatch).then((noiseRNBODevice) => {
this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
return { audioContext, microphoneSource, noiseRNBODevice }
})
})
.then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
// Second RNBO device creation
return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
this.createdNodes.musicRNBODevice ||= musicRNBODevice
return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
})
}).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
this.createdNodes ||= {}
this.noiseRNBODevice = noiseRNBODevice
const noiseAudioElement = this.$refs.Noise.$refs.audioElement
const musicAudioElement = this.$refs[this.currentElement.title].$refs.audioElement
// Assign nodes if they don't exist
this.createdNodes.microphoneSource ||= microphoneSource
this.createdNodes.musicDevice ||= musicRNBODevice.node
this.createdNodes.noiseDevice ||= noiseRNBODevice.node
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.merger ||= audioContext.createChannelMerger(4)
this.createdNodesCount = this.createdNodes.length
})
},
replaceMusicNode () {
  // Swap the music MediaElementSource for one backed by the currently
  // selected track's <audio> element, then rewire and restore the gain.
  // Only safe once the full graph exists.
  if (this.areAllNodesAvailable()) {
    const { musicSource, musicGain } = this.createdNodes
    // Defer past the 2 s fade started elsewhere (e.g. skip()) so the swap
    // is inaudible — timing coupling, see skip().
    setTimeout(() => {
      const musicAudioElement = this.$refs[this.currentElement.title].$refs.audioElement
      const currentGain = musicGain.gain.value
      const newMusicNode = musicSource.context.createMediaElementSource(musicAudioElement)
      musicSource.disconnect()
      this.connectMusicDevice()
      this.createdNodes.musicSource = newMusicNode
      // newMusicNode.connect(musicSplitter)
      musicGain.gain.linearRampToValueAtTime(currentGain, this.audioContext.currentTime + 2)
    }, 2100)
  }
},
disconnectMusicNodes () {
const { musicDevice, musicSplitter, musicSource, outputSplitter, musicGain, noiseGain } = this.createdNodes
// useNuxtApp().$logger.log('disconnect Music device')
if (this.areAllNodesAvailable()) {
if (musicGain instanceof GainNode) { musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 2) }
if (noiseGain instanceof GainNode) { noiseGain.gain.linearRampToValueAtTime(noiseGain.gain.value, this.audioContext.currentTime) }
setTimeout(() => {
musicSource.disconnect()
musicSplitter.disconnect()
musicDevice.disconnect()
outputSplitter.disconnect()
// useNuxtApp().$logger.log('Music device disconnected')
musicSource.disconnect()
}, 2050)
} else {
// useNuxtApp().$logger.log('No nodes present to disconnect')
}
},
disconnectNodes () {
// useNuxtApp().$logger.log('DISCONNECT ALL NODES')
if (this.createdNodes.microphoneSource) { this.createdNodes.microphoneSource.mediaStream.getTracks().forEach(track => track.stop()) }
// Ensure this.createdNodes is defined and is an object
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
}
useAudioStore().setPlaying(false)
}
}
}
</script>
<style scoped>
.rnboplayer {
position: fixed;
width: 220px; /* Or specify a fixed width like 220px if you prefer */
max-width: 220px; /* This line might be redundant depending on your width strategy */
height: fit-content;
display: inline-grid;
z-index: 2;
bottom: 11%;
left: 0;
right: 0;
margin-left: auto;
margin-right: auto;
margin-bottom: 1em;
}
.play.yellow {
background: rgba(255, 176, 66, 0.008);
border-radius: 50%;
box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
animation: pulse-yellow 4s infinite;
position: fixed;
bottom: 40%;
width: 200px;
height: 200px;
background-image: url('/images/playbtn.svg');
background-repeat:no-repeat;
background-attachment:fixed;
background-position: 58% 55%;
}
.pause.yellow {
background: rgba(255, 176, 66, 0.008);
border-radius: 50%;
box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
opacity: 0.05;
position: fixed;
bottom: 40%;
width: 200px;
height: 200px;
background-image: url('/images/pausebtn.svg');
background-size: 130px 100px;
background-repeat:no-repeat;
background-attachment:fixed;
background-position: center;
}
.pause.yellow:hover{
opacity: 0.5;
}
@keyframes pulse-yellow {
0% {
transform: scale(0.95);
box-shadow: 0 0 0 0 rgba(255, 177, 66, 0.7);
}
70% {
transform: scale(1);
box-shadow: 0 0 0 10px rgba(255, 177, 66, 0);
}
100% {
transform: scale(0.95);
box-shadow: 0 0 0 0 rgba(255, 177, 66, 0);
}
}
</style>

View File

@@ -0,0 +1,606 @@
<template>
<div class="rnboplayer">
<button v-if="preparedForWebAudio" @click="fixAudio">
fix Audio
</button>
<button v-if="preparedForWebAudio" @click="skip('next')">
skip sound
</button>
<button v-if="preparedForWebAudio" @click="skip('previous')">
previous sound
</button>
<button v-if="preparedForWebAudio" @click="mute">
mute AudioElements
</button>
<button v-if="preparedForWebAudio" @click="connectMusicDevice">
connect Music
</button>
<button v-if="preparedForWebAudio" @click="connectNoiseNode">
connect Noise
</button>
<button v-if="preparedForWebAudio" @click="connectNodes">
connect Nodes
</button>
<button v-if="preparedForWebAudio" @click="disconnectMusicNodes">
disconnect Music Nodes
</button>
<div v-if="selectedAudioTitle">
<AudioElement
:ref="selectedAudio.title"
:key="selectedAudio.id"
:src="selectedAudio.src"
:title="selectedAudio.title"
@update:playing="handlePlayingUpdate"
@volume="changeMusicVolume"
>
<template #default="{ play, pause }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
<div style="display:none">
<button v-if="!isPlaying()" @click="play">
Play
</button>
<button v-else @click="pause">
Pause
</button>
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</div>
<AudioElement
ref="Noise"
key="5"
:src="sourceNoiseURL"
title="Noise"
@volume="changeNoiseVolume"
>
<template #default="{ play, pause }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
<div style="display:none">
<button v-if="!isPlaying()" @click="play">
Play
</button>
<button v-else @click="pause">
Pause
</button>
</div>
</template>
</AudioElement>
</div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import importedTestPatcher from '@/assets/patch/test_patch.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
import { ANC } from '~/stores/interfaces/ANC'
import { HeadsetType } from '~/stores/interfaces/HeadsetType'
import { useUserStore } from '~/stores/user'
function getAttenuationFactor (anc, ht) {
  // Map the user's headset configuration (ANC on/off, over-ear vs in-ear)
  // to a masking-attenuation factor for the noise patch.
  // FIX: removed a stray `useUserStore()` call whose result was discarded.
  if (anc === ANC.Yes && ht === HeadsetType.OverEar) { return 0.0562 }
  if (anc === ANC.No && ht === HeadsetType.OverEar) { return 0.5623 }
  if (anc === ANC.Yes && ht === HeadsetType.InEar) { return 0.1778 }
  if (anc === ANC.No && ht === HeadsetType.InEar) { return 0.0316 }
  // Fallback: same factor as over-ear without ANC.
  return 0.5623
}
export default {
components: {
AudioElement
},
props: {
soundscape: {
type: String,
default: 'Lagoon'
}
},
data () {
return {
audioList: [
{ id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
{ id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
{ id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
{ id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
],
noise: { id: 5, title: 'Noise', src: window.location.origin + '/masking/noise.flac' },
selectedAudioTitle: '',
currentElement: {},
createdNodes: {},
musicPatch: importedMusicPatcher,
noisePatch: importedNoisePatcher,
audioContext: null,
toggleMute: false
}
},
computed: {
  // Playlist entry matching the selected title, or null when none selected.
  selectedAudio () {
    return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
  },
  // NOTE(review): useAudioStore is not imported in this file's <script>
  // block — presumably resolved by Nuxt auto-imports; verify.
  isPlaying () {
    return useAudioStore().isPlaying()
  },
  // True once the whole WebAudio node graph has been created.
  preparedForWebAudio () {
    return this.areAllNodesAvailable()
  },
  // URL of the masking-noise file, served from the app origin.
  sourceNoiseURL () {
    return window.location.origin + '/masking/noise.flac'
  }
},
onRenderTriggered () {
// useNuxtApp().$logger.log('render AudioNoiseTest-----------' + props.title)
},
beforeUnmount () {
this.disconnectNodes()
if (this.audioContext) { this.audioContext.close() }
this.audioContext = null
},
beforeMount () {
this.audioContext = new AudioContext()
},
mounted () {
this.selectAudioByTitle(this.audioList[0].title)
this.currentElement = this.audioList[0]
this.addUserNavigationHandling()
this.musicPatch = importedMusicPatcher
this.noisePatch = importedNoisePatcher
this.testPatch = importedTestPatcher
},
methods: {
fixAudio () {
  // Debug entry point (bound to the "fix Audio" button): re-routes both
  // sources through their gain nodes to the destination and unmutes the
  // media elements to recover audible output.
  this.connectToGainToDestination()
  this.unmute()
  // this.connectDirectToDestination() // brings the sound back
},
connectDirectToDestination () {
this.createdNodes.noiseSource.connect(this.createdNodes.noiseSource.context.destination)
this.createdNodes.musicSource.connect(this.createdNodes.musicSource.context.destination)
},
connectToGainToDestination () {
this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.createdNodes.noiseGain.context.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0.8, this.createdNodes.noiseGain.context.currentTime + 3)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.createdNodes.noiseGain.context.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0.8, this.createdNodes.musicGain.context.currentTime + 3)
this.createdNodes.noiseGain.connect(this.createdNodes.noiseGain.context.destination)
this.createdNodes.musicGain.connect(this.createdNodes.musicGain.context.destination)
},
mute () {
// Assuming gainNode is an instance of GainNode and audioContext is your AudioContext
let newValue = 0.5 // The new gain value you want to ramp to
const rampDuration = 0.1 // Duration of the ramp in seconds
const currentTime = this.audioContext.currentTime
if (this.toggleMute) {
newValue = 0
const noiseAudioElement = this.$refs[this.currentElement.title].audioElement
const musicAudioElement = this.$refs.Noise.audioElement
noiseAudioElement.volume = 0
musicAudioElement.volume = 0
musicAudioElement.muted = true
noiseAudioElement.muted = true
this.createdNodes.musicGain.gain.setValueAtTime(this.createdNodes.musicGain.gain.value, currentTime) // Start at the current value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(newValue, currentTime + rampDuration)
this.createdNodes.noiseGain.gain.setValueAtTime(this.createdNodes.noiseGain.gain.value, currentTime) // Start at the current value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(newValue, currentTime + rampDuration)
this.toggleMute = false
} else {
newValue = 1
this.toggleMute = true
const noiseAudioElement = this.$refs[this.currentElement.title].audioElement
const musicAudioElement = this.$refs.Noise.audioElement
noiseAudioElement.volume = 1
musicAudioElement.volume = 1
musicAudioElement.muted = false
noiseAudioElement.muted = false
this.createdNodes.musicGain.gain.setValueAtTime(this.createdNodes.musicGain.gain.value, currentTime) // Start at the current value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(newValue, currentTime + rampDuration)
this.createdNodes.noiseGain.gain.setValueAtTime(this.createdNodes.noiseGain.gain.value, currentTime) // Start at the current value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(newValue, currentTime + rampDuration)
}
},
unmute () {
const noiseAudioElement = this.$refs[this.currentElement.title].audioElement
const musicAudioElement = this.$refs.Noise.audioElement
noiseAudioElement.volume = 1
musicAudioElement.volume = 1
musicAudioElement.muted = false
noiseAudioElement.muted = false
},
changeMusicVolume (newValue) {
if (!this.createdNodes.musicGain.gain) { return }
// useNuxtApp().$logger.log(this.createdNodes.musicGain.gain)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.createdNodes.musicGain.context.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(newValue / 100, this.createdNodes.musicGain.context.currentTime + 0.30)
},
changeNoiseVolume (newValue) {
const noiseGain = this.createdNodes.noiseGain
if (!noiseGain) { return }
noiseGain.gain.cancelScheduledValues(noiseGain.context.currentTime)
noiseGain.gain.linearRampToValueAtTime(newValue / 100, noiseGain.context.currentTime + 0.30)
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
skip (direction) {
const nextSong = this.getSong(this.selectedAudioTitle, direction)
this.selectAudioByTitle(nextSong.title)
},
updateCurrentElement (title) {
this.currentElement = this.audioList.find(e => e.title === title)
},
selectAudioByTitle (title) {
this.selectedAudioTitle = title
// const audioElement = this.audioList.find(e => e.title === this.selectedAudioTitle)
this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
// useNuxtApp().$logger.log('refs ', refs)
// useNuxtApp().$logger.log('currentElement: ', this.selectedAudioTitle)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
connectMusicNode () {
  // Wire the music-only path: element source → stereo splitter → RNBO music
  // device (mic on input 0, split L/R on inputs 1/2) → outputSplitter →
  // speakers. Connection order mirrors the device's input layout.
  const {
    microphoneSource, musicSplitter,
    outputSplitter, musicSource, musicDevice
  } = this.createdNodes
  musicSource.connect(musicSplitter)
  microphoneSource.connect(musicDevice, 0, 0)
  musicSplitter.connect(musicDevice, 0, 1) // 1 channel: Left
  musicSplitter.connect(musicDevice, 1, 2) // 1 channel: Right
  musicDevice.connect(outputSplitter, 0, 0)
  outputSplitter.connect(outputSplitter.context.destination)
},
async connectTestNode () {
  // Build a throwaway RNBO test device and feed it the noise channels on
  // inputs 1/2 and the music channels on inputs 3/4, output to the speakers.
  // NOTE(review): input 0 (the microphone on the other devices) is left
  // unconnected here — confirm the test patch expects that.
  const testPatch = await createRNBODevice(this.audioContext, importedTestPatcher)
  const testPatchNode = testPatch.node
  const {
    musicSource, musicSplitter, noiseSource, noiseSplitter
  } = this.createdNodes
  musicSource.connect(musicSplitter)
  noiseSource.connect(noiseSplitter)
  noiseSplitter.connect(testPatchNode, 0, 1) // 1 channel: Left
  noiseSplitter.connect(testPatchNode, 1, 2)
  musicSplitter.connect(testPatchNode, 0, 3) // 1 channel: Left
  musicSplitter.connect(testPatchNode, 1, 4) // 1 channel: Right
  testPatchNode.connect(this.audioContext.destination)
},
connectMusicDevice () {
  // Music chain with gain control: mic → device input 0, split music L/R →
  // inputs 1/2; the device output feeds both musicGain (→ speakers) and
  // outputSplitter (tapped later as side-chain input of the noise device).
  const { musicDevice, microphoneSource, musicSplitter, musicSource, musicGain, outputSplitter } = this.createdNodes
  // useNuxtApp().$logger.log('connect Music device')
  musicSource.connect(musicSplitter, 0, 0)
  // musicSource.connect(musicSource.context.destination)
  microphoneSource.connect(musicDevice, 0, 0)
  musicSplitter.connect(musicDevice, 0, 1)
  musicSplitter.connect(musicDevice, 1, 2)
  musicDevice.connect(musicGain)
  musicDevice.connect(outputSplitter)
  musicGain.connect(this.audioContext.destination)
  // useNuxtApp().$logger.log('Music device connected')
  // Reset the music path to unity gain after wiring.
  musicGain.gain.value = 1
},
connectNoiseNode () {
  // Wire the noise path on top of the music path: mic → noise device input 0,
  // split noise L/R → inputs 1/2, and the music device's output (via
  // outputSplitter) → inputs 3/4 as a side-chain. Device output → speakers.
  const {
    noiseSplitter, microphoneSource, noiseDevice,
    outputSplitter, noiseSource
  } = this.createdNodes
  // The music chain must exist first, since outputSplitter is its tap point.
  this.connectMusicDevice()
  noiseSource.connect(noiseSplitter, 0, 0)
  microphoneSource.connect(noiseDevice, 0, 0)
  noiseSplitter.connect(noiseDevice, 0, 1)
  noiseSplitter.connect(noiseDevice, 1, 2) // 2 channels
  outputSplitter.connect(noiseDevice, 0, 3)
  outputSplitter.connect(noiseDevice, 1, 4)
  noiseDevice.connect(this.audioContext.destination)
},
connectNodes () {
  // Build the full graph in one pass: music and noise element sources each
  // feed their RNBO device (mic on input 0, split stereo on inputs 1/2); the
  // music device's output is side-chained into the noise device on inputs
  // 3/4; both device outputs reach the speakers through their gain nodes.
  // Destructure for easier access
  const {
    musicSource, musicSplitter, microphoneSource,
    musicDevice, outputSplitter, musicGain,
    noiseSource, noiseSplitter, noiseDevice,
    noiseGain
  } = this.createdNodes
  // Assuming all nodes are created and references to them are correct
  // Connect music source to splitter (Stereo to 2 Channels)
  musicSource.connect(musicSplitter) // 2 channels: L, R
  // Connect microphone to musicDevice input 0 (Mono)
  microphoneSource.connect(musicDevice, 0, 0) // 1 channel: Microphone
  // Connect musicSplitter to musicDevice inputs 1 and 2 (Stereo)
  musicSplitter.connect(musicDevice, 0, 1) // 1 channel: Left
  musicSplitter.connect(musicDevice, 1, 2) // 1 channel: Right
  // Connect musicDevice to outputSplitter (Stereo out)
  musicDevice.connect(outputSplitter) // Assuming musicDevice outputs stereo
  // Optionally connect musicDevice to musicGain for additional gain control (Stereo)
  musicDevice.connect(musicGain) // Assuming musicDevice outputs stereo, connected to both channels of musicGain
  // Connect noise source to noiseSplitter (Stereo to 2 Channels)
  noiseSource.connect(noiseSplitter) // 2 channels: L, R
  // Connect microphone to noiseDevice input 0 (Mono)
  microphoneSource.connect(noiseDevice, 0, 0) // 1 channel: Microphone
  // Connect noiseSplitter to noiseDevice inputs 1 and 2 (Stereo)
  noiseSplitter.connect(noiseDevice, 0, 1) // 1 channel: Left
  noiseSplitter.connect(noiseDevice, 1, 2) // 1 channel: Right
  // Connect outputSplitter to noiseDevice inputs 3 and 4 (Stereo from musicDevice)
  outputSplitter.connect(noiseDevice, 0, 3) // 1 channel: Left from musicDevice
  outputSplitter.connect(noiseDevice, 1, 4) // 1 channel: Right from musicDevice
  // Assuming noiseDevice outputs stereo, connect to noiseGain (Stereo)
  noiseDevice.connect(noiseGain) // Assuming noiseDevice outputs stereo, connected to both channels of noiseGain
  // NOTE(review): the ANC / headset-type choice is hard-coded to
  // ('Yes', 'OverEar') — confirm it should not come from user settings.
  const attenuation = noiseDevice.parametersById.get('attenuation')
  attenuation.value = getAttenuationFactor('Yes', 'OverEar')
  noiseGain.connect(this.audioContext.destination)
  musicGain.connect(this.audioContext.destination)
},
handlePlayingUpdate (isPlaying) {
  // React to play/pause events from the AudioElement: on play, (re)build the
  // WebAudio graph — microphone stream, two RNBO devices, media-element
  // sources, splitters and gains — and on pause, tear everything down.
  const ContextClass = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext)
  let audioContext = this.audioContext ? this.audioContext : new ContextClass()
  let microphoneSource = null
  if (isPlaying) {
    // Web Audio API is available.
    // const musicAudioComponent = this.$refs[this.currentElement.title].audioElement
    if (this.areAllNodesAvailable()) {
      this.disconnectNodes()
      // reconnecting because all Nodes are there
      this.connectNoiseNode()
      this.unmute()
      // useNuxtApp().$logger.log('Connected everything because it was there already')
    } else {
      // Request a raw microphone stream (browser DSP disabled) before
      // building the rest of the graph.
      navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: false,
          noiseSuppression: false,
          autoGainControl: false
        },
        video: false
      })
        .then((micStream) => {
          // NOTE(review): this overwrites the context chosen above with
          // `this.audioContext`; if that was null, the fallback context
          // created via `new ContextClass()` is silently discarded — confirm
          // which context is intended here.
          audioContext = this.audioContext
          microphoneSource = audioContext.createMediaStreamSource(micStream)
          this.createdNodes.microphoneSource ||= microphoneSource
          // Return both audioContext and microphoneSource to the next link in the chain
          return { audioContext, microphoneSource }
        })
        .then(({ audioContext, microphoneSource }) => {
          // First RNBO device creation
          return createRNBODevice(audioContext, this.noisePatch).then((noiseRNBODevice) => {
            // useNuxtApp().$logger.log({ noiseRNBODevice }, 'Noise RNBODEVICE created: with ' + noiseRNBODevice.node.numberOfInputs + ' inputs and ' + noiseRNBODevice.node.numberOfOutputs + ' outputs')
            this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
            // Pass everything needed by the next step down the chain.
            return { audioContext, microphoneSource, noiseRNBODevice }
          })
        })
        .then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
          // Second RNBO device creation
          return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
            // useNuxtApp().$logger.log({ noiseRNBODevice }, 'Music RNBODEVICE created: with ' + musicRNBODevice.node.numberOfInputs + ' inputs and ' + musicRNBODevice.node.numberOfOutputs + ' outputs')
            // Return all necessary objects for the final setup
            this.createdNodes.musicRNBODevice ||= musicRNBODevice
            return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
          })
        }).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
          // Ensure this.createdNodes is initialized
          this.createdNodes ||= {}
          const noiseAudioElement = this.$refs.Noise.$refs.audioElement
          const musicAudioElement = this.$refs[this.currentElement.title].$refs.audioElement
          // Assign nodes if they don't exist (||= keeps nodes from a previous
          // run; createMediaElementSource may only be called once per element).
          this.createdNodes.microphoneSource ||= microphoneSource
          this.createdNodes.musicDevice ||= musicRNBODevice.node
          this.createdNodes.noiseDevice ||= noiseRNBODevice.node
          this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
          this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
          this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
          this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
          this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
          this.createdNodes.musicGain ||= audioContext.createGain()
          this.createdNodes.noiseGain ||= audioContext.createGain()
          this.createdNodes.merger ||= audioContext.createChannelMerger(4)
          this.createdNodes.musicGain.gain.value = 0.901
          this.createdNodes.noiseGain.gain.value = 0.901 // macht nichts
          musicAudioElement.muted = false
          noiseAudioElement.muted = false
          // NOTE(review): no connect* call runs here — the graph is only
          // wired on the "all nodes available" fast path above. Confirm one
          // of the commented calls below should be re-enabled.
          // this.connectNodes()
          // this.connectTestNode()
        })
        .catch((_error) => {
          // NOTE(review): the error is swallowed silently; consider at least
          // logging it before tearing the graph down.
          this.disconnectNodes()
          if (this.audioContext) {
            this.audioContext.destination.disconnect()
            // audioContext.destination = null
          }
        })
    }
  } else {
    this.disconnectNodes()
  }
},
disconnectMusicNodes () {
const { musicDevice, musicSplitter, musicSource, outputSplitter, musicGain } = this.createdNodes
// useNuxtApp().$logger.log('discconnect Music device')
musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 2)
setTimeout(() => {
musicSource.disconnect()
// microphoneSource.disconnect()
musicSplitter.disconnect()
musicDevice.disconnect()
outputSplitter.disconnect()
// useNuxtApp().$logger.log('Music device disconnected')
musicSource.disconnect()
}, 2100)
},
fadeInToNewSoundscape (oldNode, newNode, audioContext, duration = 1) {
// Create two gain nodes, one for each audio source
const oldGain = this.createdNodes.musicGain
const newGain = audioContext.createGain()
// Initially, the old node is at full volume, and the new one is silent
oldGain.gain.setValueAtTime(oldGain.value, audioContext.currentTime)
newGain.gain.setValueAtTime(0, audioContext.currentTime)
// Connect the nodes
oldNode.connect(oldGain).connect(audioContext.destination)
newNode.connect(newGain).connect(audioContext.destination)
// Schedule the fade out of the old node
oldGain.gain.linearRampToValueAtTime(0, audioContext.currentTime + duration)
// Schedule the fade in of the new node
newGain.gain.linearRampToValueAtTime(1, audioContext.currentTime + duration)
// Optionally, stop the old node after the fade out
oldNode.mediaElement.addEventListener('ended', () => {
oldNode.disconnect()
})
// Start playing the new node if it's not already playing
if (newNode.mediaElement.paused) {
newNode.mediaElement.play().catch(_error => 'Playback error:', error)
}
},
disconnectNodes () {
// useNuxtApp().$logger.log('DISCONNECT ALL NODES')
// Ensure this.createdNodes is defined and is an object
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
}
}
}
}
</script>
<style scoped>
.rnboplayer {
position: fixed;
width: 220px; /* Or specify a fixed width like 220px if you prefer */
max-width: 220px; /* This line might be redundant depending on your width strategy */
height: fit-content;
display: inline-grid;
z-index: 2;
bottom: 11%;
left: 0;
right: 0;
margin-left: auto;
margin-right: auto;
margin-bottom: 1em;
}
.play.yellow {
background: rgba(255, 176, 66, 0.008);
border-radius: 50%;
box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
animation: pulse-yellow 4s infinite;
position: fixed;
bottom: 40%;
width: 200px;
height: 200px;
background-image: url('/images/playbtn.svg');
background-repeat:no-repeat;
background-attachment:fixed;
background-position: 58% 55%;
}
.pause.yellow {
background: rgba(255, 176, 66, 0.008);
border-radius: 50%;
box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
opacity: 0.05;
position: fixed;
bottom: 40%;
width: 200px;
height: 200px;
background-image: url('/images/pausebtn.svg');
background-size: 130px 100px;
background-repeat:no-repeat;
background-attachment:fixed;
background-position: center;
}
.pause.yellow:hover{
opacity: 0.5;
}
@keyframes pulse-yellow {
0% {
transform: scale(0.95);
box-shadow: 0 0 0 0 rgba(255, 177, 66, 0.7);
}
70% {
transform: scale(1);
box-shadow: 0 0 0 10px rgba(255, 177, 66, 0);
}
100% {
transform: scale(0.95);
box-shadow: 0 0 0 0 rgba(255, 177, 66, 0);
}
}
</style>

View File

@@ -0,0 +1,110 @@
<template>
<h1>Glitching in audio</h1>
<h1>Test Version Noise Patch Music: mit WebAudio, ohne Noise-Patch & ohne Music-Patch, aber Musik</h1>
<h2>Currently Artifacts caused by the import of the RNBO Patch </h2>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import { type Device, createDevice } from '@rnbo/js'
import AudioElement from '~/components/experiments/AudioElement.vue'
import { useAudioStore } from '~/stores/audio'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
export default {
name: 'MicNoisePatchMusic',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext() as AudioContext,
playing: false,
paused: false,
createdNodes: {} as any,
noiseDevice: null as Device | null,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string
}
},
beforeMount () {
// useNuxtApp().$logger.log('beforeMount')
this.createNoiseDevice()
},
methods: {
async createNoiseDevice () {
  // Lazily build the RNBO noise device once; later calls are no-ops.
  if (!this.noiseDevice) {
    // Fix: the patcher is a plain imported JSON object, not a promise — the
    // original's `await` on it was a no-op.
    const patcher = importedNoisePatcher as any
    //
    // the method createDevice is called and right after the audio glitches start
    //
    this.noiseDevice = await createDevice({
      context: this.audioContext,
      patcher
    })
    // useNuxtApp().$logger.log(patcher)
  }
},
async handlePlayingUpdate () {
  // Build (once) the element-source → gain → destination chains for the noise
  // and music <audio> elements, make sure the RNBO device exists, then unmute.
  const noiseElement = this.$refs.Noise as typeof AudioElement
  const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
  const musicElement = this.$refs.Music as typeof AudioElement
  const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
  const audioContext = this.audioContext
  const destination = this.audioContext.destination
  this.createdNodes.musicGain ||= audioContext.createGain()
  this.createdNodes.noiseGain ||= audioContext.createGain()
  // Bug fix: createMediaElementSource() may be called only once per element.
  // The original re-created both sources on every update, which throws an
  // InvalidStateError the second time playback starts; reuse them instead.
  this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
  this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
  this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
  this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
  this.createdNodes.noiseGain.connect(destination)
  this.createdNodes.musicGain.connect(destination)
  await this.createNoiseDevice()
  musicAudioElement.muted = false
  noiseAudioElement.muted = false
},
updateNoiseGain (volume: number) {
  // Ramp the noise gain to `volume` over 300 ms. Robustness fix: ignore
  // slider events that arrive before the gain node exists (the original
  // threw a TypeError; the sibling NoiseMusicGain component already guards).
  const noiseGain = this.createdNodes.noiseGain
  if (!noiseGain) { return }
  noiseGain.gain.linearRampToValueAtTime(volume, noiseGain.context.currentTime + 0.30)
},
updateMusicGain (volume: number) {
  // Ramp the music gain to `volume` over 300 ms. Robustness fix: no-op when
  // the gain node has not been created yet (the original threw a TypeError).
  const musicGain = this.createdNodes.musicGain
  if (!musicGain) { return }
  musicGain.gain.linearRampToValueAtTime(volume, musicGain.context.currentTime + 0.30)
}
}
}
</script>

View File

@@ -0,0 +1,192 @@
<template>
<h1>Test Version NoiseMusicGain: mit WebAudio & Gain und PlayPause, Laden von Devices</h1>
<h2>Play State: {{ playing }} </h2>
<p>
The method refreshAudioContext helps to free the resources when we stop playing the audio;
without it, playback would get louder each time we start playing.
</p>
<p>
Whenever I view this page the audio starts playing; when I hit 'space' it fades out within 2 seconds.
When I start playing again, nothing happens — I would expect it to play.
</p>
<div v-if="createdNodes.musicGain">
{{ createdNodes.musicGain.gain.value }}
</div>
<div v-if="createdNodes.noiseGain">
{{ createdNodes.noiseGain.gain.value }}
</div>
<AudioElement
ref="Microphone"
key="6"
src="/sounds/debug/LMusik_RSprache.mp3"
title="Microphone"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/headphone.png">
</div>
</template>
</AudioElement>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:loaded="noiseReady=true"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
@update:fadeout="fadeOutGains"
@update:loaded="musicReady=true"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import { type Device } from '@rnbo/js'
import AudioElement from '~/components/experiments/AudioElement.vue'
import { useAudioStore } from '~/stores/audio'
import importedNoisePatcher from '@/assets/patches/1.3.1_versions/singleBand/adaptive_masking_controller_NoMusic.rnbopat.export.json'
import importedMusicPatcher from '@/assets/patches/1.3.1_versions/nomusicPatch/export/js/ASM_Vers_4in2out_48kHz_NoMusik.rnbopat.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
import setupNodes from '~/components/Player/Nodes'
export default {
name: 'NoiseMusicGain',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
playing: useAudioStore().playing,
paused: false,
createdNodes: {} as any,
fading: false,
noiseReady: false,
musicReady: false,
musicDevice: {} as Device,
noiseDevice: {} as Device,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string
}
},
async beforeMount () {
this.noiseDevice = await createRNBODevice(this.audioContext, importedNoisePatcher)
this.musicDevice = await createRNBODevice(this.audioContext, importedMusicPatcher)
// useNuxtApp().$logger.log('beforeMount: created RNBO devices ', this.musicDevice, this.noiseDevice)
},
mounted () {
},
methods: {
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
  // Swap in a brand-new AudioContext (closing the old one) so WebAudio
  // resources are released when playback stops; without this the output got
  // louder on every restart.
  const replacement = new AudioContext()
  this.audioContext.close()
  useAudioStore().audioContext = replacement
  this.audioContext = useAudioStore().getContext()
},
fadeOutGains () {
  // Ramp both gains to silence over 3 s and hold the `fading` flag while the
  // ramp is in progress.
  const now = this.audioContext.currentTime
  const fadeSeconds = 3
  this.fading = true
  if (this.createdNodes.noiseGain) {
    this.createdNodes.noiseGain.gain.cancelScheduledValues(now)
    this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, now + fadeSeconds)
  }
  if (this.createdNodes.musicGain) {
    this.createdNodes.musicGain.gain.cancelScheduledValues(now)
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, now + fadeSeconds)
  }
  // Bug fix: the original passed an *absolute* context timestamp times 1000
  // as the timeout ((currentTime + 1.3) * 1000), so the flag could stay set
  // for minutes — and its 1.3 s did not even match the 3 s ramps above.
  setTimeout(() => {
    this.fading = false
  }, fadeSeconds * 1000)
},
fadeInGains () {
  // Ramp both gains up to unity over 6 s, holding the `fading` flag while
  // the ramp is in progress.
  const fadeSeconds = 6
  const fadeEnd = this.audioContext.currentTime + fadeSeconds
  this.fading = true
  const noiseGain = this.createdNodes.noiseGain
  const musicGain = this.createdNodes.musicGain
  noiseGain.gain.linearRampToValueAtTime(1.0, fadeEnd)
  musicGain.gain.linearRampToValueAtTime(1.0, fadeEnd)
  // Bug fix: clear the flag after the fade *duration*; the original used the
  // absolute context end time multiplied by 1000 as the timeout.
  setTimeout(() => {
    this.fading = false
  }, fadeSeconds * 1000)
},
async handlePlayingUpdate (state: boolean) {
  // On play: wire element sources through setupNodes() and fade in.
  // On stop: fade handled elsewhere; reset the node map and refresh the
  // AudioContext after 1.5 s.
  if (!this.musicReady && !this.noiseReady) {
    // useNuxtApp().$logger.log('not yet ready' + this.musicReady + ' ready noise' + this.noiseReady)
    return
  }
  if (state && useAudioStore().isPlaying()) {
    // Music has just started react on it.
    const noiseElement = this.$refs.Noise as typeof AudioElement
    const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
    const musicElement = this.$refs.Music as typeof AudioElement
    const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
    const audioContext = this.audioContext
    this.createdNodes.musicGain ||= audioContext.createGain()
    this.createdNodes.noiseGain ||= audioContext.createGain()
    this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
    this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
    musicAudioElement.muted = false
    noiseAudioElement.muted = false
    // NOTE(review): these sources are re-created on every start; that only
    // works because the stop path below replaces the whole AudioContext —
    // if that reset is skipped, createMediaElementSource() throws on reuse.
    this.createdNodes.noiseSource = audioContext.createMediaElementSource(noiseAudioElement)
    this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
    const gains = await setupNodes(this.createdNodes.noiseSource, this.createdNodes.musicSource)
    this.createdNodes.musicGain = gains.pop() // Order is important: first musicGain, then noiseGain
    this.createdNodes.noiseGain = gains.pop()
    // useNuxtApp().$logger.log('GAINVALUES M:' + this.createdNodes.musicGain.gain.value, ' N:' + this.createdNodes.noiseGain.gain.value)
    this.fadeInGains()
  } else {
    if (this.fading) { return }
    // Music has just stopped react on it.
    this.playing = false
    setTimeout(() => {
      // NOTE(review): this resets the node map to an *array* literal while
      // the rest of the component treats createdNodes as an object — confirm
      // `{}` was meant.
      this.createdNodes = []
      this.refreshAudioContext()
    }, 1500)
  }
},
updateNoiseGain (volume: number) {
  // Follow the noise volume slider with a 300 ms ramp; no-op until the node exists.
  const gainNode = this.createdNodes.noiseGain
  if (gainNode) {
    gainNode.gain.linearRampToValueAtTime(volume, gainNode.context.currentTime + 0.30)
  }
},
updateMusicGain (volume: number) {
  // Follow the music volume slider with a 300 ms ramp; no-op until the node exists.
  const gainNode = this.createdNodes.musicGain
  if (gainNode) {
    gainNode.gain.linearRampToValueAtTime(volume, gainNode.context.currentTime + 0.30)
  }
}
}
}
</script>

View File

@@ -0,0 +1,97 @@
<template>
<h1>Glitching in audio</h1>
<h1>Test Version Mic NoisePatch Music: mit WebAudio, mit Noise-Patch & ohne Music-Patch, aber Musik</h1>
<h2>Currently Artifacts caused by the import of the RNBO Patch </h2>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import { createDevice } from '@rnbo/js'
import AudioElement from '~/components/experiments/AudioElement.vue'
import { useAudioStore } from '~/stores/audio'
import importedNoisePatcher from '@/assets/patch/test_patch.export.json'
export default {
name: 'MicNoisePatchMusic',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
playing: false,
paused: false,
createdNodes: {} as any,
noiseDevice: null,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src
}
},
beforeMount () {
  // Pre-create an RNBO device from the test patch before the component mounts.
  // NOTE(review): the created device is immediately discarded — only the side
  // effect of loading/compiling the patch is kept. Confirm that is intended.
  const patcher = importedNoisePatcher as any
  return createDevice({
    context: useAudioStore().getContext(),
    patcher
  }).then((_device) => {
    // useNuxtApp().$logger.log('RNBO device created:', device)
  })
},
methods: {
handlePlayingUpdate () {
  // Lazily wire both <audio> elements through their gain nodes to the
  // speakers, then unmute them.
  const noiseElement = this.$refs.Noise as typeof AudioElement
  const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
  const musicElement = this.$refs.Music as typeof AudioElement
  const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
  const audioContext = this.audioContext
  const destination = this.audioContext.destination
  this.createdNodes.musicGain ||= audioContext.createGain()
  this.createdNodes.noiseGain ||= audioContext.createGain()
  // Bug fix: createMediaElementSource() throws an InvalidStateError on a
  // second call for the same element, so reuse existing sources instead of
  // re-creating them on every play event.
  this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
  this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
  this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
  this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
  this.createdNodes.noiseGain.connect(destination)
  this.createdNodes.musicGain.connect(destination)
  musicAudioElement.muted = false
  noiseAudioElement.muted = false
},
updateNoiseGain (volume: number) {
  // Ramp the noise gain to `volume` over 300 ms. Robustness fix: no-op when
  // the node does not exist yet (matches the guarded sibling component).
  const noiseGain = this.createdNodes.noiseGain
  if (!noiseGain) { return }
  noiseGain.gain.linearRampToValueAtTime(volume, noiseGain.context.currentTime + 0.30)
},
updateMusicGain (volume: number) {
  // Ramp the music gain to `volume` over 300 ms. Robustness fix: no-op when
  // the node does not exist yet (matches the guarded sibling component).
  const musicGain = this.createdNodes.musicGain
  if (!musicGain) { return }
  musicGain.gain.linearRampToValueAtTime(volume, musicGain.context.currentTime + 0.30)
}
}
}
</script>

View File

@@ -0,0 +1,676 @@
<template>
<div
class="container"
style="display: flex; flex-wrap: wrap; gap:20px"
>
<h1>The component is currently not working regarding to the change of buffersource nodes to audioelement source nodes</h1>
<div v-if="playing == false" class="play yellow" @click.prevent="startPlay">
<audio id="hiddenAudio" controls style="display:none;" />
</div>
<div v-if="playing == true && isStartPlayRunning == false" class="pause yellow" @click.prevent="stopPlay">
<audio id="hiddenAudio" controls style="display:none;" />
</div>
<div v-if="isStartPlayRunning" class="spinner-border spinner-border-sm yellow" role="status">
<span class="sr-only">"Loading..."</span>
</div>
<div class="row">
<div class="slider">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
<input
id="gain-control"
v-model="outputNoiseGain"
type="range"
min="0"
max="100"
step="1"
@wheel="changeNoiseGain"
>
</div>
</div>
<div class="row">
<div class="slider">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
<input
id="gain-control"
v-model="outputMusicGain"
type="range"
min="0"
max="100"
step="1"
@wheel="changeMusicGain"
>
</div>
</div>
</div>
</template>
<script lang="ts">
import { type Device } from '@rnbo/js'
import { mapActions, mapState } from 'pinia'
import { useAudioStore } from '@/stores/interfaces/oldstores/audio_old'
import { ANC } from '@/stores/interfaces/ANC'
import { HeadsetType } from '@/stores/interfaces/HeadsetType'
import {
createRNBODevice
} from '@/lib/AudioFunctions'
import { useMicStore } from '@/stores/microphone'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import { useUserStore } from '~/stores/user'
enum AttenuationFactor{
OverEarANC = 0.0562,
OverEar = 0.5623,
InEar = 0.1778,
InEarANC = 0.0316
}
interface ComponentData {
audioContext: AudioContext | null,
// Howler
html5AudioPoolSize: number,
// sources:
micSource: MediaStreamAudioSourceNode | null,
noiseAudioSource: AudioBufferSourceNode | null,
musicAudioSource: AudioBufferSourceNode | null,
music: Howl | null,
// RNBO devices:
noiseDevice: Device | null,
musicDevice: Device | null,
// modules:
noiseGainNode: GainNode | null,
musicGainNode: GainNode | null,
// user input:
outputNoiseGain: number | 0,
outputMusicGain: number | 0,
// music splitter node
musicInputChannelSplitter :ChannelSplitterNode | null,
noiseInputChannelSplitter: ChannelSplitterNode | null,
// keep for later:
micStream: MediaStream | null,
// fading
isFading: boolean | false,
// DEBUG:
eventLog: string,
// Button handling to avoid spamming button as long as the audio preossing is busy
isStopPlayRunning: boolean,
isStartPlayRunning: boolean,
// component state:
// isPlaying: boolean <- replaced with the stored state
startTime: number
pauseTime: number
startOffset: number
hoveringNoiseSlider: false,
hoveringMusicSlider: false
}
export default {
name: 'RNBOPlayer',
props: {
scenery: {
type: String,
default: 'forest'
}
},
data (): ComponentData {
return {
audioContext: null,
micSource: null,
// Howler.js
html5AudioPoolSize: 0,
music: null,
noiseAudioSource: null,
musicAudioSource: null,
// RNBO devices:
noiseDevice: null,
musicDevice: null,
// modules:
noiseGainNode: null,
musicGainNode: null,
// user input:
outputNoiseGain: 0.0,
outputMusicGain: 0.0,
// music splitter node
// Music input channel splitter:
musicInputChannelSplitter: null,
noiseInputChannelSplitter: null,
// keep for later:
micStream: null,
// fading
isFading: false,
// component state:
// isPlaying: false, replaced with the audio store state
startTime: 0,
pauseTime: 0,
startOffset: 0,
// Handle state of buttons
isStopPlayRunning: false,
isStartPlayRunning: false,
eventLog: '',
// audio context playtime
hoveringNoiseSlider: false,
hoveringMusicSlider: false
}
},
computed: {
...mapState(useAudioStore, ['playing']),
...mapState(useUserStore, ['user']),
normalizedNoiseGain: function () {
return this.outputNoiseGain / 100.0
},
normalizedMusicGain: function () {
return this.outputMusicGain / 100.0
}
},
watch: {
scenery () {
// useNuxtApp().$logger.log('Scenery changed to: ' + this.scenery)
},
/**
* This methods watches changes on the music gain slider and use a simple ramp for adaption
* @param newValue
*/
async outputMusicGain (newValue) {
  // Follow the music slider with a 0.5 s ramp, unless a scripted fade owns
  // the gain right now.
  // NOTE(review): re-assigning `this.outputMusicGain` inside its own watcher
  // is redundant (v-model already wrote the value) and re-triggers this
  // watcher — confirm before removing.
  this.outputMusicGain = newValue
  const musicGainNode = this.musicGainNode
  const audioCtx = await useAudioStore().getAudioContext()
  if (musicGainNode && !this.isFading) { musicGainNode.gain.linearRampToValueAtTime(this.normalizedMusicGain, audioCtx.currentTime + 0.5) }
},
/**
* This methods watches changes on the noise gain slider and use a simple ramp for adaption
* @param newValue
*/
async outputNoiseGain (newValue) {
  // Follow the noise slider with a 0.5 s ramp, unless a scripted fade owns
  // the gain right now.
  // NOTE(review): the self-assignment below re-triggers this watcher, same
  // as in outputMusicGain — confirm before removing.
  this.outputNoiseGain = newValue
  const noiseGainNode = this.noiseGainNode
  const audioCtx = await useAudioStore().getAudioContext()
  if (noiseGainNode && !this.isFading) { noiseGainNode.gain.linearRampToValueAtTime(this.normalizedNoiseGain, audioCtx.currentTime + 0.5) }
}
},
beforeUnmount (): void {
  // Fade out quickly (1 s) and drop the shared audio context before the
  // component is torn down.
  this.slowlyFadeOutAudio(1000)
  useAudioStore().resetAudioContext()
},
mounted () {
  // On mount: acquire the microphone, start playback immediately, route the
  // hidden <audio> element to the user's chosen output device, and mirror
  // external play/pause (hidden-element events + Media Session) onto
  // startPlay/stopPlay.
  const audioStore = useAudioStore()
  const micStore = useMicStore()
  // Change global volume.
  Howler.volume(1)
  // bufferStore.preLoadAudioFiles()
  micStore.getMicrophone()
  this.startPlay()
  this.audioContext = audioStore.getAudioContext()
  const hiddenAudio = document.getElementById('hiddenAudio')
  if (hiddenAudio instanceof HTMLAudioElement) {
    const sink = useUserStore().audioOutputDevice as any
    hiddenAudio.setSinkId(sink.deviceId)
  }
  hiddenAudio?.addEventListener('play', (_e) => {
    // useNuxtApp().$logger.log('Eventhandler play of hiddenAudio: ' + e)
    if (!this.isStartPlayRunning || !useAudioStore().playing) {
      this.startPlay()
    }
  })
  hiddenAudio?.addEventListener('pause', (_e) => {
    // useNuxtApp().$logger.log('Eventhandler pause of hiddenAudio: ' + e)
    this.stopPlay()
  })
  if ('mediaSession' in navigator) {
    this.showMediaInformation()
    // Play action
    navigator.mediaSession.setActionHandler('play', (_e) => {
      // Your play action here
      // useNuxtApp().$logger.log('Eventhandler play of navigator: ' + e)
      // NOTE(review): `!this.isStartPlayRunning === false` negates *before*
      // comparing, so it equals `this.isStartPlayRunning` — probably meant to
      // be the same guard as the hiddenAudio 'play' handler above. Confirm.
      if (!this.isStartPlayRunning === false || !useAudioStore().playing) { this.startPlay() }
    })
    // Pause action
    navigator.mediaSession.setActionHandler('pause', (_e) => {
      this.stopPlay()
    })
    /* CURRENTLY NOT IMPLEMENTED MORE A FEATURE
    // Previous track action
    navigator.mediaSession.setActionHandler('previoustrack', () => {
      const localePath = useLocalePath()
      // Your previous track action here
      if (this.scenery === 'Tropics') { this.scenery }
      localePath('Home' + this.scenery)
      // useNuxtApp().$logger.log('Previous track button pressed')
    })
    // Next track action
    navigator.mediaSession.setActionHandler('nexttrack', () => {
      // Your next track action here
      // useNuxtApp().$logger.log('Next track button pressed')
    })
    */
  }
},
methods: {
...mapActions(useAudioStore, ['addNode', 'startAllSources', 'startNode']),
/**
 * Publishes track metadata (scenery name, artist, artwork) to the
 * browser's MediaSession so OS-level media UIs can display it.
 */
showMediaInformation () {
  if (!('mediaSession' in navigator)) { return }
  const artworkSrc = '/images/scenery/' + this.scenery + '.jpg'
  navigator.mediaSession.metadata = new MediaMetadata({
    title: this.scenery,
    artist: 'Mindboost',
    artwork: [
      { src: artworkSrc, sizes: '96x96', type: 'image/jpeg' }
    ]
  })
},
/**
 * Wheel handler for the noise volume slider: scrolling nudges
 * `outputNoiseGain` by 1 while keeping it inside 0..100. The watcher on
 * `outputNoiseGain` then ramps the actual GainNode.
 * (The previous JSDoc here described stopPlay; it was misplaced.)
 * @param event wheel event from the slider element
 */
changeNoiseGain (event:WheelEvent): void {
  event.preventDefault()
  // Determine the direction of the scroll
  const delta = Math.sign(event.deltaY)
  // `< 101 && > -1` clamps the new value to the inclusive range [0, 100]
  if (this.outputNoiseGain - delta < 101 && this.outputNoiseGain - delta > -1) {
    this.outputNoiseGain -= delta
  }
},
/**
 * Wheel handler for the music volume slider; mirrors changeNoiseGain.
 * @param event wheel event from the slider element
 */
changeMusicGain (event:WheelEvent): void {
  event.preventDefault()
  // Scroll down (+1) lowers the slider, scroll up (-1) raises it.
  const step = Math.sign(event.deltaY)
  const next = this.outputMusicGain - step
  if (next > -1 && next < 101) {
    this.outputMusicGain = next
  }
},
/**
 * Stops playback: fades the mix out over 1 s (if currently playing) and
 * releases the microphone. Guarded by `isStopPlayRunning` so overlapping
 * calls (element 'pause' event plus MediaSession 'pause') do nothing.
 */
stopPlay () {
  if (this.isStopPlayRunning) {
    // Re-entrant call — a stop is already in progress.
    return
  }
  this.isStopPlayRunning = true
  // NOTE(review): other call sites read `useAudioStore().playing`; confirm
  // the store really exposes both `isPlaying` and `playing`.
  if (useAudioStore().isPlaying) {
    this.slowlyFadeOutAudio(1000)
    // this.music?.stop()
  }
  useMicStore().detachMicrophone()
  // NOTE(review): cleared synchronously although the fade above still runs
  // on an interval — TODO confirm this is intended.
  this.isStopPlayRunning = false
},
/**
 * Fades the mix out over `duration` ms: ramps both GainNodes to zero,
 * steps the UI sliders down in lockstep, disconnects the single-use
 * media-element sources, and flags the store as not playing.
 * @param duration fade time in milliseconds (default 5000)
 */
slowlyFadeOutAudio (duration = 5000) {
  this.isFading = true
  const tickMs = 50
  let currentValue = 100
  const targetValue = 0
  // The original computed `targetValue / (duration / 100)`, which is always 0,
  // so the "fade" jumped straight to silence on the first tick.
  const decrement = 100 / (duration / tickMs)
  // linearRampToValueAtTime expects an ABSOLUTE AudioContext time, not a delta.
  const ctx = this.musicGainNode?.context ?? this.noiseGainNode?.context
  const rampEnd = (ctx?.currentTime ?? 0) + duration / 1000
  this.musicGainNode?.gain.linearRampToValueAtTime(0, rampEnd)
  this.noiseGainNode?.gain.linearRampToValueAtTime(0, rampEnd)
  const intervalId = setInterval(() => {
    currentValue -= decrement
    if (currentValue <= targetValue) {
      currentValue = targetValue // clamp at silence
      clearInterval(intervalId) // fade finished
    }
    this.outputNoiseGain = currentValue
    this.outputMusicGain = currentValue
    // MediaElementAudioSourceNodes can only be attached once; drop them so a
    // later startPlay() can rebuild the graph.
    if (this.musicAudioSource instanceof MediaElementAudioSourceNode) {
      this.musicAudioSource.disconnect()
      this.musicAudioSource = null
    }
    if (this.noiseAudioSource instanceof MediaElementAudioSourceNode) {
      this.noiseAudioSource.disconnect()
      this.noiseAudioSource = null
    }
    useAudioStore().setPlaying(false)
  }, tickMs)
  // Cleared synchronously on purpose: the slider watchers only ramp the
  // GainNodes while `isFading` is false, and the stepping above relies on them.
  this.isFading = false
},
/**
 * Raises the sliders slowly to trigger the watchers that fade in the audio.
 * @param duration fade time in milliseconds (default 5000)
 */
slowlyFadeInAudio (duration = 5000) {
  this.isFading = true
  const tickMs = 30
  let currentValue = 1
  const targetValue = 100
  // The original divided by a 100 ms tick while the interval ran every 30 ms,
  // so the fade finished in roughly a third of the requested duration.
  const increment = targetValue / (duration / tickMs)
  const intervalId = setInterval(() => {
    currentValue += increment
    if (currentValue >= targetValue) {
      currentValue = targetValue // Ensure it doesn't go over 100
      clearInterval(intervalId) // Stop the interval
    }
    this.outputNoiseGain = currentValue
    this.outputMusicGain = currentValue
  }, tickMs) // Update every 30 milliseconds
  // Cleared synchronously on purpose: the slider watchers only ramp the
  // GainNodes while `isFading` is false.
  this.isFading = false
},
// Intentional no-op stub kept for debug hooks in the template.
testHowler () {
},
/**
 * Builds the full Web Audio graph — microphone and file sources feed the
 * RNBO music and noise patches, whose outputs run through gain nodes to
 * the destination — then fades the result in. Guarded by
 * `isStartPlayRunning` so concurrent 'play' events cannot build it twice.
 */
async startPlay () {
  const audioStore = useAudioStore()
  const microStore = useMicStore()
  //
  // State management of playing audio
  //
  if (this.isStartPlayRunning) { return } else { this.isStartPlayRunning = true }
  // get the current context
  const audioContext = await useAudioStore().getAudioContext()
  // get inputs
  const mic = await microStore.getMicrophone()
  const microphone = useAudioStore().getMicrophoneNode()
  this.micSource = mic.microphoneNode as MediaStreamAudioSourceNode
  this.micStream = mic.microphoneStream // needed later to stop audio input
  // get audio files:
  const noiseAudioSource = await audioStore.getBufferSourceNode('noise')
  const musicAudioSource = await audioStore.getBufferSourceNode(this.scenery)
  // RNBO devices:
  this.musicDevice = await createRNBODevice(audioContext, importedMusicPatcher)
  this.noiseDevice = await createRNBODevice(audioContext, importedNoisePatcher)
  const musicDevice = audioStore.addNode('musicDevice', this.musicDevice.node)
  const noiseDevice = audioStore.addNode('noiseDevice', this.noiseDevice.node)
  // Music input channel splitter:
  this.musicInputChannelSplitter = new ChannelSplitterNode(
    audioContext,
    { numberOfOutputs: 2 }
  )
  const musicInputChannelSplitter = audioStore.addNode('musicInputChannelSplitter', this.musicInputChannelSplitter)
  this.noiseInputChannelSplitter = new ChannelSplitterNode(
    audioContext,
    { numberOfOutputs: 2 }
  )
  const noiseInputChannelSplitter = audioStore.addNode('noiseInputChannelSplitter', this.noiseInputChannelSplitter)
  const musicOutputChannelSplitterNode : ChannelSplitterNode = new ChannelSplitterNode(
    audioContext,
    { numberOfOutputs: 2 }
  )
  const musicOutputChannelSplitter = audioStore.addNode('musicOutputChannelSplitter', musicOutputChannelSplitterNode)
  /**
   * GAIN NODES — start at 0.5, later driven by the slider watchers
   */
  const musicGainNode : GainNode = audioContext.createGain()
  const noiseGainNode : GainNode = audioContext.createGain()
  musicGainNode.gain.value = 0.5
  noiseGainNode.gain.value = 0.5
  this.noiseGainNode = noiseGainNode
  this.musicGainNode = musicGainNode
  const musicGain = audioStore.addNode('musicGainNode', musicGainNode)
  const noiseGain = audioStore.addNode('noiseGainNode', noiseGainNode)
  /**
   * MUSIC PATCH — mic → input 0, music L/R → inputs 1/2
   */
  try {
    if (microphone && musicDevice) { audioStore.connectNodes(microphone, musicDevice, 0, 0) }
    if (musicAudioSource && musicInputChannelSplitter) { audioStore.connectNodes(musicAudioSource, musicInputChannelSplitter, 0, 0) }
    if (musicDevice && musicInputChannelSplitter) { audioStore.connectNodes(musicInputChannelSplitter, musicDevice, 0, 1) }
    if (musicDevice && musicInputChannelSplitter) { audioStore.connectNodes(musicInputChannelSplitter, musicDevice, 1, 2) }
  } catch (error) {
    // NOTE(review): connection errors are swallowed here; consider logging.
  }
  // Gains of Music PATCH
  const destination = await audioStore.addNode('destination', audioContext.destination)
  audioStore.connectNodes(musicDevice, musicGain)
  audioStore.connectNodes(musicGain, destination)
  /**
   * NOISE PATCH — mic → input 0, noise L/R → inputs 1/2,
   * music output L/R → inputs 3/4
   */
  audioStore.connectNodes(microphone, noiseDevice, 0, 0)
  audioStore.connectNodes(noiseAudioSource, noiseInputChannelSplitter, 0, 0)
  audioStore.connectNodes(noiseInputChannelSplitter, noiseDevice, 0, 1)
  audioStore.connectNodes(noiseInputChannelSplitter, noiseDevice, 1, 2)
  // cross connect music to the noise device:
  audioStore.connectNodes(musicDevice, musicOutputChannelSplitter, 0, 0)
  audioStore.connectNodes(musicOutputChannelSplitter, noiseDevice, 0, 3)
  audioStore.connectNodes(musicOutputChannelSplitter, noiseDevice, 1, 4)
  audioStore.connectNodes(noiseDevice, noiseGain, 0, 0)
  /**
   * Gain of the noise
   */
  audioStore.connectNodes(noiseGain, destination)
  this.startTime = audioContext.currentTime
  this.pauseTime = 0
  /**
   * Parameters for rnbo device
   */
  const attenuationFactor = this.noiseDevice.parametersById.get('attenuation')
  attenuationFactor.value = getAttenuationFactor('Yes' as ANC, 'OverEar' as HeadsetType)
  // Subscribe exactly once — the original registered this identical handler
  // twice, leaving a redundant second subscription on the device.
  this.noiseDevice.messageEvent.subscribe((e) => {
    if (e.tag === 'out7') {
      this.eventLog = e.payload + '\n'
    }
  })
  // fade the mix in over 3 s, then release the re-entrancy guard
  this.slowlyFadeInAudio(3000)
  this.isStartPlayRunning = false
}
}
}
/**
 * Maps headset type and active-noise-cancelling capability to the
 * attenuation factor fed into the RNBO noise patch.
 * @param anc whether the headset has active noise cancelling
 * @param ht over-ear or in-ear headset
 * @returns the matching AttenuationFactor, or 0.5623 as a fallback
 */
function getAttenuationFactor (anc:ANC, ht:HeadsetType): number {
  // Return type narrowed from boxed `Number` to primitive `number`; the
  // original's unused `useUserStore()` call (dead code) was removed.
  if (anc === ANC.Yes && ht === HeadsetType.OverEar) { return AttenuationFactor.OverEarANC }
  if (anc === ANC.No && ht === HeadsetType.OverEar) { return AttenuationFactor.OverEar }
  if (anc === ANC.Yes && ht === HeadsetType.InEar) { return AttenuationFactor.InEarANC }
  if (anc === ANC.No && ht === HeadsetType.InEar) { return AttenuationFactor.InEar }
  return 0.5623 // fallback — presumably a measured default; TODO confirm
}
</script>
<style scoped>
/* Player card and volume-slider styling */
.player {
  background-color: #fff;
  border-radius: 12px;
}
.player button {
  border-radius: 10px;
  padding: 10px;
}
.container {
  display: flex;
  flex-wrap: wrap;
  gap: 20px; /* Spacing between items */
  width: 225px;
  margin-bottom: 20px;
}
.icon, .slider {
  flex: 1 1 100px; /* Flex-grow, flex-shrink, flex-basis */
  display: flex;
  align-items: center; /* Center items vertically */
  justify-content: center; /* Center items horizontally */
}
.icon {
  /* Add padding around the icon for margin */
  margin-right: 15px; /* Adjust this value as needed */
  /* Align items if using flexbox */
  display: flex;
  align-items: center;
  justify-content: center;
}
.icon img {
  /* Adjust width and height as needed or keep them auto to maintain aspect ratio */
  width: auto;
  height: 100%; /* Example height, adjust based on your icon size */
}
.slider input[type=range] {
  width: 100%; /* Full width of its parent */
  background-color: transparent !important;
}
@media (min-width: 600px) {
  .row {
    display: flex;
    width: 100%;
  }
  .icon, .slider {
    flex: 1; /* Take up equal space */
  }
}
/* Styles the track (one rule per vendor prefix) */
input[type="range"]::-webkit-slider-runnable-track {
  background: #e9c046; /* yellow track */
  height: 8px;
  border-radius: 5px;
}
input[type="range"]::-moz-range-track {
  background: #e9c046; /* yellow track */
  height: 8px;
  border-radius: 5px;
}
input[type="range"]::-ms-track {
  background: #e9c046; /* yellow track */
  border-color: transparent;
  color: transparent;
  height: 8px;
}
/* Pulsing circular play button fixed above the bottom edge */
.play.yellow {
  background: rgba(255, 176, 66, 0.008);
  border-radius: 50%;
  box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
  animation: pulse-yellow 4s infinite;
  position: fixed;
  bottom: 40%;
  width: 200px;
  height: 200px;
  background-image: url('/images/playbtn.svg');
  background-repeat:no-repeat;
  background-attachment:fixed;
  background-position: 58% 55%;
}
/* Nearly invisible pause button that brightens on hover */
.pause.yellow {
  background: rgba(255, 176, 66, 0.008);
  border-radius: 50%;
  box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
  opacity: 0.05;
  position: fixed;
  bottom: 40%;
  width: 200px;
  height: 200px;
  background-image: url('/images/pausebtn.svg');
  background-size: 130px 100px;
  background-repeat:no-repeat;
  background-attachment:fixed;
  background-position: center;
}
.pause.yellow:hover{
  opacity: 0.5;
}
@keyframes pulse-yellow {
  0% {
    transform: scale(0.95);
    box-shadow: 0 0 0 0 rgba(255, 177, 66, 0.7);
  }
  70% {
    transform: scale(1);
    box-shadow: 0 0 0 10px rgba(255, 177, 66, 0);
  }
  100% {
    transform: scale(0.95);
    box-shadow: 0 0 0 0 rgba(255, 177, 66, 0);
  }
}
</style>

68
archive/demo/index.vue Normal file
View File

@@ -0,0 +1,68 @@
<!-- eslint-disable vue/multi-word-component-names -->
<template>
  <div>
    Do not load multiple test without unloading others.
    <div>
      NoisePatchMusic
      <NoisePatchMusic v-if="tests['noisepatch'].active" />
      <!-- Show Load only while unloaded and Unload only while loaded;
           the original showed Load always and Unload while INACTIVE. -->
      <button
        v-if="!tests['noisepatch'].active"
        @click="toggleState('noisepatch')"
      >
        Load
      </button>
      <button
        v-if="tests['noisepatch'].active"
        @click="toggleState('noisepatch')"
      >
        Unload
      </button>
    </div>
    <div>
      AudioElementManager (all patches connected)
      <audioElementManager v-if="tests['complete'].active" />
      <button
        v-if="!tests['complete'].active"
        @click="toggleState('complete')"
      >
        Load
      </button>
      <button
        v-if="tests['complete'].active"
        @click="toggleState('complete')"
      >
        Unload
      </button>
    </div>
    <div>
      RNBOplayer
      <RNBOPlayer v-if="tests['loadrnbo'].active" />
      <button
        v-if="!tests['loadrnbo'].active"
        @click="toggleState('loadrnbo')"
      >
        Load
      </button>
      <button
        v-if="tests['loadrnbo'].active"
        @click="toggleState('loadrnbo')"
      >
        Unload
      </button>
    </div>
  </div>
</template>
<script setup>
// Archive demo page: mounts at most one experimental player at a time.
import NoisePatchMusic from '~/archive/components/tests/NoisePatchMusic_glitchOnLoad.vue'
import audioElementManager from '~/archive/pages/experiments/audioElementManager.vue'
import RNBOPlayer from '~/archive/components/tests/RNBOPlayer.vue'
// Per-test load state; keys double as toggleState() arguments.
const tests = ref({
  noisepatch: { active: false },
  loadrnbo: { active: false },
  complete: { active: false }
})
// Flip a single test's mounted/unmounted state.
const toggleState = (stateName) => {
  const test = tests.value
  test[stateName].active = !test[stateName].active
}
</script>

View File

@@ -0,0 +1,376 @@
<template>
  <div>
    <!-- Usage hints for this experimental page (typos fixed) -->
    <h2>Press Space to start, wait a few seconds then the sound should start. You will hear the artifacts right after the music begins</h2>
    <h2>These files contain both patches, use Gainsliders of AudioElement to control volume and allow to skip. No AudioStore used</h2>
    <div class="rnboplayer">
      <div v-if="selectedAudioTitle">
        {{ selectedAudio.title }}
        <AudioElement
          :ref="selectedAudio.title"
          :key="selectedAudio.id"
          :src="selectedAudio.src"
          :title="selectedAudio.title"
          @update:playing="handlePlayingUpdate"
          @update:volume="changeMusicVolume"
        >
          <template #default="{}">
            <img style="width: 25px" src="~/assets/image/musicicon.svg">
          </template>
        </AudioElement>
      </div>
      <AudioElement
        :ref="noise.title"
        :key="noise.id"
        :src="noise.src"
        :title="noise.title"
        @update:volume="changeNoiseVolume"
      >
        <template #default="{}">
          <img style="width: 25px" src="~/assets/image/noiseicon.svg">
        </template>
        <!-- Slot content for AudioElement, if needed -->
      </AudioElement>
      <button @click="skip">
        Skip
      </button>
    </div>
  </div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
// import setupNodes from '@/components/Player/Nodes'
// import { useAudioStore } from '~/stores/audio'
export default {
components: {
AudioElement
},
// Component state: playlist, current selection, Web Audio node registry,
// and the imported RNBO patch descriptions.
data () {
  return {
    // Selectable music tracks served from /sounds
    audioList: [
      { id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
      { id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
      { id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
      { id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
    ],
    // Masking-noise track, always present
    noise: { id: 5, title: 'Noise', src: window.location.origin + '/masking/noise.flac' },
    selectedAudioTitle: '',
    currentElement: {},
    // AudioNodes keyed by role (musicGain, noiseSplitter, merger, …)
    createdNodes: {},
    musicPatch: importedMusicPatcher,
    noisePatch: importedNoisePatcher,
    audioContext: null,
    playState: false
  }
},
computed: {
selectedAudio () {
return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
},
// Read-only mirror of the internal playState flag.
isPlaying () {
  return this.playState
}
},
beforeUnmount () {
this.disconnectNodes()
if (this.audioContext) { this.audioContext.close() }
this.audioContext = null
},
mounted () {
  this.audioContext = new AudioContext()
  // Select the first playlist entry ('Lagoon') by default
  this.selectAudioByTitle(this.audioList[0].title)
  this.addUserNavigationHandling()
  // Re-assign the patches (already set in data); kept from the original setup.
  this.musicPatch = importedMusicPatcher
  this.noisePatch = importedNoisePatcher
},
methods: {
/**
 * Verifies that the nodes in `createdNodes` belong to the live
 * `this.audioContext`. Returns true only when the contexts match.
 * Side effect: if the nodes' context differs but is still usable,
 * it is adopted as `this.audioContext` (false is still returned so
 * the caller rebuilds the graph).
 */
checkAudioContext () {
  // Check if there are any keys (node names) in the createdNodes object
  if (Object.keys(this.createdNodes).length === 0) {
    return false
  }
  // Check if the 'musicGain' node exists and has a context
  if (this.createdNodes.musicGain && this.createdNodes.musicGain.context) {
    const audioContextOfNodes = this.createdNodes.musicGain.context
    // Compare the contexts
    if (this.audioContext === audioContextOfNodes) {
      return true
    } else {
      if (audioContextOfNodes.state === 'closed' || audioContextOfNodes.state === 'suspended') {
        // Nodes belong to a dead context — caller must rebuild everything
        return false
      } else {
        // Live but foreign context: adopt it, still report a mismatch
        this.audioContext = audioContextOfNodes
      }
      return false
    }
  } else {
    // musicGain node does not exist or has no context
    return false
  }
},
changeNoiseVolume (volume) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
this.noiseGain = this.createdNodes.noiseGain.gain.value
}
},
changeMusicVolume (volume) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
this.musicGain = this.createdNodes.musicGain.gain.value
}
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
skip (direction) {
const nextSong = this.getSong(this.selectedAudioTitle, direction)
this.selectAudioByTitle(nextSong.title)
},
// Sync `currentElement` with the playlist entry named `title`.
updateCurrentElement (title) {
  this.currentElement = this.audioList.find(e => e.title === title)
},
selectAudioByTitle (title) {
this.selectedAudioTitle = title
// const audioElement = this.audioList.find(e => e.title === this.selectedAudioTitle)
this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
// useNuxtApp().$logger.log('currentElement: ', this.selectedAudioTitle)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
/**
 * Wires the full graph. Input indices on the RNBO devices matter:
 *   mic → musicDevice(0) and noiseDevice(0)
 *   music L/R → musicDevice(1,2); noise L/R → noiseDevice(1,2)
 *   musicDevice output L/R → noiseDevice(3,4)
 *   musicGain + noiseGain → merger → destination
 */
connectNodes () {
  // Destructure for easier access
  const {
    musicSource, musicSplitter, microphoneSource,
    musicDevice, outputSplitter, musicGain,
    noiseSource, noiseSplitter, noiseDevice,
    noiseGain, merger
  } = this.createdNodes
  // Assuming all nodes are created and references to them are correct
  // Connect music source to splitter (Stereo to 2 Channels)
  musicSource.connect(musicSplitter) // 2 channels: L, R
  // Connect microphone to musicDevice input 0 (Mono)
  microphoneSource.connect(musicDevice, 0, 0) // 1 channel: Microphone
  // Connect musicSplitter to musicDevice inputs 1 and 2 (Stereo)
  musicSplitter.connect(musicDevice, 0, 1) // 1 channel: Left
  musicSplitter.connect(musicDevice, 1, 2) // 1 channel: Right
  // Connect musicDevice to outputSplitter (Stereo out)
  musicDevice.connect(outputSplitter) // Assuming musicDevice outputs stereo
  // Optionally connect musicDevice to musicGain for additional gain control (Stereo)
  musicDevice.connect(musicGain) // Assuming musicDevice outputs stereo, connected to both channels of musicGain
  // Connect noise source to noiseSplitter (Stereo to 2 Channels)
  noiseSource.connect(noiseSplitter) // 2 channels: L, R
  // Connect microphone to noiseDevice input 0 (Mono)
  microphoneSource.connect(noiseDevice, 0, 0) // 1 channel: Microphone
  // Connect noiseSplitter to noiseDevice inputs 1 and 2 (Stereo)
  noiseSplitter.connect(noiseDevice, 0, 1) // 1 channel: Left
  noiseSplitter.connect(noiseDevice, 1, 2) // 1 channel: Right
  // Connect outputSplitter to noiseDevice inputs 3 and 4 (Stereo from musicDevice)
  outputSplitter.connect(noiseDevice, 0, 3) // 1 channel: Left from musicDevice
  outputSplitter.connect(noiseDevice, 1, 4) // 1 channel: Right from musicDevice
  // Assuming noiseDevice outputs stereo, connect to noiseGain (Stereo)
  noiseDevice.connect(noiseGain) // Assuming noiseDevice outputs stereo, connected to both channels of noiseGain
  // Merge both processed signals (Stereo from both musicGain and noiseGain)
  // Connect noiseGain to the first two inputs of the merger (Stereo)
  noiseGain.connect(merger, 0, 0) // 1 channel: Left
  noiseGain.connect(merger, 0, 1) // 1 channel: Right
  // Connect musicGain to the last two inputs of the merger (Stereo)
  musicGain.connect(merger, 0, 2) // 1 channel: Left
  musicGain.connect(merger, 0, 3) // 1 channel: Right
  // Finally, connect the merger to the audio context's destination (Stereo to output)
  merger.connect(merger.context.destination)
},
handlePlayingUpdate (isPlaying) {
const ContextClass = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext)
const audioContext = this.audioContext ? this.audioContext : new ContextClass()
let microphoneSource = null
// useNuxtApp().$logger.log('Handling playing update = ' + isPlaying) // true when playing
if (isPlaying) {
// Web Audio API is available.
// const musicAudioComponent = this.$refs[this.currentElement.title].audioElement
if (this.areAllNodesAvailable() && this.checkAudioContext()) {
this.disconnectNodes()
const musicAudioElement = this.$refs[this.currentElement.title].$refs.audioElement
const musicGainValue = this.createdNodes.musicGain.value
const noiseGainValue = this.createdNodes.noiseGain.value
// Prepare the audio elements (unmute)
this.$refs[this.currentElement.title].$refs.audioElement.muted = false
this.$refs.Noise.$refs.audioElement.muted = false
// replace music node because all Nodes are there
this.createdNodes.musicSource = null
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.connectNodes()
if (musicGainValue > 0 && musicGainValue <= 1.0) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue || 1.0, audioContext.currentTime + 2)
}
if (noiseGainValue > 0 && noiseGainValue <= 1.0) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue || 1.0, audioContext.currentTime + 3)
}
} else {
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false,
noiseSuppression: false,
autoGainControl: false
},
video: false
})
.then((micStream) => {
microphoneSource = this.audioContext.createMediaStreamSource(micStream)
this.createdNodes.microphoneSource ||= microphoneSource
// Return both audioContext and microphoneSource to the next link in the chain
return { audioContext, microphoneSource }
})
.then(({ audioContext, microphoneSource }) => {
// First RNBO device creation
return createRNBODevice(audioContext, this.noisePatch).then((noiseRNBODevice) => {
this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
// Return al.then(() => {hen(() => necessary objects for the next step
return { audioContext, microphoneSource, noiseRNBODevice }
})
})
.then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
// Second RNBO device creation
return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
// Return all necessary objects for the final setup
this.createdNodes.musicRNBODevice ||= musicRNBODevice
return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
})
}).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
const musicElement = this.$refs[this.currentElement.title]
const musicAudioElement = musicElement.$refs.audioElement
// Ensure this.createdNodes is initialized
this.createdNodes ||= {}
// Assign nodes if they don't exist
this.createdNodes.microphoneSource ||= microphoneSource
this.createdNodes.musicDevice ||= musicRNBODevice.node
this.createdNodes.noiseDevice ||= noiseRNBODevice.node
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(this.$refs.Noise.$refs.audioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.merger ||= audioContext.createChannelMerger(4)
musicAudioElement.muted = false
this.$refs.Noise.$refs.audioElement.muted = false
this.createdNodes.musicGain.gain.value = 0.0001
this.createdNodes.noiseGain.gain.value = 0.0001 // macht nichts
this.connectNodes()
setTimeout(() => {
this.playState = true
this.createdNodes.musicGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 3)
this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 4)
}, 150)
})
.catch((_error) => {
this.disconnectNodes()
if (this.audioContext) {
this.audioContext.destination.disconnect()
// audioContext.destination = null
}
// useNuxtApp().$logger.error('Error setting up audio:', error)
})
}
} else {
this.disconnectNodes()
const newAudioContext = new AudioContext()
this.audioContext.close()
this.audioContext = newAudioContext
}
},
disconnectNodes () {
// Ensure this.createdNodes is defined and is an object
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
this.playState = false
}
}
}
}
</script>
<style scoped>
.rnboplayer {
  /* Pinned overlay, horizontally centered above the bottom edge */
  position: fixed;
  width: 220px; /* Or specify a fixed width like 220px if you prefer */
  max-width: 220px; /* This line might be redundant depending on your width strategy */
  height: 100px;
  display: inline-grid;
  z-index: 2;
  bottom: 11%;
  left: 0;
  right: 0;
  margin-left: auto;
  margin-right: auto;
}
</style>

View File

@@ -0,0 +1,7 @@
<template>
  <!-- Page wrapper around the archived AudioElementManager experiment -->
  <AudioElementManager />
</template>
<script setup>
// Pass-through page: all logic lives in the imported component.
import AudioElementManager from '~/archive/components/tests/AudioElementManager.vue'
</script>

View File

@@ -0,0 +1,365 @@
<template>
  <div>
    <!-- Usage hints for this experimental page (typos fixed) -->
    <h2>Firefox: wait, it's loading: a few seconds then the sound should start. You will hear the artifacts right after the music begins</h2>
    <h2>These files contain both patches, use Gainsliders of AudioElement to control volume and allow to skip. AudioStore used</h2>
    <div class="rnboplayer">
      <div v-if="selectedAudioTitle">
        {{ selectedAudio.title }}
        <AudioElement
          :ref="selectedAudio.title"
          :key="selectedAudio.id"
          :src="selectedAudio.src"
          :title="selectedAudio.title"
          @update:playing="handlePlayingUpdate"
          @update:volume="changeMusicVolume"
        >
          <template #default="{}" />
          <!-- Slot content for AudioElement, if needed -->
        </AudioElement>
      </div>
      <AudioElement
        :ref="noise.title"
        :key="noise.id"
        :src="noise.src"
        :title="noise.title"
        @update:volume="changeNoiseVolume"
      >
        <template #default="{}" />
        <!-- Slot content for AudioElement, if needed -->
      </AudioElement>
      <button @click="skip">
        Skip
      </button>
    </div>
  </div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
// import setupNodes from '@/components/Player/Nodes'
// import { useAudioStore } from '~/stores/audio'
export default {
components: {
AudioElement
},
// Component state: playlist, current selection, Web Audio node registry,
// and the imported RNBO patch descriptions.
data () {
  return {
    // Selectable music tracks served from /sounds
    audioList: [
      { id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
      { id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
      { id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
      { id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
    ],
    // Masking-noise track, always present
    noise: { id: 5, title: 'Noise', src: window.location.origin + '/masking/noise.flac' },
    selectedAudioTitle: '',
    currentElement: {},
    // AudioNodes keyed by role (musicGain, noiseSplitter, merger, …)
    createdNodes: {},
    musicPatch: importedMusicPatcher,
    noisePatch: importedNoisePatcher,
    audioContext: null,
    playState: false
  }
},
computed: {
selectedAudio () {
return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
},
// Read-only mirror of the internal playState flag.
isPlaying () {
  return this.playState
},
checkContextCompatibility () {
// Assume createdNodes is an object where each property is a node
const nodes = Object.values(this.createdNodes)
const incompatible = nodes.filter(node => node.context !== this.audioContext)
if (nodes.length === nodes.length - incompatible.length) { return true }
// useNuxtApp().$logger.log(incompatible.length + '/' + nodes.length + ' sind inkompatibel')
this.handleIncompatibeError(incompatible)
return false
}
},
/**
 * Attempts to repair a node created on a stale AudioContext.
 * NOTE(review): declared as a top-level component option, not inside
 * `methods`, so callers using `this.handleIncompatibeError` won't find it.
 * NOTE(review): `musicAudioElement` below is not defined in this scope —
 * this branch would throw if it ever executed. TODO fix before relying on it.
 * @param incompatible nodes whose context differs from this.audioContext
 */
handleIncompatibeError (incompatible) {
  if (incompatible) {
    const node = incompatible.pop()
    if (node.context !== this.createdNodes.musicSplitter.context) {
      const audioContext = this.createdNodes.musicSplitter.context
      // Recreate the music source on the splitter's context
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
    }
  } else {
    // nothing to repair
  }
},
beforeUnmount () {
this.disconnectNodes()
if (this.audioContext) { this.audioContext.close() }
this.audioContext = null
},
mounted () {
  this.audioContext = new AudioContext()
  // Select the first playlist entry ('Lagoon') by default
  this.selectAudioByTitle(this.audioList[0].title)
  this.addUserNavigationHandling()
  // Re-assign the patches (already set in data); kept from the original setup.
  this.musicPatch = importedMusicPatcher
  this.noisePatch = importedNoisePatcher
},
methods: {
changeNoiseVolume (volume) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
this.noiseGain = this.createdNodes.noiseGain.gain.value
}
},
changeMusicVolume (volume) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
this.musicGain = this.createdNodes.musicGain.gain.value
}
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
skip (direction) {
const nextSong = this.getSong(this.selectedAudioTitle, direction)
this.selectAudioByTitle(nextSong.title)
},
// Sync `currentElement` with the playlist entry named `title`.
updateCurrentElement (title) {
  this.currentElement = this.audioList.find(e => e.title === title)
},
selectAudioByTitle (title) {
this.selectedAudioTitle = title
// const audioElement = this.audioList.find(e => e.title === this.selectedAudioTitle)
this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
// useNuxtApp().$logger.log('currentElement: ', this.selectedAudioTitle)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
// Wire the full Web Audio graph:
//   music element -> musicSplitter -> musicDevice (mic on input 0) -> outputSplitter / musicGain
//   noise element -> noiseSplitter -> noiseDevice (mic on input 0, music feedback on inputs 3/4) -> noiseGain
//   noiseGain + musicGain -> 4-channel merger -> context destination
connectNodes () {
  // Destructure for easier access
  const {
    musicSource, musicSplitter, microphoneSource,
    musicDevice, outputSplitter, musicGain,
    noiseSource, noiseSplitter, noiseDevice,
    noiseGain, merger
  } = this.createdNodes
  // Assuming all nodes are created and references to them are correct
  // Connect music source to splitter (Stereo to 2 Channels)
  musicSource.connect(musicSplitter) // 2 channels: L, R
  // Connect microphone to musicDevice input 0 (Mono)
  microphoneSource.connect(musicDevice, 0, 0) // 1 channel: Microphone
  // Connect musicSplitter to musicDevice inputs 1 and 2 (Stereo)
  musicSplitter.connect(musicDevice, 0, 1) // 1 channel: Left
  musicSplitter.connect(musicDevice, 1, 2) // 1 channel: Right
  // Connect musicDevice to outputSplitter (Stereo out)
  musicDevice.connect(outputSplitter) // Assuming musicDevice outputs stereo
  // Optionally connect musicDevice to musicGain for additional gain control (Stereo)
  musicDevice.connect(musicGain) // Assuming musicDevice outputs stereo, connected to both channels of musicGain
  // Connect noise source to noiseSplitter (Stereo to 2 Channels)
  noiseSource.connect(noiseSplitter) // 2 channels: L, R
  // Connect microphone to noiseDevice input 0 (Mono)
  microphoneSource.connect(noiseDevice, 0, 0) // 1 channel: Microphone
  // Connect noiseSplitter to noiseDevice inputs 1 and 2 (Stereo)
  noiseSplitter.connect(noiseDevice, 0, 1) // 1 channel: Left
  noiseSplitter.connect(noiseDevice, 1, 2) // 1 channel: Right
  // Connect outputSplitter to noiseDevice inputs 3 and 4 (Stereo from musicDevice)
  outputSplitter.connect(noiseDevice, 0, 3) // 1 channel: Left from musicDevice
  outputSplitter.connect(noiseDevice, 1, 4) // 1 channel: Right from musicDevice
  // Assuming noiseDevice outputs stereo, connect to noiseGain (Stereo)
  noiseDevice.connect(noiseGain) // Assuming noiseDevice outputs stereo, connected to both channels of noiseGain
  // Assuming you want to merge and output both processed signals (Stereo from both musicGain and noiseGain)
  // Connect noiseGain to the first two inputs of the merger (Stereo)
  noiseGain.connect(merger, 0, 0) // 1 channel: Left
  noiseGain.connect(merger, 0, 1) // 1 channel: Right
  // Connect musicGain to the last two inputs of the merger (Stereo)
  musicGain.connect(merger, 0, 2) // 1 channel: Left
  musicGain.connect(merger, 0, 3) // 1 channel: Right
  // Finally, connect the merger to the audio context's destination (Stereo to output)
  merger.connect(merger.context.destination)
},
handlePlayingUpdate (isPlaying) {
const ContextClass = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext)
const audioContext = this.audioContext ? this.audioContext : new ContextClass()
let microphoneSource = null
// useNuxtApp().$logger.log('Handling playing update = ' + isPlaying) // true when playing
if (isPlaying) {
// Web Audio API is available.
// const musicAudioComponent = this.$refs[this.currentElement.title].audioElement
if (this.areAllNodesAvailable()) {
this.disconnectNodes()
const musicAudioElement = this.$refs[this.currentElement.title].$refs.audioElement
const musicGainValue = this.createdNodes.musicGain.value
const noiseGainValue = this.createdNodes.noiseGain.value
// Prepare the audio elements (unmute)
this.$refs[this.currentElement.title].$refs.audioElement.muted = false
this.$refs.Noise.$refs.audioElement.muted = false
// replace music node because all Nodes are there
this.createdNodes.musicSource = null
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.connectNodes()
if (musicGainValue > 0 && musicGainValue <= 1.0) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue || 1.0, audioContext.currentTime + 2)
}
if (noiseGainValue > 0 && noiseGainValue <= 1.0) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue || 1.0, audioContext.currentTime + 3)
}
} else {
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false,
noiseSuppression: false,
autoGainControl: false
},
video: false
})
.then((micStream) => {
microphoneSource = this.audioContext.createMediaStreamSource(micStream)
this.createdNodes.microphoneSource ||= microphoneSource
// Return both audioContext and microphoneSource to the next link in the chain
return { audioContext, microphoneSource }
})
.then(({ audioContext, microphoneSource }) => {
// First RNBO device creation
return createRNBODevice(audioContext, this.noisePatch).then((noiseRNBODevice) => {
this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
// Return al.then(() => {hen(() => necessary objects for the next step
return { audioContext, microphoneSource, noiseRNBODevice }
})
})
.then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
// Second RNBO device creation
return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
// Return all necessary objects for the final setup
this.createdNodes.musicRNBODevice ||= musicRNBODevice
return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
})
}).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
const musicElement = this.$refs[this.currentElement.title]
const musicAudioElement = musicElement.$refs.audioElement
// Ensure this.createdNodes is initialized
this.createdNodes ||= {}
// Assign nodes if they don't exist
this.createdNodes.microphoneSource ||= microphoneSource
this.createdNodes.musicDevice ||= musicRNBODevice.node
this.createdNodes.noiseDevice ||= noiseRNBODevice.node
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(this.$refs.Noise.$refs.audioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.merger ||= audioContext.createChannelMerger(4)
musicAudioElement.muted = false
this.$refs.Noise.$refs.audioElement.muted = false
this.createdNodes.musicGain.gain.value = 0.0001
this.createdNodes.noiseGain.gain.value = 0.0001 // macht nichts
this.connectNodes()
setTimeout(() => {
this.playState = true
this.createdNodes.musicGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 3)
this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 4)
}, 150)
})
.catch((_error) => {
this.disconnectNodes()
if (this.audioContext) {
this.audioContext.destination.disconnect()
// audioContext.destination = null
}
this.$toast.error('Oouh sorry! Error while setting up audio, please reload.')
})
}
} else {
this.disconnectNodes()
const newAudioContext = new AudioContext()
this.audioContext.close()
this.audioContext = newAudioContext
}
},
disconnectNodes () {
// Ensure this.createdNodes is defined and is an object
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
this.playState = false
}
}
}
}
</script>
<style scoped>
.rnboplayer {
position: fixed;
width: 220px; /* Or specify a fixed width like 220px if you prefer */
max-width: 220px; /* This line might be redundant depending on your width strategy */
height: 100px;
display: inline-grid;
z-index: 2;
bottom: 11%;
left: 0;
right: 0;
margin-left: auto;
margin-right: auto;
}
</style>

View File

@@ -0,0 +1,399 @@
<template>
<div>
<KeyboardPlayHandler />
<h2>Press Space to start, wait a few seconds and the sound should start. You will hear the artifacts right after the music begins</h2>
<h2>This file contains both patches; use the gain sliders of AudioElement to control volume and to skip tracks. The AudioStore is used for the shared AudioContext</h2>
<div class="rnboplayer">
<div v-if="selectedAudioTitle">
{{ selectedAudio.title }}
<AudioElement
:ref="selectedAudio.title"
:key="selectedAudio.id"
:src="selectedAudio.src"
:title="selectedAudio.title"
@update:playing="handlePlayingUpdate"
@update:volume="changeMusicVolume"
>
<template #default="{}" />
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</div>
<AudioElement
:ref="noise.title"
:key="noise.id"
:src="noise.src"
:title="noise.title"
@update:volume="changeNoiseVolume"
>
<template #default="{}" />
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
<div class="row">
<div class="col-auto">
<button @click="skip">
Skip
</button>
</div>
<div class="col-auto">
<button @click="resumeContext">
Resume Context
</button>
</div>
<div class="col-auto">
<button @click="stopContext">
Suspend Context
</button>
</div>
</div>
</div>
</div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
import KeyboardPlayHandler from '~/archive/components/KeyboardPlayHandler.vue'
// import setupNodes from '@/components/Player/Nodes'
import { useAudioStore } from '~/stores/audio'
export default {
components: {
AudioElement,
KeyboardPlayHandler
},
data () {
  return {
    // Selectable background tracks served from the site's /sounds folder.
    audioList: [
      { id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
      { id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
      { id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
      { id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
    ],
    // Masking-noise track; path comes from the Nuxt runtime config.
    noise: { id: 5, title: 'Noise', src: window.location.origin + useRuntimeConfig().public.tracks.masking_src_mp3 },
    // Title of the currently selected track ('' = none yet).
    selectedAudioTitle: '',
    // Playlist descriptor ({ id, title, src }) of the current track.
    currentElement: {},
    // Cache of every Web Audio node built by handlePlayingUpdate/connectNodes.
    createdNodes: {},
    // RNBO patch exports used to build the two devices.
    musicPatch: importedMusicPatcher,
    noisePatch: importedNoisePatcher,
    // Shared AudioContext, assigned from the audio store in mounted().
    audioContext: null,
    // True once the graph is connected and ramping up.
    playState: false
  }
},
computed: {
// Playlist entry matching the current selection, or null when nothing matches.
selectedAudio () {
  return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
},
checkContextCompatibility () {
// Assume createdNodes is an object where each property is a node
const nodes = Object.values(this.createdNodes)
const incompatible = nodes.filter(node => node.context !== this.audioContext)
if (nodes.length === nodes.length - incompatible.length) { return true }
// // useNuxtApp().$logger.log(incompatible.length + '/' + nodes.length + ' sind inkompatibel')
this.handleIncompatibeError(incompatible)
return false
},
// Reactive mirror of the internal playState flag.
isPlaying () {
  return this.playState
}
},
beforeUnmount () {
if (this.currentElement) {
this.currentElement.pause()
this.currentElement.src = ''
this.currentElement.load()
this.currentElement = null
}
this.disconnectNodes()
if (this.audioContext) { this.audioContext.close() }
this.audioContext = null
},
// Grab the shared AudioContext from the store, select the first track as the
// default, hook up OS media keys, and assign the RNBO patch data.
mounted () {
  this.audioContext = useAudioStore().getContext()
  // Select the first track in the list by default.
  this.selectAudioByTitle(this.audioList[0].title)
  this.addUserNavigationHandling()
  this.musicPatch = importedMusicPatcher
  this.noisePatch = importedNoisePatcher
},
methods: {
// Ramp the noise gain to `volume` over 300 ms (avoids clicks), then mirror
// the node's current gain into `this.noiseGain`.
// NOTE(review): `noiseGain` is not declared in data() — this assignment is
// non-reactive; confirm that is intended.
changeNoiseVolume (volume) {
  if (this.createdNodes.noiseGain) {
    this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
    this.noiseGain = this.createdNodes.noiseGain.gain.value
  }
},
// Ramp the music gain to `volume` over 300 ms (avoids clicks), then mirror
// the node's current gain into `this.musicGain`.
// NOTE(review): `musicGain` is not declared in data() — this assignment is
// non-reactive; confirm that is intended.
changeMusicVolume (volume) {
  if (this.createdNodes.musicGain) {
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
    this.musicGain = this.createdNodes.musicGain.gain.value
  }
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
// Move playback to the adjacent track ('next' or 'previous') and select it.
skip (direction) {
  const nextSong = this.getSong(this.selectedAudioTitle, direction)
  this.selectAudioByTitle(nextSong.title)
},
// Resume the shared AudioContext when the browser has it suspended
// (e.g. autoplay policy); no-op if it is already running.
resumeContext () {
  if (this.audioContext.state === 'suspended') { this.audioContext.resume() } else {
    // useNuxtApp().$logger.log('already resumed?')
  }
},
// Suspend the shared AudioContext when it is running; no-op otherwise.
stopContext () {
  if (this.audioContext.state === 'running') { this.audioContext.suspend() } else {
    // useNuxtApp().$logger.log('already suspended')
  }
},
// Sync `currentElement` with the audioList entry whose title matches.
// NOTE(review): leaves `currentElement` undefined when no entry matches —
// confirm callers tolerate that.
updateCurrentElement (title) {
  this.currentElement = this.audioList.find(e => e.title === title)
},
selectAudioByTitle (title) {
this.selectedAudioTitle = title
// const audioElement = this.audioList.find(e => e.title === this.selectedAudioTitle)
this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
// // useNuxtApp().$logger.log('currentElement: ', this.selectedAudioTitle)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
handleIncompatibeError (incompatible) {
if (incompatible) {
const node = incompatible.pop()
if (node.context !== this.createdNodes.musicSplitter.context) {
const audioContext = this.createdNodes.musicSplitter.context
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
}
} else {
// useNuxtApp().$logger.log('no error to solve')
}
},
connectNodes () {
try {
// Destructure for easier access
const {
musicSource, musicSplitter, microphoneSource,
musicDevice, outputSplitter, musicGain,
noiseSource, noiseSplitter, noiseDevice,
noiseGain, merger
} = this.createdNodes
// Check context compatibility
if (!this.checkContextCompatibility) {
if (!this.checkContextCompatibility) { throw new Error('Incompatible audio context among nodes.') }
}
// Connect music source to splitter (Stereo to 2 Channels)
musicSource.connect(musicSplitter)
// Connect microphone to musicDevice input 0 (Mono)
microphoneSource.connect(musicDevice, 0, 0)
// Connect musicSplitter to musicDevice inputs 1 and 2 (Stereo)
musicSplitter.connect(musicDevice, 0, 1)
musicSplitter.connect(musicDevice, 1, 2)
// Connect musicDevice to outputSplitter (Stereo out)
musicDevice.connect(outputSplitter)
// Optionally connect musicDevice to musicGain for additional gain control (Stereo)
musicDevice.connect(musicGain)
// Connect noise source to noiseSplitter (Stereo to 2 Channels)
noiseSource.connect(noiseSplitter)
// Connect microphone to noiseDevice input 0 (Mono)
microphoneSource.connect(noiseDevice, 0, 0)
// Connect noiseSplitter to noiseDevice inputs 1 and 2 (Stereo)
noiseSplitter.connect(noiseDevice, 0, 1)
noiseSplitter.connect(noiseDevice, 1, 2)
// Connect outputSplitter to noiseDevice inputs 3 and 4 (Stereo from musicDevice)
outputSplitter.connect(noiseDevice, 0, 3)
outputSplitter.connect(noiseDevice, 1, 4)
// Assuming noiseDevice outputs stereo, connect to noiseGain (Stereo)
noiseDevice.connect(noiseGain)
// Assuming you want to merge and output both processed signals (Stereo from both musicGain and noiseGain)
// Connect noiseGain to the first two inputs of the merger (Stereo)
noiseGain.connect(merger, 0, 0)
noiseGain.connect(merger, 0, 1)
// Connect musicGain to the last two inputs of the merger (Stereo)
musicGain.connect(merger, 0, 2)
musicGain.connect(merger, 0, 3)
// Finally, connect the merger to the audio context's destination (Stereo to output)
merger.connect(merger.context.destination)
} catch (e) {
// useNuxtApp().$logger.error('Failed to connect nodes: ', e.message)
// Additional error handling can be implemented here if necessary
}
},
// React to play/pause from the AudioElement.
// On play: either rewire the existing graph (fast path) or request the
// microphone, build both RNBO devices, create every node and connect them
// (slow path). On pause: suspend the shared AudioContext.
handlePlayingUpdate (isPlaying) {
  const audioContext = this.audioContext ? this.audioContext : useAudioStore().getContext()
  let microphoneSource = null
  // // useNuxtApp().$logger.log('Handling playing update = ' + isPlaying) // true when playing
  if (isPlaying) {
    // Web Audio API is available.
    // const musicAudioComponent = this.$refs[this.currentElement.title].audioElement
    if (this.areAllNodesAvailable()) {
      // Fast path: every node already exists; only the music source must be
      // recreated because the selected <audio> element may have changed.
      this.disconnectNodes()
      const musicAudioElement = this.$refs[this.currentElement.title].$refs.audioElement
      const musicGainValue = this.createdNodes.musicGain.gain.value
      const noiseGainValue = this.createdNodes.noiseGain.gain.value
      // Prepare the audio elements (unmute)
      this.$refs[this.currentElement.title].$refs.audioElement.muted = false
      this.$refs.Noise.$refs.audioElement.muted = false
      // replace music node because all Nodes are there
      this.createdNodes.musicSource = null
      this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
      this.connectNodes()
      // Ramp the gains back to their previous values to avoid clicks.
      if (musicGainValue > 0 && musicGainValue <= 1.0) {
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue || 1.0, audioContext.currentTime + 2)
      }
      if (noiseGainValue > 0 && noiseGainValue <= 1.0) {
        this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue || 1.0, audioContext.currentTime + 3)
      }
      this.audioContext.resume()
    } else {
      // Slow path: capture the raw microphone signal (no browser processing).
      navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: false,
          noiseSuppression: false,
          autoGainControl: false
        },
        video: false
      })
        .then((micStream) => {
          microphoneSource = this.audioContext.createMediaStreamSource(micStream)
          this.createdNodes.microphoneSource ||= microphoneSource
          // Return both audioContext and microphoneSource to the next link in the chain
          return { audioContext, microphoneSource }
        })
        .then(({ audioContext, microphoneSource }) => {
          // First RNBO device creation
          return createRNBODevice(audioContext, this.noisePatch).then((noiseRNBODevice) => {
            this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
            // Return all necessary objects for the next step
            return { audioContext, microphoneSource, noiseRNBODevice }
          })
        })
        .then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
          // Second RNBO device creation
          return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
            // Return all necessary objects for the final setup
            this.createdNodes.musicRNBODevice ||= musicRNBODevice
            return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
          })
        }).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
          const musicElement = this.$refs[this.currentElement.title]
          const musicAudioElement = musicElement.$refs.audioElement
          // Ensure this.createdNodes is initialized
          this.createdNodes ||= {}
          // Assign nodes if they don't exist; ||= keeps nodes from a previous run.
          this.createdNodes.microphoneSource ||= microphoneSource
          this.createdNodes.musicDevice ||= musicRNBODevice.node
          this.createdNodes.noiseDevice ||= noiseRNBODevice.node
          this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(this.$refs.Noise.$refs.audioElement)
          this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
          this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
          this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
          this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
          this.createdNodes.musicGain ||= audioContext.createGain()
          this.createdNodes.noiseGain ||= audioContext.createGain()
          this.createdNodes.merger ||= audioContext.createChannelMerger(4)
          musicAudioElement.muted = false
          this.$refs.Noise.$refs.audioElement.muted = false
          // Start almost silent, then ramp up once the graph is connected.
          this.createdNodes.musicGain.gain.value = 0.0001
          this.createdNodes.noiseGain.gain.value = 0.0001
          this.connectNodes()
          setTimeout(() => {
            this.playState = true
            this.createdNodes.musicGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 3)
            this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 4)
            if (audioContext.state === 'suspended') { this.resumeContext() }
          }, 150)
        })
        .catch((_error) => {
          this.disconnectNodes()
          if (this.audioContext) {
            this.audioContext.destination.disconnect()
            // audioContext.destination = null
          }
          this.$toast.error('Oouh sorry! Error while setting up audio, please reload.')
        })
    }
  } else {
    // Pause: keep the graph but suspend the shared context (cheaper than the
    // teardown/rebuild the commented-out lines below used to do).
    // this.audioContext.destination.disconnect()
    this.audioContext.suspend()
    // this.disconnectNodes()
    // const newAudioContext = useAudioStore().getContext()
    // this.audioContext.close()
    // this.audioContext = newAudioContext
  }
},
disconnectNodes () {
// Ensure this.createdNodes is defined and is an object
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
this.playState = false
}
}
}
}
</script>
<style scoped>
.rnboplayer {
position: fixed;
width: 220px; /* Or specify a fixed width like 220px if you prefer */
max-width: 220px; /* This line might be redundant depending on your width strategy */
height: 100px;
display: inline-grid;
z-index: 2;
bottom: 11%;
left: 0;
right: 0;
margin-left: auto;
margin-right: auto;
}
</style>

View File

@@ -0,0 +1,395 @@
<template>
<div>
<KeyboardPlayHandler />
<h2>You will hear the artifacts right after the music begins</h2>
<h2>
This file contains both patches; use the gain sliders of AudioElement to control volume and to skip tracks. The AudioStore is used for the shared AudioContext.
Music is connected directly to the noise patch,
which produces far fewer glitches
</h2>
<div class="rnboplayer">
<div v-if="selectedAudioTitle">
{{ selectedAudio.title }}
<AudioElement
:ref="selectedAudio.title"
:key="selectedAudio.id"
:src="selectedAudio.src"
:title="selectedAudio.title"
@update:playing="handlePlayingUpdate"
@update:volume="changeMusicVolume"
>
<template #default="{}" />
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</div>
<AudioElement
:ref="noise.title"
:key="noise.id"
:src="noise.src"
:title="noise.title"
@update:volume="changeNoiseVolume"
>
<template #default="{}" />
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
<div class="row">
<div class="col-auto">
<button @click="skip">
Skip
</button>
</div>
<div class="col-auto">
<button @click="resumeContext">
Resume Context
</button>
</div>
<div class="col-auto">
<button @click="stopContext">
Suspend Context
</button>
</div>
</div>
</div>
</div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
import KeyboardPlayHandler from '~/archive/components/KeyboardPlayHandler.vue'
// import setupNodes from '@/components/Player/Nodes'
import { useAudioStore } from '~/stores/audio'
export default {
components: {
AudioElement,
KeyboardPlayHandler
},
data () {
  return {
    // Selectable background tracks served from the site's /sounds folder.
    audioList: [
      { id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
      { id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
      { id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
      { id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
    ],
    // Masking-noise track (static FLAC asset).
    noise: { id: 5, title: 'Noise', src: window.location.origin + '/masking/noise.flac' },
    // Title of the currently selected track ('' = none yet).
    selectedAudioTitle: '',
    // Playlist descriptor ({ id, title, src }) of the current track.
    currentElement: {},
    // Cache of every Web Audio node built by handlePlayingUpdate/connectNodes.
    createdNodes: {},
    // RNBO patch exports used to build the two devices.
    musicPatch: importedMusicPatcher,
    noisePatch: importedNoisePatcher,
    // Shared AudioContext, assigned from the audio store in mounted().
    audioContext: null,
    // True once the graph is connected and ramping up.
    playState: false
  }
},
computed: {
// Playlist entry matching the current selection, or null when nothing matches.
selectedAudio () {
  return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
},
checkContextCompatibility () {
// Assume createdNodes is an object where each property is a node
const nodes = Object.values(this.createdNodes)
const incompatible = nodes.filter(node => node.context !== this.audioContext)
if (nodes.length === nodes.length - incompatible.length) { return true }
// // useNuxtApp().$logger.log(incompatible.length + '/' + nodes.length + ' sind inkompatibel')
this.handleIncompatibeError(incompatible)
return false
},
// Reactive mirror of the internal playState flag.
isPlaying () {
  return this.playState
}
},
// Tear down the audio graph and release the shared AudioContext on unmount.
beforeUnmount () {
  this.disconnectNodes()
  if (this.audioContext) { this.audioContext.close() }
  this.audioContext = null
},
// Grab the shared AudioContext from the store, select the first track as the
// default, hook up OS media keys, and assign the RNBO patch data.
mounted () {
  this.audioContext = useAudioStore().getContext()
  // Select the first track in the list by default.
  this.selectAudioByTitle(this.audioList[0].title)
  this.addUserNavigationHandling()
  this.musicPatch = importedMusicPatcher
  this.noisePatch = importedNoisePatcher
},
methods: {
// Ramp the noise gain to `volume` over 300 ms (avoids clicks), then mirror
// the node's current gain into `this.noiseGain`.
// NOTE(review): `noiseGain` is not declared in data() — this assignment is
// non-reactive; confirm that is intended.
changeNoiseVolume (volume) {
  if (this.createdNodes.noiseGain) {
    this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
    this.noiseGain = this.createdNodes.noiseGain.gain.value
  }
},
// Ramp the music gain to `volume` over 300 ms (avoids clicks), then mirror
// the node's current gain into `this.musicGain`.
// NOTE(review): `musicGain` is not declared in data() — this assignment is
// non-reactive; confirm that is intended.
changeMusicVolume (volume) {
  if (this.createdNodes.musicGain) {
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
    this.musicGain = this.createdNodes.musicGain.gain.value
  }
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
// Move playback to the adjacent track ('next' or 'previous') and select it.
skip (direction) {
  const nextSong = this.getSong(this.selectedAudioTitle, direction)
  this.selectAudioByTitle(nextSong.title)
},
// Resume the shared AudioContext when the browser has it suspended
// (e.g. autoplay policy); no-op if it is already running.
resumeContext () {
  if (this.audioContext.state === 'suspended') { this.audioContext.resume() } else {
    // useNuxtApp().$logger.log('already resumed?')
  }
},
// Suspend the shared AudioContext when it is running; no-op otherwise.
stopContext () {
  if (this.audioContext.state === 'running') { this.audioContext.suspend() } else {
    // useNuxtApp().$logger.log('already suspended')
  }
},
// Sync `currentElement` with the audioList entry whose title matches.
// NOTE(review): leaves `currentElement` undefined when no entry matches —
// confirm callers tolerate that.
updateCurrentElement (title) {
  this.currentElement = this.audioList.find(e => e.title === title)
},
selectAudioByTitle (title) {
this.selectedAudioTitle = title
// const audioElement = this.audioList.find(e => e.title === this.selectedAudioTitle)
this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
// // useNuxtApp().$logger.log('currentElement: ', this.selectedAudioTitle)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
handleIncompatibeError (incompatible) {
if (incompatible) {
const node = incompatible.pop()
if (node.context !== this.createdNodes.musicSplitter.context) {
const audioContext = this.createdNodes.musicSplitter.context
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
}
} else {
// useNuxtApp().$logger.log('no error to solve')
}
},
connectNodes () {
try {
// Destructure for easier access
const {
musicSource, microphoneSource,
musicGain,
noiseSource, noiseSplitter, noiseDevice,
noiseGain, merger
} = this.createdNodes
// Check context compatibility
if (!this.checkContextCompatibility) {
if (!this.checkContextCompatibility) { throw new Error('Incompatible audio context among nodes.') }
}
// Connect music source to splitter (Stereo to 2 Channels)
musicSource.connect(musicGain)
// Connect microphone to musicDevice input 0 (Mono)
// Connect musicDevice to outputSplitter (Stereo out)
// musicDevice.connect(outputSplitter)
// Optionally connect musicDevice to musicGain for additional gain control (Stereo)
// musicDevice.connect(musicGain)
// Connect noise source to noiseSplitter (Stereo to 2 Channels)
noiseSource.connect(noiseSplitter)
// Connect microphone to noiseDevice input 0 (Mono)
microphoneSource.connect(noiseDevice, 0, 0)
// Connect noiseSplitter to noiseDevice inputs 1 and 2 (Stereo)
noiseSplitter.connect(noiseDevice, 0, 1)
noiseSplitter.connect(noiseDevice, 1, 2)
// Connect outputSplitter to noiseDevice inputs 3 and 4 (Stereo from musicDevice)
// const rndSrc = noiseDevice.context.createConstantSource()
// const rndSrc2 = noiseDevice.context.createConstantSource()
// rndSrc.connect(noiseDevice, 0, 3)
// rndSrc2.connect(noiseDevice, 0, 4)
// Assuming noiseDevice outputs stereo, connect to noiseGain (Stereo)
noiseDevice.connect(noiseGain)
// Assuming you want to merge and output both processed signals (Stereo from both musicGain and noiseGain)
// Connect noiseGain to the first two inputs of the merger (Stereo)
noiseGain.connect(merger, 0, 0)
noiseGain.connect(merger, 0, 1)
// Connect musicGain to the last two inputs of the merger (Stereo)
musicGain.connect(merger, 0, 2)
musicGain.connect(merger, 0, 3)
// Finally, connect the merger to the audio context's destination (Stereo to output)
merger.connect(merger.context.destination)
} catch (e) {
// useNuxtApp().$logger.error('Failed to connect nodes: ', e.message)
// Additional error handling can be implemented here if necessary
}
},
    handlePlayingUpdate (isPlaying) {
      // Play/pause handler for the AudioElement children.
      // First play: request the microphone, build both RNBO devices and wire
      // the whole graph (mic + music -> musicDevice, mic + noise + music out
      // -> noiseDevice, both gains -> merger -> destination).
      // Later plays: rebuild only the music element source and reconnect.
      // Pause: suspend the component's AudioContext.
      const audioContext = this.audioContext ? this.audioContext : useAudioStore().getContext()
      let microphoneSource = null
      // // useNuxtApp().$logger.log('Handling playing update = ' + isPlaying) // true when playing
      if (isPlaying) {
        // Web Audio API is available.
        // const musicAudioComponent = this.$refs[this.currentElement.title].audioElement
        if (this.areAllNodesAvailable()) {
          this.disconnectNodes()
          const musicAudioElement = this.$refs[this.currentElement.title].$refs.audioElement
          // Remember current gains so the fade-in below restores them.
          const musicGainValue = this.createdNodes.musicGain.gain.value
          const noiseGainValue = this.createdNodes.noiseGain.gain.value
          // Prepare the audio elements (unmute)
          this.$refs[this.currentElement.title].$refs.audioElement.muted = false
          this.$refs.Noise.$refs.audioElement.muted = false
          // replace music node because all Nodes are there
          this.createdNodes.musicSource = null
          this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
          this.connectNodes()
          // Ramp each gain back to its previous level (only if it was audible
          // and within the valid 0..1 range).
          if (musicGainValue > 0 && musicGainValue <= 1.0) {
            this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue || 1.0, audioContext.currentTime + 2)
          }
          if (noiseGainValue > 0 && noiseGainValue <= 1.0) {
            this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue || 1.0, audioContext.currentTime + 3)
          }
          // NOTE(review): ramps were scheduled on 'audioContext' (possibly the
          // store's context) while resume() targets this.audioContext — confirm
          // these are the same instance.
          this.audioContext.resume()
        } else {
          // Graph not built yet — ask for a raw, unprocessed microphone stream
          // (all browser DSP disabled) and build everything step by step.
          navigator.mediaDevices.getUserMedia({
            audio: {
              echoCancellation: false,
              noiseSuppression: false,
              autoGainControl: false
            },
            video: false
          })
            .then((micStream) => {
              microphoneSource = this.audioContext.createMediaStreamSource(micStream)
              this.createdNodes.microphoneSource ||= microphoneSource
              // Return both audioContext and microphoneSource to the next link in the chain
              return { audioContext, microphoneSource }
            })
            .then(({ audioContext, microphoneSource }) => {
              // First RNBO device creation
              return createRNBODevice(audioContext, this.noisePatch).then((noiseRNBODevice) => {
                this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
                // Return all necessary objects for the next step
                return { audioContext, microphoneSource, noiseRNBODevice }
              })
            })
            .then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
              // Second RNBO device creation
              return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
                // Return all necessary objects for the final setup
                this.createdNodes.musicRNBODevice ||= musicRNBODevice
                return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
              })
            }).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
              const musicElement = this.$refs[this.currentElement.title]
              const musicAudioElement = musicElement.$refs.audioElement
              // Ensure this.createdNodes is initialized
              this.createdNodes ||= {}
              // Assign nodes if they don't exist
              this.createdNodes.microphoneSource ||= microphoneSource
              this.createdNodes.musicDevice ||= musicRNBODevice.node
              this.createdNodes.noiseDevice ||= noiseRNBODevice.node
              this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(this.$refs.Noise.$refs.audioElement)
              this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
              this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
              this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
              this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
              this.createdNodes.musicGain ||= audioContext.createGain()
              this.createdNodes.noiseGain ||= audioContext.createGain()
              this.createdNodes.merger ||= audioContext.createChannelMerger(4)
              musicAudioElement.muted = false
              this.$refs.Noise.$refs.audioElement.muted = false
              // Start nearly silent, then fade in after the graph settles.
              // (exponentialRampToValueAtTime cannot start from exactly 0.)
              this.createdNodes.musicGain.gain.value = 0.0001
              this.createdNodes.noiseGain.gain.value = 0.0001 // does nothing
              this.connectNodes()
              setTimeout(() => {
                this.playState = true
                this.createdNodes.musicGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 3)
                this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(1, audioContext.currentTime + 4)
                if (audioContext.state === 'suspended') { this.resumeContext() }
              }, 1000)
            })
            .catch((_error) => {
              // Any failure in the chain tears the partial graph down and
              // informs the user; a reload restarts the setup from scratch.
              this.disconnectNodes()
              if (this.audioContext) {
                this.audioContext.destination.disconnect()
                // audioContext.destination = null
              }
              this.$toast.error('Oouh sorry! Error while setting up audio, please reload.')
            })
        }
      } else {
        // Paused: keep the graph but halt audio rendering.
        // this.audioContext.destination.disconnect()
        this.audioContext.suspend()
        // this.disconnectNodes()
        // const newAudioContext = useAudioStore().getContext()
        // this.audioContext.close()
        // this.audioContext = newAudioContext
      }
    },
disconnectNodes () {
// Ensure this.createdNodes is defined and is an object
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
this.playState = false
}
}
}
}
</script>
<style scoped>
/* Floating player panel, centered horizontally near the bottom of the viewport. */
.rnboplayer {
  position: fixed;
  width: 220px; /* Or specify a fixed width like 220px if you prefer */
  max-width: 220px; /* This line might be redundant depending on your width strategy */
  height: 100px;
  display: inline-grid;
  z-index: 2;
  bottom: 11%;
  left: 0;
  right: 0;
  margin-left: auto;
  margin-right: auto;
}
</style>

View File

@@ -0,0 +1,455 @@
<template>
<div class="rnboplayer">
<label class="switch">
<input type="checkbox" :v-model="deviceUsed">
<span class="slider" />
</label>
<div class="container-fluid">
<div class="row justify-content-center">
<div class="adaptive pb-3">
<div class="col-12 ">
<div class="d-none d-md-block mx-auto pb-1" style="width: 225px">
<div class="progress " style="height: 10px">
<div
class="progress-bar bar"
role="progressbar"
aria-label="Basic example"
:style="{width:bar_width+'%'}"
style="background-color: #e9c046;transition: 0.1s"
aria-valuenow="75"
aria-valuemin="0"
aria-valuemax="100"
/>
</div>
</div>
<div class="d-flex justify-content-center mb-1">
<nuxt-link to="#adaptive-modal" data-bs-target="#adaptive-modal" data-bs-toggle="modal" class="text-muted text-decoration-none fw-bold fs-6 ">
{{ t('Adaptive soundscape') }} : <span class="" style="color: #e9c046">{{ t('On') }}</span>
</nuxt-link><span class="ps-3"><i style="padding: 5px 0px;" class="fa-solid text-muted d-flex fa-chevron-right" /></span>
</div>
<div class="d-block d-md-none mx-auto pb-1" style="width: 225px">
<div class="progress " style="height: 10px">
<div
class="progress-bar bar"
role="progressbar"
aria-label="Basic example"
:style="{width:bar_width+'%'}"
style="background-color: #e9c046;transition: 0.1s"
aria-valuenow="75"
aria-valuemin="0"
aria-valuemax="100"
/>
</div>
</div>
</div>
<BootomBar />
</div>
</div>
</div>
<div v-if="selectedAudioTitle">
{{ selectedAudio.title }}
<AudioElement
:ref="selectedAudio.title"
:key="selectedAudio.id"
:src="selectedAudio.src"
:title="selectedAudio.title"
@update:playing="handlePlayingUpdate"
@volume="changeMusicVolume"
>
<template #default="{ }" />
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</div>
<AudioElement
:ref="noise.title"
:key="noise.id"
:src="noise.src"
:title="noise.title"
@update:playing="handlePlayingUpdate"
@volume="changeNoiseVolume"
>
<template #default="{}" />
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
<button @click="skip">
Skip
</button>
</div>
</template>
<script>
import AudioElement from '@/components/experiments/AudioElement.vue'
import importedMusicPatcher from '@/assets/patch/music_patch.export.json'
import importedNoisePatcher from '@/assets/patch/noise_patch.export.json'
import { createRNBODevice } from '@/lib/AudioFunctions'
// import setupNodes from '@/components/Player/Nodes'
// import { useAudioStore } from '~/stores/audio'
export default {
components: {
AudioElement
},
  data () {
    // NOTE(review): 't: useI18n' stores the composable function itself, so the
    // template's 't(...)' calls useI18n with a message key instead of a
    // translator — the usual pattern is 'useI18n().t'. Confirm translations
    // actually resolve here.
    return {
      t: useI18n,
      // Selectable background soundscapes served from the site's public folder.
      audioList: [
        { id: 1, title: 'Lagoon', src: window.location.origin + '/sounds/lagoon.m4a' },
        { id: 2, title: 'Forest', src: window.location.origin + '/sounds/forest.m4a' },
        { id: 3, title: 'Meadow', src: window.location.origin + '/sounds/meadow.m4a' },
        { id: 4, title: 'Tropics', src: window.location.origin + '/sounds/tropics.m4a' }
      ],
      // Masking-noise track, always rendered alongside the selected soundscape.
      noise: { id: 5, title: 'Noise', src: window.location.origin + '/masking/noise.flac' },
      selectedAudioTitle: '',
      currentElement: {},
      // Web Audio nodes created lazily by handlePlayingUpdate/connectNodes.
      createdNodes: {},
      musicPatch: importedMusicPatcher,
      noisePatch: importedNoisePatcher,
      audioContext: null,
      isPlayerRunning: false,
      checked: false,
      // meter
      meter: null,
      bar_width: 0,
      analyser: null,
      dataArray: null,
      bar_val: 25
    }
  },
computed: {
selectedAudio () {
return this.audioList.find(audio => audio.title === this.selectedAudioTitle) || null
},
deviceUsed () {
return this.isPlayerRunning
}
},
beforeUnmount () {
this.disconnectNodes()
if (this.audioContext) { this.audioContext.close() }
this.audioContext = null
},
  mounted () {
    // Create the component's AudioContext eagerly; browsers may keep it
    // 'suspended' until a user gesture — TODO confirm resume path.
    this.audioContext = new AudioContext()
    this.currentElement = this.audioList[0]
    // Example: Select 'Song Three' by default
    this.selectAudioByTitle(this.audioList[0].title)
    this.addUserNavigationHandling()
    // NOTE(review): these re-assignments duplicate the defaults already set
    // in data(); they appear redundant.
    this.musicPatch = importedMusicPatcher
    this.noisePatch = importedNoisePatcher
  },
methods: {
changeMusicVolume (newValue) {
if (!this.createdNodes.musicGain.gain) { return }
// useNuxtApp().$logger.log(this.createdNodes.musicGain.gain)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.createdNodes.musicGain.context.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(newValue / 100, this.createdNodes.musicGain.context.currentTime + 0.01)
},
changeNoiseVolume (newValue) {
if (!this.createdNodes.noiseGain.gain) { return }
this.createdNodes.noiseGain.gain.setValueAtTime(newValue / 100, this.createdNodes.noiseGain.context.currentTime + 0.01)
// this.createdNodes.noiseGain.gain.value = newValue / 100
},
addUserNavigationHandling () {
if ('mediaSession' in navigator) {
// Set the handler for the next track action
navigator.mediaSession.setActionHandler('nexttrack', () => {
this.skip('next')
})
// Set the handler for the previous track action
navigator.mediaSession.setActionHandler('previoustrack', () => {
this.skip('previous')
})
}
},
getSong (currentTitle, direction) {
const index = this.audioList.findIndex(song => song.title === currentTitle)
let adjacentIndex = index + (direction === 'next' ? 1 : -1)
// Loop back to the first song if 'next' goes beyond the last index
if (adjacentIndex >= this.audioList.length) {
adjacentIndex = 0
} else if (adjacentIndex < 0) {
adjacentIndex = this.audioList.length - 1
}
return this.audioList[adjacentIndex]
},
skip (direction) {
const nextSong = this.getSong(this.selectedAudioTitle, direction)
this.selectAudioByTitle(nextSong.title)
},
updateCurrentElement (title) {
this.currentElement = this.audioList.find(e => e.title === title)
},
selectAudioByTitle (title) {
this.selectedAudioTitle = title
// const audioElement = this.audioList.find(e => e.title === this.selectedAudioTitle)
this.currentElement = this.audioList[this.audioList.findIndex(e => e.title === title)]
// useNuxtApp().$logger.log('currentElement: ', this.selectedAudioTitle)
},
areAllNodesAvailable () {
// List of required node keys as you've named them in this.createdNodes
const requiredNodes = [
'musicSource', 'musicSplitter', 'microphoneSource',
'musicDevice', 'outputSplitter', 'musicGain',
'noiseSource', 'noiseSplitter', 'noiseDevice',
'noiseGain', 'merger'
]
// Check if each required node exists and is not undefined in this.createdNodes
return requiredNodes.every(nodeKey => this.createdNodes[nodeKey] !== undefined)
},
    updateMeter () {
      // Self-scheduling requestAnimationFrame loop: samples the analyser,
      // converts the RMS to a 0..100 level and drives the progress-bar width.
      requestAnimationFrame(this.updateMeter)
      this.analyser.getByteFrequencyData(this.dataArray)
      const rms = this.getRMS(this.dataArray)
      // dB relative to 128 (half of the byte range), shifted by +100 and
      // clamped into the displayable 0..100 range.
      let level = 20 * Math.log10(rms / 128)
      level = Math.max(0, Math.min(100, level + 100))
      // bar.style.width = level + '%';
      this.bar_width = level
    },
getRMS (dataArray) {
let rms = 0
for (let i = 0; i < dataArray.length; i++) {
rms += dataArray[i] * dataArray[i]
}
rms /= dataArray.length
rms = Math.sqrt(rms)
return rms
},
    connectNodes () {
      // Wire the full processing graph:
      //   mic + music (split L/R)           -> musicDevice -> outputSplitter, musicGain
      //   mic + noise (split L/R) + music out -> noiseDevice -> noiseGain
      //   noiseGain + musicGain             -> 4-input merger -> destination
      // Also taps the microphone into an analyser that feeds the level meter.
      // The exact output/input indices below match the RNBO patches' port
      // layout; do not reorder them.
      // Destructure for easier access
      const audioCtx = this.createdNodes.microphoneSource.context
      this.analyser = audioCtx.createAnalyser()
      this.createdNodes.microphoneSource.connect(this.analyser)
      this.analyser.fftSize = 2048
      const bufferLength = this.analyser.frequencyBinCount
      this.dataArray = new Uint8Array(bufferLength)
      this.updateMeter()
      const {
        musicSource, musicSplitter, microphoneSource,
        musicDevice, outputSplitter, musicGain,
        noiseSource, noiseSplitter, noiseDevice,
        noiseGain, merger
      } = this.createdNodes
      // Assuming all nodes are created and references to them are correct
      // Connect music source to splitter (Stereo to 2 Channels)
      musicSource.connect(musicSplitter) // 2 channels: L, R
      // Connect microphone to musicDevice input 0 (Mono)
      microphoneSource.connect(musicDevice, 0, 0) // 1 channel: Microphone
      // Connect musicSplitter to musicDevice inputs 1 and 2 (Stereo)
      musicSplitter.connect(musicDevice, 0, 1) // 1 channel: Left
      musicSplitter.connect(musicDevice, 1, 2) // 1 channel: Right
      // Connect musicDevice to outputSplitter (Stereo out)
      musicDevice.connect(outputSplitter) // Assuming musicDevice outputs stereo
      // Optionally connect musicDevice to musicGain for additional gain control (Stereo)
      musicDevice.connect(musicGain) // Assuming musicDevice outputs stereo, connected to both channels of musicGain
      // Connect noise source to noiseSplitter (Stereo to 2 Channels)
      noiseSource.connect(noiseSplitter) // 2 channels: L, R
      // Connect microphone to noiseDevice input 0 (Mono)
      microphoneSource.connect(noiseDevice, 0, 0) // 1 channel: Microphone
      // Connect noiseSplitter to noiseDevice inputs 1 and 2 (Stereo)
      noiseSplitter.connect(noiseDevice, 0, 1) // 1 channel: Left
      noiseSplitter.connect(noiseDevice, 1, 2) // 1 channel: Right
      // Connect outputSplitter to noiseDevice inputs 3 and 4 (Stereo from musicDevice)
      outputSplitter.connect(noiseDevice, 0, 3) // 1 channel: Left from musicDevice
      outputSplitter.connect(noiseDevice, 1, 4) // 1 channel: Right from musicDevice
      // Assuming noiseDevice outputs stereo, connect to noiseGain (Stereo)
      noiseDevice.connect(noiseGain) // Assuming noiseDevice outputs stereo, connected to both channels of noiseGain
      // Assuming you want to merge and output both processed signals (Stereo from both musicGain and noiseGain)
      // Connect noiseGain to the first two inputs of the merger (Stereo)
      noiseGain.connect(merger, 0, 0) // 1 channel: Left
      noiseGain.connect(merger, 0, 1) // 1 channel: Right
      // Connect musicGain to the last two inputs of the merger (Stereo)
      musicGain.connect(merger, 0, 2) // 1 channel: Left
      musicGain.connect(merger, 0, 3) // 1 channel: Right
      // Finally, connect the merger to the audio context's destination (Stereo to output)
      merger.connect(merger.context.destination)
    },
handlePlayingUpdate (isPlaying) {
const ContextClass = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext)
let audioContext = this.audioContext ? this.audioContext : new ContextClass()
let microphoneSource = null
// useNuxtApp().$logger.log('Handling playing update = ' + isPlaying) // true when playing
if (isPlaying) {
// Web Audio API is available.
// const musicAudioComponent = this.$refs[this.currentElement.title].audioElement
if (this.areAllNodesAvailable()) {
this.disconnectNodes()
// reconnecting because all Nodes are there
this.connectNodes()
this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(1.0, audioContext.currentTime + 2)
this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(1.0, audioContext.currentTime + 3)
// useNuxtApp().$logger.log('Connected everything because it was there already')
} else {
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false,
noiseSuppression: false,
autoGainControl: false
},
video: false
})
.then((micStream) => {
audioContext = new ContextClass()
microphoneSource = audioContext.createMediaStreamSource(micStream)
this.createdNodes.microphoneSource ||= microphoneSource
// Return both audioContext and microphoneSource to the next link in the chain
// useNuxtApp().$logger.log('Step 1 Micstream created')
return { audioContext, microphoneSource }
})
.then(({ audioContext, microphoneSource }) => {
// First RNBO device creation
return createRNBODevice(audioContext, this.noisePatch).then((noiseRNBODevice) => {
this.createdNodes.noiseRNBODevice ||= noiseRNBODevice
// Return al.then(() => {hen(() => necessary objects for the next step
// useNuxtApp().$logger.log('Step 2 AudioContext, NoiseDevice und MicSource created')
return { audioContext, microphoneSource, noiseRNBODevice }
})
})
.then(({ audioContext, microphoneSource, noiseRNBODevice }) => {
// Second RNBO device creation
return createRNBODevice(audioContext, this.musicPatch).then((musicRNBODevice) => {
// Return all necessary objects for the final setup
this.createdNodes.musicRNBODevice ||= musicRNBODevice
// useNuxtApp().$logger.log('Step 3 AudioContext, NoiseDevice, musicDevice und MicSource created')
return { audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }
})
}).then(({ audioContext, microphoneSource, noiseRNBODevice, musicRNBODevice }) => {
// Ensure this.createdNodes is initialized
this.createdNodes ||= {}
// Assign nodes if they don't exist
this.createdNodes.microphoneSource ||= microphoneSource
this.createdNodes.musicDevice ||= musicRNBODevice.node
this.createdNodes.noiseDevice ||= noiseRNBODevice.node
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(this.$refs.Noise.$refs.audioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(this.$refs[this.currentElement.title].$refs.audioElement)
this.createdNodes.musicSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.outputSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.merger ||= audioContext.createChannelMerger(4)
this.createdNodes.musicGain.gain.value = 0.001
this.createdNodes.noiseGain.gain.value = 0.001 // macht nichts
this.connectNodes()
this.createdNodes.musicGain.gain.exponentialRampToValueAtTime(0.5, audioContext.currentTime + 3)
this.createdNodes.noiseGain.gain.exponentialRampToValueAtTime(0.5, audioContext.currentTime + 4)
})
.catch((_error) => {
// this.disconnectNodes()
if (this.audioContext) {
this.audioContext.destination.disconnect()
// audioContext.destination = null
}
})
}
} else {
this.disconnectNodes()
}
},
disconnectNodes () {
// Ensure this.createdNodes is defined and is an object
if (this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof node.disconnect === 'function') {
node.disconnect()
}
// Additional handling for specific types of nodes, if necessary
// For example, stopping an OscillatorNode
if (node instanceof OscillatorNode) {
node.stop()
}
})
}
}
}
}
</script>
<style scoped>
/* Floating player panel, centered horizontally near the bottom of the viewport. */
.rnboplayer {
  position: fixed;
  width: 220px; /* Or specify a fixed width like 220px if you prefer */
  max-width: 220px; /* This line might be redundant depending on your width strategy */
  height: 100px;
  display: inline-grid;
  z-index: 2;
  bottom: 11%;
  left: 0;
  right: 0;
  margin-left: auto;
  margin-right: auto;
}
/* The switch - the box around the slider */
.switch {
position: relative;
display: inline-block;
width: 60px;
height: 34px;
}
/* Hide default HTML checkbox */
.switch input {
opacity: 0;
width: 0;
height: 0;
}
/* The slider */
.slider {
position: absolute;
cursor: pointer;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: #ccc;
-webkit-transition: .4s;
transition: .4s;
}
.slider:before {
position: absolute;
content: "";
height: 26px;
width: 26px;
left: 4px;
bottom: 4px;
background-color: white;
-webkit-transition: .4s;
transition: .4s;
}
input:checked + .slider {
background-color: #2196F3;
}
input:focus + .slider {
box-shadow: 0 0 1px #2196F3;
}
input:checked + .slider:before {
-webkit-transform: translateX(26px);
-ms-transform: translateX(26px);
transform: translateX(26px);
}
</style>