Initial commit

This commit is contained in:
Mindboost
2025-07-01 10:53:26 +00:00
commit 38050e5c69
416 changed files with 48708 additions and 0 deletions

View File

@@ -0,0 +1,139 @@
<template>
<div class="player">
<NoiseControlledWebAudio3Band
v-for="(frequency, index) in frequencies"
ref="Player"
:key="frequency"
:master-attack="masterAttack"
:master-release="masterRelease"
:center-frequency="frequency"
:master-gain="masterGain"
:q-factor="qFactors[index]"
@ready="onBandReady"
@update:mid-volume="controlMusicGain"
/>
</div>
</template>
<script lang="ts">
import { useAudioStore } from '../../../stores/audio'
import { useMicStore } from '~/stores/microphone'
import type { Microphone } from '~/stores/interfaces/Microphone'
import NoiseControlledWebAudio3Band from '~/components/experiments/tests/ControlValues/NoiseControlledWebAudio3Band.vue'
export default {
name: 'AdaptiveNoiseGain',
components: {
NoiseControlledWebAudio3Band
},
emits: ['musicGain'],
setup () {
const masterGain = ref(useAudioStore().getMasterGainNoise())
const player = ref(null)
const { t } = useI18n()
const frequencies = ref([150, 1500, 8000])
const qFactors = ref([0.8, 0.9, 0.6])
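// Three parallel bands: low (150 Hz), mid (1.5 kHz) and high (8 kHz), each with its own Q factor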
const loadedBands = ref(0)
const muted = computed(() => useAudioStore().getNoiseVolume < 0.01)
let oldVolume = 0
const route = useRoute()
const isExperimentsRoute = computed(() => route.path.match(/\/[a-z]{2}\/experiments/))
const masterAttack = ref(120000 * 2) // example value, specified in samples
const masterRelease = ref(144000 * 2)
const loading = computed(() => loadedBands.value < frequencies.value.length)
const onBandReady = () => {
loadedBands.value++
}
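// Mutes or unmutes the noise path: ramps the master noise gain to 0, or back to the
// previously stored volume (falling back to 1), over 0.4 s and keeps the store in sync.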
const toggleMute = () => {
if (!muted.value) {
oldVolume = masterGain.value.gain.value
masterGain.value.gain.linearRampToValueAtTime(0, masterGain.value.context.currentTime + 0.4)
useAudioStore().setNoiseVolume(0)
} else if (oldVolume > 0) {
masterGain.value.gain.linearRampToValueAtTime(oldVolume, masterGain.value.context.currentTime + 0.4)
useAudioStore().setNoiseVolume(oldVolume)
} else {
masterGain.value.gain.linearRampToValueAtTime(1, masterGain.value.context.currentTime + 0.4)
useAudioStore().setNoiseVolume(1)
}
}
return {
frequencies,
loading,
onBandReady,
t,
loadedBands,
masterAttack,
masterRelease,
isExperimentsRoute,
qFactors,
masterGain,
toggleMute,
muted,
player
}
},
data () {
return {
audioContext: useAudioStore().getContext(),
musicReady: false,
tropics_src: window.location.origin + useRuntimeConfig().public.tracks.masking_src as string,
fading: false,
connected: false,
volume: useAudioStore().noiseVolume,
previousVolume: useAudioStore().noiseVolume
}
},
watch: {
volume (newVolume: number) {
const audioStore = useAudioStore()
audioStore.setNoiseVolume(newVolume)
// Ramp to the new value over 125 ms (the second argument is an absolute context time)
if (!isNaN(newVolume)) { audioStore.getMasterGainNoise().gain.linearRampToValueAtTime(newVolume, this.audioContext.currentTime + 0.125) }
}
},
beforeUnmount () {
const micro = useMicStore().getMicrophone() as Microphone
micro.microphoneStream?.getTracks().forEach(m => m.stop())
},
methods: {
changeVolumeOnWheel (event:WheelEvent) {
// Adjust volume on wheel scroll
const gainValue = this.volume
const deltaY = event.deltaY
if (deltaY < 0) {
const volumeAdd = (Math.min(1, gainValue + 0.02))
this.volume = volumeAdd
} else {
const volumeCut = (Math.max(0, gainValue - 0.02))
this.volume = volumeCut
}
},
controlMusicGain (value: string) {
useAudioStore().setVolume(parseFloat(value))
this.$emit('musicGain', value)
},
handleCanPlayNoise () {
// useNuxtApp().$logger.log('NoiseElemeint has now playingstate: ' + state)
this.musicReady = true
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
}
}
}
</script>

View File

@@ -0,0 +1,239 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
v-model:volume="volume"
:src="src"
:title="title"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img
v-if="volume == 0"
class="slider-icon"
style="width: 25px; height: 25px;"
src="~/assets/image/music_muted.svg"
title="Click to unmute"
@click="toggleMute()"
>
<img
v-else
class="slider-icon"
style="width: 25px; height: 25px;"
src="~/assets/image/music.svg"
title="Click to mute"
@click="toggleMute()"
>
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGain',
components: { AudioElement },
props: {
src: {
type: String,
required: true
},
title: {
type: String,
required: true
}
},
emits: ['musicReady'],
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
fading: false,
connected: false,
muted: false,
volume: useAudioStore().getVolume,
previousVolume: useAudioStore().getVolume
}
},
watch: {
musicReady (value) {
this.$emit('musicReady', value)
this.handlePlayingUpdate(true)
}
},
mounted () {
this.applyStoredVolume()
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Disconnect every entry that is actually an AudioNode
if (node instanceof AudioNode) {
node.disconnect()
}
})
this.createdNodes = null
}
},
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
if (this.muted) {
// Unmute: restore the previously stored volume
this.muted = false
audioElement.muted = false
this.volume = this.previousVolume || 1 // fall back to 1 if no previous value was stored
audioElement.volume = this.volume
} else {
// Mute: remember the current volume, then mute the audio
this.previousVolume = this.volume
this.volume = 0
audioElement.volume = 0
this.muted = true
audioElement.muted = true
}
useAudioStore().setVolume(this.volume)
element.$emit('update:volume', this.volume)
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
if (useAudioStore().playing !== true) {
useNuxtApp().$logger.log('Skip interaction, because playing state is false.')
} else {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
audioElement.play()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, 3000) // clear the fading flag once the 3 s ramp has finished
}
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic () {
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
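// If the <audio> element meanwhile points at a different file, rebuild the
// MediaElementSource and re-wire it into the gain chain.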
if (musicAudioElement.currentSrc !== this.src) {
this.createdNodes.musicSource?.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
// useAudioStore().playing = true
} else {
// The music has just stopped; react to it.
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
applyStoredVolume () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
// Set the volume on the audio element
audioElement.volume = this.volume
// Emit an event so the AudioElement updates its volume as well
element.$emit('update:volume', this.volume)
},
updateMusicGain (volume: number) {
this.volume = volume // store the volume
useAudioStore().setVolume(volume)
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 1)
}
}
}
}
</script>

View File

@@ -0,0 +1,180 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainForest',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Disconnect every entry that is actually an AudioNode
if (node instanceof AudioNode) {
node.disconnect()
}
})
this.createdNodes = null
this.connected = false
}
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, 3000) // clear the fading flag once the 3 s ramp has finished
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 0.4)
}
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.forest_src) {
this.createdNodes.musicSource?.disconnect()
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// The music has just stopped; react to it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,170 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="lagoon_src"
title="Lagoon"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainLagoon',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
lagoon_src: window.location.origin + useRuntimeConfig().public.tracks.lagoon_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Disconnect every entry that is actually an AudioNode
if (node instanceof AudioNode) {
node.disconnect()
}
})
this.createdNodes = null
}
},
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, 3000) // clear the fading flag once the 3 s ramp has finished
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.lagoon_src) {
this.createdNodes.musicSource?.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
} else {
// The music has just stopped; react to it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,179 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="meadow_src"
title="Meadow"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainMeadow',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
meadow_src: window.location.origin + useRuntimeConfig().public.tracks.meadow_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Disconnect every entry that is actually an AudioNode
if (node instanceof AudioNode) {
node.disconnect()
}
})
this.createdNodes = null
}
},
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, 3000) // clear the fading flag once the 3 s ramp has finished
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.meadow_src) {
this.createdNodes.musicSource?.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// The music has just stopped; react to it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,178 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="tropics_src"
title="Tropic"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainTropic',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
tropics_src: window.location.origin + useRuntimeConfig().public.tracks.tropics_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Disconnect every entry that is actually an AudioNode
if (node instanceof AudioNode) {
node.disconnect()
}
})
}
this.createdNodes = null
},
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, 3000) // clear the fading flag once the 3 s ramp has finished
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.tropics_src) {
this.createdNodes.musicSource?.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// The music has just stopped; react to it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,192 @@
<template>
<div class="player">
<AudioElement
ref="Noise"
v-model:volume="volume"
:src="tropics_src"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayNoise"
/>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'NoiseGain',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
tropics_src: window.location.origin + useRuntimeConfig().public.tracks.masking_src as string,
fading: false,
connected: false,
muted: false,
volume: useAudioStore().noiseVolume,
previousVolume: useAudioStore().noiseVolume
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Disconnect every entry that is actually an AudioNode
if (node instanceof AudioNode) {
node.disconnect()
}
})
}
this.createdNodes = null
},
toggleMute () {
const element = this.$refs.Noise as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
if (this.muted) {
// Unmute: restore the previously stored volume
this.muted = false
audioElement.muted = false
this.volume = this.previousVolume || 1 // fall back to 1 if no previous value was stored
audioElement.volume = this.volume
} else {
// Mute: remember the current volume, then mute the audio
this.previousVolume = this.volume
this.volume = 0
audioElement.volume = 0
this.muted = true
audioElement.muted = true
}
useAudioStore().setNoiseVolume(this.volume)
element.$emit('update:volume', this.volume)
},
mute () {
const element = this.$refs.Noise as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Noise as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, 3000) // clear the fading flag once the 3 s ramp has finished
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayNoise () {
// useNuxtApp().$logger.log('NoiseElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Noise as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.tropics_src) {
this.createdNodes.musicSource?.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
// useAudioStore().playing = true
} else {
// The noise has just stopped; react to it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
applyStoredVolume () {
const element = this.$refs.Noise as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
// Set the volume on the audio element
audioElement.volume = this.volume
// Emit an event so the AudioElement updates its volume as well
element.$emit('update:volume', this.volume)
},
updateNoiseGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,239 @@
<template>
<div class="player">
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
:playlist="music_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
musicReady: false,
micReady: false,
deviceReady: false,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
music_src: [window.location.origin + useRuntimeConfig().public.tracks.lagoon_src as string, window.location.origin + useRuntimeConfig().public.tracks.tropics_src as string, window.location.origin + useRuntimeConfig().public.tracks.forest_src as string, window.location.origin + useRuntimeConfig().public.tracks.meadow_src as string] as string[],
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false
}
},
methods: {
// This method receives a microphone stream from the Microphone component and creates the microphone source node.
// It needs to be called before the noise device is connected to the audio graph.
setupMicrophone (stream:MediaStream) {
try {
this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
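// Creates the 'adaptive_masking_controller_NoMusic' device through the devices store and keeps
// its AudioNode; playback stays blocked until deviceReady is true.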
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
const fadeTime = this.audioContext.currentTime + 6.0
this.fading = true
const noiseGain = this.createdNodes.noiseGain
const musicGain = this.createdNodes.musicGain
noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
this.createdNodes.noiseSource.muted = false
this.createdNodes.musicSource.muted = false
setTimeout(() => {
this.fading = false
}, 6000) // clear the fading flag once the 6 s ramp has finished
},
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
},
handleCanPlayNoise () {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = true
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
this.fadeInGains()
} else {
this.fadeOutGains()
}
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
// useNuxtApp().$logger.log('micophone not yet ready attach it!! ')
// useNuxtApp().$logger.log('microphone attached' + stream)
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
handlePlayingUpdate (state: boolean) {
// Stop the music again, mute it and set the noiseReady or musicReady to true
if (state) {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.forest_src) {
this.createdNodes.musicSource?.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// HERE THE NOISE PATCH COMES INTO PLAY
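// Signal routing: microphone -> micSplitter -> device input 0; noise element -> noiseInputChannelSplitter
// (left/right) -> device inputs 1 and 2; device -> noiseGain -> destination; music -> musicGain -> destination.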
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicAudioElement.muted = false
noiseAudioElement.muted = false
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// The music has just stopped; react to it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,263 @@
<template>
<div class="player">
{{ controllValues }}
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="lagoon_src"
title="Lagoon"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
musicReady: false,
micReady: false,
deviceReady: false,
lagoon_src: window.location.origin + useRuntimeConfig().public.tracks.lagoon_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false,
lastUpdate: Date.now(),
updateInterval: 125, // milliseconds
controllValues: new Map()
}
},
methods: {
// This method receives a microphone stream from the Microphone component and creates the microphone source node.
// It needs to be called before the noise device is connected to the audio graph.
setupMicrophone (stream:MediaStream) {
try {
this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice.port.onmessage = this.handleEvent
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
// This method frees the Web Audio resources when we stop playing the audio;
// without it, playback would get louder each time we start playing again.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
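// Messages from the noise device's port arrive as arrays; entries whose tag matches out3..out11
// carry control values that are stored in controllValues (rendered in the template above),
// throttled to one update per updateInterval milliseconds.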
handleEvent (event:any) {
const now = Date.now()
if (now - this.lastUpdate < this.updateInterval) { return } // Skip this update
if (event.data && Array.isArray(event.data) && event.data.length > 1) {
const eventDataDetail = event.data[1] // Assuming the relevant data is at index 1
if (eventDataDetail && eventDataDetail.tag && eventDataDetail.payload && Array.isArray(eventDataDetail.payload)) {
if (/out[3-9]|out1[01]/.test(eventDataDetail.tag)) {
this.controllValues.set(eventDataDetail.tag, eventDataDetail.payload[0])
this.lastUpdate = now
}
}
}
},
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
const fadeTime = this.audioContext.currentTime + 6.0
this.fading = true
const noiseGain = this.createdNodes.noiseGain
const musicGain = this.createdNodes.musicGain
noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
this.createdNodes.noiseSource.muted = false
this.createdNodes.musicSource.muted = false
setTimeout(() => {
this.fading = false
}, 6000) // clear the fading flag once the 6 s ramp has finished
},
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGain.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic (state: boolean) {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = state
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
// useNuxtApp().$logger.log('micophone not yet ready attach it!! ')
// useNuxtApp().$logger.log('microphone attached' + stream)
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
handlePlayingUpdate (state: boolean) {
// Stop the music again, mute it and set the noiseReady or musicReady to true
if (state) {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// HERE THE NOISE PATCH COMES INTO PLAY
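// Signal routing: microphone -> micSplitter -> device input 0; noise element -> noiseInputChannelSplitter
// (left/right) -> device inputs 1 and 2; device -> noiseGain -> destination; music -> musicGain -> destination.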
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicAudioElement.muted = false
noiseAudioElement.muted = false
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// The music has just stopped; react to it.
this.fadeOutGains()
this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
addMediaNavigationHandling () {
if ('mediaSession' in navigator) {
// Play action
navigator.mediaSession.setActionHandler('play', (_e) => {
useAudioStore().setPlaying(true)
})
// Pause action
navigator.mediaSession.setActionHandler('pause', (_e) => {
useAudioStore().setPlaying(false)
})
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
},
mounted () {
this.addMediaNavigationHandling()
}
}
</script>

View File

@@ -0,0 +1,242 @@
<template>
<div class="player">
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="meadow_src"
title="Meadow"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
musicReady: false,
micReady: false,
deviceReady: false,
meadow_src: window.location.origin + useRuntimeConfig().public.tracks.meadow_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false
}
},
methods: {
// This method receives a microphone stream from the Microphone component and creates the microphone source node.
// It needs to be called before the noise device is connected to the audio graph.
setupMicrophone (stream:MediaStream) {
try {
this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
    // Closing and recreating the AudioContext frees the Web Audio resources when playback stops.
    // Without this, playback gets louder on every restart because old nodes keep accumulating.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
    fadeInGains () {
      // Ramp both gains from 0 to 1 over six seconds to avoid an abrupt, loud start.
      // The media elements themselves are already unmuted in handlePlayingUpdate.
      const fadeDuration = 6.0
      const fadeTime = this.audioContext.currentTime + fadeDuration
      this.fading = true
      const noiseGain = this.createdNodes.noiseGain
      const musicGain = this.createdNodes.musicGain
      noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
      musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
      setTimeout(() => {
        this.fading = false
      }, fadeDuration * 1000)
    },
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
      if (this.createdNodes.musicGain) {
        const musicGainValue = this.createdNodes.musicGain.gain.value
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
      }
},
handleCanPlayMusic (state: boolean) {
      // useNuxtApp().$logger.log('MusicElement has now playing state: ' + state)
this.musicReady = state
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
          // useNuxtApp().$logger.log('microphone not yet ready, attach it!')
// useNuxtApp().$logger.log('microphone attached' + stream)
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
handlePlayingUpdate (state: boolean) {
      // Build the Web Audio graph and start playback when state is true; otherwise fade out and tear the graph down.
if (state) {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
        // Wire the microphone and the noise signal into the RNBO noise patch
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
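        // Routing into the RNBO patch (see the connect calls below): device input 0 receives the
        // microphone signal, inputs 1 and 2 receive the left and right channel of the noise file.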
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicAudioElement.muted = false
noiseAudioElement.muted = false
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// Music has just stopped react on it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
        this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
addMediaNavigationHandling () {
if ('mediaSession' in navigator) {
// Play action
navigator.mediaSession.setActionHandler('play', (_e) => {
useAudioStore().setPlaying(true)
})
// Pause action
navigator.mediaSession.setActionHandler('pause', (_e) => {
useAudioStore().setPlaying(false)
})
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
  },
  mounted () {
    this.addMediaNavigationHandling()
  }
}
</script>

View File

@@ -0,0 +1,241 @@
<template>
<div class="player">
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="tropics_src"
title="Tropics"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
musicReady: false,
micReady: false,
deviceReady: false,
tropics_src: window.location.origin + useRuntimeConfig().public.tracks.tropics_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false
}
},
methods: {
    // This method receives a microphone stream from the Microphone component and creates the microphone source node.
    // It needs to be called before the noise device is connected to the audio graph.
setupMicrophone (stream:MediaStream) {
try {
this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
    // Closing and recreating the AudioContext frees the Web Audio resources when playback stops.
    // Without this, playback gets louder on every restart because old nodes keep accumulating.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
    fadeInGains () {
      // Ramp both gains from 0 to 1 over six seconds to avoid an abrupt, loud start.
      // The media elements themselves are already unmuted in handlePlayingUpdate.
      const fadeDuration = 6.0
      const fadeTime = this.audioContext.currentTime + fadeDuration
      this.fading = true
      const noiseGain = this.createdNodes.noiseGain
      const musicGain = this.createdNodes.musicGain
      noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
      musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
      setTimeout(() => {
        this.fading = false
      }, fadeDuration * 1000)
    },
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
      if (this.createdNodes.musicGain) {
        const musicGainValue = this.createdNodes.musicGain.gain.value
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
      }
},
handleCanPlayMusic (state: boolean) {
      // useNuxtApp().$logger.log('MusicElement has now playing state: ' + state)
this.musicReady = state
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
          // useNuxtApp().$logger.log('microphone not yet ready, attach it!')
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
handlePlayingUpdate (state: boolean) {
      // Build the Web Audio graph and start playback when state is true; otherwise fade out and tear the graph down.
if (state) {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
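        // Reuse the source nodes when they already exist so the media elements are not wired
        // into the audio graph a second time.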
        this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
        this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
        // Wire the microphone and the noise signal into the RNBO noise patch (input 0: mic, inputs 1 and 2: noise left/right)
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicAudioElement.muted = false
noiseAudioElement.muted = false
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// Music has just stopped react on it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
        this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
},
addMediaNavigationHandling () {
if ('mediaSession' in navigator) {
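        // The Media Session API lets hardware media keys and the OS media overlay control playback;
        // the handlers below just toggle the shared playing flag in the audio store.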
// Play action
navigator.mediaSession.setActionHandler('play', (_e) => {
useAudioStore().setPlaying(true)
})
// Pause action
navigator.mediaSession.setActionHandler('pause', (_e) => {
useAudioStore().setPlaying(false)
})
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
  },
  mounted () {
    this.addMediaNavigationHandling()
  }
}
</script>

View File

@@ -0,0 +1,266 @@
<template>
<div class="player">
<div v-if="deviceReady">RNBOValues: {{ createdNodes.noiseDevice }}</div>
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
@update:playing="handlePlayingUpdate2"
>
<template #default="{}">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/sound.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/sound_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import type { Device } from '@rnbo/js'
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'RNBODevice',
components: { AudioElement, Microphone },
emits: { 'update:control-value': null },
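  // Usage sketch (hypothetical parent markup) for consuming the emitted control value:
  //   <RNBODevice @update:control-value="onControlValue" />
  // where onControlValue(value: number) could, for example, drive a music gain node.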
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
micReady: false,
deviceReady: false,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false,
muted: false
}
},
methods: {
toggleMute () {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
noiseAudioElement.muted = !noiseAudioElement.muted
this.muted = noiseAudioElement.muted
},
mute () {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
noiseAudioElement.muted = true
this.muted = true
},
unmute () {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
noiseAudioElement.muted = false
this.muted = false
},
    // This method receives a microphone stream from the Microphone component and creates the microphone source node.
    // It needs to be called before the noise device is connected to the audio graph.
setupMicrophone (stream:MediaStream) {
useNuxtApp().$logger.log('setup Microphone')
try {
this.createdNodes.microphone ||= this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
    // This method sets up the RNBO device: it creates the patch by name and stores its audio node in createdNodes.
async setupDevice () {
await useAudioStore().ensureAudioContextRunning()
useNuxtApp().$logger.log('setup Device')
try {
const deviceStore = useDevicesStore()
        const device = await deviceStore.createNoiseDevice('adaptive_masking_controller_NoMusic') as Device // note: I am not getting any analysis values out of this
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
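        // Assumption: getDeviceAudioNode returns the patch's Web Audio node, whose three inputs
        // (microphone, noise left, noise right) are wired up later in handlePlayingUpdate.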
this.deviceReady = true
this.attachDBValueListener(device)
} catch (error) {
useNuxtApp().$logger.error('Error setting up device, fall back.', { error })
this.deviceReady = false
}
},
    // This listener reads the control values reported by the RNBO patch and forwards the
    // band control value to the parent component, which uses it to control the music volume.
    attachDBValueListener (noiseDevice: Device) {
      noiseDevice.messageEvent.subscribe((ev: any) => {
        try {
          // out3 carries the control value in dB after the time ramp, out4 the value before the
          // ramp; out5 to out8 carry further analysis values and are only logged for debugging.
          const debugOutlets = ['out3', 'out4', 'out5', 'out6', 'out7', 'out8']
          if (debugOutlets.includes(ev.tag)) {
            useNuxtApp().$logger.log(ev.tag + '= ' + ev.payload[0])
          }
          if (ev.tag === 'out9') { // out9 carries the control value of the 1000 Hz band
            const newValue = ev.payload
            useNuxtApp().$logger.log('Band 1000 = ' + newValue)
            this.$emit('update:control-value', newValue[0])
          }
        } catch (error: any) {
          // Failed to read a control value; the music gain is simply not updated for this event.
        }
      })
    },
    // Closing and recreating the AudioContext frees the Web Audio resources when playback stops.
    // Without this, playback gets louder on every restart because old nodes keep accumulating.
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
    fadeInGains () {
      this.unmute()
      if (useAudioStore().playing !== true) { return }
      // Ramp the noise gain from 0 to 1 over three seconds, starting shortly after the graph is connected.
      const fadeDuration = 3.0
      const fadeTime = this.audioContext.currentTime + fadeDuration
      setTimeout(() => {
        this.fading = true
        const noiseGain = this.createdNodes.noiseGain
        noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
      }, 450)
      setTimeout(() => {
        this.fading = false
      }, fadeDuration * 1000)
    },
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.noiseReady) {
return false
}
if (!this.micReady) {
return false
}
if (!this.deviceReady) {
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
useNuxtApp().$logger.log('handling Playing Update2= ' + state, this.audioContext.state)
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(true)
} else {
if (!this.deviceReady) {
useNuxtApp().$logger.log('Device is not ready, create it now')
await this.setupDevice()
}
if (!this.micReady) {
// await this.setupMicrophone(Microphone)
          useNuxtApp().$logger.log('microphone not yet ready, attach it!')
// useNuxtApp().$logger.log('microphone attached' + stream)
}
if (this.readyForWebaudio()) {
useNuxtApp().$logger.log('everything is now ready start play')
this.handlePlayingUpdate(true)
} else {
useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
handlePlayingUpdate (state: boolean) {
try {
        // Build the Web Audio graph and start playback when state is true; otherwise fade out and tear everything down.
if (state) {
          useNuxtApp().$logger.log('start playing')
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
audioContext.resume()
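          // Browsers keep an AudioContext suspended until a user gesture; this handler runs in
          // response to the play interaction, so resuming here is allowed.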
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
          // Wire the microphone and the noise signal into the RNBO noise patch (input 0: mic, inputs 1 and 2: noise left/right)
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
noiseAudioElement.muted = false
this.connected = true
this.unmute()
this.fadeInGains()
useAudioStore().playing = true
this.$logger.info('RNBO Patch successfully connected and playing')
} else {
// Music has just stopped react on it.
this.$logger.info('Stopping audio and disconnecting RNBO Patch')
this.fadeOutGains()
          this.createdNodes = {}
this.refreshAudioContext()
this.connected = false
}
} catch (error) {
        this.$logger.error('Error in handlePlayingUpdate', { error })
this.connected = false
useAudioStore().playing = false
// You might want to show an error message to the user here
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
useNuxtApp().$logger.log('volume= ' + volume)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
}
}
}
</script>