Initial commit

This commit is contained in:
Mindboost
2025-07-01 10:53:26 +00:00
commit 38050e5c69
416 changed files with 48708 additions and 0 deletions

View File

@@ -0,0 +1,313 @@
<template>
  <div class="slider-wrapper">
    <!-- Slot for passing any audio element -->
    <slot />
    <!-- Volume slider: v-model.number keeps volumeValue numeric; the
         progress span below visualizes the current level as a width %. -->
    <div class="slider">
      <input
        id="gain-control"
        v-model.number="volumeValue"
        type="range"
        min="0"
        max="1"
        step="0.001"
        data-toggle="tooltip"
        data-placement="top"
        :title="tooltipTitle"
        @wheel.prevent="changeVolumeOnWheel"
      >
      <span
        class="slider-progress-bar"
        :style="{ width: `${volumeValue * 100}%` }"
      />
    </div>
    <!-- Hidden looping audio element; src and volume are assigned in
         setup()'s onMounted hook rather than bound here. -->
    <audio
      :id="title"
      ref="audioElement"
      hidden
      loop
      @loadedmetadata="handleUpdatedMetadata"
      @canplay="ready=true"
    />
  </div>
</template>
<script lang="ts">
import { ref, watch, onMounted, onUnmounted } from 'vue'
import { useAudioStore } from '~/stores/audio'
import { useUserStore } from '~/stores/user'

/**
 * AudioElement – volume slider plus a hidden, looping <audio> tag.
 *
 * Mirrors the global play/pause state from the audio store, forwards
 * volume changes to the parent via `update:volume`, and publishes track
 * metadata to the Media Session API for OS-level media controls.
 */
export default {
  name: 'AudioElement',
  props: {
    // URL of the audio file assigned to the <audio> element on mount.
    src: {
      type: String,
      required: true
    },
    // Track title; also used as the element id and for artwork lookup.
    title: {
      type: String,
      default: 'Unknown'
    },
    tooltipTitle: {
      type: String,
      default: 'Change the volume by click, scroll or touch.',
      required: false
    },
    // Initial volume in [0, 1]; kept in sync by the watch below.
    volume: {
      type: Number,
      default: 1
    }
  },
  emits: ['update:volume', 'update:loaded', 'update:playing', 'update:canplay'],
  setup (props, { emit }) {
    const audioStore = useAudioStore()
    const audioElement = ref<HTMLAudioElement | null>(null)
    const volumeValue = ref(props.volume)
    // Set to true by the template's @canplay handler.
    const ready = ref(false)

    /** Mute the underlying media element (playback itself continues). */
    const mute = () => {
      if ((audioElement.value instanceof HTMLAudioElement)) {
        const ae = audioElement.value
        ae.muted = true
      }
    }

    /**
     * Start playback once the Nuxt audio bootstrap ($audioPrepared) has
     * finished, routing output to the user's configured device when set.
     */
    const play = async () => {
      try {
        const { $audioPrepared } = useNuxtApp()
        await $audioPrepared
        // FIX: guard against a missing output device instead of
        // dereferencing `sink.deviceId` unconditionally.
        const sink = useUserStore().audioOutputDevice as MediaDeviceInfo | null
        if (sink?.deviceId) {
          await audioElement.value?.setSinkId(sink.deviceId)
        }
        if (audioElement.value instanceof HTMLAudioElement) {
          // FIX: await play() so rejections (e.g. autoplay policy) land in
          // this try/catch instead of becoming unhandled rejections.
          await audioElement.value.play()
        }
      } catch (error) {
        useNuxtApp().$logger.error('Oouh sorry! Error while setting up audio, please reload.', error)
      }
    }

    const pause = () => {
      audioElement.value?.pause()
    }

    /** Align the element's play state with the store's `playing` flag. */
    const togglePlayPause = async () => {
      if (audioElement.value) {
        if (audioStore.playing) {
          await audioElement.value.play()
        } else {
          audioElement.value.pause()
        }
      }
    }

    // Parent-driven volume changes (v-model style).
    watch(() => props.volume, (newValue) => {
      volumeValue.value = newValue
    })

    // Global transport state drives this element; metadata is refreshed
    // only when playback starts.
    watch(() => audioStore.playing, async (newValue) => {
      if (newValue) {
        updateMediaSession()
      }
      await togglePlayPause()
    })

    // Local slider changes: apply to the element and notify the parent.
    watch(volumeValue, (newValue) => {
      if (audioElement.value) {
        audioElement.value.volume = newValue
        emit('update:volume', newValue)
      }
    })

    /** Adjust volume in 0.02 steps on mouse-wheel, clamped to [0, 1]. */
    const changeVolumeOnWheel = (event: WheelEvent) => {
      if (event.deltaY < 0) {
        volumeValue.value = Math.min(1, volumeValue.value + 0.02)
      } else {
        volumeValue.value = Math.max(0, volumeValue.value - 0.02)
      }
    }

    /**
     * Publish title/artist/artwork to the Media Session API. Failures are
     * non-fatal (the API is optional).
     */
    const updateMediaSession = () => {
      try {
        const path = window.location.origin + '/images/scenery/' + props.title.toLowerCase() + '.jpg'
        if (audioElement.value) {
          audioElement.value.controls = true
          navigator.mediaSession.metadata = new MediaMetadata({
            title: props.title,
            artist: 'mindboost',
            album: 'mindboost Originale',
            artwork: [
              { src: path, sizes: '192x192', type: 'image/jpeg' }
            ]
          })
        }
      } catch (e) {
        // Media Session API unavailable; ignore.
      }
    }

    const handleUpdatedMetadata = () => {
      // The noise track is intentionally excluded from media controls.
      if (props.title !== 'Noise') {
        updateMediaSession()
      }
    }

    onMounted(() => {
      if (audioElement.value) {
        audioElement.value.src = props.src
        audioElement.value.volume = volumeValue.value
      }
    })

    // FIX: `onUnmounted` is now imported from 'vue' above; the original
    // used it without importing it alongside ref/watch/onMounted.
    onUnmounted(() => {
      audioElement.value = null
      volumeValue.value = 0
    })

    return {
      audioElement,
      ready,
      play,
      pause,
      handleUpdatedMetadata,
      volumeValue,
      togglePlayPause,
      changeVolumeOnWheel,
      updateMediaSession
    }
  }
}
</script>
<style>
.slider-wrapper{
display: flex;
flex-direction: column;
align-items: center;
margin-bottom: 1.5em;
}
.slider {
position: relative;
width: 100%;
margin: 1em auto;
}
.slider-icon {
cursor: pointer;
}
@media only screen and (max-width: 576px) {
.slider{
max-width: 80%;
}
}
.slider-progress-bar {
position: absolute;
height: 10px;
background-color: #e9c046;
border-top-left-radius: 5px;
border-bottom-left-radius: 5px;
border-radius: 5px;
left: 0;
z-index: 0;
}
/* Allgemeine Einstellungen für den Slider */
input[type="range"] {
-webkit-appearance: none; /* Entfernt das Standard-Styling in Webkit-Browsern */
appearance: none; /* Entfernt Standard-Styling in anderen Browsern */
width: 100%;
height: 10px;
background: #fff;
border-radius: 5px;
outline: none;
cursor: pointer;
position: absolute;
z-index:1;
left: 0;
}
/* Styling für den Track in Webkit-Browsern */
input[type="range"]::-webkit-slider-runnable-track {
height: 10px;
background: transparent;
border-radius: 5px;
position: relative;
z-index: 2;
}
/* Styling für den Thumb in Webkit-Browsern */
input[type="range"]::-webkit-slider-thumb {
-webkit-appearance: none;
height: 20px;
width: 20px;
background: #fff;
border-radius: 50%;
margin-top: -5px;
position: relative;
z-index: 10;
border: 2px solid #e9c046;
}
/* Styling für den Track in Firefox */
input[type="range"]::-moz-range-track {
height: 10px;
background: transparent;
border-radius: 5px;
position: relative;
z-index: 1;
}
/* Styling für den Thumb in Firefox */
input[type="range"]::-moz-range-thumb {
height: 20px;
width: 20px;
background-color: #e9c046;
border: none;
border-radius: 50%;
position: relative;
z-index: 1;
border: 2px solid #e9c046;
margin-top: -5px;
}
input[type="range"]::-moz-range-progress {
height: 10px;
background-color: #e9c046;
border-radius: 5px;
}
/* Styling für den Track in Internet Explorer/Edge */
input[type="range"]::-ms-track {
height: 10px;
background: transparent;
border-color: transparent;
color: transparent;
border-radius: 5px;
}
/* Styling für den Thumb in Internet Explorer/Edge */
input[type="range"]::-ms-thumb {
height: 20px;
width: 20px;
background: #f5f5f5;
border: none;
border-radius: 50%;
box-shadow: 0 0 3px rgba(0, 0, 0, 0.3);
position: relative;
z-index: 2;
border: 2px solid #e9c046;
}
/* Entfernt Ticks in IE */
input[type="range"]::-ms-ticks-after,
input[type="range"]::-ms-ticks-before {
display: none;
}
</style>

View File

@@ -0,0 +1,285 @@
<template>
  <div>
    <!-- Slot for passing any audio element -->
    <slot />
    <!-- Volume slider; wheel scrolling adjusts in 0.02 steps. -->
    <div class="slider">
      <input
        id="gain-control"
        v-model="volume"
        type="range"
        min="0"
        max="1"
        step="0.02"
        @wheel.prevent="changeVolumeOnWheel"
      >
    </div>
    <!-- Hidden element: starts autoplaying and muted; src is assigned in
         setup()'s onMounted hook. -->
    <audio
      ref="audioElement"
      hidden
      autoplay
      muted
      loop
      @play="handlePlay"
      @pause="handlePause"
      @keydown="handleKeyDown"
      @loadedmetadata="handleLoaded"
    />
  </div>
</template>
<script lang="ts">
import { ref, watch, onMounted, onUpdated, onUnmounted } from 'vue'
import { useAudioStore } from '~/stores/audio'
import { useUserStore } from '~/stores/user'

/**
 * AudioElement – autoplaying, muted, looping <audio> wrapper with a volume
 * slider, space-bar transport control and Media Session integration.
 * Audible output comes from the Web Audio graph; the element stays muted.
 */
export default {
  name: 'AudioElement',
  props: {
    src: {
      type: String,
      required: true
    },
    title: {
      type: String,
      default: 'Unknown'
    }
  },
  emits: ['update:volume', 'update:loaded', 'update:playing', 'update:fadeout'],
  setup (props, { emit }) {
    const audioStore = useAudioStore()
    const audioElement = ref<HTMLAudioElement | null>(null)
    const volume = ref(1)

    /** Start playback on the configured output device, muted by design. */
    const play = () => {
      try {
        // FIX: guard against a missing output device before setSinkId.
        const sink = useUserStore().audioOutputDevice as MediaDeviceInfo | null
        if (sink?.deviceId) {
          audioElement.value?.setSinkId(sink.deviceId)
        }
        audioElement.value?.play()
        // Muted on purpose: the web audio graph carries the audible signal.
        if (audioElement.value) { audioElement.value.muted = true }
      } catch (e) {
        // Best effort: playback errors are deliberately swallowed here.
      }
    }

    const pause = () => {
      audioElement.value?.pause()
    }

    /** Ask the parent to fade out, then pause after the fade window. */
    const pauseFadeOut = () => {
      emit('update:fadeout')
      setTimeout(() => {
        pause()
      }, 2000)
    }

    const isPlaying = () => {
      return audioStore.playing
    }

    // Refresh Media Session metadata after re-renders (noise is excluded).
    onUpdated(() => {
      if (props.title === 'Noise') {
        // The noise track never exposes media controls.
      } else if ('mediaSession' in navigator) {
        const path = window.location.origin + '/images/scenery/' + props.title.toLowerCase() + '.jpg'
        navigator.mediaSession.metadata = new MediaMetadata({
          title: props.title,
          artist: 'mindboost',
          album: 'mindboost Originale',
          artwork: [
            { src: path, sizes: '192x192', type: 'image/jpeg' }
          ]
        })
      }
    })

    // Global transport state drives this element.
    watch(() => audioStore.playing, (newValue) => {
      if (newValue) {
        play()
      } else {
        pauseFadeOut()
      }
    })

    // Forward slider changes to the parent.
    watch(() => volume.value, (newValue) => {
      if (audioElement.value) {
        emit('update:volume', newValue)
      }
    })

    /** Adjust volume in 0.02 steps on mouse-wheel, clamped to [0, 1]. */
    const changeVolumeOnWheel = (event: WheelEvent) => {
      if (event.deltaY < 0) {
        volume.value = Math.min(1, volume.value + 0.02)
      } else {
        volume.value = Math.max(0, volume.value - 0.02)
      }
    }

    let enableKeyHandler = true

    /** Trailing-edge debounce; the timer resets on every call. */
    const debounce = <T extends (...args: any[]) => void>(func: T, timeout: number = 1500): () => void => {
      let timer: NodeJS.Timeout | null = null
      return () => {
        if (timer) {
          clearTimeout(timer)
        }
        // FIX: call `func` directly — the original used `func.apply(this)`
        // where `this` is undefined inside setup().
        timer = setTimeout(() => { func() }, timeout)
      }
    }

    const reEnableKeyHandler = debounce(() => {
      enableKeyHandler = true
    })

    /** Space toggles play/pause, debounced to suppress key-repeat storms. */
    const handleKeyDown = (e: KeyboardEvent) => {
      if (!enableKeyHandler) { return }
      if (e.code === 'Space') {
        e.preventDefault() // prevent the default action (scrolling)
        if (audioStore.playing) {
          handlePause()
        } else {
          handlePlay()
        }
        enableKeyHandler = false // disable handler until debounce re-enables
        reEnableKeyHandler()
      }
    }

    const handlePlay = () => {
      emit('update:playing', true)
      audioStore.setPlaying(true)
    }

    const handlePause = () => {
      emit('update:playing', false)
      audioStore.playing = false
    }

    const handleLoaded = () => {
      emit('update:loaded', true)
    }

    onMounted(() => {
      if (audioElement.value) {
        audioElement.value.src = props.src
        if (props.title !== 'Noise') {
          // FIX: register the handler itself. The original passed
          // `handleKeyDown.bind(this)`, which creates a NEW function
          // object, so the matching removeEventListener (also calling
          // .bind) could never detach it — the listener leaked across
          // unmounts. Arrow functions need no binding.
          window.addEventListener('keydown', handleKeyDown)
          if ('mediaSession' in navigator) {
            navigator.mediaSession.setActionHandler('play', () => {
              handlePlay()
            })
            // Stop is intentionally a no-op for now.
            navigator.mediaSession.setActionHandler('stop', () => {
            })
            navigator.mediaSession.setActionHandler('pause', () => {
              handlePause()
            })
          }
        }
      }
    })

    onUnmounted(() => {
      // Same reference as in addEventListener, so removal now works.
      window.removeEventListener('keydown', handleKeyDown)
    })

    return {
      audioElement,
      play,
      pause,
      isPlaying,
      handlePlay,
      handlePause,
      handleLoaded,
      handleKeyDown,
      volume,
      changeVolumeOnWheel
    }
  }
}
</script>
<style scoped>
.slider {
margin-bottom: 1em;
}
/* Style adjustments if needed */
input[type="range"] {
background:transparent !important;
}
/* Styles the track */
input[type="range"]::-webkit-slider-runnable-track {
background: #e9c046; /* yellow track */
height: 10px;
border-radius: 5px;
}
input[type="range"]::-moz-range-track {
background: #e9c046; /* yellow track */
height: 10px;
border-radius: 5px;
}
input[type="range"]::-ms-track {
background: #e9c046; /* yellow track */
border-color: transparent;
color: transparent;
height: 8px;
}
/* Styles the thumb */
input[type="range"]::-webkit-slider-thumb {
-webkit-appearance: none; /* Required to style in Webkit browsers */
margin-top: -6px; /* Adjusts the position of the thumb relative to the track */
}
input[type="range"]::-moz-range-thumb {
border: none; /* Removes any default border */
}
input[type="range"]::-ms-thumb {
margin-top: 0; /* May need to adjust the position similar to webkit */
border: none; /* Removes any default border */
}
</style>

View File

@@ -0,0 +1,224 @@
<template>
<h1>AUDiO Tag mit dem volume: {{ safeVolume }}</h1>
<p>audioStatus {{ audioStatus }}</p>
<button @click="muteAudio">muteAudio</button>
<audio
v-if="src"
ref="audioElement"
:src="src"
loop="true"
:volume="safeVolume"
mediagroup="noiseGroup"
sinkId=""
@canplay="()=> $emit('canplay')"
/>
</template>
<script lang="ts">
import type { Logger } from 'pino'
import { defineComponent, ref, watch, computed } from 'vue'
import { ensureAudio, useAudioStore } from '~/stores/audio'
import { useUserStore } from '~/stores/user'

// AudioTag: wrapper around a native <audio> element with fade-in/fade-out
// helpers and a three-state machine: 'stopped' | 'playing' | 'paused'.
export default defineComponent({
  name: 'AudioTag',
  props: {
    // Source URL bound directly to the <audio> element in the template.
    src: {
      type: String,
      required: true
    },
    // Accepts a number or numeric string.
    // NOTE(review): the validator allows -3.4..3.4 while safeVolume clamps
    // to [0, 1] — confirm the wider accepted range is intentional.
    volume: {
      type: [Number, String],
      default: 0.23,
      validator: (value:any) => {
        const num = parseFloat(value)
        return num >= -3.4 && num <= 3.4
      }
    },
    // Desired transport state; evaluated in a watch with a 500 ms delay.
    play: {
      type: Boolean,
      default: false
    }
  },
  emits: ['canplay'],
  setup (props, { emit: $emit }) {
    const logger = useNuxtApp().$logger as Logger
    const audioElement = ref<HTMLAudioElement | null>(null)
    // Component-local playback state machine.
    const audioStatus = ref<'stopped' | 'playing' | 'paused'>('stopped')
    // Volume prop parsed and clamped to [0, 1]; NaN falls back to 1.
    const safeVolume = computed(() => {
      const vol = parseFloat(props.volume.toString())
      return Math.max(0, Math.min(1, isNaN(vol) ? 1 : vol))
    })
    // Toggles (not forces) the element's muted flag.
    const muteAudio = () => {
      if (audioElement.value) {
        audioElement.value.muted = !audioElement.value.muted
      }
    }
    // Small helper for volume fading: linear ramp in 20 interval steps.
    const fadeVolume = (audio: HTMLAudioElement, from: number, to: number, duration: number) => {
      const steps = 20
      const stepTime = duration / steps
      let currentStep = 0
      const fadeInterval = setInterval(() => {
        currentStep++
        const progress = currentStep / steps
        const newVolume = from + (to - from) * progress
        audio.volume = Math.max(0, Math.min(1, newVolume))
        if (currentStep >= steps) {
          clearInterval(fadeInterval)
          audio.volume = to
        }
      }, stepTime)
    }
    // Start playback from the beginning with a slow 8 s fade-in.
    const startPlaying = async () => {
      const audioStore = useAudioStore()
      await ensureAudio()
      const sink = useUserStore().audioOutputDevice as MediaDeviceInfo
      audioElement.value?.setSinkId(sink.deviceId)
      if (audioElement.value && audioStore.isPlaying()) {
        try {
          const audio = audioElement.value
          audio.currentTime = 0
          audio.volume = 0
          audio.muted = false
          await audio.play().catch((error) => {
            logger.warn('Error playing audio:', error)
          })
          // NOTE(review): fades to a fixed 0.3 rather than safeVolume —
          // confirm this target is intentional.
          fadeVolume(audio, 0, 0.3, 8000) // soft fade-in
          audioStatus.value = 'playing'
        } catch (error) {
          logger.warn('Error starting audio:', error)
        }
      } else {
        logger.info('Audio Playback has not started, audioStore is not playing')
      }
    }
    // Quick 300 ms fade-out, then pause shortly after the fade completes.
    const pausePlaying = () => {
      if (audioElement.value && audioStatus.value === 'playing') {
        try {
          const audio = audioElement.value
          const initialVolume = audio.volume
          fadeVolume(audio, initialVolume, 0, 300)
          setTimeout(() => {
            audio.pause()
            audioStatus.value = 'paused'
          }, 350) // slightly longer than the fade
        } catch (error) {
          logger.warn('Error pausing audio:', error)
        }
      }
    }
    // Hard stop: rewind, restore volume, mute and mark as stopped.
    const stopAndResetPlaying = () => {
      if (audioElement.value) {
        try {
          const audio = audioElement.value
          const sink = useUserStore().audioOutputDevice as MediaDeviceInfo
          audio.setSinkId(sink.deviceId)
          audio.pause()
          audio.currentTime = 0
          audio.volume = safeVolume.value
          audio.muted = true
          audioStatus.value = 'stopped'
        } catch (error) {
          logger.warn('Error stopping audio:', error)
        }
      }
    }
    // Resume from 'paused' with a short fade back up to safeVolume.
    const resumePlaying = async () => {
      const audioStore = useAudioStore()
      await ensureAudio()
      if (audioElement.value && audioStore.isPlaying() && audioStatus.value === 'paused') {
        try {
          const audio = audioElement.value
          audio.muted = false
          audio.volume = 0
          await audio.play().catch((error) => {
            logger.warn('Error resuming audio:', error)
          })
          fadeVolume(audio, 0, safeVolume.value, 500) // shorter fade-in
          audioStatus.value = 'playing'
        } catch (error) {
          logger.warn('Error resuming audio:', error)
        }
      } else {
        logger.info('Audio Resume not possible: wrong audio state')
      }
    }
    // Fade out over 0.5 s, then rewind, restore volume and mute.
    const stopPlaying = () => {
      if (audioElement.value) {
        try {
          const audio = audioElement.value
          const initialVolume = audio.volume
          fadeVolume(audio, initialVolume, 0, 500)
          setTimeout(() => {
            audio.pause()
            audio.currentTime = 0
            audio.volume = safeVolume.value
            audio.muted = true
            audioStatus.value = 'stopped'
          }, 550)
        } catch (error) {
          logger.warn('Error stopping and fading audio:', error)
        }
      }
    }
    // Source changes reset playback and reload the element.
    watch(() => props.src, () => {
      stopAndResetPlaying()
      if (audioElement.value) {
        audioElement.value.load()
      }
    })
    // Transport prop, acted on after a 500 ms delay.
    // NOTE(review): the setTimeout means rapid toggles act on stale state —
    // confirm the delay is intentional.
    watch(() => props.play, (newPlay) => {
      setTimeout(() => {
        if (newPlay === false) {
          pausePlaying()
        } else if (audioStatus.value === 'paused') {
          resumePlaying()
        } else {
          startPlaying()
        }
      }, 500)
    })
    // Apply volume changes immediately, but only while playing.
    watch(() => safeVolume.value, (newVolume) => {
      if (audioElement.value && audioStatus.value === 'playing') {
        try {
          audioElement.value.volume = newVolume
        } catch (error) {
          logger.warn('Error updating audio volume:', error)
        }
      }
    })
    return {
      audioElement,
      startPlaying,
      pausePlaying,
      stopPlaying,
      resumePlaying,
      safeVolume,
      muteAudio,
      audioStatus
    }
  }
})
</script>

View File

@@ -0,0 +1,240 @@
<template>
<h3> AudioTagWebAudio </h3>
<button @click="fetchAudio">Fetch </button> <br> fetched:{{ readyState }}<br><br>
<button v-if="readyState" @click="startAudio"> Play </button>
<button @click="mute">Mute</button>
<button @click="unmute">Unmute</button>
<p v-if="audioNodes.length > 0">
<!-- eslint-disable-next-line vue/require-v-for-key -->
</p><div v-for="node in audioNodes">
duration: {{ node.buffer?.duration }}
volume: {{ safeVolume }}
length: {{ node.buffer?.length }}
channels: {{ node.buffer?.numberOfChannels }}
duration: {{ node.buffer?.duration }}
</div>
</template>
<script lang="ts">
import type { Logger } from 'pino'
import { defineComponent, ref, watch, computed, onMounted, onBeforeUnmount } from 'vue'
import { createAudioSource } from '~/lib/AudioFunctions'
import { ensureAudio, useAudioStore } from '~/stores/audio'

/**
 * AudioTagWebAudio – decodes `src` into an AudioBufferSourceNode and plays
 * it through a private GainNode into the supplied `masterGain`, with ramped
 * fade-in/out. Source nodes are single-use, so restarting playback clones
 * the node (see recreateSourceNode).
 */
export default defineComponent({
  name: 'AudioTagWebAudio',
  props: {
    src: {
      type: String,
      required: true
    },
    // Target gain; clamped by safeVolume to [0, ~1.259] (+2 dB linear).
    volume: {
      type: Number,
      default: 0.0
    },
    play: {
      type: Boolean,
      default: false
    },
    // NOTE(review): `require` is not a valid prop option (`required` is),
    // so this prop is effectively optional with default null. Kept as-is
    // for backward compatibility — confirm intent before changing.
    masterGain: {
      type: GainNode,
      default: null,
      require: true
    }
  },
  emits: ['canplay'],
  setup (props, { emit: $emit }) {
    const logger = useNuxtApp().$logger as Logger
    const ctx = useAudioStore().getContext()
    const readyState = ref(false)
    const fadingState = ref(false)
    const audioNodes = ref([] as Array<AudioBufferSourceNode>)
    let gainNode: GainNode | null = null
    let audioElement: AudioBufferSourceNode | null = null

    // 1.2589254117941673 ≈ +2 dB as linear amplitude. Out-of-range values
    // snap to whichever bound is closer.
    const safeVolume = computed((): number => {
      const volumeVal = props.volume as number
      if (volumeVal >= 0 && volumeVal <= 1.2589254117941673) { return volumeVal }
      return Math.abs(volumeVal - 0) < Math.abs(volumeVal - 1) ? 0 : 1.2589254117941673
    })

    onMounted(() => {
      fetchAudio()
    })

    // Tear down source and gain nodes so the audio graph can be GC'd.
    onBeforeUnmount(() => {
      if (audioElement instanceof AudioBufferSourceNode) {
        try {
          // `hasStarted` is a custom expando flag; stop() throws if the
          // node was never started.
          if ((audioElement as any).hasStarted) {
            audioElement.stop()
          }
        } catch (e) {
          logger.warn('Audio element could not be stopped cleanly:', e)
        }
        try {
          audioElement.disconnect()
        } catch (e) {
          logger.warn('Audio element could not be disconnected:', e)
        }
        audioElement = null // release
      }
      if (gainNode instanceof GainNode) {
        try {
          gainNode.gain.cancelScheduledValues(ctx.currentTime)
          gainNode.disconnect()
        } catch (e) {
          logger.warn('Gain node cleanup failed:', e)
        }
        gainNode = null // release
      }
    })

    /** Signal the parent that the buffer is decoded and playable. */
    const emitReady = () => {
      $emit('canplay')
      readyState.value = true
    }

    /** Silence output immediately (gain to 0). */
    const mute = () => {
      gainNode?.gain.setValueAtTime(0, ctx.currentTime)
    }
    /** Restore output immediately (gain to 1). */
    const unmute = () => {
      gainNode?.gain.setValueAtTime(1, ctx.currentTime)
    }

    /** Insert a muted GainNode between `source` and the master gain. */
    const connectGainNode = (source:AudioBufferSourceNode) => {
      gainNode = ctx.createGain()
      gainNode.gain.setValueAtTime(0, ctx.currentTime)
      source.connect(gainNode)
      gainNode.connect(props.masterGain)
    }

    /** Decode `props.src` into a fresh source node and wire it up. */
    const fetchAudio = async () => {
      audioElement = null
      audioElement = await createAudioSource(ctx, props.src)
      audioNodes.value.push(audioElement)
      if (audioElement instanceof AudioBufferSourceNode) {
        connectGainNode(audioElement)
        emitReady()
      }
    }

    /**
     * AudioBufferSourceNodes are one-shot: once started they can never be
     * started again. Restarting therefore clones buffer + settings into a
     * new node connected to the existing gain node.
     */
    const recreateSourceNode = () => {
      if (!ctx || !audioElement?.buffer || !gainNode) {
        logger.error('Cannot recreate source node: missing context, buffer, or gain node.')
        return
      }
      const newSource = ctx.createBufferSource()
      newSource.buffer = audioElement.buffer
      newSource.playbackRate.value = audioElement.playbackRate.value || 1
      newSource.loop = audioElement.loop ?? false
      newSource.connect(gainNode)
      audioElement = newSource
      ;(newSource as any).hasStarted = false
    }

    /** Start (or restart) playback with a 5 s linear fade-in. */
    const startAudio = async () => {
      await ensureAudio()
      if (props.play === false) {
        return
      }
      if (gainNode instanceof GainNode && audioElement instanceof AudioBufferSourceNode) {
        if ((audioElement as any).hasStarted) {
          recreateSourceNode()
        }
        gainNode.gain.setValueAtTime(0, ctx.currentTime)
        audioElement.playbackRate.value = 1
        try {
          (audioElement as any).hasStarted = true
          audioElement.start()
        } catch (error) {
          (audioElement as any).hasStarted = false
          audioElement.playbackRate.value = 1
        }
        gainNode.gain.linearRampToValueAtTime(safeVolume.value, ctx.currentTime + 5)
      } else {
        logger.error('Missing required audioNodes.')
      }
    }

    /** Fade out over 0.5 s, then stop the source node. */
    const stopAudio = () => {
      if (gainNode instanceof GainNode && audioElement instanceof AudioBufferSourceNode) {
        try {
          const currentTime = ctx.currentTime
          gainNode.gain.cancelScheduledValues(currentTime)
          gainNode.gain.setValueAtTime(gainNode.gain.value, currentTime)
          gainNode.gain.linearRampToValueAtTime(0, currentTime + 0.5) // 0.5 s fade-out
          // Stop only after the fade has finished.
          setTimeout(() => {
            try {
              if (audioElement instanceof AudioBufferSourceNode && (audioElement as any).hasStarted) {
                audioElement?.stop()
              }
            } catch (error) {
              logger.error('Error stopping audioElement:', { error })
            }
          }, 500)
        } catch (error) {
          logger.warn('Error during stopAudio:', error)
        }
      } else {
        logger.error('Missing required audioNodes for stopping.')
      }
    }

    watch(() => props.play, () => {
      if (props.play && audioElement && readyState.value) {
        try {
          startAudio()
        } catch (error) {
          logger.warn('Error while start audio', error)
        }
      } else if (audioElement && !props.play) {
        stopAudio()
      }
    })

    watch(() => props.src, async () => {
      if (props.src === '') {
        logger.warn('Audio-Source is empty. Please check your props.')
        return
      }
      await fetchAudio()
    })

    // Live volume changes ramp exponentially over 15 s while playing.
    watch(() => props.volume, () => {
      if (!readyState.value) {
        logger.warn('Audio is not yet ready for playing.')
        return
      }
      if (props.play) {
        if (gainNode instanceof GainNode) {
          // FIX: exponentialRampToValueAtTime throws a RangeError for a
          // target of 0, and safeVolume can legitimately be 0 (the prop
          // default is 0.0) — clamp to a small positive epsilon.
          const target = Math.max(safeVolume.value, 0.0001)
          gainNode.gain.exponentialRampToValueAtTime(target, ctx.currentTime + 15)
        }
      }
    })

    return {
      safeVolume,
      emitReady,
      fetchAudio,
      readyState,
      fadingState,
      startAudio,
      gainNode,
      audioNodes,
      mute,
      unmute
    }
  }
})
</script>

View File

@@ -0,0 +1,96 @@
<template>
<h3> AudioTagWebAudio </h3>
<p>In diesem Test funktioniert die Messung der db</p>
<button @click="fetchAudio">Fetch </button> <br> fetched:{{ readyState }}<br><br>
<button v-if="readyState" @click="startAudio"> Play </button> gain:<p v-if="gainNode"> {{ gainNode }} </p>
<div v-if="audioNodes.length > 0">
<!-- eslint-disable-next-line vue/require-v-for-key -->
<div v-for="node in audioNodes">
state: {{ node.state() }} <br>
src: {{ node._src }} <br>
playing: {{ node.playing() }} <br>
webaudio: {{ node._webAudio }} <br>
duration: {{ node.duration() }} <br>
</div>
</div>
</template>
<script lang="ts">
import { defineComponent, ref, watch, computed } from 'vue'
import { createStreamingAudioSource } from '~/lib/AudioFunctions'
import { useAudioStore } from '~/stores/audio'

/**
 * AudioTagWebAudio (Howler test bed) – streams `src` via Howler and exposes
 * node state for the debug template. Used to verify dB measurement.
 * NOTE(review): `Howl` is referenced but not imported in this file —
 * presumably provided globally by howler; confirm.
 */
export default defineComponent({
  name: 'AudioTagWebAudio',
  props: {
    src: {
      type: String,
      required: true
    },
    // Linear volume in [0, 1]; out-of-range snaps to the closer bound.
    volume: {
      type: Number,
      default: 0.5
    }
  },
  emits: ['canplay'],
  setup (props, { emit: $emit }) {
    const ctx = useAudioStore().getContext()
    const readyState = ref(false)
    const fadingState = ref(false)
    const audioNodes = ref([] as Array<Howl>)
    // FIX: return the primitive `number` instead of the boxed `Number`
    // wrapper type.
    const safeVolume = computed((): number => {
      const volumeVal = props.volume as number
      if (volumeVal >= 0 && volumeVal <= 1) { return volumeVal }
      return Math.abs(volumeVal - 0) < Math.abs(volumeVal - 1) ? 0 : 1
    })
    let gainNode: GainNode | null = null
    let audioElement: Howl | null = null

    /** Notify the parent that the stream is playable. */
    const emitReady = () => {
      $emit('canplay')
    }

    // Currently unused: would route a source through a muted gain node
    // into the context destination. Kept for the exposed `gainNode` ref.
    const connectGainNode = (source:AudioBufferSourceNode) => {
      gainNode = ctx.createGain()
      gainNode.gain.setValueAtTime(0, ctx.currentTime)
      source.connect(gainNode)
      gainNode.connect(ctx.destination)
    }

    /** Create the Howl for `props.src` and start playback immediately. */
    const fetchAudio = async () => {
      audioElement = null
      useNuxtApp().$logger.log('PFAD: ' + props.src)
      audioElement = await createStreamingAudioSource(ctx, props.src)
      audioNodes.value.push(audioElement)
      if (audioElement instanceof Howl) {
        audioElement.volume(props.volume)
        audioElement?.play()
        useNuxtApp().$logger.log({ audioElement })
        readyState.value = true
        emitReady()
      }
      useNuxtApp().$logger.log('gefetchtes audioElement', { audioElement })
    }

    // NOTE(review): despite the name this mutes the element — playback is
    // already started inside fetchAudio. Confirm whether intended.
    const startAudio = () => {
      useNuxtApp().$logger.log('start Audio')
      audioElement?.mute()
    }

    // Forward prop changes straight to the Howl instance.
    watch(() => props.volume, () => {
      audioElement?.volume(props.volume)
    })

    return {
      safeVolume,
      emitReady,
      fetchAudio,
      readyState,
      fadingState,
      startAudio,
      gainNode,
      audioNodes
    }
  }
})
</script>

View File

@@ -0,0 +1,101 @@
<template>
<div>
<canvas id="audioVisualizer" width="640" height="100" />
</div>
</template>
<script>
/**
 * Draws a continuously-updating time-domain waveform of `analyser` onto
 * `canvasCtx` using requestAnimationFrame.
 *
 * @param {AnalyserNode} analyser - source of time-domain samples.
 * @param {CanvasRenderingContext2D} canvasCtx - 2D target context.
 * @returns {Function} cancel function that stops the animation loop.
 */
function drawVisualization (analyser, canvasCtx) {
  const bufferLength = analyser.frequencyBinCount
  const dataArray = new Uint8Array(bufferLength)
  const WIDTH = canvasCtx.canvas.width
  const HEIGHT = canvasCtx.canvas.height
  let rafId = 0
  function draw () {
    rafId = requestAnimationFrame(draw)
    analyser.getByteTimeDomainData(dataArray)
    canvasCtx.fillStyle = 'rgb(200, 200, 200)'
    canvasCtx.fillRect(0, 0, WIDTH, HEIGHT)
    canvasCtx.lineWidth = 2
    canvasCtx.strokeStyle = 'rgb(0, 0, 0)'
    canvasCtx.beginPath()
    const sliceWidth = WIDTH * 1.0 / bufferLength
    let x = 0
    for (let i = 0; i < bufferLength; i++) {
      // Samples are unsigned bytes centered on 128; map to canvas height.
      const v = dataArray[i] / 128.0
      const y = v * HEIGHT / 2
      if (i === 0) {
        canvasCtx.moveTo(x, y)
      } else {
        canvasCtx.lineTo(x, y)
      }
      x += sliceWidth
    }
    canvasCtx.lineTo(WIDTH, HEIGHT / 2)
    canvasCtx.stroke()
  }
  draw()
  // FIX: expose a way to stop the loop; previously it ran forever even
  // after the component unmounted.
  return () => cancelAnimationFrame(rafId)
}
export default {
  name: 'AudioVisualization',
  props: {
    // AnalyserNode feeding the visualization.
    analyser: {
      type: Object,
      required: true
    }
  },
  setup (props) {
    let stopVisualization = null
    onMounted(() => {
      const audioCanvas = document.getElementById('audioVisualizer').getContext('2d')
      stopVisualization = drawVisualization(props.analyser, audioCanvas)
    })
    onUnmounted(() => {
      // FIX: the original tried to remove a non-existent 'handleKeyDown'
      // listener here (copy-paste from another component), which threw a
      // ReferenceError on unmount. Cancel the animation loop instead.
      if (stopVisualization) {
        stopVisualization()
        stopVisualization = null
      }
    })
    return {
      audioCanvas: null
    }
  }
}
</script>
<style scoped>
/* Style adjustments if needed */
/* Styles the track */
input[type="range"]::-webkit-slider-runnable-track {
background: #e9c046; /* yellow track */
height: 8px;
border-radius: 5px;
}
input[type="range"]::-moz-range-track {
background: #e9c046; /* yellow track */
height: 8px;
border-radius: 5px;
}
input[type="range"]::-ms-track {
background: #e9c046; /* yellow track */
border-color: transparent;
color: transparent;
height: 8px;
}
</style>
^

View File

@@ -0,0 +1,169 @@
<template>
<div>
<div v-if="error" class="error-message">
{{ error }}
</div>
<AudioFileSelector @file-selected="onFileSelected" />
<!-- Ramp Time Control -->
<div class="ramp-time-control">
<label for="rampTime">Ramp Time (ms):</label>
<input
id="rampTime"
v-model="rampTime"
type="range"
min="100"
max="50000"
step="100"
>
<span>{{ rampTime }}ms</span>
</div>
<div class="gain-controller">
<div v-for="frequency in frequencies" :key="frequency" class="frequency-control">
<RNBOControlValue
:center-frequency="frequency"
@control-value-change="(value) => handleValueChange(value.frequency, value.value)"
/>
<AudioTagWebAudio
:ref="el => { if (el) audioElements[frequency] = el }"
:src="audioSrc"
:volume="currentVolumes[frequency]"
/>
<div>
Frequency: {{ frequency }}Hz
<br>Gain Value (dB): {{ gainValuesDB[frequency] }}
<br>Normalized: {{ normalizedVolumes[frequency] }}
<br>Volume: {{ currentVolumes[frequency] }}
</div>
</div>
</div>
</div>
</template>
<script lang="ts">
import { defineComponent, ref, onUnmounted, reactive } from 'vue'
import AudioFileSelector from '../AudioFileSelector.vue'
import AudioTagWebAudio from './AudioTagWebAudio.vue'
import RNBOControlValue from './tests/ControlValues/RNBOControlValue.vue'
import { useAudioStore } from '~/stores/audio'

/**
 * GainController – one audio player per frequency band. RNBO control
 * values arrive in dB, are mapped to normalized volumes in [0, 1], and
 * are then ramped toward the target over `rampTime` milliseconds.
 */
export default defineComponent({
  name: 'GainController',
  components: {
    RNBOControlValue,
    AudioTagWebAudio,
    AudioFileSelector
  },
  setup () {
    const logger = useNuxtApp().$logger
    logger.info('GainController setup')
    const audioStore = useAudioStore()
    logger.info('Got audioStore', audioStore)
    const audioSrc = ref(window.location.origin + '/sounds/lagoon.ogg')
    const rampTime = ref(25000)
    logger.info('Set rampTime', 25000)
    // Octave-band center frequencies in Hz.
    const frequencies = ref([63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000])
    logger.info('Set frequencies', { frequencies })
    const error = ref<string | null>(null)
    // Per-band state, keyed by center frequency.
    const gainValuesDB = reactive<Record<number, number>>({})
    const currentVolumes = reactive<Record<number, number>>({})
    const normalizedVolumes = reactive<Record<number, number>>({})
    const audioElements = reactive<Record<number, InstanceType<typeof AudioTagWebAudio>>>({})
    // One active ramp timer per band; replaced when a new target arrives.
    const rampIntervals: Record<number, ReturnType<typeof setInterval> | null> = {}

    /** Switch the audio source and (re)start playback shortly after. */
    const onFileSelected = (file: string) => {
      const fullPath = new URL(file, window.location.origin).toString()
      audioSrc.value = fullPath
      logger.info('User hat ein File ausgewählt ' + file)
      useNuxtApp().$logger.log({ audioElements })
      setTimeout(() => {
        audioStore.setPlaying(true)
      }, 250)
    }

    /** Map dB in [-12, 2] linearly onto [0, 1]. */
    const calculateNormalizedVolume = (db: number): number => {
      const minDB = -12
      const maxDB = 2
      return (db - minDB) / (maxDB - minDB)
    }

    /** Linearly ramp one band's volume to `targetVolume` over `duration` ms. */
    const rampVolume = (frequency: number, targetVolume: number, duration: number) => {
      // FIX: use ?? so a legitimate current volume of 0 is not replaced
      // by 1 (|| treated 0 as missing).
      const startVolume = currentVolumes[frequency] ?? 1
      const startTime = Date.now()
      const endTime = startTime + duration
      if (rampIntervals[frequency]) {
        clearInterval(rampIntervals[frequency]!)
      }
      rampIntervals[frequency] = setInterval(() => {
        const now = Date.now()
        if (now >= endTime) {
          currentVolumes[frequency] = targetVolume
          clearInterval(rampIntervals[frequency]!)
          rampIntervals[frequency] = null
          return
        }
        const progress = (now - startTime) / duration
        currentVolumes[frequency] = startVolume + (targetVolume - startVolume) * progress
      }, 50)
    }

    /** Handle a dB control change for one band: store, normalize, ramp. */
    const handleValueChange = (frequency: number, value: number) => {
      logger.info('Change for ' + frequency + ' to ' + value)
      if (!isNaN(value)) {
        gainValuesDB[frequency] = value
        normalizedVolumes[frequency] = calculateNormalizedVolume(value)
        rampVolume(frequency, normalizedVolumes[frequency], rampTime.value)
      } else {
        // FIX: the original message read 'value is not NaN' on the NaN
        // branch — inverted wording.
        useNuxtApp().$logger.log('value is NaN', { value })
      }
    }

    // Stop all pending ramps so no timers outlive the component.
    onUnmounted(() => {
      Object.values(rampIntervals).forEach((interval) => {
        if (interval) { clearInterval(interval) }
      })
    })

    return {
      frequencies,
      gainValuesDB,
      currentVolumes,
      normalizedVolumes,
      audioElements,
      handleValueChange,
      error,
      onFileSelected,
      audioSrc,
      rampTime
    }
  }
})
</script>
<style scoped>
.error-message {
color: red;
font-weight: bold;
padding: 10px;
border: 1px solid red;
border-radius: 5px;
margin-bottom: 10px;
}
.ramp-time-control {
margin-top: 20px;
}
.ramp-time-control input {
width: 300px;
margin: 0 10px;
}
.frequency-control {
margin-bottom: 20px;
padding: 10px;
border: 1px solid #ccc;
border-radius: 5px;
}
</style>

View File

@@ -0,0 +1,173 @@
<template>
<div>
<div v-if="error" class="error-message">
{{ error }}
</div>
<AudioFileSelector @file-selected="onFileSelected" />
<button @click="startAll">Start all Noises</button>
<!-- Ramp Time Control -->
</div>
<div class="gain-controller">
<!-- One control row per band. Fixed: both conditionals used `:v-if`,
     which v-binds a prop literally named "v-if" instead of applying the
     v-if directive — the elements were always rendered. -->
<div v-for="frequency in frequencies" :key="frequency" class="frequency-control">
  <RNBOControlValue
    v-if="!isNaN(frequency)"
    :center-frequency="frequency"
    :status="audioSources[frequency] != null"
    @control-value-change="(value) => handleValueChange(value.frequency, value.value)"
  />
  <div v-if="audioSources[frequency] != undefined && audioSources[frequency] != ''">
    <AudioTagWebAudio
      :ref="el => { if (el) audioElements[frequency] = el as any}"
      :src="audioSources[frequency] || ''"
      :volume="Number(currentVolumes[frequency])"
    />
  </div>
  <div>
    Frequency: {{ frequency }}Hz
    <br>Gain Value (dB): {{ gainValuesDB[frequency] }}
    <br>Normalized: {{ normalizedVolumes[frequency] }}
    <br>Volume: {{ currentVolumes[frequency] }}
  </div>
</div>
</div>
</template>
<script lang="ts">
import { defineComponent, ref, onUnmounted, reactive } from 'vue'
import AudioFileSelector from '../AudioFileSelector.vue'
import AudioTagWebAudio from './AudioTagWebAudio.vue'
import RNBOControlValue from './tests/ControlValues/RNBOControlValue.vue'
import { calculateNormalizedVolume } from '~/lib/AudioFunctions'
import { useAudioStore } from '~/stores/audio'
import tracksConfig from '~/tracks.config'
export default defineComponent({
name: 'GainController',
components: {
RNBOControlValue,
AudioTagWebAudio,
AudioFileSelector
},
setup () {
const logger = useNuxtApp().$logger as any
logger.info('GainController setup')
const audioStore = useAudioStore()
logger.info('Got audioStore', audioStore)
const audioSrc = ref(window.location.origin + '/sounds/lagoon.ogg')
const rampTime = ref(25000)
logger.info('Set rampTime', 25000)
const frequencies = ref([63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000])
logger.info('Set frequencies', { frequencies })
const error = ref<string | null>(null)
const gainValuesDB = reactive<Record<number, number>>({})
const currentVolumes = reactive<Record<number, number>>({})
const audioSources = reactive<Record<number, string>>({})
const normalizedVolumes = reactive<Record<number, number>>({})
const audioElements = reactive<Record<number, InstanceType<typeof AudioTagWebAudio>>>({})
const rampIntervals: Record<number, ReturnType<typeof setInterval> | null> = {}
// Resolve the picked file against the current origin, swap the audio
// source, and flip the store into playing state shortly afterwards.
const onFileSelected = (file: string) => {
  audioSrc.value = new URL(file, window.location.origin).toString()
  logger.info('User hat ein File ausgewählt ' + file)
  useNuxtApp().$logger.log({ audioElements })
  setTimeout(() => audioStore.setPlaying(true), 250)
}
// Assign every registered band its noise-file URL, which lets the
// corresponding <AudioTagWebAudio> render and start loading.
// NOTE(review): only as many bands as are currently present in
// `audioElements` receive a source — confirm that partial registration
// is the intended behavior.
const startAll = () => {
  const audioPaths = [tracksConfig['63_src'], tracksConfig['125_src'], tracksConfig['250_src'], tracksConfig['500_src'],
    tracksConfig['1000_src'], tracksConfig['2000_src'], tracksConfig['4000_src'], tracksConfig['8000_src'], tracksConfig['16000_src']]
  const recordsCount = Object.keys(audioElements).length
  if (recordsCount > 0) {
    for (let i = 0; i < recordsCount; i++) {
      // Fixed: removed dead local `audioElement` — it was read and never used.
      audioSources[frequencies.value[i]] = `${window.location.origin}${encodeURI(audioPaths[i])}`
    }
    useNuxtApp().$logger.log(audioSources[63])
  }
}
// Ramp the displayed volume of one band linearly toward `targetVolume`
// across `duration` ms, updating every 50 ms. A ramp already active for
// this band is cancelled before the new one starts.
const rampVolume = (frequency: number, targetVolume: number, duration: number) => {
  const from = currentVolumes[frequency] || 1
  const t0 = Date.now()
  const running = rampIntervals[frequency]
  if (running) {
    clearInterval(running)
  }
  rampIntervals[frequency] = setInterval(() => {
    const dt = Date.now() - t0
    if (dt < duration) {
      currentVolumes[frequency] = from + (targetVolume - from) * (dt / duration)
      return
    }
    // Done: snap to the exact target and clear the timer slot.
    currentVolumes[frequency] = targetVolume
    clearInterval(rampIntervals[frequency]!)
    rampIntervals[frequency] = null
  }, 50)
}
// Store a new dB gain for a band and mirror it into the normalized and
// current volume maps; non-numeric input is logged and skipped.
const handleValueChange = (frequency: number, value: number) => {
  if (isNaN(value)) {
    logger.warn('value is not a number, skip...', { value })
    return
  }
  const normalized = calculateNormalizedVolume(value)
  gainValuesDB[frequency] = value
  normalizedVolumes[frequency] = normalized
  currentVolumes[frequency] = normalized
}
// Cancel any ramp timers that are still active on teardown.
onUnmounted(() => {
  for (const timer of Object.values(rampIntervals)) {
    if (timer) { clearInterval(timer) }
  }
})
return {
frequencies,
gainValuesDB,
currentVolumes,
normalizedVolumes,
audioElements,
handleValueChange,
error,
onFileSelected,
audioSrc,
rampTime,
startAll,
audioSources
}
}
})
</script>
<style scoped>
.error-message {
color: red;
font-weight: bold;
padding: 10px;
border: 1px solid red;
border-radius: 5px;
margin-bottom: 10px;
}
.ramp-time-control {
margin-top: 20px;
}
.ramp-time-control input {
width: 300px;
margin: 0 10px;
}
.frequency-control {
margin-bottom: 20px;
padding: 10px;
border: 1px solid #ccc;
border-radius: 5px;
}
</style>

View File

@@ -0,0 +1,139 @@
<template>
<div class="player">
<NoiseControlledWebAudio3Band
v-for="(frequency, index) in frequencies"
ref="Player"
:key="frequency"
:master-attack="masterAttack"
:master-release="masterRelease"
:center-frequency="frequency"
:master-gain="masterGain"
:q-factor="qFactors[index]"
@ready="onBandReady"
@update:mid-volume="controlMusicGain"
/>
</div>
</template>
<script lang="ts">
import { useAudioStore } from '../../../stores/audio'
import { useMicStore } from '~/stores/microphone'
import type { Microphone } from '~/stores/interfaces/Microphone'
import NoiseControlledWebAudio3Band from '~/components/experiments/tests/ControlValues/NoiseControlledWebAudio3Band.vue'
export default {
name: 'AdaptiveNoiseGain',
components: {
NoiseControlledWebAudio3Band
},
emits: ['musicGain'],
setup () {
const masterGain = ref(useAudioStore().getMasterGainNoise())
const player = ref(null)
const { t } = useI18n()
const frequencies = ref([150, 1500, 8000])
const qFactors = ref([0.8, 0.9, 0.6])
const loadedBands = ref(0)
const muted = computed(() => useAudioStore().getNoiseVolume < 0.01)
let oldVolume = 0
const route = useRoute()
const isExperimentsRoute = computed(() => route.path.match(/\/[a-z]{2}\/experiments/))
const masterAttack = ref(120000 * 2) // Beispielwert in Samples
const masterRelease = ref(144000 * 2)
const loading = computed(() => loadedBands.value < frequencies.value.length)
const onBandReady = () => {
loadedBands.value++
}
// Toggle the noise master gain between silence and the last audible
// level. Uses a 0.4 s linear ramp instead of a hard switch to avoid clicks.
const toggleMute = () => {
  const gain = masterGain.value.gain
  const rampEnd = masterGain.value.context.currentTime + 0.4
  if (!muted.value) {
    // Currently audible: remember the level, then fade to silence.
    oldVolume = gain.value
    gain.linearRampToValueAtTime(0, rampEnd)
    useAudioStore().setNoiseVolume(0)
    return
  }
  // Currently muted: restore the remembered level, falling back to full volume.
  const restored = oldVolume > 0 ? oldVolume : 1
  gain.linearRampToValueAtTime(restored, rampEnd)
  useAudioStore().setNoiseVolume(restored)
}
return {
frequencies,
loading,
onBandReady,
t,
loadedBands,
masterAttack,
masterRelease,
isExperimentsRoute,
qFactors,
masterGain,
toggleMute,
muted,
player
}
},
data () {
return {
audioContext: useAudioStore().getContext(),
musicReady: false,
tropics_src: window.location.origin + useRuntimeConfig().public.tracks.masking_src as string,
fading: false,
connected: false,
volume: useAudioStore().noiseVolume,
previousVolume: useAudioStore().noiseVolume
}
},
onMounted () {
},
watch: {
// Push slider changes into the store and ramp the master noise gain.
volume (newVolume: number) {
  const audioStore = useAudioStore()
  audioStore.setNoiseVolume(newVolume)
  if (!isNaN(newVolume)) {
    // NOTE(review): linearRampToValueAtTime takes an *absolute* context
    // time; 0.125 is almost always already in the past on a running
    // context — confirm whether `context.currentTime + 0.125` was intended.
    audioStore.getMasterGainNoise().gain.linearRampToValueAtTime(newVolume, 0.125)
  }
  // Fixed: removed dead read `const m = this.muted` (unused local).
}
},
beforeUnmount () {
const micro = useMicStore().getMicrophone() as Microphone
micro.microphoneStream?.getTracks().forEach(m => m.stop())
},
methods: {
// Wheel up (negative deltaY) raises the noise volume, wheel down lowers
// it, in 0.02 steps clamped to the valid [0, 1] range.
changeVolumeOnWheel (event: WheelEvent) {
  const current = this.volume
  if (event.deltaY < 0) {
    this.volume = Math.min(1, current + 0.02)
  } else {
    this.volume = Math.max(0, current - 0.02)
  }
},
controlMusicGain (value: string) {
useAudioStore().setVolume(parseFloat(value))
this.$emit('musicGain', value)
},
handleCanPlayNoise () {
// useNuxtApp().$logger.log('NoiseElemeint has now playingstate: ' + state)
this.musicReady = true
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
}
}
}
</script>

View File

@@ -0,0 +1,239 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
v-model:volume="volume"
:src="src"
:title="title"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img
v-if="volume == 0"
class="slider-icon"
style="width: 25px; height: 25px;"
src="~/assets/image/music_muted.svg"
title="Click to unmute"
@click="toggleMute()"
>
<img
v-else
class="slider-icon"
style="width: 25px; height: 25px;"
src="~/assets/image/music.svg"
title="Click to mute"
@click="toggleMute()"
>
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGain',
components: { AudioElement },
props: {
src: {
type: String,
required: true
},
title: {
type: String,
required: true
}
},
emits: ['musicReady'],
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
fading: false,
connected: false,
muted: false,
volume: useAudioStore().getVolume,
previousVolume: useAudioStore().getVolume
}
},
watch: {
musicReady (value) {
this.$emit('musicReady', value)
this.handlePlayingUpdate(true)
}
},
mounted () {
this.applyStoredVolume()
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
// Disconnect every WebAudio node this component created and drop the
// map so the graph can be garbage-collected.
disconnectNodes () {
  if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
    Object.values(this.createdNodes).forEach((node) => {
      // Fixed: the old guard `node && typeof AudioNode` was always truthy
      // for any truthy node (typeof AudioNode === 'function'); check the
      // actual value instead so non-node entries are skipped safely.
      if (node instanceof AudioNode) {
        node.disconnect()
      }
    })
    this.createdNodes = null
  }
},
// Flip between muted and unmuted, restoring the last audible volume on
// unmute (defaulting to 1 when none was recorded), and broadcast the new
// volume to the store and the wrapped AudioElement.
toggleMute () {
  const element = this.$refs.Music as typeof AudioElement
  const media = element.$refs.audioElement as HTMLMediaElement
  if (this.muted) {
    // Unmute: restore the previously stored volume (fall back to 1).
    this.muted = false
    media.muted = false
    this.volume = this.previousVolume || 1
    media.volume = this.volume
  } else {
    // Mute: remember the current volume, then silence the element.
    this.previousVolume = this.volume
    this.volume = 0
    media.volume = 0
    this.muted = true
    media.muted = true
  }
  useAudioStore().setVolume(this.volume)
  element.$emit('update:volume', this.volume)
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// Replace the shared AudioContext with a fresh one and close the old one.
// This frees the old graph's resources when playback stops; without it,
// each restart layered gain on top of the previous graph and got louder.
refreshAudioContext () {
  const newAudioContext = new AudioContext()
  // Close the old context (releases its audio hardware resources).
  this.audioContext.close()
  // Store the replacement globally, then re-read it locally so both
  // references stay in sync.
  useAudioStore().audioContext = newAudioContext
  this.audioContext = useAudioStore().getContext()
},
// Start playback and fade the music gain from 0 to 1 over three seconds.
fadeInGains () {
  if (useAudioStore().playing !== true) {
    // Fixed: `logger` was not defined in this component and threw a
    // ReferenceError whenever playback was stopped.
    useNuxtApp().$logger.info('Skip interaction, because playing state is false.')
    return
  }
  const element = this.$refs.Music as typeof AudioElement
  const audioElement = element.$refs.audioElement as HTMLMediaElement
  const fadeSeconds = 3.0
  const fadeTime = this.audioContext.currentTime + fadeSeconds
  this.fading = true
  this.unmute()
  audioElement.play()
  const musicGain = this.createdNodes.musicGain
  musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
  musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
  setTimeout(() => {
    this.fading = false
  }, fadeSeconds * 1000) // Fixed: was `fadeTime * 1000` — an absolute context timestamp, not a duration.
},
// Ramp the music gain down to silence over ~1.3 s, starting from its
// current level.
fadeOutGains () {
  if (this.createdNodes.musicGain) {
    // Fixed: read the current level from `musicGain` — `musicGainValue`
    // never existed in createdNodes and crashed every fade-out.
    const musicGainValue = this.createdNodes.musicGain.gain.value
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
  }
},
handleCanPlayMusic () {
this.musicReady = true
this.handlePlayingUpdate(true)
},
// True once the <audio> element reported canplay for the music track.
readyForWebaudio () {
  return this.musicReady
},
// React to play/pause events from the AudioElement. Pause mutes
// immediately; play defers to handlePlayingUpdate once the element is ready.
handlePlayingUpdate2 (state: boolean) {
  if (!state) {
    this.mute()
    return
  }
  // `state` is true from here on. Fixed: the old nested `else` for the
  // false case and the duplicated `else if (readyForWebaudio())` branch
  // were unreachable and have been removed.
  if (this.readyForWebaudio()) {
    this.handlePlayingUpdate(state)
  }
},
// Build (or rebuild) the element-source → gain → destination graph and
// fade the music in; on stop, fade out and recycle the AudioContext.
handlePlayingUpdate (state: boolean) {
  if (state) {
    const musicElement = this.$refs.Music as typeof AudioElement
    const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
    const audioContext = this.audioContext
    const destination = this.audioContext.destination
    this.createdNodes.musicGain ||= audioContext.createGain()
    this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
    if (musicAudioElement.currentSrc !== this.src) {
      // Fixed: guard the disconnect — on the first run no source exists yet.
      this.createdNodes.musicSource?.disconnect()
      // Fixed: reuse an existing source node. createMediaElementSource()
      // throws InvalidStateError when called twice for the same element;
      // the node is reconnected below.
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
      this.createdNodes.musicGain.connect(destination)
    }
    this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
    this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
    this.createdNodes.musicGain.connect(destination)
    this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
    this.connected = true
    this.fadeInGains()
    // useAudioStore().playing = true
  } else {
    // Music has just stopped: fade out and rebuild a fresh context so
    // gain does not accumulate across restarts.
    this.fadeOutGains()
    this.createdNodes = {} // Fixed: was `[]`; createdNodes is used as a plain object map.
    this.refreshAudioContext()
    this.connected = false
  }
},
applyStoredVolume () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
// Setze die Lautstärke des Audio-Elements
audioElement.volume = this.volume
// Emitiere ein Event, um die Lautstärke in AudioElement zu aktualisieren
element.$emit('update:volume', this.volume)
},
updateMusicGain (volume: number) {
this.volume = volume // Lautstärke speichern
useAudioStore().setVolume(volume)
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 1)
}
}
}
}
</script>

View File

@@ -0,0 +1,180 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainForest',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof AudioNode) {
const tobedisconnected = node as AudioNode
tobedisconnected.disconnect()
node = null
}
})
this.createdNodes = null
this.connected = false
}
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
// Fade the forest music gain from 0 to 1 over three seconds.
fadeInGains () {
  if (useAudioStore().playing !== true) { return }
  const fadeSeconds = 3.0
  const fadeTime = this.audioContext.currentTime + fadeSeconds
  this.fading = true
  this.unmute()
  const musicGain = this.createdNodes.musicGain
  musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
  musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
  setTimeout(() => {
    this.fading = false
  }, fadeSeconds * 1000) // Fixed: was `fadeTime * 1000` — an absolute timestamp, not a duration.
},
// Ramp the forest music gain down to silence over 0.4 s.
fadeOutGains () {
  if (this.createdNodes.musicGain) {
    // Fixed: read from `musicGain` — `createdNodes.musicGainValue` never
    // existed and made every fade-out throw.
    const musicGainValue = this.createdNodes.musicGain.gain.value
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 0.4)
  }
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
// Connect the forest track's element source through its gain node to the
// destination and fade in; on stop, fade out and recycle the context.
handlePlayingUpdate (state: boolean) {
  if (state) {
    const musicElement = this.$refs.Music as typeof AudioElement
    const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
    const audioContext = this.audioContext
    const destination = this.audioContext.destination
    this.createdNodes.musicGain ||= audioContext.createGain()
    this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
    if (musicAudioElement.currentSrc !== this.forest_src) {
      // Fixed: guard the disconnect — there is no source on the first run.
      this.createdNodes.musicSource?.disconnect()
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
      this.createdNodes.musicGain.connect(destination)
    }
    this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
    this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
    this.createdNodes.musicGain.connect(destination)
    this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
    this.connected = true
    this.fadeInGains()
    useAudioStore().playing = true
  } else {
    // Music has just stopped: fade out and rebuild a fresh context so
    // gain does not accumulate across restarts.
    this.fadeOutGains()
    this.createdNodes = {} // Fixed: was `[]`; createdNodes is used as a plain object map.
    this.refreshAudioContext()
    this.connected = false
  }
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,170 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="lagoon_src"
title="Lagoon"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainLagoon',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
lagoon_src: window.location.origin + useRuntimeConfig().public.tracks.lagoon_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof AudioNode) {
const tobedisconnected = node as AudioNode
tobedisconnected.disconnect()
node = null
}
})
this.createdNodes = null
}
},
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
// Fade the lagoon music gain from 0 to 1 over three seconds.
fadeInGains () {
  if (useAudioStore().playing !== true) { return }
  const fadeSeconds = 3.0
  const fadeTime = this.audioContext.currentTime + fadeSeconds
  this.fading = true
  this.unmute()
  const musicGain = this.createdNodes.musicGain
  musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
  musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
  setTimeout(() => {
    this.fading = false
  }, fadeSeconds * 1000) // Fixed: was `fadeTime * 1000` — an absolute timestamp, not a duration.
},
// Ramp the lagoon music gain down to silence over ~1.3 s.
fadeOutGains () {
  if (this.createdNodes.musicGain) {
    // Fixed: read from `musicGain` — `createdNodes.musicGainValue` never
    // existed and made every fade-out throw.
    const musicGainValue = this.createdNodes.musicGain.gain.value
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
  }
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
// Connect the lagoon track's element source through its gain node to the
// destination and fade in; on stop, fade out and recycle the context.
handlePlayingUpdate (state: boolean) {
  if (state) {
    const musicElement = this.$refs.Music as typeof AudioElement
    const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
    const audioContext = this.audioContext
    const destination = this.audioContext.destination
    this.createdNodes.musicGain ||= audioContext.createGain()
    this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
    if (musicAudioElement.currentSrc !== this.lagoon_src) {
      // Fixed: guard the disconnect — there is no source on the first run.
      this.createdNodes.musicSource?.disconnect()
      // Fixed: reuse an existing source node; createMediaElementSource()
      // throws InvalidStateError when called twice for the same element.
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
      this.createdNodes.musicGain.connect(destination)
    }
    this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
    this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
    this.createdNodes.musicGain.connect(destination)
    this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
    this.connected = true
    this.fadeInGains()
  } else {
    // Music has just stopped: fade out and rebuild a fresh context so
    // gain does not accumulate across restarts.
    this.fadeOutGains()
    this.createdNodes = {} // Fixed: was `[]`; createdNodes is used as a plain object map.
    this.refreshAudioContext()
    this.connected = false
  }
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,179 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="meadow_src"
title="Meadow"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainMeadow',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
meadow_src: window.location.origin + useRuntimeConfig().public.tracks.meadow_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof AudioNode) {
const tobedisconnected = node as AudioNode
tobedisconnected.disconnect()
node = null
}
})
this.createdNodes = null
}
},
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
// Fade the meadow music gain from 0 to 1 over three seconds.
fadeInGains () {
  if (useAudioStore().playing !== true) { return }
  const fadeSeconds = 3.0
  const fadeTime = this.audioContext.currentTime + fadeSeconds
  this.fading = true
  this.unmute()
  const musicGain = this.createdNodes.musicGain
  musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
  musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
  setTimeout(() => {
    this.fading = false
  }, fadeSeconds * 1000) // Fixed: was `fadeTime * 1000` — an absolute timestamp, not a duration.
},
// Ramp the meadow music gain down to silence over ~1.3 s.
fadeOutGains () {
  if (this.createdNodes.musicGain) {
    // Fixed: read from `musicGain` — `createdNodes.musicGainValue` never
    // existed and made every fade-out throw.
    const musicGainValue = this.createdNodes.musicGain.gain.value
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
  }
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
// Connect the meadow track's element source through its gain node to the
// destination and fade in; on stop, fade out and recycle the context.
handlePlayingUpdate (state: boolean) {
  if (state) {
    const musicElement = this.$refs.Music as typeof AudioElement
    const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
    const audioContext = this.audioContext
    const destination = this.audioContext.destination
    this.createdNodes.musicGain ||= audioContext.createGain()
    this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
    if (musicAudioElement.currentSrc !== this.meadow_src) {
      // Fixed: guard the disconnect — there is no source on the first run.
      this.createdNodes.musicSource?.disconnect()
      // Fixed: reuse an existing source node; createMediaElementSource()
      // throws InvalidStateError when called twice for the same element.
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
      this.createdNodes.musicGain.connect(destination)
    }
    this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
    this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
    this.createdNodes.musicGain.connect(destination)
    this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
    this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
    this.connected = true
    this.fadeInGains()
    useAudioStore().playing = true
  } else {
    // Music has just stopped: fade out and rebuild a fresh context so
    // gain does not accumulate across restarts.
    this.fadeOutGains()
    this.createdNodes = {} // Fixed: was `[]`; createdNodes is used as a plain object map.
    this.refreshAudioContext()
    this.connected = false
  }
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,178 @@
<template>
<div class="player">
<AudioElement
ref="Music"
key="1"
:src="tropics_src"
title="Tropic"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/music.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/music_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'MusicGainTropic',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
tropics_src: window.location.origin + useRuntimeConfig().public.tracks.tropics_src as string,
fading: false,
connected: false,
muted: false
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof AudioNode) {
const tobedisconnected = node as AudioNode
tobedisconnected.disconnect()
node = null
}
})
}
this.createdNodes = null
},
toggleMute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = !audioElement.muted
this.muted = audioElement.muted
},
mute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Music as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
    // Replaces the shared AudioContext with a fresh one and closes the old one.
    // This frees resources when playback stops; without it the output got
    // louder on every restart (nodes accumulated in the old graph).
    refreshAudioContext () {
      const newAudioContext = new AudioContext()
      this.audioContext.close()
      useAudioStore().audioContext = newAudioContext
      this.audioContext = useAudioStore().getContext()
    },
fadeInGains () {
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, fadeTime * 1000)
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGainValue.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
} else if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.tropics_src) {
this.createdNodes.musicSource.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// Music has just stopped react on it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = []
this.refreshAudioContext()
this.connected = false
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,192 @@
<template>
<div class="player">
<AudioElement
ref="Noise"
v-model:volume="volume"
:src="tropics_src"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayNoise"
/>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'NoiseGain',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
musicReady: false,
tropics_src: window.location.origin + useRuntimeConfig().public.tracks.masking_src as string,
fading: false,
connected: false,
muted: false,
volume: useAudioStore().noiseVolume,
previousVolume: useAudioStore().noiseVolume
}
},
beforeUnmount () {
this.disconnectNodes()
},
methods: {
disconnectNodes () {
if (typeof this.createdNodes === 'object' && this.createdNodes !== null) {
Object.values(this.createdNodes).forEach((node) => {
// Check if the node exists and has a disconnect method
if (node && typeof AudioNode) {
const tobedisconnected = node as AudioNode
tobedisconnected.disconnect()
node = null
}
})
}
this.createdNodes = null
},
    toggleMute () {
      // Toggle mute on the noise <audio> element while remembering the previous
      // volume, then propagate the new volume to the store and the AudioElement.
      const element = this.$refs.Noise as typeof AudioElement
      const audioElement = element.$refs.audioElement as HTMLMediaElement
      if (this.muted) {
        // Unmute: restore the previously stored volume value
        this.muted = false
        audioElement.muted = false
        this.volume = this.previousVolume || 1 // If no previous value was stored, fall back to 1
        audioElement.volume = this.volume
      } else {
        // Mute: remember the current volume value and mute the audio
        this.previousVolume = this.volume
        this.volume = 0
        audioElement.volume = 0
        this.muted = true
        audioElement.muted = true
      }
      useAudioStore().setNoiseVolume(this.volume)
      element.$emit('update:volume', this.volume)
    },
mute () {
const element = this.$refs.Noise as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = true
this.muted = audioElement.muted
},
unmute () {
const element = this.$refs.Noise as typeof AudioElement
const audioElement = element.$refs.audioElement as HTMLMediaElement
audioElement.muted = false
this.muted = audioElement.muted
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
this.fading = true
this.unmute()
const musicGain = this.createdNodes.musicGain
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
setTimeout(() => {
this.fading = false
}, fadeTime * 1000)
},
fadeOutGains () {
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.musicGainValue.gain.value
this.createdNodes.musicGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayNoise () {
// useNuxtApp().$logger.log('NoiseElemeint has now playingstate: ' + state)
this.musicReady = true
this.handlePlayingUpdate(true)
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
return true
},
handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (!state) {
this.mute()
return
}
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
} else {
this.fadeOutGains()
}
}
},
handlePlayingUpdate (state: boolean) {
if (state) {
const musicElement = this.$refs.Noise as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
if (musicAudioElement.currentSrc !== this.tropics_src) {
this.createdNodes.musicSource.disconnect()
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
}
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.connected = true
this.fadeInGains()
// useAudioStore().playing = true
} else {
// Noise has just stopped react on it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = []
this.refreshAudioContext()
this.connected = false
}
},
    applyStoredVolume () {
      // Pushes the volume stored on this component onto the <audio> element
      // and notifies the AudioElement child so its slider stays in sync.
      const element = this.$refs.Noise as typeof AudioElement
      const audioElement = element.$refs.audioElement as HTMLMediaElement
      // Set the volume of the audio element
      audioElement.volume = this.volume
      // Emit an event so the volume is updated inside AudioElement as well
      element.$emit('update:volume', this.volume)
    },
updateNoiseGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,239 @@
<template>
<div class="player">
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
:playlist="music_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
musicReady: false,
micReady: false,
deviceReady: false,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
music_src: [window.location.origin + useRuntimeConfig().public.tracks.lagoon_src as string, window.location.origin + useRuntimeConfig().public.tracks.tropics_src as string, window.location.origin + useRuntimeConfig().public.tracks.forest_src as string, window.location.origin + useRuntimeConfig().public.tracks.meadow_src as string] as string[],
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false
}
},
methods: {
    // Receives a microphone MediaStream from the Microphone component and
    // creates the MediaStreamSource node for it. Must run before the noise
    // device is connected to the audio graph.
    setupMicrophone (stream:MediaStream) {
      try {
        this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
        this.micReady = true
      } catch (error: any) {
        // Mark the mic as unavailable before surfacing the failure to the caller.
        this.micReady = false
        throw new Error(error.message)
      }
    },
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
const fadeTime = this.audioContext.currentTime + 6.0
this.fading = true
const noiseGain = this.createdNodes.noiseGain
const musicGain = this.createdNodes.musicGain
noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
this.createdNodes.noiseSource.muted = false
this.createdNodes.musicSource.muted = false
setTimeout(() => {
this.fading = false
}, fadeTime * 1000)
},
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic () {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = true
},
handleCanPlayNoise () {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = true
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
if (state) {
this.handlePlayingUpdate(state)
this.fadeInGains()
} else {
this.fadeOutGains()
}
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
// useNuxtApp().$logger.log('micophone not yet ready attach it!! ')
// useNuxtApp().$logger.log('microphone attached' + stream)
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
    handlePlayingUpdate (state: boolean) {
      // Builds the full WebAudio graph (microphone + noise device + music) when
      // playback starts; fades out and tears everything down when it stops.
      // Stop the music again, mute it and set the noiseReady or musicReady to true
      if (state) {
        const noiseElement = this.$refs.Noise as typeof AudioElement
        const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
        const musicElement = this.$refs.Music as typeof AudioElement
        const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
        const audioContext = this.audioContext
        const destination = this.audioContext.destination
        // Lazily create the gain nodes and start them silent; fadeInGains ramps them up.
        this.createdNodes.musicGain ||= audioContext.createGain()
        this.createdNodes.noiseGain ||= audioContext.createGain()
        this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
        if (musicAudioElement.currentSrc !== this.forest_src) {
          // NOTE(review): on the first run musicSource is still undefined, so
          // this unconditional disconnect() throws — consider optional chaining.
          this.createdNodes.musicSource.disconnect()
          this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
          this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
          this.createdNodes.musicGain.connect(destination)
        }
        this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
        this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
        // HERE THE NOISE PATCH COMES INTO PLAY
        this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
        this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
        // Routing: mic -> device input 0; noise channels 0/1 -> device inputs 1/2.
        this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
        this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
        this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
        this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
        this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
        this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
        this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
        this.createdNodes.noiseGain.connect(destination)
        this.createdNodes.musicGain.connect(destination)
        // Drop any pending ramps and restart from silence before fading in.
        this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
        musicAudioElement.muted = false
        noiseAudioElement.muted = false
        this.connected = true
        this.fadeInGains()
        useAudioStore().playing = true
      } else {
        // Music has just stopped react on it.
        this.fadeOutGains()
        this.createdNodes = []
        this.refreshAudioContext()
        this.connected = false
      }
    },
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,263 @@
<template>
<div class="player">
{{ controllValues }}
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="lagoon_src"
title="Lagoon"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
musicReady: false,
micReady: false,
deviceReady: false,
lagoon_src: window.location.origin + useRuntimeConfig().public.tracks.lagoon_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false,
lastUpdate: Date.now(),
updateInterval: 125,
controllValues: new Map() // milliseconds
}
},
methods: {
// This methodd gets a microphone stream from the micorphone component and creates the microphone node
// need to be called before the noise device is connected to the audio graph
setupMicrophone (stream:MediaStream) {
try {
this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice.port.onmessage = this.handleEvent
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
    handleEvent (event:any) {
      // Throttled handler for messages arriving on the noise device's port:
      // samples control values (tags out3..out11) into `controllValues` at
      // most once per `updateInterval` milliseconds.
      const now = Date.now()
      if (now - this.lastUpdate < this.updateInterval) { return } // Skip this update
      if (event.data && Array.isArray(event.data) && event.data.length > 1) {
        const eventDataDetail = event.data[1] // Assuming the relevant data is at index 1
        if (eventDataDetail && eventDataDetail.tag && eventDataDetail.payload && Array.isArray(eventDataDetail.payload)) {
          // Accept only tags out3..out9, out10 and out11.
          if (/out[3-9]|out1[01]/.test(eventDataDetail.tag)) {
            this.controllValues.set(eventDataDetail.tag, eventDataDetail.payload[0])
            this.lastUpdate = now
          }
        }
      }
    },
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
const fadeTime = this.audioContext.currentTime + 6.0
this.fading = true
const noiseGain = this.createdNodes.noiseGain
const musicGain = this.createdNodes.musicGain
noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
this.createdNodes.noiseSource.muted = false
this.createdNodes.musicSource.muted = false
setTimeout(() => {
this.fading = false
}, fadeTime * 1000)
},
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic (state: boolean) {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = state
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
// useNuxtApp().$logger.log('micophone not yet ready attach it!! ')
// useNuxtApp().$logger.log('microphone attached' + stream)
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
handlePlayingUpdate (state: boolean) {
// Stop the music again, mute it and set the noiseReady or musicReady to true
if (state) {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
// HERE THE NOISE PATCH COMES INTO PLAY
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
// useNuxtApp().$logger.log({ currentlyCreatedNodes })
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicAudioElement.muted = false
noiseAudioElement.muted = false
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// Music has just stopped react on it.
this.fadeOutGains()
this.createdNodes = []
this.refreshAudioContext()
this.connected = false
}
},
    mounted () {
      // NOTE(review): this `mounted` is declared inside the `methods` object,
      // so Vue will NOT invoke it as a lifecycle hook — unless something calls
      // `this.mounted()` explicitly, addMediaNavigationHandling() never runs.
      // It should be moved to the component's top level; confirm intent.
      this.addMediaNavigationHandling()
    },
    addMediaNavigationHandling () {
      // Wires hardware/OS media keys (Media Session API) to the shared audio
      // store so external play/pause controls drive playback state.
      if ('mediaSession' in navigator) {
        // Play action
        navigator.mediaSession.setActionHandler('play', (_e) => {
          useAudioStore().setPlaying(true)
        })
        // Pause action
        navigator.mediaSession.setActionHandler('pause', (_e) => {
          useAudioStore().setPlaying(false)
        })
      }
    },
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,242 @@
<template>
<div class="player">
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="meadow_src"
title="Meadow"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
data () {
return {
audioContext: useAudioStore().getContext(),
createdNodes: {} as any,
noiseReady: false,
musicReady: false,
micReady: false,
deviceReady: false,
meadow_src: window.location.origin + useRuntimeConfig().public.tracks.meadow_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
fading: false,
connected: false
}
},
methods: {
// This methodd gets a microphone stream from the micorphone component and creates the microphone node
// need to be called before the noise device is connected to the audio graph
setupMicrophone (stream:MediaStream) {
try {
this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
const fadeTime = this.audioContext.currentTime + 6.0
this.fading = true
const noiseGain = this.createdNodes.noiseGain
const musicGain = this.createdNodes.musicGain
noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
this.createdNodes.noiseSource.muted = false
this.createdNodes.musicSource.muted = false
setTimeout(() => {
this.fading = false
}, fadeTime * 1000)
},
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic (state: boolean) {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = state
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
// useNuxtApp().$logger.log('micophone not yet ready attach it!! ')
// useNuxtApp().$logger.log('microphone attached' + stream)
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
    // Wires the Web Audio graph and fades in when `state` is true; fades out
    // and tears the whole graph down when it is false.
    handlePlayingUpdate (state: boolean) {
      // Stop the music again, mute it and set the noiseReady or musicReady to true
      if (state) {
        // Resolve the raw <audio> elements owned by the two AudioElement children.
        const noiseElement = this.$refs.Noise as typeof AudioElement
        const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
        const musicElement = this.$refs.Music as typeof AudioElement
        const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
        const audioContext = this.audioContext
        const destination = this.audioContext.destination
        // Create the gain and source nodes only once (||=); start gains at 0
        // so fadeInGains ramps up from silence.
        this.createdNodes.musicGain ||= audioContext.createGain()
        this.createdNodes.noiseGain ||= audioContext.createGain()
        this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
        this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
        this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
        // HERE THE NOISE PATCH COMES INTO PLAY
        // Mic and noise are split into channels and fed into the RNBO noise
        // device: mic -> input 0, noise L/R -> inputs 1 and 2.
        this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
        this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
        this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
        this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
        this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
        this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
        this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
        this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
        this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
        this.createdNodes.noiseGain.connect(destination)
        this.createdNodes.musicGain.connect(destination)
        // Drop any pending ramps and re-pin both gains at 0 before fading in.
        this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
        this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
        this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
        // Unmute the elements; audibility is controlled by the gain ramps.
        musicAudioElement.muted = false
        noiseAudioElement.muted = false
        this.connected = true
        this.fadeInGains()
        useAudioStore().playing = true
      } else {
        // Music has just stopped react on it.
        // useNuxtApp().$logger.log('Stop everything webaudio is still running')
        this.fadeOutGains()
        this.createdNodes = []
        this.refreshAudioContext()
        this.connected = false
      }
    },
    // NOTE(review): this `mounted` is declared inside the `methods` option, so
    // Vue never invokes it as a lifecycle hook — the media-session handlers
    // are only registered if something calls `this.mounted()` explicitly. It
    // likely belongs at the component's top level; confirm before moving.
    mounted () {
      this.addMediaNavigationHandling()
    },
addMediaNavigationHandling () {
if ('mediaSession' in navigator) {
// Play action
navigator.mediaSession.setActionHandler('play', (_e) => {
useAudioStore().setPlaying(true)
})
// Pause action
navigator.mediaSession.setActionHandler('pause', (_e) => {
useAudioStore().setPlaying(false)
})
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,241 @@
<template>
<div class="player">
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="tropics_src"
title="Tropics"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate2"
@update:canplay="handleCanPlayMusic"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'NoiseMusicGain',
components: { AudioElement, Microphone },
  data () {
    return {
      // Shared AudioContext obtained from the audio store.
      audioContext: useAudioStore().getContext(),
      // Web Audio nodes created on demand, keyed by role (gains, sources, splitters…).
      createdNodes: {} as any,
      // Readiness flags; the graph is only wired once all four are true.
      noiseReady: false,
      musicReady: false,
      micReady: false,
      deviceReady: false,
      // Absolute URLs of the music and noise tracks from runtime config.
      tropics_src: window.location.origin + useRuntimeConfig().public.tracks.tropics_src as string,
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
      fading: false, // true while a fade ramp is in progress
      connected: false // true while the graph is wired to the destination
    }
  },
methods: {
// This methodd gets a microphone stream from the micorphone component and creates the microphone node
// need to be called before the noise device is connected to the audio graph
setupMicrophone (stream:MediaStream) {
try {
this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
async setupDevice () {
try {
const deviceStore = useDevicesStore()
await deviceStore.createFullBandDevice('adaptive_masking_controller_NoMusic')
this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
this.deviceReady = true
} catch (error) {
this.deviceReady = false
}
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
// useNuxtApp().$logger.log('Fade In Gains')
const fadeTime = this.audioContext.currentTime + 6.0
this.fading = true
const noiseGain = this.createdNodes.noiseGain
const musicGain = this.createdNodes.musicGain
noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
musicGain.gain.linearRampToValueAtTime(1.0, fadeTime)
this.createdNodes.noiseSource.muted = false
this.createdNodes.musicSource.muted = false
setTimeout(() => {
this.fading = false
}, fadeTime * 1000)
},
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
if (this.createdNodes.musicGain) {
const musicGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(musicGainValue, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayMusic (state: boolean) {
// useNuxtApp().$logger.log('MusicElemeint has now playingstate: ' + state)
this.musicReady = state
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.musicReady) {
// useNuxtApp().$logger.log('music not ready')
return false
}
if (!this.noiseReady) {
// useNuxtApp().$logger.log('noise not ready')
return false
}
if (!this.micReady) {
// useNuxtApp().$logger.log('mic not ready')
return false
}
if (!this.deviceReady) {
// useNuxtApp().$logger.log('device not ready')
return false
}
return true
},
async handlePlayingUpdate2 (state: boolean) {
// useNuxtApp().$logger.log('A new State reached us, it is a handlingPlay update' + state)
// useNuxtApp().$logger.log('ReadyState of all:' + this.readyForWebaudio())
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
if (!this.deviceReady) { await this.setupDevice() }
if (!this.micReady) {
// useNuxtApp().$logger.log('micophone not yet ready attach it!! ')
}
if (this.readyForWebaudio()) {
this.handlePlayingUpdate(state)
} else {
// useNuxtApp().$logger.log('Waiting for all devices to be ready')
}
}
},
handlePlayingUpdate (state: boolean) {
// Stop the music again, mute it and set the noiseReady or musicReady to true
if (state) {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const musicElement = this.$refs.Music as typeof AudioElement
const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
this.createdNodes.musicGain ||= audioContext.createGain()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseSource = audioContext.createMediaElementSource(noiseAudioElement)
this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
// HERE THE NOISE PATCH COMES INTO PLAY
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.musicGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.musicGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
this.createdNodes.musicGain.gain.setValueAtTime(0, this.audioContext.currentTime)
musicAudioElement.muted = false
noiseAudioElement.muted = false
this.connected = true
this.fadeInGains()
useAudioStore().playing = true
} else {
// Music has just stopped react on it.
// useNuxtApp().$logger.log('Stop everything webaudio is still running')
this.fadeOutGains()
this.createdNodes = []
this.refreshAudioContext()
this.connected = false
}
},
    // NOTE(review): this `mounted` is declared inside the `methods` option, so
    // Vue never invokes it as a lifecycle hook — the media-session handlers
    // are only registered if something calls `this.mounted()` explicitly. It
    // likely belongs at the component's top level; confirm before moving.
    mounted () {
      this.addMediaNavigationHandling()
    },
addMediaNavigationHandling () {
if ('mediaSession' in navigator) {
// Play action
navigator.mediaSession.setActionHandler('play', (_e) => {
useAudioStore().setPlaying(true)
})
// Pause action
navigator.mediaSession.setActionHandler('pause', (_e) => {
useAudioStore().setPlaying(false)
})
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
},
updateMusicGain (volume: number) {
if (this.createdNodes.musicGain) {
this.createdNodes.musicGain.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,266 @@
<template>
<div class="player">
<div v-if="deviceReady">RNBOValues: {{ createdNodes.noiseDevice }}</div>
<Microphone ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:canplay="handleCanPlayNoise"
@update:playing="handlePlayingUpdate2"
>
<template #default="{}">
<img v-if="!muted" style="width: 25px; height: 25px;" src="~/assets/image/sound.svg" title="Click to mute" @click="toggleMute()">
<img v-if="muted" style="width: 25px; height: 25px;" src="~/assets/image/sound_muted.svg" title="Click to unmute" @click="toggleMute()">
</template>
</AudioElement>
</div>
</template>
<script lang="ts">
import type { Device } from '@rnbo/js'
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import Microphone from '../tests/Microphone.vue'
import { useDevicesStore } from '../../../stores/device'
export default {
name: 'RNBODevice',
components: { AudioElement, Microphone },
emits: { 'update:control-value': null },
  data () {
    return {
      // Shared AudioContext obtained from the audio store.
      audioContext: useAudioStore().getContext(),
      // Web Audio nodes created on demand, keyed by role (gain, source, splitters…).
      createdNodes: {} as any,
      // Readiness flags; the graph is only wired once all three are true.
      noiseReady: false,
      micReady: false,
      deviceReady: false,
      // Absolute URL of the noise track from runtime config.
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string,
      fading: false, // true while a fade ramp is in progress
      connected: false, // true while the graph is wired to the destination
      muted: false // mirrors the noise <audio> element's muted flag for the UI
    }
  },
methods: {
toggleMute () {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
noiseAudioElement.muted = !noiseAudioElement.muted
this.muted = noiseAudioElement.muted
},
mute () {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
noiseAudioElement.muted = true
this.muted = true
},
unmute () {
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
noiseAudioElement.muted = false
this.muted = false
},
// This methodd gets a microphone stream from the micorphone component and creates the microphone node
// need to be called before the noise device is connected to the audio graph
setupMicrophone (stream:MediaStream) {
useNuxtApp().$logger.log('setup Microphone')
try {
this.createdNodes.microphone ||= this.audioContext.createMediaStreamSource(stream)
this.micReady = true
} catch (error: any) {
this.micReady = false
throw new Error(error.message)
}
},
    // This method sets up an RNBO device: it takes the patch name, stores the
    // resulting noise audio node in createdNodes and subscribes to the
    // device's outport messages.
    async setupDevice () {
      // The context must be running or device creation/playback can stall.
      await useAudioStore().ensureAudioContextRunning()
      useNuxtApp().$logger.log('setup Device')
      try {
        const deviceStore = useDevicesStore()
        const device = await deviceStore.createNoiseDevice('adaptive_masking_controller_NoMusic') as Device // NOTE(review): "I am not getting any analysis values out" (translated)
        this.createdNodes.noiseDevice = deviceStore.getDeviceAudioNode('adaptive_masking_controller_NoMusic')
        this.deviceReady = true
        this.attachDBValueListener(device)
      } catch (error) {
        useNuxtApp().$logger.error('Error setting up device, fall back.', { error })
        this.deviceReady = false
      }
    },
// This method takes the controll value before ramp and controls the volume of music
attachDBValueListener (noiseDevice: Device) {
noiseDevice.messageEvent.subscribe((ev: any) => {
try {
// if (ev.tag === 'out4') { // out4 represents controll value before Timeramp
if (ev.tag === 'out3') { // out3 represents controll value in dB after Timeramp
const newValue = ev.payload
useNuxtApp().$logger.log('out3= ' + newValue[0])
}
if (ev.tag === 'out4') { // out3 represents controll value in dB after Timeramp
const newValue = ev.payload
useNuxtApp().$logger.log('out4= ' + newValue[0])
}
if (ev.tag === 'out5') { // out3 represents controll value in dB after Timeramp
const newValue = ev.payload
useNuxtApp().$logger.log('out5= ' + newValue[0])
}
if (ev.tag === 'ou6') { // out3 represents controll value in dB after Timeramp
const newValue = ev.payload
useNuxtApp().$logger.log('out6= ' + newValue[0])
}
if (ev.tag === 'out7') { // out3 represents controll value in dB after Timeramp
const newValue = ev.payload
useNuxtApp().$logger.log('out7= ' + newValue[0])
}
if (ev.tag === 'out8') { // out3 represents controll value in dB after Timeramp
const newValue = ev.payload
useNuxtApp().$logger.log('out8= ' + newValue[0])
}
if (ev.tag === 'out9= ') { // out3 represents controll value in dB after Timeramp
const newValue = ev.payload
useNuxtApp().$logger.log('Band 1000 = ' + newValue)
this.$emit('update:control-value', newValue[0])
}
} catch (error: any) {
// this.$logger.warn('Failed to attach a control value listener, music is not gain controlled.')
}
})
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeInGains () {
this.unmute()
// useNuxtApp().$logger.log('Fade In Gains')
if (useAudioStore().playing !== true) { return }
const fadeTime = this.audioContext.currentTime + 3.0
setTimeout(() => {
this.fading = true
const noiseGain = this.createdNodes.noiseGain
noiseGain.gain.linearRampToValueAtTime(1.0, fadeTime)
}, 450)
setTimeout(() => {
this.fading = false
}, fadeTime * 1000)
},
fadeOutGains () {
if (this.createdNodes.noiseGain) {
const noiseGainValue = this.createdNodes.noiseGain.gain.value
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(noiseGainValue, this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, this.audioContext.currentTime + 1.3)
}
},
handleCanPlayNoise (state: boolean) {
// useNuxtApp().$logger.log('NoiseElement has now playingstate: ' + state)
this.noiseReady = state
},
readyForWebaudio () {
if (!this.noiseReady) {
return false
}
if (!this.micReady) {
return false
}
if (!this.deviceReady) {
return false
}
return true
},
    // Reacts to the play-state event: false mutes immediately; true wires the
    // Web Audio graph, first finishing any setup that is still missing.
    async handlePlayingUpdate2 (state: boolean) {
      useNuxtApp().$logger.log('handling Playing Update2= ' + state, this.audioContext.state)
      if (!state) {
        this.mute()
        return
      }
      if (this.readyForWebaudio()) {
        this.handlePlayingUpdate(true)
      } else {
        // Lazily create the RNBO device on first play.
        if (!this.deviceReady) {
          useNuxtApp().$logger.log('Device is not ready, create it now')
          await this.setupDevice()
        }
        // Microphone attachment is driven by the Microphone component; there
        // is nothing to do here yet.
        if (!this.micReady) {
          // await this.setupMicrophone(Microphone)
          useNuxtApp().$logger.log('micophone not yet ready attach it!! ')
          // useNuxtApp().$logger.log('microphone attached' + stream)
        }
        // Retry once after setup; otherwise wait for further ready events.
        if (this.readyForWebaudio()) {
          useNuxtApp().$logger.log('everything is now ready start play')
          this.handlePlayingUpdate(true)
        } else {
          useNuxtApp().$logger.log('Waiting for all devices to be ready')
        }
      }
    },
handlePlayingUpdate (state: boolean) {
try {
// Stop the music again, mute it and set the noiseReady or musicReady to true
if (state) {
useNuxtApp().$logger.log('stop playing')
const noiseElement = this.$refs.Noise as typeof AudioElement
const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
const audioContext = this.audioContext
const destination = this.audioContext.destination
audioContext.resume()
this.createdNodes.noiseGain ||= audioContext.createGain()
this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
// HERE THE NOISE PATCH COMES INTO PLAY
this.createdNodes.micSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.noiseInputChannelSplitter ||= audioContext.createChannelSplitter(2)
this.createdNodes.microphone.connect(this.createdNodes.micSplitter)
this.createdNodes.noiseSource.connect(this.createdNodes.noiseInputChannelSplitter)
this.createdNodes.micSplitter.connect(this.createdNodes.noiseDevice, 0, 0)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 0, 1)
this.createdNodes.noiseInputChannelSplitter.connect(this.createdNodes.noiseDevice, 1, 2)
this.createdNodes.noiseDevice.connect(this.createdNodes.noiseGain)
this.createdNodes.noiseGain.connect(destination)
this.createdNodes.noiseGain.gain.cancelScheduledValues(this.audioContext.currentTime)
this.createdNodes.noiseGain.gain.setValueAtTime(0, this.audioContext.currentTime)
noiseAudioElement.muted = false
this.connected = true
this.unmute()
this.fadeInGains()
useAudioStore().playing = true
this.$logger.info('RNBO Patch successfully connected and playing')
} else {
// Music has just stopped react on it.
this.$logger.info('Stopping audio and disconnecting RNBO Patch')
this.fadeOutGains()
this.createdNodes = []
this.refreshAudioContext()
this.connected = false
}
} catch (error) {
this.$logger.info('Error in handlePlayingUpdate')
this.connected = false
useAudioStore().playing = false
// You might want to show an error message to the user here
}
},
updateNoiseGain (volume: number) {
if (this.createdNodes.noiseGain) {
useNuxtApp().$logger.log('volume= ' + volume)
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
}
}
}
}
</script>

View File

@@ -0,0 +1,34 @@
<template>
<div>
<iframe
style="border-radius:12px"
:src="spotifyEmbedUrl"
width="100%"
height="352"
frameBorder="0"
allowfullscreen=""
allow="autoplay; clipboard-write; encrypted-media; fullscreen; picture-in-picture"
loading="lazy"
/>
</div>
</template>
<script>
export default {
  name: 'PomodoroPlaylist',
  props: {
    // Spotify resource path such as 'playlist/<id>'; empty selects the default.
    spotifyUri: {
      type: String,
      default: ''
    }
  },
  computed: {
    // Build the Spotify embed URL, falling back to the default focus playlist
    // when no URI was supplied.
    spotifyEmbedUrl () {
      return this.spotifyUri
        ? `https://open.spotify.com/embed/${this.spotifyUri}`
        : 'https://open.spotify.com/embed/playlist/6HuAVqLOmYskc2qOaBZBBz?utm_source=generator'
    }
  }
}
</script>

View File

@@ -0,0 +1,63 @@
<template>
<div class="episodes">
<button class="episode" data-spotify-id="spotify:episode:7makk4oTQel546B0PZlDM5">
My Path to Spotify: Women in Engineering
</button>
<button class="episode" data-spotify-id="spotify:episode:43cbJh4ccRD7lzM2730YK3">
What is Backstage?
</button>
<button class="episode" data-spotify-id="spotify:episode:6I3ZzCxRhRkNqnQNo8AZPV">
Introducing Nerd Out@Spotify
</button>
</div>
<div id="embed-iframe" />
</template>
<script lang="js">
export default {
  name: 'SpotifyTest',
  mounted () {
    // The Spotify iFrame API invokes this global once its script has loaded;
    // mount a controller for the default episode into #embed-iframe.
    window.onSpotifyIframeApiReady = (IFrameAPI) => {
      const host = document.getElementById('embed-iframe')
      const options = {
        uri: 'spotify:episode:7makk4oTQel546B0PZlDM5'
      }
      IFrameAPI.createController(host, options, () => {})
    }
  }
}
</script>
<style type="text/css">
.episodes {
display: flex;
flex-direction: column;
}
.episode {
min-width: max-content;
margin-bottom: .8rem;
padding: .8rem 1rem;
border-radius: 10px;
border: 0;
background: #191414;
color: #fff;
cursor: pointer;
}
.episode:hover {
background: #1Db954;
}
@media screen and (min-width: 860px) {
body {
display: flex;
flex-direction: row;
gap: 1rem;
}
}
</style>

View File

@@ -0,0 +1,28 @@
<template>
<div v-if="spotifyUri">
<iframe
:src="spotifyEmbedUrl"
width="100%"
height="380"
frameborder="0"
allowtransparency="true"
allow="encrypted-media"
/>
</div>
</template>
<script>
export default {
  props: {
    // Spotify resource path such as 'playlist/<id>' or 'album/<id>'.
    spotifyUri: {
      type: String,
      default: ''
    }
  },
  computed: {
    // Embed URL for the given URI; the template hides the iframe entirely
    // when spotifyUri is empty.
    spotifyEmbedUrl () {
      return 'https://open.spotify.com/embed/' + this.spotifyUri
    }
  }
}
</script>

View File

@@ -0,0 +1,56 @@
<template>
<h1>hell yeah</h1>
<div @click="newDummyAudio"> NEW AUDIO</div>
<div @click="showArtWork">SHOW ARTWORK</div>
<div @click="play">PLAY</div>
</template>
<script lang="ts" setup>
import { watch, onMounted } from 'vue'
import { useAudioStore, ensureAudio } from '~/stores/audio'
// Build a muted, looping dummy <audio> element (tiny inline MP3) so the media
// session has something to control, start it, and return it so callers can
// drive it further (previously the element was created and discarded).
const createAudioTag = () => {
  const newDummyAudio = new Audio('data:audio/mp3;base64,SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU2LjM2LjEwMAAAAAAAAAAAAAAA//OEAAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAAEAAABIADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV6urq6urq6urq6urq6urq6urq6urq6urq6v////////////////////////////////8AAAAATGF2YzU2LjQxAAAAAAAAAAAAAAAAJAAAAAAAAAAAASDs90hvAAAAAAAAAAAAAAAAAAAA//MUZAAAAAGkAAAAAAAAA0gAAAAATEFN//MUZAMAAAGkAAAAAAAAA0gAAAAARTMu//MUZAYAAAGkAAAAAAAAA0gAAAAAOTku//MUZAkAAAGkAAAAAAAAA0gAAAAANVVV')
  newDummyAudio.loop = true
  newDummyAudio.controls = true
  newDummyAudio.muted = true
  newDummyAudio.play()
  return newDummyAudio
}
// NOTE(review): `newDummyAudio` is not defined in this scope — it is a local
// variable inside createAudioTag — so clicking PLAY throws a ReferenceError.
// This probably meant to reuse the element built by createAudioTag; confirm
// the intent and wire the two together.
const play = (state) => {
  const audio = newDummyAudio()
  if (state) {
    audio.play().catch(() => {})
  }
}
// Publish track metadata (title/artist/album/artwork) to the OS media session.
const showArtWork = () => {
  const artwork1024 = window.location.origin + '/images/scenery/noise_artwork_1024.jpg'
  const artwork512 = window.location.origin + '/images/scenery/noise_artwork_512.jpg'
  // Clear first so the browser registers the replacement metadata object.
  navigator.mediaSession.metadata = null
  navigator.mediaSession.metadata = new MediaMetadata({
    title: 'Calm Speech Blocker',
    artist: 'mindboost',
    album: 'get your focus',
    artwork: [
      { src: artwork1024, sizes: '1024x1024', type: 'image/jpeg' },
      { src: artwork512, sizes: '512x512', type: 'image/jpeg' }
    ]
  })
}
// NOTE(review): this watcher has an empty body — it tracks
// useAudioStore().getPlaying but reacts to nothing. Placeholder or leftover?
watch(
  () => useAudioStore().getPlaying,
  (newVal) => {
  },
  { immediate: true }
)
// Media Session Setup separat
// On mount: wait for the audio system, create the dummy audio element and
// publish media-session artwork.
onMounted(async () => {
  const playState = useAudioStore().getPlaying // NOTE(review): read but never used
  // NOTE(review): `ensureAudio` is awaited as a value, not called — confirm it
  // is exported as a promise rather than a function.
  await ensureAudio
  createAudioTag()
  showArtWork()
})
</script>

View File

@@ -0,0 +1,62 @@
<template>
<li class="nav__item">
<a
id="focused-icon"
class="nav__item-link"
href="#"
@click.prevent="togglePlaying"
@touchstart.prevent="togglePlaying"
>
<svg v-if="!playing" width="24" height="24" fill="none" xmlns="http://www.w3.org/2000/svg">
<!-- PLAY ICON -->
<path
d="m6.192 3.67 13.568 7.633a.8.8 0 0 1 0 1.394L6.192 20.33A.8.8 0 0 1 5 19.632V4.368a.8.8 0 0 1 1.192-.698Z"
fill="currentColor"
/>
</svg>
<svg v-else width="24" height="24" fill="none" xmlns="http://www.w3.org/2000/svg">
<!-- PAUSE ICON -->
<g clip-path="url(#a)">
<path
d="M17.083 19.917a2.326 2.326 0 0 1-1.706-.71 2.332 2.332 0 0 1-.71-1.707V5.417c0-.665.236-1.234.71-1.706A2.333 2.333 0 0 1 17.083 3c.664 0 1.233.236 1.708.71.474.475.71 1.044.709 1.707V17.5a2.33 2.33 0 0 1-.71 1.707 2.322 2.322 0 0 1-1.707.71Zm-9.666 0a2.326 2.326 0 0 1-1.707-.71A2.332 2.332 0 0 1 5 17.5V5.417c0-.665.237-1.234.71-1.706A2.333 2.333 0 0 1 7.417 3c.663 0 1.233.236 1.707.71.475.475.71 1.044.71 1.707V17.5a2.33 2.33 0 0 1-.71 1.707 2.322 2.322 0 0 1-1.707.71Z"
fill="#e9c046"
/>
</g>
<defs>
<clipPath id="a">
<path fill="#e9c046" d="M0 0h24v24H0z" />
</clipPath>
</defs>
</svg>
</a>
</li>
</template>
<script>
import { mapState, mapActions } from 'pinia'
import { useAudioStore } from '~/stores/audio'
export default defineNuxtComponent({
  name: 'PlayButton',
  created () {
    // Store handle used by togglePlaying; assigned here rather than declared
    // in data(), so it is a plain (non-data) instance property.
    this.audioStore = useAudioStore()
  },
  computed: {
    // Global playing flag mapped straight from the audio store.
    ...mapState(useAudioStore, ['playing'])
  },
  watch: {
    playing (newValue) {
      this.$logger.log('Global playing state changed', newValue)
    }
  },
  methods: {
    // Invert the shared playing state (bound to click/touchstart in the template).
    togglePlaying () {
      this.audioStore.setPlaying(!this.audioStore.playing)
    }
  }
})
</script>

View File

@@ -0,0 +1,154 @@
<template>
<AudioReactiveBar />
<h3>Playing :{{ playing }}</h3>
<h3>Number of AudioTags {{ numberOfAudioTags }}</h3>
<p>Infos: {{ errorMessage }}</p>
<p>AudioContext State: {{ updateCtx }}</p>
<li v-for="(audio, index) in audioTags" :key="index">
Tag {{ index + 1 }}: <span v-if="checkAudioElementPlaying(audio)">Playing</span><span v-else>Not Playing</span>
</li>
</template>
<script lang="ts">
import { ref, watch, onMounted } from 'vue'
import { useAudioStore } from '~/stores/audio'
import AudioReactiveBar from '~/components/AudioReactiveBar.vue'
// Debug/status bar: shows the global playing flag, counts the page's <audio>
// tags and continuously reports their playback state.
export default {
  name: 'StateBar',
  components: { AudioReactiveBar },
  setup () {
    const audioStore = useAudioStore()
    // Local mirror of the store's playing flag (synced by the watcher below).
    const playing = ref(useAudioStore().playing)
    // One-time snapshot of the <audio> tags present at component creation.
    const audioTags = ref(Array.from(document.querySelectorAll('audio')))
    const numberOfAudioTags = ref(audioTags.value.length)
    const errorMessage = ref('')
    const updateCtx = ref(audioStore.audioContext)
    watch(() => audioStore.playing, (newValue) => {
      playing.value = newValue
      // Run the function to log the status of audio elements
      checkAudioElementsStatus()
    })
    // NOTE(review): this getter returns the ref *object* (a constant), so the
    // watcher never fires; watch `numberOfAudioTags` directly or return
    // `numberOfAudioTags.value`.
    watch(() => numberOfAudioTags, (_newValue) => {
      // useNuxtApp().$logger.log('new AudioTag Amount' + newValue)
      // Run the function to log the status of audio elements
      checkAudioElementsStatus()
    })
    // True when the element is actively playing; also mirrors a human-readable
    // status line into errorMessage.
    const checkAudioElementPlaying = (audioElement:HTMLAudioElement) => {
      const isPlaying = !audioElement.paused && audioElement.currentTime > 0 && !audioElement.ended
      // useNuxtApp().$logger.log(`Audio Element ${audioElement.src}: ${isPlaying ? 'Playing' : 'Not Playing'}`)
      errorMessage.value = `Audio Element ${audioElement.src}: ${isPlaying ? 'Playing' : 'Not Playing'}`
      return isPlaying
    }
    // Poll every <audio> element and rebuild the status line 10×/second.
    // NOTE(review): the interval id is discarded and onUnmounted is empty, so
    // polling continues after the component is destroyed — likely a leak.
    const monitor = () => {
      // This could be an interval or a direct method call in your gain changing methods
      setInterval(() => {
        const audioElements = document.querySelectorAll('audio')
        errorMessage.value = ''
        let currentTime, paused, ended
        audioElements.forEach((element) => {
          currentTime = element.currentTime
          paused = element.paused
          ended = element.ended
          errorMessage.value += (` Audio Element ${element.src}: time ${currentTime},paused ${paused}, ended ${ended} _______________`)
        })
      }, 100) // Update every 100 ms, adjust interval as necessary
    }
    onMounted(() => {
      monitor()
    })
    onUnmounted(() => {
    })
    return {
      numberOfAudioTags,
      audioTags,
      playing,
      checkAudioElementPlaying,
      errorMessage,
      monitor,
      updateCtx
    }
  }
}
// Debug helper: walk every <audio> element on the page. The per-element
// logging is commented out, so this is currently an intentional no-op walk.
function checkAudioElementsStatus () {
  for (const _audioElement of document.querySelectorAll('audio')) {
    // const _isPlaying = !audioElement.paused && audioElement.currentTime > 0 && !audioElement.ended
    // useNuxtApp().$logger.log(`Audio Element: ${isPlaying ? 'Playing' : 'Not Playing'}`, { audioElement })
  }
}
</script>
<style scoped>
/* The switch - the box around the slider */
.switch {
position: relative;
display: inline-block;
width: 60px;
height: 34px;
}
/* Hide default HTML checkbox */
.switch input {
opacity: 0;
width: 0;
height: 0;
}
/* The slider */
.slider {
position: absolute;
cursor: pointer;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: #ccc;
-webkit-transition: .4s;
transition: .4s;
}
.slider:before {
position: absolute;
content: "";
height: 26px;
width: 26px;
left: 4px;
bottom: 4px;
background-color: white;
-webkit-transition: .4s;
transition: .4s;
}
input:checked + .slider {
background-color: #2196F3;
}
input:focus + .slider {
box-shadow: 0 0 1px #2196F3;
}
input:checked + .slider:before {
-webkit-transform: translateX(26px);
-ms-transform: translateX(26px);
transform: translateX(26px);
}
/* Rounded sliders */
.slider.round {
border-radius: 34px;
}
.slider.round:before {
border-radius: 50%;
}
</style>

View File

@@ -0,0 +1,19 @@
// Build a fake "microphone" MediaStream by routing a live oscillator through
// a MediaStreamDestination and copying its audio tracks onto a fresh stream.
export function createMockMediaStream (): MediaStream {
  const mockStream = new MediaStream()
  const ctx = new AudioContext()
  const tone = ctx.createOscillator()
  const destination = ctx.createMediaStreamDestination()
  tone.connect(destination)
  tone.start()
  for (const track of destination.stream.getAudioTracks()) {
    mockStream.addTrack(track)
  }
  // Usable anywhere a real microphone MediaStream is expected.
  return mockStream
}

View File

@@ -0,0 +1,117 @@
<template>
  <div class="noise-controlled-band">
    <!-- String ref so `audioElement` is actually assigned; the previous
         function ref (`:ref="el => audioElement"`) ignored `el` and left
         the ref permanently null. -->
    <AudioTag
      ref="audioElement"
      :src="audioSrc"
      :volume="volume"
      :play="playing"
      @canplay="onCanPlay"
    />
    <RNBOControlValue
      :center-frequency="centerFrequency"
      :status="playing"
      :q-factor="$props.qFactor"
      :attack="masterAttack"
      :release="masterRelease"
      @control-value-change="handleValueChange"
    />
  </div>
</template>
<script lang="ts">
import { defineComponent, ref, computed } from 'vue'
import AudioTag from '../../AudioTag.vue'
// Path normalized: the original contained a doubled slash ('../..//tests').
import RNBOControlValue from '../../tests/ControlValues/RNBOControlValue.vue'
import tracksConfig from '~/tracks.config'
import { useAudioStore } from '~/stores/audio'
import { calculateNormalizedVolume } from '~/lib/AudioFunctions'

/**
 * NoiseControlledBand — plays one masking-noise band (low/mid/high, chosen
 * from centerFrequency) and drives its volume from the RNBO control-value
 * device output.
 *
 * Emits:
 *  - 'ready'              once the underlying <audio> can play
 *  - 'update:mid-volume'  whenever a new normalized volume is computed
 */
export default defineComponent({
  name: 'NoiseControlledBand',
  components: {
    AudioTag,
    RNBOControlValue
  },
  props: {
    /** Band center frequency in Hz; selects the low/mid/high source file. */
    centerFrequency: {
      type: Number,
      required: true
    },
    /** Filter quality factor, forwarded to the RNBO control device. */
    qFactor: {
      type: Number,
      required: true
    },
    /** Attack time in samples — assumed; TODO confirm unit against the patch. */
    masterAttack: {
      type: Number,
      default: 120000,
      required: false
    },
    /** Release time in samples — assumed; TODO confirm unit against the patch. */
    masterRelease: {
      type: Number,
      default: 144000,
      required: false
    }
  },
  emits: ['ready', 'update:mid-volume'],
  setup (props, { emit }) {
    const audioElement = ref<InstanceType<typeof HTMLElement> | null>(null)
    // Playback state is driven entirely by the global audio store.
    const playing = computed(() => { return useAudioStore().playing })
    const gainValueDB = ref(0)
    const volume = ref(0)
    // Resolve the banded source file URL from the center frequency.
    const audioSrc = computed(() => {
      try {
        const frequency = props.centerFrequency
        let band = ''
        if (frequency < 500) {
          band = 'low_band'
        } else if (frequency >= 500 && frequency < 4000) {
          band = 'mid_band'
        } else {
          band = 'high_band'
        }
        const path = `/masking/3bands/${band}_256kbps.webm`
        const fullPath = `${window.location.origin}${encodeURI(path)}`
        useNuxtApp().$logger.info('Loading audio track:', fullPath)
        return fullPath
      } catch (error) {
        useNuxtApp().$logger.error('Error loading audio track:', error)
        return ''
      }
    })
    // Convert the device's dB control value into a volume and notify the parent.
    const handleValueChange = (data: { frequency: number; value: number }) => {
      gainValueDB.value = calculateNormalizedVolume(data.value)
      volume.value = gainValueDB.value
      emit('update:mid-volume', volume.value)
    }
    const onCanPlay = () => {
      emit('ready', props.centerFrequency)
    }
    return {
      audioElement,
      audioSrc,
      gainValueDB,
      volume,
      handleValueChange,
      onCanPlay,
      playing
    }
  }
})
</script>
<style scoped>
.noise-controlled-band {
  border: 1px solid #ccc;
  padding: 10px;
  margin-bottom: 10px;
  border-radius: 5px;
}
</style>

View File

@@ -0,0 +1,105 @@
<template>
  <div class="noise-controlled-band">
    <!-- String ref so `audioElement` is actually assigned; the previous
         function ref (`:ref="el => audioElement"`) ignored `el` and left
         the ref permanently null. -->
    <AudioTag
      ref="audioElement"
      :src="audioSrc"
      :volume="volume"
      :play="playing"
      @canplay="onCanPlay"
    />
    <RNBOControlValue
      :center-frequency="centerFrequency"
      :status="playing"
      :attack="masterAttack"
      :release="masterRelease"
      @control-value-change="handleValueChange"
    />
  </div>
</template>
<script lang="ts">
import { defineComponent, ref, computed } from 'vue'
import AudioTag from '../../AudioTag.vue'
import RNBOControlValue from '../../tests/ControlValues/RNBOControlValue.vue'
import tracksConfig from '~/tracks.config'
import { useAudioStore } from '~/stores/audio'
import { calculateNormalizedVolume } from '~/lib/AudioFunctions'

/**
 * NoiseControlledBand — plays the masking track configured for one center
 * frequency (looked up in tracks.config) and drives its volume from the
 * RNBO control-value device output.
 *
 * Emits 'ready' once the underlying <audio> can play.
 */
export default defineComponent({
  name: 'NoiseControlledBand',
  components: {
    AudioTag,
    RNBOControlValue
  },
  props: {
    /** Band center frequency in Hz; keys the lookup into tracks.config. */
    centerFrequency: {
      type: Number,
      required: true
    },
    /** Attack time in samples — assumed; TODO confirm unit against the patch. */
    masterAttack: {
      type: Number,
      default: 120000,
      required: false
    },
    /** Release time in samples — assumed; TODO confirm unit against the patch. */
    masterRelease: {
      type: Number,
      default: 144000,
      required: false
    }
  },
  emits: ['ready'],
  setup (props, { emit }) {
    const audioElement = ref<InstanceType<typeof HTMLElement> | null>(null)
    // Playback state is driven entirely by the global audio store.
    const playing = computed(() => { return useAudioStore().playing })
    const gainValueDB = ref(0)
    const volume = ref(0)
    // Resolve the configured track URL for this frequency, or '' on failure.
    const audioSrc = computed(() => {
      try {
        const frequencyKey = `${props.centerFrequency}_src`
        useNuxtApp().$logger.log('Loading audio track:', frequencyKey)
        const trackPath = tracksConfig[frequencyKey as keyof typeof tracksConfig]
        if (!trackPath) {
          throw new Error(`No track found for frequency ${props.centerFrequency}`)
        }
        const returnValue = `${window.location.origin}${encodeURI(trackPath)}`
        return returnValue
      } catch (error) {
        useNuxtApp().$logger.error('Error loading audio track:', error)
        return ''
      }
    })
    // Convert the device's dB control value into the element volume.
    const handleValueChange = (data: { frequency: number; value: number }) => {
      gainValueDB.value = calculateNormalizedVolume(data.value)
      volume.value = gainValueDB.value
    }
    const onCanPlay = () => {
      useNuxtApp().$logger.log(`Audio for frequency ${props.centerFrequency} is ready to play`)
      emit('ready', props.centerFrequency)
    }
    return {
      audioElement,
      audioSrc,
      gainValueDB,
      volume,
      handleValueChange,
      onCanPlay,
      playing
    }
  }
})
</script>
<style scoped>
.noise-controlled-band {
  border: 1px solid #ccc;
  padding: 10px;
  margin-bottom: 10px;
  border-radius: 5px;
}
</style>

View File

@@ -0,0 +1,121 @@
<template>
<div v-show="false" id="hiddenAudio">
<AudioTagWebAudio
ref="audioElement"
:src="audioSrc"
:volume="volume"
:play="playing"
:master-gain="masterGain"
@canplay="onCanPlay"
/>
<div class="noise-controlled-band">
Gain: {{ gainValueDB }}
<RNBOControlValue
:center-frequency="centerFrequency"
:status="playing"
:attack="masterAttack"
:release="masterRelease"
:q-factor="qFactor"
@control-value-change="handleValueChange"
/>
</div>
</div>
</template>
<script lang="ts">
import { defineComponent, ref, computed } from 'vue'
import type { Logger } from 'pino'
import RNBOControlValue from '../../tests/ControlValues/RNBOControlValue.vue'
import AudioTagWebAudio from '../../AudioTagWebAudio.vue'
import { calculateNormalizedVolume } from '~/lib/AudioFunctions'
import { useAudioStore } from '~/stores/audio'

/**
 * NoiseControlledWebAudio3Band — plays one of three banded masking files
 * through a Web-Audio AudioTag wired into the shared master GainNode, with
 * its level driven by the RNBO control-value device.
 *
 * Emits 'ready' once the underlying audio can play.
 */
export default defineComponent({
  name: 'NoiseControlledWebAudio3Band',
  components: {
    AudioTagWebAudio,
    RNBOControlValue
  },
  props: {
    /** Band center frequency in Hz; selects the low/mid/high source file. */
    centerFrequency: {
      type: Number,
      required: true
    },
    /** Shared master GainNode the AudioTag connects into. */
    masterGain: {
      type: GainNode,
      required: true
    },
    // NOTE(review): default differs from the sibling components (120000) —
    // confirm `120000 * 2` is intentional.
    masterAttack: {
      type: Number,
      default: 120000 * 2,
      required: false
    },
    // NOTE(review): `144000 / 1000` = 144, three orders of magnitude below
    // the sibling components' 144000 — confirm this is intentional.
    masterRelease: {
      type: Number,
      default: 144000 / 1000,
      required: false
    },
    /** Filter quality factor, forwarded to the RNBO control device. */
    qFactor: {
      type: Number,
      required: true
    }
  },
  emits: ['ready'],
  setup (props, { emit }) {
    const audioElement = ref<InstanceType<typeof HTMLElement> | null>(null)
    const gainValueDB = ref(0)
    const volume = ref(1)
    // Playback state is driven entirely by the global audio store.
    const playing = computed(() => { return useAudioStore().getPlaying })
    const logger = useNuxtApp().$logger as Logger
    // Resolve the banded source file URL from the center frequency.
    const audioSrc = computed(() => {
      try {
        const frequency = props.centerFrequency
        let band = ''
        if (frequency < 500) {
          band = 'low_band'
        } else if (frequency >= 500 && frequency < 4000) {
          band = 'mid_band'
        } else {
          band = 'high_band'
        }
        const path = `/masking/3bands/${band}_256kbps.webm`
        const fullPath = `${window.location.origin}${encodeURI(path)}`
        return fullPath
      } catch (error) {
        // Was silently swallowed before; at least record why resolution failed.
        logger.error('Error resolving audio source:', error)
        return ''
      }
    })
    // Convert the device's dB control value into the element volume.
    const handleValueChange = (data: { frequency: number; value: number }) => {
      gainValueDB.value = calculateNormalizedVolume(data.value)
      volume.value = gainValueDB.value
    }
    const onCanPlay = () => {
      logger.info(`Audio for frequency ${props.centerFrequency} is ready to play`)
      emit('ready', props.centerFrequency)
    }
    return {
      audioElement,
      audioSrc,
      gainValueDB,
      volume,
      handleValueChange,
      onCanPlay,
      playing
    }
  }
})
</script>
<style scoped>
.noise-controlled-band {
border: 1px solid #ccc;
padding: 10px;
margin-bottom: 10px;
border-radius: 5px;
}
</style>

View File

@@ -0,0 +1,111 @@
<template>
<div class="noise-controlled-band">
<AudioTagWebAudio
ref="audioElement"
:src="audioSrc"
:volume="volume"
:play="playing"
:master-gain="masterGain"
@canplay="onCanPlay"
/>
Gain: {{ gainValueDB }}
<RNBOControlValue
:center-frequency="centerFrequency"
:status="playing"
:attack="masterAttack"
:release="masterRelease"
@control-value-change="handleValueChange"
/>
</div>
</template>
<script lang="ts">
import { defineComponent, ref, computed } from 'vue'
import type { Logger } from 'pino'
// Path normalized: the original contained a doubled slash ('../..//tests').
import RNBOControlValue from '../../tests/ControlValues/RNBOControlValue.vue'
import AudioTagWebAudio from '../../AudioTagWebAudio.vue'
import tracksConfig from '~/tracks.config'
import { calculateNormalizedVolume } from '~/lib/AudioFunctions'
import { useAudioStore } from '~/stores/audio'

/**
 * NoiseControlledWebAudioBand — plays the masking track configured for one
 * center frequency (looked up in tracks.config) through a Web-Audio AudioTag
 * wired into the shared master GainNode, with its level driven by the RNBO
 * control-value device.
 *
 * Emits 'ready' once the underlying audio can play.
 */
export default defineComponent({
  name: 'NoiseControlledWebAudioBand',
  components: {
    AudioTagWebAudio,
    RNBOControlValue
  },
  props: {
    /** Band center frequency in Hz; keys the lookup into tracks.config. */
    centerFrequency: {
      type: Number,
      required: true
    },
    /** Shared master GainNode the AudioTag connects into. */
    masterGain: {
      type: GainNode,
      required: true
    },
    /** Attack time in samples — assumed; TODO confirm unit against the patch. */
    masterAttack: {
      type: Number,
      default: 120000,
      required: false
    },
    /** Release time in samples — assumed; TODO confirm unit against the patch. */
    masterRelease: {
      type: Number,
      default: 144000,
      required: false
    }
  },
  emits: ['ready'],
  setup (props, { emit }) {
    const audioElement = ref<InstanceType<typeof HTMLElement> | null>(null)
    const gainValueDB = ref(0)
    const volume = ref(1)
    // Playback state is driven entirely by the global audio store.
    const playing = computed(() => { return useAudioStore().playing })
    const logger = useNuxtApp().$logger as Logger
    // Resolve the configured track URL for this frequency, or '' on failure.
    const audioSrc = computed(() => {
      try {
        const frequencyKey = `${props.centerFrequency}_src`
        logger.info('Loading audio track:', frequencyKey)
        const trackPath = tracksConfig[frequencyKey as keyof typeof tracksConfig]
        if (!trackPath) {
          throw new Error(`No track found for frequency ${props.centerFrequency}`)
        }
        const returnValue = `${window.location.origin}${encodeURI(trackPath)}`
        return returnValue
      } catch (error) {
        logger.error('Error loading audio track:', error)
        return ''
      }
    })
    // Convert the device's dB control value into the element volume.
    const handleValueChange = (data: { frequency: number; value: number }) => {
      gainValueDB.value = calculateNormalizedVolume(data.value)
      volume.value = gainValueDB.value
    }
    const onCanPlay = () => {
      logger.info(`Audio for frequency ${props.centerFrequency} is ready to play`)
      emit('ready', props.centerFrequency)
    }
    return {
      audioElement,
      audioSrc,
      gainValueDB,
      volume,
      handleValueChange,
      onCanPlay,
      playing
    }
  }
})
</script>
<style scoped>
.noise-controlled-band {
border: 1px solid #ccc;
padding: 10px;
margin-bottom: 10px;
border-radius: 5px;
}
</style>

View File

@@ -0,0 +1,316 @@
<!--
RNBOControlValue ist eine Komponente zur Steuerung und Überwachung von Audio-Parametern,
insbesondere für die Kontrolle von Frequenzbändern. Sie ermöglicht das Testen von
Audio-Geräten, die Anpassung von Parametern und die Anzeige von Kontrollwerten in Echtzeit.
-->
<template>
<div v-if="true">
<h2>Control Values Device Test -- {{ centerFrequency }}Hz</h2>
<button @click="testControlValuesDevice">Test Control Values Device</button>
<p v-if="testResult">{{ testResult }}</p>
<div class="microphone-info">
<h3>Current Microphone:</h3>
<p>{{ currentMicrophone || 'No microphone selected' }}</p>
</div>
<table v-if="parameters.length > 0" class="parameter-table">
<thead>
<tr>
<th>Parameter Name</th>
<th>Value</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr v-for="param in parameters" :key="param.name">
<td>{{ param.name }}</td>
<td>
<input
:value="param.value"
type="number"
@input="updateParameter(param.name, $event.target?.value)"
>
</td>
<td>
<input
:value="param.value"
type="range"
:min="param.min"
:max="param.max"
:step="param.step"
@input="updateParameter(param.name, $event.target?.value)"
>
</td>
</tr>
</tbody>
</table>
<!-- Neue Keynote für den Outlet-Wert -->
<div class="outlet-keynote">
<h3>Control Value</h3>
<div class="outlet-value">{{ formatValue(outletValue) }}</div>
</div>
</div>
</template>
<script lang="ts">
import { defineComponent } from 'vue'
import { mapActions, mapState } from 'pinia'
import { useDevicesStore } from '~/stores/device'
import { useMicStore } from '~/stores/microphone'
import { useAudioStore } from '~/stores/audio'

// One RNBO device parameter row as rendered in the table.
// paramId/min/max/step are optional: parameters seeded *before* the device
// exists (see updateParameter's fallback branch) only carry name + value,
// and the device's parameter objects never supplied paramId here.
interface ParameterData {
  name: string;
  paramId?: string;
  value: number;
  min?: number;
  max?: number;
  step?: number;
}

// Valid range (in samples) for the attack/release props.
const minSamples = 48
const maxSamples = 1920000

/**
 * RNBOControlValue — creates an RNBO "control values" device for one
 * frequency band, feeds it the microphone signal, mirrors its parameters in
 * an editable table, and re-emits the device's outlet value as
 * 'control-value-change' events.
 */
export default defineComponent({
  name: 'RNBOControlValue',
  props: {
    /** Band center frequency in Hz; must be one of the supported bands. */
    centerFrequency: {
      type: Number,
      required: true,
      validator: (value: number) => [63, 125, 250, 500, 1000, 1500, 2000, 4000, 8000, 16000, 150].includes(value)
    },
    qFactor: {
      type: Number,
      default: 1.414,
      validator: (value: number) => value > 0.1 && value < 1.5
    },
    /** Attack in samples, clamped to [minSamples, maxSamples] by the validator. */
    attack: {
      type: Number,
      default: 120000,
      validator: (value:number) => value >= minSamples && value <= maxSamples
    },
    /** Release in samples, clamped to [minSamples, maxSamples] by the validator. */
    release: {
      type: Number,
      default: 144000,
      validator: (value:number) => value >= minSamples && value <= maxSamples
    },
    /** Bound to the audio store's playing flag by the parent components. */
    status: {
      type: Boolean,
      default: false
    }
  },
  emits: ['control-value-change'],
  data () {
    return {
      testResult: '',
      parameters: [] as ParameterData[],
      device: null as any, // RNBO device handle; untyped upstream
      outletValue: 0,
      currentMicrophone: ''
    }
  },
  computed: {
    ...mapState(useMicStore, ['microphone'])
  },
  watch: {
    // Forward prop changes into the live device parameters.
    release (value) {
      this.updateParameter('release', '' + value)
    },
    attack (value) {
      if (!isNaN(value)) {
        this.updateParameter('attack', '' + value)
      }
    },
    // status mirrors the audio store's playing flag: attach the microphone
    // and (re)build the device on start, detach on stop.
    status (newStatus) {
      const microphoneStore = useMicStore()
      if (newStatus) {
        microphoneStore.attachMicrophone()
        this.testControlValuesDevice()
      } else {
        microphoneStore.detachMicrophone()
      }
    }
  },
  mounted () {
    // Seed the local parameter table with the current prop values so the
    // UI shows them even before the device is created.
    this.updateParameter('centerFrequency', '' + this.centerFrequency)
    this.updateParameter('release', '' + this.release)
    this.updateParameter('attack', '' + this.attack)
    this.updateParameter('qFactor', String(this.qFactor))
  },
  methods: {
    /** Resolve and display the label of the currently attached microphone. */
    async getCurrentMicrophone () {
      const micStore = useMicStore()
      try {
        const micro = await micStore.getMicrophone()
        if (micro?.microphoneNode.label) {
          this.currentMicrophone = micro.microphoneNode.label
        } else {
          this.currentMicrophone = 'No microphone detected'
        }
      } catch (error) {
        useNuxtApp().$logger.error('Error getting microphone:', error)
        this.currentMicrophone = 'Error detecting microphone'
      }
    },
    /**
     * Create the control-values device for this band, connect the microphone
     * to it, sync the parameter table and start listening on its outlet.
     */
    async testControlValuesDevice () {
      const deviceStore = useDevicesStore()
      const micStore = useMicStore()
      const audioStore = useAudioStore()
      await audioStore.getContext()
      if (!audioStore) { return }
      if (audioStore.audioContext?.state !== 'running') {
        await audioStore.audioContext?.resume()
      }
      this.device = await deviceStore.createControlValuesDevice(`testControlValues_${this.centerFrequency}Hz`, this.centerFrequency)
      if (!this.device) {
        this.testResult = `Failed to create control values device for ${this.centerFrequency}Hz`
        // Bail out: everything below dereferences this.device.
        return
      }
      const microphone = await micStore.getMicrophone()
      try {
        const micSource = microphone.microphoneNode
        if (micSource && this.device.node && audioStore.audioContext) {
          micSource.connect(this.device.node)
          await this.updateParameterList()
          // Give the device a moment to settle before subscribing.
          await new Promise(resolve => setTimeout(resolve, 100))
          this.setupOutletListener()
          this.testResult = `Control values device for ${this.centerFrequency}Hz created successfully`
        } else {
          this.testResult = `Failed to connect microphone to device for ${this.centerFrequency}Hz`
        }
      } catch (error) {
        this.testResult = `Test failed for ${this.centerFrequency}Hz: ${error instanceof Error ? error.message : String(error)}`
      }
    },
    /** Subscribe to the device's 'out1' outlet and re-emit its values. */
    setupOutletListener () {
      try {
        if (!this.device) {
          return
        }
        this.device.messageEvent.subscribe((ev: any) => {
          if (ev.tag === 'out1') {
            const newValue = this.ensureNumber(ev.payload)
            this.outletValue = newValue
            // Values in (-13, -12.98) are suppressed — presumably the
            // device's idle/noise-floor output; TODO confirm with the patch.
            if (!(newValue > -13 && newValue < -12.98)) {
              this.$emit('control-value-change', { frequency: this.centerFrequency, value: newValue })
            }
          }
        })
      } catch (error: unknown) {
        this.testResult = `Test failed for ${this.centerFrequency}Hz: ${error instanceof Error ? error.message : String(error)}`
      }
    },
    /** Mirror the device's parameter map into the local table. */
    updateParameterList () {
      if (this.device && this.device.parameters) {
        const params = this.device.parameters
        this.parameters = Array.from(params.entries()).map(([name, param]: [string, any]) => {
          return {
            name: param.name,
            value: this.ensureNumber(param.value),
            min: this.ensureNumber(param.min),
            max: this.ensureNumber(param.max),
            step: this.ensureNumber(param.step) || 0.01
          }
        })
      }
    },
    /**
     * Write a parameter both into the local table and — if present — into the
     * device, whatever container shape its `parameters` happens to expose.
     */
    updateParameter (name: string, value: string) {
      if (this.device && this.device.parameters) {
        const numValue = this.ensureNumber(value)
        const param = this.parameters.find(p => p.name === name)
        if (param) {
          param.value = numValue
          if (typeof this.device.parameters.set === 'function') {
            this.device.parameters.set(name, numValue)
          } else if (Array.isArray(this.device.parameters)) {
            const deviceParam = this.device.parameters.find((p: any) => p.name === name)
            if (deviceParam) {
              deviceParam.value = numValue
            }
          } else if (typeof this.device.parameters === 'object') {
            if (this.device.parameters[name]) {
              this.device.parameters[name].value = numValue
            }
          }
          this.$forceUpdate()
        }
      } else {
        // Device not created yet → update only the local parameter table.
        const param = this.parameters.find(p => p.name === name)
        if (param) {
          param.value = this.ensureNumber(value)
        } else {
          this.parameters.push({ name, value: this.ensureNumber(value) })
        }
        this.$forceUpdate()
      }
    },
    /** Coerce anything to a finite number, defaulting to 0. */
    ensureNumber (value: any): number {
      const num = Number(value)
      return isNaN(num) ? 0 : num
    },
    /** Display helper: fixed two decimal places. */
    formatValue (value: number): string {
      return value.toFixed(2)
    }
  }
})
</script>
<style scoped>
.parameter-table {
width: 100%;
border-collapse: collapse;
margin-top: 20px;
}
.parameter-table th, .parameter-table td {
border: 1px solid #ddd;
padding: 8px;
text-align: left;
}
.parameter-table th {
background-color: #f2f2f2;
}
.parameter-table input[type="range"] {
width: 100%;
}
.outlet-keynote {
text-align: center;
margin-top: 30px;
}
.outlet-keynote h3 {
font-size: 1.2em;
margin-bottom: 10px;
}
.outlet-value {
font-size: 2em;
font-weight: bold;
}
</style>

View File

@@ -0,0 +1,75 @@
<template>
<div v-show="false">
<h1>Microphone</h1>
<button @click="attach">
{{ microphoneActive ? 'Mikrofon trennen' : 'Mikrofon aktivieren' }}
</button>
</div>
</template>
<script lang="ts">
// NOTE(review): onMounted and useMicStore are imported but unused here —
// confirm whether they are leftovers or planned usage.
import { ref, onMounted, onUnmounted, watch } from 'vue'
import { useAudioStore } from '~/stores/audio'
import { useMicStore } from '~/stores/microphone'
export default {
  name: 'MicrophoneHandler',
  emits: ['update:attach'],
  setup (_props, { emit }) {
    const audioStore = useAudioStore()
    // Holds the in-flight getUserMedia promise (not the resolved stream),
    // so concurrent attach() calls share one permission request.
    const microphone = ref<Promise<MediaStream> | null>(null)
    const microphoneActive = ref(false)
    /**
     * Request the microphone (raw signal: all browser processing disabled)
     * and emit the resulting stream via 'update:attach'.
     * Idempotent: reuses the existing stream promise if already attached.
     */
    const attach = () => {
      if (!microphone.value) {
        microphone.value = navigator.mediaDevices.getUserMedia({
          audio: {
            echoCancellation: false,
            noiseSuppression: false,
            autoGainControl: false
          },
          video: false
        })
        microphoneActive.value = true
      }
      return microphone.value.then((stream) => {
        emit('update:attach', stream)
        return stream
      })
    }
    /** Stop all tracks and release the microphone. Safe to call repeatedly. */
    const detach = async () => {
      if (microphone.value) {
        try {
          const stream = await microphone.value
          stream.getTracks().forEach(track => track.stop())
        } catch (error) {
          // Best-effort cleanup: ignore failures (e.g. permission was denied,
          // so there is no stream to stop).
        }
        microphone.value = null
        microphoneActive.value = false
      }
    }
    // Follow the global playing state: mic on while audio plays, off otherwise.
    watch(() => audioStore.playing, (newValue) => {
      if (newValue) {
        attach()
      } else {
        detach()
      }
    })
    onUnmounted(() => {
      // Clean up by detaching the microphone when the component is unmounted
      detach()
    })
    // Return the public properties and methods
    return {
      attach,
      detach,
      isPlaying: () => audioStore.playing,
      microphoneActive
    }
  }
}
</script>

View File

@@ -0,0 +1,83 @@
<template>
<h1>Test Version NoiseMusicGain: mit WebAudio & Gain, ohne Noise-Patch & ohne Music-Patch</h1>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'

/**
 * Test harness: wires the Noise and Forest <audio> elements into the Web
 * Audio graph through one GainNode each, so their volumes can be ramped
 * independently of the element volume.
 */
export default {
  name: 'NoiseMusicGain',
  components: { AudioElement },
  data () {
    return {
      audioContext: useAudioStore().getContext(),
      playing: false,
      paused: false,
      createdNodes: {} as any, // lazily-built WebAudio nodes, keyed by role
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src,
      forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src
    }
  },
  methods: {
    /** Build (or re-attach) the audio graph when the music element starts. */
    handlePlayingUpdate () {
      const noiseElement = this.$refs.Noise as typeof AudioElement
      const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
      const musicElement = this.$refs.Music as typeof AudioElement
      const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
      const audioContext = this.audioContext
      const destination = this.audioContext.destination
      this.createdNodes.musicGain ||= audioContext.createGain()
      this.createdNodes.noiseGain ||= audioContext.createGain()
      // createMediaElementSource may be called at most once per element —
      // a second call throws InvalidStateError — so cache the sources too.
      this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
      this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
      this.createdNodes.noiseGain.connect(destination)
      this.createdNodes.musicGain.connect(destination)
      // Elements start muted (levels are controlled in WebAudio); unmute now.
      musicAudioElement.muted = false
      noiseAudioElement.muted = false
    },
    /** Ramp noise gain to `volume` over 300 ms; no-op before the graph exists. */
    updateNoiseGain (volume: number) {
      this.createdNodes.noiseGain?.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
    },
    /** Ramp music gain to `volume` over 300 ms; no-op before the graph exists. */
    updateMusicGain (volume: number) {
      this.createdNodes.musicGain?.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
    }
  }
}
</script>

View File

@@ -0,0 +1,99 @@
<template>
<h1>Test Version NoiseMusicGain and FadeIn: mit WebAudio & Gain, ohne Noise-Patch & ohne Music-Patch</h1>
<h2> Obwohl die Methode linearRampToValueAtTime verwendet wird, startet das audio einfach nach der eingestellten Zeit ohne fade</h2>
<button @click="fadeInGains">
Trigger FadeIn
</button>
<button @click="fadeOutGains">
Trigger FadeOut
</button>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
>
<template #default="{ }">
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</template>
</AudioElement>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'

/**
 * Test harness: wires the Noise and Forest <audio> elements into the Web
 * Audio graph through one GainNode each and fades both in on start; the
 * fade buttons trigger the same ramps manually.
 */
export default {
  name: 'NoiseMusicGain',
  components: { AudioElement },
  data () {
    return {
      audioContext: useAudioStore().getContext(),
      playing: false,
      paused: false,
      createdNodes: {} as any, // lazily-built WebAudio nodes, keyed by role
      forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string
    }
  },
  methods: {
    /** Build (or re-attach) the graph at zero gain, then fade both tracks in. */
    handlePlayingUpdate () {
      const noiseElement = this.$refs.Noise as typeof AudioElement
      const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
      const musicElement = this.$refs.Music as typeof AudioElement
      const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
      const audioContext = this.audioContext
      const destination = this.audioContext.destination
      this.createdNodes.musicGain ||= audioContext.createGain()
      this.createdNodes.noiseGain ||= audioContext.createGain()
      // Start silent so the subsequent ramps fade in from zero.
      this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
      this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
      musicAudioElement.muted = false
      noiseAudioElement.muted = false
      // createMediaElementSource may be called at most once per element —
      // a second call throws InvalidStateError — so cache the sources too.
      this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
      this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
      this.createdNodes.noiseGain.connect(destination)
      this.createdNodes.musicGain.connect(destination)
      this.fadeInGains()
    },
    /** Ramp noise up over 7 s and music over 5 s. No-op before the graph exists. */
    fadeInGains () {
      const noiseGain = this.createdNodes.noiseGain
      const musicGain = this.createdNodes.musicGain
      if (!noiseGain || !musicGain) { return }
      noiseGain.gain.linearRampToValueAtTime(1.0, noiseGain.context.currentTime + 7)
      musicGain.gain.linearRampToValueAtTime(1.0, musicGain.context.currentTime + 5)
    },
    /** Ramp both gains to silence over 3 s. No-op before the graph exists. */
    fadeOutGains () {
      this.createdNodes.noiseGain?.gain.linearRampToValueAtTime(0, this.createdNodes.noiseGain.context.currentTime + 3)
      this.createdNodes.musicGain?.gain.linearRampToValueAtTime(0, this.createdNodes.musicGain.context.currentTime + 3)
    },
    /** Ramp noise gain to `volume` over 300 ms; no-op before the graph exists. */
    updateNoiseGain (volume: number) {
      this.createdNodes.noiseGain?.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
    },
    /** Ramp music gain to `volume` over 300 ms; no-op before the graph exists. */
    updateMusicGain (volume: number) {
      this.createdNodes.musicGain?.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
    }
  }
}
</script>

View File

@@ -0,0 +1,90 @@
<template>
<h1>Test Version NoiseMusicGain: mit WebAudio & Gain, ohne Noise-Patch & ohne Music-Patch</h1>
<MicrophoneHandler ref="Microphone" @update:attach="setupMicrophone" />
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
import MicrophoneHandler from '../tests/Microphone.vue'

/**
 * Test harness: like the plain NoiseMusicGain test, plus a microphone whose
 * stream is routed straight to the output for monitoring.
 */
export default {
  name: 'NoiseMusicGain',
  components: { AudioElement, MicrophoneHandler },
  data () {
    return {
      audioContext: useAudioStore().getContext(),
      playing: false,
      paused: false,
      createdNodes: {} as any, // lazily-built WebAudio nodes, keyed by role
      forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string
    }
  },
  methods: {
    /** Route the attached microphone stream directly to the speakers. */
    setupMicrophone (stream:MediaStream) {
      this.createdNodes.microphone = this.audioContext.createMediaStreamSource(stream)
      this.createdNodes.microphone.connect(this.audioContext.destination)
    },
    /** Build (or re-attach) the audio graph when the music element starts. */
    handlePlayingUpdate () {
      const noiseElement = this.$refs.Noise as typeof AudioElement
      const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
      const musicElement = this.$refs.Music as typeof AudioElement
      const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
      const audioContext = this.audioContext
      const destination = this.audioContext.destination
      this.createdNodes.musicGain ||= audioContext.createGain()
      this.createdNodes.noiseGain ||= audioContext.createGain()
      // createMediaElementSource may be called at most once per element —
      // a second call throws InvalidStateError — so cache the sources too.
      this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
      this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
      this.createdNodes.noiseGain.connect(destination)
      this.createdNodes.musicGain.connect(destination)
      // Elements start muted (levels are controlled in WebAudio); unmute now.
      musicAudioElement.muted = false
      noiseAudioElement.muted = false
    },
    /** Ramp noise gain to `volume` over 300 ms; no-op before the graph exists. */
    updateNoiseGain (volume: number) {
      this.createdNodes.noiseGain?.gain.linearRampToValueAtTime(volume, this.createdNodes.noiseGain.context.currentTime + 0.30)
    },
    /** Ramp music gain to `volume` over 300 ms; no-op before the graph exists. */
    updateMusicGain (volume: number) {
      this.createdNodes.musicGain?.gain.linearRampToValueAtTime(volume, this.createdNodes.musicGain.context.currentTime + 0.30)
    }
  }
}
</script>

View File

@@ -0,0 +1,220 @@
<template>
<h1>Test Version NoiseMusicGain: mit WebAudio & Gain und PlayPause, ohne Noise-Patch & ohne Music-Patch</h1>
<h2>Play State: {{ playing }} </h2>
<p>
The method refreshAudioContext helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
</p>
<p>
Whenever I view this page the audio starts playing, when I hit 'space' it fades out within 2seconds
when i start playing again nothing happens... I would expect playing.
</p>
<div v-if="createdNodes.musicGain">
{{ musicGain }}
</div>
<div v-else>
No MusicGain
</div>
<div v-if="createdNodes.noiseGain">
{{ noiseGain }}
</div>
<div v-else>
No noiseGain
</div>
<AudioElement
ref="Noise"
key="5"
:src="noise_src"
title="Noise"
@update:volume="updateNoiseGain"
@update:loaded="noiseReady=true"
>
<template #default="{}">
<div class="icon">
<img style="width: 25px" src="~/assets/image/noiseicon.svg">
</div>
</template>
</AudioElement>
<AudioElement
ref="Music"
key="1"
:src="forest_src"
title="Forest"
@update:volume="updateMusicGain"
@update:playing="handlePlayingUpdate"
@update:fadeout="fadeOutGains"
@update:loaded="musicReady=true"
>
<template #default="{ }">
<div class="icon">
<!-- tropic icon -->
<img style="width: 25px" src="~/assets/image/musicicon.svg">
</div>
</template>
<!-- Slot content for AudioElement, if needed -->
</AudioElement>
</template>
<script lang="ts">
import AudioElement from '../AudioElement2.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
name: 'NoiseMusicGain',
components: { AudioElement },
data () {
return {
audioContext: useAudioStore().getContext(),
playing: false,
paused: false,
createdNodes: {} as any,
fading: false,
noiseReady: false,
musicReady: false,
musicGain: 0,
noiseGain: 0,
forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src as string,
noise_src: window.location.origin + useRuntimeConfig().public.noise_src as string
}
},
mounted () {
this.monitorGainNodes()
},
methods: {
monitorGainNodes () {
// This could be an interval or a direct method call in your gain changing methods
setInterval(() => {
if (this.createdNodes.musicGain) {
this.musicGain = this.createdNodes.musicGain.gain.value
}
if (this.createdNodes.noiseGain) {
this.noiseGain = this.createdNodes.noiseGain.gain.value
}
}, 100) // Update every 100 ms, adjust interval as necessary
},
// This method helps to get the ressources free when we stop playing the audio
// without it would be louder each time we start playing
refreshAudioContext () {
const newAudioContext = new AudioContext()
this.audioContext.close()
useAudioStore().audioContext = newAudioContext
this.audioContext = useAudioStore().getContext()
},
fadeOutGains () {
// useNuxtApp().$logger.log('Fade OUT Gains')
// Define the duration of the fade out
const fadeDuration = 2.0 // 2 seconds for fade out
const currentTime = this.audioContext.currentTime
const fadeEndTime = currentTime + fadeDuration
this.fading = true
if (this.createdNodes.noiseGain) {
// Cancel scheduled values to clear any previous scheduled changes
this.createdNodes.noiseGain.gain.cancelScheduledValues(currentTime)
// Set the current value
this.createdNodes.noiseGain.gain.setValueAtTime(this.createdNodes.noiseGain.gain.value, currentTime)
// Schedule the fade out
this.createdNodes.noiseGain.gain.linearRampToValueAtTime(0, fadeEndTime)
this.noiseGain = 0
}
if (this.createdNodes.musicGain) {
// Cancel scheduled values to clear any previous scheduled changes
this.createdNodes.musicGain.gain.cancelScheduledValues(currentTime)
// Set the current value
this.createdNodes.musicGain.gain.setValueAtTime(this.createdNodes.musicGain.gain.value, currentTime)
// Schedule the fade out
this.createdNodes.musicGain.gain.linearRampToValueAtTime(0, fadeEndTime)
this.playing = false
}
setTimeout(() => {
this.fading = false
}, fadeDuration * 1000)
},
// Ramps both channel gains up to full volume over a fixed six-second window.
// Mirrors fadeOutGains: cancel pending automation and anchor the ramp at the
// current value so the ramp starts from "now", not from the last scheduled event.
fadeInGains () {
  const fadeDuration = 6.0 // seconds
  const currentTime = this.audioContext.currentTime
  const fadeEndTime = currentTime + fadeDuration
  this.fading = true
  const noiseGain = this.createdNodes.noiseGain
  const musicGain = this.createdNodes.musicGain
  // NOTE(review): MediaElementAudioSourceNode has no 'muted' property — these
  // two assignments are ineffective; the media elements themselves are unmuted
  // in handlePlayingUpdate. Kept to avoid touching unrelated state.
  this.createdNodes.noiseSource.muted = false
  this.createdNodes.musicSource.muted = false
  noiseGain.gain.cancelScheduledValues(currentTime)
  noiseGain.gain.setValueAtTime(noiseGain.gain.value, currentTime)
  musicGain.gain.cancelScheduledValues(currentTime)
  musicGain.gain.setValueAtTime(musicGain.gain.value, currentTime)
  noiseGain.gain.linearRampToValueAtTime(1.0, fadeEndTime)
  musicGain.gain.linearRampToValueAtTime(1.0, fadeEndTime)
  this.noiseGain = noiseGain.gain.value
  this.musicGain = musicGain.gain.value
  this.playing = true
  // BUGFIX: the timeout previously used the ABSOLUTE context time
  // (currentTime + 6) as a millisecond delay, so `fading` stayed set longer
  // and longer as the context aged. Use the fade duration instead.
  setTimeout(() => {
    this.fading = false
  }, fadeDuration * 1000)
},
// Reacts to the Music AudioElement starting/stopping. On start, wires both
// hidden media elements into the Web Audio graph (source -> gain -> destination)
// and fades in; on stop, cancels any in-flight fade, tears the graph down and
// refreshes the AudioContext so the next start gets a clean slate.
handlePlayingUpdate (state: boolean) {
  if (!this.musicReady && !this.noiseReady) {
    // Media not loaded yet — nothing to wire up.
    return
  }
  if (state && useAudioStore().isPlaying()) {
    // Every AudioElement starts muted; all level control happens in Web Audio.
    const noiseElement = this.$refs.Noise as typeof AudioElement
    const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
    const musicElement = this.$refs.Music as typeof AudioElement
    const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
    const audioContext = this.audioContext
    const destination = this.audioContext.destination
    this.createdNodes.musicGain ||= audioContext.createGain()
    this.createdNodes.noiseGain ||= audioContext.createGain()
    // Start silent; fadeInGains ramps up from here.
    this.createdNodes.musicGain.gain.setValueAtTime(0, audioContext.currentTime)
    this.createdNodes.noiseGain.gain.setValueAtTime(0, audioContext.currentTime)
    musicAudioElement.muted = false
    noiseAudioElement.muted = false
    musicAudioElement.volume = 1.0
    noiseAudioElement.volume = 1.0
    // NOTE(review): createMediaElementSource throws if called twice on the same
    // element within one context; this relies on refreshAudioContext() having
    // replaced the context after the previous stop.
    this.createdNodes.noiseSource = audioContext.createMediaElementSource(noiseAudioElement)
    this.createdNodes.musicSource = audioContext.createMediaElementSource(musicAudioElement)
    this.createdNodes.noiseSource.connect(this.createdNodes.noiseGain)
    this.createdNodes.musicSource.connect(this.createdNodes.musicGain)
    this.createdNodes.noiseGain.connect(destination)
    this.createdNodes.musicGain.connect(destination)
    this.fadeInGains()
  } else {
    if (this.fading && this.createdNodes.noiseGain && this.createdNodes.musicGain) {
      // BUGFIX: cancelScheduledValues needs the CONTEXT's currentTime; GainNode
      // itself has no 'currentTime' property (previously passed undefined).
      const now = this.audioContext.currentTime
      this.createdNodes.noiseGain.gain.cancelScheduledValues(now)
      this.createdNodes.musicGain.gain.cancelScheduledValues(now)
    }
    // Music has just stopped — give the fade-out time to finish, then release
    // the graph so gains do not stack on the next start.
    setTimeout(() => {
      this.playing = false
      this.createdNodes = {} // BUGFIX: was [], but this is used as an object map everywhere else
      this.refreshAudioContext()
    }, 2000)
  }
},
// Smoothly ramps the noise channel to the requested level over 300 ms.
updateNoiseGain (volume: number) {
  const gainNode = this.createdNodes.noiseGain
  if (gainNode) {
    gainNode.gain.linearRampToValueAtTime(volume, gainNode.context.currentTime + 0.30)
    this.noiseGain = gainNode.gain.value
  }
},
// Smoothly ramps the music channel to the requested level over 300 ms.
updateMusicGain (volume: number) {
  const gainNode = this.createdNodes.musicGain
  if (gainNode) {
    gainNode.gain.linearRampToValueAtTime(volume, gainNode.context.currentTime + 0.30)
    this.musicGain = gainNode.gain.value
  }
}
}
}
</script>

View File

@@ -0,0 +1,73 @@
<template>
  <h1>Test Version NoiseMusicWebAudio: mit WebAudio, ohne Gain, ohne Noise-Patch & ohne Music-Patch</h1>
  <!-- Masking-noise source; its hidden <audio> element is wired into the Web Audio graph by the script below. -->
  <AudioElement
    ref="Noise"
    key="5"
    :src="noise_src"
    title="Noise"
  >
    <template #default="{}">
      <div class="icon">
        <img style="width: 25px" src="~/assets/image/noiseicon.svg">
      </div>
    </template>
  </AudioElement>
  <!-- Scene music source; its playing state drives handlePlayingUpdate. -->
  <AudioElement
    ref="Music"
    key="1"
    :src="forest_src"
    title="Forest"
    @update:playing="handlePlayingUpdate"
  >
    <template #default="{ }">
      <div class="icon">
        <!-- tropic icon -->
        <img style="width: 25px" src="~/assets/image/musicicon.svg">
      </div>
    </template>
    <!-- Slot content for AudioElement, if needed -->
  </AudioElement>
</template>
<script lang="ts">
import AudioElement from '../AudioElement.vue'
import { useAudioStore } from '../../../stores/audio'
export default {
  name: 'NoiseMusicGain',
  components: { AudioElement },
  data () {
    return {
      // Shared AudioContext from the pinia audio store.
      audioContext: useAudioStore().getContext(),
      // Cache of Web Audio nodes (media-element sources), keyed by name.
      createdNodes: {} as any,
      playing: false,
      paused: false,
      // Absolute URLs for the two audio files served from this origin.
      noise_src: window.location.origin + useRuntimeConfig().public.noise_src,
      forest_src: window.location.origin + useRuntimeConfig().public.tracks.forest_src
    }
  },
  beforeUnmount () {
    // Release audio hardware when the component is torn down.
    this.audioContext.close()
    this.audioContext = null
  },
  methods: {
    // Called when the Music AudioElement reports a play-state change: wires both
    // hidden <audio> elements straight into the context destination (no gain nodes).
    handlePlayingUpdate () {
      const noiseElement = this.$refs.Noise as typeof AudioElement
      const noiseAudioElement = noiseElement.$refs.audioElement as HTMLMediaElement
      const musicElement = this.$refs.Music as typeof AudioElement
      const musicAudioElement = musicElement.$refs.audioElement as HTMLMediaElement
      const audioContext = this.audioContext
      const destination = this.audioContext.destination
      // ||= so each media element becomes a source node only once —
      // createMediaElementSource throws if called twice for the same element.
      this.createdNodes.noiseSource ||= audioContext.createMediaElementSource(noiseAudioElement)
      this.createdNodes.musicSource ||= audioContext.createMediaElementSource(musicAudioElement)
      musicAudioElement.muted = false
      noiseAudioElement.muted = false
      this.createdNodes.noiseSource.connect(destination)
      this.createdNodes.musicSource.connect(destination)
    }
  }
}
</script>

View File

@@ -0,0 +1,354 @@
<template>
  <!-- Debug readout of the current scene's load state. -->
  <div>
    <h4>{{ currentScene.file }}</h4>
    <h4>Node {{ currentScene.node ? true : false }}</h4>
    <h4>{{ currentScene.title }}</h4>
    <h4>Howl {{ currentScene.howl ? true : false }}</h4>
  </div>
  <div class="rnboplayer">
    <button class="play yellow" @click="play" />
    <button v-if="playing" class="pause yellow" @click="pause" />
    <button v-if="playing" @click="useNuxtApp().$logger.log('hit play')">
      Pause
    </button>
    <!-- NOTE(review): both range inputs below share id="gain-control"; duplicate
         ids are invalid HTML — use unique ids if labels/CSS ever target them. -->
    <!-- Noise volume slider (0-100, mapped to gain 0-1 by the watcher). -->
    <div class="row">
      <div class="slider">
        <div class="icon">
          <!-- tropic icon -->
          <img style="width: 25px" src="~/assets/image/noiseicon.svg">
        </div>
        <input
          id="gain-control"
          v-model="outputNoiseGain"
          type="range"
          min="0"
          max="100"
          step="1"
          @wheel="changeNoiseGain"
        >
      </div>
    </div>
    <!-- Music volume slider (0-100, mapped to gain 0-1 by the watcher). -->
    <div class="row">
      <div class="slider">
        <div class="icon">
          <!-- tropic icon -->
          <img style="width: 25px" src="~/assets/image/musicicon.svg">
        </div>
        <input
          id="gain-control"
          v-model="outputMusicGain"
          type="range"
          min="0"
          max="100"
          default="50"
          step="1"
          @wheel="changeMusicGain"
        >
      </div>
    </div>
  </div>
</template>
<script setup lang="ts">
import { onMounted, onBeforeUnmount, ref, computed, watch } from 'vue'
import Player from '~/components/Player/Player.js'
import setupNodes from '~/components/Player/Nodes'
import { useUserStore } from '~/stores/user.js'
import { usePlayerStore } from '~/stores/player.js'
import tracksConfig from '~/tracks.config'
// Stores
const userStore = useUserStore()
const playerStore = usePlayerStore()
// Reactive mirror of the store's play/pause flag (drives v-if in the template).
const playing = computed(() => usePlayerStore().playing as boolean)
// Refs
const scenePlayer = ref<Player | null>(null)
const noisePlayer = ref<Player | null>(null)
// NOTE(review): initialized once from scenePlayer (null at this point), so this
// starts at 0 and does not track the player afterwards — see the watch below.
const playlistIndex = ref(scenePlayer.value?.index || 0)
// Sliders
const outputMusicGain = ref(0)
const outputNoiseGain = ref(0)
// Playlists
const scenesPlaylist = ref([
  { title: 'Lagoon', file: 'lagoon', howl: null, node: null },
  { title: 'Meadow', file: 'meadow', howl: null, node: null },
  { title: 'Tropics', file: 'tropics', howl: null, node: null },
  { title: 'Forest', file: 'forest', howl: null, node: null }
])
// Count of playlist entries whose Howl / node has finished loading.
const howlsReadyCount = computed(() =>
  scenesPlaylist.value.filter(item => item.howl !== null).length
)
const nodesReadyCount = computed(() =>
  scenesPlaylist.value.filter(item => item.node !== null).length
)
const noisePlaylist = ref([
  { title: 'noise', file: 'noise', howl: null, node: null }
])
// Scene shown in the debug header (always the first playlist entry).
const currentScene = computed(() => scenesPlaylist.value[0])
// Gain Nodes
const noiseGainNode = ref<GainNode | null>(null)
const musicGainNode = ref<GainNode | null>(null)
// Flips the shared playing flag on; the playerStore watcher starts both players.
function play () {
  const store = playerStore
  store.playing = true
  useNuxtApp().$logger.log('current playing = ', store.playing)
}
// Flips the shared playing flag off; the playerStore watcher pauses both players.
function pause () {
  const store = playerStore
  store.playing = false
  useNuxtApp().$logger.log('current playing = ', store.playing)
}
// Wheel handler for the noise slider: one slider step per wheel notch,
// clamped to [0, 100], and only while a noise gain node exists.
function changeNoiseGain (event: WheelEvent) {
  event.preventDefault()
  const step = Math.sign(event.deltaY)
  const next = outputNoiseGain.value - step
  if (noiseGainNode.value && isWithinRange(next)) {
    outputNoiseGain.value = next
  }
}
// Wheel handler for the music slider; mirrors changeNoiseGain.
function changeMusicGain (event: WheelEvent) {
  event.preventDefault()
  const delta = Math.sign(event.deltaY)
  if (musicGainNode.value && isWithinRange(outputMusicGain.value - delta)) {
    // NOTE(review): setVolume() is called with no arguments here, unlike the
    // noise handler which has no such call — verify against Player's API
    // whether this zero-argument call is intended.
    scenePlayer.value?.setVolume()
    outputMusicGain.value -= delta
  }
}
// True when the slider value lies inside the inclusive [0, 100] range.
function isWithinRange (val: number) {
  return !(val < 0 || val > 100)
}
// Lifecycle
onMounted(async () => {
  const logger = useNuxtApp().$logger
  // One player for the scene music, one for the masking noise.
  scenePlayer.value = new Player(scenesPlaylist, 'sounds')
  noisePlayer.value = new Player(noisePlaylist, 'masking')
  scenePlayer.value.initializeHowl(scenePlayer.value.index)
  noisePlayer.value.initializeHowl(noisePlayer.value.index)
  // Pick the playlist entry matching the user's saved scenery preference.
  const userScenery = userStore.getUserScenery
  const filteredScenes = scenePlayer.value.playlist.filter(
    (playItem: { title: string }) => playItem.title.toLowerCase() === userScenery.toLowerCase()
  )
  // NOTE(review): assumes filteredScenes is non-empty — an unknown scenery name
  // would throw on filteredScenes[0] below; confirm upstream validation.
  const noiseNode = noisePlayer.value.playlist[0].node
  const response = await setupNodes(noiseNode, filteredScenes[0].node)
  if (response === null) {
    // No Web Audio graph available — fall back to plain HTML5 playback.
    logger.info('Got no gain nodes from setupNodes, continue with HTML5')
    // noisePlayer.value.playlist[0].howl.play()
    // scenePlayer.value.playlist[0].howl.play()
  } else {
    try {
      // filteredScenes[0].howl.play()
      // noisePlayer.value.playlist[0].howl.play()
      if (response.length > 1) {
        noiseGainNode.value = response[0]
        musicGainNode.value = response[1]
      }
      // Start both channels at half volume.
      if (musicGainNode.value && noiseGainNode.value) {
        musicGainNode.value.gain.value = 0.5
        noiseGainNode.value.gain.value = 0.5
      }
    } catch (error) {
      useNuxtApp().$logger.error(error)
    }
  }
})
onBeforeUnmount(() => {
  // Drop player references and reset the shared audio context on teardown.
  noisePlayer.value = null
  scenePlayer.value = null
  playerStore.resetAudioContext()
})
// Watches
// Keep the player's internal playlist index in sync with the local ref.
watch(playlistIndex, (val) => {
  const player = scenePlayer.value as Player
  player.index = val
  // musicGainNode.value?.gain.linearRampToValueAtTime(val / 100, musicGainNode.value.context.currentTime + 0.2)
})
// Map the 0-100 slider values onto the gain nodes with a short 200 ms ramp.
watch(outputMusicGain, (val) => {
  musicGainNode.value?.gain.linearRampToValueAtTime(val / 100, musicGainNode.value.context.currentTime + 0.2)
})
watch(outputNoiseGain, (val) => {
  noiseGainNode.value?.gain.linearRampToValueAtTime(val / 100, noiseGainNode.value.context.currentTime + 0.2)
})
// Debug: log once the noise node becomes available.
watch(
  () => noisePlayer.value?.playlist[0]?.node,
  (node) => {
    if (node) { useNuxtApp().$logger.log('🎧 Node ist jetzt da:', node) }
  }
)
// Start/stop both players whenever the store's playing flag flips.
watch(
  () => playerStore.playing,
  (state) => {
    // BUGFIX: previously checked the refs themselves (`noisePlayer && scenePlayer`),
    // which are always truthy; check .value so we only start playback (and log
    // "spielt") when the players actually exist.
    if (state && noisePlayer.value && scenePlayer.value) {
      noisePlayer.value.play()
      scenePlayer.value.play()
      useNuxtApp().$logger.log('🎧 Player spielt')
    } else {
      noisePlayer.value?.pause()
      scenePlayer.value?.pause()
      useNuxtApp().$logger.log('🎧 Player spielt nicht')
    }
  }
)
</script>
<style scoped>
/* Fixed, centered mini-player panel overlaying the page. */
.rnboplayer{
  position: fixed;
  width: 220px; /* Or specify a fixed width like 220px if you prefer */
  max-width: 220px; /* This line might be redundant depending on your width strategy */
  height: 100px;
  display: inline-grid;
  z-index: 2;
  bottom: 11%;
  left: 0;
  right: 0;
  margin-left: auto;
  margin-right: auto;
}
.player {
  background-color: #fff;
  border-radius: 12px;
  position:sticky;
  width: 225px;
  height: inherit;
  display:flex;
  /* NOTE(review): 'float: bottom' is not a valid value (left/right/none) and is
     ignored by browsers — remove or replace with a flexbox alignment. */
  float: bottom;
}
.player button {
  border-radius: 10px;
  padding: 10px;
}
.container {
  display: flex;
  flex-wrap: wrap;
  gap: 20px; /* Spacing between items */
  width: 225px;
  margin-bottom: 20px;
}
/* Shared flex centering for icon and slider cells. */
.icon, .slider {
  flex: 1 1 100px; /* Flex-grow, flex-shrink, flex-basis */
  display: flex;
  align-items: center; /* Center items vertically */
  justify-content: center; /* Center items horizontally */
}
.icon {
  /* Add padding around the icon for margin */
  margin-right: 15px; /* Adjust this value as needed */
  /* Align items if using flexbox */
  display: flex;
  align-items: center;
  justify-content: center;
}
.icon img {
  /* Adjust width and height as needed or keep them auto to maintain aspect ratio */
  width: auto;
  height: 100%; /* Example height, adjust based on your icon size */
}
.slider input[type=range] {
  width: 100%; /* Full width of its parent */
  background-color: transparent !important;
}
@media (min-width: 600px) {
  .row {
    display: flex;
    width: 100%;
  }
  .icon, .slider {
    flex: 1; /* Take up equal space */
  }
}
/* Styles the track (vendor-specific pseudo-elements must stay separate rules). */
input[type="range"]::-webkit-slider-runnable-track {
  background: #e9c046; /* yellow track */
  height: 8px;
  border-radius: 5px;
}
input[type="range"]::-moz-range-track {
  background: #e9c046; /* yellow track */
  height: 8px;
  border-radius: 5px;
}
input[type="range"]::-ms-track {
  background: #e9c046; /* yellow track */
  border-color: transparent;
  color: transparent;
  height: 8px;
}
/* Large pulsing circular play button. */
.play.yellow {
  background: rgba(255, 176, 66, 0.008);
  border-radius: 50%;
  box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
  animation: pulse-yellow 4s infinite;
  position: fixed;
  bottom: 40%;
  width: 200px;
  height: 200px;
  background-image: url('/images/playbtn.svg');
  background-repeat:no-repeat;
  background-attachment:fixed;
  background-position: 58% 55%;
}
/* Nearly invisible pause button that fades in on hover. */
.pause.yellow {
  background: rgba(255, 176, 66, 0.008);
  border-radius: 50%;
  box-shadow: 0 0 0 0 rgba(255, 177, 66, 1);
  opacity: 0.05;
  position: fixed;
  bottom: 40%;
  width: 200px;
  height: 200px;
  background-image: url('/images/pausebtn.svg');
  background-size: 130px 100px;
  background-repeat:no-repeat;
  background-attachment:fixed;
  background-position: center;
}
.pause.yellow:hover{
  opacity: 0.5;
}
/* Soft expanding ring animation around the play button. */
@keyframes pulse-yellow {
  0% {
    transform: scale(0.95);
    box-shadow: 0 0 0 0 rgba(255, 177, 66, 0.7);
  }
  70% {
    transform: scale(1);
    box-shadow: 0 0 0 10px rgba(255, 177, 66, 0);
  }
  100% {
    transform: scale(0.95);
    box-shadow: 0 0 0 0 rgba(255, 177, 66, 0);
  }
}
</style>