Initial commit
composables/useArtWorkManager.js: 133 lines added (normal file)
@@ -0,0 +1,133 @@
import { useAudioStore } from '~/stores/audio'
import { useUserStore } from '~/stores/user'
import { getSoundcapeList } from '~/tracks.config'

/**
 * The useArtWorkManager composable updates the navigator's mediaSession
 * metadata. This is relevant whenever the user's soundscape changes.
 *
 * If a soundscape is playing, the soundscape artwork is shown.
 * If music mode is selected (so no soundscape is playing), a noise media session is shown.
 *
 * @returns useArtWorkManager: set of functions.
 */

export const useArtWorkManager = () => {
  let dummyAudio = null
  // State management (computed so the values stay reactive to the stores)
  const currentSoundscape = computed(() => useUserStore().user.settings.soundscape || 'Forest')
  const playing = computed(() => useAudioStore().playing)
  const musicMode = computed(() => useUserStore().soundMode)

  // Helper functions

  /**
   * Creates a muted, looping dummy audio tag so the browser keeps the
   * media session alive even when no audible element is playing.
   */
  const createAudioTag = () => {
    if (!dummyAudio) {
      dummyAudio = new Audio('data:audio/mp3;base64,SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU2LjM2LjEwMAAAAAAAAAAAAAAA//OEAAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAAEAAABIADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV6urq6urq6urq6urq6urq6urq6urq6urq6v////////////////////////////////8AAAAATGF2YzU2LjQxAAAAAAAAAAAAAAAAJAAAAAAAAAAAASDs90hvAAAAAAAAAAAAAAAAAAAA//MUZAAAAAGkAAAAAAAAA0gAAAAATEFN//MUZAMAAAGkAAAAAAAAA0gAAAAARTMu//MUZAYAAAGkAAAAAAAAA0gAAAAAOTku//MUZAkAAAGkAAAAAAAAA0gAAAAANVVV')
      dummyAudio.loop = true
      dummyAudio.playbackRate = 0.25
      dummyAudio.muted = true
      dummyAudio.preload = 'auto'
      dummyAudio.controls = true // or false; true is useful for debugging
      dummyAudio.id = 'dummy-audio'
      dummyAudio.setAttribute('data-role', 'media-session-dummy')
      document.body.appendChild(dummyAudio)

      // Try to play; this only succeeds after a user interaction (e.g. a click)
      dummyAudio.play().catch((err) => {
        useNuxtApp().$logger.warn('[MediaSession] DummyAudio could not start', err)
      })
    }
  }

  /**
   * Easily access the next soundscape in the playlist to update the media metadata.
   * @returns soundscape title as string
   */
  const getNextSoundscape = () => {
    const list = getSoundcapeList()
    const current = currentSoundscape.value
    const index = list.indexOf(current)
    return list[(index + 1) % list.length]
  }
  const getPreviousSoundscape = () => {
    const list = getSoundcapeList()
    const current = currentSoundscape.value
    const index = list.indexOf(current)
    // add list.length before the modulo so the index never goes negative
    return list[(index - 1 + list.length) % list.length]
  }

  const addMusicArtWork = (scenery = 'Calm Speech Blocker') => {
    if ('mediaSession' in navigator) {
      navigator.mediaSession.metadata = new MediaMetadata({
        title: scenery,
        artist: 'mindboost',
        album: 'mindboost focus soundscapes',
        artwork: [
          { src: '/images/scenery/' + scenery + '.svg', sizes: '512x512', type: 'image/svg+xml' }
        ]
      })
      navigator.mediaSession.playbackState = !playing.value ? 'paused' : 'playing'
    }
  }

  const addNoiseArtWork = () => {
    createAudioTag()
    if ('mediaSession' in navigator) {
      const pathKlein = window.location.origin + '/images/scenery/noise_artwork_1024.jpg'
      const pathGross = window.location.origin + '/images/scenery/noise_artwork_512.jpg'
      navigator.mediaSession.metadata = new MediaMetadata({
        title: 'Calm Speech Blocker',
        artist: 'mindboost',
        album: 'get your focus',
        artwork: [
          { src: pathKlein, sizes: '1024x1024', type: 'image/jpeg' },
          { src: pathGross, sizes: '512x512', type: 'image/jpeg' }
        ]
      })
      navigator.mediaSession.playbackState = !playing.value ? 'paused' : 'playing'
    }
  }

  // Watcher for changes of the currentSoundscape
  watch(currentSoundscape, (newVal, oldVal) => {
    if (newVal !== oldVal) {
      if (musicMode.value === 'music') {
        // Noise placeholder will be shown
        addNoiseArtWork()
      }
      if (musicMode.value === 'soundscape') {
        // Soundscape media will be shown
        addMusicArtWork(newVal)
      }
    }
  }, { immediate: true })

  watch(musicMode, (newVal, oldVal) => {
    if (newVal !== oldVal) {
      if (newVal === 'music') {
        // Noise placeholder will be shown
        addNoiseArtWork()
      }
      if (newVal === 'soundscape') {
        // Soundscape media will be shown
        addMusicArtWork(currentSoundscape.value)
      }
    }
  }, { immediate: true })

  watch(playing, (newVal) => {
    if (newVal) {
      if (musicMode.value === 'music') { addNoiseArtWork() }
      if (musicMode.value === 'soundscape') { addMusicArtWork(currentSoundscape.value) }
    }
  }, { immediate: true })

  return {
    addMusicArtWork,
    addNoiseArtWork,
    getNextSoundscape,
    getPreviousSoundscape
  }
}
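A minimal usage sketch, assuming Nuxt auto-imports; the component and handler names are illustrative only.

// Hypothetical usage inside a player component's setup:
const { addMusicArtWork, addNoiseArtWork, getNextSoundscape } = useArtWorkManager()

// When the user skips forward, update the lock-screen artwork to the next soundscape.
const onSkip = () => {
  const next = getNextSoundscape()
  addMusicArtWork(next)
}

// When switching into music mode, fall back to the generic noise artwork.
const onMusicMode = () => {
  addNoiseArtWork()
}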
composables/useAudioReplacer.ts: 182 lines added (normal file)
@@ -0,0 +1,182 @@
import { useAudioStore } from '~/stores/audio'
import { useUserStore } from '~/stores/user'

export const useAudioReplacer = () => {
  const activeReplacements = new Map<string, Promise<void>>()

  /**
   * Waits for `canplay` of an audio element, or resolves false after a timeout.
   */
  const waitForCanPlay = (audio: HTMLAudioElement, timeout = 10000) => {
    return new Promise<boolean>((resolve) => {
      const timer = setTimeout(() => resolve(false), timeout)
      audio.oncanplay = () => {
        clearTimeout(timer)
        resolve(true)
      }
      audio.onerror = () => {
        clearTimeout(timer)
        resolve(false)
      }
      audio.load()
    })
  }

  /**
   * Replaces an audio tag with a smooth fade out and fade in.
   * @param newSrc - source of the replacement audio
   * @param duration - fade duration in seconds
   */
  const replaceAudioWithFade = async ({
    newSrc,
    duration = 1.5
  }: {
    newSrc: string
    duration?: number
  }) => {
    const { getNextSoundscape } = usePlayerControls()
    const title = getNextSoundscape()

    // Prevent concurrent replacements of the same title
    if (activeReplacements.has(title)) {
      await activeReplacements.get(title)
    }

    const promise = (async () => {
      const audioStore = useAudioStore()
      await audioStore.ensureAudioContextRunning()
      const ctx = audioStore.getContext()
      const masterGain = audioStore.masterGainMusic

      const oldAudio = document.getElementById(title) as HTMLAudioElement
      if (!oldAudio || !ctx || !masterGain) {
        return
      }

      // Step 1: Fade out + stop
      await fadeOutAndStop(oldAudio, duration)

      // Step 2: Replace
      const newAudio = oldAudio.cloneNode(true) as HTMLAudioElement
      newAudio.src = newSrc
      newAudio.volume = 0
      newAudio.loop = true
      newAudio.hidden = true
      const sink = useUserStore().audioOutputDevice as MediaDeviceInfo
      newAudio.setSinkId(sink.deviceId)

      // Remove previous MediaElementSource if it exists
      try {
        const clone = ctx.createMediaElementSource(new Audio())
        clone.disconnect()
      } catch (err) {
        console.warn('[AudioReplacer] MediaElementSource clone check failed', err)
      }

      // Replace in DOM
      oldAudio.replaceWith(newAudio)

      // Step 3: Connect to WebAudio
      let gainNode: GainNode | null = null
      try {
        const source = ctx.createMediaElementSource(newAudio)
        gainNode = ctx.createGain()
        source.connect(gainNode).connect(masterGain)
      } catch (err) {
        console.warn('[AudioReplacer] Could not attach WebAudio. Using fallback.')
      }

      // Step 4: Wait for canplay
      await waitForCanPlay(newAudio, 4000)

      // Step 5: Play and fade in
      await newAudio.play().catch(console.error)
      fadeVolume(newAudio, 0, 1, duration * 1000)
    })()

    activeReplacements.set(title, promise)

    try {
      await promise
    } finally {
      activeReplacements.delete(title)
    }
  }

  /**
   * Fades an audio element down to volume 0, then pauses it and clears its source.
   */
  const fadeOutAndStop = (audio: HTMLAudioElement, duration: number) => {
    return new Promise<void>((resolve) => {
      fadeVolume(audio, audio.volume, 0, duration * 1000)
      setTimeout(() => {
        audio.pause()
        audio.src = ''
        resolve()
      }, duration * 1000)
    })
  }

  /**
   * Fades an audio element in to volume 1, via WebAudio if possible, otherwise via element volume.
   */
  const fadeInAudio = async (
    audio: HTMLAudioElement,
    duration = 1.5
  ): Promise<void> => {
    const audioStore = useAudioStore()
    await audioStore.ensureAudioContextRunning()
    const ctx = audioStore.getContext()
    const masterGain = audioStore.masterGainMusic as GainNode

    try {
      const gainNode = ctx.createGain()
      const source = ctx.createMediaElementSource(audio)
      source.connect(gainNode).connect(masterGain)

      gainNode.gain.setValueAtTime(0, ctx.currentTime)
      gainNode.gain.linearRampToValueAtTime(1, ctx.currentTime + duration)

      audio.play()
      await new Promise((res) => setTimeout(res, duration * 1000))
    } catch (error) {
      audio.play()
      fadeVolume(audio, 0, 1, duration * 1000)
    }
  }

  /**
   * Linearly fades the volume of an audio element (duration in milliseconds).
   */
  const fadeVolume = (audio: HTMLAudioElement, from: number, to: number, duration: number) => {
    const steps = 20
    const stepTime = duration / steps
    let currentStep = 0

    const fadeInterval = setInterval(() => {
      currentStep++
      const progress = currentStep / steps
      const newVolume = from + (to - from) * progress
      audio.volume = Math.max(0, Math.min(1, newVolume))

      if (currentStep >= steps) {
        clearInterval(fadeInterval)
        audio.volume = to
      }
    }, stepTime)
  }

  return {
    fadeOutAndStop,
    fadeInAudio,
    waitForCanPlay,
    fadeVolume,
    replaceAudioWithFade
  }
}
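A minimal sketch of triggering a crossfade to a new source, e.g. inside an async handler; the file path is illustrative and assumes the composable is auto-imported.

const { replaceAudioWithFade } = useAudioReplacer()

// Crossfade the current soundscape tag to a new source over two seconds.
await replaceAudioWithFade({
  newSrc: '/sounds/soundscapes/Rain.webm', // illustrative path
  duration: 2 // fade time in seconds
})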
composables/useLocalStorage.js: 21 lines added (normal file)
@@ -0,0 +1,21 @@
import { useTimerStore } from '~~/stores/timer'

const storageKeys = {
  timer: 'timer-settings'
}

export const storeSettings = () => {
  const timerSettings = useTimerStore().$state.settings

  localStorage.setItem(storageKeys.timer, JSON.stringify(timerSettings))
}

export const getSettings = () => {
  const storedSettings = {
    timer: localStorage.getItem(storageKeys.timer)
  }

  if (storedSettings.timer) {
    useTimerStore().$state.settings = JSON.parse(storedSettings.timer)
  }
}
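A minimal sketch of wiring these helpers together, assuming a Nuxt plugin or component with access to the Pinia timer store.

// Hydrate the timer store from localStorage on startup.
getSettings()

// Persist the settings again whenever the store changes.
useTimerStore().$subscribe(() => {
  storeSettings()
})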
composables/useMediaProvider.ts: 125 lines added (normal file)
@@ -0,0 +1,125 @@
import { getSoundscapeByTitle } from '~/tracks.config'
import { ensureAudio, useAudioStore } from '~/stores/audio'
import { watch } from 'vue'

/**
 * Media provider composable.
 * Manages audio elements and Web Audio nodes for soundscapes.
 */
export function useMediaProvider() {
  const audioElements = new Map<string, HTMLAudioElement>()

  /**
   * Returns an HTMLAudioElement for a soundscape title.
   * @param title e.g. "Forest"
   */
  const getAudioElementForTitle = async (title: string): Promise<HTMLAudioElement | null> => {
    const src = getSoundscapeByTitle(title) || 'Forest'
    const audioElement = await createPlayableAudioElement(src)
    return audioElement
  }

  /**
   * Creates an audio element and waits until it is ready to play (canplay),
   * or rejects with an error after the timeout.
   *
   * @param {string} src - The source of the audio file
   * @param {number} timeoutMs - Maximum wait time in milliseconds
   * @returns {Promise<HTMLAudioElement>}
   */
  async function createPlayableAudioElement(src: string, timeoutMs = 10000): Promise<HTMLAudioElement> {
    return new Promise((resolve, reject) => {
      const audio = new Audio(src)
      const timeout = setTimeout(() => {
        audio.src = ''
        reject(new Error(`AudioElement did not become playable within ${timeoutMs}ms`))
      }, timeoutMs)

      const cleanup = () => {
        clearTimeout(timeout)
        audio.removeEventListener('canplay', onCanPlay)
        audio.removeEventListener('error', onError)
      }

      const onCanPlay = () => {
        audioElements.set(src, audio)
        cleanup()
        resolve(audio)
      }

      const onError = () => {
        cleanup()
        reject(new Error(`Failed to load audio: ${src}`))
      }

      audio.addEventListener('canplay', onCanPlay)
      audio.addEventListener('error', onError)
      audio.load() // important!
    })
  }

  /**
   * Creates a MediaElementAudioSourceNode and connects it to masterGainNoise.
   * Catches errors and returns `null` on failure.
   */
  const getMediaElementSourceByTitle = async (title: string): Promise<{ element: HTMLAudioElement; source: MediaElementAudioSourceNode } | null> => {
    const audioStore = useAudioStore()
    const audioContext = audioStore.getContext()
    const masterGain = audioStore.masterGainNoise

    if (!audioContext || !masterGain) {
      console.warn('[MediaProvider] AudioContext or masterGainNoise is missing.')
      return null
    }

    try {
      const element = await createPlayableAudioElement(title)
      if (!element) return null
      const source = audioContext.createMediaElementSource(element)
      source.connect(masterGain)
      return { element, source }
    } catch (err) {
      console.error('[MediaProvider] Error creating MediaElementSource:', err)
      // Retry once after making sure the audio setup is available
      await ensureAudio
      const source = audioContext.createMediaElementSource(await createPlayableAudioElement(title))
      source.connect(masterGain)
      return null
    }
  }

  /**
   * Reactive binding of the noise volume to masterGainNoise.gain.
   */
  const bindVolumeToMasterGain = () => {
    const audioStore = useAudioStore()
    const masterGain = audioStore.masterGainNoise
    if (!masterGain) return

    watch(
      () => audioStore.noiseVolume,
      async (volume) => {
        try {
          if (audioStore.audioContext) {
            masterGain.gain.setTargetAtTime(volume, audioStore.audioContext.currentTime, 0.05)
          }
        } catch (err) {
          console.warn('[MediaProvider] Error setting the volume:', err)
          if (audioStore && audioStore.audioContext) {
            await ensureAudio
            masterGain.gain.setTargetAtTime(volume, audioStore.audioContext.currentTime, 0.05)
          }
        }
      },
      { immediate: true }
    )
  }

  bindVolumeToMasterGain()

  return {
    getAudioElementForTitle,
    getMediaElementSourceByTitle,
    createPlayableAudioElement
  }
}
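A minimal usage sketch, assuming 'Forest' is a valid title in tracks.config and a prior user gesture has unlocked audio playback.

const media = useMediaProvider()

// Resolve the soundscape title to a playable element and start it.
const element = await media.getAudioElementForTitle('Forest')
if (element) {
  element.loop = true
  await element.play()
}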
composables/usePlayerControls.js: 172 lines added (normal file)
@@ -0,0 +1,172 @@
import { useAudioStore } from '~/stores/audio'
import { useUserStore } from '~/stores/user'
import tracksConfig, { getSoundcapeList } from '~/tracks.config'
import { useArtWorkManager } from '~/composables/useArtWorkManager'

export const usePlayerControls = () => {
  // State management
  const audio = useAudioStore()
  const userStore = useUserStore()
  const { $changeTrack } = useNuxtApp()

  const currentIndex = computed(() => getSoundcapeList().indexOf(userStore.user.settings.soundscape))
  const currentSoundscape = computed(() => useUserStore().user.settings.soundscape)
  const playing = computed(() => audio.getPlaying)

  const togglePlayingState = () => {
    if (playing.value) {
      audio.setPlaying(false)
    } else {
      audio.setPlaying(true)
    }
  }

  // Listeners

  const addSpaceListener = () => {
    window.addEventListener('keydown', handleSpace)
  }

  const removeSpaceListener = () => {
    window.removeEventListener('keydown', handleSpace)
  }

  const addMediaControls = () => {
    for (const [action, handler] of actionHandlers) {
      try {
        navigator.mediaSession.setActionHandler(action, handler)
      } catch (error) {
        useNuxtApp().$logger.error(error)
      }
    }
  }

  // Helper functions
  const getNextSoundscape = () => {
    const list = getSoundcapeList()
    const current = currentSoundscape.value
    const index = list.indexOf(current)

    return list[(index + 1) % list.length]
  }
  const getPreviousSoundscape = () => {
    const list = getSoundcapeList()
    const current = currentSoundscape.value

    const index = list.indexOf(current)
    // add list.length before the modulo so the index never goes negative
    return list[(index - 1 + list.length) % list.length]
  }

  // Handlers
  const handleSpace = (e) => {
    const activeElement = document.activeElement
    const tagName = activeElement.tagName.toLowerCase()

    // List of elements where spacebar interaction should be preserved
    const interactiveElements = [
      'input', 'textarea', 'button', 'select', 'option',
      'video', 'audio', 'a', 'summary'
    ]

    // Check for contenteditable attribute
    const isContentEditable = activeElement.getAttribute('contenteditable') === 'true'

    // Check for custom data attribute that might indicate spacebar interaction
    const usesSpacebar = activeElement.getAttribute('data-uses-spacebar') === 'true'

    if (e.code === 'Space' &&
      !interactiveElements.includes(tagName) &&
      !isContentEditable &&
      !usesSpacebar) {
      e.preventDefault() // Prevent the default action (scrolling)

      navigator.mediaSession.playbackState = (audio.getPlaying || false) ? 'playing' : 'paused'
      togglePlayingState()
    }
  }
  const handlePlay = () => {
    if (!playing.value) { togglePlayingState() }
  }
  const handlePause = () => {
    if (playing.value) { togglePlayingState() }
  }
  const handleStop = () => {
    if (playing.value) { togglePlayingState() }
  }
  const handleNext = () => {
    if ('mediaSession' in navigator) {
      // Set the handler for the next track action
      navigator.mediaSession.setActionHandler('nexttrack', async () => {
        const nextTrack = getNextSoundscape()

        await $changeTrack(nextTrack)
        useArtWorkManager().addMusicArtWork(nextTrack)
      })
    }
  }
  const handlePrev = () => {
    if ('mediaSession' in navigator) {
      // Set the handler for the previous track action
      navigator.mediaSession.setActionHandler('previoustrack', async () => {
        const prevTrack = getPreviousSoundscape()
        await $changeTrack(prevTrack)
        useArtWorkManager().addMusicArtWork(prevTrack)
      })
    }
  }

  const actionHandlers = [
    ['play', handlePlay],
    ['pause', handlePause],
    ['stop', handleStop],
    ['nexttrack', handleNext],
    ['previoustrack', handlePrev]
  ]

  const removeMediaNavigationHandling = () => {
    if ('mediaSession' in navigator) {
      // Remove play action handler
      navigator.mediaSession.setActionHandler('play', null)

      // Remove pause action handler
      navigator.mediaSession.setActionHandler('pause', null)

      // Remove previous track action handler
      navigator.mediaSession.setActionHandler('previoustrack', null)

      // Remove next track action handler
      navigator.mediaSession.setActionHandler('nexttrack', null)
    }
  }
  const addPlayPauseHandling = () => {
    if ('mediaSession' in navigator) {
      // Previous track action
      navigator.mediaSession.setActionHandler('previoustrack', () => {
        this.skipTo(this.playlist.index - 1)
        // useNuxtApp().$logger.log('Previous track button pressed')
      })
      // Next track action
      navigator.mediaSession.setActionHandler('nexttrack', () => {
        this.skipTo(this.playlist.index + 1)
        // useNuxtApp().$logger.log('Next track button pressed')
      })
    }
  }

  return {
    addSpaceListener,
    addMediaControls,
    getNextSoundscape,
    getPreviousSoundscape
  }
}
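A minimal sketch of wiring the controls up when a player component mounts, assuming onMounted is available via Nuxt auto-imports.

const { addSpaceListener, addMediaControls } = usePlayerControls()

onMounted(() => {
  addSpaceListener() // space bar toggles play/pause outside form fields
  addMediaControls() // registers play/pause/stop/next/previous with the Media Session API
})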
composables/useRequirementChecker.ts: 143 lines added (normal file)
@@ -0,0 +1,143 @@
import type { Logger } from "pino"
import { useAudioStore } from "~/stores/audio"
import { useDevicesStore } from "~/stores/device"
import { useMicStore } from "~/stores/microphone"
import { useUserStore } from "~/stores/user"
import { getSoundscapeByTitle } from "~/tracks.config"

export const useRequirementChecker = () => {
  const logger = useNuxtApp().$logger as Logger

  const audio = useAudioStore()
  const mic = useMicStore()
  const rnbo = useDevicesStore()
  const user = useUserStore().user as any

  let audioContext = false
  let audioUnlock = false
  let microphone = false
  let rnboDevice = false
  let audioTags = false

  const checkAudioContext = async () => {
    if (audio.audioContext?.state === 'running') {
      audioContext = true
    } else {
      await audio.ensureAudioContextRunning()
      // re-check once after trying to resume the context
      audioContext = audio.audioContext?.state === 'running'
    }
    return audioContext
  }

  const checkAudioUnlocked = async () => {
    try {
      const test = new Audio()
      const sink = useUserStore().audioOutputDevice as MediaDeviceInfo
      test.setSinkId(sink.deviceId)
      test.src = 'https://localhost:3000/sounds/debug/LMusik_RSprache.mp3'
      test.muted = true
      // play() resolving means autoplay is unlocked; a rejection keeps the flag false
      await test.play().then(() => {
        audioUnlock = true
      }).catch(() => {
        audioUnlock = false
      })
    } catch (error) {
      audioUnlock = false
    }
    return audioUnlock
  }

  const checkRNBODevice = async () => {
    try {
      const centerFrequencies3 = [150, 1500, 8000]
      const centerFrequencies9 = [63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000]
      const mobile = /Mobi|Android|iPhone|iPad|iPod/i.test(navigator.userAgent)
      // Mobile devices get the lighter 3-band setup, desktop the full 9-band setup
      const centerFrequencies = mobile ? centerFrequencies3 : centerFrequencies9
      for (const frequency of centerFrequencies) {
        await rnbo.createControlValuesDevice(`testControlValues_${frequency}Hz`, frequency)
      }
      rnboDevice = true
    } catch (error) {
      rnboDevice = false
    }
    return rnboDevice
  }

  const checkAudioTags = async () => {
    const lowPath = `/masking/3bands/low_band_256kbps.webm`
    const midPath = `/masking/3bands/mid_band_256kbps.webm`
    const highPath = `/masking/3bands/high_band_256kbps.webm`
    const soundscape = user.settings.soundscape || 'Forest'
    try {
      const soundScapePath = getSoundscapeByTitle(soundscape as string)
      const fullPaths = [
        `${window.location.origin}${encodeURI(lowPath)}`,
        `${window.location.origin}${encodeURI(midPath)}`,
        `${window.location.origin}${encodeURI(highPath)}`
      ]
      if (soundScapePath) {
        fullPaths.push(soundScapePath)
      }
      const media = useMediaProvider()
      for (const path of fullPaths) {
        await media.createPlayableAudioElement(path, 3000)
      }
      audioTags = true
    } catch (error) {
      audioTags = false
    }
    return audioTags
  }

  const checkMicrophone = async () => {
    try {
      await mic.attachMicrophone()
      microphone = true
      return true
    } catch {
      microphone = false
      return false
    }
  }

  const ensureReadiness = async () => {
    // every check has to pass for the app to be considered ready
    let ready = true
    ready = (await checkAudioUnlocked()) && ready
    logger.info("Audio Unlocked")
    ready = (await checkAudioContext()) && ready
    logger.info("AudioContext running")
    ready = (await checkRNBODevice()) && ready
    logger.info("RNBODevices ready for connections")
    ready = (await checkAudioTags()) && ready
    logger.info("AudioTags ready to play")
    ready = (await checkMicrophone()) && ready
    logger.info("Microphone attached")
    return ready
  }

  return {
    ensureReadiness,
    checkAudioUnlocked,
    checkAudioContext,
    audioUnlock, audioContext, microphone, rnboDevice, audioTags
  }
}
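A minimal sketch of gating playback behind the readiness check, e.g. in a start-button handler; setPlaying mirrors the call used in usePlayerControls.

const { ensureReadiness } = useRequirementChecker()

const onStartClick = async () => {
  const ready = await ensureReadiness()
  if (!ready) {
    useNuxtApp().$logger.warn('Audio pipeline is not ready yet')
    return
  }
  useAudioStore().setPlaying(true)
}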
composables/useTimerControls.js: 60 lines added (normal file)
@@ -0,0 +1,60 @@
import { useTimerStore } from '~/stores/timer'

export const useTimerControls = () => {
  const timer = useTimerStore()
  const interval = ref(null)

  const toggleTimer = () => {
    timer.toggleTimer()
    if (timer.isStarted) {
      interval.value = setInterval(startTimer, 1000)
    } else { clearInterval(interval.value) }
  }

  const startTimer = () => {
    if (timer.getTimeRemaining <= 0) {
      const currentSession = timer.settings.timer[timer.currentSession].text

      timer.nextSession()
      if (timer.playSessionEndSound) { playAlarm() }
      const nextSession = timer.settings.timer[timer.currentSession].text

      // if (timer.showNotification) { showNotification(currentSession, nextSession) }

      if (timer.settings.autoStart) { return }
      return toggleTimer()
    }
    timer.setTimeRemaining(timer.getTimeRemaining - 1)
  }

  const clearTimer = () => {
    timer.started = false
    timer.clearTimeRemaining()
    clearInterval(interval.value)
  }

  // const showNotification = (currentSession, nextSession) => {
  //   const {
  //     isSupported,
  //     notification,
  //     show,
  //     onError
  //   } = useWebNotification({
  //     title: `${currentSession} is done.`,
  //     body: `You've just finished a ${String(currentSession).toLowerCase()} session. Now get ready to start the next session: ${nextSession}`,
  //     renotify: true,
  //     tag: currentSession
  //   })
  //   if (isSupported.value) { show() }
  // }

  const playAlarm = async () => {
    const audio = await import(`../assets/sounds/${useTimerStore().alarmAudio}.mp3`)
    const alarm = new Audio(audio.default)
    alarm.play()
  }
  return {
    toggleTimer,
    clearTimer
  }
}
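A minimal sketch of binding the timer controls in a timer component; the handler names are illustrative.

const { toggleTimer, clearTimer } = useTimerControls()

const onStartPause = () => toggleTimer() // starts or pauses the 1-second tick
const onReset = () => clearTimer() // stops the interval and resets the remaining time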