Compare commits

..

10 Commits

Author SHA1 Message Date
Khaled Aldayeh 5e334d3bfd new Signal naming 2024-01-09 21:19:13 +01:00
Robert Rapp 653b9220f8 ChannelSteamer und Octave.js2 2023-12-16 22:46:22 +01:00
Robert Rapp fd3ccb8fa2 Test mit octave.js 2023-12-16 22:45:56 +01:00
Robert Rapp f33e61f2d3 Merge branch 'robbi-inspect' of https://gitea.mindboost.team/Mindboost/dev-audioprocessing into robbi-inspect 2023-12-07 18:20:21 +01:00
Robert Rapp 23ac4743f5 merged gitignores 2023-12-07 18:20:16 +01:00
Robert Rapp 7a40953f04 revert 63f795d4c9
revert Dockerfile aktualisiert
2023-12-07 04:33:10 +00:00
Robert Rapp cfc32b0229 improvement dockerfile multistage and production 2023-12-07 05:30:05 +01:00
Robert Rapp 63f795d4c9 Dockerfile aktualisiert 2023-12-07 03:11:46 +00:00
Robert Rapp 771a847148 Changed the path of octave.js to public subfolder called script 2023-12-07 01:42:46 +01:00
Robert Rapp bc54b1a089 updated readme and have a running version 2023-12-06 23:00:21 +01:00
102 changed files with 5542 additions and 20472 deletions

BIN
.DS_Store vendored Normal file → Executable file

Binary file not shown.

49
.dockerignore Normal file
View File

@ -0,0 +1,49 @@
# Dependency directories
node_modules
npm-debug.log
# Build directory
/dist
# Various log files
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Editor directories and files
.editorconfig
.idea
*.suo
*.ntvs*
*.njsproj
*.sln
.vs/
.vscode/
# Operating system files
.DS_Store
Thumbs.db
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional gitignore file
.gitignore
# Optional markdown files
*.md
# Optional configuration files
*.env
*.yml
# Other unnecessary files
*.tar.gz
*.zip
*.tgz
*.gzip

41
.gitignore vendored Normal file → Executable file
View File

@ -1,8 +1,35 @@
node_modules # Dependency directories
*.log* node_modules/
.nuxt yarn.lock
.nitro package-lock.json
.cache
.output # Nuxt build output
.nuxt/
dist/
.nuxt-build/
.output/
# Production build files
build/
# Generated files
.nuxtignore
nuxt.config.js
nuxt.config.ts
# Dotfiles and directories
.env .env
dist .env.*
!.env.example
.DS_Store
.gitattributes
.editorconfig
.vscode/
# Log files
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Local development
local/

0
.idea/.gitignore vendored Normal file → Executable file
View File

0
.idea/mindboostnx.iml Normal file → Executable file
View File

0
.idea/modules.xml Normal file → Executable file
View File

0
.idea/php.xml Normal file → Executable file
View File

0
.idea/vcs.xml Normal file → Executable file
View File

0
.npmrc Normal file → Executable file
View File

50
Dockerfile Normal file
View File

@ -0,0 +1,50 @@
# Multi-stage build: full toolchain in the builder, slim runtime image.

# Stage 1: Build
FROM node:18.14.2 as builder
WORKDIR /app

# Install ALL dependencies, including devDependencies: `nuxt build` needs
# build-time tooling, so the previous `npm install --only=production`
# broke the build stage.
COPY package*.json ./
RUN npm install

# Copy the application source and build it.
COPY . .
RUN npx nuxt build

# Stage 2: Runtime
FROM node:18.14.2-slim as run
WORKDIR /app

# Copy the manifest, the installed modules and the Nuxt build output
# from the builder stage.
COPY --from=builder /app/package*.json ./
COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/.output ./.output

# Drop devDependencies from the runtime image. `--omit=dev` replaces the
# deprecated `--production` flag on npm >= 8 (bundled with Node 18).
RUN npm prune --omit=dev

EXPOSE 3000
CMD [ "npm", "run", "start" ]

59
README.md Normal file → Executable file
View File

@ -1,6 +1,63 @@
pro Messpunkt
125ms
LAF90
LAF10
TargetGain
CurrentGain
MicrofonSignal clear
MicrofonSignal gedämpft (A-weighted)
[time: 125, values: [35,50,50,40]]
# Nuxt 3 Minimal Starter # Nuxt 3 Minimal Starter
Look at the [Nuxt 3 documentation](https://nuxt.com/docs/getting-started/introduction) to learn more. Look at the [Nuxt 3 documentation](https://nuxt.com/docs/getting-started/introduction) to learn more.
## Preparation
To run the project you need Node 19 installed. Because the current Node release is newer than 19, you cannot simply start the application with a Node version freshly downloaded and installed on your machine. In Docker this is handled by the Docker script, but to run tests on your application you need to install the Node Version Manager (nvm).
1. Install nvm using your terminal:
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh | bash
2. Make nvm available in your terminal. Note: after installing a program like nvm you cannot always access it via the terminal right away, because it is not yet registered in your terminal's list of commands. You need to register it manually. Just paste the following command and hit enter.
export NVM_DIR="$([ -z "${XDG_CONFIG_HOME-}" ] && printf %s "${HOME}/.nvm" || printf %s "${XDG_CONFIG_HOME}/nvm")"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
3. You have now nvm available in your terminal so you will install node with version 19 by the command
nvm install 19
4. Now you set the node version of your current terminal to 19. This is necessary every time you run a new terminal, because the default node version is the newest. Just type
nvm use 19
## Setup ## Setup
@ -40,3 +97,5 @@ npm run preview
``` ```
Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information. Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information.

0
assets/animation.json Normal file → Executable file
View File

View File

@ -0,0 +1,67 @@
/**
 * AudioWorklet processor intended to bandpass-filter its input and report
 * level statistics (RMS, per-sample dB values, 10th/90th percentiles) to
 * the main thread through the processor's message port.
 */
class BandpassProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    // Floor added to the amplitude before log10 so silent samples do not
    // yield -Infinity. FIX: the original read `this.minAmplitude` in
    // convertToDB() without ever defining it, so every dB value was NaN.
    this.minAmplitude = 1e-8;
  }

  /**
   * Called by the rendering thread once per 128-frame quantum.
   * @param {Float32Array[][]} inputs  - inputs[0] holds one Float32Array per channel.
   * @param {Float32Array[][]} outputs - same layout as inputs.
   * @param {Object} parameters - automation values for this quantum.
   *   NOTE(review): AudioWorklet parameters arrive as Float32Arrays
   *   (1 or 128 values), not scalars — confirm before using them in filter math.
   * @returns {boolean} true to keep the processor alive.
   */
  process(inputs, outputs, parameters) {
    const input = inputs[0];
    const output = outputs[0];
    const frequency = parameters.frequency;
    const Q = parameters.Q;
    for (let channel = 0; channel < input.length; channel++) {
      const inputChannel = input[channel];
      const outputChannel = output[channel];
      for (let i = 0; i < inputChannel.length; i++) {
        // Apply bandpass filter to inputChannel[i] and store the result in
        // outputChannel[i] using the provided frequency and Q parameters.
        // (Filter math is still TODO in the original; output is passed through.)
      }
    }
    // Calculate the RMS value of the output audio data.
    const rms = this.calculateRMS(output);
    // Per-sample dB values across all output channels.
    const dbValues = this.convertToDB(output);
    // 10th and 90th percentile dB values.
    const percentile10 = this.calculatePercentile(dbValues, 10);
    const percentile90 = this.calculatePercentile(dbValues, 90);
    // Send the processed data to the main thread.
    this.port.postMessage({ rms, dbValues, percentile10, percentile90 });
    return true;
  }

  /**
   * Root-mean-square over every sample of every channel.
   * @param {Float32Array[]} data - one Float32Array per channel.
   * @returns {number} linear RMS amplitude.
   */
  calculateRMS(data) {
    let sumOfSquares = 0;
    for (let channel = 0; channel < data.length; channel++) {
      const channelData = data[channel];
      for (let i = 0; i < channelData.length; i++) {
        sumOfSquares += channelData[i] * channelData[i];
      }
    }
    const meanSquare = sumOfSquares / (data.length * data[0].length);
    const rms = Math.sqrt(meanSquare);
    return rms;
  }

  /**
   * Nearest-rank percentile of a numeric array (does not mutate input).
   * @param {number[]} data
   * @param {number} percentile - 0..100.
   * @returns {number} the value at that percentile.
   */
  calculatePercentile(data, percentile) {
    const sortedData = data.slice().sort((a, b) => a - b);
    // FIX: clamp so percentile === 100 cannot index one past the end.
    const index = Math.min(
      Math.floor((percentile / 100) * sortedData.length),
      sortedData.length - 1
    );
    return sortedData[index];
  }

  /**
   * Convert every sample of every channel to dB (20*log10(|x| + floor)).
   * @param {Float32Array[]} data - one Float32Array per channel.
   * @returns {number[]} flat list of dB values.
   */
  convertToDB(data) {
    const dbValues = [];
    for (let channel = 0; channel < data.length; channel++) {
      const channelData = data[channel];
      for (let i = 0; i < channelData.length; i++) {
        const amplitude = Math.abs(channelData[i]);
        const db = 20 * Math.log10(amplitude + this.minAmplitude);
        dbValues.push(db);
      }
    }
    return dbValues;
  }
}

// FIX: registerProcessor() returns undefined, so `export default
// registerProcessor(...)` exported nothing useful. Worklet modules are
// loaded via audioWorklet.addModule() and need no export; this also
// matches the style of the octave.js processor elsewhere in the repo.
registerProcessor('bandpass-processor', BandpassProcessor);

0
assets/css/style.css Normal file → Executable file
View File

0
assets/css/tailwind.css Normal file → Executable file
View File

0
assets/image/AirPods Pro Max (1).png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 562 B

After

Width:  |  Height:  |  Size: 562 B

0
assets/image/AirPods Pro Max.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 864 B

After

Width:  |  Height:  |  Size: 864 B

0
assets/image/AirPodsPro.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 1.1 KiB

After

Width:  |  Height:  |  Size: 1.1 KiB

0
assets/image/AirPodsProMax.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 2.3 KiB

After

Width:  |  Height:  |  Size: 2.3 KiB

0
assets/image/Audio Wave.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 342 B

After

Width:  |  Height:  |  Size: 342 B

0
assets/image/AudioWave1.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 475 B

After

Width:  |  Height:  |  Size: 475 B

0
assets/image/Cloud software.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 28 KiB

After

Width:  |  Height:  |  Size: 28 KiB

0
assets/image/Cloudsoftware.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 28 KiB

After

Width:  |  Height:  |  Size: 28 KiB

0
assets/image/Delivery.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 18 KiB

0
assets/image/Frame19439.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 12 KiB

0
assets/image/Logo.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 5.2 KiB

After

Width:  |  Height:  |  Size: 5.2 KiB

0
assets/image/Logocopy.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 1.6 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

0
assets/image/Sounscape.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 457 B

After

Width:  |  Height:  |  Size: 457 B

0
assets/image/Start-upguyflyingwithjetpack.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 21 KiB

0
assets/image/Vector (1).png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 768 B

After

Width:  |  Height:  |  Size: 768 B

0
assets/image/Vector (2).png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 333 B

After

Width:  |  Height:  |  Size: 333 B

0
assets/image/Vector (3).png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 333 B

After

Width:  |  Height:  |  Size: 333 B

0
assets/image/Vector (4).png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 503 B

After

Width:  |  Height:  |  Size: 503 B

0
assets/image/Vector (5).png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 496 B

After

Width:  |  Height:  |  Size: 496 B

0
assets/image/Vector.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 530 B

After

Width:  |  Height:  |  Size: 530 B

0
assets/image/Vector121.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 723 B

After

Width:  |  Height:  |  Size: 723 B

0
assets/image/Vector123.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 518 B

After

Width:  |  Height:  |  Size: 518 B

0
assets/image/Vector7.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 370 B

After

Width:  |  Height:  |  Size: 370 B

0
assets/image/Vector8.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 668 B

After

Width:  |  Height:  |  Size: 668 B

0
assets/image/airbods.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 569 B

After

Width:  |  Height:  |  Size: 569 B

0
assets/image/audiowave2.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 342 B

After

Width:  |  Height:  |  Size: 342 B

0
assets/image/headphone.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 697 B

After

Width:  |  Height:  |  Size: 697 B

0
assets/image/login2.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 50 KiB

After

Width:  |  Height:  |  Size: 50 KiB

0
assets/image/login3.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 52 KiB

After

Width:  |  Height:  |  Size: 52 KiB

0
assets/image/login4.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 53 KiB

After

Width:  |  Height:  |  Size: 53 KiB

0
assets/image/loginimg.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 50 KiB

After

Width:  |  Height:  |  Size: 50 KiB

0
assets/image/musicfile.png Normal file → Executable file
View File

Before

Width:  |  Height:  |  Size: 53 KiB

After

Width:  |  Height:  |  Size: 53 KiB

0
assets/masking1.aac Normal file → Executable file
View File

View File

@ -1,153 +0,0 @@
/**
 * AudioWorklet processor meant to split audio into 9 octave bands, A-weight
 * the microphone channel (identifier 0), and accumulate LAF10%-90%
 * statistics per band and per source.
 *
 * NOTE(review): this class references `audioContext`, `BiquadFilterNode`,
 * `IIRFilterNode` and `createAnalyser()`, none of which are available inside
 * an AudioWorkletGlobalScope — node construction must happen on the main
 * thread. Flagged but left in place; this revision only repairs the two
 * syntax errors in the original.
 */
class OctaveBandProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    // Define center frequencies for 9 octave bands (Hz).
    this.centerFrequencies = [63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000];
    this.filters = [];
    this.lastUpdateTimestamp = 0;
    this.updateInterval = 0.125; // Update every 0.125 seconds (125 ms blocks)
    // Create an A-weighting filter for specific frequencies.
    this.createAWeightingFilter();
    // Create bandpass filters for each center frequency.
    this.centerFrequencies.forEach(frequency => {
      const filter = new BiquadFilterNode(audioContext, {
        type: 'bandpass',
        frequency: frequency,
        Q: 1.41, // Set the desired Q value
      });
      this.filters.push(filter);
    });
    // Set up analyzers for calculating percentiles.
    this.setupAnalyzers();
  }

  // Build a custom IIR filter approximating A-weighting.
  createAWeightingFilter() {
    // Use the provided A-weighting filter coefficients.
    // NOTE(review): IIRFilterNode expects linear feedforward/feedback
    // coefficients; these look like per-band dB gains — confirm.
    const aWeightingCoefficients = [0, -0.051, -0.142, -0.245, -0.383, -0.65, -1.293, -2.594, -6.554]; //David
    this.aWeightingFilter = new IIRFilterNode(audioContext, {
      feedforward: aWeightingCoefficients,
      feedback: [1],
    });
  }

  // One AnalyserNode per (band, source) pair; source ids run 0-4 and
  // id 0 is the microphone signal.
  setupAnalyzers() {
    this.analyzers = [];
    this.centerFrequencies.forEach(frequency => {
      this.analyzers.push([]);
      for (let i = 0; i < 5; i++) { // Unique identifiers from 0 to 4
        const analyzer = audioContext.createAnalyser();
        analyzer.fftSize = 2048;
        // Only the microphone audio (identifier 0) passes through the
        // A-weighting filter.
        if (i === 0) {
          this.aWeightingFilter.connect(analyzer);
        }
        this.analyzers[this.analyzers.length - 1].push(analyzer);
      }
    }); // FIX: original closed the forEach callback with `}` instead of `});`
  }

  /**
   * Per-quantum processing: band-filter each channel, A-weight channel 0,
   * and refresh percentile statistics every `updateInterval` seconds.
   * NOTE(review): BiquadFilterNode/IIRFilterNode have no `.process()`
   * method — this call pattern cannot work as written; confirm intent.
   */
  process(inputs, outputs) {
    const numOutputChannels = outputs.length;
    for (let i = 0; i < numOutputChannels; i++) {
      const outputChannel = outputs[i][0];
      const inputChannel = inputs[i][0];
      // Apply the band filter to the input channel.
      const filteredSignal = this.filters[i].process(inputChannel);
      // Apply A-weighting only to the microphone signal (channel 0).
      if (i === 0) {
        const aWeightedSignal = this.aWeightingFilter.process(filteredSignal);
        outputChannel.set(aWeightedSignal);
      } else {
        // For other channels, pass the signal without A-weighting.
        outputChannel.set(filteredSignal);
      }
      // Check if it's time to update percentiles.
      const currentTime = this.currentTime;
      if (currentTime - this.lastUpdateTimestamp >= this.updateInterval) {
        this.updatePercentiles(i);
        this.lastUpdateTimestamp = currentTime;
      }
    }
    return true;
  }

  /**
   * RMS level of a signal in dB.
   * NOTE(review): `copyFromChannel` is an AudioBuffer method, not a
   * Float32Array method — confirm what `signal` actually is here.
   */
  calculateRMSLevel(signal, channelIndex) {
    const data = new Float32Array(signal.length);
    signal.copyFromChannel(data, 0);
    const sum = data.reduce((acc, val) => acc + val * val, 0);
    const rmsLevel = Math.sqrt(sum / data.length);
    const dBLevel = 20 * Math.log10(rmsLevel); // Convert to dB
    return dBLevel;
  }

  // Refresh 10th/90th percentile spread for every band of one channel.
  updatePercentiles(channelIndex) {
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const analyzer = this.analyzers[i][channelIndex];
      const levelData = new Float32Array(analyzer.frequencyBinCount);
      analyzer.getFloatFrequencyData(levelData);
      // Calculate percentiles for each octave band and each channel.
      const percentile10 = this.calculatePercentile(levelData, 10);
      const percentile90 = this.calculatePercentile(levelData, 90);
      const percentileDiff = percentile10 - percentile90;
      // Store the percentile difference for each channel and each octave band.
      // You can use suitable data structures to store these values for
      // future comparisons.
    }
  }

  // Nearest-rank percentile of a numeric array (non-mutating).
  calculatePercentile(data, percentile) {
    const sortedData = data.slice().sort((a, b) => a - b);
    const index = Math.floor((percentile / 100) * sortedData.length);
    return sortedData[index];
  }

  /**
   * Combine mic (id 0), audioFile1 (id 3) and audioFile2 (id 4) LAF10%-90%
   * spreads over all bands.
   * NOTE(review): calculatePercentile is handed AnalyserNode objects here,
   * not sample arrays — confirm the intended data flow.
   */
  combineAndCalculate() {
    let LAF10_90_total = 0; // Initialize the total LAF10%-90%
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const micAnalyzer = this.analyzers[i][0]; // Microphone audio (identifier 0)
      const audioFile1Analyzer = this.analyzers[i][3]; // audioFile1 (identifier 3)
      const audioFile2Analyzer = this.analyzers[i][4]; // audioFile2 (identifier 4)
      // Calculate percentiles for the microphone audio.
      const micPercentile10 = this.calculatePercentile(micAnalyzer, 10);
      const micPercentile90 = this.calculatePercentile(micAnalyzer, 90);
      // Calculate percentiles for audioFile1.
      const audioFile1Percentile10 = this.calculatePercentile(audioFile1Analyzer, 10);
      const audioFile1Percentile90 = this.calculatePercentile(audioFile1Analyzer, 90);
      // Calculate percentiles for audioFile2.
      const audioFile2Percentile10 = this.calculatePercentile(audioFile2Analyzer, 10);
      // FIX: original read `this calculatePercentile(...)` — missing `.`
      // was a syntax error.
      const audioFile2Percentile90 = this.calculatePercentile(audioFile2Analyzer, 90);
      // LAF10%-90% for each source separately.
      const micLAF10_90 = micPercentile10 - micPercentile90;
      const audioFile1LAF10_90 = audioFile1Percentile10 - audioFile1Percentile90;
      const audioFile2LAF10_90 = audioFile2Percentile10 - audioFile2Percentile90;
      // Combined LAF10%-90% across the three sources for this band.
      const combinedLAF10_90 = micLAF10_90 + audioFile1LAF10_90 + audioFile2LAF10_90;
      // Add the combined LAF10%-90% to the total.
      LAF10_90_total += combinedLAF10_90;
    }
    return LAF10_90_total;
  }
}

registerProcessor('octave', OctaveBandProcessor);

View File

@ -1,28 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCxQKVsWo37+7Ut
gaYs8JN0Gxx8QrxA4+NqOZnwgu5dJnwEuWWJGiT/oTuBSiI+x4HeoFWlTFoFrc5v
XL7K1AUdS0AL2XhXha2Bsd6E5xyJKf8wkBFGB9ENBiNI7PvoOATsA71SxZHf4uNx
DF7Gg6cYC8MquhWWCn+bLPJzVnEe7fHss6tBcLufrAOXPFbCeUp/VVVPkF0bBwh+
jREdvhvArj03FVBlgsUK3EXH5RwJ2pZaKg6ZNdyb5fHURCOIYjv5UJxdsdKyvKbt
rY12i/ebB5P7pquIIykmo4FsP2aLvfUf2WDcEVaBum7DD3kUqjBE/54i/A3hVhVD
kfZJpOCrAgMBAAECggEABaNekfr7tDbfZ43NslCuulZhk4ICf62gDCyZmHNHdC3L
2RpWCR/n2GDtDyGxMVB0303KLeunCR/sRpr9tBXPtUPD3WpXZtVNpsyK1Bqz3isT
45wQaJjJxOYSlR0wrdaGVY4i5XpW7nciMIaXtOQbmn1HAev+rH982XvWdHhTpvlf
JtSzqBB20GJLUKhmDx/wyAJ8m2RXBHCnztRqDqqfJMfavbxIahwhXAmTANcMOErl
ugElPPei5oiH6Ecv+SwljptG1C7q++pYAsjkAuD2t8rTRNQWRhBYT3oumGBmEjh5
3jneSBdPGIdv1bnjsmuPl8sdPzwXc6y/EQ9Iiqk68QKBgQDuFGSolBzLMZNITLs/
rf3HWNs8AxGol7XLg8dRpPGuflGgB6pGOsYsfMQv2YXT3FndAeMCMR0VB3kuYo77
Q20gj4c2MDdbjPUGZi76wlLwYw36H46TsBiuoOZfk46FbZlaaFYydLHZPrWbnDzn
KgFXjYnojIFn86AO+u3Z2Mad0QKBgQC+mCmDZzORu5AowhIGAe42gEq1e9nIh9BB
iupo4I/+/EtSi4+m0KoDOUoR99bbs3md25rC6+6oLI9FAc4bp8pKd9443kr7SUy6
YYr70QFr9ubZOYMabjkUhXw/kzhqjfU6B1ce1JPS/3/mEdAxjHAppN+jqxp8+efT
lwryLV9JuwKBgD21pX2YnnoAiJd15BcWZzAzlOfSN9KGOEXfC1vbMBW1gjzDn9wC
QfmoAUYR0MFgXR6O7aNUzZ/0xvFB9KOmD3QgH8Do8IBXYe9drxGqKstKMYZChbc8
Lrmc1PmzCn8FMHmhj64WVz7jJTmHXrXgSmbCNhvDx4sFN9iKK/qKWLjxAoGBAJGr
5v7B4A5glvwv6GqjCxio0XEIahn0g920eRkTmbs/xaofdPoAvhCcttoo3RUqhad7
czvL66qp9A7AJHHKurhUCYrZi+Gn0ncZmoqA4l9MZIBejq+i0wm2RJKqyRHX0jg5
6AJuY1V/ZpfHwaI9PnT1yOBlJGek8eUsqncS6qOFAoGBAIqGzvI43w51hdT/tvS9
kUhoef++BuT7Rod6rK40l4ti3YfyABN5jFtPG/Bsz7nn7gb3YT/Q2RKzuEQFpLIK
JzMu6ElRSLyMTNbtSntq9Qw/hmpcSmzj0iao3kp5Arr9mkhua6ZbUUM0xDYMt4eN
EWsxmbuQkcHzvSBHV9w3NBak
-----END PRIVATE KEY-----

View File

@ -1,23 +0,0 @@
-----BEGIN CERTIFICATE-----
MIID3zCCAscCFFB62ppfH5aXwlofGjPHdDOoRDZmMA0GCSqGSIb3DQEBCwUAMIGr
MQswCQYDVQQGEwJERTEbMBkGA1UECAwSQmFkZW4tV3VlcnR0ZW1iZXJnMRIwEAYD
VQQHDAlTdHV0dGdhcnQxEjAQBgNVBAoMCU1pbmRib29zdDEWMBQGA1UECwwNSVQg
RGVwYXJ0bWVudDEUMBIGA1UEAwwLUm9iZXJ0IFJhcHAxKTAnBgkqhkiG9w0BCQEW
GnJvYmVydC5yYXBwQG1pbmRib29zdC50ZWFtMB4XDTIzMTEwMzE1MzkzM1oXDTI0
MTEwMjE1MzkzM1owgasxCzAJBgNVBAYTAkRFMRswGQYDVQQIDBJCYWRlbi1XdWVy
dHRlbWJlcmcxEjAQBgNVBAcMCVN0dXR0Z2FydDESMBAGA1UECgwJTWluZGJvb3N0
MRYwFAYDVQQLDA1JVCBEZXBhcnRtZW50MRQwEgYDVQQDDAtSb2JlcnQgUmFwcDEp
MCcGCSqGSIb3DQEJARYacm9iZXJ0LnJhcHBAbWluZGJvb3N0LnRlYW0wggEiMA0G
CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCxQKVsWo37+7UtgaYs8JN0Gxx8QrxA
4+NqOZnwgu5dJnwEuWWJGiT/oTuBSiI+x4HeoFWlTFoFrc5vXL7K1AUdS0AL2XhX
ha2Bsd6E5xyJKf8wkBFGB9ENBiNI7PvoOATsA71SxZHf4uNxDF7Gg6cYC8MquhWW
Cn+bLPJzVnEe7fHss6tBcLufrAOXPFbCeUp/VVVPkF0bBwh+jREdvhvArj03FVBl
gsUK3EXH5RwJ2pZaKg6ZNdyb5fHURCOIYjv5UJxdsdKyvKbtrY12i/ebB5P7pquI
Iykmo4FsP2aLvfUf2WDcEVaBum7DD3kUqjBE/54i/A3hVhVDkfZJpOCrAgMBAAEw
DQYJKoZIhvcNAQELBQADggEBAJ17zv0dhYSAFD1RBMBh6bVvsyzPFLe7P79dmo/I
DbZRi91rIfZMZa6vbnwcYh6tyi9bGtm/tWaCglnJ6ENjSgAupZUGWQ4TTGS4C0sj
rRjygWtLYchQVOKGeTbMqG/75GDciwKXhmNNflGWkBSbqdBPubBm+9lr3C2HArYB
+FvdIfWxLcpgZANZ4h4lThin40kUJ/3cD8d7RCqk+KhWZ09tMyMojBlNaRv0ejvS
I7Yb/Rvnf5gWK3UCmHlcSrX0zO0ulSL2Cgi9EswLnHNiFRYhDoCfSRfY1NlBUhF6
xpCSStKIS3CzZpDR4F1U32VqOQx3bfl5gb/cnMAu0FEQtgM=
-----END CERTIFICATE-----

112
components/BandProcessor.vue Executable file
View File

@ -0,0 +1,112 @@
<template>
  <div>
    <button @click="startMicrophone">Start Microphone</button>
    <button @click="stopMicrophone">Stop Microphone</button>
    <div v-if="errorMessage">{{ errorMessage }}</div>
  </div>
</template>
<script>
// Minimal UI around the "octave" AudioWorklet fed by the user's microphone:
// start/stop buttons plus an error line for permission/setup failures.
export default {
  mounted() {
    console.log("mounted Bandprocessor");
    //this.initAudioContext();
  },
  data() {
    return {
      audioContext: null,        // AudioContext created on start
      microphoneStream: null,    // MediaStream from getUserMedia
      audioProcessorNode: null,  // AudioWorkletNode running /scripts/octave.js
      errorMessage: null,        // shown in the template when non-null
    };
  },
  methods: {
    // Standalone module-load probe; kept because mounted() references it
    // (commented out). Not part of the startMicrophone flow.
    async initAudioContext() {
      try {
        const audioContext = new AudioContext();
        console.log("Current URL:", window.location.href);
        await audioContext.audioWorklet.addModule("/scripts/octave.js");
        // Now the 'bandpass-processor' is registered and can be used
      } catch (e) {
        console.error("Error loading audio worklet:", e);
      }
    },
    // Full start sequence: context -> mic permission -> worklet graph.
    async startMicrophone() {
      console.log("Start initialization of AudioContext");
      try {
        this.initializeAudioContext();
        await this.requestMicrophoneAccess();
        await this.setupAudioProcessing();
        console.log("AudioContext successfully initialized");
      } catch (error) {
        console.error("Error starting microphone:", error.message);
        this.errorMessage = error.message;
      }
    },
    stopMicrophone() {
      this.cleanup();
    },
    initializeAudioContext() {
      this.audioContext = new AudioContext();
    },
    // Ask for microphone permission; sets a user-facing message and rethrows
    // on denial so startMicrophone() aborts.
    async requestMicrophoneAccess() {
      try {
        this.microphoneStream = await navigator.mediaDevices.getUserMedia({
          audio: true,
        });
      } catch (error) {
        console.error("Error accessing microphone:", error.message);
        this.errorMessage =
          "Microphone access denied. Please grant permission in your browser settings.";
        throw new Error("Microphone access denied.");
      }
      if (
        !this.microphoneStream ||
        !(this.microphoneStream instanceof MediaStream)
      ) {
        throw new Error("Microphone stream is not available.");
      }
    },
    // Wire mic -> worklet ('octave') -> speakers.
    async setupAudioProcessing() {
      try {
        const microphoneSource = this.audioContext.createMediaStreamSource(
          this.microphoneStream
        );
        await this.audioContext.audioWorklet.addModule("/scripts/octave.js");
        this.audioProcessorNode = new AudioWorkletNode(this.audioContext, 'octave');
        microphoneSource.connect(this.audioProcessorNode);
        this.audioProcessorNode.connect(this.audioContext.destination);
      } catch (error) {
        console.error("Error setting up audio processing:", error.message);
        this.errorMessage =
          "Error setting up audio processing. Please check your microphone and try again.";
        throw new Error("Audio processing setup failed.");
      }
    },
    // Tear down the audio graph and release the microphone.
    cleanup() {
      if (this.audioProcessorNode) {
        this.audioProcessorNode.disconnect();
        this.audioProcessorNode.port.postMessage({ command: "stop" });
      }
      // FIX: stop the capture tracks so the browser releases the microphone
      // (previously the mic indicator stayed on after "Stop Microphone").
      if (this.microphoneStream) {
        this.microphoneStream.getTracks().forEach((track) => track.stop());
      }
      if (this.audioContext) {
        this.audioContext.close();
      }
      this.resetVariables();
    },
    resetVariables() {
      this.audioContext = null;
      this.microphoneStream = null;
      this.audioProcessorNode = null;
      this.errorMessage = null;
    },
  },
  // FIX: Vue 3 renamed beforeDestroy -> beforeUnmount; the old hook never
  // fires in a Nuxt 3 app, so cleanup() was never run on component teardown.
  beforeUnmount() {
    this.cleanup();
  },
};
</script>

0
components/ProductCard.vue Normal file → Executable file
View File

0
components/VueMeter.vue Normal file → Executable file
View File

20
components/homebar.vue Normal file → Executable file
View File

@ -137,7 +137,7 @@
<span><i class="fa-solid fs-3 fa-arrow-left-long" style="cursor: pointer" data-bs-dismiss="modal"></i></span> <span><i class="fa-solid fs-3 fa-arrow-left-long" style="cursor: pointer" data-bs-dismiss="modal"></i></span>
</div> </div>
<div class="col-8"> <div class="col-8">
<h4 class="text-center fw-bolder">Adaptive Soundscape</h4> <h4 class="text-center fw-bolder">{{t('Adaptive Soundscape')}} </h4>
</div> </div>
<div class="col-2"> <div class="col-2">
<div class="form-check form-switch float-end"> <div class="form-check form-switch float-end">
@ -148,9 +148,9 @@
</div> </div>
<div class="row px-2 pt-4"> <div class="row px-2 pt-4">
<div class="col-12 ps-3 fs-5" style="line-height: 25px"> <div class="col-12 ps-3 fs-5" style="line-height: 25px">
<p class="p-0 m-0"> The Mindboost sounscape responds to the acousitcs in your room.</p> <p class="p-0 m-0"> {{t('The Mindboost sounscape responds to the acousitcs in your room.')}}</p>
<p class="p-0 m-0"></p> <p class="p-0 m-0"></p>
<p class="">Currently, your room has a:</p> <p class="">{{t('Currently, your room has a:')}}</p>
</div> </div>
</div> </div>
<div class="row pt-4 ps-3"> <div class="row pt-4 ps-3">
@ -162,8 +162,8 @@
</span> </span>
</div> </div>
<div class="col-11 col-md-10 ps-3"> <div class="col-11 col-md-10 ps-3">
<h5>Noisy Environment</h5> <h5>{{t('Noisy Environment')}}</h5>
<p>The background noise at your workplace is disturbing. Your concentration is severely impaired. Mindboost protects you from the disturbing background noise.</p> <p>{{ t('The background noise at your workplace is disturbing. Your concentration is severely impaired. Mindboost protects you from the disturbing background noise.')}}</p>
</div> </div>
</div> </div>
@ -177,8 +177,8 @@
</span> </span>
</div> </div>
<div class="col-11 col-md-10 ps-3"> <div class="col-11 col-md-10 ps-3">
<h5>Medium-noise Environment</h5> <h5>{{t('Medium-noise Environment')}}</h5>
<p>The background noise at your workplace should be optimized. In the long term, it could disturb and have a negative impact on your health. Protect yourself with mindboost.</p> <p>{{t('The background noise at your workplace should be optimized. In the long term, it could disturb and have a negative impact on your health. Protect yourself with mindboost.')}}</p>
</div> </div>
</div> </div>
@ -191,9 +191,9 @@
</span> </span>
</div> </div>
<div class="col-11 col-md-10 ps-3"> <div class="col-11 col-md-10 ps-3">
<h5>Good Environment</h5> <h5>{{ t('Good Environment')}}</h5>
<p> <p>
The background noise at your workplace provides a longterm healthy basis for concentrated work. With Mindboost you make sure that even sudden disturbances do not distract you. {{t('The background noise at your workplace provides a longterm healthy basis for concentrated work. With Mindboost you make sure that even sudden disturbances do not distract you.')}}
</p> </p>
</div> </div>
</div> </div>
@ -345,4 +345,4 @@ export default {
width: 87%; width: 87%;
margin: auto; margin: auto;
} }
</style> </style>

0
components/settingbar.vue Normal file → Executable file
View File

0
components/toplogobar.vue Normal file → Executable file
View File

0
layouts/default.vue Normal file → Executable file
View File

39
nuxt.config.ts Normal file → Executable file
View File

@ -1,29 +1,9 @@
// https://nuxt.com/docs/api/configuration/nuxt-config // https://nuxt.com/docs/api/configuration/nuxt-config
// @ts-ignore // @ts-ignore
import path from 'path'
import fs from 'fs'
// import { defineNuxtConfig } from 'nuxt3'
export default defineNuxtConfig({ export default defineNuxtConfig({
alias: {
'@': path.resolve(__dirname, ''),
},
devServer: {
https: {
// key: fs.readFileSync(path.resolve(__dirname, '.nuxt/cert/privater-schluessel.key')),
// cert: fs.readFileSync(path.resolve(__dirname, '.nuxt/cert/zertifikat.crt'))
key:'cert/privater-schluessel.key',
cert:'cert/zertifikat.crt'
}
},
ssr:false, ssr:false,
plugins:[ plugins:[
{src: '~/plugins/AudioVisual.client', mode: 'client'}, {src: '~/plugins/AudioVisual.client', mode: 'client'},
@ -33,7 +13,7 @@ export default defineNuxtConfig({
{src: '~/plugins/axios'}, {src: '~/plugins/axios'},
], ],
modules: [ modules: [
// ... // ...
@ -55,7 +35,7 @@ export default defineNuxtConfig({
app: { app: {
pageTransition: { pageTransition: {
name: 'slide-right', name: 'slide-right',
mode: 'out-in' mode: 'out-in'
}, },
}, },
// @ts-ignore // @ts-ignore
@ -305,6 +285,13 @@ export default defineNuxtConfig({
"All soundscapes in Mindboost have been tested and optimized in listening tests in cooperation with the Fraunhofer IBP. So you can be sure that Mindboost supports you optimally with your concentration.":"All soundscapes in Mindboost have been tested and optimized in listening tests in cooperation with the Fraunhofer IBP. So you can be sure that Mindboost supports you optimally with your concentration.", "All soundscapes in Mindboost have been tested and optimized in listening tests in cooperation with the Fraunhofer IBP. So you can be sure that Mindboost supports you optimally with your concentration.":"All soundscapes in Mindboost have been tested and optimized in listening tests in cooperation with the Fraunhofer IBP. So you can be sure that Mindboost supports you optimally with your concentration.",
'Audio_Output':'Audio Output', 'Audio_Output':'Audio Output',
'Audio_Input':'Audio Input', 'Audio_Input':'Audio Input',
'Language':'Language',
'How is your audio hardware connected?':'How is your audio hardware connected?',
'select laptop or mobile device microphone':'select laptop or mobile device microphone',
'select headphones or headphone output':'select headphones or headphone output',
'Output device:':'Output device',
'Input device:':'Input device:',
'As input, please select the microphone of your laptop or mobile device not of your headphones.':'As input, please select the microphone of your laptop or mobile device not of your headphones.',
}, },
de: { de: {
"welcome": "Willkommen", "welcome": "Willkommen",
@ -511,10 +498,16 @@ export default defineNuxtConfig({
"gemäß Art. 77 DSGVO bei einer Aufsichtsbehörde zu beschweren. In der Regel können Sie sich hierfür an die Aufsichtsbehörde an Ihrem üblichen Aufenthaltsort oder Arbeitsplatz oder unserem Firmensitz wenden.":"gemäß Art. 77 DSGVO bei einer Aufsichtsbehörde zu beschweren. In der Regel können Sie sich hierfür an die Aufsichtsbehörde an Ihrem üblichen Aufenthaltsort oder Arbeitsplatz oder unserem Firmensitz wenden.", "gemäß Art. 77 DSGVO bei einer Aufsichtsbehörde zu beschweren. In der Regel können Sie sich hierfür an die Aufsichtsbehörde an Ihrem üblichen Aufenthaltsort oder Arbeitsplatz oder unserem Firmensitz wenden.":"gemäß Art. 77 DSGVO bei einer Aufsichtsbehörde zu beschweren. In der Regel können Sie sich hierfür an die Aufsichtsbehörde an Ihrem üblichen Aufenthaltsort oder Arbeitsplatz oder unserem Firmensitz wenden.",
'Audio_Output':'Audioausgang', 'Audio_Output':'Audioausgang',
'Audio_Input':'Audioeingang', 'Audio_Input':'Audioeingang',
'Language':'Sprache',
'How is your audio hardware connected?':'Wie ist Ihre Audio-Hardware angeschlossen?',
'select laptop or mobile device microphone':'Eingangsgerät:',
'select headphones or headphone output':'(Mikrofon von Laptop oder Mobilgerät auswählen)',
'Output device:':'Ausgangsgerät:',
'Input device:':'Eingangsgerät:',
'As input, please select the microphone of your laptop or mobile device not of your headphones.':'Als Eingang wählen Sie bitte das Mikrofon Ihres Laptops oder mobilen Geräts - nicht das Ihres Kopfhörers.',
} }
} }
} }
}, },
}) })

19148
package-lock.json generated

File diff suppressed because it is too large Load Diff

20
package.json Normal file → Executable file
View File

@ -1,32 +1,32 @@
{ {
"private": true, "private": true,
"scripts": { "scripts": {
"build": "nuxt build", "build": "npx nuxt build",
"dev": "nuxt dev", "dev": "npx nuxt dev",
"generate": "nuxt generate", "generate": "npx nuxt generate",
"preview": "nuxt preview", "preview": "npx nuxt preview",
"postinstall": "nuxt prepare" "postinstall": "npx nuxt prepare",
"start": "node .output/server/index.mjs"
}, },
"devDependencies": { "devDependencies": {
"@nuxt/vite-builder": "^3.0.0", "@nuxt/vite-builder": "^3.0.0",
"@nuxtjs/i18n": "^8.0.0-beta.10",
"@nuxtjs/tailwindcss": "^6.1.3", "@nuxtjs/tailwindcss": "^6.1.3",
"nuxt": "^3.8.0",
"nuxt-headlessui": "^1.0.4", "nuxt-headlessui": "^1.0.4",
"vue-stripe-js": "^1.0.1" "vue-stripe-js": "^1.0.1"
}, },
"dependencies": { "dependencies": {
"@heroicons/vue": "^2.0.13", "nuxt": "3.0.0",
"@nuxtjs/i18n": "^8.0.0-beta.10",
"nuxt-headlessui": "^1.0.4",
"@heroicons/vue": "^2.0.13",
"@incuca/vue3-toaster": "^1.1.1", "@incuca/vue3-toaster": "^1.1.1",
"@meforma/vue-toaster": "^1.3.0", "@meforma/vue-toaster": "^1.3.0",
"@pinia/nuxt": "^0.4.6", "@pinia/nuxt": "^0.4.6",
"@stripe/stripe-js": "^1.46.0", "@stripe/stripe-js": "^1.46.0",
"axios": "^1.3.2", "axios": "^1.3.2",
"bootstrap": "^5.2.3", "bootstrap": "^5.2.3",
"chart.js": "^4.4.0",
"pinia": "^2.0.28", "pinia": "^2.0.28",
"pinia-plugin-persistedstate": "^3.0.2", "pinia-plugin-persistedstate": "^3.0.2",
"standardized-audio-context": "^25.3.58",
"toastr": "^2.1.4", "toastr": "^2.1.4",
"vite": "^3.2.5", "vite": "^3.2.5",
"vue": "^3.2.26", "vue": "^3.2.26",

View File

@ -1,84 +1,89 @@
<template> <template>
<div> <div>
<button @click="startMicrophone">Start Microphone</button> <button @click="startMicrophone">Start Microphone</button>
<button @click="stopMicrophone">Stop Microphone</button> <button @click="stopMicrophone">Stop Microphone</button>
<div v-if="errorMessage">{{ errorMessage }}</div> <canvas ref="visualizationCanvas" width="800" height="200"></canvas>
</div> <div v-if="errorMessage">{{ errorMessage }}</div>
</template> </div>
</template>
<script>
export default { <script>
data() { export default {
return { data() {
audioContext: null, return {
microphoneStream: null, audioContext: null,
audioProcessorNode: null, microphoneStream: null,
errorMessage: null, audioProcessorNode: null,
}; errorMessage: null,
}, };
methods: {
async startMicrophone() {
try {
this.initializeAudioContext();
await this.requestMicrophoneAccess();
await this.setupAudioProcessing();
} catch (error) {
console.error('Error starting microphone:', error.message);
this.errorMessage = error.message;
}
}, },
stopMicrophone() { methods: {
async startMicrophone() {
try {
this.initializeAudioContext();
await this.requestMicrophoneAccess();
await this.setupAudioProcessing();
} catch (error) {
console.error('Error starting microphone:', error.message);
this.errorMessage = error.message;
}
finally{
console.log("Microphone started")
}
},
stopMicrophone() {
this.cleanup();
},
initializeAudioContext() {
this.audioContext = new window.AudioContext();
},
async requestMicrophoneAccess() {
try {
this.microphoneStream = await navigator.mediaDevices.getUserMedia({ audio: true });
} catch (error) {
console.error('Error accessing microphone:', error.message);
this.errorMessage = 'Microphone access denied. Please grant permission in your browser settings.';
throw new Error('Microphone access denied.');
}
if (!this.microphoneStream || !(this.microphoneStream instanceof MediaStream)) {
throw new Error('Microphone stream is not available.');
}
},
async setupAudioProcessing() {
try {
const microphoneSource = this.audioContext.createMediaStreamSource(this.microphoneStream);
await this.audioContext.audioWorklet.addModule('/scripts/octave2.js');
//this.audioProcessorNode = new AudioWorkletNode(this.audioContext, 'octave');
//microphoneSource.connect(this.audioProcessorNode);
//this.audioProcessorNode.connect(this.audioContext.destination);
} catch (error) {
console.error('Error setting up audio processing:', error.message);
this.errorMessage = 'Error setting up audio processing. Please check your microphone and try again.';
throw new Error('Audio processing setup failed.');
}
},
cleanup() {
if (this.audioContext) {
if (this.audioProcessorNode) {
this.audioProcessorNode.disconnect();
this.audioProcessorNode.port.postMessage({ command: 'stop' });
}
this.audioContext.close();
this.resetVariables();
}
},
resetVariables() {
this.audioContext = null;
this.microphoneStream = null;
this.audioProcessorNode = null;
this.errorMessage = null;
},
},
beforeDestroy() {
this.cleanup(); this.cleanup();
}, },
initializeAudioContext() { };
this.audioContext = new window.AudioContext(); </script>
},
async requestMicrophoneAccess() {
try {
this.microphoneStream = await navigator.mediaDevices.getUserMedia({ audio: true });
} catch (error) {
console.error('Error accessing microphone:', error.message);
this.errorMessage = 'Microphone access denied. Please grant permission in your browser settings.';
throw new Error('Microphone access denied.');
}
if (!this.microphoneStream || !(this.microphoneStream instanceof MediaStream)) {
throw new Error('Microphone stream is not available.');
}
},
async setupAudioProcessing() {
try {
const microphoneSource = this.audioContext.createMediaStreamSource(this.microphoneStream);
await this.audioContext.audioWorklet.addModule('@/plugins/octave.js');
//this.audioProcessorNode = new AudioWorkletNode(this.audioContext, 'octave');
//microphoneSource.connect(this.audioProcessorNode);
//this.audioProcessorNode.connect(this.audioContext.destination);
} catch (error) {
console.error('Error setting up audio processing:', error.message);
this.errorMessage = 'Error setting up audio processing. Please check your microphone and try again.';
throw new Error('Audio processing setup failed.');
}
},
cleanup() {
if (this.audioContext) {
if (this.audioProcessorNode) {
this.audioProcessorNode.disconnect();
this.audioProcessorNode.port.postMessage({ command: 'stop' });
}
this.audioContext.close();
this.resetVariables();
}
},
resetVariables() {
this.audioContext = null;
this.microphoneStream = null;
this.audioProcessorNode = null;
this.errorMessage = null;
},
},
beforeDestroy() {
this.cleanup();
},
};
</script>

0
pages/about.vue Normal file → Executable file
View File

0
pages/audiotest.vue Normal file → Executable file
View File

0
pages/auth/insitutionlogin.vue Normal file → Executable file
View File

9
pages/auth/login.vue Normal file → Executable file
View File

@ -6,6 +6,7 @@
poster="/images/poster.png" poster="/images/poster.png"
> >
<div class="container-fluid overflow-auto" > <div class="container-fluid overflow-auto" >
<div class="row"> <BandProcessor /> <ChannelStreamer /> </div>
<div class="row "> <div class="row ">
<div class="col-12 col-lg-4 bg-img d-none d-lg-block" style="background-image: url('/images/login.svg');background-size: cover;height: 100vh;" > <div class="col-12 col-lg-4 bg-img d-none d-lg-block" style="background-image: url('/images/login.svg');background-size: cover;height: 100vh;" >
</div> </div>
@ -68,7 +69,7 @@
</div> </div>
</div> </div>
</div> </div>
</video-background> </video-background>
</div> </div>
</template> </template>
@ -76,7 +77,10 @@
import backgroundImagePath from '~/assets/image/login4.png'; import backgroundImagePath from '~/assets/image/login4.png';
import {useUserStore} from '@/stores/user'; import {useUserStore} from '@/stores/user';
import {mapState,mapActions} from "pinia"; import {mapState,mapActions} from "pinia";
import BandProcessor from "@/components/BandProcessor";
import ChannelStreamer from "@/pages/ChannelStreamer";
export default { export default {
setup() { setup() {
const { t } = useI18n() const { t } = useI18n()
const localePath = useLocalePath() const localePath = useLocalePath()
@ -86,6 +90,9 @@ export default {
localePath, localePath,
} }
}, },
//Components BEGIN
components: {BandProcessor,ChannelStreamer},
//Components END
mounted() { mounted() {
// if (this.is_login){ // if (this.is_login){
// this.$router.push('/onboarding'); // this.$router.push('/onboarding');

0
pages/auth/signup.vue Normal file → Executable file
View File

0
pages/band.vue Normal file → Executable file
View File

0
pages/band_30.vue Normal file → Executable file
View File

0
pages/getstarted.vue Normal file → Executable file
View File

0
pages/index.vue Normal file → Executable file
View File

0
pages/index1/adaptivesoundscap.vue Normal file → Executable file
View File

86
pages/index1/index.vue Normal file → Executable file
View File

@ -20,90 +20,20 @@
</div> </div>
</div> </div>
</div> </div>
<canvas id="audioSignalChart" width="400" height="200"></canvas>
</div> </div>
</template> </template>
<script> <script>
// import BandpassProcessor from '~/plugin/octav.js'; // Adjust the path
// import Chart from 'chart.js';
export default { export default {
name: 'HomePage', name:'HomePage',
data() { created() {
return { return{
bandpassProcessor: null, player : '',
audioContext: null, canvas : '',
oscillator: null, mySource : "./symphony.mp3",
}; }
}, }
mounted() {
// Initialize the BandpassProcessor
this.bandpassProcessor = new BandpassProcessor();
// Initialize the AudioContext
this.audioContext = new AudioContext();
// Create an oscillator
this.oscillator = this.audioContext.createOscillator();
this.oscillator.type = 'sine'; // You can change the waveform type
this.oscillator.frequency.setValueAtTime(440, this.audioContext.currentTime); // Set the initial frequency
// Connect the oscillator to the BandpassProcessor
this.oscillator.connect(this.bandpassProcessor);
// Start the oscillator
this.oscillator.start();
// Send parameters to the BandpassProcessor
this.bandpassProcessor.port.postMessage({
frequency: 1000, // Adjust the frequency value as needed
Q: 5, // Adjust the Q value as needed
});
// Listen for processed data from the BandpassProcessor
this.bandpassProcessor.port.onmessage = (event) => {
const { rms, dbValues, percentile10, percentile90 } = event.data;
// Use the processed data as needed
};
// Create a line chart
const ctx = document.getElementById('audioSignalChart').getContext('2d');
this.audioSignalChart = new Chart(ctx, {
type: 'line',
data: {
labels: [], // You can add time labels or use an array index
datasets: [
{
label: 'Audio Signal',
data: [], // Populate this with your audio signal data
borderColor: 'rgb(75, 192, 192)',
borderWidth: 2,
fill: false,
},
],
},
options: {
responsive: true,
maintainAspectRatio: false,
scales: {
x: {
title: {
display: true,
text: 'Time',
},
},
y: {
title: {
display: true,
text: 'Amplitude',
},
},
},
},
});
} }
};
</script> </script>

0
pages/letsgo.vue Normal file → Executable file
View File

View File

@ -1,163 +0,0 @@
// Log module load for debugging AudioWorklet registration.
console.log('Octave module loaded successfully!');
// NOTE(review): removed a stray bare `new AudioWorkletProcessor()` statement
// that used to follow this log line. AudioWorkletProcessor cannot be
// constructed directly (instances are only created by the engine via
// registerProcessor / AudioWorkletNode), so the bare call threw a TypeError
// during module evaluation and aborted the script before the processor class
// below could ever be registered.
/**
 * AudioWorklet processor intended to split audio into 9 octave bands
 * (63 Hz - 16 kHz), A-weight the microphone channel (identifier 0) and
 * periodically derive 10th/90th-percentile band levels.
 *
 * NOTE(review): `audioContext`, `BiquadFilterNode`, `IIRFilterNode` and
 * `audioContext.createAnalyser()` are main-thread Web Audio APIs that do not
 * exist inside an AudioWorkletGlobalScope, so this constructor would throw at
 * runtime. The node graph needs to be built on the main thread and only
 * per-sample math done here — flagged for redesign, not changed in this
 * documentation pass.
 */
class OctaveBandProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    // Define center frequencies for 9 octave bands (Hz).
    this.centerFrequencies = [63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000];
    this.filters = [];
    this.lastUpdateTimestamp = 0;
    this.updateInterval = 0.125; // Update every 0.125 seconds
    // Create an A-weighting filter for specific frequencies.
    this.createAWeightingFilter();
    // Create bandpass filters for each center frequency.
    // NOTE(review): `this.filters.push(filter)` is passed as a third
    // constructor argument and references `filter` inside its own
    // initializer (temporal-dead-zone ReferenceError at runtime). The push
    // belongs on its own statement after the `const`.
    this.centerFrequencies.forEach(frequency => {
      const filter = new BiquadFilterNode(audioContext, {
        type: 'bandpass',
        frequency: frequency,
        Q: 1.41, // Set the desired Q value
      },
      this.filters.push(filter))
    });
    // Set up analyzers for calculating percentiles.
    this.setupAnalyzers();
  }
  /**
   * Builds a 9x5 grid of AnalyserNodes: one row per octave band, one column
   * per signal identifier (0 = microphone, 1-4 = other sources). Only the
   * microphone column (i === 0) is fed from the A-weighting filter.
   */
  setupAnalyzers() {
    this.analyzers = [];
    this.centerFrequencies.forEach(frequency => {
      this.analyzers.push([]);
      for (let i = 0; i < 5; i++) { // Unique identifiers from 0 to 4
        const analyzer = audioContext.createAnalyser();
        analyzer.fftSize = 2048;
        // Check if the identifier is 0 (microphone audio) before connecting to the A-weighting filter
        if (i === 0) {
          this.aWeightingFilter.connect(analyzer);
        }
        this.analyzers[this.analyzers.length - 1].push(analyzer);
      }
    })
  }
  /**
   * Render callback: band-filters each channel, A-weights channel 0, and
   * refreshes percentile statistics every `updateInterval` seconds.
   * Returns true so the engine keeps the processor alive.
   *
   * NOTE(review): Biquad/IIR filter nodes have no `.process()` method (they
   * are graph nodes, not sample processors), and `this.currentTime` is
   * undefined on the processor — the frame time is the
   * AudioWorkletGlobalScope global `currentTime`. Both would fail at
   * runtime; flagged, not changed, in this doc pass.
   */
  process(inputs, outputs) {
    const numOutputChannels = outputs.length;
    for (let i = 0; i < numOutputChannels; i++) {
      const outputChannel = outputs[i][0];
      const inputChannel = inputs[i][0];
      // Apply the filter to the input channel
      const filteredSignal = this.filters[i].process(inputChannel);
      // Apply A-weighting only to the microphone signal (channel 0)
      if (i === 0) {
        const aWeightedSignal = this.aWeightingFilter.process(filteredSignal);
        outputChannel.set(aWeightedSignal);
      } else {
        // For other channels, pass the signal without A-weighting
        outputChannel.set(filteredSignal);
      }
      // Check if it's time to update percentiles
      const currentTime = this.currentTime;
      if (currentTime - this.lastUpdateTimestamp >= this.updateInterval) {
        this.updatePercentiles(i);
        this.lastUpdateTimestamp = currentTime;
      }
    }
    return true;
  }
  /**
   * Computes the RMS of `signal` and converts it to dB (20 * log10).
   * NOTE(review): `copyFromChannel` is an AudioBuffer method, not a
   * Float32Array one — confirm what `signal` actually is here;
   * `channelIndex` is accepted but never used.
   */
  calculateRMSLevel(signal, channelIndex) {
    const data = new Float32Array(signal.length);
    signal.copyFromChannel(data, 0);
    const sum = data.reduce((acc, val) => acc + val * val, 0);
    const rmsLevel = Math.sqrt(sum / data.length);
    const dBLevel = 20 * Math.log10(rmsLevel); // Convert to dB
    return dBLevel;
  }
  /**
   * Reads FFT level data for `channelIndex` in every octave band and
   * computes the 10th/90th-percentile spread.
   * NOTE(review): `percentileDiff` is computed but never stored or
   * returned, so this method currently has no observable effect.
   */
  updatePercentiles(channelIndex) {
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const analyzer = this.analyzers[i][channelIndex];
      const levelData = new Float32Array(analyzer.frequencyBinCount);
      analyzer.getFloatFrequencyData(levelData);
      // Calculate percentiles for each octave band and each channel
      const percentile10 = this.calculatePercentile(levelData, 10);
      const percentile90 = this.calculatePercentile(levelData, 90);
      const percentileDiff = percentile10 - percentile90;
      // Store the percentile difference for each channel and each octave band
      // You can use suitable data structures to store these values for future comparisons
    }
  }
  /**
   * Returns the `percentile`-th percentile of `data` (nearest-rank method,
   * computed on a sorted copy; the input array is not mutated).
   */
  calculatePercentile(data, percentile) {
    const sortedData = data.slice().sort((a, b) => a - b);
    const index = Math.floor((percentile / 100) * sortedData.length);
    return sortedData[index];
  }
  /**
   * Builds the custom IIR filter used for A-weighting the microphone signal.
   * NOTE(review): the coefficient list looks like per-band dB attenuation
   * values rather than IIR feedforward taps — verify against the intended
   * A-weighting filter design.
   */
  createAWeightingFilter() {
    // Use the provided A-weighting filter coefficients
    const aWeightingCoefficients = [0, -0.051, -0.142, -0.245, -0.383, -0.65, -1.293, -2.594, -6.554]; //David
    // Create a custom IIR filter node with the A-weighting coefficients
    this.aWeightingFilter = new IIRFilterNode(audioContext, {
      feedforward: aWeightingCoefficients,
      feedback: [1],
    });
  }
  /**
   * Sums the LAF10%-90% spreads of the microphone (id 0), audioFile1 (id 3)
   * and audioFile2 (id 4) across all octave bands.
   * NOTE(review): `calculatePercentile` is handed AnalyserNodes here, but it
   * expects an array of level values; and the final `return` is commented
   * out, so the method currently returns undefined.
   */
  combineAndCalculate() {
    let LAF10_90_total = 0; // Initialize the total LAF10%-90%
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const micAnalyzer = this.analyzers[i][0]; // Analyzer for microphone audio (identifier 0)
      const audioFile1Analyzer = this.analyzers[i][3]; // Analyzer for audioFile1 (identifier 3)
      const audioFile2Analyzer = this.analyzers[i][4]; // Analyzer for audioFile2 (identifier 4)
      // Calculate percentiles for the microphone audio
      const micPercentile10 = this.calculatePercentile(micAnalyzer, 10);
      const micPercentile90 = this.calculatePercentile(micAnalyzer, 90);
      // Calculate percentiles for audioFile1
      const audioFile1Percentile10 = this.calculatePercentile(audioFile1Analyzer, 10);
      const audioFile1Percentile90 = this.calculatePercentile(audioFile1Analyzer, 90);
      // Calculate percentiles for audioFile2
      const audioFile2Percentile10 = this.calculatePercentile(audioFile2Analyzer, 10);
      const audioFile2Percentile90 = this.calculatePercentile(audioFile2Analyzer, 90);
      // Calculate LAF10%-90% for microphone audio, audioFile1, and audioFile2 separately
      const micLAF10_90 = micPercentile10 - micPercentile90;
      const audioFile1LAF10_90 = audioFile1Percentile10 - audioFile1Percentile90;
      const audioFile2LAF10_90 = audioFile2Percentile10 - audioFile2Percentile90;
      // Calculate combined LAF10%-90% for microphone audio, audioFile1, and audioFile2
      const combinedLAF10_90 = micLAF10_90 + audioFile1LAF10_90 + audioFile2LAF10_90;
      // Add the combined LAF10%-90% to the total
      LAF10_90_total += combinedLAF10_90;
    }
    // return LAF10_90_total;
  }
}
// Register under the name used by `new AudioWorkletNode(ctx, 'octave')` on the main thread.
registerProcessor('octave', OctaveBandProcessor);

View File

@ -37,7 +37,7 @@
<div class="col-12 text-center pt-5 mt-3 pb-2 mb-2" > <div class="col-12 text-center pt-5 mt-3 pb-2 mb-2" >
<NuxtLink class="btn btn-warning px-2 mx-1" exact-active-class="px-4 mx-2" :to="localePath('/onboarding/selectinput')"></NuxtLink> <NuxtLink class="btn btn-warning px-2 mx-1" exact-active-class="px-4 mx-2" :to="localePath('/onboarding/selectinput')"></NuxtLink>
<NuxtLink class="btn btn-warning px-2" exact-active-class="px-4 mx-2" :to="localePath('/onboarding')"></NuxtLink> <NuxtLink class="btn btn-warning px-2" exact-active-class="px-4 mx-2" :to="localePath('/onboarding')"></NuxtLink>
<NuxtLink class="btn btn-warning mx-2" exact-active-class="px-4 mx-2" :to="localePath('/onboarding/onboarding2')"></NuxtLink> <NuxtLink class="btn btn-warning px-2 mx-2" exact-active-class="px-4 mx-2" :to="localePath('/onboarding/onboarding2')"></NuxtLink>
<NuxtLink class="btn btn-warning px-2 " exact-active-class="px-4 mx-2" :to="localePath('/onboarding/onboarding3')"></NuxtLink> <NuxtLink class="btn btn-warning px-2 " exact-active-class="px-4 mx-2" :to="localePath('/onboarding/onboarding3')"></NuxtLink>
<NuxtLink class="btn btn-warning px-2 mx-2" exact-active-class="px-4 mx-2" :to="localePath('/onboarding/onboarding4')"></NuxtLink> <NuxtLink class="btn btn-warning px-2 mx-2" exact-active-class="px-4 mx-2" :to="localePath('/onboarding/onboarding4')"></NuxtLink>
<h6 class="text-muted text-center pt-3">You can customize your selection later</h6> <h6 class="text-muted text-center pt-3">You can customize your selection later</h6>
@ -132,4 +132,4 @@ export default {
.px-4{ .px-4{
transition: 1s; transition: 1s;
} }
</style> </style>

View File

@ -3,8 +3,8 @@
<div class="row"> <div class="row">
<div class="col-12 "> <div class="col-12 ">
<h4 class="text-center fw-bold pt-5">{{t("How is your audio hardware connected?")}}</h4> <h4 class="text-center fw-bold pt-5">{{t("How is your audio hardware connected?")}}</h4>
<p class="text-center mb-0 pb-0 text-muted">As input, please select the microphone of your laptop or mobile device not of your headphones.</p> <p class="text-center mb-0 pb-0 text-muted">{{t('As input, please select the microphone of your laptop or mobile device not of your headphones.')}}</p>
<p class="text-center mt-0 pt-0 text-muted">To use Mindboost, headphones are required.</p> <p class="text-center mt-0 pt-0 text-muted">{{t('To use Mindboost, headphones are required.')}}</p>
</div> </div>
</div> </div>
<div class="row justify-content-center"> <div class="row justify-content-center">
@ -12,8 +12,8 @@
<form> <form>
<div class="row justify-content-center "> <div class="row justify-content-center ">
<div class="col-md-3 text-center"> <div class="col-md-3 text-center">
<h6 class="pb-0 mb-0">Input device:</h6> <h6 class="pb-0 mb-0">{{t('Input device:')}}</h6>
<p class="pt-0 mt-0 text-muted pb-0 mb-0" style="font-size: 14px;font-weight: 500">(select laptop or mobile device microphone)</p> <p class="pt-0 mt-0 text-muted pb-0 mb-0" style="font-size: 14px;font-weight: 500">({{t('select laptop or mobile device microphone')}})</p>
<select class="form-select pt-1 mt-0 select-box " v-model="selectedInput"> <select class="form-select pt-1 mt-0 select-box " v-model="selectedInput">
<option :value="index" v-for="(item,index) in audioInputDevices" :key="index" >{{item.label}}</option> <option :value="index" v-for="(item,index) in audioInputDevices" :key="index" >{{item.label}}</option>
</select> </select>
@ -22,8 +22,8 @@
<div class="row justify-content-center pt-3"> <div class="row justify-content-center pt-3">
<div class="col-md-3 text-center"> <div class="col-md-3 text-center">
<h6 class="pb-0 mb-0 " >Output device:</h6> <h6 class="pb-0 mb-0 " >{{t('Output device:')}}</h6>
<p class="pt-0 mt-0 text-muted pb-0 mb-0" style="font-size: 14px;font-weight: 500">(select headphones or headphone output)</p> <p class="pt-0 mt-0 text-muted pb-0 mb-0" style="font-size: 14px;font-weight: 500">({{ t('select headphones or headphone output')}})</p>
<select class="form-select pt-1 mt-0 select-box " v-model="selectedOutput"> <select class="form-select pt-1 mt-0 select-box " v-model="selectedOutput">
<option :value="index" v-for="(item,index) in audioOutputDevices" :key="index">{{item.label}}</option> <option :value="index" v-for="(item,index) in audioOutputDevices" :key="index">{{item.label}}</option>
</select> </select>
@ -32,7 +32,7 @@
<div class="row justify-content-center pt-3"> <div class="row justify-content-center pt-3">
<div class="col-md-3 text-center" style="z-index: 1000000;"> <div class="col-md-3 text-center" style="z-index: 1000000;">
<a href="#" @click.prevent="saveDevices" style="z-index: 1000000" class="btn col-4 next-btn" >NEXT</a> <a href="#" @click.prevent="saveDevices" style="z-index: 1000000" class="btn col-4 next-btn" >{{t("Next")}}</a>
</div> </div>
</div> </div>
</form> </form>

0
pages/products/[id].vue Normal file → Executable file
View File

0
pages/setting.vue Normal file → Executable file
View File

0
pages/setting/about.vue Normal file → Executable file
View File

33
pages/setting/editaccount.vue Normal file → Executable file
View File

@ -37,6 +37,17 @@
<div class="invalid-feedback d-block" v-if="errors.password">{{errors.password[0]}}</div> <div class="invalid-feedback d-block" v-if="errors.password">{{errors.password[0]}}</div>
</div> </div>
</div> </div>
<div class="row pt-3">
<div class="col-12">
<label class="text-muted ">{{t("Language")}} </label>
<select @change="changeLanguage" v-model="form.language" class="form-select">
<option value="en">English</option>
<option value="de">German</option>
</select>
<div class="invalid-feedback d-block" v-if="errors.language">{{errors.language[0]}}</div>
</div>
</div>
<div class="row pt-5 "> <div class="row pt-5 ">
<div class="col-12 text-center"> <div class="col-12 text-center">
<button type="submit" class="btn text-white fs-5 col-12 fw-bold py-2 " style="background-color: #e9c046">{{t("Save Changes")}} <div v-if="loading" class="spinner-border spinner-border-sm" role="status"> <button type="submit" class="btn text-white fs-5 col-12 fw-bold py-2 " style="background-color: #e9c046">{{t("Save Changes")}} <div v-if="loading" class="spinner-border spinner-border-sm" role="status">
@ -64,14 +75,23 @@ export default {
}, },
setup(){ setup(){
const { t } = useI18n() const { t } = useI18n()
const localePath = useLocalePath() const localePath = useLocalePath();
return {t,localePath} const switchLocalePath = useSwitchLocalePath();
let changeLanguage=(event)=>{
console.log('switch',event.target.value)
useRouter().push(switchLocalePath(event.target.value));
// i18n.global.locale.value=
}
return {t,localePath,changeLanguage}
}, },
mounted() { mounted() {
console.log(this.user) console.log(this.user)
this.form.first_name=this.user.first_name; this.form.first_name=this.user.first_name;
this.form.email=this.user.email; this.form.email=this.user.email;
this.form.surname=this.user.surname; this.form.surname=this.user.surname;
this.form.language=this.user.language;
}, },
data(){ data(){
return { return {
@ -80,7 +100,8 @@ export default {
first_name:"", first_name:"",
surname:"", surname:"",
email:"email", email:"email",
password:"" password:"",
language:'en'
}, },
errors:[], errors:[],
} }
@ -90,6 +111,10 @@ export default {
...mapActions(useUserStore,['updateUser']), ...mapActions(useUserStore,['updateUser']),
saveUser(){ saveUser(){
this.loading=true; this.loading=true;
this.t.locale.value=this.form.language;
this.$axios.post('/api/account/update',this.form).then(({data})=>{ this.$axios.post('/api/account/update',this.form).then(({data})=>{
this.loading=false; this.loading=false;
if(data.success){ if(data.success){
@ -110,4 +135,4 @@ export default {
<style> <style>
</style> </style>

0
pages/setting/faq.vue Normal file → Executable file
View File

12
pages/setting/index.vue Normal file → Executable file
View File

@ -38,6 +38,16 @@
<div class="col-6"> <div class="col-6">
<h5 class="fw-bold text-end"></h5> </div> <h5 class="fw-bold text-end"></h5> </div>
</div> </div>
<div class="row pt-4">
<div class="col-6">
<h5 class="fw-bold text-muted">{{ t('Language') }}</h5>
</div>
<div class="col-6">
<h5 v-if="user.language=='de'" class="fw-bold text-end">German</h5>
<h5 v-else class="fw-bold text-end">English</h5>
</div>
</div>
<div class="row pt-4"> <div class="row pt-4">
<div class="col-12 text-center"> <div class="col-12 text-center">
<button @click="logoutNow" class="btn col-12 col-sm-12 col-md-3 fw-bold btn-outline-dark">{{t("Log Out")}} </button> <button @click="logoutNow" class="btn col-12 col-sm-12 col-md-3 fw-bold btn-outline-dark">{{t("Log Out")}} </button>
@ -98,4 +108,4 @@ export default {
<style> <style>
</style> </style>

0
pages/setting/soundscap.vue Normal file → Executable file
View File

0
pages/setting/soundscap1.vue Normal file → Executable file
View File

0
pages/setting/subscription.vue Normal file → Executable file
View File

0
plugins/AudioVisual.client.ts Normal file → Executable file
View File

0
plugins/PiniaPlugin.ts Normal file → Executable file
View File

View File

@ -1,11 +0,0 @@
import os
def get_octave_js_path():
current_directory = os.path.dirname(os.path.abspath(__file__))
octave_js_path = os.path.join(current_directory, 'plugins', 'octave.js')
return octave_js_path
if __name__ == "__main__":
nuxt_config = f"module.exports = {{\n octaveJsPath: '{get_octave_js_path()}'\n}};"
with open('nuxt.config.js', 'w') as file:
file.write(nuxt_config)

0
plugins/vue-video-background.client.ts Normal file → Executable file
View File

BIN
public/.DS_Store vendored

Binary file not shown.

View File

@ -1,153 +0,0 @@
/**
 * AudioWorklet processor that splits its input into 9 octave bands
 * (63 Hz - 16 kHz), A-weights the microphone channel (identifier 0) and
 * periodically computes 10th/90th-percentile band levels (LAF10%-90%).
 *
 * FIXES in this revision (the file previously failed to parse at all):
 *  - `this calculatePercentile(...)` in combineAndCalculate() was missing
 *    the member-access dot (SyntaxError).
 *  - The forEach callback in setupAnalyzers() was closed with `}` instead
 *    of `})` (SyntaxError).
 *  - process() read the undefined `this.currentTime`; the frame time in a
 *    worklet is the AudioWorkletGlobalScope global `currentTime`.
 *
 * NOTE(review): `audioContext`, `BiquadFilterNode`, `IIRFilterNode` and
 * `audioContext.createAnalyser()` are main-thread Web Audio APIs and are NOT
 * available inside an AudioWorkletGlobalScope, so the constructor would still
 * throw at runtime. The node graph must be built on the main thread and only
 * per-sample math done here — flagged for redesign, not fixed in this pass.
 */
class OctaveBandProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    // Center frequencies for the 9 octave bands, in Hz.
    this.centerFrequencies = [63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000];
    this.filters = [];
    this.lastUpdateTimestamp = 0;
    this.updateInterval = 0.125; // Recompute percentiles every 125 ms.
    // A-weighting filter applied to the microphone channel only.
    this.createAWeightingFilter();
    // One bandpass filter per octave-band center frequency.
    this.centerFrequencies.forEach(frequency => {
      const filter = new BiquadFilterNode(audioContext, {
        type: 'bandpass',
        frequency: frequency,
        Q: 1.41, // Set the desired Q value
      });
      this.filters.push(filter);
    });
    // Analyzers used to derive percentile statistics per band and channel.
    this.setupAnalyzers();
  }
  /**
   * Builds the custom IIR filter used for A-weighting the microphone signal.
   * NOTE(review): the coefficient list looks like per-band dB attenuation
   * values rather than IIR feedforward taps — verify against the intended
   * A-weighting filter design.
   */
  createAWeightingFilter() {
    // Use the provided A-weighting filter coefficients
    const aWeightingCoefficients = [0, -0.051, -0.142, -0.245, -0.383, -0.65, -1.293, -2.594, -6.554]; //David
    // Create a custom IIR filter node with the A-weighting coefficients
    this.aWeightingFilter = new IIRFilterNode(audioContext, {
      feedforward: aWeightingCoefficients,
      feedback: [1],
    });
  }
  /**
   * Builds a 9x5 grid of AnalyserNodes: one row per octave band, one column
   * per signal identifier (0 = microphone, 1-4 = other sources). Only the
   * microphone column (i === 0) is fed from the A-weighting filter.
   */
  setupAnalyzers() {
    this.analyzers = [];
    this.centerFrequencies.forEach(frequency => {
      this.analyzers.push([]);
      for (let i = 0; i < 5; i++) { // Unique identifiers from 0 to 4
        const analyzer = audioContext.createAnalyser();
        analyzer.fftSize = 2048;
        // Only the microphone signal (identifier 0) goes through A-weighting.
        if (i === 0) {
          this.aWeightingFilter.connect(analyzer);
        }
        this.analyzers[this.analyzers.length - 1].push(analyzer);
      }
    }); // FIX: was `}` — the forEach call was never closed.
  }
  /**
   * Render callback: band-filters each channel, A-weights channel 0, and
   * refreshes percentile statistics every `updateInterval` seconds.
   * Returns true so the engine keeps the processor alive.
   *
   * NOTE(review): Biquad/IIR filter nodes have no `.process()` method (they
   * are graph nodes, not sample processors), so these calls would throw at
   * runtime. Preserved pending the main-thread redesign noted on the class.
   */
  process(inputs, outputs) {
    const numOutputChannels = outputs.length;
    for (let i = 0; i < numOutputChannels; i++) {
      const outputChannel = outputs[i][0];
      const inputChannel = inputs[i][0];
      // Apply the band filter to the input channel.
      const filteredSignal = this.filters[i].process(inputChannel);
      // Apply A-weighting only to the microphone signal (channel 0).
      if (i === 0) {
        const aWeightedSignal = this.aWeightingFilter.process(filteredSignal);
        outputChannel.set(aWeightedSignal);
      } else {
        // For other channels, pass the signal without A-weighting.
        outputChannel.set(filteredSignal);
      }
      // FIX: `this.currentTime` is undefined on the processor; the frame
      // time lives in the AudioWorkletGlobalScope global `currentTime`.
      const now = currentTime;
      if (now - this.lastUpdateTimestamp >= this.updateInterval) {
        this.updatePercentiles(i);
        this.lastUpdateTimestamp = now;
      }
    }
    return true;
  }
  /**
   * Computes the RMS of `signal` and converts it to dB (20 * log10).
   * NOTE(review): `copyFromChannel` is an AudioBuffer method, not a
   * Float32Array one — confirm what `signal` actually is here;
   * `channelIndex` is accepted but never used.
   */
  calculateRMSLevel(signal, channelIndex) {
    const data = new Float32Array(signal.length);
    signal.copyFromChannel(data, 0);
    const sum = data.reduce((acc, val) => acc + val * val, 0);
    const rmsLevel = Math.sqrt(sum / data.length);
    const dBLevel = 20 * Math.log10(rmsLevel); // Convert to dB
    return dBLevel;
  }
  /**
   * Reads FFT level data for `channelIndex` in every octave band and
   * computes the 10th/90th-percentile spread.
   * NOTE(review): `percentileDiff` is computed but never stored or
   * returned, so this method currently has no observable effect.
   */
  updatePercentiles(channelIndex) {
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const analyzer = this.analyzers[i][channelIndex];
      const levelData = new Float32Array(analyzer.frequencyBinCount);
      analyzer.getFloatFrequencyData(levelData);
      // Calculate percentiles for each octave band and each channel
      const percentile10 = this.calculatePercentile(levelData, 10);
      const percentile90 = this.calculatePercentile(levelData, 90);
      const percentileDiff = percentile10 - percentile90;
      // Store the percentile difference for each channel and each octave band
      // You can use suitable data structures to store these values for future comparisons
    }
  }
  /**
   * Returns the `percentile`-th percentile of `data` (nearest-rank method,
   * computed on a sorted copy; the input array is not mutated).
   */
  calculatePercentile(data, percentile) {
    const sortedData = data.slice().sort((a, b) => a - b);
    const index = Math.floor((percentile / 100) * sortedData.length);
    return sortedData[index];
  }
  /**
   * Sums the LAF10%-90% spreads of the microphone (id 0), audioFile1 (id 3)
   * and audioFile2 (id 4) across all octave bands.
   * @returns {number} total combined LAF10%-90% across bands.
   * NOTE(review): `calculatePercentile` is handed AnalyserNodes here, but it
   * expects an array of level values — confirm the intended data flow.
   */
  combineAndCalculate() {
    let LAF10_90_total = 0; // Initialize the total LAF10%-90%
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const micAnalyzer = this.analyzers[i][0]; // Analyzer for microphone audio (identifier 0)
      const audioFile1Analyzer = this.analyzers[i][3]; // Analyzer for audioFile1 (identifier 3)
      const audioFile2Analyzer = this.analyzers[i][4]; // Analyzer for audioFile2 (identifier 4)
      // Calculate percentiles for the microphone audio
      const micPercentile10 = this.calculatePercentile(micAnalyzer, 10);
      const micPercentile90 = this.calculatePercentile(micAnalyzer, 90);
      // Calculate percentiles for audioFile1
      const audioFile1Percentile10 = this.calculatePercentile(audioFile1Analyzer, 10);
      const audioFile1Percentile90 = this.calculatePercentile(audioFile1Analyzer, 90);
      // Calculate percentiles for audioFile2
      const audioFile2Percentile10 = this.calculatePercentile(audioFile2Analyzer, 10);
      // FIX: was `this calculatePercentile(...)` — missing dot (SyntaxError).
      const audioFile2Percentile90 = this.calculatePercentile(audioFile2Analyzer, 90);
      // Calculate LAF10%-90% for microphone audio, audioFile1, and audioFile2 separately
      const micLAF10_90 = micPercentile10 - micPercentile90;
      const audioFile1LAF10_90 = audioFile1Percentile10 - audioFile1Percentile90;
      const audioFile2LAF10_90 = audioFile2Percentile10 - audioFile2Percentile90;
      // Calculate combined LAF10%-90% for microphone audio, audioFile1, and audioFile2
      const combinedLAF10_90 = micLAF10_90 + audioFile1LAF10_90 + audioFile2LAF10_90;
      // Add the combined LAF10%-90% to the total
      LAF10_90_total += combinedLAF10_90;
    }
    return LAF10_90_total;
  }
}
// Register under the name used by `new AudioWorkletNode(ctx, 'octave')` on the main thread.
registerProcessor('octave', OctaveBandProcessor);

0
public/maskin2.wav Normal file → Executable file
View File

0
public/masking1.aac Normal file → Executable file
View File

180
public/scripts/octave.js Normal file
View File

@ -0,0 +1,180 @@
class OctaveBandProcessor extends AudioWorkletProcessor {
  /**
   * Splits incoming audio into 9 octave bands and tracks per-band level
   * statistics (10th/90th percentile spread, LAF10%-90%) for a microphone
   * signal and two additional signals (masking, harmonic).
   *
   * NOTE(review): this class runs in the AudioWorkletGlobalScope, where no
   * `audioContext` variable exists and graph nodes (BiquadFilterNode,
   * IIRFilterNode, AnalyserNode) cannot be constructed or `.process()`ed
   * directly — confirm how these nodes are meant to reach the worklet; the
   * code below fixes the syntax errors but preserves that design.
   */
  constructor() {
    super();
    // Center frequencies of the 9 octave bands, in Hz.
    this.centerFrequencies = [63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000];
    this.filters = [];
    this.lastUpdateTimestamp = 0;
    this.updateInterval = 0.125; // refresh percentile stats every 125 ms
    this.filteredMicSignal = []; // per-band filtered mic signal, written in process()
    // A-weighting filter; applied to the microphone signal only.
    this.createAWeightingFilter();
    // One bandpass filter per octave band.
    this.centerFrequencies.forEach((frequency) => {
      const filter = new BiquadFilterNode(audioContext, {
        type: "bandpass",
        frequency,
        Q: 1.41, // ~1-octave bandwidth
      });
      // FIX: push() was previously passed as a third constructor argument.
      this.filters.push(filter);
    });
    // Analyser bank used for the percentile calculations.
    this.setupAnalyzers();
  }

  /**
   * Creates a bank of 5 analysers per octave band (identifiers 0-4).
   * Identifier 0 is the microphone and is fed from the A-weighting filter.
   */
  setupAnalyzers() {
    this.analyzers = [];
    this.centerFrequencies.forEach(() => {
      const bandAnalyzers = [];
      for (let i = 0; i < 5; i++) {
        const analyzer = audioContext.createAnalyser();
        analyzer.fftSize = 2048;
        // Identifier 0 (microphone) is routed through the A-weighting filter.
        if (i === 0) {
          this.aWeightingFilter.connect(analyzer);
        }
        bandAnalyzers.push(analyzer);
      }
      this.analyzers.push(bandAnalyzers);
    });
  }

  /**
   * Per-render-quantum callback: band-filters the three inputs
   * (0 = microphone, 1 = masking signal, 2 = harmonic signal), A-weights the
   * mic band, writes it to the band's output and periodically refreshes the
   * percentile statistics.
   *
   * @param {Float32Array[][]} inputs  - input channel buffers per input.
   * @param {Float32Array[][]} outputs - output channel buffers per output.
   * @returns {boolean} true to keep the processor alive.
   */
  process(inputs, outputs) {
    // FIX: was bare `filters.length` (ReferenceError); use the instance field.
    const numOutputOctaves = this.filters.length;
    for (let i = 0; i < numOutputOctaves; i++) {
      // FIX: `outputChannel` was commented out but still used below.
      const outputChannel = outputs[i][0];
      const mic = inputs[0];
      const maskingsig = inputs[1];
      const harmonic = inputs[2];
      // FIX: `const filteredMicSignal[i] = …` is a syntax error; store on the
      // instance array declared in the constructor instead.
      this.filteredMicSignal[i] = this.filters[i].process(mic);
      const filteredMaskSignal = this.filters[i].process(maskingsig);
      const filteredHarmoSignal = this.filters[i].process(harmonic);
      // A-weighting applies only to the microphone signal.
      const aWeightedSignal = this.aWeightingFilter.process(this.filteredMicSignal[i]);
      this.calculateRMSLevel(aWeightedSignal, i);
      outputChannel.set(aWeightedSignal);
      // FIX: removed a second `outputChannel.set(filteredSignal)` that
      // referenced an undefined variable and clobbered the weighted output.
      // `currentTime` is an AudioWorkletGlobalScope global, not `this.currentTime`.
      if (currentTime - this.lastUpdateTimestamp >= this.updateInterval) {
        this.updatePercentiles(i);
        this.lastUpdateTimestamp = currentTime;
      }
    }
    return true;
  }

  /**
   * RMS level of `signal`, converted to dB.
   * NOTE(review): assumes `signal` is AudioBuffer-like (has copyFromChannel)
   * — TODO confirm; worklet input channels are plain Float32Arrays.
   *
   * @returns {number} level in dB (-Infinity for an all-zero buffer).
   */
  calculateRMSLevel(signal, channelIndex) {
    const data = new Float32Array(signal.length);
    signal.copyFromChannel(data, 0);
    const sum = data.reduce((acc, val) => acc + val * val, 0);
    const rmsLevel = Math.sqrt(sum / data.length);
    return 20 * Math.log10(rmsLevel); // convert to dB
  }

  /**
   * Recomputes the 10th/90th percentile spread for every octave band of the
   * given channel. FIX: the analyser lookup and buffer allocation were
   * commented out, leaving `analyzer` and `levelData` undefined.
   */
  updatePercentiles(channelIndex) {
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const analyzer = this.analyzers[i][channelIndex];
      const levelData = new Float32Array(analyzer.frequencyBinCount);
      analyzer.getFloatFrequencyData(levelData);
      const percentile10 = this.calculatePercentile(levelData, 10);
      const percentile90 = this.calculatePercentile(levelData, 90);
      const percentileDiff = percentile10 - percentile90;
      // TODO: persist percentileDiff per band/channel for later comparisons.
    }
  }

  /**
   * Nearest-rank percentile (0-100) of a numeric array-like.
   * FIX: clamps the index so percentile 100 no longer reads one element
   * past the end of the array.
   */
  calculatePercentile(data, percentile) {
    const sortedData = data.slice().sort((a, b) => a - b);
    const index = Math.min(
      sortedData.length - 1,
      Math.floor((percentile / 100) * sortedData.length),
    );
    return sortedData[index];
  }

  /**
   * Builds the IIR filter used for A-weighting the microphone signal.
   * NOTE(review): these look like per-band dB corrections rather than IIR
   * feedforward coefficients — confirm against the intended filter design.
   */
  createAWeightingFilter() {
    const aWeightingCoefficients = [
      0, -0.051, -0.142, -0.245, -0.383, -0.65, -1.293, -2.594, -6.554,
    ]; //David
    this.aWeightingFilter = new IIRFilterNode(audioContext, { //infinit Impuls Response
      feedforward: aWeightingCoefficients,
      feedback: [1],
    });
  }
  // combineAndCalculate() removed: it was entirely commented out (dead code);
  // see version control history for the previous implementation.
}
// Register this worklet processor on the audio rendering thread under the
// name "octave" (the main thread constructs the matching AudioWorkletNode).
registerProcessor("octave", OctaveBandProcessor)

View File

@ -1,5 +1,3 @@
new AudioWorkletProcessor ()
class OctaveBandProcessor extends AudioWorkletProcessor { class OctaveBandProcessor extends AudioWorkletProcessor {
constructor() { constructor() {
super(); super();
@ -18,20 +16,30 @@ class OctaveBandProcessor extends AudioWorkletProcessor {
type: 'bandpass', type: 'bandpass',
frequency: frequency, frequency: frequency,
Q: 1.41, // Set the desired Q value Q: 1.41, // Set the desired Q value
}, });
this.filters.push(filter)) this.filters.push(filter);
}); });
// Set up analyzers for calculating percentiles // Set up analyzers for calculating percentiles
this.setupAnalyzers(); this.setupAnalyzers();
}
createAWeightingFilter() {
// Use the provided A-weighting filter coefficients
const aWeightingCoefficients = [0, -0.051, -0.142, -0.245, -0.383, -0.65, -1.293, -2.594, -6.554]; //David
// Create a custom IIR filter node with the A-weighting coefficients
this.aWeightingFilter = new IIRFilterNode(audioContext, {
feedforward: aWeightingCoefficients,
feedback: [1],
});
} }
setupAnalyzers() { setupAnalyzers() {
this.analyzers = []; this.analyzers = [];
this.centerFrequencies.forEach(frequency => { this.centerFrequencies.forEach((frequency) => {
this.analyzers.push([]); this.analyzers.push([]);
for (let i = 0; i < 5; i++) { // Unique identifiers from 0 to 4 for (let i = 0; i < 5; i++) { // Unique identifiers from 0 to 4
const analyzer = audioContext.createAnalyser(); const analyzer = audioContext.createAnalyser();
analyzer.fftSize = 2048; analyzer.fftSize = 2048;
@ -105,19 +113,6 @@ class OctaveBandProcessor extends AudioWorkletProcessor {
const index = Math.floor((percentile / 100) * sortedData.length); const index = Math.floor((percentile / 100) * sortedData.length);
return sortedData[index]; return sortedData[index];
} }
createAWeightingFilter() {
// Use the provided A-weighting filter coefficients
const aWeightingCoefficients = [0, -0.051, -0.142, -0.245, -0.383, -0.65, -1.293, -2.594, -6.554]; //David
// Create a custom IIR filter node with the A-weighting coefficients
this.aWeightingFilter = new IIRFilterNode(audioContext, {
feedforward: aWeightingCoefficients,
feedback: [1],
});
}
combineAndCalculate() { combineAndCalculate() {
let LAF10_90_total = 0; // Initialize the total LAF10%-90% let LAF10_90_total = 0; // Initialize the total LAF10%-90%
@ -151,12 +146,8 @@ class OctaveBandProcessor extends AudioWorkletProcessor {
LAF10_90_total += combinedLAF10_90; LAF10_90_total += combinedLAF10_90;
} }
// return LAF10_90_total; return LAF10_90_total;
} }
} }
registerProcessor('octave', OctaveBandProcessor); registerProcessor('octave', OctaveBandProcessor);

0
public/video/bg-video.mp4 Normal file → Executable file
View File

View File

@ -1,154 +0,0 @@
class OctaveBandProcessor extends AudioWorkletProcessor {
  /**
   * Splits multi-channel input into 9 octave bands, A-weights the microphone
   * channel (channel 0) and computes per-band LAF10%-90% statistics.
   *
   * NOTE(review): this runs in the AudioWorkletGlobalScope, where
   * `audioContext` is not defined and BiquadFilterNode/IIRFilterNode/
   * AnalyserNode cannot be constructed or `.process()`ed — the fixes below
   * restore valid syntax/structure but preserve that design for review.
   */
  constructor() {
    super();
    // Center frequencies for the 9 octave bands (Hz).
    this.centerFrequencies = [63, 125, 250, 500, 1000, 2000, 4000, 8000, 16000];
    this.filters = [];
    this.lastUpdateTimestamp = 0;
    this.updateInterval = 0.125; // update every 0.125 seconds
    // Create an A-weighting filter for specific frequencies.
    this.createAWeightingFilter();
    // Create bandpass filters for each center frequency.
    this.centerFrequencies.forEach(frequency => {
      const filter = new BiquadFilterNode(audioContext, {
        type: 'bandpass',
        frequency: frequency,
        Q: 1.41, // Set the desired Q value
      });
      this.filters.push(filter);
    });
    // Set up analyzers for calculating percentiles.
    this.setupAnalyzers();
  } // FIX: the constructor was never closed before setupAnalyzers() began.

  /**
   * Creates 5 analysers per octave band (identifiers 0-4); identifier 0
   * (microphone) is connected behind the A-weighting filter.
   * FIX: the original had mismatched braces (forEach closed with `}` instead
   * of `});`, and a stray extra `}`).
   */
  setupAnalyzers() {
    this.analyzers = [];
    this.centerFrequencies.forEach(frequency => {
      this.analyzers.push([]);
      for (let i = 0; i < 5; i++) { // Unique identifiers from 0 to 4
        const analyzer = audioContext.createAnalyser();
        analyzer.fftSize = 2048;
        // Identifier 0 (microphone audio) goes through the A-weighting filter.
        if (i === 0) {
          this.aWeightingFilter.connect(analyzer);
        }
        this.analyzers[this.analyzers.length - 1].push(analyzer);
      }
    });
  }

  /**
   * Builds the custom IIR filter used for A-weighting.
   * NOTE(review): these values look like per-band dB corrections rather than
   * IIR feedforward coefficients — confirm against the intended design.
   */
  createAWeightingFilter() {
    const aWeightingCoefficients = [0, -0.051, -0.142, -0.245, -0.383, -0.65, -1.293, -2.594, -6.554]; //David
    this.aWeightingFilter = new IIRFilterNode(audioContext, {
      feedforward: aWeightingCoefficients,
      feedback: [1],
    });
  }

  /**
   * Per-render-quantum callback: band-filters each channel, A-weights
   * channel 0 (microphone) and periodically refreshes percentile stats.
   * @returns {boolean} true to keep the processor alive.
   */
  process(inputs, outputs) {
    const numOutputChannels = outputs.length;
    for (let i = 0; i < numOutputChannels; i++) {
      const outputChannel = outputs[i][0];
      const inputChannel = inputs[i][0];
      // Apply the band filter to the input channel.
      const filteredSignal = this.filters[i].process(inputChannel);
      // Apply A-weighting only to the microphone signal (channel 0).
      if (i === 0) {
        const aWeightedSignal = this.aWeightingFilter.process(filteredSignal);
        outputChannel.set(aWeightedSignal);
      } else {
        // Other channels pass through without A-weighting.
        outputChannel.set(filteredSignal);
      }
      // Check if it's time to update percentiles.
      // NOTE(review): the worklet clock is the global `currentTime`, not an
      // instance property — `this.currentTime` is undefined here; confirm.
      const currentTime = this.currentTime;
      if (currentTime - this.lastUpdateTimestamp >= this.updateInterval) {
        this.updatePercentiles(i);
        this.lastUpdateTimestamp = currentTime;
      }
    }
    return true;
  }

  /**
   * RMS level of `signal` in dB.
   * NOTE(review): assumes `signal` is AudioBuffer-like (copyFromChannel) —
   * TODO confirm; worklet channel data are plain Float32Arrays.
   */
  calculateRMSLevel(signal, channelIndex) {
    const data = new Float32Array(signal.length);
    signal.copyFromChannel(data, 0);
    const sum = data.reduce((acc, val) => acc + val * val, 0);
    const rmsLevel = Math.sqrt(sum / data.length);
    const dBLevel = 20 * Math.log10(rmsLevel); // Convert to dB
    return dBLevel;
  }

  /**
   * Recomputes the 10th/90th percentile spread for every octave band of the
   * given channel identifier.
   */
  updatePercentiles(channelIndex) {
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const analyzer = this.analyzers[i][channelIndex];
      const levelData = new Float32Array(analyzer.frequencyBinCount);
      analyzer.getFloatFrequencyData(levelData);
      // Calculate percentiles for each octave band and each channel.
      const percentile10 = this.calculatePercentile(levelData, 10);
      const percentile90 = this.calculatePercentile(levelData, 90);
      const percentileDiff = percentile10 - percentile90;
      // TODO: store the percentile difference per channel and band for
      // future comparisons.
    }
  }

  /**
   * Nearest-rank percentile (0-100) of a numeric array-like.
   * FIX: clamps the index so percentile 100 cannot read past the end.
   */
  calculatePercentile(data, percentile) {
    const sortedData = data.slice().sort((a, b) => a - b);
    const index = Math.min(
      sortedData.length - 1,
      Math.floor((percentile / 100) * sortedData.length),
    );
    return sortedData[index];
  }

  /**
   * Sums the LAF10%-90% spread over all octave bands for the microphone
   * (identifier 0), audioFile1 (identifier 3) and audioFile2 (identifier 4).
   * NOTE(review): percentiles are computed on AnalyserNode objects here, not
   * on their frequency-data arrays — confirm the intended data source.
   * @returns {number} accumulated combined spread across all bands.
   */
  combineAndCalculate() {
    let LAF10_90_total = 0; // Initialize the total LAF10%-90%
    for (let i = 0; i < this.centerFrequencies.length; i++) {
      const micAnalyzer = this.analyzers[i][0]; // microphone audio (identifier 0)
      const audioFile1Analyzer = this.analyzers[i][3]; // audioFile1 (identifier 3)
      const audioFile2Analyzer = this.analyzers[i][4]; // audioFile2 (identifier 4)
      // Calculate percentiles for the microphone audio.
      const micPercentile10 = this.calculatePercentile(micAnalyzer, 10);
      const micPercentile90 = this.calculatePercentile(micAnalyzer, 90);
      // Calculate percentiles for audioFile1.
      const audioFile1Percentile10 = this.calculatePercentile(audioFile1Analyzer, 10);
      const audioFile1Percentile90 = this.calculatePercentile(audioFile1Analyzer, 90);
      // Calculate percentiles for audioFile2.
      const audioFile2Percentile10 = this.calculatePercentile(audioFile2Analyzer, 10);
      // FIX: `this calculatePercentile` was missing the member-access dot.
      const audioFile2Percentile90 = this.calculatePercentile(audioFile2Analyzer, 90);
      // LAF10%-90% for each source separately.
      const micLAF10_90 = micPercentile10 - micPercentile90;
      const audioFile1LAF10_90 = audioFile1Percentile10 - audioFile1Percentile90;
      const audioFile2LAF10_90 = audioFile2Percentile10 - audioFile2Percentile90;
      // Combined LAF10%-90% across the three sources.
      const combinedLAF10_90 = micLAF10_90 + audioFile1LAF10_90 + audioFile2LAF10_90;
      // Add the combined LAF10%-90% to the total.
      LAF10_90_total += combinedLAF10_90;
    }
    return LAF10_90_total;
  }
}
// Register this worklet processor on the audio rendering thread under the
// name 'octave' (the main thread constructs the matching AudioWorkletNode).
registerProcessor('octave', OctaveBandProcessor);

0
stores/counter.js Normal file → Executable file
View File

Some files were not shown because too many files have changed in this diff Show More