audio-processor.js
// Conversions to and from frequencies based on technique used at
// https://www.johndcook.com/music_hertz_bark.html
// Lookup arrays for note names.
const keysSharp = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'];
const keysFlat = ['C', 'Db', 'D', 'Eb', 'E', 'F', 'Gb', 'G', 'Ab', 'A', 'Bb', 'B'];
// Lookup table for steps, used to convert a key (e.g. `F#5`) to a frequency.
const steps = {
    C: 0,
    'C#': 1,
    Db: 1,
    D: 2,
    'D#': 3,
    Eb: 3,
    E: 4,
    F: 5,
    'F#': 6,
    Gb: 6,
    G: 7,
    'G#': 8,
    Ab: 8,
    A: 9,
    'A#': 10,
    Bb: 10,
    B: 11
};
// Reference tuning frequency for A4, in hertz.
const a4 = 440;
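// Worked example of the conversion used below, for reference: with a4 = 440,
// the reference pitch C0 is c0 = 440 * 2^(-4.75) ≈ 16.352 Hz, and A4 sits
// 57 half steps above C0 (steps.A = 9, octave 4), giving
// 16.352 * 2^(57 / 12) ≈ 440 Hz, as expected.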
/**
 * Analyses audio data to extract the pitch and related details. Returns null
 * if no pitch could be detected in the audio data.
 * @param {number} sampleRate Sample rate of the audio data, in hertz.
 * @param {Float32Array} audioData The audio data to analyse.
 * @param {string} [accidentals] Whether note names use `'sharps'` or `'flats'`.
 *     Defaults to `'sharps'`.
 * @returns {?{frequency: number, octave: number, key: string, correctHz: number, centsOff: number}}
 *     The detected pitch details, or null.
 */
function analyseAudioData (sampleRate, audioData, accidentals = 'sharps') {
    // YINDetector (a YIN pitch-detection implementation) is expected to be
    // provided by another script on the page; it returns the detected
    // fundamental frequency in hertz, or null if no pitch was found.
    const frequency = YINDetector(audioData, sampleRate);
    if (frequency === null) {
        return null;
    }
    // Convert the frequency to a musical pitch.
    /* eslint-disable capitalized-comments */
    // c = a(2^-4.75)
    const c0 = a4 * Math.pow(2.0, -4.75);
    // h = round(12log2(f / c))
    const halfStepsAboveC0 = Math.round(12.0 * Math.log2(frequency / c0));
    // o = floor(h / 12)
    const octave = Math.floor(halfStepsAboveC0 / 12.0);
    const keys = accidentals === 'flats' ? keysFlat : keysSharp;
    const key = keys[halfStepsAboveC0 % 12];
    // Obtain the correct frequency, in hertz, of the pitch the audio is at,
    // and then use that value to determine how many cents the audio is off by.
    // z = fround(c * 2^((s + 12o) / 12))
    const correctHz = Math.fround(c0 * Math.pow(2.0, (steps[key] + (12 * octave)) / 12.0));
    // w = 1200log2(f / z)
    const centsOff = 1200 * Math.log2(frequency / correctHz);
    /* eslint-enable capitalized-comments */
    return {frequency, octave, key, correctHz, centsOff};
}
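// A minimal usage sketch (not called anywhere in this file): one way to feed
// analyseAudioData with live microphone input through the Web Audio API. The
// function name, the 100 ms polling interval and the console.log output are
// illustrative assumptions, not part of the original project.
function exampleMicrophoneAnalysis () {
    const context = new AudioContext();
    return navigator.mediaDevices.getUserMedia({audio: true}).then(stream => {
        const source = context.createMediaStreamSource(stream);
        const analyser = context.createAnalyser();
        analyser.fftSize = 2048;
        source.connect(analyser);
        const buffer = new Float32Array(analyser.fftSize);
        setInterval(() => {
            analyser.getFloatTimeDomainData(buffer);
            const result = analyseAudioData(context.sampleRate, buffer, 'sharps');
            if (result !== null) {
                console.log(`${result.key}${result.octave}`, `${result.centsOff.toFixed(1)} cents off`);
            }
        }, 100);
    });
}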
// Converts a note (e.g. key 'F#', octave 5) to its frequency in hertz.
function noteToFrequency(key, octave) {
    const c0 = a4 * Math.pow(2.0, -4.75);
    return Math.fround(c0 * Math.pow(2.0, (steps[key] + (12 * octave)) / 12.0));
}
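// For reference: with a4 = 440, noteToFrequency('A', 4) returns 440 and
// noteToFrequency('C', 4) returns roughly 261.63 Hz (middle C).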
// Plays the given note as a two-second sine tone. `audioContext` is expected
// to be an AudioContext created elsewhere on the page.
function playNote(key, octave) {
    const oscillator = audioContext.createOscillator();
    oscillator.type = 'sine';
    oscillator.connect(audioContext.destination);
    oscillator.frequency.setValueAtTime(noteToFrequency(key, octave), audioContext.currentTime);
    oscillator.start();
    oscillator.stop(audioContext.currentTime + 2);
}
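// Example usage (for reference): playNote('A', 4) plays a 440 Hz tone for two
// seconds. Browsers generally keep an AudioContext suspended until a user
// gesture, so playback is triggered from the click handler below.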
// The `play` button and the `noteToSing` element are expected to exist in the
// page's HTML, with `noteToSing` showing the note to sing (e.g. `F#4`).
const play = document.getElementById('play');
if (play !== null) {
    play.onclick = function () {
        // The indices below assume the note sits between a fixed-length prefix
        // and a single trailing character in the element's HTML.
        const key = noteToSing.innerHTML.substring(17, noteToSing.innerHTML.length - 2);
        const octave = Number(noteToSing.innerHTML.charAt(noteToSing.innerHTML.length - 2));
        playNote(key, octave);
    };
}