waveform-path.js
// Fetch an audio file and decode it into an AudioBuffer.
export const getAudioData = (url) => {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    const audioContext = new AudioContext();
    return fetch(url)
        .then(response => response.arrayBuffer())
        .then(arrayBuffer => audioContext.decodeAudioData(arrayBuffer))
        .catch(error => {
            console.error(error);
        });
};
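// Usage sketch (illustrative; "music.mp3" is an assumption, not part of this file):
//
//   getAudioData('music.mp3').then(audioBuffer => {
//       console.log(audioBuffer.duration, audioBuffer.sampleRate);
//   });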
// Build an SVG path string that lays the waveform out around a circle.
export const polarPath = (audioBuffer, options = {}) => {
    const {
        channel = 0,
        samples = audioBuffer.length,
        distance = 50,          // radius of the base circle
        length = 100,           // maximum length of each sample "spoke"
        top = 0,                // center y
        left = 0,               // center x
        type = 'steps',
        startdeg = 0,
        enddeg = 360,
        invertdeg = false,
        invertpath = false,
        paths = [{ d: 'Q', sdeg: 0, sr: 0, deg: 50, r: 100, edeg: 100, er: 0 }],
        animation = false,
        animationframes = 10,
        normalize = true,
    } = options;

    const framesData = getFramesData(audioBuffer, channel, animation, animationframes);
    const filteredData = getFilterData(framesData, samples);
    const normalizeData = (normalize ? getNormalizeData(filteredData) : filteredData);

    let path = "";

    const fixenddeg = (enddeg < startdeg ? enddeg + 360 : enddeg);
    const deg = (!invertdeg ? (fixenddeg - startdeg) / samples : (startdeg - fixenddeg) / samples);
    const fixOrientation = (!invertdeg ? 90 + startdeg : 90 + startdeg + 180);
    const invert = (!invertpath ? 1 : -1);
    const pathslength = paths.length;
    const fixpathslength = (type === 'mirror' ? pathslength * 2 : pathslength);
    const pi180 = Math.PI / 180;

    const normalizeDataLength = normalizeData.length;
    // One pass per frame; frames are separated by ';' so the result can be
    // split into one path per animation step.
    for (let f = 0; f < normalizeDataLength; f++) {
        if (f > 0) {
            const pathlength = path.length;
            const lastvalue = path.charAt(pathlength - 1);
            if (lastvalue === ";" || pathlength === 0) {
                path += ' M 0 0 ;';
            } else {
                path += ';';
            }
        }

        let last_pos_x = -9999;
        let last_pos_y = -9999;

        for (let i = 0; i < samples; i++) {
            // Alternate samples above/below the base circle, except for bars.
            const positive = (type !== 'bars' ? (i % 2 ? 1 : -1) : 1);
            let mirror = 1;

            for (let j = 0; j < fixpathslength; j++) {
                let k = j;
                if (j >= pathslength) {
                    // Second half of a 'mirror' type: reuse the segments, flipped.
                    k = j - pathslength;
                    mirror = -1;
                }
                paths[k].minshow = paths[k].minshow ?? 0;
                paths[k].maxshow = paths[k].maxshow ?? 1;
                paths[k].normalize = paths[k].normalize ?? false;
                const normalizeDataValue = (paths[k].normalize ? 1 : normalizeData[f][i]);

                if (paths[k].minshow <= normalizeData[f][i] && paths[k].maxshow >= normalizeData[f][i]) {
                    // Angles (in radians) for the start, control and end points of the segment.
                    const angleStart = ((deg * (i + paths[k].sdeg / 100)) - fixOrientation) * pi180;
                    const angle = ((deg * (i + paths[k].deg / 100)) - fixOrientation) * pi180;
                    const angleEnd = ((deg * (i + paths[k].edeg / 100)) - fixOrientation) * pi180;

                    const pos_x = left + ((length * (paths[k].sr / 100) * normalizeDataValue) * positive * mirror * invert + distance) * Math.cos(angleStart);
                    const pos_y = top + ((length * (paths[k].sr / 100) * normalizeDataValue) * positive * mirror * invert + distance) * Math.sin(angleStart);
                    const center_pos_x = left + ((length * (paths[k].r / 100) * normalizeDataValue) * positive * mirror * invert + distance) * Math.cos(angle);
                    const center_pos_y = top + ((length * (paths[k].r / 100) * normalizeDataValue) * positive * mirror * invert + distance) * Math.sin(angle);
                    const end_pos_x = left + ((length * (paths[k].er / 100) * normalizeDataValue) * positive * mirror * invert + distance) * Math.cos(angleEnd);
                    const end_pos_y = top + ((length * (paths[k].er / 100) * normalizeDataValue) * positive * mirror * invert + distance) * Math.sin(angleEnd);

                    // Only emit a Move command when the pen is not already at the start point.
                    if (pos_x !== last_pos_x || pos_y !== last_pos_y) {
                        path += `M ${pos_x} ${pos_y} `;
                    }
                    path += `Q ${center_pos_x} ${center_pos_y} ${end_pos_x} ${end_pos_y} `;
                    last_pos_x = end_pos_x;
                    last_pos_y = end_pos_y;
                }
            }
        }
    }
    return path;
};
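// Usage sketch (illustrative; the option values and the SVG markup are
// assumptions, not part of this file):
//
//   <svg viewBox="0 0 200 200"><path fill="none" stroke="currentColor" /></svg>
//
//   getAudioData('music.mp3').then(audioBuffer => {
//       const d = polarPath(audioBuffer, {
//           samples: 120,
//           distance: 50,   // base radius
//           length: 40,     // spoke length
//           top: 100,
//           left: 100,
//           type: 'steps',
//       });
//       document.querySelector('svg path').setAttribute('d', d);
//   });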
// Split the raw channel data into frames. With animation enabled, each frame
// holds (sampleRate / animationframes) samples; otherwise the whole channel
// is returned as a single frame.
const getFramesData = (audioBuffer, channel, animation, animationframes) => {
    const rawData = audioBuffer.getChannelData(channel);
    const framesData = [];
    if (animation) {
        const frames = audioBuffer.sampleRate / animationframes;
        for (let index = 0; index < rawData.length; index += frames) {
            const partraw = rawData.slice(index, index + frames);
            framesData.push(partraw);
        }
    } else {
        framesData.push(rawData);
    }
    return framesData;
};
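// Example (illustrative numbers): at a 48000 Hz sample rate with
// animationframes = 10, each frame covers 4800 samples, i.e. 100 ms of audio
// per animation step.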
// Downsample each frame to `samples` values by averaging the absolute
// amplitude of consecutive blocks of samples.
const getFilterData = (framesData, samples) => {
    const filteredData = [];
    const framesDataLength = framesData.length;
    for (let f = 0; f < framesDataLength; f++) {
        const blockSize = Math.floor(framesData[f].length / samples); // the number of samples in each subdivision
        const filteredDataBlock = [];
        for (let i = 0; i < samples; i++) {
            const blockStart = blockSize * i; // the location of the first sample in the block
            let sum = 0;
            for (let j = 0; j < blockSize; j++) {
                sum = sum + Math.abs(framesData[f][blockStart + j]); // find the sum of all the samples in the block
            }
            filteredDataBlock.push(sum / blockSize); // divide the sum by the block size to get the average
        }
        filteredData.push(filteredDataBlock);
    }
    return filteredData;
};
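// Example (illustrative numbers): a frame of 4800 samples reduced to
// samples = 100 gives blockSize = 48, so each output value is the mean of
// |amplitude| over 48 consecutive samples. If a frame is shorter than
// `samples`, blockSize is 0 and the averages come out as NaN.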
// Scale every frame by the inverse of the global peak so the loudest value
// across all frames becomes 1.
const getNormalizeData = (filteredData) => {
    const multipliers = [];
    const filteredDataLength = filteredData.length;
    for (let i = 0; i < filteredDataLength; i++) {
        const multiplier = Math.max(...filteredData[i]);
        multipliers.push(multiplier);
    }
    const maxMultiplier = Math.pow(Math.max(...multipliers), -1);
    const normalizeData = [];
    for (let i = 0; i < filteredDataLength; i++) {
        const normalizeDataBlock = filteredData[i].map(n => n * maxMultiplier);
        normalizeData.push(normalizeDataBlock);
    }
    return normalizeData;
};
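// Example (illustrative numbers): with per-frame peaks of 0.2 and 0.5, the
// global peak is 0.5, so every value is multiplied by 1 / 0.5 = 2 and the
// normalized data tops out at 1.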