From 2731dd792033d2478c5857eabbf14bc9fc1f4e17 Mon Sep 17 00:00:00 2001
From: Tomohiro IKEDA
Date: Sat, 28 Sep 2024 22:00:02 +0900
Subject: [PATCH] feat: Chorus
---
docs/docs.css | 3 +-
docs/docs.js | 246 +++++++++++++++++++++++++++++++++++++++++++
docs/index.html | 273 ++++++++++++++++++++++++++++++++++++++++++++++++
3 files changed, 521 insertions(+), 1 deletion(-)
diff --git a/docs/docs.css b/docs/docs.css
index e7858ec..9bf4438 100644
--- a/docs/docs.css
+++ b/docs/docs.css
@@ -286,7 +286,8 @@ button[type="button"] {
opacity: 0.6;
}
-.app-vibrato {
+.app-vibrato,
+.app-chorus {
display: flex;
gap: 8px;
align-items: center;
diff --git a/docs/docs.js b/docs/docs.js
index 867cd81..59a96ad 100644
--- a/docs/docs.js
+++ b/docs/docs.js
@@ -3571,6 +3571,84 @@ const createAudioNode = (name, x, y, w = 300, h = 100) => {
return g;
};
+const createAudioParam = (name, x, y, w = 80, h = 40) => {
+ const g = document.createElementNS(xmlns, 'g');
+
+ const ellipse = document.createElementNS(xmlns, 'ellipse');
+ const text = document.createElementNS(xmlns, 'text');
+
+ ellipse.setAttribute('cx', x.toString(10));
+ ellipse.setAttribute('cy', y.toString(10));
+ ellipse.setAttribute('rx', w.toString(10));
+ ellipse.setAttribute('ry', h.toString(10));
+ ellipse.setAttribute('stroke', baseColor);
+ ellipse.setAttribute('stroke-width', lineWidth.toString(10));
+ ellipse.setAttribute('stroke-linecap', lineCap);
+ ellipse.setAttribute('stroke-linejoin', lineJoin);
+ ellipse.setAttribute('fill', 'rgb(255 255 255)');
+
+ text.textContent = name;
+
+ text.setAttribute('x', (x + 4).toString(10));
+ text.setAttribute('y', (y + h / 2 - 14).toString(10));
+ text.setAttribute('text-anchor', 'middle');
+ text.setAttribute('stroke', 'none');
+ text.setAttribute('fill', baseColor);
+ text.setAttribute('font-size', '16px');
+
+ g.appendChild(ellipse);
+ g.appendChild(text);
+
+ return g;
+};
+
+const createLFO = (x, y) => {
+ const w = 300;
+ const h = 100;
+
+ const g = document.createElementNS(xmlns, 'g');
+
+ const rect = document.createElementNS(xmlns, 'rect');
+
+ const text = document.createElementNS(xmlns, 'text');
+ const subText = document.createElementNS(xmlns, 'text');
+
+ rect.setAttribute('x', x.toString(10));
+ rect.setAttribute('y', y.toString(10));
+ rect.setAttribute('width', w.toString(10));
+ rect.setAttribute('height', h.toString(10));
+ rect.setAttribute('stroke', baseColor);
+ rect.setAttribute('stroke-width', lineWidth.toString(10));
+ rect.setAttribute('stroke-linecap', lineCap);
+ rect.setAttribute('stroke-linejoin', lineJoin);
+ rect.setAttribute('fill', 'rgb(255 255 255)');
+
+ text.textContent = 'LFO';
+
+ text.setAttribute('x', (x + w / 2).toString(10));
+ text.setAttribute('y', (y + h / 2 - 4).toString(10));
+ text.setAttribute('text-anchor', 'middle');
+ text.setAttribute('stroke', 'none');
+ text.setAttribute('fill', baseColor);
+ text.setAttribute('font-size', '20px');
+
+ subText.textContent = '(OscillatorNode -> GainNode)';
+
+ subText.setAttribute('x', (x + w / 2).toString(10));
+ subText.setAttribute('y', (y + h / 2 + 16).toString(10));
+ subText.setAttribute('text-anchor', 'middle');
+ subText.setAttribute('stroke', 'none');
+ subText.setAttribute('fill', baseColor);
+ subText.setAttribute('font-size', '16px');
+
+ g.appendChild(rect);
+ g.appendChild(text);
+ g.appendChild(subText);
+
+ return g;
+};
+
const createConnection = (startX, startY, endX, endY) => {
const path = document.createElementNS(xmlns, 'path');
@@ -4636,6 +4714,170 @@ const createFIRFilter = (svg) => {
svg.appendChild(outputText);
};
+const createNodeConnectionsForChorus = (svg) => {
+ const g = document.createElementNS(xmlns, 'g');
+
+ const oscillatorNodeRect = createAudioNode('OscillatorNode', 0, 0);
+ const dryNodeRect = createAudioNode('GainNode (Dry)', 0, 200);
+ const delayNodeRect = createAudioNode('DelayNode', 400, 100);
+ const wetNodeRect = createAudioNode('GainNode (Wet)', 400, 300);
+ const audioDestinationNodeRect = createAudioNode('AudioDestinationNode', 0, 400);
+
+ const oscillatorNodeAndDryPath = createConnection(150 - 2, 100, 150 - 2, 300);
+ const dryAndAudioDestinationNodePath = createConnection(150 - 2, 300, 150 - 2, 400);
+ const delayNodeAndWetPath = createConnection(550 - 2, 200, 550 - 2, 300);
+
+ const oscillatorNodeAndDelayNodePath1 = createConnection(300, 50 - 2, 548, 50 - 2);
+ const oscillatorNodeAndDelayNodePath2 = createConnection(548, 50 - 2, 548, 100 - 2);
+
+ const wetAndAudioDestinationNodePath1 = createConnection(548, 400 + 2, 548, 450 - 2);
+ const wetAndAudioDestinationNodePath2 = createConnection(548, 450 - 2, 300, 450 - 2);
+
+ const oscillatorNodeAndDryArrow = createConnectionArrow(150 - 2, 200 - 14, 'down');
+ const dryAndAudioDestinationNodeArrow = createConnectionArrow(150 - 2, 400 - 14, 'down');
+
+ const oscillatorNodeAndDelayNodeArrow = createConnectionArrow(548, 100 - 14, 'down');
+ const delayNodeAndWetArrow = createConnectionArrow(548, 300 - 14, 'down');
+ const wetAndAudioDestinationArrow = createConnectionArrow(300 + 14, 450 - 2, 'left');
+
+ const lfoRect = createLFO(800, 150);
+ const delayTimeParamEllipse = createAudioParam('delayTime', 650, 200);
+ const lfoAndDelayTimeParamPath = createConnection(800, 200 - 2, 732, 200 - 2);
+ const lfoAndDelayTimeParamArrow = createConnectionArrow(732 + 12, 200 - 2, 'left');
+
+ g.appendChild(oscillatorNodeRect);
+ g.appendChild(oscillatorNodeAndDryPath);
+ g.appendChild(dryNodeRect);
+ g.appendChild(dryAndAudioDestinationNodePath);
+ g.appendChild(audioDestinationNodeRect);
+ g.appendChild(delayNodeRect);
+ g.appendChild(delayNodeAndWetPath);
+ g.appendChild(wetNodeRect);
+
+ g.appendChild(oscillatorNodeAndDelayNodePath1);
+ g.appendChild(oscillatorNodeAndDelayNodePath2);
+
+ g.appendChild(wetAndAudioDestinationNodePath1);
+ g.appendChild(wetAndAudioDestinationNodePath2);
+
+ g.appendChild(oscillatorNodeAndDryArrow);
+ g.appendChild(dryAndAudioDestinationNodeArrow);
+
+ g.appendChild(oscillatorNodeAndDelayNodeArrow);
+ g.appendChild(delayNodeAndWetArrow);
+ g.appendChild(wetAndAudioDestinationArrow);
+
+ g.appendChild(lfoRect);
+ g.appendChild(delayTimeParamEllipse);
+ g.appendChild(lfoAndDelayTimeParamPath);
+ g.appendChild(lfoAndDelayTimeParamArrow);
+
+ svg.appendChild(g);
+};
+
+const chorus = () => {
+ let oscillator = null;
+ let lfo = null;
+
+ let depthRate = 0;
+ let rateValue = 0;
+ let mixValue = 0;
+
+ const delay = new DelayNode(audiocontext);
+ const depth = new GainNode(audiocontext, { gain: delay.delayTime.value * depthRate });
+ const dry = new GainNode(audiocontext, { gain: 1 - mixValue });
+ const wet = new GainNode(audiocontext, { gain: mixValue });
+
+ const buttonElement = document.getElementById('button-chorus');
+
+ const rangeDelayTimeElement = document.getElementById('range-chorus-delay-time');
+ const rangeDepthElement = document.getElementById('range-chorus-depth');
+ const rangeRateElement = document.getElementById('range-chorus-rate');
+ const rangeMixElement = document.getElementById('range-chorus-mix');
+
+ const spanPrintDelayTimeElement = document.getElementById('print-chorus-delay-time-value');
+ const spanPrintDepthElement = document.getElementById('print-chorus-depth-value');
+ const spanPrintRateElement = document.getElementById('print-chorus-rate-value');
+ const spanPrintMixElement = document.getElementById('print-chorus-mix-value');
+
+ const onDown = (event) => {
+ if (oscillator !== null || lfo !== null) {
+ return;
+ }
+
+ oscillator = new OscillatorNode(audiocontext);
+ lfo = new OscillatorNode(audiocontext, { frequency: rateValue });
+
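+ // Connect nodes for original sound
+ // OscillatorNode (Input) -> GainNode (Dry) -> AudioDestinationNode (Output)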
+ oscillator.connect(dry);
+ dry.connect(audiocontext.destination);
+
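+ // Connect nodes for delay sound
+ // OscillatorNode (Input) -> DelayNode (Delay) -> GainNode (Wet) -> AudioDestinationNode (Output)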
+ oscillator.connect(delay);
+ delay.connect(wet);
+ wet.connect(audiocontext.destination);
+
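+ // Connect nodes for LFO that changes delay time periodically
+ // OscillatorNode (LFO) -> GainNode (Depth) -> delayTime (AudioParam)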
+ lfo.connect(depth);
+ depth.connect(delay.delayTime);
+
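+ // Start oscillator and LFO immediately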
+ oscillator.start(0);
+ lfo.start(0);
+
+ buttonElement.textContent = 'stop';
+ };
+
+ const onUp = (event) => {
+ if (oscillator === null || lfo === null) {
+ return;
+ }
+
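+ // Stop immediately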
+ oscillator.stop(0);
+ lfo.stop(0);
+
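+ // GC (Garbage Collection)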
+ oscillator = null;
+ lfo = null;
+
+ buttonElement.textContent = 'start';
+ };
+
+ buttonElement.addEventListener('mousedown', onDown);
+ buttonElement.addEventListener('touchstart', onDown);
+ buttonElement.addEventListener('mouseup', onUp);
+ buttonElement.addEventListener('touchend', onUp);
+
+ rangeDelayTimeElement.addEventListener('input', (event) => {
+ delay.delayTime.value = event.currentTarget.valueAsNumber * 0.001;
+ depth.gain.value = delay.delayTime.value * depthRate;
+
+ spanPrintDelayTimeElement.textContent = `${Math.trunc(delay.delayTime.value * 1000)} msec`;
+ });
+
+ rangeDepthElement.addEventListener('input', (event) => {
+ depthRate = event.currentTarget.valueAsNumber;
+
+ depth.gain.value = delay.delayTime.value * depthRate;
+
+ spanPrintDepthElement.textContent = depthRate.toString(10);
+ });
+
+ rangeRateElement.addEventListener('input', (event) => {
+ rateValue = event.currentTarget.valueAsNumber;
+
+ if (lfo) {
+ lfo.frequency.value = rateValue;
+ }
+
+ spanPrintRateElement.textContent = rateValue.toString(10);
+ });
+
+ rangeMixElement.addEventListener('input', (event) => {
+ mixValue = event.currentTarget.valueAsNumber;
+
+ dry.gain.value = 1 - mixValue;
+ wet.gain.value = mixValue;
+
+ spanPrintMixElement.textContent = mixValue.toString(10);
+ });
+};
+
createCoordinateRect(document.getElementById('svg-figure-sin-function'));
createSinFunctionPath(document.getElementById('svg-figure-sin-function'));
@@ -4705,3 +4947,7 @@ createConvolution(document.getElementById('svg-convolution'));
animateConvolution(document.getElementById('svg-convolution-animation'));
createFIRFilter(document.getElementById('svg-fir-filter'));
+
+createNodeConnectionsForChorus(document.getElementById('svg-node-connections-for-chorus'));
+
+chorus();
diff --git a/docs/index.html b/docs/index.html
index 062b43a..5722e6e 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -6104,6 +6104,279 @@ Chorus / Flanger
Chorus and flanger are both built on top of delay, so if the delay implementation is still unclear,
please work through the delay section first before continuing with this one.
+
+ Chorus
+
+ A chorus is implemented by mixing the original sound with an effect sound whose delay time is varied periodically.
+ Varying the delay time periodically is the key point of the implementation, and an LFO is exactly what lets us do that.
+ In other words, in the Web Audio API, the implementation is complete once the LFO is connected to the delayTime property (an AudioParam) of the DelayNode.
+
+ First, implement only the output connection for the original sound and the output connection for the effect sound (the DelayNode connection).
+ const context = new AudioContext();
+
+const delay = new DelayNode(context, { delayTime: 0.02 });
+
+const oscillator = new OscillatorNode(context);
+
+// Connect nodes for original sound
+// OscillatorNode (Input) -> AudioDestinationNode (Output)
+oscillator.connect(context.destination);
+
+// Connect nodes for delay sound
+// OscillatorNode (Input) -> DelayNode (Delay) -> AudioDestinationNode (Output)
+oscillator.connect(delay);
+delay.connect(context.destination);
+
+oscillator.start(0);
+oscillator.stop(context.currentTime + 2);
+
+ Then, as explained in the LFO section, create an OscillatorNode instance and a GainNode instance (the Depth parameter) for the LFO,
+ and connect them to the delayTime property of the DelayNode.
+
+ const context = new AudioContext();
+
+const baseDelayTime = 0.020;
+const depthValue = 0.005;
+const rateValue = 1;
+
+const delay = new DelayNode(context, { delayTime: baseDelayTime });
+
+const oscillator = new OscillatorNode(context);
+
+const lfo = new OscillatorNode(context, { frequency: rateValue });
+const depth = new GainNode(context, { gain: depthValue });
+
+// Connect nodes for original sound
+// OscillatorNode (Input) -> AudioDestinationNode (Output)
+oscillator.connect(context.destination);
+
+// Connect nodes for delay sound
+// OscillatorNode (Input) -> DelayNode (Delay) -> AudioDestinationNode (Output)
+oscillator.connect(delay);
+delay.connect(context.destination);
+
+// Connect nodes for LFO that changes delay time periodically
+// OscillatorNode (LFO) -> GainNode (Depth) -> delayTime (AudioParam)
+lfo.connect(depth);
+depth.connect(delay.delayTime);
+
+// Start oscillator and LFO
+oscillator.start(0);
+lfo.start(0);
+
+// Stop oscillator and LFO
+oscillator.stop(context.currentTime + 5);
+lfo.stop(context.currentTime + 5);
+
+ Apart from removing the feedback, this is the same node graph as the delay effect, with the LFO connected to the delayTime property (AudioParam) of the DelayNode.
+ As for the parameters, a chorus works well with a base delay time of about 20 - 30 msec, a depth of roughly ±5 - 10 msec,
+ and a slow rate of around 1 Hz. (In a real product, though, you would probably implement a more general-purpose LFO so that these values can be configured more freely.)
+
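+ As a quick sanity check on these numbers (a minimal sketch reusing the baseDelayTime, depthValue, and rateValue values from the snippet above; minDelayTime and maxDelayTime are only illustrative names):
+
+ const baseDelayTime = 0.020; // base delay time (20 msec)
+const depthValue = 0.005;    // LFO depth (5 msec)
+const rateValue = 1;         // LFO rate (1 Hz)
+
+// The default (sine) LFO outputs -1 ... +1, the Depth gain scales that to ±depthValue,
+// and the Web Audio API sums it into the delayTime AudioParam, so the delay time sweeps
+// over the following range once every 1 / rateValue = 1 second:
+const minDelayTime = baseDelayTime - depthValue; // 0.020 - 0.005 = 0.015 sec (15 msec)
+const maxDelayTime = baseDelayTime + depthValue; // 0.020 + 0.005 = 0.025 sec (25 msec)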
+
+ This completes the chorus in principle, but as an effect it does not sound much like a chorus yet.
+ Because the original sound and the effect sound are mixed at the same gain, they tend to sound as if they were being output separately.
+ The result becomes more chorus-like when the effect sound is mixed in just enough to make the original sound waver slightly,
+ so connect GainNodes for Dry / Wet and adjust the gains of the original sound and the effect sound.
+
+ const context = new AudioContext();
+
+const baseDelayTime = 0.020;
+const depthValue = 0.005;
+const rateValue = 1;
+
+const delay = new DelayNode(context, { delayTime: baseDelayTime });
+
+const oscillator = new OscillatorNode(context);
+
+const lfo = new OscillatorNode(context, { frequency: rateValue });
+const depth = new GainNode(context, { gain: depthValue });
+
+const dry = new GainNode(context, { gain: 0.7 }); // for gain of original sound
+const wet = new GainNode(context, { gain: 0.3 }); // for gain of chorus sound
+
+// Connect nodes for original sound
+// OscillatorNode (Input) -> GainNode (Dry) -> AudioDestinationNode (Output)
+oscillator.connect(dry);
+dry.connect(context.destination);
+
+// Connect nodes for delay sound
+// OscillatorNode (Input) -> DelayNode (Delay) -> GainNode (Wet) -> AudioDestinationNode (Output)
+oscillator.connect(delay);
+delay.connect(wet);
+wet.connect(context.destination);
+
+// Connect nodes for LFO that changes delay time periodically
+// OscillatorNode (LFO) -> GainNode (Depth) -> delayTime (AudioParam)
+lfo.connect(depth);
+depth.connect(delay.delayTime);
+
+// Start oscillator and LFO
+oscillator.start(0);
+lfo.start(0);
+
+// Stop oscillator and LFO
+oscillator.stop(context.currentTime + 5);
+lfo.stop(context.currentTime + 5);
+
+
+ This completes the chorus implementation. Since many parameters are hard-coded, making them configurable from a UI,
+ as a real application would, leads to code like the following (Dry / Wet are controlled together as a single Mix parameter).
+
+ <button type="button">start</button>
+<label for="range-chorus-delay-time">Delay time</label>
+<input type="range" id="range-chorus-delay-time" value="0" min="0" max="50" step="1" />
+<span id="print-chorus-delay-time-value">0 msec</span>
+<label for="range-chorus-depth">Depth</label>
+<input type="range" id="range-chorus-depth" value="0" min="0" max="1" step="0.05" />
+<span id="print-chorus-depth-value">0</span>
+<label for="range-chorus-rate">Rate</label>
+<input type="range" id="range-chorus-rate" value="0" min="0" max="1" step="0.05" />
+<span id="print-chorus-rate-value">0</span>
+<label for="range-chorus-mix">Mix</label>
+<input type="range" id="range-chorus-mix" value="0" min="0" max="1" step="0.05" />
+<span id="print-chorus-mix-value">0</span>
+ const context = new AudioContext();
+
+let oscillator = null;
+let lfo = null;
+
+let depthRate = 0;
+let rateValue = 0;
+let mixValue = 0;
+
+const delay = new DelayNode(context);
+const depth = new GainNode(context, { gain: delay.delayTime.value * depthRate });
+const dry = new GainNode(context, { gain: 1 - mixValue });
+const wet = new GainNode(context, { gain: mixValue });
+
+const buttonElement = document.querySelector('button[type="button"]');
+
+const rangeDelayTimeElement = document.getElementById('range-chorus-delay-time');
+const rangeDepthElement = document.getElementById('range-chorus-depth');
+const rangeRateElement = document.getElementById('range-chorus-rate');
+const rangeMixElement = document.getElementById('range-chorus-mix');
+
+const spanPrintDelayTimeElement = document.getElementById('print-chorus-delay-time-value');
+const spanPrintDepthElement = document.getElementById('print-chorus-depth-value');
+const spanPrintRateElement = document.getElementById('print-chorus-rate-value');
+const spanPrintMixElement = document.getElementById('print-chorus-mix-value');
+
+buttonElement.addEventListener('mousedown', (event) => {
+ if ((oscillator !== null) || (lfo !== null)) {
+ return;
+ }
+
+ oscillator = new OscillatorNode(context);
+ lfo = new OscillatorNode(context, { frequency: rateValue });
+
+ // Connect nodes for original sound
+ // OscillatorNode (Input) -> GainNode (Dry) -> AudioDestinationNode (Output)
+ oscillator.connect(dry);
+ dry.connect(context.destination);
+
+ // Connect nodes for delay sound
+ // OscillatorNode (Input) -> DelayNode (Delay) -> GainNode (Wet) -> AudioDestinationNode (Output)
+ oscillator.connect(delay);
+ delay.connect(wet);
+ wet.connect(context.destination);
+
+ // Connect nodes for LFO that changes delay time periodically
+ // OscillatorNode (LFO) -> GainNode (Depth) -> delayTime (AudioParam)
+ lfo.connect(depth);
+ depth.connect(delay.delayTime);
+
+ // Start oscillator and LFO immediately
+ oscillator.start(0);
+ lfo.start(0);
+
+ buttonElement.textContent = 'stop';
+});
+
+buttonElement.addEventListener('mouseup', (event) => {
+ if ((oscillator === null) || (lfo === null)) {
+ return;
+ }
+
+ // Stop immediately
+ oscillator.stop(0);
+ lfo.stop(0);
+
+ // GC (Garbage Collection)
+ oscillator = null;
+ lfo = null;
+
+ buttonElement.textContent = 'start';
+});
+
+rangeDelayTimeElement.addEventListener('input', (event) => {
+ delay.delayTime.value = event.currentTarget.valueAsNumber * 0.001;
+ depth.gain.value = delay.delayTime.value * depthRate;
+
+ spanPrintDelayTimeElement.textContent = `${Math.trunc(delay.delayTime.value * 1000)} msec`;
+});
+
+rangeDepthElement.addEventListener('input', (event) => {
+ depthRate = event.currentTarget.valueAsNumber;
+
+ depth.gain.value = delay.delayTime.value * depthRate;
+
+ spanPrintDepthElement.textContent = depthRate.toString(10);
+});
+
+rangeRateElement.addEventListener('input', (event) => {
+ rateValue = event.currentTarget.valueAsNumber;
+
+ if (lfo) {
+ lfo.frequency.value = rateValue;
+ }
+
+ spanPrintRateElement.textContent = rateValue.toString(10);
+});
+
+rangeMixElement.addEventListener('input', (event) => {
+ mixValue = event.currentTarget.valueAsNumber;
+
+ dry.gain.value = 1 - mixValue;
+ wet.gain.value = mixValue;
+
+ spanPrintMixElement.textContent = mixValue.toString(10);
+});
+
+ [Interactive demo: Chorus (start/stop button with Delay time (0 msec), Depth (0), Rate (0), and Mix (0) controls)]