This applet illustrates the beat phenomenon. The red and blue waves (the red is hidden behind the blue) at the top of the panel are the waves being superposed. The green wave at the bottom of the panel is the superposition. The movement of the waves is shown in slow motion. The sound you hear is in real time.
When two tuning forks with frequencies close to each other are sounded together, the combined sound exhibits a periodic variation in its amplitude. That is, the combined sound appears to rise (wax) and fall (wane) in intensity. This is the beat phenomenon, and the frequency with which the amplitude varies is the beat frequency. The beat frequency is the absolute difference between the frequencies of the waves being combined. It is the constructive and destructive interference between the combining waves that causes this phenomenon.
Let the combining waves be $\sin(2\pi\nu_1 t)$ and $\sin(2\pi\nu_2 t)$. Then the combined sound is $\sin(2\pi\nu_1 t) + \sin(2\pi\nu_2 t) = 2\cos\!\left(2\pi\,\tfrac{\nu_1-\nu_2}{2}\,t\right)\sin\!\left(2\pi\,\tfrac{\nu_1+\nu_2}{2}\,t\right)$. When the frequencies are close to each other, $\tfrac{\nu_1-\nu_2}{2}$ will be small and we will not be able to hear the cosine term in the final expression as a distinct tone. Instead we perceive it as a term that modulates the amplitude of a sound produced by the sine term, which has a frequency of $\tfrac{\nu_1+\nu_2}{2}$, the average of the combining frequencies. The envelope of the green wave (the superposition) is given by the cosine term.
Here is the code:
// --- DOM controls (all resolved in init) -----------------------------
// Sliders selecting the two source frequencies (Hz).
let freq_1_sldr, freq_2_sldr;
// Text fields echoing the slider values.
let freq_1_lbl, freq_2_lbl;
// +/- buttons for fine (1 Hz) frequency stepping.
let freq_1_plus, freq_2_plus, freq_1_minus, freq_2_minus;
// Start/Stop toggle.
let play_btn;
// Container whose client size the canvas tracks.
let canvas_div;
let canvas, ctx;
// resumed: the AudioContext has been resumed after a user gesture;
// playing: sound and animation are currently running.
let resumed, playing;
// NOTE(review): phase_1/phase_2/phase1_inc/phase2_inc appear unused —
// that state now lives in sound_objs. Confirm before removing.
let phase_1, phase_2, phase1_inc, phase2_inc;
//let ν_1, ν_2;
// NOTE(review): ω_1/ω_2 and k1/k2 also look superseded by sound_objs.
let ω_1, ω_2;
let k1, k2;
// Vertical baselines of the three drawn waves (pixels).
let y_orig_1, y_orig_2, y_orig_3;
// Slow-motion time used only for drawing (audio runs in real time).
let t;
// Per-frame animation callback; swapped in/out by onPlay.
let tick;
let audio_context, audio_node;
const sin = Math.sin;
const two_π = 2 * Math.PI;
// amp: audio amplitude per tone; wave_amp: drawn amplitude in pixels.
const amp = 0.4,
wave_amp = 24.0;
// Scale factor relating ω to the on-canvas wave number k.
const vel = 300;
// One entry per tone: { lbl, ω, k, phase, phase_inc }.
let sound_objs = [];
// Stroke a connected polyline through `pts` (array of [x, y] pairs)
// on the shared 2d context in the given stroke colour.
function strokePath(pts, stroke_style) {
  ctx.strokeStyle = stroke_style;
  ctx.beginPath();
  pts.forEach(([x, y], i) => {
    if (i === 0) {
      ctx.moveTo(x, y);
    } else {
      ctx.lineTo(x, y);
    }
  });
  ctx.stroke();
}
// Repaint the whole canvas: the two source waves on their shared
// baseline at the top, and their superposition (the beat pattern)
// at the bottom, all evaluated at slow-motion time t.
function draw() {
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  const time_term_1 = sound_objs[0].ω * t;
  const time_term_2 = sound_objs[1].ω * t;
  const pts_1 = [];
  const pts_2 = [];
  const pts_3 = [];
  // Sample every 2 px across the canvas width.
  for (let x = 0; x <= canvas.width; x += 2) {
    const d_1 = wave_amp * sin(sound_objs[0].k * x - time_term_1);
    const d_2 = wave_amp * sin(sound_objs[1].k * x - time_term_2);
    pts_1.push([x, y_orig_1 - d_1]);
    pts_2.push([x, y_orig_2 - d_2]);
    // The green wave is the point-wise sum of the two displacements.
    pts_3.push([x, y_orig_3 - d_1 - d_2]);
  }
  strokePath(pts_1, '#dc143c');
  strokePath(pts_2, '#1e90ff');
  strokePath(pts_3, '#228b22');
}
// Retune sound object `index` to frequency `value` (Hz): refresh its
// label, angular frequency ω, canvas-scaled wave number k, and the
// per-audio-sample phase increment used by the audio callback.
function changeSoundProps(index, value) {
  const so = sound_objs[index];
  const ω = two_π * value;
  so.lbl.value = String(value);
  so.ω = ω;
  so.k = ω / (vel * canvas.width);
  so.phase_inc = ω / audio_context.sampleRate;
}
// Slider 'change' handler: push the new frequency into the matching
// sound object, restart slow-motion time, and repaint.
// Fix: parseInt is now called with an explicit radix of 10 — without
// it, exotic values could be parsed in an unintended base.
function onSldrChange(e) {
  const target = e.target;
  switch (target) {
    case freq_1_sldr:
      changeSoundProps(0, parseInt(target.value, 10));
      break;
    case freq_2_sldr:
      changeSoundProps(1, parseInt(target.value, 10));
      break;
  }
  // Restart the drawing clock so the waves begin in phase on screen.
  t = 0.0;
  draw();
}
// Click handler for the four +/- stepper buttons.
// Fix: the original repeated the same step logic four times (one
// switch case per button); it is collapsed into a lookup table.
// parseInt now gets an explicit radix of 10.
// Behavior is preserved: stepping past the slider's min/max returns
// early without resetting time or redrawing; a successful step (or an
// unmatched target, as in the original's fallthrough) resets t and
// redraws.
function onFreqStep(e) {
  // [button, slider, sound index, direction]
  const steps = [
    [freq_1_plus, freq_1_sldr, 0, +1],
    [freq_1_minus, freq_1_sldr, 0, -1],
    [freq_2_plus, freq_2_sldr, 1, +1],
    [freq_2_minus, freq_2_sldr, 1, -1],
  ];
  const entry = steps.find(([btn]) => btn === e.target);
  if (entry) {
    const [, sldr, index, dir] = entry;
    const cur_value = parseInt(sldr.value, 10);
    const limit = parseInt(dir > 0 ? sldr.max : sldr.min, 10);
    if (cur_value === limit) {
      return; // already at the bound: nothing to change
    }
    const new_value = cur_value + dir;
    changeSoundProps(index, new_value);
    sldr.value = String(new_value);
  }
  // Restart the drawing clock so the waves begin in phase on screen.
  t = 0.0;
  draw();
}
// Start/Stop button handler: toggles both the audio output and the
// slow-motion animation loop.
function onPlay() {
  // Browsers require a user gesture before audio may start, so resume
  // the AudioContext exactly once on the first click.
  if (!resumed) {
    resumed = true;
    audio_context.resume();
  }
  if (!playing) {
    playing = true;
    play_btn.value = 'Stop';
    audio_node.connect(audio_context.destination);
    // Animation loop: advance the slow-motion clock and repaint.
    tick = () => {
      requestAnimationFrame(tick);
      t += 0.00002;
      draw();
    };
    requestAnimationFrame(tick);
  } else {
    playing = false;
    play_btn.value = 'Start';
    audio_node.disconnect();
    // Neutralize any still-scheduled frame callback.
    tick = () => {};
  }
}
// One-time setup on page load: wire up the controls, size the canvas,
// build the audio pipeline, and draw the initial (t = 0) waveforms.
// Fix: the initial frequencies (440/441 Hz) were hard-coded in two
// separate places (slider values and sound-object construction), and
// the two sound-object literals were duplicated; both are consolidated
// so the UI and the audio state cannot drift apart.
function init() {
  freq_1_sldr = document.getElementById('freq_1_sldr');
  freq_2_sldr = document.getElementById('freq_2_sldr');
  // Initial frequencies 1 Hz apart, so a 1 Hz beat is audible at once.
  const start_freqs = [440, 441];
  freq_1_sldr.value = String(start_freqs[0]);
  freq_2_sldr.value = String(start_freqs[1]);
  for (let sldr of [freq_1_sldr, freq_2_sldr]) {
    sldr.addEventListener('change', onSldrChange);
  }
  freq_1_lbl = document.getElementById('freq_1_lbl');
  freq_2_lbl = document.getElementById('freq_2_lbl');
  freq_1_plus = document.getElementById('freq_1_plus');
  freq_2_plus = document.getElementById('freq_2_plus');
  freq_1_minus = document.getElementById('freq_1_minus');
  freq_2_minus = document.getElementById('freq_2_minus');
  for (let step_btn of [freq_1_plus, freq_2_plus, freq_1_minus, freq_2_minus]) {
    step_btn.addEventListener('click', onFreqStep);
  }
  play_btn = document.getElementById('play_btn');
  play_btn.addEventListener('click', onPlay);
  canvas_div = document.getElementById('canvas_div');
  canvas = document.getElementById('canvas');
  canvas.width = canvas_div.clientWidth;
  canvas.height = canvas_div.clientHeight;
  ctx = canvas.getContext('2d');
  ctx.fillStyle = 'white';
  ctx.lineWidth = 2;
  resumed = false;
  playing = false;
  audio_context = new AudioContext();
  // NOTE(review): createScriptProcessor is deprecated in favour of
  // AudioWorkletNode, but remains widely supported.
  audio_node = audio_context.createScriptProcessor(2048, 2, 2);
  // One sound object per tuning fork: angular frequency ω, wave
  // number k scaled so the slow-motion wave fits the canvas, and the
  // per-sample phase increment used by the audio callback.
  const lbls = [freq_1_lbl, freq_2_lbl];
  for (let i = 0; i < start_freqs.length; i++) {
    const ω = two_π * start_freqs[i];
    sound_objs.push({
      lbl: lbls[i],
      ω: ω,
      k: ω / (vel * canvas.width),
      phase: 0.0,
      phase_inc: ω / audio_context.sampleRate,
    });
  }
  // Baselines: source waves near the top, superposition (which swings
  // twice as far) near the bottom.
  y_orig_1 = 2 + wave_amp;
  y_orig_2 = y_orig_1;
  y_orig_3 = canvas.height - 2 - 2 * wave_amp;
  t = 0;
  tick = () => {};
  audio_node.onaudioprocess = (ae) => {
    // Fill both stereo channels with the sum of the two sine tones.
    // Keeping each phase wrapped to [0, 2π) avoids precision loss
    // over long play sessions.
    let chnl_data_0 = ae.outputBuffer.getChannelData(0);
    let chnl_data_1 = ae.outputBuffer.getChannelData(1);
    const data_length = chnl_data_0.length;
    for (let i = 0; i < data_length; i++) {
      chnl_data_0[i] = chnl_data_1[i] =
        amp * (sin(sound_objs[0].phase) + sin(sound_objs[1].phase));
      for (let so of sound_objs) {
        so.phase = (so.phase + so.phase_inc) % two_π;
      }
    }
  };
  draw();
}
// Window resize / orientation-change handler.
// Fix: each sound object's wave number k is derived from canvas.width
// (see changeSoundProps), but the original never recomputed it here,
// so after a resize the drawn wavelengths no longer matched the new
// canvas width. k is now refreshed for both tones.
// NOTE(review): only the width is re-read, as in the original —
// presumably canvas_div's height is fixed by the layout; confirm.
function onResize() {
  canvas.width = canvas_div.clientWidth;
  // Assigning canvas.width resets the 2d context state, so restore it.
  ctx.fillStyle = 'white';
  ctx.lineWidth = 2;
  for (let so of sound_objs) {
    so.k = so.ω / (vel * canvas.width);
  }
  if (!playing) {
    draw();
  }
}
// Defer setup until the DOM is ready; repaint when the viewport changes.
window.addEventListener('load', init);
window.addEventListener('resize', onResize, false);
window.addEventListener('orientationchange', onResize, false);