// autonomous.contact/public/js/audioplayer.js

// adapted from: https://codepen.io/uixamp / https://arisetyo.github.io
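// Formats a duration in seconds as "HH:MM:SS" when over an hour, otherwise "MM:SS".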
function human_readable_time(duration) {
  return new Date(duration * 1000).toISOString().slice(duration > 3600 ? 11 : 14, 19);
}
const FREQUENCY_SAMPLES = 512; // analyser fftSize is 4 * this value
const BAR_WIDTH = 4; // horizontal step, in bins/pixels, between visualizer bars
const PROGRESS_TICKS = 1000; // resolution of the progress slider
const MS_BETWEEN_PROGRESS_UPDATES = 200; // not referenced below
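// Watches the whole document and initializes any .audio-container that has not yet
// been marked with data-audio-player. A rough sketch of the markup this script
// expects, inferred from the selectors used below (the real pages may nest things
// differently):
//
//   <div class="audio-container">
//     <audio src="..."></audio>
//     <canvas></canvas>
//     <div class="audio-controls-container">
//       <button class="audio-control skip-back"></button>
//       <button class="audio-control play-pause-toggle"></button>
//       <button class="audio-control skip-forward"></button>
//       <input type="range" name="progress" />
//       <input type="range" name="volume" />
//       <span class="volume-display"></span>
//       <div class="time-container">
//         <span class="current"></span> / <span class="duration"></span>
//       </div>
//     </div>
//   </div>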
new MutationObserver((mutations) => {
  const uninitialized_audio_elements = document.querySelectorAll(
    ".audio-container:not([data-audio-player])",
  );
  for (const player of uninitialized_audio_elements) {
    const CURRENT_TIMES = player.querySelectorAll(
      ".audio-controls-container .time-container .current",
    );
    const DURATIONS = player.querySelectorAll(
      ".audio-controls-container .time-container .duration",
    );
    let progress_being_changed_by_user = false;
    const AUDIO = player.querySelector("audio");
    AUDIO.controls = false;
    AUDIO.addEventListener("ended", () => {
      delete player.dataset.playing;
    });
    AUDIO.addEventListener("loadedmetadata", () => {
      for (const duration of DURATIONS) {
        duration.innerHTML = human_readable_time(AUDIO.duration);
      }
    });
    AUDIO.addEventListener("timeupdate", () => {
      if (!progress_being_changed_by_user) {
        PROGRESS.value = parseInt((AUDIO.currentTime / AUDIO.duration) * PROGRESS_TICKS);
      }
      for (const current of CURRENT_TIMES) {
        current.innerHTML = human_readable_time(AUDIO.currentTime);
      }
    });
    AUDIO.addEventListener("volumechange", () => {
      if (!volume_being_changed_by_user) {
        VOLUME.value = parseInt(AUDIO.volume * 100);
      }
      for (const volume_display of VOLUME_DISPLAYS) {
        volume_display.innerHTML = parseInt(AUDIO.volume * 100);
      }
    });
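    // Keyboard controls: space toggles play/pause, the arrow keys seek by 5 seconds.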
    player.addEventListener("keydown", (event) => {
      switch (event.key) {
        case " ": // space toggles play/pause
          event.preventDefault();
          if (player.dataset.playing) {
            AUDIO.pause();
            delete player.dataset.playing;
          } else {
            init();
            AUDIO.play();
            player.dataset.playing = true;
            draw();
          }
          break;
        case "ArrowLeft": // seek back 5 seconds
          event.preventDefault();
          AUDIO.currentTime -= 5;
          break;
        case "ArrowRight": // seek forward 5 seconds
          event.preventDefault();
          AUDIO.currentTime += 5;
          break;
        default:
          break;
      }
    });
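    // Volume slider. The pointer-down flag stops the volumechange listener above from
    // snapping the slider back while the user is still dragging it.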
    const VOLUME = player.querySelector('.audio-controls-container input[name="volume"]');
    const VOLUME_DISPLAYS = player.querySelectorAll(
      ".audio-controls-container .volume-display",
    );
    let volume_being_changed_by_user = false;
    VOLUME.addEventListener("pointerdown", (event) => {
      volume_being_changed_by_user = true;
    });
    VOLUME.addEventListener("pointerup", () => {
      volume_being_changed_by_user = false;
    });
    function on_volume_changed() {
      if (volume_being_changed_by_user) {
        AUDIO.volume = Math.min(1.0, Math.max(VOLUME.value / 100, 0));
      }
      for (const volume_display of VOLUME_DISPLAYS) {
        volume_display.innerHTML = parseInt(AUDIO.volume * 100);
      }
    }
    VOLUME.addEventListener("change", on_volume_changed);
    VOLUME.addEventListener("input", on_volume_changed);
    VOLUME.value = Math.min(100, Math.max(AUDIO.volume * 100, 0));
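    // Progress slider: playback position is mapped onto 0..PROGRESS_TICKS, and the
    // audio element is only seeked while the user is actively dragging the handle.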
    const PROGRESS = player.querySelector('.audio-controls-container input[name="progress"]');
    PROGRESS.min = 0;
    PROGRESS.max = PROGRESS_TICKS;
    PROGRESS.addEventListener("pointerdown", (event) => {
      progress_being_changed_by_user = true;
    });
    PROGRESS.addEventListener("pointerup", () => {
      progress_being_changed_by_user = false;
    });
    function on_progress_changed() {
      if (progress_being_changed_by_user) {
        AUDIO.currentTime = (PROGRESS.value / PROGRESS_TICKS) * AUDIO.duration;
      }
    }
    PROGRESS.addEventListener("change", on_progress_changed);
    PROGRESS.addEventListener("input", on_progress_changed);
    let CANVAS;
    let ACTX;
    let ANALYSER;
    let DATA;
    let SOURCE;
    let CTX;
    let VIZ_RGB;
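    // init() connects the <audio> element to both an AnalyserNode and the speakers,
    // and reads the bar color from the canvas's computed CSS color (the slice(4, -1)
    // parsing assumes an "rgb(r, g, b)" string). It is only called from click/key
    // handlers, since browsers may keep an AudioContext suspended until a user gesture.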
    function init() {
      CANVAS = CANVAS ?? player.querySelector("canvas");
      VIZ_RGB = window
        .getComputedStyle(CANVAS)
        .color.slice(4, -1)
        .split(",")
        .map((v) => parseInt(v));
      ACTX = ACTX ?? new AudioContext();
      if (!ANALYSER) {
        ANALYSER = ACTX.createAnalyser();
        ANALYSER.fftSize = 4 * FREQUENCY_SAMPLES;
        ANALYSER.smoothingTimeConstant = 0.8;
      }
      DATA = DATA ?? new Uint8Array(ANALYSER.frequencyBinCount);
      if (!SOURCE) {
        SOURCE = ACTX.createMediaElementSource(AUDIO);
        SOURCE.connect(ANALYSER);
        SOURCE.connect(ACTX.destination);
      }
      CTX = CTX ?? CANVAS.getContext("2d");
      for (const current of CURRENT_TIMES) {
        current.innerHTML = human_readable_time(AUDIO.currentTime);
      }
    }
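    // draw() runs on requestAnimationFrame while playing: it samples the analyser's
    // byte frequency data and draws a small block for every BAR_WIDTH-th bin, colored
    // with the canvas's CSS color scaled by the bin's amplitude plus a 25% baseline.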
    function draw() {
      if (!player.dataset.playing) {
        return;
      }
      ANALYSER.getByteFrequencyData(DATA);
      // draw on the canvas element
      CTX.clearRect(0, 0, CANVAS.width, CANVAS.height);
      for (let i = 0; i < DATA.length; i = i + BAR_WIDTH) {
        // normalize the value
        const value = DATA[i] / 255;
        const y = CANVAS.height - CANVAS.height * value;
        CTX.fillStyle = `rgb(${Math.min(255, parseInt(value * VIZ_RGB[0] + VIZ_RGB[0] * 0.25))}, ${Math.min(255, parseInt(value * VIZ_RGB[1] + VIZ_RGB[1] * 0.25))}, ${Math.min(255, parseInt(value * VIZ_RGB[2] + VIZ_RGB[2] * 0.25))})`;
        CTX.fillRect(i, y, 2, 8);
      }
      requestAnimationFrame(draw);
    }
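    // Transport buttons: skip-back restarts the track, play-pause toggles playback and
    // the visualizer loop, and skip-forward jumps to the end and stops.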
player.querySelector(".audio-control.skip-back")?.addEventListener("click", (event) => {
init();
AUDIO.currentTime = 0;
});
    player
      .querySelector(".audio-control.play-pause-toggle")
      .addEventListener("click", (event) => {
        init();
        const is_playing = player.dataset.playing;
        if (is_playing) {
          AUDIO.pause();
          delete player.dataset.playing;
          // CTX.clearRect(0, 0, CANVAS.width, CANVAS.height);
          return;
        }
        AUDIO.play();
        player.dataset.playing = true;
        draw();
      });
player.querySelector(".audio-control.skip-forward")?.addEventListener("click", (event) => {
init();
AUDIO.currentTime = AUDIO.duration;
AUDIO.pause();
delete player.dataset.playing;
});
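    // Mark this container as initialized so the MutationObserver skips it from now on.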
    player.dataset.audioPlayer = true;
  }
}).observe(document, {
  subtree: true,
  childList: true,
});