diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000..940260d
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,3 @@
+{
+ "recommendations": ["dbaeumer.vscode-eslint"]
+}
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..2b1a567
--- /dev/null
+++ b/README.md
@@ -0,0 +1,15 @@
+## ThreeJS Audio Visualizer
+
+Currently a work in progress. It will be able to take in an MP3 file and distort the imported 3D hand model in time with the highs and lows of the audio.
+
+## Development
+
+To get started, you simply need to run the following commands:
+
+`npm install && npm run dev`
+
+Then you can access the app at http://localhost:8000. There's no hot code reloading, so you'll need to refresh the page after any code changes.
+
+## Deploy
+
+Run `npm run build`, then serve `index.html` together with the `dist` and `static` directories.
diff --git a/index.html b/index.html
index 53a30af..221b1e3 100644
--- a/index.html
+++ b/index.html
@@ -20,6 +20,25 @@
>
Loading model: 0%
+    <button id="startButton" style="display: none">
+      BEGIN
+    </button>
diff --git a/src/audio.ts b/src/audio.ts
new file mode 100644
--- /dev/null
+++ b/src/audio.ts
@@ -0,0 +1,30 @@
+import { Audio, AudioLoader } from "three";
+
+const loader = new AudioLoader();
+
+export function LoadAudio(listener): Promise {
+ const loadingDiv = document.getElementById("loader");
+ loadingDiv.innerHTML = "Loading audio: 0%";
+
+ return new Promise((resolve, reject) => {
+ loader.load(
+ "/static/audio.mp3",
+ (audio) => {
+ const sound = new Audio(listener);
+ sound.setBuffer(audio);
+ sound.setLoop(false);
+ sound.setVolume(0.1);
+ loadingDiv.innerHTML = "";
+ return resolve(sound);
+ },
+ (progress) =>
+ (loadingDiv.innerHTML = `Loading audio: ${
+ (progress.loaded / progress.total) * 100
+ }%`),
+ (error: ErrorEvent) => {
+ console.log(error.target);
+ reject(error);
+ }
+ );
+ });
+}
diff --git a/src/index.ts b/src/index.ts
index 2f5cc38..1034997 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,68 +1,135 @@
-import { Scene, PerspectiveCamera, WebGLRenderer, PointLight } from "three";
-import { Load } from "./model";
+import {
+ Scene,
+ PerspectiveCamera,
+ WebGLRenderer,
+ PointLight,
+ Float32BufferAttribute,
+ AudioListener,
+ AudioAnalyser,
+ Clock,
+} from "three";
+import { LoadAudio } from "./audio";
+import { LoadModel } from "./model";
+
+const GLOBAL = {
+ renderer: null,
+ scene: null,
+ camera: null,
+ light: null,
+ hand: null,
+ positions: null,
+ distortionLevel: null,
+ audioListener: null,
+ audioAnalyser: null,
+ clock: null,
+};
+const avg = (list) => list.reduce((prev, curr) => prev + curr) / list.length;
-let renderer, scene, camera, light;
init().then(() => animate());
async function init() {
const container = document.getElementById("container");
- scene = new Scene();
- camera = new PerspectiveCamera(
+ GLOBAL.scene = new Scene();
+ GLOBAL.camera = new PerspectiveCamera(
75,
window.innerWidth / window.innerHeight,
1,
10000
);
- camera.position.z = 5;
+ GLOBAL.camera.position.z = 3;
- light = new PointLight(0x119911, 0);
- light.counter = 0;
- light.position.set(0, 0.2, 0.2);
- scene.add(light);
+ GLOBAL.light = new PointLight(0x119911, 1);
+ GLOBAL.light.counter = 0;
+ GLOBAL.light.position.set(0, 0.15, 0.15);
+ GLOBAL.scene.add(GLOBAL.light);
- renderer = new WebGLRenderer();
- renderer.setPixelRatio(window.devicePixelRatio);
- renderer.setSize(window.innerWidth, window.innerHeight);
- container.appendChild(renderer.domElement);
+ GLOBAL.renderer = new WebGLRenderer();
+ GLOBAL.renderer.setPixelRatio(window.devicePixelRatio);
+ GLOBAL.renderer.setSize(window.innerWidth, window.innerHeight);
+ container.appendChild(GLOBAL.renderer.domElement);
window.addEventListener("resize", onWindowResize);
+ GLOBAL.audioListener = new AudioListener();
+ GLOBAL.scene.add(GLOBAL.audioListener);
+
+ GLOBAL.clock = new Clock();
+
try {
- const gltf = await Load();
- document.getElementById("loader").innerHTML = "";
+ const model = await LoadModel();
+ initializeModel(model);
- // remove second hand with text above it
- const objToRemove = gltf.scene.getObjectByName("Object_3");
- objToRemove.parent.remove(objToRemove);
+ const audio = await LoadAudio(GLOBAL.audioListener);
+ initializeAudio(audio);
- // turn remaining hand into wireframe
- const hand = gltf.scene.getObjectByName("Object_4");
- hand.material.wireframe = true;
-
- // center hand in scene
- hand.position.x = hand.position.x + 1.5;
-
- scene.add(gltf.scene);
+ const startButton = document.getElementById("startButton");
+ startButton.style.display = "block";
+ startButton.addEventListener("click", () => {
+ audio.play();
+ startButton.remove();
+ });
} catch (err) {
console.warn(err);
}
}
-function animate() {
- requestAnimationFrame(animate);
- render();
+function initializeAudio(audio) {
+ GLOBAL.audioAnalyser = new AudioAnalyser(audio, 512);
}
-function render() {
+function initializeModel(model) {
+ // remove second hand with text above it
+ const objToRemove = model.scene.getObjectByName("Object_3");
+ objToRemove.parent.remove(objToRemove);
+
+ // turn remaining hand into wireframe
+ GLOBAL.hand = model.scene.getObjectByName("Object_4");
+ GLOBAL.hand.material.wireframe = true;
+
+ // set up distortion for each vertex
+ GLOBAL.hand.originalPositions = GLOBAL.hand.geometry.getAttribute("position");
+ GLOBAL.hand.distortions = GLOBAL.hand.originalPositions.array
+ .slice(0)
+ .map(() => Math.random() * 2 - 1);
+ GLOBAL.positions = GLOBAL.hand.geometry.getAttribute("position");
+
+ // center hand in scene
+ GLOBAL.hand.position.x = GLOBAL.hand.position.x + 1.5;
+
+ GLOBAL.scene.add(model.scene);
+}
+
+function animate() {
+ requestAnimationFrame(animate);
+
+ const delta = GLOBAL.clock.getDelta();
+
+ const soundArray = GLOBAL.audioAnalyser.getFrequencyData();
+ const soundAvg = avg(soundArray) / soundArray.length;
+
+ render(delta, Math.pow(soundAvg * 5, 5));
+}
+
+function render(delta, soundAvg) {
// modulate light intensity between 0.5 and 1.5
- light.counter += 0.01;
- light.intensity = Math.sin(light.counter) / 2 + 1;
- renderer.render(scene, camera);
+ GLOBAL.light.counter += delta + 0.02;
+ GLOBAL.light.intensity = Math.sin(GLOBAL.light.counter) / 2 + 1;
+
+ const newPositions = new Float32BufferAttribute(
+ GLOBAL.positions.array.map((_position, index) => {
+ const distortion = GLOBAL.hand.distortions[index] * soundAvg;
+ return distortion / 10 + GLOBAL.hand.originalPositions.array[index];
+ }),
+ 3
+ );
+
+ GLOBAL.hand.geometry.setAttribute("position", newPositions);
+ GLOBAL.renderer.render(GLOBAL.scene, GLOBAL.camera);
}
function onWindowResize() {
- camera.aspect = window.innerWidth / window.innerHeight;
- camera.updateProjectionMatrix();
+ GLOBAL.camera.aspect = window.innerWidth / window.innerHeight;
+ GLOBAL.camera.updateProjectionMatrix();
- renderer.setSize(window.innerWidth, window.innerHeight);
+ GLOBAL.renderer.setSize(window.innerWidth, window.innerHeight);
}
diff --git a/src/model.ts b/src/model.ts
index d640aad..adb865c 100644
--- a/src/model.ts
+++ b/src/model.ts
@@ -11,7 +11,7 @@ export interface GLTF {
asset: object;
}
-export function Load(): Promise {
+export function LoadModel(): Promise {
const loadingDiv = document.getElementById("loader");
return new Promise((resolve, reject) => {
loader.load(
diff --git a/static/audio.mp3 b/static/audio.mp3
new file mode 100644
index 0000000..32c31d7
Binary files /dev/null and b/static/audio.mp3 differ