add audio listener that offsets vertices of hand by average volume, add start button that plays the music

This commit is contained in:
Silas 2022-03-11 01:02:03 -05:00
parent 081c73ca75
commit c182456a21
Signed by: silentsilas
GPG Key ID: 4199EFB7DAA34349
8 changed files with 174 additions and 39 deletions

3
.vscode/extensions.json vendored Normal file
View File

@ -0,0 +1,3 @@
{
"recommendations": ["dbaeumer.vscode-eslint"]
}

15
README.md Normal file
View File

@ -0,0 +1,15 @@
## ThreeJS Audio Visualizer
Currently a work in progress. It will be able to take in an MP3 file and distort the imported 3D hand model in time with the music's highs and lows.
## Development
To get started, you simply need to run the following commands:
`npm install && npm run dev`
Then you can access localhost:8000. There's no hot code reloading, so you'll need to refresh after any code changes.
## Deploy
Run `npm run build`, then serve `index.html` together with the `dist` and `static` directories.

View File

@ -20,6 +20,25 @@
> >
Loading model: 0% Loading model: 0%
</div> </div>
<div
id="startButton"
style="
display: none;
color: #efefef;
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 80px;
height: 25px;
border: 2px solid #efefef;
text-align: center;
padding: 20px 40px;
cursor: pointer;
"
>
BEGIN
</div>
</div> </div>
<div <div
style=" style="

View File

@ -6,7 +6,8 @@
"scripts": { "scripts": {
"dev": "esbuild src/index.ts --outfile=dist/index.js --bundle --loader:.ts=ts --serve=localhost:8000 --servedir=.", "dev": "esbuild src/index.ts --outfile=dist/index.js --bundle --loader:.ts=ts --serve=localhost:8000 --servedir=.",
"lint": "eslint . --ext .ts", "lint": "eslint . --ext .ts",
"test": "echo \"Error: no test specified\" && exit 1" "test": "echo \"Error: no test specified\" && exit 1",
"build": "esbuild src/index.ts --outfile=dist/index.js --bundle --loader:.ts=ts"
}, },
"author": "Silas", "author": "Silas",
"license": "MIT", "license": "MIT",

30
src/audio.ts Normal file
View File

@ -0,0 +1,30 @@
import { Audio, AudioListener, AudioLoader } from "three";
const loader = new AudioLoader();
export function LoadAudio(listener): Promise<Audio> {
const loadingDiv = document.getElementById("loader");
loadingDiv.innerHTML = "Loading audio: 0%";
return new Promise((resolve, reject) => {
loader.load(
"/static/audio.mp3",
(audio) => {
const sound = new Audio(listener);
sound.setBuffer(audio);
sound.setLoop(false);
sound.setVolume(0.1);
loadingDiv.innerHTML = "";
return resolve(sound);
},
(progress) =>
(loadingDiv.innerHTML = `Loading audio: ${
(progress.loaded / progress.total) * 100
}%`),
(error: ErrorEvent) => {
console.log(error.target);
reject(error);
}
);
});
}

View File

@ -1,68 +1,135 @@
import { Scene, PerspectiveCamera, WebGLRenderer, PointLight } from "three"; import {
import { Load } from "./model"; Scene,
PerspectiveCamera,
WebGLRenderer,
PointLight,
Float32BufferAttribute,
AudioListener,
AudioAnalyser,
Clock,
} from "three";
import { LoadAudio } from "./audio";
import { LoadModel } from "./model";
const GLOBAL = {
renderer: null,
scene: null,
camera: null,
light: null,
hand: null,
positions: null,
distortionLevel: null,
audioListener: null,
audioAnalyser: null,
clock: null,
};
const avg = (list) => list.reduce((prev, curr) => prev + curr) / list.length;
let renderer, scene, camera, light;
init().then(() => animate()); init().then(() => animate());
async function init() { async function init() {
const container = document.getElementById("container"); const container = document.getElementById("container");
scene = new Scene(); GLOBAL.scene = new Scene();
camera = new PerspectiveCamera( GLOBAL.camera = new PerspectiveCamera(
75, 75,
window.innerWidth / window.innerHeight, window.innerWidth / window.innerHeight,
1, 1,
10000 10000
); );
camera.position.z = 5; GLOBAL.camera.position.z = 3;
light = new PointLight(0x119911, 0); GLOBAL.light = new PointLight(0x119911, 1);
light.counter = 0; GLOBAL.light.counter = 0;
light.position.set(0, 0.2, 0.2); GLOBAL.light.position.set(0, 0.15, 0.15);
scene.add(light); GLOBAL.scene.add(GLOBAL.light);
renderer = new WebGLRenderer(); GLOBAL.renderer = new WebGLRenderer();
renderer.setPixelRatio(window.devicePixelRatio); GLOBAL.renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight); GLOBAL.renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement); container.appendChild(GLOBAL.renderer.domElement);
window.addEventListener("resize", onWindowResize); window.addEventListener("resize", onWindowResize);
GLOBAL.audioListener = new AudioListener();
GLOBAL.scene.add(GLOBAL.audioListener);
GLOBAL.clock = new Clock();
try { try {
const gltf = await Load(); const model = await LoadModel();
document.getElementById("loader").innerHTML = ""; initializeModel(model);
// remove second hand with text above it const audio = await LoadAudio(GLOBAL.audioListener);
const objToRemove = gltf.scene.getObjectByName("Object_3"); initializeAudio(audio);
objToRemove.parent.remove(objToRemove);
// turn remaining hand into wireframe const startButton = document.getElementById("startButton");
const hand = gltf.scene.getObjectByName("Object_4"); startButton.style.display = "block";
hand.material.wireframe = true; startButton.addEventListener("click", () => {
audio.play();
// center hand in scene startButton.remove();
hand.position.x = hand.position.x + 1.5; });
scene.add(gltf.scene);
} catch (err) { } catch (err) {
console.warn(err); console.warn(err);
} }
} }
function animate() { function initializeAudio(audio) {
requestAnimationFrame(animate); GLOBAL.audioAnalyser = new AudioAnalyser(audio, 512);
render();
} }
function render() { function initializeModel(model) {
// remove second hand with text above it
const objToRemove = model.scene.getObjectByName("Object_3");
objToRemove.parent.remove(objToRemove);
// turn remaining hand into wireframe
GLOBAL.hand = model.scene.getObjectByName("Object_4");
GLOBAL.hand.material.wireframe = true;
// set up distortion for each vertex
GLOBAL.hand.originalPositions = GLOBAL.hand.geometry.getAttribute("position");
GLOBAL.hand.distortions = GLOBAL.hand.originalPositions.array
.slice(0)
.map(() => Math.random() * 2 - 1);
GLOBAL.positions = GLOBAL.hand.geometry.getAttribute("position");
// center hand in scene
GLOBAL.hand.position.x = GLOBAL.hand.position.x + 1.5;
GLOBAL.scene.add(model.scene);
}
function animate() {
requestAnimationFrame(animate);
const delta = GLOBAL.clock.getDelta();
const soundArray = GLOBAL.audioAnalyser.getFrequencyData();
const soundAvg = avg(soundArray) / soundArray.length;
render(delta, Math.pow(soundAvg * 5, 5));
}
function render(delta, soundAvg) {
// modulate light intensity between 0.5 and 1.5 // modulate light intensity between 0.5 and 1.5
light.counter += 0.01; GLOBAL.light.counter += delta + 0.02;
light.intensity = Math.sin(light.counter) / 2 + 1; GLOBAL.light.intensity = Math.sin(GLOBAL.light.counter) / 2 + 1;
renderer.render(scene, camera);
const newPositions = new Float32BufferAttribute(
GLOBAL.positions.array.map((_position, index) => {
const distortion = GLOBAL.hand.distortions[index] * soundAvg;
return distortion / 10 + GLOBAL.hand.originalPositions.array[index];
}),
3
);
GLOBAL.hand.geometry.setAttribute("position", newPositions);
GLOBAL.renderer.render(GLOBAL.scene, GLOBAL.camera);
} }
function onWindowResize() { function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight; GLOBAL.camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix(); GLOBAL.camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight); GLOBAL.renderer.setSize(window.innerWidth, window.innerHeight);
} }

View File

@ -11,7 +11,7 @@ export interface GLTF {
asset: object; asset: object;
} }
export function Load(): Promise<GLTF> { export function LoadModel(): Promise<GLTF> {
const loadingDiv = document.getElementById("loader"); const loadingDiv = document.getElementById("loader");
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
loader.load( loader.load(

BIN
static/audio.mp3 Normal file

Binary file not shown.