// Click, listen and watch... Sorry, you may have to wait for the sound file to load...
// A fork of "Mic in" by Pierre MARZIN
//Pierre MARZIN 01/2019
//I'm using p5js Sound library to load and analyse
//a song. Then, I pass the results of the analysis, as a texture, to the fragment shader.
//This shader will transform this information into a 3D "landscape",
//using distance functions, and a rotating camera...
//Module-level state shared between setup() and draw().
var button; // not referenced in this file's visible code — possibly kept for forks
var mic;    // p5.AudioIn microphone source
var fft;    // p5.FFT analyser fed by the mic
var img;    // 256x1 image used as the spectrum texture for the shader
var program; // p5.Shader compiled from the vert/frag sources
var gl;     // not referenced in this file's visible code
//Runs once at startup (p5 global mode): creates the WebGL canvas, starts
//microphone capture, builds the FFT analyser, and compiles the shader.
function setup() {
//lock density to 1 so shader pixel coordinates match canvas pixels on hi-DPI screens
pixelDensity(1);
createCanvas(windowWidth, windowHeight,WEBGL);
//img is the 256x1 image where the spectrum data will be stored each frame
img=createImage(256,1);
//expose img.pixels so draw() can write raw RGBA bytes into it
img.loadPixels();
rectMode(CENTER);
//microphone input; browsers typically require a user gesture (click/touch) before audio flows
mic=new p5.AudioIn();
mic.start();
//shader is loaded with `vert` and `frag` source strings defined elsewhere in the project
//NOTE(review): this uses the private _renderer handle; p5's public API is createShader() — confirm the p5 version pinned by this sketch
program = new p5.Shader(this._renderer,vert, frag);
//cf p5.sound library doc: FFT must be wired to the mic before analyze() is called in draw()
fft = new p5.FFT();
fft.setInput(mic);
}
//Runs every frame: samples the mic level and frequency spectrum, packs the
//spectrum into the 256x1 texture, feeds the shader uniforms, and renders a
//full-canvas rectangle that the fragment shader paints.
function draw() {
  //Audio only flows after the first click/touch; the square root compresses
  //the amplitude range so quiet input still moves the visuals.
  const level = pow(mic.getLevel(), 0.5);
  //Per-frequency levels (0..255), one entry per bin.
  const spectrum = fft.analyze();
  //Copy each of the first 256 bins into one RGBA pixel of the 256x1 image,
  //writing the same value into all four channels (the original encoding).
  for (let bin = 0; bin < 256; bin++) {
    const v = spectrum[bin];
    const base = 4 * bin;
    img.pixels[base] = v;
    img.pixels[base + 1] = v;
    img.pixels[base + 2] = v;
    img.pixels[base + 3] = v;
  }
  img.updatePixels();
  this.shader(program);
  //Feed the shader uniforms with this frame's data.
  program.setUniform('volume', level);
  program.setUniform('iMouse', [mouseX, mouseY]);
  program.setUniform('iResolution', [width, height]);
  program.setUniform('iTime', millis() / 1000);
  program.setUniform('iChannel0', img);
  //The scene is rendered onto this rectangle covering the whole canvas.
  rect(0, 0, width, height);
}