Hi guys,

I am trying to work out how I can pass audio FFT data into an ofTexture object that then can be passed in as a texture to a shader. Shadertoy.com does this and somehow lets you access the audio data from a texture2d object in GLSL. I asked Inigo from shader toy how this works and this was his response…

“The FFT signal, which is 512 pixels/frequencies long, gets normalized to 0…1 and mapped to 0…255. The wave form, which is also 512 pixels/samples long, gets renormalized too from -16387…16384 to 0…1. FFT goes in the first row, waveform in the second row. So this is a 512x2 gray scale 8 bit texture.”

So I have tried to re-create this, but for some reason I don't think I am passing the data into the ofTexture object correctly. The code below gives me this sort of image in the ofTexture. *Edit:* I am using Kyle's ofxFFT addon for the FFT part.

```
#include "testApp.h"
//--------------------------------------------------------------
void testApp::setup(){
    // Cache window dimensions for the fullscreen quad in draw().
    w = ofGetWidth();
    h = ofGetHeight();

    // Force GL_TEXTURE_2D (normalized 0..1 texcoords) — the shader samples
    // iChannel0 with normalized coordinates, Shadertoy-style.
    ofDisableArbTex();

    // BUG FIX: the 4th argument of allocate() is bUseARBExtension. Passing
    // true re-enabled rectangle textures (pixel texcoords), which breaks the
    // normalized 0..1 lookups in both the quad and the shader. It must be
    // false here.
    mTexture.allocate(512, 2, GL_LUMINANCE, false);

    fbo.allocate(ofGetWidth(), ofGetHeight(), GL_RGB);
    fbo.begin();
    ofClear(0, 0, 0, 0);
    fbo.end();

    // Loads shaders/shader.frag and shaders/shader.vert.
    mShader.load("shaders/shader");

    // FFT — buffer size for Kyle's ofxFFT addon.
    fft.setup(16384);
}
//--------------------------------------------------------------
void testApp::update(){
//FFT
fft.update();
vector<float>& buffer = fft.getBins();
unsigned char signal[1024];
for (int i = 0; i < 512; i++) {
float scaleFFT = ofMap(i,0,512,0.0,1.0);
scaleFFT *= 255;
signal[i] = (unsigned char) scaleFFT;
}
for (int i = 0; i < 512; i++) {
signal[i] = (unsigned char) i;
signal[512+i] = (unsigned char) (buffer.at(i)*255);
}
mTexture.loadData(signal, 512, 2, GL_LUMINANCE);
}
//--------------------------------------------------------------
//--------------------------------------------------------------
// Render the shader into the FBO using the audio texture as iChannel0,
// then present the FBO plus a small debug strip of the raw audio texture.
void testApp::draw(){
    fbo.begin();
    mShader.begin();
    mShader.setUniform1f("iGlobalTime", ofGetElapsedTimef());
    mShader.setUniform3f("iResolution", ofGetWidth(), ofGetHeight(), 1);

    // setUniformTexture() binds the texture to unit 0 itself, so the old
    // explicit mTexture.bind()/unbind() pair was redundant and could leave
    // stale texture state bound after the shader ended.
    mShader.setUniformTexture("iChannel0", mTexture, 0);

    glBegin(GL_QUADS);
    glTexCoord2f(0, 0); glVertex3f(0, 0, 0);
    glTexCoord2f(1, 0); glVertex3f(w, 0, 0);
    glTexCoord2f(1, 1); glVertex3f(w, h, 0);
    glTexCoord2f(0, 1); glVertex3f(0, h, 0);
    glEnd();

    mShader.end();
    fbo.end();

    // BUG FIX: the shaded FBO was rendered but never presented (fbo.draw was
    // commented out) while the raw 512x2 data texture was stretched over the
    // whole window instead. Show the shader output, and keep a thin strip of
    // the audio texture at the top for debugging the uploaded data.
    fbo.draw(0, 0, ofGetWidth(), ofGetHeight());
    mTexture.draw(0, 0, ofGetWidth(), 20);
}
```

And the shader frag code

```
uniform vec3 iResolution; // viewport resolution (in pixels)
uniform float iGlobalTime; // shader playback time (in seconds)
uniform sampler2D iChannel0; // input channel. XX = 2D/Cube
#define PI 3.14159
#define EPS .001
#define T .03 // Thickness
#define W 2. // Width
#define A .09 // Amplitude
#define V 1. // Velocity
// Draw a glowing oscilloscope-style line driven by the 512x2 audio texture.
void main(void)
{
// Normalized fragment coordinate in 0..1.
vec2 c = gl_FragCoord.xy / iResolution.xy;
// Sample the audio texture; c * .5 keeps the lookup in the lower half of
// texture space. NOTE(review): with a 2-texel-tall texture this means the
// sampled row varies with the fragment's y — presumably the intent was to
// read a fixed row (FFT at y≈0.25, waveform at y≈0.75); confirm against the
// Shadertoy original. With GL_LUMINANCE, s.x == s.y == s.z (gray value).
vec4 s = texture2D(iChannel0, c * .5);
// Displace y by a sine wave whose amplitude is modulated by the audio
// sample, then remap c from 0..1 to -1..1 so y=0 is the screen center.
c = vec2(0., A*s.y*sin((c.x*W+iGlobalTime*V)* 2.5)) + (c*2.-1.);
// Glow intensity: bright near the displaced curve (first term, thickness T)
// plus a faint horizontal center line (second term, EPS avoids div-by-zero).
float g = max(abs(s.y/(pow(c.y, 2.1*sin(s.x*PI))))*T,
abs(.1/(c.y+EPS)));
// Tint the glow; s.w is the alpha of the sample (1.0 for GL_LUMINANCE).
gl_FragColor = vec4(g*g*s.y*.6, g*s.w*.44, g*g*.7, 1.);
}
```

The only thing is, when I try to draw the texture2d object that I am passing in, I get nothing. I don't think I am passing the FFT data and the audio signal into the ofTexture in the correct way. Can anyone see what I am doing wrong here? Any help would be super appreciated.

Thanks!