Hi there,
I’m having strange results while sending a texture from my ofApp to QuartzComposer via ofxSyphon.
The syphon contraption seems to work well in QC since I tried it with different Syphon server from other apps and also with the basic example provided with the ofxSyphon library.
Basically I get some pixels from a movie player, process these and load the processed pixels to an ofTexture that I send with the Syphon server.
It seems like my texture intended for QC is grabbing some area of the screen — sometimes inside my app, sometimes outside it. Other times the pixels I’m sending appear in QC but behind 2 black triangles covering the picture, so in the end only the top border, the right border, and the diagonal from bottom left to top right show the original pixels. Another time I had a portion of my pixels upside-down and frozen.
When I send my texture to the Syphon Recorder app I have the same result.
I’ve tried many ways to load the data into the texture, e.g. with unsigned char* and the GL_RGBA type, with no success.
I also tried to replicate a piece of code from the basic example inside my app, but although it works in the example, the same problem appears.
Any idea before I tear out all my hair?
Here is part of my code to show how I proceed, and a screen capture to show how it looks.
//main.cpp
//========================================================================
//...
// Entry point: create a GLUT-backed window, initialize the GL context,
// and hand control over to the ofApp run loop.
int main() {
    ofAppGlutWindow window;                        // GLUT windowing backend
    ofSetupOpenGL(&window, 1280, 800, OF_WINDOW);  // 1280x800, windowed mode
    ofRunApp(new ofApp());                         // oF takes ownership of the app instance
}
//ofApp.h
//========================================================================
//...
ofVideoPlayer player;            // movie source (its own texture is disabled in setup)
ofPixels source;                 // raw RGB pixels copied from the player each new frame
ofPixels processed;              // output of processVideo(), uploaded to processedTex
ofTexture processedTex;          // GPU texture drawn locally and published via Syphon
ofPixels processVideo( ofPixels pixels);  // zoom + contrast/brightness/saturation pass
ofSyphonServer server;           // publishes processedTex to Syphon clients (QC, Recorder)
// Working resolution for the pixel buffers and the published texture.
// NOTE(review): the movie is assumed to decode at this exact size — verify,
// a mismatch between player.getPixels() and these buffers would corrupt the upload.
int width = 640;
int height = 380;
float zoomPosX, zoomPosY;        // pan of the zoom window (used in processVideo)
float zoomFactor;                // zoom ratio; crop size = frame size / zoomFactor
float contrast, brightness, saturation;  // per-pixel adjustment parameters
bool zoomOn;                     // enables the crop/resize branch in processVideo
bool videoProcessingOn;          // enables the HSB adjustment branch in processVideo
//...
//ofApp.cpp
//========================================================================
//...
//SETUP
// --- setup() fragment ---
server.setName("From my App");   // name shown to Syphon clients (QC, Syphon Recorder)
source.allocate(width, height, OF_PIXELS_RGB);     // raw frame buffer, 640x380 RGB
processed.allocate(width, height, OF_PIXELS_RGB);  // processed frame buffer, same format
processedTex.allocate(width, height, GL_RGB);      // GPU texture that gets published
player.setLoopState(OF_LOOP_NORMAL);
player.setPixelFormat(OF_PIXELS_RGB);  // must match the OF_PIXELS_RGB buffers above
player.setUseTexture(false);           // pixels are uploaded manually; player texture unused
// NOTE(review): nothing checks that the movie actually decodes at width x height.
// If the file is e.g. 640x360 or 1280x720, getPixels() will not match the
// allocated buffers/texture, and the upload produces exactly the kind of
// garbled/offset output described above — confirm the movie's native size.
player.load("/path/to/movie.mp4");
player.play();
//...
//UPDATE
//...
// --- update() fragment ---
player.update();
if (player.isFrameNew()) {
// Copy the decoded frame, then run the zoom/color pass on it.
source = player.getPixels();
processed = processVideo(source);
}
// NOTE(review): this upload runs every update, even when no new frame arrived
// (it sits outside the isFrameNew() guard) — redundant but not harmful by itself.
// If processVideo ever returns pixels whose dimensions differ from the
// texture's allocation (640x380), loadData would garble the texture — verify.
processedTex.loadData(processed, GL_RGB);
//...
//DRAW
// --- draw() fragment ---
ofBackground(backgroundColor);
ofSetHexColor(0xffffff);  // full-white tint so the texture draws unmodified
//...
processedTex.draw(20, 20, width, height);  // local preview
// NOTE(review): Syphon on macOS works with GL_TEXTURE_RECTANGLE targets;
// whether processedTex was allocated as a rectangle or a GL_TEXTURE_2D texture
// depends on the app's ofDisableArbTex()/ARB settings, which aren't visible
// here. A target mismatch is a known cause of flipped/partial/garbage frames
// on the client side — confirm the texture target ofxSyphon receives.
server.publishTexture(&processedTex);
//PROCESS VIDEO
//========================================================================
// Applies the optional zoom crop and the optional contrast / brightness /
// saturation adjustment to one video frame.
//
// pixels  - one RGB frame (taken by value, so it is already a private copy
//           this function may modify in place — no second copy needed).
// returns - the processed frame; after the zoom branch it is resized back
//           to width x height.
ofPixels ofApp::processVideo(ofPixels pixels) {
    if (zoomOn) {
        // Centered crop window: shrink the frame by zoomFactor, then pan it
        // by zoomPosX / zoomPosY (presumably in about [-1, 1] — TODO confirm).
        float x = pixels.getWidth()  * (1 - 1 / zoomFactor + zoomPosX * (1 - 1 / zoomFactor)) / 2;
        float y = pixels.getHeight() * (1 - 1 / zoomFactor + zoomPosY * (1 - 1 / zoomFactor)) / 2;
        float w = pixels.getWidth()  / zoomFactor;
        float h = pixels.getHeight() / zoomFactor;
        // FIX: the original line had a stray ')' — `mod.crop(...) );` — which
        // is a syntax error and cannot have compiled as posted.
        pixels.crop((int)x, (int)y, (int)w, (int)h);
        pixels.resize(width, height, OF_INTERPOLATE_NEAREST_NEIGHBOR);
    }
    if (videoProcessingOn) {
        // Standard contrast curve: scales brightness around the 128 midpoint.
        float cfactor = ( 259.0 * ( 255.0 + contrast * 128 ) ) / ( 255 * ( 259 - contrast * 128 ) );
        const int pw = pixels.getWidth();   // hoist loop-invariant bounds out of
        const int ph = pixels.getHeight();  // the per-pixel loop
        for (int i = 0; i < pw; i++) {
            for (int j = 0; j < ph; j++) {
                ofColor c = pixels.getColor(i, j);
                float hue = c.getHue();
                float sat = c.getSaturation() * saturation;      // saturation gain
                float bri = c.getBrightness();
                bri = cfactor * (bri - 128) + 128;               // contrast first,
                bri *= brightness;                               // then brightness gain
                sat = ofClamp(sat, 0, 255.0);
                bri = ofClamp(bri, 0, 255.0);
                c.setHsb(hue, sat, bri);
                pixels.setColor(i, j, c);
            }
        }
    }
    return pixels;
}
Here is a screen capture:
Thanks !!!