Hi,
When I try to draw ofImage texture in shader, it draws the image in a wrong orientation, like only uv rotated -90 degrees, not the entire pixels of the image.
- original image
- seems only uv is wrong cause image’s ratio seems correct
- when uv is rotated 90 degs
I thought it’s some sort of a horizontal&landscape profile in a metadata of pics cause I was using an image taken from iphone. However, it still happens when I use random images created from photoshop or so.
Maybe I was drawing a quad in a wrong order? or passing the uv in a wrong way in vertex shader? Any thoughts are much appreciated.
Here’s the code, you can reproduce the situation by commenting out 2 lines that I indicate below.
#define GLSL(version, shader) "#version " #version "\n" #shader
ofImage image;
ofShader shader;
void ofApp::setup(){
ofDisableArbTex();
image.load("image.jpg");
std::string vert = GLSL(120,
varying vec2 texCoord;
void main(){
gl_Position = gl_ProjectionMatrix * gl_ModelViewMatrix * gl_Vertex;
texCoord = gl_MultiTexCoord0.xy;
}
);
std::string frag = GLSL(120,
uniform sampler2D image;
varying vec2 texCoord;
void main(){
vec2 uv = texCoord;
// so I had to rotate uv 90 degs here to display the image in a right orientation. but why?
// you can comment out following 2 lines to reproduce the situation
uv = vec2((uv.x-.5),uv.y-.5)*mat2(cos(1.5708),sin(1.5708),-sin(1.5708),cos(1.5708));
uv += vec2(.5);
vec3 c = texture2D(image, uv).rgb;
gl_FragColor = vec4(c, 1.);
}
);
shader.setupShaderFromSource(GL_VERTEX_SHADER, vert);
shader.setupShaderFromSource(GL_FRAGMENT_SHADER, frag);
shader.linkProgram();
}
void ofApp::draw(){
shader.begin();
{
shader.setUniformTexture("image", image.getTexture(), 0);
float _w = image.getWidth(), _h = image.getHeight();
glBegin(GL_QUADS);
{
glVertex3f( 0, 0, 0), glTexCoord2f( 0, 0);
glVertex3f( 0, _h, 0), glTexCoord2f( 0, 1.f);
glVertex3f(_w, _h, 0), glTexCoord2f(1.f, 1.f);
glVertex3f(_w, 0, 0), glTexCoord2f(1.f, 0);
}
glEnd();
}
shader.end();
}