Difference between OpenGLES and OpenGL 3.0


#1

Hello, I’m moving a shader from openGLES to OpenGL 3.0.
The shaders are almost identical, but that one for OpenGL 3.0 does not show the texture. This is the OpenGLES version

const int MAX_MARCHING_STEPS = 64;
const float EPSILON = 0.0011;
const float NEAR_CLIP = 0.0;
const float FAR_CLIP = 100.00;

float sdfSphere(vec3 pos, float radius){
    return length(pos) - radius;
}

float map(vec3 pos){
    return sdfSphere(pos, 2.4);
}

vec3 computeNormal(vec3 pos){
    vec2 eps = vec2(0.01, 0.);
    return normalize(vec3(
        map(pos + eps.xyy) - map(pos - eps.xyy),
        map(pos + eps.yxy) - map(pos - eps.yxy),
        map(pos + eps.yyx) - map(pos - eps.yyx)
    ));
}

float raymarching(vec3 eye, vec3 marchingDirection){
    float depth = NEAR_CLIP;
    for (int i = 0; i < MAX_MARCHING_STEPS; i++) {
        float dist = map(eye + depth * marchingDirection);
        if (dist < EPSILON){
            return depth;
        }

        depth += dist;

        if (depth >= FAR_CLIP) {
            return FAR_CLIP;
        }
    }
    return FAR_CLIP;
}

vec3 getRefTexture(vec3 normal, vec3 dir) {
    vec3 eye = -dir;
    vec3 r = reflect( eye, normal );
    vec4 color = texture2D(iChannel1, (0.5 * (r.xy) + .5));
    return color.xyz;
}

void main(){
    vec2 uv = 2.0 * gl_FragCoord.xy / iResolution.xy - 1.0;
    uv.x *= iResolution.x / iResolution.y;

    vec3 eye = vec3(0.0, 0.0, -10);
    float fov = 2.;
    vec3 dir = normalize(vec3(uv, fov));

    float shortestDistanceToScene = raymarching(eye, dir);

    vec3 color;
    vec3 bgColor = vec3(0.1, 0.35, 0.75);

    if (shortestDistanceToScene < FAR_CLIP - EPSILON) {
        vec3 collision = (eye += (shortestDistanceToScene*0.995) * dir );
        vec3 normal = computeNormal(collision);
        color = getRefTexture(normal, dir);
    } else {
        color = bgColor;
    }
    gl_FragColor = vec4(color , 1.0);
}

That gives me this:

And this is the openGL 3.0 version:

#version 150
uniform sampler2DRect tex0;
uniform vec2 resolution;

in vec2 vTexCoord; //used just for debugging purpose
out vec4 fragColor;

int MAX_MARCHING_STEPS = 64;
const float EPSILON = 0.0011;
const float NEAR_CLIP = 0.0;
const float FAR_CLIP = 100.00;

float sdfSphere(vec3 pos, float radius){
    return length(pos) - radius;
}

float map(vec3 pos){
    return sdfSphere(pos, 2.4);
}

float raymarching(vec3 eye, vec3 marchingDirection){
    float depth = NEAR_CLIP;
    for (int i = 0; i < MAX_MARCHING_STEPS; i++) {
        float dist = map(eye + depth * marchingDirection);
        if (dist < EPSILON){
            return depth;
        }

        depth += dist;

        if (depth >= FAR_CLIP) {
            return FAR_CLIP;
        }
    }
    return FAR_CLIP;
}

vec3 computeNormal(vec3 pos){
    vec2 eps = vec2(0.01, 0.);
    return normalize(vec3(
                          map(pos + eps.xyy) - map(pos - eps.xyy),
                          map(pos + eps.yxy) - map(pos - eps.yxy),
                          map(pos + eps.yyx) - map(pos - eps.yyx)
                          ));
}

vec3 getRefTexture(vec3 normal, vec3 dir) {
    vec3 eye = -dir;
    vec3 r = reflect( eye, normal );
    vec4 color = texture(tex0, (0.5 * (r.xy) + .5));
    return color.xyz;
}

void main(){
    vec2 uv = 2.0 * gl_FragCoord.xy / resolution.xy - 1.0;
    uv.x *= resolution.x / resolution.y;

    vec3 eye = vec3(0.0, 0.0, -10);
    float fov = 2.;
    vec3 dir = normalize(vec3(uv, fov));

    float shortestDistanceToScene = raymarching(eye, dir);

    vec3 color;
    vec3 bgColor = vec3(0.1, 0.35, 0.75);

    if (shortestDistanceToScene < FAR_CLIP - EPSILON) {
        vec3 collision = (eye += (shortestDistanceToScene*0.995) * dir );
        vec3 normal = computeNormal(collision);
        color = getRefTexture(normal, dir);
    } else {
        color = bgColor;
    }

    // if I uncomment this, the texture is displayed as expected. That means the texture is available to the shader
    // color = texture(tex0, (0.5 * (vTexCoord.xy) + .5));
    fragColor = vec4(color , 1.0);
}

And it gives me this result:

Did I miss something?
The texture is passed correctly to the shader in the ofApp.cpp file, and I can print it on screen.

myShader.setUniformTexture("tex0", image0, 1);

The only difference that I see is the way the sampler is defined

And this is my ofApp.cpp file

void ofApp::draw(){
    cam.begin();
    tex0.bind();
    plane.mapTexCoordsFromTexture(image0.getTexture());
    myShader.begin();
    setUniforms();
    plane.draw();
    myShader.end();
    tex0.unbind();
    cam.end();
}

void ofApp::setUniforms(){
    float resolution[] = {float(ofGetWidth()), float(ofGetHeight())};
    myShader.setUniform2fv("resolution",resolution);
    myShader.setUniformTexture("tex0", image0, 1);
}

#2

if you use sampler2dRect the texture coordinates won’t be normalized but in pixels. the easiest is to not use texture2drect at all.

when declaring the sampler, just use sampler2D instead of sampler2DRect and then in OF in the setup function before anything else call: ofDisableARBTex() which disables rectangular textures by default.


#3

it worked!i have seen that was a problem of coordinates becuase drawing color = vec3(dir) or color = vec3(normal) was giving me the same result in GLES and OpenGL3,but i did not know about sampler2DRect. many thanks :wink: