Light on a plane

I’m having a strange issue with a simple shader that I’m applying to a sphere and to a plane. On the sphere it works, on the plane it looks like there is no light.

This is the vertex shader:

#version 150
uniform mat4 modelViewProjectionMatrix;

in vec4 position;
in vec4 normal;

out vec4 vPosition;
out vec3 vNormal;

void main() {
    vNormal = normal.xyz;
    vPosition = modelViewProjectionMatrix * position;
    gl_Position = vPosition;
}

And this is the fragment shader:

#version 150

uniform vec3 lightPos;
in vec4 vPosition;
in vec3 vNormal;

out vec4 vFragColor;

void main() {
    vec4 color = vec4(1.0,0.0,0.0,1.0);
    vec3 lightDirection = normalize(lightPos - vPosition.xyz);
    float dProd = max(0.3, dot(vNormal, lightDirection));
    vec4 colorWithLight = vec4( vec3( dProd ) * vec3( color ), 1.0 );
    vFragColor = colorWithLight;
}

And this is the draw method:

void ofApp::draw(){
    ofDisableDepthTest();
    gui.draw();
    ofEnableDepthTest();
    cam.begin();
    shader.begin();
    shader.setUniform3f("lightPos", lightPos);
    sphere.draw();
    plane.draw();
    shader.end();
    light.draw();
    cam.end();
}

Am I doing something wrong?

it’s probably backwards, with the normals looking down. try rotating it 180 degrees in z or x

same thing, both surfaces are dark. This is my ofApp.cpp code:

void ofApp::setup(){
    ofEnableDepthTest();
    objectLocation = glm::vec3(0.0,0.0,0.0);

    // Plane
    plane.set(640, 640);
    plane.setPosition(objectLocation);
    plane.setResolution(1024, 1024);
    plane.rollDeg(180);

    //Sphere
    sphere.set(100, 100);
    if (!ofIsGLProgrammableRenderer()) {
        ofLogError("this app supports only open the programmable render pipeline");
        return 1;
    } else {
        shader.load(shadersFolder+"/through.vert", shadersFolder+"/render.frag");
    };

    gui.setup();
    gui.setPosition(ofPoint(0, 30));
    gui.add(lightPos.setup("lightPosition",
                           ofVec3f(ofGetWidth()*.5, ofGetHeight()*.5, 100),
                           ofVec3f(0, 0, -100),
                           ofVec3f(ofGetWidth(), ofGetHeight(),200)));

    light.setup();
    light.enable();
    light.setPosition(lightPos);
    light.lookAt(objectLocation);
}

//--------------------------------------------------------------
void ofApp::update(){
    light.setPosition(lightPos);
    light.lookAt(objectLocation);
}

//--------------------------------------------------------------
void ofApp::draw(){
    ofDisableDepthTest();
    gui.draw();
    ofEnableDepthTest();
    cam.begin();
    shader.begin();
    shader.setUniform3f("lightPos", lightPos);
    sphere.draw();
    plane.draw();
    shader.end();
    light.draw();
    cam.end();
}

And this the header:

#pragma once

#include "ofMain.h"
#include "ofxGui.h"

class ofApp : public ofBaseApp{

	public:
		void setup();
		void update();
		void draw();

		void keyPressed(int key);
		void keyReleased(int key);
		void mouseMoved(int x, int y );
		void mouseDragged(int x, int y, int button);
		void mousePressed(int x, int y, int button);
		void mouseReleased(int x, int y, int button);
		void mouseEntered(int x, int y);
		void mouseExited(int x, int y);
		void windowResized(int w, int h);
		void dragEvent(ofDragInfo dragInfo);
		void gotMessage(ofMessage msg);

    ofShader shader;
    string shadersFolder = "shaders";

    glm::vec3 objectLocation;
    ofPlanePrimitive plane;
    ofSpherePrimitive sphere;
    ofLight light;
    ofEasyCam cam;
    ofxPanel gui;
    ofxVec3Slider lightPos;
};

it doesn’t make any sense that by turning the plane the sphere render would change, no idea wha’t s going on try drawing the normals of both primitives (mosty the plane) to check that you are lighting the correct side

Thanks Arturo. I did not want to change the render of the sphere, I’ve put the sphere on the scene just to check if my shader was correct. Anyway, at the end the plane it is illuminated, but the light it is visible only from a near distance