Unexpected behavior with kinect app and advice

Hi, I've made a program that sonifies point clouds using ofxPd.
The idea is to convert each point from the point cloud into a sonic grain — but instead of sonifying the whole point cloud, I'm sonifying ONLY the points that have moved since the previous frame.

For calculating this, “the points in the point cloud that have moved from the previous frame” , first I created a mask doing frame differencing with images, using the absDiff() method of ofxCvGrayscaleImage and then I used this mask(ofxCvGrayscaleImage) for filtering my point cloud in order to get only the points that have moved .
For filtering my point cloud i looped through the pixels of my “frame differenced” image ( my mask ) to get the x, y of the changed values (white pixels) and then I used ofxKinect:: getDistanceAt(x, y) to get the “z” value.
Then I send the z value to pure data , each time i send a “z” value to pure data , it triggers a grain and uses the “z” value to control the frequency of that grain.

I got the app working, but with one unexpected behavior that I can't figure out.
The app only works within a very limited range. If I'm very near the Kinect it doesn't work, and if I'm far away it doesn't work either; the app only responds if I move my body or some object within a very narrow band of distances. I tried changing different values, but I'm not able to change this "narrowness".
Any idea why this is, and how I can fix it?

The other problem I see is that changing the threshold doesn't work as I expected. For example, if I set a high threshold and don't move my hand, it still senses movement and creates grains — but I need it so that when people are still in front of the Kinect, no grains are created.

Any idea why I can't achieve this with the threshold?

And my last question: what do you think of the code? It's my first app in openFrameworks, and it would be cool to get some feedback on things that could be improved — maybe the programming style, or tricks for speeding up the app?

Thanks in advance.

Here is the code; if you are interested, I can upload the whole project including the Pure Data patch.
Is there anywhere I can upload my whole project?

cheers, here is the code:

void testApp::setup() {

     int ticksPerBuffer = 8;	// 8 * 64 = buffer len of 512

	ofSoundStreamSetup(2, 1, this, 44100, ofxPd::blockSize()*ticksPerBuffer, 3);

	// setup the app core
	core.setup(2, 1, 44100, ticksPerBuffer);
    iter = 0;
   	midiChan = 1; // midi channels are 1-16
    
	// subscribe to receive source names
	pd.subscribe("toOF");
	pd.subscribe("env");
    pd.subscribe("transpose");
    
	// add the data/pd folder to the search path
	pd.addToSearchPath("pd/abs");
    
	// audio processing on
	pd.start();
    
    ofSetLogLevel(OF_LOG_VERBOSE);
	
	// enable depth->video image calibration
	kinect.setRegistration(true);
    
	kinect.init();
    kinect.open();		// opens first available kinect
	
	// print the intrinsic IR sensor values
	if(kinect.isConnected()) {
		ofLogNotice() << "sensor-emitter dist: " << kinect.getSensorEmitterDistance() << "cm";
		ofLogNotice() << "sensor-camera dist:  " << kinect.getSensorCameraDistance() << "cm";
		ofLogNotice() << "zero plane pixel size: " << kinect.getZeroPlanePixelSize() << "mm";
		ofLogNotice() << "zero plane dist: " << kinect.getZeroPlaneDistance() << "mm";
	}
	

	
	colorImg.allocate(kinect.width, kinect.height);
	grayImage.allocate(kinect.width, kinect.height);
	grayThreshNear.allocate(kinect.width, kinect.height);
	grayThreshFar.allocate(kinect.width, kinect.height);
    
    grayBg.allocate(kinect.width, kinect.height);
	grayDiff.allocate(kinect.width, kinect.height);
    
	nearThreshold = 230;
	farThreshold = 70;
	bThreshWithOpenCV = true;
	
	ofSetFrameRate(60);
	
	// zero the tilt on startup
	angle = 0;
	kinect.setCameraTiltAngle(angle);
	
	// start from the front
	bDrawPointCloud = false;
    
    // I create the patches
    
    for(int i = 0; i <50; ++i) {
        
        Patch p = pd.openPatch("pd/instance2.pd");
        instances.push_back(p);
    }
}

void testApp::update() {
	

    ofBackground(100, 100, 100);
	
	kinect.update();
	
	// there is a new frame and we are connected
	if(kinect.isFrameNew()) {
		
		// load grayscale depth image from the kinect source
		grayImage.setFromPixels(kinect.getDepthPixels(), kinect.width, kinect.height);
		
		// we do two thresholds - one for the far plane and one for the near plane
		// we then do a cvAnd to get the pixels which are a union of the two thresholds
		if(bThreshWithOpenCV) {
			grayThreshNear = grayImage;
			grayThreshFar = grayImage;
			grayThreshNear.threshold(nearThreshold, true);
			grayThreshFar.threshold(farThreshold);
			cvAnd(grayThreshNear.getCvImage(), grayThreshFar.getCvImage(), grayImage.getCvImage(), NULL);
		} else {
			
			// or we do it ourselves - show people how they can work with the pixels
			unsigned char * pix = grayImage.getPixels();
			
			int numPixels = grayImage.getWidth() * grayImage.getHeight();
			for(int i = 0; i < numPixels; i++) {
				if(pix[i] < nearThreshold && pix[i] > farThreshold) {
					pix[i] = 255;
				} else {
					pix[i] = 0;
				}
			}
		}
        // update the cv images
		grayImage.flagImageChanged();
		
		// find contours which are between the size of 20 pixels and 1/3 the w*h pixels.
		// also, find holes is set to true so we will get interior contours as well....
		contourFinder.findContours(grayImage, 10, (kinect.width*kinect.height)/2, 20, false);
	}
	

    
    if(kinect.isFrameNew()) {
         grayImageCurrent.setFromPixels(kinect.getDepthPixelsRef());
         
         grayDiff.absDiff(grayBg, grayImageCurrent);
         
         grayDiff.threshold(239);   // preguntar en la lista de of porque no funciona bien threshold?

         grayBg =  grayImageCurrent ;
         
         pixo = grayDiff.getPixels();
         
         for(int x = 0; x < grayDiff.getHeight(); x += 1) {
             for(int y = 0; y < grayDiff.getWidth(); y += 1) {
                 
                 if(pixo[y*  int(grayDiff.getWidth()) + x] == 255 ) {
                     
                 pd << StartMessage() << "In" << kinect.getDistanceAt(x, y) << FinishList(instances[iter].dollarZeroStr()+"-grain");
               
                iter = iter + 1;
                     
                 if(iter > 49 ) {
                         iter = 0;
                     }
                 }
             }
         }
     }
}

//--------------------------------------------------------------
void testApp::draw() {
    
    ofSetColor(255, 255, 255);
	
	if(bDrawPointCloud) {
		easyCam.begin();
		drawPointCloud();
		easyCam.end();
	} else {
		// draw from the live kinect
		kinect.drawDepth(10, 10, 400, 300);
		kinect.draw(420, 10, 400, 300);
		
		grayImage.draw(10, 320, 400, 300);
		contourFinder.draw(10, 320, 400, 300);
        //este
        grayDiff.draw(420, 320, 400, 300);
		

	}
	
	// draw instructions
	ofSetColor(255, 255, 255);
	stringstream reportStream;
    
    if(kinect.hasAccelControl()) {
        reportStream << "accel is: " << ofToString(kinect.getMksAccel().x, 2) << " / "
        << ofToString(kinect.getMksAccel().y, 2) << " / "
        << ofToString(kinect.getMksAccel().z, 2) << endl;
    } else {
        reportStream << "Note: this is a newer Xbox Kinect or Kinect For Windows device," << endl
		<< "motor / led / accel controls are not currently supported" << endl << endl;
    }
    
	reportStream << "press p to switch between images and point cloud, rotate the point cloud with the mouse" << endl
	<< "using opencv threshold = " << bThreshWithOpenCV <<" (press spacebar)" << endl
	<< "set near threshold " << nearThreshold << " (press: + -)" << endl
	<< "set far threshold " << farThreshold << " (press: < >) num blobs found " << contourFinder.nBlobs
	<< ", fps: " << ofGetFrameRate() << endl
	<< "press c to close the connection and o to open it again, connection is: " << kinect.isConnected() << endl;
    
    if(kinect.hasCamTiltControl()) {
    	reportStream << "press UP and DOWN to change the tilt angle: " << angle << " degrees" << endl
        << "press 1-5 & 0 to change the led mode" << endl;
    }
    
	ofDrawBitmapString(reportStream.str(), 20, 652);}

void testApp::drawPointCloud() {
int w = 640;
	int h = 480;
	ofMesh mesh;
	mesh.setMode(OF_PRIMITIVE_POINTS);
	int step = 2;
    for(int y = 0; y < h; y += step) {
		for(int x = 0; x < w; x += step) {
			if(kinect.getDistanceAt(x, y) > 0) {
				mesh.addColor(kinect.getColorAt(x,y));
				mesh.addVertex(kinect.getWorldCoordinateAt(x, y));
            };
		}
	}
	glPointSize(3);
	ofPushMatrix();
	// the projected points are 'upside down' and 'backwards'
	ofScale(1, -1, -1);
	ofTranslate(0, 0, -1000); // center the points a bit
	ofEnableDepthTest();
	mesh.drawVertices();
	ofDisableDepthTest();
	ofPopMatrix();
}


// Shut down audio first so no more callbacks fire, then release the kinect.
void testApp::exit() {
    core.exit();
	kinect.setCameraTiltAngle(0); // zero the tilt on exit
	kinect.close();
	

}


// Keyboard controls: threshold tuning, display mode, kinect tilt / led / connection.
// Every key is also forwarded to the Pd core first.
void testApp::keyPressed (int key) {
	core.keyPressed(key);

	if(key == 'q') {
		// do something (placeholder)
	}
	else if(key == ' ') {
		// toggle between OpenCV and manual thresholding in update()
		bThreshWithOpenCV = !bThreshWithOpenCV;
	}
	else if(key == 'p') {
		// toggle 2D images / 3D point cloud view
		bDrawPointCloud = !bDrawPointCloud;
	}
	else if(key == '>' || key == '.') {
		farThreshold++;
		if(farThreshold > 255) farThreshold = 255;
	}
	else if(key == '<' || key == ',') {
		farThreshold--;
		if(farThreshold < 0) farThreshold = 0;
	}
	else if(key == '+' || key == '=') {
		nearThreshold++;
		if(nearThreshold > 255) nearThreshold = 255;
	}
	else if(key == '-') {
		nearThreshold--;
		if(nearThreshold < 0) nearThreshold = 0;
	}
	else if(key == 'w') {
		kinect.enableDepthNearValueWhite(!kinect.isDepthNearValueWhite());
	}
	else if(key == 'o') {
		kinect.setCameraTiltAngle(angle); // go back to prev tilt
		kinect.open();
	}
	else if(key == 'c') {
		kinect.setCameraTiltAngle(0); // zero the tilt
		kinect.close();
	}
	else if(key == '1') {
		kinect.setLed(ofxKinect::LED_GREEN);
	}
	else if(key == '2') {
		kinect.setLed(ofxKinect::LED_YELLOW);
	}
	else if(key == '3') {
		kinect.setLed(ofxKinect::LED_RED);
	}
	else if(key == '4') {
		kinect.setLed(ofxKinect::LED_BLINK_GREEN);
	}
	else if(key == '5') {
		kinect.setLed(ofxKinect::LED_BLINK_YELLOW_RED);
	}
	else if(key == '0') {
		kinect.setLed(ofxKinect::LED_OFF);
	}
	else if(key == OF_KEY_UP) {
		// tilt up, clamped to the motor's +/-30 degree range
		angle++;
		if(angle > 30) angle = 30;
		kinect.setCameraTiltAngle(angle);
	}
	else if(key == OF_KEY_DOWN) {
		// tilt down, clamped to the motor's +/-30 degree range
		angle--;
		if(angle < -30) angle = -30;
		kinect.setCameraTiltAngle(angle);
	}
}
//--------------------------------------------------------------
// Unused openFrameworks input/window callbacks — required overrides, intentionally empty.
void testApp::mouseMoved(int x, int y) {}

//--------------------------------------------------------------
void testApp::mouseDragged(int x, int y, int button) {}

//--------------------------------------------------------------
void testApp::mousePressed(int x, int y, int button) {}

//--------------------------------------------------------------
void testApp::mouseReleased(int x, int y, int button) {}

//--------------------------------------------------------------
void testApp::windowResized(int w, int h) {}

//--------------------------------------------------------------
// Audio input callback: forward the incoming buffer to the Pd core.
void testApp::audioReceived(float * input, int bufferSize, int nChannels) {
	core.audioReceived(input, bufferSize, nChannels);
}
//--------------------------------------------------------------
// Audio output callback: let the Pd core fill the output buffer.
void testApp::audioRequested(float * output, int bufferSize, int nChannels) {
	core.audioRequested(output, bufferSize, nChannels);
}