Hi,
I need some help with how to combine tracked blobs with a motion mask. I have figured out how to play one video and fill the motion mask with a second video (this was a big accomplishment for me! I used code from: http://forum.openframeworks.cc/t/additive-motion-memory-for-video-masking./2378/0 to help), and I also have another .h file with which I can draw the tracked blobs on top of a video, but I can’t figure out how to make this all work together. Ideally, what I really want is to have one video layer playing underneath, and then fill the tracked blobs and detected motion with pixels from a second video clip.
Can I add the tracked blobs to the motion mask? Or, how can I also fill the blobs with the video pixels from the second video?
Also, the very last thing I want to do is:
The video clip underneath is of the ocean with the sun overhead, I want to draw the second video (which has reflection of light on the water) into the space where the viewers are standing (tracked blob) or moving (motion mask) but then… I would really like to also draw a line shape from the farthest right and farthest left pixel of each blob to a fixed point under where the sun is in the video (like the line of light we see reflected on the water when the sun is setting). So, can I turn the tracked blobs into a shape, with a vertex point under the sun? AND combine them to the motion mask?
Please see attached photo of sketch of what I ultimately want to accomplish.
If you can help me, or suggest any examples to look at, I would very much appreciate it.
Thank You.
Also, here are the two programs I have so far:
#include "testApp.h"
//////////////////////////////////////////////
// SETUP.
//////////////////////////////////////////////
void testApp :: setup()
{
    ofSetVerticalSync( true );      // sync drawing to the display refresh (avoids tearing)
    ofSetFrameRate( 60 );           // cap the app at 60 frames per second
    ofBackground( 255, 255, 255 );  // white background

    // NOTE: initCamera() must run before initVideoMask() and initMotion(),
    // because those read cameraWidth / cameraHeight.
    initVideoTwo();   // background video clip
    initCamera();     // live camera feed
    initVideoMask();  // RGBA mask texture + pixel buffer
    initVideo();      // video clip whose pixels fill the mask
    initMotion();     // motion-detection CV images
}
void testApp :: initCamera()
{
    // Grab live video at VGA resolution; verbose logging helps debug device setup.
    cameraWidth  = 640;
    cameraHeight = 480;

    camera.setVerbose( true );
    camera.initGrabber( cameraWidth, cameraHeight );
}
void testApp :: initVideo()//this is the first video
{
video.loadMovie( "watersunreflect.mov" );
//unsigned char * pixels = video.getPixels();
//video.setLoopState(OF_LOOP_NORMAL);
video.play();
}
void testApp :: initVideoTwo()//this is the second video
{
videotwo.loadMovie( "watersun640_480.mov" );
//unsigned char * pixels = videotwo.getPixels();
//videotwo.setLoopState(OF_LOOP_NORMAL);
videotwo.play();
}
void testApp :: initMotion()
{
    // Allocate every OpenCV working image at the camera's resolution.
    cameraColorImage.allocate( cameraWidth, cameraHeight );
    cameraGrayImage.allocate( cameraWidth, cameraHeight );
    cameraGrayPrevImage.allocate( cameraWidth, cameraHeight );
    cameraGrayDiffImage.allocate( cameraWidth, cameraHeight );
    cameraDiffFloatImage.allocate( cameraWidth, cameraHeight );
    cameraMotionFloatImage.allocate( cameraWidth, cameraHeight );

    // Per-frame multiplier (0..1) applied to the accumulated motion image;
    // values close to 1 make old motion linger longer before fading out.
    cameraMotionFadeAmount = 0.95f;
}
void testApp :: initVideoMask()//this is the mask
{
// RGBA texture: RGB will come from the masked video, alpha from the motion image.
maskTexture.allocate( cameraWidth, cameraHeight, GL_RGBA );
// 4 bytes per pixel (RGBA). NOTE(review): this buffer is never delete[]d in the
// visible code — free it in the destructor (or use std::vector<unsigned char>)
// to avoid a leak.
maskPixles = new unsigned char[ cameraWidth * cameraHeight * 4 ];
}
//////////////////////////////////////////////
// UPDATE.
//////////////////////////////////////////////
void testApp :: update()
{
updateCamera();
if( cameraNewFrame )
{
updateMotion( camera.getPixels() );
updateVideoMask();
}
}
void testApp :: updateCamera()
{
    // Poll the grabber and remember whether a fresh frame arrived this tick.
    camera.grabFrame();
    cameraNewFrame = camera.isFrameNew();
}
void testApp :: updateMotion( unsigned char *pixels )//catch the new motion
{
// Frame differencing with a fading "motion memory".
// pixels: RGB camera frame, cameraWidth x cameraHeight.
cameraColorImage.setFromPixels( pixels, cameraWidth, cameraHeight );
// Save last frame's gray image BEFORE overwriting it — the order here matters.
cameraGrayPrevImage = cameraGrayImage;
cameraGrayImage = cameraColorImage;      // color -> grayscale conversion
// Per-pixel |current - previous|: non-zero wherever something moved.
cameraGrayDiffImage.absDiff( cameraGrayImage, cameraGrayPrevImage );
cameraGrayDiffImage.threshold( 30 );     // binarise: keep differences > 30
cameraDiffFloatImage = cameraGrayDiffImage;
// Fade the accumulated motion first, then add this frame's motion on top,
// so old motion trails slowly dissolve (fade amount set in initMotion()).
cameraMotionFloatImage *= cameraMotionFadeAmount;
cameraMotionFloatImage += cameraDiffFloatImage;
cameraMotionFloatImage.blur( 20 );       // soften the mask edges
}
void testApp :: updateVideoMask()
{
    // Build an RGBA image whose colour comes from the first video and whose
    // alpha comes from the blurred motion image, then upload it as a texture.
    videoPixels  = video.getPixels();                   // RGB source that fills the mask
    motionPixles = cameraMotionFloatImage.getPixels();  // one byte per pixel

    const int numPixels = cameraWidth * cameraHeight;
    // Single linear pass in memory order. (The original iterated columns-first
    // over this row-major data, striding through memory and defeating the cache;
    // the output is identical.)
    for( int p = 0; p < numPixels; p++ )
    {
        maskPixles[ p * 4 + 0 ] = videoPixels[ p * 3 + 0 ];  // R
        maskPixles[ p * 4 + 1 ] = videoPixels[ p * 3 + 1 ];  // G
        maskPixles[ p * 4 + 2 ] = videoPixels[ p * 3 + 2 ];  // B
        maskPixles[ p * 4 + 3 ] = motionPixles[ p ];         // alpha = motion amount
    }

    maskTexture.loadData( maskPixles, cameraWidth, cameraHeight, GL_RGBA );
}
//////////////////////////////////////////////
// DRAW.
//////////////////////////////////////////////
void testApp :: draw()
{
    glDisable( GL_BLEND );

    // Layer order: background clip first, then the RGBA motion mask on top.
    //drawCamera();
    drawVideoTwo();
    drawVideoMask();
    //drawVideo();   // not needed — its pixels are already baked into the mask
    //drawMotion();
}
void testApp :: drawCamera()
{
    // Kept around for debugging; the camera preview itself stays commented out.
    ofSetColor( 0xFFFFFF );
    //camera.draw( 0, 0, cameraWidth, cameraHeight );
}
// Debug view of the raw motion image. Currently does nothing — every line is
// commented out because the author could not keep the image's black background
// from being drawn. (Drawing it with alpha blending and a low-alpha colour, as
// attempted below, tints the whole rectangle rather than keying out black.)
void testApp :: drawMotion()//this is the motion, which I do want to draw but I cannot figure out how not to draw the background
{
// ofEnableAlphaBlending();//allows for image transparency
// ofSetColor( 255,0,0,50 );//changes color and transparency of motion
//cameraMotionFloatImage.draw(0, 0, cameraWidth, cameraHeight );//maybe this is better?
// cameraMotionFloatImage.draw(360,280);//was 400 200
// ofDisableAlphaBlending();//stop allowing for image transparency
}
// Empty stub — declared (presumably in testApp.h) but never implemented.
void testApp :: drawMotionDebug()
{
//
}
//void testApp :: drawVideo()//this is the video (with sun) shown in blobs and motion, but I don't need to draw it because it is already in the mask
//{
// ofEnableAlphaBlending();//new
// ofSetColor( 255,255,255,50 );
// video.draw( 360, 280, 0, 0 );
//ofDisableAlphaBlending();//new
//}
void testApp :: drawVideoTwo()
{
    // Background layer: the clip without the sun, drawn fully opaque.
    ofEnableAlphaBlending();
    ofSetColor( 255, 255, 255, 255 );
    videotwo.draw( 360, 280 );
    ofDisableAlphaBlending();
}
void testApp :: drawVideoMask()
{
    // Overlay: the RGBA mask texture — first-video colour where motion was
    // detected, transparent elsewhere. Alpha blending must be enabled for
    // the per-pixel alpha to take effect.
    ofEnableAlphaBlending();
    ofSetColor( 255, 255, 255, 255 );
    maskTexture.draw( 360, 280 );
    ofDisableAlphaBlending();
}
// NOTE(review): removed a stray '}' that followed this function — it matched
// no opening brace and would not compile.
#include "testApp.h"
//--------------------------------------------------------------
void testApp::setup(){
    ofSetFrameRate( 60 );

    // Background-subtraction parameters.
    threshold        = 60;
    bLearnBackground = true;   // capture a reference background on the first frame

    // Camera and CV working images at QVGA resolution.
    vidGrabber.initGrabber( 320, 240 );
    colorImg.allocate( 320, 240 );
    grayImg.allocate( 320, 240 );
    bgImg.allocate( 320, 240 );

    // Both clips loop so they never run out of frames.
    video.loadMovie( "watersunreflect.mov" );
    video.setLoopState( OF_LOOP_NORMAL );
    video.play();

    videotwo.loadMovie( "watersun640_480.mov" );
    videotwo.setLoopState( OF_LOOP_NORMAL );
    videotwo.play();
}
//--------------------------------------------------------------
void testApp::update(){
    ofBackground( 100, 100, 100 );

    vidGrabber.grabFrame();
    if( !vidGrabber.isFrameNew() ) return;   // nothing new to process this tick

    colorImg = vidGrabber.getPixels();
    grayImg  = colorImg;                     // colour -> grayscale

    // Remember the first frame as the static background reference.
    if( bLearnBackground ){
        bgImg = grayImg;
        bLearnBackground = false;
    }

    // Difference against the background, smooth, then binarise.
    grayImg.absDiff( bgImg );
    grayImg.blur( 11 );
    grayImg.threshold( threshold );

    // Blobs of 50..20000 px, at most 10, without holes, approximate=true.
    contourFinder.findContours( grayImg, 50, 20000, 10, false, true );
    blobTracker.trackBlobs( contourFinder.blobs );
}
//--------------------------------------------------------------
void testApp::draw(){
    ofEnableAlphaBlending();
    ofSetColor( 0xffffff );   // full white so textures draw untinted

    //colorImg.draw(20,200);
    //grayImg.draw (360,200);

    videotwo.draw( 0, 0 );          // background clip
    blobTracker.draw( 320, 240 );   // tracked blob outlines on top

    ofDisableAlphaBlending();
}