halcyonandon
Motion Tracking to Replace Mouse Tracking?
May 18th, 2007, 5:28am
I'm working with an open-source sketch where an effect follows the mouse cursor. I'd like to alter the code so the effect follows the frame-to-frame difference from a live camera instead (without showing the camera feed on screen). I'm attaching both sources for you guys below. Any ideas, thoughts, and suggestions are very much appreciated.

//**********************************************************************
// Water Effect v2. For Processing language : www.processing.org
//
// Code by movax (Kristopher Collins. www.viz.nu)
//
// Original effect invented by: ?
//
// I first saw this type of effect in a demo by the group Iguana circa 1995
//**********************************************************************

float sludgefactor=6.;
int border=1;
float Xoffset,Yoffset;              // Refraction vector
float map1[]=new float[640*480];    // Heightmap buffer 1
float map2[]=new float[640*480];    // Heightmap buffer 2
PImage P;                           // Image for background/bottom of pool
float pt1[];                        // Pointer to reference heightmap
float pt2[];                        // Pointer to reference heightmap
float tmpt[];                       // Temp pointer

void setup(){
  size(300,300,P3D);
  loadPixels();
  colorMode(RGB,1.0,1.0,1.0,1.0);
  P=loadImage("rocks.jpg");         // Must be at least sketch size (bigger is OK)
  background(0);
  pt1=map1;                         // Set pointers
  pt2=map2;
  frameRate(40);
}

float lu(int x, int y, float array[]){   // look up a value in a heightmap
  return(array[(y*width)+x]);
}

void draw(){
  splash(mouseX,mouseY,.1,2);       // Create splashes where the mouse cursor is

  // The main calculation is below. We go through every pixel in the array pointed to by pt1,
  // setting its value based on the previous heights stored in pt2. This can be optimized but
  // is left as-is for readability.
  for(int j=1;j<height-1;j++){
    for(int i=1;i<width-1;i++){
      pt1[i+j*width]=.98*(((lu(i+1,j,pt2)+lu(i-1,j,pt2)+lu(i,j+1,pt2)+lu(i,j-1,pt2))/2.)-lu(i,j,pt1));
    }
  }

  // Now that we have our new heightmap stored in pt1, we render the image by finding a
  // refraction vector for each pixel and grabbing the color it points to in our image.
  for(int j=border;j<height-border;j++){
    for(int i=border;i<width-border;i++){
      Xoffset=(lu(i+1,j,pt1)+lu(i-1,j,pt1))/sludgefactor;
      Yoffset=(lu(i,j+1,pt1)+lu(i,j-1,pt1))/sludgefactor;
      if(Xoffset+i > width-border  || Xoffset+i < border) Xoffset=-Xoffset;   // keep refraction vectors
      if(Yoffset+j > height-border || Yoffset+j < border) Yoffset=-Yoffset;   // from going off the screen
      pixels[i+j*width]=P.pixels[int((i+Xoffset)+(j+Yoffset)*P.width)];       // Draw our pixel!
    }
  }

  tmpt=pt2;   // Hold pt2 location
  pt2=pt1;    // Switch what the pointers reference, effectively "copying" the
  pt1=tmpt;   // current state into the previous state buffer
  updatePixels();
}

void keyPressed(){
  switch(keyCode){
    case UP:   sludgefactor++; break;
    case DOWN: sludgefactor--; break;
  }
}

void splash(int x, int y, float power, int sqsize){   // Ugly routine for creating a splash
  if(x>=sqsize && y>=sqsize && x<width-sqsize && y<height-sqsize){
    for(int xx=-sqsize;xx<sqsize;xx++){
      for(int yy=-sqsize;yy<sqsize;yy++){
        pt2[((y+yy)*width)+x+xx]+=power;
      }
    }
  }
}
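For context on where the change would go: the only place the water sketch reads the mouse is the single splash(mouseX, mouseY, .1, 2) call at the top of draw(), so the whole modification comes down to feeding splash() different coordinates. Here is a rough, untested sketch of that idea; splashAtCell() is a hypothetical helper name, and it assumes the water sketch's splash(), width, and height are in scope:

// Hypothetical helper: map a cell of a gridW x gridH motion grid onto the
// water sketch's pixel coordinates and disturb the heightmap at the cell centre.
void splashAtCell(int gridX, int gridY, int gridW, int gridH){
  int x = gridX*(width/gridW)  + (width/gridW)/2;    // cell centre, x
  int y = gridY*(height/gridH) + (height/gridH)/2;   // cell centre, y
  splash(x, y, .1, 2);                               // same power/size as the mouse version
}

With something like this in place, splash(mouseX, mouseY, .1, 2) in draw() would be replaced by one splashAtCell() call for every grid cell the camera code flags as containing motion.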
*** And this is the motion-tracking source I was planning to base it on. I also have a version using JMyron, and another that takes a single reference capture and compares all subsequent frames against it.

float sensitivity;        // sensitivity is the fraction of pixels that must change
boolean newFrame;         // switch var for determining when a new frame is received
color[] prevFrame;        // holds the previous frame for comparison

void setup(){
  size(320,240);
  sensitivity=0.24;
  newFrame=false;
  stroke(240);
  noFill();
  prevFrame=new color[320*240];
  beginVideo(320,240,30);
  framerate(30);
}

public void videoEvent(){
  newFrame=true;
}

void loop(){
  if(newFrame){
    newFrame=false;
    image(video,0,0);                          // update display with current frame
    for(int y=0; y<30-1; y++){                 // step through the video in a 40x30 grid
      for(int x=0; x<40-1; x++){
        int cxLoc=x*(320/40);                  // x position on grid
        int cyLoc=y*(240/30);                  // y position on grid
        if(motionTest(cxLoc,cyLoc,width/40,height/30)){
          rect(cxLoc+4,cyLoc+4,(width/40)-2,(height/30)-2);   // indicate motion
        }
      }
    }
  }
}

boolean motionTest(int srcX,int srcY,int tw,int th){
  int dc=0;                 // counter to track the number of differences
  color newPixel;
  for(int y=0;y<th;y++){
    for(int x=0;x<tw;x++){
      int srcPos=((srcY+y)*width)+(srcX+x);
      newPixel=video.pixels[srcPos];
      if(abs(red(newPixel)-red(prevFrame[srcPos]))>25) dc++;
      prevFrame[srcPos]=newPixel;              // update prevFrame with the current pixel color
    }
  }
  // test to see if the number of differences is 'significant'
  if(dc>(sensitivity*(tw*th))){
    return true;
  }else{
    return false;
  }
}
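In case it helps to see the two joined up, here is a rough, untested sketch of what the top of draw() in the water sketch might look like with the mouse line swapped out for the motion test. It assumes the water sketch is resized to the camera's 320x240, that newFrame, prevFrame, beginVideo()/videoEvent() and motionTest() from the tracking source above have been copied in, and that the hypothetical splashAtCell() helper sketched earlier exists. The camera frame is only ever read from video.pixels and never drawn, which is what keeps the feed off screen:

void draw(){
  if(newFrame){                                 // only test when the camera delivered a frame
    newFrame=false;
    // note: the image(video,0,0) call from the tracking source is deliberately dropped
    for(int gy=0; gy<30-1; gy++){               // same 40x30 grid as the tracking source
      for(int gx=0; gx<40-1; gx++){
        int cxLoc=gx*(width/40);
        int cyLoc=gy*(height/30);
        if(motionTest(cxLoc, cyLoc, width/40, height/30)){
          splashAtCell(gx, gy, 40, 30);         // disturb the water where motion was seen
        }
      }
    }
  }
  // ...followed by the unchanged heightmap update and rendering code from the water sketch
}

The sensitivity value (and the per-pixel threshold of 25 inside motionTest()) would probably need tuning so the water isn't constantly churning from camera noise.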