Modifying motion detection. How to create average frame from two frames ?

edited October 2017 in Arduino

Hey guys ! I'am trying to modify code for motion detection from Dan's tutorial:

In his original code, he is comparing current frame only with the previous one. What I'am trying to do is comparing current frame with the average frame of two previous frames (actually I'am taking 99% pixel value of pre-previous frame and 1% pixel value of previous frame. It should make detection more accurate). I edited "captureEvent" function so it can load second frame and added new "GetFrame" function which takes two frames as argument and returns an average frame. In theory it should work but it doesn't and I only get gray, empty window. What did I screw up ? I know code is a little bit long but I suspect mistake to be in "GetFrame" or "captureEvent" function.

EDIT: I have problem placing code here using markers in editor so Iam pasting it from pastebin: https://pastebin.com/jkBuex0v

Answers

  • edited October 2017

    EDIT:

  • edited October 2017

    Capture event is an event -- when it gets called, there is only one video frame to read.

    Here is one long-form way approach to what you might be trying to do:

    1. starting up:

      1. load the current frame of video into prev, pprev, and comparison
    2. continue:

      1. load the current frame
      2. test current / comparison
      3. prep for next time:
        1. move prev into pprev
        2. move current into prev
        3. blend prev and pprev into comparison

    There is also a more efficient method of doing prep, which only requires one buffer image (not two) and the current frame to accomplish your previous-two-blend:

    • prep for next time: blend prev (next frame it will be pprev) and current ( next frame it would be prev) into comparison
  • Thx for the reply ! First of all I want to run it, then I will think about increasing efficiency. Secondly I fixed the captureEvent function and now it should load frames properly. From 3rd iteration pprev, prev and video are getting new frames in proper order. Also I noticed that the problem is caused by GetFrame function. Commenting it makes everything works fine. So I have to focus on that I guess. Here is the actual code:

    import processing.video.*; 
    import processing.serial.*; 
    
    Capture video;
    Serial myPort; 
    
    PImage pprev;
    PImage prev;
    PImage comprasion; //This will be the result of combining pprev with prev
    
    float threshold = 60;
    
    float motionX = 0;
    float motionY = 0;
    
    float lerpX = 0;
    float lerpY = 0;
    
    
    void setup() 
    {
      size(640, 480);
      //myPort = new Serial(this, "COM1", 9600);
      //String[] cameras = Capture.list();
      //printArray(cameras);
      video = new Capture(this, width, height, 30);
      video.start();
      pprev = createImage(640, 480, RGB);
      prev = createImage(640, 480, RGB);
      comprasion = createImage(640, 480, RGB);
      // Start off tracking for red
    }
    
    void captureEvent(Capture video) 
    { 
      pprev.copy(prev, 0, 0, prev.width, prev.height, 0, 0, prev.width, prev.height); 
      pprev.updatePixels();
    
      prev.copy(video, 0, 0, video.width, video.height, 0, 0, prev.width, prev.height);
      prev.updatePixels();
    
      video.read();
    }
    
    void draw() 
    {  
      video.loadPixels();
      pprev.loadPixels();
      prev.loadPixels();
      comprasion.loadPixels();
    
      image(video, 0, 0);
    
      //threshold = map(mouseX, 0, width, 0, 100);
      threshold = 60;
      int count = 0;
      float avgX = 0;
      float avgY = 0;
    
      loadPixels();
    
      for (int x = 0; x < video.width; x++ ) 
      {
        for (int y = 0; y < video.height; y++ ) 
        {
          int loc = x + y * video.width;
    
          //comprasion = GetFrame(pprev, prev);
    
          color currentColor = video.pixels[loc];
          float r1 = red(currentColor);
          float g1 = green(currentColor);
          float b1 = blue(currentColor);
    
          color prevColor = prev.pixels[loc];
          float r2 = red(prevColor);
          float g2 = green(prevColor);
          float b2 = blue(prevColor);
    
          float d = distSq(r1, g1, b1, r2, g2, b2); 
    
          if (d > threshold*threshold) {
            //stroke(255);
            //strokeWeight(1);
            //point(x, y);
            avgX += x;
            avgY += y;
            count++;
            //myPort.write('1');
            //pixels[loc] = color(0);
          } else {
            //pixels[loc] = color(255);
          }
        }
      }
      updatePixels();
    
      //We decide to detect motion or not
      if (count > 4000) 
      { 
        //myPort.write('1');
        motionX = avgX / count;
        motionY = avgY / count;
        // Draw a circle at the tracked pixel
    
        lerpX = lerp(lerpX, motionX, 0.2); 
        lerpY = lerp(lerpY, motionY, 0.2); 
    
        fill(255, 50, 50);
        strokeWeight(2.0);
        stroke(0);
        ellipse(lerpX, lerpY, 36, 36);
      } else 
      {
        //myPort.write('0');
      }
      //image(video, 0, 0);y
      //image(prev, 100, 0, 100, 100);
    
      //println(mouseX, threshold);
    }
    
    float distSq(float x1, float y1, float z1, float x2, float y2, float z2) 
    {
      float d = (x2-x1)*(x2-x1) + (y2-y1)*(y2-y1) +(z2-z1)*(z2-z1);
      return d;
    }
    
    PImage GetFrame (PImage a, PImage b) //This fuction should return an average frame of two previous frames.
    {
      PImage c = createImage(640, 480, RGB);
    
      a.loadPixels();
      b.loadPixels();
      c.loadPixels();
    
      for (int x = 0; x < video.width; x++ ) 
      {
        for (int y = 0; y < video.height; y++ ) 
        {
          int loc = x + y * video.width;
    
          c.pixels[loc] = a.pixels[loc]*(99/100) + b.pixels[loc]*(1/100); //This should fill c image with correct pixels.
        }
      }
      updatePixels();
      return c;
    }
    
  • I'm confused. The code you posted doesn't call captureEvent() anywhere...?

  • You need to revise your code. I don't think you are averaging two previous frame but you are including your current one. Your code should look like this:

    void captureEvent(Capture video) 
    { 
      pprev=prev.get();  
      prev=get();
      video.read();
    }
    

    Also notice line 68 is being called inside a nested loop. But GetFrame() is made of the same nested loop concept. Move line 68 outside and above the nested loop.

    I didn't solve the post because I am not clear of what you are doing at the end of draw() and how you are using the comparison PImage object. Hopefully these tips drives you to your solution.

    Kf

  • edited November 2017

    @jeremydouglass As I understand, captureEvent() is run automatically whenever new frame is available. https://processing.org/reference/libraries/video/captureEvent_.html

    So, GetFrame function was useless. Also as You said I didn't need the "comprasion" frame. Iam now simply calculating new color values basing on pprev and prev frames. But its still not working corectly. Its detecting motion without motion and always drawing circle near the center. I dont know why. rc = 99/100*r1 is almost the same color (pixel) as r1 (line 77,78,79) .

    import processing.video.*; 
    import processing.serial.*; 
    
    Capture video;
    Serial myPort; 
    
    PImage pprev;
    PImage prev;
    
    float threshold = 60;
    
    float motionX = 0;
    float motionY = 0;
    
    float lerpX = 0;
    float lerpY = 0;
    
    
    void setup() 
    {
      size(640, 480);
      //myPort = new Serial(this, "COM1", 9600);
      //String[] cameras = Capture.list();
      //printArray(cameras);
      video = new Capture(this, width, height, 30);
      video.start();
      pprev = createImage(640, 480, RGB);
      prev = createImage(640, 480, RGB);
     //comprasion = createImage(640, 480, RGB);
     //Start off tracking for red
    }
    
    
    void captureEvent(Capture video) 
    { 
      pprev.copy(prev, 0, 0, prev.width, prev.height, 0, 0, prev.width, prev.height); 
      pprev.updatePixels();
    
      prev.copy(video, 0, 0, video.width, video.height, 0, 0, prev.width, prev.height);
      prev.updatePixels();
    
      video.read(); 
    }
    
    void draw() 
    {  
      video.loadPixels();
      pprev.loadPixels();
      prev.loadPixels();
    
      image(video, 0, 0);
    
      //threshold = map(mouseX, 0, width, 0, 100);
      threshold = 40;
      int count = 0;
      float avgX = 0;
      float avgY = 0;
    
      loadPixels();
    
      for (int x = 0; x < video.width; x++ ) 
      {
        for (int y = 0; y < video.height; y++ ) 
        {
          int loc = x + y * video.width;
    
          color pprevColor = pprev.pixels[loc];
          float r1 = red(pprevColor);
          float g1 = green(pprevColor);
          float b1 = blue(pprevColor);
    
          color prevColor = prev.pixels[loc];
          float r2 = red(prevColor);
          float g2 = green(prevColor);
          float b2 = blue(prevColor);
    
          float rc = (999/1000)*r1; // + (1/100)*r2;
          float gc = (999/1000)*g1; //+ (1/100)*g2;
          float bc = (999/1000)*b1; //+ (1/100)*b2;
    
          color currentColor = video.pixels[loc];
          float r3 = red(currentColor);
          float g3 = green(currentColor);
          float b3 = blue(currentColor);
    
          float d = distSq(r3, g3, b3, rc, gc, bc); 
    
          if (d > threshold*threshold) 
          {
            //stroke(255);
            //strokeWeight(1);
            //point(x, y);
            avgX += x;
            avgY += y;
            count++;
            //myPort.write('1');
            //pixels[loc] = color(0);
          } 
          else 
          {
            //pixels[loc] = color(255);
          }
        }
      }
      updatePixels();
    
      //We decide to detect motion or not
      if (count > 4000) 
      { 
        //myPort.write('1');
        motionX = avgX / count;
        motionY = avgY / count;
        // Draw a circle at the tracked pixel
    
    
      lerpX = lerp(lerpX, motionX, 0.2); 
      lerpY = lerp(lerpY, motionY, 0.2); 
    
      fill(255, 50, 50);
      strokeWeight(2.0);
      stroke(0);
      ellipse(lerpX, lerpY, 36, 36);
      }
      else 
      {
        //myPort.write('0');
      }
      //image(video, 0, 0);y
      //image(prev, 100, 0, 100, 100);
    
      //println(mouseX, threshold);
    }
    
    float distSq(float x1, float y1, float z1, float x2, float y2, float z2) 
    {
      float d = (x2-x1)*(x2-x1) + (y2-y1)*(y2-y1) +(z2-z1)*(z2-z1);
      return d;
    }  
    
Sign In or Register to comment.