IndexOutOfBoundsException when trying to use an external webcam

edited June 2016 in Kinect

Hi! I'm pretty new to Processing, so I'm sorry if this turns out to be really simple to solve and I just don't see it, or if I'm making other mistakes.

I wrote a sketch for a 'tattoo projection mapping' project, and it worked - but only with my built-in webcam. I want to use an external webcam instead, but I can't seem to change the code without getting an error.

I used the 'GettingStartedCapture' example from the Video library to find out the name of my webcam. When I run that example on its own, everything works.
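
For reference, the part of that example I relied on just prints the list of capture devices, roughly like this:

    import processing.video.*;

    void setup() {
      size(320, 240);
      // print every capture device the Video library can see,
      // to copy the exact "name=...,size=...,fps=..." string
      printArray(Capture.list());
    }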

The only thing I changed in my code is
video = new Capture(this, 320, 240);
to
video = new Capture(this, "name=Microsoft® LifeCam HD-3000,size=640x480,fps=30");

The error I get is 'IndexOutOfBoundsException: Index: 3, Size: 0' on the line opencv.loadImage(video);

Line 65 is the line I changed; line 84 is the line the error occurs on.

Full code below (it's long - sorry for the excess code):

    // docs.opencv.org/master/db/dd6/classcv_1_1RotatedRect.html#gsc.tab=0
    // processing.org/reference/textureMode_.html
    // processing.org/reference/vertex_.html
    // rotatedRect angle calculation: stackoverflow.com/questions/24073127/opencvs-rotatedrect-angle-does-not-provide-enough-information

    // A lot of native OpenCV for Java code is used, mainly because not everything is implemented in the Processing library.

    import gab.opencv.*;
    import org.opencv.imgproc.Imgproc;
    import org.opencv.core.Core;
    //import org.opencv.imgproc.Moments;

    import org.opencv.core.Mat;
    import org.opencv.core.MatOfPoint;
    import org.opencv.core.MatOfPoint2f;
    import org.opencv.core.CvType;
    import org.opencv.core.RotatedRect;

    import java.awt.Rectangle;

    import org.opencv.core.Point;
    import org.opencv.core.Size;

    import org.opencv.core.Scalar;

    import processing.video.*;
    Movie tattooImg;

    boolean animationHasBeenStarted;
    //ArrayList<Contour> contours;
    //ArrayList<MatOfPoint> contours;
    //ArrayList<MatOfPoint2f> approximations;
    //ArrayList<MatOfPoint2f> markers;

    //ArrayList<PVector> hierarchyVectors;

    PImage src, dst;
    Mat hierarchy;

    //ArrayList<Contour> polygons;
    //ArrayList<Moments> mu;

    MatOfPoint largestContoursMat; 

    //ArrayList<Contour> contours;
    //ArrayList<Contour> polygons;

    OpenCV opencv;

    Mat workMat;

    double largest_area = 0.0;

    Capture video;

    PImage maskImg; 

    RotatedRect rRect;


    void setup() {

      size(320, 240, P2D);

      video = new Capture(this, "name=Microsoft® LifeCam HD-3000,size=640x480,fps=30");
      video.start();  

      opencv = new OpenCV( this, video.width, video.height); 
      opencv.useColor();  

      maskImg = createImage(opencv.width, opencv.height, RGB);

      tattooImg = new Movie(this, "Tattoo 2.mov");
      //tattooImg.loop();
      //tattooImg.speed(1.5);
    }

    void draw() {
      //image(tattooImg, mouseX, mouseY);

      if (video.available()) {
        video.read();
        //markerDetector.processFrame(video, true);
        opencv.loadImage(video);

        // call process function
        processWithOpenCV();
      }

      image( opencv.getOutput(), 0, 0 );
      // image( maskImg,320,0);

      // draw some things on top of the image
      // only when we have found the largestContour. 
      // and when the area size is above a certain threshold
      if (largestContoursMat != null && largest_area > 2500.0) {

        if (!animationHasBeenStarted) {
          tattooImg.play();
          tattooImg.speed(0.2);

          animationHasBeenStarted = true;
        }


        //strokeWeight(2);
        //stroke(255,0,0);
        //noFill();

        noStroke();

        beginShape();
        //textureMode(NORMAL);
        texture(tattooImg);

        Point[] vertices = new Point[4];  
        rRect.points(vertices); 
        //vertices[4] = vertices[0];

        //Point[] points = largestContoursMat.toArray();
        //Point[] points = contoursMat.get();

        //for (int j = 0; j < vertices.length; j++) {
        //  vertex((float)vertices[j].x, (float)vertices[j].y);
        //}

        vertex((float)vertices[0].x, (float)vertices[0].y, 0, 0);
        vertex((float)vertices[1].x, (float)vertices[1].y, tattooImg.width, 0);
        vertex((float)vertices[2].x, (float)vertices[2].y, tattooImg.width, tattooImg.height);
        vertex((float)vertices[3].x, (float)vertices[3].y, 0, tattooImg.height);

        endShape();

        float blob_angle_deg = (float) rRect.angle;
        if (rRect.size.width < rRect.size.height) {
          blob_angle_deg = 90 + blob_angle_deg;
        }

        //text(blob_angle_deg, 10,10);

        noFill();
        //strokeWeight(2);
        //stroke(0,0,255);

        //beginShape();

        //MatOfPoint c = contoursMat.get(largest_contour_index);
        //Point[] points = largestContoursMat.toArray();
        //Point[] points = contoursMat.get();

        //for (int j = 0; j < points.length; j++) {
        //  vertex((float)points[j].x, (float)points[j].y);
        // }
        // endShape();


        //pushMatrix();
        //  rotate(radians(blob_angle_deg));
        //  translate((float)vertices[0].x, (float)vertices[0].y);
        //  scale( (float) (rRect.size.width/tattooImg.width), (float)(rRect.size.height/tattooImg.height));
        //  image(tattooImg,0,0);
        //popMatrix();
      } else {
        animationHasBeenStarted = false;
      }
    }
    void movieEvent(Movie m) {
      m.read();
    }

    void processWithOpenCV() {

      // create the matrix in the size of the input image 
      // can this be done faster?
      Mat workMat = OpenCV.imitate(opencv.getColor());

      // here we put the video image in the matrix. 
      OpenCV.toCv(video, workMat);
      // switch colors
      OpenCV.ARGBtoBGRA(workMat, workMat);

      // convert to YCrCb
      Imgproc.cvtColor(workMat, workMat, Imgproc.COLOR_BGR2YCrCb);

      // check skin range
      Core.inRange(workMat, new Scalar(0, 133, 77), new Scalar(255, 173, 127), workMat);

      // eliminate noise with erode and dilate
      // http://www.tutorialspoint.com/java_dip/eroding_dilating.htm
      int erosion_size = 4;
      int dilation_size = 4;

      Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2*erosion_size + 1, 2*erosion_size + 1));
      Imgproc.erode(workMat, workMat, element);

      Mat element1 = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2*dilation_size + 1, 2*dilation_size + 1));
      Imgproc.dilate(workMat, workMat, element1);

      // blur it a bit
      Imgproc.GaussianBlur(workMat, workMat, new Size(5, 5), 0);

      maskImg = opencv.getSnapshot(workMat);

      //put the matrix in our opencv object, just for display
      //opencv.setGray(workMat);

      Mat hierarchyMat = new Mat();
      ArrayList<MatOfPoint> contoursMat = new ArrayList<MatOfPoint>(); 

      Imgproc.findContours(workMat, contoursMat, hierarchyMat, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); 

      // reset the global largest_area
      largest_area = 0.0; 
      int    largest_contour_index = 0;

      if (contoursMat.size() > 0) {

        for ( int i = 0; i< contoursMat.size(); i++) {   

          MatOfPoint c = contoursMat.get(i);

          double a = Imgproc.contourArea(c); //,false);  //  Find the area of contour

          if (a > largest_area) {
            largest_area = a;
            largest_contour_index = i;                //Store the index of largest contour
          }
        }

        //println(largest_area);

        //Convert contours(i) from MatOfPoint to MatOfPoint2f
        MatOfPoint2f contourMMOP2f = new MatOfPoint2f();

        // get the largest Contour and get the RotatedRect from it. 
        largestContoursMat = contoursMat.get(largest_contour_index); 
        contoursMat.get(largest_contour_index).convertTo(contourMMOP2f, CvType.CV_32FC2);

        rRect = Imgproc.minAreaRect(contourMMOP2f);
      }
    }

Answers

  • Hey

    When using the "GettingStartedCapture" example, does your video init line work?

    That would be the first thing I would test. I would also suggest using one of the other constructor calls. Let me explain. If this is what you get from the list of available capture devices:

      //[0] "name=Integrated Camera,size=640x480,fps=15"
      //[1] "name=Integrated Camera,size=640x480,fps=30"
      //[2] "name=Integrated Camera,size=320x180,fps=15"
    

    Then call

    video = new Capture(this, cameras[1]);
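
    Here is a minimal sketch of what I mean, assuming your LifeCam shows up at index 1 of the printed list (adjust the index to whatever printArray shows on your machine):

      import processing.video.*;

      Capture video;

      void setup() {
        size(640, 480);
        String[] cameras = Capture.list();  // query the available capture devices
        printArray(cameras);                // check which index your LifeCam gets
        video = new Capture(this, cameras[1]);
        video.start();
      }

      void draw() {
        if (video.available()) {
          video.read();
        }
        image(video, 0, 0);
      }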

    I hope this helps,

    Kf

  • edited June 2016

    Thank you for looking at it!

    Yes, that works. I also tried video = new Capture(this, cameras[1]); (and with other indices) - it works in the "GettingStartedCapture" example, but not in my own code, and I still get the same error.

  • Answer ✓

    Found the solution - you have to pass the width and height before the camera name. So Capture(parent, requestWidth, requestHeight, cameraName) instead of
    video = new Capture(this, cameraName);

    In this case I used video = new Capture(this, 320, 240, cameras[0]);
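
    For completeness, the relevant part of my setup() now looks roughly like this (cameras comes from Capture.list(); the rest of the sketch is unchanged):

      void setup() {
        size(320, 240, P2D);

        String[] cameras = Capture.list();
        // request width and height first, then the camera name
        video = new Capture(this, 320, 240, cameras[0]);
        video.start();

        opencv = new OpenCV(this, video.width, video.height);
        opencv.useColor();

        maskImg = createImage(opencv.width, opencv.height, RGB);

        tattooImg = new Movie(this, "Tattoo 2.mov");
      }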
