How to capture web cam frames and analyse them
in Core Library Questions • 2 years ago
I'm trying to get a set of frames from a webcam and analyse them: take an angled slice through the set. I've got the capturing and the drawing of a range of frames working, but it seems I'm only getting one frame captured and drawn over the whole range. Can anyone look at the capturing, and specifically at the saving of the frames, and tell me what I'm doing wrong? (A small pixel-copy sketch for comparison follows the code below.)
import controlP5.*;
import processing.video.*;
import toxi.geom.*;

ArrayDeque<PImage> imagesDeck;
PImage[] images = new PImage[200];
PImage slice;
PVector sliceVector;
PVector[][] sliceVectorSpace = new PVector[800][600];
ControlP5 controlP5;

Slider2D sx;
Slider2D sy;
Slider2D sz;

Capture video;

int deckSize = 40;
void setup() {
  size(1600, 1000, P3D);
  noStroke();
  background(0);
  frameRate(10);
  imagesDeck = new ArrayDeque<PImage>(200);
  controlP5 = new ControlP5(this);
  sx = controlP5.addSlider2D("xaxis", 0, 0, 400, 10);
  sx.setArrayValue(new float[] {50, 50});

  sy = controlP5.addSlider2D("yaxis", 0, 40, 400, 10);
  sy.setArrayValue(new float[] {50, 50});
  sz = controlP5.addSlider2D("zaxis", 0, 80, 400, 10);
  sz.setArrayValue(new float[] {50, 50});

  slice = createImage(800, 600, RGB);
  video = new Capture(this, 800, 600, 5);
}

void draw() {
  controlP5.draw();
  video.read();
  video.loadPixels();
  addImage(video.pixels);
  translate((width / 2) + -50, height / 2, 10);
  rotateY(PI/1.3);
  drawStream();
  //drawSlice();
}

void drawSlice()
{
  updateSlice();
  if (slice != null)
  {
    beginShape();
    slice.loadPixels();
    texture(slice);
    vertex(-400, -400, 0, 0, 0);
    vertex(400, -400, 0, 800, 0);
    vertex(400, 300, 0, 800, 600);
    vertex(-400, 300, 0, 0, 600);
    endShape();
  }
}

void updateSlice()
{
  loadPixels();
  int imagesLength = images.length;

  Plane plane = new Plane(new Vec3D(0, 2, 8), new Vec3D(0.5, 0.9, 0.1).normalize());
  Ray3D ray = new Ray3D(new Vec3D(0, 0, 0), new Vec3D(0.3, 0.6, 0.8).normalize());
  for (int i = 0; i < imagesLength; i++)
  {
    for (int x = 0; x < 800; x++)
    {
      for (int y = 0; y < 600; y++)
      {
        ray.x = x;
        ray.y = y;
        float dirtyDepth = plane.intersectRayDistance(ray);
        int depth = round(dirtyDepth);
        //println("depth:" + depth);
        if (-1 < depth && depth < imagesLength)
        {
          //println("x:" + x + ":y:" + y + ":depth:" + depth);
          //println("imagesLength:" + imagesLength);
          //println("i:" + i);
          loadPixels();
          //println("slice:" + slice.pixels.length);
          //println("slice.pixels:" + slice.pixels[0]);
          if (images[i] != null)
          {
            //println("image");
            images[i].loadPixels();
            slice.set(x, y, images[i].get(x, y));
          }
        }
      }
    }
  }
}

void drawStream()
{
  PImage[] imageArray = imagesDeck.toArray(new PImage[0]);
  int imageLength = imageArray.length;

  for (int i = 0; i < imageLength; i++)
  {
    if (imageArray != null)
    {
      PImage wrkImage = imageArray[i];
      wrkImage.loadPixels();
      beginShape();
      textureMode(IMAGE);
      texture(wrkImage);
      vertex(-400, -400, 0, 0, 0);
      vertex(400, -400, 0, 800, 0);
      vertex(400, 300, 0, 800, 600);
      vertex(-400, 300, 0, 0, 600);
      endShape();
    }

    translate(0, 0, 25);
  }
}

void addImage(int[] wrkPixels)
{
  if (imagesDeck.size() > deckSize)
  {
    imagesDeck.pollLast();
  }
  PImage wrkImg = createImage(video.width, video.height, RGB);
  wrkImg.loadPixels();
  wrkImg.pixels = wrkPixels;
  wrkImg.updatePixels();
  imagesDeck.push(wrkImg);
}
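For comparison, this is roughly the direction I'm considering for the saving step: copying the pixel values out of the Capture's buffer instead of keeping a reference to it, so each stored PImage owns its own data. This is only a sketch against the standard processing.video API; the helper name addImageCopy is a placeholder and not part of the sketch above.

// Sketch only: snapshot the current frame into an independent PImage
// by copying the pixel values rather than assigning the pixels[] reference.
void addImageCopy(Capture cam)
{
  if (imagesDeck.size() > deckSize)
  {
    imagesDeck.pollLast();
  }
  PImage snapshot = createImage(cam.width, cam.height, RGB);
  cam.loadPixels();
  snapshot.loadPixels();
  arrayCopy(cam.pixels, snapshot.pixels); // copies values, not the reference
  snapshot.updatePixels();
  imagesDeck.push(snapshot);
}

(Calling cam.get() with no arguments should also return an independent copy of the current frame.)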