How to create lines between particles created on mousePressed and the points of a point cloud (KinectPV2)?


Hi, sorry for my English, I'm French! :)

I'm working on a project using smartphones, a Kinect and the KinectPV2 library.

So I have two parts: the client and the server. On my computer (which is my server) I can capture my environment with my Kinect v2 and KinectPV2, using the "Point Cloud" example found here: http://codigogenerativo.com/code/kinectpv2-k4w2-processing-library/

With my smartphone (the client) I can also send particles to my computer. So on my computer there is a point cloud that represents my environment as captured by the Kinect, plus some particles generated from my phone.
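
For reference, the client side essentially just sends the finger position over OSC. Here is a minimal sketch of what it could look like (assuming the phone also runs a Processing sketch with oscP5; my real client code is not shown here, and the server IP below is only a placeholder). It uses the same address pattern and integer arguments that the oscEvent() handler at the bottom of my code expects:

// minimal client-side sketch (assumption: the phone runs Processing with oscP5)
import oscP5.*;
import netP5.*;

OscP5 oscP5;
NetAddress server;

void setup() {
  size(600, 900);
  oscP5 = new OscP5(this, 32000);                  // listen locally on port 32000
  server = new NetAddress("192.168.0.100", 12000); // placeholder IP of the computer, port 12000 as in the receiver
}

void draw() {
  background(0);
}

void mouseDragged() {
  // same address pattern and argument types as the oscEvent() handler on the server
  OscMessage msg = new OscMessage("/positionsCurseur");
  msg.add(mouseX);  // int, read with .intValue() on the server
  msg.add(mouseY);
  oscP5.send(msg, server);
}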

I would like to create links between all of these points and particles. For example, when I create particles, if one of them is close to a point of the cloud, a line should be drawn between them. I know this involves dist(), and I think I have to test these collisions inside the loops, but I don't really know where and how to do that in my code. Help please if you can :)
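
Here is roughly what I have in mind (an untested sketch, not working code: the name linkParticlesToCloud, the sampling step and the distance threshold are placeholders I made up, and it relies on the globals kinect, parts, scaleVal, zval and a from the code below). The idea is to re-apply the same scene transform as in draw(), project a subsample of the cloud points to screen coordinates with screenX()/screenY(), test dist() against each particle, and then draw the links once we are back in plain screen space, where the particles live. I would call it in draw() right after updateParticles(), before the translate(), for example linkParticlesToCloud(kinect.getPointCloudDepthPos(), 60);

// untested sketch: call in draw() right after updateParticles(), before translate()
void linkParticlesToCloud(FloatBuffer pcb, float maxLinkDist) {
  // endpoints of the links, collected in screen space
  ArrayList<PVector> cloudEnds = new ArrayList<PVector>();
  ArrayList<PVector> partEnds  = new ArrayList<PVector>();

  pushMatrix();
  // reproduce the scene transform used to display the point cloud
  translate(width / 2, height / 2, zval);
  scale(scaleVal, -1 * scaleVal, scaleVal);
  rotate(a, 0.0f, 1.0f, 0.0f);

  int step = 200;  // only test every Nth point, the full cloud is far too dense
  int nPoints = kinect.WIDTHDepth * kinect.HEIGHTDepth;
  for (int i = 0; i < nPoints; i += step) {
    float px = pcb.get(i*3 + 0);
    float py = pcb.get(i*3 + 1);
    float pz = pcb.get(i*3 + 2);
    if (pz == 0) continue;  // skip samples with no depth (they seem to sit at the origin)

    // project the 3D cloud point to its on-screen position
    float sx = screenX(px, py, pz);
    float sy = screenY(px, py, pz);

    // the particles already live in screen coordinates, so dist() works directly
    for (Part p : parts) {
      if (dist(sx, sy, p.position.x, p.position.y) < maxLinkDist) {
        cloudEnds.add(new PVector(sx, sy));
        partEnds.add(new PVector(p.position.x, p.position.y));
      }
    }
  }
  popMatrix();

  // back in plain screen space: draw the collected links
  stroke(255, 120);
  for (int i = 0; i < cloudEnds.size(); i++) {
    line(cloudEnds.get(i).x, cloudEnds.get(i).y, partEnds.get(i).x, partEnds.get(i).y);
  }
  noStroke();
}

I collect the endpoints first and draw them after popMatrix() so that the lines end up in the same coordinate space as the particles. Is that a reasonable approach, or should the test happen directly inside the XYZ loop of draw()?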

Here is my code:

//kinect//


//kinect libraries
import java.nio.*;
import KinectPV2.*;

KinectPV2 kinect;


int  vertLoc;

//transformations
float a = 0;
int zval = -50;
float scaleVal = 320;


//value to scale the depth point when accessing each individual point in the PC.
float scaleDepthPoint = 100.0;

//Distance threshold
int maxD = 4000; // 4m
int minD = 0;  //  0m

//openGL object and shader
PGL     pgl;
PShader sh;

//VBO buffer location in the GPU
int vertexVboId;



//touchEvent receiver

// import the OscP5 library
import oscP5.*;
import netP5.*;

//create an OscP5 object called 'oscP5'
OscP5 oscP5;
NetAddress myRemoteLocation;

//variables that will hold the x and y positions of the finger on the smartphone screens
float positionX, positionY, positionX02, positionY02, positionX03, positionY03;

// particle creation
import java.util.Iterator;
ArrayList<Part> parts;
float w = 600;
float h = 900;
float g = 1.8;
float noiseoff=0;
//GifMaker gifExport;
color bgc = #495455;
boolean record=false;




public void setup() {
  size(1024, 848, P3D);

  kinect = new KinectPV2(this);

  kinect.enableDepthImg(true);

  kinect.enablePointCloud(true);

  kinect.setLowThresholdPC(minD);
  kinect.setHighThresholdPC(maxD);

  kinect.init();

  sh = loadShader("frag.glsl", "vert.glsl");

  PGL pgl = beginPGL();

  IntBuffer intBuffer = IntBuffer.allocate(1);
  pgl.genBuffers(1, intBuffer);

  //memory location of the VBO
  vertexVboId = intBuffer.get(0);

  endPGL();

  //touchEvent receiver
  //set up the oscP5 object that will receive the data
  oscP5 = new OscP5(this, 12000);
  myRemoteLocation = new NetAddress("192.168.0.101", 32000);
  //initially, place our circle at the center
  positionX = width/2;
  positionY = height/2;
  //disable stroke drawing
  noStroke();

  // particle setup
  smooth();
  background(bgc);
  frameRate(25);
  parts = new ArrayList<Part>();
}

public void draw() {
  background(0);

  //draw the depth capture images
  //image(kinect.getDepthImage(), 0, 0, 320, 240);
  //image(kinect.getPointCloudDepthImage(), 320, 0, 320, 240);


  // particle creation: if record == true, call the particle-creation functions
  if (record) {
    paint();
    paint02();
    paint03();
  }

  updateParticles();
  fill(255);


  //translate the scene to the center
  translate(width / 2, height / 2, zval);
  scale(scaleVal, -1 * scaleVal, scaleVal);
  rotate(a, 0.0f, 1.0f, 0.0f);

  // Thresholds of the point cloud.
  kinect.setLowThresholdPC(minD);
  kinect.setHighThresholdPC(maxD);

  //get the points in 3d space
  FloatBuffer pointCloudBuffer = kinect.getPointCloudDepthPos();

  // obtain the XYZ values of the point cloud (x, y and z are currently unused)

  stroke(0, 0, 0);
  for (int i = 0; i < kinect.WIDTHDepth * kinect.HEIGHTDepth; i+=3) {
    float x = pointCloudBuffer.get(i*3 + 0) * scaleDepthPoint;
    float y = pointCloudBuffer.get(i*3 + 1) * scaleDepthPoint;
    float z = pointCloudBuffer.get(i*3 + 2) * scaleDepthPoint;
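    // (untested idea) this loop could be another place for the link test: the scene
    // transform is already active here, so screenX()/screenY() on the raw buffer
    // values (without the scaleDepthPoint factor, which the rendered cloud does not
    // use) would give the on-screen position of the point, e.g.
    //   float sx = screenX(x / scaleDepthPoint, y / scaleDepthPoint, z / scaleDepthPoint);
    //   float sy = screenY(x / scaleDepthPoint, y / scaleDepthPoint, z / scaleDepthPoint);
    // and dist(sx, sy, p.position.x, p.position.y) could then be tested for each Part p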
  }


  //begin openGL calls and bind the shader
  pgl = beginPGL();
  sh.bind();

  //obtain the vertex location in the shaders.
  //useful to know what shader to use when drawing the vertex positions
  vertLoc = pgl.getAttribLocation(sh.glProgram, "vertex");

  pgl.enableVertexAttribArray(vertLoc);

  //data size times 3 for each XYZ coordinate
  int vertData = kinect.WIDTHDepth * kinect.HEIGHTDepth * 3;

  //bind vertex positions to the VBO
  {
    pgl.bindBuffer(PGL.ARRAY_BUFFER, vertexVboId);
    // fill VBO with data
    pgl.bufferData(PGL.ARRAY_BUFFER, Float.BYTES * vertData, pointCloudBuffer, PGL.DYNAMIC_DRAW);
    // associate currently bound VBO with shader attribute
    pgl.vertexAttribPointer(vertLoc, 3, PGL.FLOAT, false, Float.BYTES * 3, 0 );
  }

  // unbind VBOs
  pgl.bindBuffer(PGL.ARRAY_BUFFER, 0);

  //draw the point buffer as a set of POINTS
  pgl.drawArrays(PGL.POINTS, 0, vertData);

  //disable the vertex positions
  pgl.disableVertexAttribArray(vertLoc);

  //finish drawing
  sh.unbind();
  endPGL();



  stroke(255, 0, 0);
  text(frameRate, 50, height - 50);
}


//touchEvent receiver
// particle-creation function for smartphone 01
void paint() {
  float tx = positionX;
  float ty = positionY;
  color c1 = #FFFFFF;
  color c2 = #FFFFFF;

  float x = random(w);
  float y = random(h);
  float t=15+random(20);

  color c = lerpColor(c1, c2, random(1));
  Part p = new Part(tx, ty, random(5)+1, c);  
  p.velocity.x=0;
  p.velocity.y=0;
  p.acceleration.x=random(1)-.5;
  p.acceleration.y=random(1)-.5;
  p.life=1;
  parts.add(p);
}

// particle-creation function for smartphone 02
void paint02() {
  float tx = positionX02;
  float ty = positionY02;
  color c1 = #FFFFFF;
  color c2 = #FFFFFF;

  float x = random(w);
  float y = random(h);
  float t=15+random(20);

  color c = lerpColor(c1, c2, random(1));
  Part p = new Part(tx, ty, random(5)+1, c);  
  p.velocity.x=0;
  p.velocity.y=0;
  p.acceleration.x=random(1)-.5;
  p.acceleration.y=random(1)-.5;
  p.life=1;
  parts.add(p);
}

// particle-creation function for smartphone 03
void paint03() {
  float tx = positionX03;
  float ty = positionY03;
  color c1 = #FFFFFF;
  color c2 = #FFFFFF;

  float x = random(w);
  float y = random(h);
  float t=15+random(20);

  color c = lerpColor(c1, c2, random(1));
  Part p = new Part(tx, ty, random(5)+1, c);  
  p.velocity.x=0;
  p.velocity.y=0;
  p.acceleration.x=random(1)-.5;
  p.acceleration.y=random(1)-.5;
  p.life=1;
  parts.add(p);
}


void updateParticles() {
  // update and remove dead particles (iterate backwards so removal is safe)
  for (int i = parts.size()-1; i >= 0; i--) {
    Part p = parts.get(i);

    p.update();
    //p.render();
    if (p.life<0) {
      parts.remove(p);
    }
  }
  // render the remaining particles
  for (Part p : parts) {
    // p.update();
    p.render();
  }
}

//particle class

class Part {
  float life = 1;
  float maxspeed=10;
  // float g=1.8;
  PVector position = new PVector(0, 0);
  PVector velocity = new PVector(0, 0);
  PVector acceleration = new PVector(0, 0);
  float size = 10;
  color c;
  float min_d = 90;
  Part nei = null;
  Part(float x, float y, float size, color c) {
    position.x=x;
    position.y=y;
    this.size = size;
    this.c=c;
  }
  void update() {
    life-=.01;

    // size=random(5);
    if (position.x>w) {
      position.x=0;
    } else if (position.x<0) {
      position.x=w;
    }
    if (position.y>h) {
      position.y=0;
    } else if (position.y<0) {
      position.y=h;
    }  

    //collision between particles and generation of the lines between them
    for (Part p : parts) {
      if (p!=this) {       
        float d = PVector.dist(p.position, position);
        if (d<min_d) {
          acceleration = PVector.sub(p.position, position);
          acceleration.normalize();
          acceleration.mult(.1);

          pushMatrix();
          translate(position.x, position.y);
          //stroke(c, 110-d/min_d*100);
          stroke(255, 1+life*155);
          //strokeWeight(d/min_d*2);
          //    line(-velocity.x, -velocity.y, 0, 0);
          line(p.position.x-position.x, p.position.y-position.y, 0, 0);
          popMatrix();
        }
      }
    }
    velocity.add(acceleration);
    velocity.limit(3);

    position.add(velocity);
  }
  void render() {   
    pushMatrix();
    translate(position.x, position.y);


    //stroke(c);
    //    line(-velocity.x, -velocity.y, 0, 0);
    //line(nei.position.x-position.x, nei.position.y-position.y, 0, 0);
    noStroke();

    fill(c, 3+life*255);
    //fill(c);
    //rectMode(CENTER);
    //rect(0, 0, size+6, size+6);
    ellipseMode(CENTER);
    // particle size
    ellipse(0, 0, size+1, size+1);

    popMatrix();
  }
  //
}



// oscEvent method that listens for OSC events
void oscEvent(OscMessage theOscMessage) {
  // if the applet receives an OSC message with the address pattern "/positionsCurseur"
  if (theOscMessage.checkAddrPattern("/positionsCurseur")==true) {
    //assign the value at index 0 of the OSC message (an integer, .intValue())
    //to the variable positionX, which will be used as the x coordinate of our circle
    positionX = theOscMessage.get(0).intValue();
    //assign the value at index 1 of the OSC message (an integer, .intValue())
    //to the variable positionY, which will be used as the y coordinate of our circle
    positionY = theOscMessage.get(1).intValue();    
    record=true;
  } else if (theOscMessage.checkAddrPattern("/positionsCurseur02")==true) {
    //same thing for smartphone 02
    positionX02 = theOscMessage.get(0).intValue();
    positionY02 = theOscMessage.get(1).intValue();    
    record=true;
  } else if (theOscMessage.checkAddrPattern("/positionsCurseur03")==true) {
    //same thing for smartphone 03
    positionX03 = theOscMessage.get(0).intValue();
    positionY03 = theOscMessage.get(1).intValue();    
    record=true;
  }
}




/*

 public void mousePressed() {
 // saveFrame();
 }


 public void keyPressed() {
 if (key == 'a') {
 zval +=10;
 println("Z Value "+zval);
 }
 if (key == 's') {
 zval -= 10;
 println("Z Value "+zval);
 }

 if (key == 'z') {
 scaleVal += 0.1;
 println("Scale scene: "+scaleVal);
 }
 if (key == 'x') {
 scaleVal -= 0.1;
 println("Scale scene: "+scaleVal);
 }

 if (key == 'q') {
 a += 0.1;
 println("rotate scene: "+ a);
 }
 if (key == 'w') {
 a -= 0.1;
 println("rotate scene: "+a);
 }

 if (key == '1') {
 minD += 10;
 println("Change min: "+minD);
 }

 if (key == '2') {
 minD -= 10;
 println("Change min: "+minD);
 }

 if (key == '3') {
 maxD += 10;
 println("Change max: "+maxD);
 }

 if (key == '4') {
 maxD -= 10;
 println("Change max: "+maxD);
 }

 if(key == 'c'){
 scaleDepthPoint += 1;
 println("Change Scale Depth Point: "+scaleDepthPoint);
 }

 if(key == 'v'){
 scaleDepthPoint -= 1;
 println("Change Scale Depth Point: "+scaleDepthPoint);
 }

 }

 */


/*
Simple class that manages saving each FloatBuffer and writes the data into an OBJ file
 */
class FrameBuffer {

  FloatBuffer frame;

  //id of the frame
  int frameId;

  FrameBuffer(FloatBuffer f) {
    frame = clone(f);
  }

  void setFrameId(int fId) {
    frameId = fId;
  }

  /*
  Writes the OBJ file
   */
  void saveOBJFrame() {
    int vertData = kinect.WIDTHDepth * kinect.HEIGHTDepth; // 512 * 424 depth points per frame
    String[] points = new String[vertData];

    //Iterate through all the XYZ points
    for (int i = 0; i < vertData; i++) {
      float x =  frame.get(i*3 + 0);
      float y =  frame.get(i*3 + 1);
      float z =  frame.get(i*3 + 2);
      points[i] = "v "+x+" "+y+" "+z;
    }

    saveStrings("data/frame0"+frameId+".obj", points);
    println("Done Saving Frame "+frameId);
  }

  //Simple function that copies the FloatBuffer to another FloatBuffer
  public  FloatBuffer clone(FloatBuffer original) {
    FloatBuffer clone = FloatBuffer.allocate(original.capacity());
    original.rewind();//copy from the beginning
    clone.put(original);
    original.rewind();
    clone.flip();
    return clone;
  }
}

//camera information based on the Kinect v2 hardware
static class CameraParams {
  static float cx = 254.878f;
  static float cy = 205.395f;
  static float fx = 365.456f;
  static float fy = 365.456f;
  static float k1 = 0.0905474;
  static float k2 = -0.26819;
  static float k3 = 0.0950862;
  static float p1 = 0.0;
  static float p2 = 0.0;
}

//calculate the xyz camera position based on the depth data
PVector depthToPointCloudPos(int x, int y, float depthValue) {
  PVector point = new PVector();
  point.z = (depthValue);// / (1.0f); // Convert from mm to meters
  point.x = (x - CameraParams.cx) * point.z / CameraParams.fx;
  point.y = (y - CameraParams.cy) * point.z / CameraParams.fy;
  return point;
}

If someone knows how to detect these collisions and create lines between my particles (created with the paint() functions and the Part class) and the points of the cloud, I will be thankful! :)
