[SOLVED] Attractor (x, y) controlled by hand position. SimpleOpenNI, Kinect.

edited March 2015 in Kinect

Hi, I am looking to use the (x, y) position of my hand to control the (x, y) position of an attractor in my sketch, so that wherever the user moves their hand, the on-screen particles are attracted towards or repelled away from it.

I have not been coding for very long and am unsure of how to do this. I thought I had it a couple of times, but nothing seems to work.

For a better idea of how I want the code to work, change "myAttractor.x = mapHandVec.x;" to "myAttractor.x = mouseX;", and likewise "myAttractor.y = mapHandVec.y;" to "myAttractor.y = mouseY;".
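
In other words, the mouse-driven version I am trying to replicate looks roughly like this in draw() (just the relevant lines, with the Kinect calls taken out):

void draw() {
  // the attractor follows the cursor instead of the hand
  myAttractor.x = mouseX;
  myAttractor.y = mouseY;

  for (int i = 0; i < myNodes.length; i++) {
    myAttractor.attract(myNodes[i]);
    myNodes[i].update();
    rect(myNodes[i].x, myNodes[i].y, 1, 1);
  }
}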

Both libraries used are available for download from within the Processing library manager.

Any help/replies would be super appreciated!

Thanks in advance,

Ross

Windows 8, Processing 2.2.1


Code page 1:

class Attractor {
  // position
  float x=0, y=0;

  // radius of impact
  float radius = 200;
  // strength: positive for attraction, negative for repulsion
  float strength = 1; 
  // parameter that influences the form of the function
  float ramp = 0.5;    //// 0.01 - 0.99


  Attractor(float theX, float theY) {
    x = theX;
    y = theY;
  }


  void attract(Node theNode) {
    // calculate distance
    float dx = x - theNode.x;
    float dy = y - theNode.y;
    float d = mag(dx, dy);

    if (d > 0 && d < radius) {
      // calculate force
      float s = pow(d / radius, 1 / ramp);
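      // (this shaping makes the force fall to exactly zero at d == radius)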
      float f = s * 9 * strength * (1 / (s + 1) + ((s - 3) / 4)) / d;

      // apply force to node velocity
      theNode.velocity.x += dx * f;
      theNode.velocity.y += dy * f;
    }
  }

}

Code page 2:

import generativedesign.*;
import SimpleOpenNI.*;

SimpleOpenNI context;

PVector handVec = new PVector();
PVector mapHandVec = new PVector();

// initial parameters
int xCount = 70;
int yCount = 70;
float gridSize = 600;

// nodes array
Node[] myNodes = new Node[xCount*yCount];

// attractor
Attractor myAttractor;


// image output
boolean saveOneFrame = false;
boolean saveToPrint = false;

void setup() { 

  context = new SimpleOpenNI(this);
  context.setMirror(true);
  context.enableDepth();
  context.enableHand();

  context.startGesture(SimpleOpenNI.GESTURE_WAVE);

  size(640,640);

  // setup drawing parameters
  colorMode(RGB, 255, 255, 255, 100);
  smooth();
  noStroke();
  fill(0);

  cursor(CROSS);

  // setup node grid
  initGrid();

  // setup attractor
  myAttractor = new Attractor(0, 0);
  myAttractor.strength = -3;
  myAttractor.ramp = 2;
}

//end void setup

void draw() {

  context.update();
  // convert the 3D real-world hand position into 2D screen (projective) coordinates
  context.convertRealWorldToProjective(handVec, mapHandVec);

 //trying to map values to attractor (very unsuccessfully)

  myAttractor.x = mapHandVec.x;
  myAttractor.y = mapHandVec.y;

  for (int i = 0; i < myNodes.length; i++) {
    myAttractor.attract(myNodes[i]);
    myNodes[i].update();

    // draw nodes

    if (saveToPrint) {
      ellipse(myNodes[i].x, myNodes[i].y, 1, 1);
      if (i%1000 == 0) {
        println("saving to pdf - step " + int(i/1000 + 1) + "/" + int(myNodes.length / 1000));
      }
    }
    else {
      rect(myNodes[i].x, myNodes[i].y, 1, 1);
    }
  }
}

  //end void draw

void initGrid() {
  int i = 0;
  for (int y = 0; y < yCount; y++) {
    for (int x = 0; x < xCount; x++) {
      float xPos = x*(gridSize/(xCount-1))+(width-gridSize)/2;
      float yPos = y*(gridSize/(yCount-1))+(height-gridSize)/2;
      myNodes[i] = new Node(xPos, yPos);
      myNodes[i].setBoundary(0, 0, width, height);
      myNodes[i].setDamping(0.8);  //// 0.0 - 1.0
      i++;
    }
  }
}

//end void initGrid


void keyPressed() {
  if (key=='r' || key=='R') {
    initGrid();
    background(230);
  }
}

void onCreateHands(int handId, PVector pos, float time)
{
  println("onCreateHands - handId: " + handId + ", pos: " + pos + ", time:" + time);
  handVec = pos;
}

void onUpdateHands(int handId, PVector pos, float time)
{
  println("onUpdateHandsCb - handId: " + handId + ", pos: " + pos + ", time:" + time);
  handVec = pos;
}
void onRecognizeGesture(String strGesture, PVector idPosition, PVector endPosition)
{
  println("onRecognizeGesture - strGesture: " + strGesture + ", idPosition: " + idPosition + ", endPosition:" + endPosition);

  context.endGesture(SimpleOpenNI.GESTURE_WAVE);
  context.startTrackingHand(endPosition);
}

Answers

  • I figured it out. The first page of code remains unchanged, but I'll post the updated second page.

    //gravity
    import generativedesign.*;
    import processing.pdf.*;
    
    //kinect
    import java.util.Map;
    import java.util.Iterator;
    
    import SimpleOpenNI.*;
    
    // gravity
    int xCount = 70;
    int yCount = 70;
    float gridSize = 600; 
    
    Node[] myNodes = new Node[xCount*yCount];
    Attractor myAttractor; 
    
    // kinect
    SimpleOpenNI context;
    int handVecListSize = 20;
    Map<Integer,ArrayList<PVector>>  handPathList = new HashMap<Integer,ArrayList<PVector>>();
    color[]       userClr = new color[]{ color(255,0,0),
                                         color(0,255,0),
                                         color(0,0,255),
                                         color(255,255,0),
                                         color(255,0,255),
                                         color(0,255,255)
                                       };
    
    
    void setup() {
      size(640,640);
      background(230);
      smooth();
      noStroke();
      fill(0);
    
      //kinect
      context = new SimpleOpenNI(this);
      if(context.isInit() == false)
      {
         println("Can't init SimpleOpenNI, maybe the camera is not connected!"); 
         exit();
         return;  
      }  
      context.enableDepth();
      context.setMirror(true);
      context.enableHand();
      context.startGesture(SimpleOpenNI.GESTURE_WAVE);
    
      //gravity
    
      initGrid();
    
      myAttractor = new Attractor(0, 0);
      myAttractor.strength = -3;
      myAttractor.ramp = 2;
    }
    
    void draw() {
      //kinect
    
      context.update();
    
      Iterator itr = handPathList.entrySet().iterator();
      while(itr.hasNext())
      {
        Map.Entry mapEntry = (Map.Entry)itr.next();
        int handId = (Integer)mapEntry.getKey();
        ArrayList<PVector> vecList = (ArrayList<PVector>)mapEntry.getValue();
        PVector p;
        PVector p2d = new PVector();
        Iterator itrVec = vecList.iterator();
        p = (PVector) itrVec.next();
        context.convertRealWorldToProjective(p, p2d);

        //gravity
        myAttractor.x = p2d.x;
        myAttractor.y = p2d.y;

        //kinect
        p = vecList.get(0);
        context.convertRealWorldToProjective(p, p2d);
        //point(p2d.x,p2d.y);
      }
      //gravity
      for (int i = 0; i < myNodes.length; i++) {
        myAttractor.attract(myNodes[i]);
        myNodes[i].update();
        rect(myNodes[i].x, myNodes[i].y, 1, 1);
      }
    }
    
    void initGrid() {
      int i = 0;
      for (int y = 0; y < yCount; y++) {
        for (int x = 0; x < xCount; x++) {
          float xPos = x*(gridSize/(xCount-1))+(width-gridSize)/2;
          float yPos = y*(gridSize/(yCount-1))+(height-gridSize)/2;
          myNodes[i] = new Node(xPos, yPos);
          myNodes[i].setBoundary(0, 0, width, height);
          myNodes[i].setDamping(0.8);  //// 0.0 - 1.0
          i++;
        }
      }
    }
    
    
    void keyPressed() {
      if (key=='r' || key=='R') {
        initGrid();
        background(230);
      }
    }
    
    void onNewHand(SimpleOpenNI curContext,int handId,PVector pos)
    {
      println("onNewHand - handId: " + handId + ", pos: " + pos);
    
      ArrayList<PVector> vecList = new ArrayList<PVector>();
      vecList.add(pos);
    
      handPathList.put(handId,vecList);
    }
    
    void onTrackedHand(SimpleOpenNI curContext,int handId,PVector pos)
    {
      //println("onTrackedHand - handId: " + handId + ", pos: " + pos );
    
      ArrayList<PVector> vecList = handPathList.get(handId);
      if(vecList != null)
      {
        // newest point goes to the front, so index 0 is always the current hand position
        vecList.add(0,pos);
        if(vecList.size() >= handVecListSize)
          // remove the last (oldest) point
          vecList.remove(vecList.size()-1);
      }
    }
    
    void onLostHand(SimpleOpenNI curContext,int handId)
    {
      println("onLostHand - handId: " + handId);
      handPathList.remove(handId);
    }
    
    // -----------------------------------------------------------------
    // gesture events
    
    void onCompletedGesture(SimpleOpenNI curContext,int gestureType, PVector pos)
    {
      println("onCompletedGesture - gestureType: " + gestureType + ", pos: " + pos);
    
      int handId = context.startTrackingHand(pos);
      println("hand stracked: " + handId);
    }
    
  • edited November 2014

    I'm looking to add some code so that, while the sketch is running, no attraction happens until the user waves (void onCompletedGesture) and takes control of the attractor's (x, y) position, and so that the attraction stops when the user stops interacting with the sketch (void onLostHand).

    I have specified on the first page that 'float strength = 1;' and was wondering if I could redefine this to = 0 initially, to = 1 in onCompletedGesture, and back to = 0 in onLostHand, something like the sketch below. I'm not sure if this is possible or how to do it, so any help would be great. Thank you.
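
    A rough sketch of what I mean (this is just an illustration, assuming the attractor's strength field can simply be reassigned from the existing callbacks; I use -3 here to match the repulsion value in the sketch above):

    // in setup(), after creating the attractor: start with the force switched off
    myAttractor.strength = 0;

    void onCompletedGesture(SimpleOpenNI curContext, int gestureType, PVector pos)
    {
      println("onCompletedGesture - gestureType: " + gestureType + ", pos: " + pos);
      int handId = context.startTrackingHand(pos);
      // the user has waved, so switch the force on (-3 = repulsion)
      myAttractor.strength = -3;
    }

    void onLostHand(SimpleOpenNI curContext, int handId)
    {
      println("onLostHand - handId: " + handId);
      handPathList.remove(handId);
      // the user has stopped interacting, so switch the force off again
      myAttractor.strength = 0;
    }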

  • Hello, can you help me with working on the Kinect? I'm very new to this; it is for my final year project. I want to change the graph for the PWM data that is sent from the Kinect to the Arduino. Can I get your email?
