Tracking-Implementation // SimpleOpenNI Library
in
Contributed Library Questions
•
9 months ago
Hello @all,
hope you've all had great holidays. I would like to control a video sequence (its playback position depending on the viewer's position) while tracking myself via an Xbox Kinect. Currently the code works with mouseX. How is it possible to replace the mouseX information with my tracking information?
Thanks in advance for your feedback.
Best regards
buc
Below the code:
- import SimpleOpenNI.*;
- import org.json.*;
- import processing.video.*;
- SimpleOpenNI kinect;
- int BeamerWidth = 1024;
- int BeamerHeight = 768;
- int currentVid=0;
- boolean tracking;
- //float steering;
- //float ux;
- final static byte videosNum = 2;
- final static Movie[] allVideos = new Movie[videosNum];
- void setup() {
- size(BeamerWidth, BeamerHeight);
- smooth();
- background(0);
- for (int i=0; i<videosNum; i++)
- allVideos[i] = new Movie (this, "probesequenz0"+(i+1)+".ogg");
- kinect = new SimpleOpenNI (this);
- kinect.enableDepth();
- kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
- }
- void draw() {
- // A new time position is calculated using the current mouse location:
- float f = constrain((float)mouseX / width, 0, 1);
- float destination = allVideos[currentVid].duration() * f;
- float current = allVideos[currentVid].time();
- println("current: " + current + ", destination: " + destination);
- if (current < destination)
- allVideos[currentVid].speed(1);
- else
- allVideos[currentVid].speed(-1);
- allVideos[currentVid].play();
- println("speed: " + 15.0*abs(destination-current)/abs(destination+current)+1);
- image(allVideos[currentVid], 0, 0, width, height);
- rect(0, 0, allVideos[currentVid].time()/allVideos[currentVid].duration()*width, 10);
- frame.setTitle("scratch (" + round(frameRate) + " fps)");
- if (keyPressed) {
- if (key == '1') {
- currentVid=1;
- }
- else {
- currentVid=0;
- }
- }
- }
- float steer ()
- {
- kinect.update();
- tracking = false;
- for (int i = 1; i<6; i++)
- {
- if ( kinect.isTrackingSkeleton(i) )
- {
- tracking = true;
- PVector jointNeck = new PVector();
- kinect.getJointPositionSkeleton(i,SimpleOpenNI.SKEL_NECK, jointNeck);
- //if (jointLeftHand.z > jointRightHand.z) return (-jointRightHand.x);
- //return (-jointLeftHand.x);
- }
- }
- //return(0.0);
- }
- void onNewUser (int userID) {
- println ("start pose detection");
- kinect.requestCalibrationSkeleton(userID,true);
- }
- void onEndCalibration (int userID, boolean successful) {
- if (successful) {
- println(" User Calibrated!!!");
- kinect.startTrackingSkeleton(userID);
- }
- else {
- println(" Failed to calibrate user !!!");
- kinect.startPoseDetection("Psi", userID);
- }
- }
- void onStartPose(String pose, int userID) {
- println("Started pose for user");
- kinect.stopPoseDetection(userID);
- kinect.requestCalibrationSkeleton(userID, true);
- }
- void movieEvent(Movie allVideos) {
- allVideos.read();
- }
1