Getting "XN_STREAM_PROP..." errors in a new Record/Play sketch for Kinect


Hello, I have a Kinect that works, and a friend and I are getting started building sketches to capture what I'm after. Here is the situation: I want to capture hand gestures in spontaneous movement, like during a conversation, and eventually record a detailed depth map over time; we haven't found anything that does that yet. We started by trying to combine the Hands3d and Record/Play examples. Recording works with hand recognition, but on playback we get two errors and no hand recognition.

We get the following errors when I run playback:

SimpleOpenNI Version 1.96
After initialization:
record: /Users/.........../Processing/first_hand_sketch/data/recording.oni
Couldn't get XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE
Couldn't get XN_STREAM_PROPERTY_MAX_SHIFT

I can't find anything about these errors, and it seems like we might be missing some NODE_ in the recording setup.

My friend knows Java but is new to Processing, and I am still new to Processing myself, so I can't see much. Is there an easy fix, or are we reinventing the wheel and there is already a sketch that does this?

So far the code looks like this:

/* --------------------------------------------------------------------------
 * A simple sketch to get started recording hands and depth, built with
 * segments of:
 *
 * SimpleOpenNI Hands3d Test
 * prog: Max Rheiner / Interaction Design / Zhdk / http://iad.zhdk.ch/
 * date: 12/12/2012 (m/d/y)
 *
 * SimpleOpenNI Record/Play Test
 * prog: Max Rheiner / Interaction Design / Zhdk / http://iad.zhdk.ch/
 * date: 12/12/2012 (m/d/y)
 * --------------------------------------------------------------------------
 * Processing Wrapper for the OpenNI/Kinect 2 library
 * http://code.google.com/p/simple-openni
 * -------------------------------------------------------------------------- */

import java.util.Map;
import java.util.Iterator;

import SimpleOpenNI.*;

SimpleOpenNI context;
float        zoomF =0.5f;
float        rotX = radians(180);  // by default rotate the whole scene 180deg around the x-axis,
                                   // the data from openni comes upside down
float        rotY = radians(0);
int          handVecListSize = 30;
Map<Integer,ArrayList<PVector>>  handPathList = new HashMap<Integer,ArrayList<PVector>>();
color[]       userClr = new color[]{ color(255,0,0),
                                     color(0,255,0),
                                     color(0,0,255),
                                     color(255,255,0),
                                     color(255,0,255),
                                     color(0,255,255)
                                   };
String       recordedPath = "recording.oni";   // file to play back
String       recordingPath = "recording.oni";  // file to record to
boolean      fromRecording = true;             // true = play back the .oni file, false = use the live camera
boolean      record = false;                   // when running live, also record to recordingPath

void setup()
{
  size(1024,768,OPENGL);

  if (fromRecording) {
    context = new SimpleOpenNI(this, recordedPath);
    context.setPlaybackSpeedPlayer(1.0f);
  }
  else {
    context = new SimpleOpenNI(this);
  }
  if(context.isInit() == false)
  {
     println("Can't init SimpleOpenNI, maybe the camera is not connected!"); 
     exit();
     return;  
  }

  // disable mirror
  context.setMirror(false);

  // enable depthMap generation 
  context.enableDepth();

  // enable hands + gesture generation
  context.enableHand();
  context.startGesture(SimpleOpenNI.GESTURE_WAVE);

  if (!fromRecording && record) {
    // setup the recording 
    context.enableRecorder(recordingPath);

    // select the recording channels
    context.addNodeToRecording(SimpleOpenNI.NODE_DEPTH,true);
    context.addNodeToRecording(SimpleOpenNI.NODE_SCENE,true);
    context.addNodeToRecording(SimpleOpenNI.NODE_PLAYER,true);
    //context.addNodeToRecording(SimpleOpenNI.NODE_IMAGE,true);
  }

  // set how smooth the hand capturing should be
  //context.setSmoothingHands(.5);

  stroke(255,255,255);
  smooth();

  perspective(radians(45),
              float(width)/float(height),
              10.0f,150000.0f);
 }

void draw()
{
  if (fromRecording) context.setPlaybackSpeedPlayer(1.0f);

  // update the cam
  context.update();

  background(0,0,0);

  // set the scene pos
  translate(width/2, height/2, 0);
  rotateX(rotX);
  rotateY(rotY);
  scale(zoomF);

  // draw the 3d point depth map
  int[]   depthMap = context.depthMap();
  int     steps   = 10;  // to speed up the drawing, draw only every tenth point
  int     index;
  PVector realWorldPoint;

  translate(0,0,-1000);  // set the rotation center of the scene 1000 in front of the camera

  // draw point cloud
  stroke(200); 
  beginShape(POINTS);
  for(int y=0;y < context.depthHeight();y+=steps)
  {
    for(int x=0;x < context.depthWidth();x+=steps)
    {
      index = x + y * context.depthWidth();
      if(depthMap[index] > 0)
      { 
        // draw the projected point
        realWorldPoint = context.depthMapRealWorld()[index];
        vertex(realWorldPoint.x,realWorldPoint.y,realWorldPoint.z); 
      }
    } 
  } 
  endShape();

  // draw the tracked hands
  if(handPathList.size() > 0)  
  {    
    Iterator itr = handPathList.entrySet().iterator();     
    while(itr.hasNext())
    {
      Map.Entry mapEntry = (Map.Entry)itr.next(); 
      int handId =  (Integer)mapEntry.getKey();
      ArrayList<PVector> vecList = (ArrayList<PVector>)mapEntry.getValue();
      PVector p;

      pushStyle();
        stroke(userClr[ (handId - 1) % userClr.length ]);
        noFill();           
        Iterator itrVec = vecList.iterator(); 
        beginShape();
          while( itrVec.hasNext() ) 
          { 
            p = (PVector) itrVec.next(); 
            vertex(p.x,p.y,p.z);
          }
        endShape();   

        stroke(userClr[ (handId - 1) % userClr.length ]);
        strokeWeight(4);
        p = vecList.get(0);
        point(p.x,p.y,p.z);
      popStyle(); 
    }        
  }

  // draw the kinect cam
  context.drawCamFrustum();
}


// -----------------------------------------------------------------
// hand events

void onNewHand(SimpleOpenNI curContext,int handId,PVector pos)
{
  println("onNewHand - handId: " + handId + ", pos: " + pos);

  ArrayList<PVector> vecList = new ArrayList<PVector>();
  vecList.add(pos);

  handPathList.put(handId,vecList);
}

void onTrackedHand(SimpleOpenNI curContext,int handId,PVector pos)
{
  //println("onTrackedHand - handId: " + handId + ", pos: " + pos );

  ArrayList<PVector> vecList = handPathList.get(handId);
  if(vecList != null)
  {
    vecList.add(0,pos);
    if(vecList.size() >= handVecListSize)
      // remove the last point 
      vecList.remove(vecList.size()-1); 
  }  
}

void onLostHand(SimpleOpenNI curContext,int handId)
{
  println("onLostHand - handId: " + handId);

  handPathList.remove(handId);
}

// -----------------------------------------------------------------
// gesture events

void onCompletedGesture(SimpleOpenNI curContext,int gestureType, PVector pos)
{
  println("onCompletedGesture - gestureType: " + gestureType + ", pos: " + pos);

  int handId = context.startTrackingHand(pos);
  println("hand tracked: " + handId);
}

// -----------------------------------------------------------------
// Keyboard event
void keyPressed()
{
  switch(key)
  {
  case ' ':
    context.setMirror(!context.mirror());
    break;
  }

  switch(keyCode)
  {
    case LEFT:
      rotY += 0.1f;
      break;
    case RIGHT:
      rotY -= 0.1f;
      break;
    case UP:
      if(keyEvent.isShiftDown())
        zoomF += 0.01f;
      else
        rotX += 0.1f;
      break;
    case DOWN:
      if(keyEvent.isShiftDown())
      {
        zoomF -= 0.01f;
        if(zoomF < 0.01)
          zoomF = 0.01;
      }
      else
        rotX -= 0.1f;
      break;
  }
}

Answers

  • I had similar problems while recording userMap and skeleton data, and also received "Couldn't get XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE". It might be a bug in the SimpleOpenNI 1.96 recorder.
    I managed to record a file with an older version of SimpleOpenNI and reuse it in 1.96, and posted more details here.
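
    If what you ultimately need is just the depth map over time, one way to sidestep the .oni recorder entirely is to dump each raw depth frame to disk yourself and do the hand tracking live. This is only a rough, untested sketch of that idea (the file names and the little-endian 16-bit ".raw" format are my own choices), using nothing beyond depthMap() and depthImage() from a live context:

    import SimpleOpenNI.*;

    SimpleOpenNI context;
    boolean dumpFrames = true;   // set to false to just view the depth image

    void setup()
    {
      size(640, 480);
      context = new SimpleOpenNI(this);
      if (context.isInit() == false)
      {
        println("Can't init SimpleOpenNI, maybe the camera is not connected!");
        exit();
        return;
      }
      context.enableDepth();
    }

    void draw()
    {
      context.update();
      image(context.depthImage(), 0, 0);     // quick visual check

      if (dumpFrames)
      {
        int[] depth = context.depthMap();    // one depth value in mm per pixel
        byte[] buf = new byte[depth.length * 2];
        for (int i = 0; i < depth.length; i++)
        {
          buf[2*i]     = (byte)(depth[i] & 0xff);         // low byte
          buf[2*i + 1] = (byte)((depth[i] >> 8) & 0xff);  // high byte
        }
        // one file per frame in the sketch folder, e.g. depth_000123.raw
        saveBytes("depth_" + nf(frameCount, 6) + ".raw", buf);
      }
    }

    Each frame is just depthWidth() x depthHeight() 16-bit millimetre values, so it is easy to load back into Processing or anything else later; at 30 fps that is roughly 18 MB per second, so keep the clips short.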
