trouble with making the minim sounds only play one time.

Using the SimpleOpenNI hands examples I want to make a project called: Body Piano. Basically the users can play sound when reach their hands to certain areas.

What I got so far is that the sound will keep playing when the position of hands is on a certain area. I want it only play one time if the hands' positions are not changing.

Could someone point me in the right direction? Thanks a lot!!

/* --------------------------------------------------------------------------
 * SimpleOpenNI Hands3d Test
 * --------------------------------------------------------------------------
 * Processing Wrapper for the OpenNI/Kinect 2 library
 * http://code.google.com/p/simple-openni
 * --------------------------------------------------------------------------
 * prog:  Max Rheiner / Interaction Design / Zhdk / http://iad.zhdk.ch/
 * date:  12/12/2012 (m/d/y)
 * ----------------------------------------------------------------------------
 * This demos shows how to use the gesture/hand generator.
 * It's not the most reliable yet, a two hands example will follow
 * ----------------------------------------------------------------------------
 */

import java.util.Map;
import java.util.Iterator;

import SimpleOpenNI.*;
import ddf.minim.*;

AudioPlayer s1, s2, s3, s4;

Minim minim;
//PImage s1;
//PImage s2;
PImage o1;

SimpleOpenNI context;
int handVecListSize = 20;
Map<Integer, ArrayList<PVector>>  handPathList = new HashMap<Integer, ArrayList<PVector>>();
color[]       userClr = new color[] { 
  color(255, 0, 0), 
  color(0, 255, 0), 
  color(0, 0, 255), 
  color(255, 255, 0), 
  color(255, 0, 255), 
  color(0, 255, 255)
};
void setup()
{
  //  frameRate(200);
  size(640, 480);
  minim = new Minim(this);
  s1 = minim.loadFile("High.mp3");
  s2 = minim.loadFile("Mid.mp3");
  s3 = minim.loadFile("Low.mp3");
  //  s4 = minim.loadFile("o1.mp3");

  context = new SimpleOpenNI(this);
  if (context.isInit() == false)
  {
    println("Can't init SimpleOpenNI, maybe the camera is not connected!"); 
    exit();
    return;
  }   

  // enable depthMap generation 
  context.enableDepth();

  // disable mirror
  context.setMirror(true);

  // enable hands + gesture generation
  //context.enableGesture();
  context.enableHand();
  context.startGesture(SimpleOpenNI.GESTURE_HAND_RAISE);

  // set how smooth the hand capturing should be
  //context.setSmoothingHands(.5);
}

void draw()
{

  //  s1 = loadImage("s1.png");
  //  s2 = loadImage("s2.png");
  o1 = loadImage("o1.jpg");
  background(01);
  // update the cam
  context.update();

  //  image(o1, 0, 0);
  imageMode(CENTER);
  // draw the tracked hands
  if (handPathList.size() > 0)  
  {    
    Iterator itr = handPathList.entrySet().iterator();     
    while (itr.hasNext ())
    {
      Map.Entry mapEntry = (Map.Entry)itr.next(); 
      int handId =  (Integer)mapEntry.getKey();
      ArrayList<PVector> vecList = (ArrayList<PVector>)mapEntry.getValue();
      PVector p;
      PVector p2d = new PVector();

      stroke(userClr[ (handId - 1) % userClr.length ]);
      noFill(); 
      strokeWeight(20);        
      Iterator itrVec = vecList.iterator(); 
      beginShape();
      while ( itrVec.hasNext () ) 
      { 
        p = (PVector) itrVec.next(); 


        context.convertRealWorldToProjective(p, p2d);
        vertex(p2d.x, p2d.y);
      }
      endShape();   
 pushStyle();


        if (s1.isPlaying() == true) {
          //do nothing
        }
        else {
          if (   p2d.x > 0 && p2d.x < 320 && p2d.y > 240 && p2d.y < 480) {
            s1.play(1);

            s1.rewind();
          }
        }


        if (s2.isPlaying() == true) {
          //do nothing
        }
        else {
          if ( p2d.x > 0 && p2d.x < 640 && p2d.y > 0 && p2d.y < 240) {
            s2.play(1);

            s2.rewind();
          }
        }


        if (s3.isPlaying() == true) {
          //do nothing
        }
        else {
          if ( p2d.x > 320 && p2d.x < 640 && p2d.y > 240 && p2d.y < 480) {
            s3.play(1);

            s3.rewind();
          }
        }
        popStyle();

      stroke(userClr[ (handId - 1) % userClr.length ]);
      strokeWeight(4);
      p = vecList.get(0);
      context.convertRealWorldToProjective(p, p2d);
    }
  }
}


// -----------------------------------------------------------------
// hand events

void onNewHand(SimpleOpenNI curContext, int handId, PVector pos)
{
  println("onNewHand - handId: " + handId + ", pos: " + pos);

  ArrayList<PVector> vecList = new ArrayList<PVector>();
  vecList.add(pos);

  handPathList.put(handId, vecList);
}

void onTrackedHand(SimpleOpenNI curContext, int handId, PVector pos)
{
  //println("onTrackedHand - handId: " + handId + ", pos: " + pos );

  ArrayList<PVector> vecList = handPathList.get(handId);
  if (vecList != null)
  {
    vecList.add(0, pos);
    if (vecList.size() >= handVecListSize)
      // remove the last point 
      vecList.remove(vecList.size()-1);
  }
}

void onLostHand(SimpleOpenNI curContext, int handId)
{
  println("onLostHand - handId: " + handId);
  handPathList.remove(handId);
}

// -----------------------------------------------------------------
// gesture events

void onCompletedGesture(SimpleOpenNI curContext, int gestureType, PVector pos)
{
  println("onCompletedGesture - gestureType: " + gestureType + ", pos: " + pos);

  int handId = context.startTrackingHand(pos);
  println("hand stracked: " + handId);
}

// -----------------------------------------------------------------
// Keyboard event
void keyPressed()
{

  switch(key)
  {
  case ' ':
    context.setMirror(!context.mirror());
    break;
  case '1':
    context.setMirror(true);
    break;
  case '2':
    context.setMirror(false);
    break;
  }
}

Answers

  • This sounds like an interesting project! I wish I could help out but I'm new to all this myself. Why don't you try StackOverflow.com? They might be able to help out seeing that nobody here can answer your question.

  • I think the problem is just that the AudioPlayer exits the playing state when it finishes playing, so isPlaying() becomes false and your hand test code runs again, which starts the sound again. I think you will need to have a boolean value in your sketch for each key position and track whether a hand is in a key region or not, rather than counting on the AudioPlayer's isPlaying method to do that for you.

Sign In or Register to comment.