Play sound when someone comes within a certain distance of a Kinect
in Integration and Hardware • 1 year ago
Hello,
Basically I am trying to get some sounds playing when a user is at varying distances from the Kinect: too close, just right, and far away.
I have been trying to modify this wonderful code by Daniel Shiffman:
// Daniel Shiffman
// Tracking the average location beyond a given depth threshold
// Thanks to Dan O'Sullivan
// http://www.shiffman.net
// https://github.com/shiffman/libfreenect/tree/master/wrappers/java/processing

import org.openkinect.*;
import org.openkinect.processing.*;
import ddf.minim.*;

Minim minim;
AudioPlayer player;

// Showing how we can farm all the kinect stuff out to a separate class
KinectTracker tracker;
// Kinect Library object
Kinect kinect;

float[] depthLookUp = new float[2048];

void setup() {
  size(640, 520);
  kinect = new Kinect(this);
  tracker = new KinectTracker();
  //size(512, 200, P3D);
  for (int i = 0; i < depthLookUp.length; i++) {
    depthLookUp[i] = rawDepthToMeters(i);
  }
}

void draw() {
  background(255);

  // Run the tracking analysis
  tracker.track();
  // Show the image
  tracker.display();

  // Let's draw the raw location
  PVector v1 = tracker.getPos();
  fill(50, 100, 250, 200);
  noStroke();
  ellipse(v1.x, v1.y, 20, 20);

  // Let's draw the "lerped" location
  PVector v2 = tracker.getLerpedPos();
  fill(100, 250, 50, 200);
  noStroke();
  ellipse(v2.x, v2.y, 20, 20);

  // Display some info
  int t = tracker.getThreshold();
  int[] depth = kinect.getRawDepth();
  fill(0);
  text("threshold: " + t + " " + "framerate: " + (int)frameRate + " " + "UP increase threshold, DOWN decrease threshold", 10, 500);
}

void keyPressed() {
  int t = tracker.getThreshold();
  if (key == CODED) {
    if (keyCode == UP) {
      t += 5;
      tracker.setThreshold(t);
    }
    else if (keyCode == DOWN) {
      t -= 5;
      tracker.setThreshold(t);
    }
  }
}

float rawDepthToMeters(int depthValue) {
  if (depthValue < 2047) {
    return (float)(1.0 / ((double)(depthValue) * -0.0030711016 + 3.3309495161));
  }
  return 0.0f;
}

void play() {
  int[] depth = kinect.getRawDepth();
  tracker.track();
  int t = tracker.getThreshold();
  minim = new Minim(this);
  // load a file, give the AudioPlayer buffers that are 2048 samples long
  player = minim.loadFile("1.wav", 2048);
  // play the file
  player.play();
}

void stop() {
  tracker.quit();
  // always close Minim audio classes when you are done with them
  player.close();
  // always stop Minim before exiting
  minim.stop();
  super.stop();
}
What would be the best way to do this? I am relatively new to Processing, so any help is appreciated. I added in the audio playing stuff myself, and I've put a rough sketch of what I'm imagining after the class below to make the question more concrete. The code also uses this KinectTracker class:
class KinectTracker {

  // Size of kinect image
  int kw = 640;
  int kh = 480;

  int threshold = 745;

  // Raw location
  PVector loc;
  // Interpolated location
  PVector lerpedLoc;

  // Depth data
  int[] depth;

  PImage display;

  KinectTracker() {
    kinect.start();
    kinect.enableDepth(true);
    // We could skip processing the grayscale image for efficiency
    // but this example is just demonstrating everything
    kinect.processDepthImage(true);
    display = createImage(kw, kh, PConstants.RGB);
    loc = new PVector(0, 0);
    lerpedLoc = new PVector(0, 0);
  }

  void track() {
    // Get the raw depth as array of integers
    depth = kinect.getRawDepth();
    // Being overly cautious here
    if (depth == null) return;

    float sumX = 0;
    float sumY = 0;
    float count = 0;
    for (int x = 0; x < kw; x++) {
      for (int y = 0; y < kh; y++) {
        // Mirroring the image
        int offset = kw - x - 1 + y * kw;
        // Grabbing the raw depth
        int rawDepth = depth[offset];
        // Testing against threshold
        if (rawDepth < threshold) {
          sumX += x;
          sumY += y;
          count++;
        }
      }
    }

    // As long as we found something
    if (count != 0) {
      loc = new PVector(sumX / count, sumY / count);
    }

    // Interpolating the location, doing it arbitrarily for now
    lerpedLoc.x = PApplet.lerp(lerpedLoc.x, loc.x, 0.3f);
    lerpedLoc.y = PApplet.lerp(lerpedLoc.y, loc.y, 0.3f);
  }

  PVector getLerpedPos() {
    return lerpedLoc;
  }

  PVector getPos() {
    return loc;
  }

  void display() {
    PImage img = kinect.getDepthImage();
    // Being overly cautious here
    if (depth == null || img == null) return;

    // Going to rewrite the depth image to show which pixels are in threshold
    // A lot of this is redundant, but this is just for demonstration purposes
    display.loadPixels();
    for (int x = 0; x < kw; x++) {
      for (int y = 0; y < kh; y++) {
        // mirroring image
        int offset = kw - x - 1 + y * kw;
        // Raw depth
        int rawDepth = depth[offset];
        int pix = x + y * display.width;
        if (rawDepth < threshold) {
          // A red color instead
          display.pixels[pix] = color(150, 50, 50);
        }
        else {
          display.pixels[pix] = img.pixels[offset];
        }
      }
    }
    display.updatePixels();

    // Draw the image
    image(display, 0, 0);
  }

  void quit() {
    kinect.quit();
  }

  int getThreshold() {
    return threshold;
  }

  void setThreshold(int t) {
    threshold = t;
  }
}
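To make the question more concrete, here is a rough sketch of what I'm imagining (untested; the zone boundaries, file names, and the getAverageDepth() helper are all placeholders I made up): keep three AudioPlayers loaded, work out which depth zone the user falls in each frame, and only switch sounds when the zone changes, so the sound isn't restarted on every frame.

// Sketch only. Assumes KinectTracker is extended to report the average raw
// depth of the pixels it already counts in track(), e.g. by accumulating
// sumDepth += rawDepth inside the if-block and storing
// avgDepth = (int)(sumDepth / count) after the loop.
AudioPlayer tooClose, justRight, farAway;
int currentZone = -1; // -1 means no sound has started yet

void setupSounds() {
  minim = new Minim(this);
  // placeholder file names
  tooClose  = minim.loadFile("tooClose.wav", 2048);
  justRight = minim.loadFile("justRight.wav", 2048);
  farAway   = minim.loadFile("farAway.wav", 2048);
}

void updateSound(int avgRawDepth) {
  // placeholder boundaries in raw depth units (smaller value = closer)
  int zone;
  if (avgRawDepth < 600)      zone = 0; // too close
  else if (avgRawDepth < 850) zone = 1; // just right
  else                        zone = 2; // far away

  // only react when the user crosses into a different zone
  if (zone != currentZone) {
    currentZone = zone;
    tooClose.pause();
    justRight.pause();
    farAway.pause();
    AudioPlayer p = (zone == 0) ? tooClose : (zone == 1) ? justRight : farAway;
    p.rewind();
    p.play();
  }
}

The idea is that setupSounds() would be called once from setup(), and updateSound(tracker.getAverageDepth()) once per frame from draw(), after tracker.track(). Does that seem like a sensible structure, or is there a better pattern for this?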