Minim audio "clips" when playing sounds
in
Core Library Questions
•
2 years ago
Hi,
So I am working on this art installation and would really appreciate some help getting it up and running. I am new to Processing if that helps.
The program that I am writing is supposed to take a live video feed (using openCV), process it and return a centroid position to a variable. That all works just fine. Phase two is to use the centroid position to play a note using the Minim Library, which also works.
The problem is that when the sound plays, it sometimes clips at the ends, as if there is static on the line or the EQ has maxed out. Can anyone tell me why this is happening? Does anyone know how to fix it?
Thanks in advance!
- //cats in a bag paino/ catsinabag test alterations:
-
import java.awt.*;import processing.core.*;import ddf.minim.*; // for audioimport ddf.minim.ugens.*;
import ddf.minim.analysis.*;import ddf.minim.*;import ddf.minim.signals.*;import hypermedia.video.*;
Minim minim = new Minim( this );AudioOutput out = minim.getLineOut();OpenCV video = null;
// camera width/heightint capture_width = 320;int capture_height = 240;
// threshold for background subtractionint threshold = 105;
// some variables to control the contrast/brightness of the camera imageint contrast = 0, brightness = 0;
//my global variables for thingsint blobWidth;int blobHight;int centroidX;int centroidY;
String currentNote="";
boolean draw_blobs=true, draw_centroid=true, show_difference=true;
// we are going to track the centroid of all blobs and keep the previous and current estimateint previous_centroid_x = capture_width / 2;int previous_centroid_y = capture_height / 2;int current_centroid_x = capture_width / 2;int current_centroid_y = capture_height / 2;
void setup(){// Size of the windowsize(capture_width*4, capture_height+20);
video = new OpenCV(this);
// Setup our capture device using opencvvideo.capture(capture_width, capture_height, 1);//capture(width, height, index of the camera to be used);}
void draw() {// set the background color to blackbackground(0);
// display the frame per second of our program. if this gets too low, our program will appear less interactive as the latency will be highertext("fps: " + frameRate, 10, capture_height+10);
// Makes pixils accessable.video.read();
video.contrast(contrast);video.brightness(brightness);// call the method for background subtractiondoBackgroundSubtraction();
// and blob detectiondoBlobDetection();
//call audio funtionAudio();stroke(255);// draw the output waveforms, so there's something to look atfor(int i = 0; i < out.bufferSize() - 1; i++){float x1 = map(i, 0, out.bufferSize(), 0, capture_width);float x2 = map(i+1, 0, out.bufferSize(), 0, capture_width);line(x1, 50 + out.left.get(i)*50, x2, 50 + out.left.get(i+1)*50);line(x1, 150 + out.right.get(i)*50, x2, 150 + out.right.get(i+1)*50);}}
void doBackgroundSubtraction() {
  // SOURCE = original image from the camera, BUFFER = image after any operations
  // (convert, brightness, threshold, ...), MEMORY = the image stored when
  // OpenCV.remember(...) is called (spacebar in keyPressed).
  image(video.image(OpenCV.SOURCE), 0, 0);
  text("original source image", 10, 10);

  // Work in greyscale, difference against the remembered frame, then threshold.
  video.convert(OpenCV.GRAY);
  video.absDiff();
  video.threshold(threshold);
  text("threshold: " + threshold, capture_width * 2 + 10, capture_height + 10);

  // blur to suppress single-pixel noise before blob detection
  video.blur(OpenCV.GAUSSIAN, 11);

  if (show_difference) {
    image(video.image(OpenCV.BUFFER), capture_width * 2, 0);
    text("absolute-difference blurred image", capture_width * 2 + 10, 10);
  }

  image(video.image(OpenCV.MEMORY), capture_width, 0);
  text("memory image", capture_width + 10, 10);
}
void doBlobDetection() {
// Do the blob detectionBlob[] blobs = video.blobs(400, capture_width*capture_height/3, 5, true);// public Blob[] blobs(int minArea, int maxArea, int maxBlobs, boolean findHoles)
pushMatrix();translate(capture_width*2, 0);
// the total x,y centroid locations to find an average centroid location of all blobsint total_x = 0;int total_y = 0;int area, maxarea=0;
for( int blob_num = 0; blob_num < blobs.length; blob_num++ ) {
if(draw_blobs){// draw the bounding box from the blob detectionRectangle bounding_box = blobs[blob_num].rectangle;
noFill();stroke(122);this.rect( bounding_box.x, bounding_box.y, bounding_box.width, bounding_box.height );
blobWidth = bounding_box.width;blobHight = bounding_box.height;text("Statistics", capture_width+10, 10 );text("Blob Width: " + blobWidth, capture_width+15, 30);text("Blob Height: " + blobHight, capture_width + 15, 50);text("Centroid: " + centroidX + ", " + centroidY, capture_width + 15, 70);
}
// accumulate the centroidsPoint centroid = blobs[blob_num].centroid;total_x += centroid.x;total_y += centroid.y;}
if(blobs.length > 0){previous_centroid_x = current_centroid_x;previous_centroid_y = current_centroid_y;
current_centroid_x = (total_x/blobs.length + previous_centroid_x) / 2;current_centroid_y = (total_y/blobs.length + previous_centroid_x) / 2;}
if(draw_centroid){// draw a crosshair at the centroid locationthis.ellipse(current_centroid_x, current_centroid_y, 5, 5);this.line( current_centroid_x-5, current_centroid_y, current_centroid_x+5, current_centroid_y );this.line( current_centroid_x, current_centroid_y-5, current_centroid_x, current_centroid_y+5 );
area = current_centroid_x*current_centroid_y;if(area > maxarea) {centroidX = current_centroid_x;centroidY = current_centroid_y;maxarea = area;}
}popMatrix();}
void keyPressed() {
  // spacebar: store the current frame as the background reference
  if (key == ' ') {
    video.remember(OpenCV.SOURCE);
  }
  // +/- : adjust the background-subtraction threshold, clamped to 0..255
  if (key == '+') {
    if (threshold >= 255) threshold = 255;
    else threshold++;
  }
  if (key == '-') {
    if (threshold <= 0) threshold = 0;
    else threshold--;
  }
}

void mouseDragged() {
  // drag horizontally for contrast, vertically for brightness
  contrast = (int) map(mouseX, 0, width, -90, 90);
  // BUG FIX: original mapped mouseY over 0..width, so with a wide window the
  // brightness could never get anywhere near +90
  brightness = (int) map(mouseY, 0, height, -90, 90);
}
void Audio() {
  // Map the centroid's x position to a pitch (in Hz). A note is only triggered
  // when the centroid has moved into a different zone (pitch stays 0 otherwise),
  // so the same note is not re-triggered on every frame.
  float pitch = 0;

  // BUG FIX: the original compared Strings with != (reference comparison);
  // use equals() for content comparison.
  if (centroidX > 0 && centroidX < 50) {
    if (!currentNote.equals("C4")) { pitch = 262; }
    currentNote = "C4";
  }
  if (centroidX > 50 && centroidX < 100) {
    if (!currentNote.equals("D4")) { pitch = 294; }
    currentNote = "D4";
  }
  if (centroidX > 100 && centroidX < 150) {
    if (!currentNote.equals("E4")) { pitch = 330; }
    currentNote = "E4";
  }
  if (centroidX > 150 && centroidX < 200) {
    if (!currentNote.equals("F4")) { pitch = 349; }
    currentNote = "F4";
  }
  if (centroidX > 200 && centroidX < 300) {
    if (!currentNote.equals("G4")) { pitch = 392; }
    currentNote = "G4";
  }
  if (centroidX > 300 && centroidX < 400) {
    if (!currentNote.equals("A4")) { pitch = 440; }
    currentNote = "A4";
  }

  // Vertical position controls loudness. setGain() is in decibels: the original
  // +15 dB boosts the signal ~5.6x, which pushes the summed sines past full
  // scale and hard-clips — the "static"/distortion described in the post.
  // Keep the gain at or below 0 dB.
  if (centroidY < 100) {
    out.setGain(0);    // was 15 (clipped)
  } else if (centroidY > 100) {
    out.setGain(-10);  // was 5
  }

  text("Note: " + currentNote, capture_width * 3 + 15, 90);

  if (pitch > 0) {
    // the note adds itself to the line out and decays away on its own
    new MyNote(pitch, 0.2f);
  }
}
void stop(){// song.close();minim.stop();//// // this line stops the sketchsuper.stop();}
class MyNote implements AudioSignal{private float freq;private float level;private float alph;private SineWave sine;MyNote(float pitch, float amplitude){freq = pitch;level = amplitude;sine = new SineWave(freq, level, out.sampleRate());alph = 0.9; // Decay constant for the envelopeout.addSignal(this);}
void updateLevel(){// Called once per buffer to decay the amplitude awaylevel = level * alph;sine.setAmp(level);// This also handles stopping this oscillator when its level is very low.if (level < 0.01) {out.removeSignal(this);}// this will lead to destruction of the object, since the only active// reference to it is from the LineOut}void generate(float [] samp){// generate the next buffer's worth of sinusoidsine.generate(samp);// decay the amplitude a little bit moreupdateLevel();}// AudioSignal requires both mono and stereo generate functionsvoid generate(float [] sampL, float [] sampR){sine.generate(sampL, sampR);updateLevel();}
}
1