Howdy, Stranger!

We are about to switch to a new forum software. Until then we have removed the registration on this forum.

  • using while with sound library (micLevel) causes baffling freezes
    • In JS, most programs run under 1 thread only. :-B
    • It means if the end of draw() is never reached, p5.AudioIn::getLevel() won't get updated! :-&
    • And your while () loop is gonna get stuck "forever"! :-SS
  • using while with sound library (micLevel) causes baffling freezes

    This code freezes/crashes every time I run it & make a loud sound. In my sound environment, micLevel is returning values around .01 most of the time, so the while condition should return FALSE most of the time. Is this a bug? Or some audio issue? Or am I obliviously doing something dumb? Thanks for any help!

    var mic;
    var micLevel;

    function setup() {
      createCanvas(500, 500);
      mic = new p5.AudioIn(); // BUG FIX: missing semicolon in the original
      mic.start();
    }

    function draw() {
      background(127);
      micLevel = mic.getLevel();
      text(micLevel, 10, 20);

      // BUG FIX: the original used `while (micLevel > .1) { ... }` here.
      // JavaScript runs on a single thread, so the audio level is only
      // refreshed between draw() calls; inside the while loop getLevel()
      // keeps returning the same value, the condition never changes, and
      // the page freezes. An `if` checks once per frame instead — draw()
      // already loops automatically.
      if (micLevel > 0.1) {
        micLevel = mic.getLevel();
        text(micLevel, 10, 20);
      }
    }

  • Trying To Figure Out How to Make My Lines Move Independently

    each line has two ends. You use xspeed and yspeed for BOTH. I changed that by using xspeed2 and yspeed2 for the 2nd end now.

    import processing.sound.*; 
    
    // Bouncer a; 
    
    Bouncer []c= new Bouncer [150]; 
    
    AudioIn in; 
    Amplitude amp; 
    
    // One-time setup: open the window, build all bouncers, and lay down the
    // first (near-transparent) background wash.
    void setup() {

      size( 1500, 1000);

      // Every slot gets a bouncer with random x/y speed in [-8, 8) for its
      // first endpoint and a random red component for its stroke color.
      for (int idx = 0; idx < c.length; idx++) {
        c[idx] = new Bouncer(random(-8, 8), random (-8, 8), random(255));
      }

      // Alpha is effectively zero, so this barely tints the canvas.
      background(0, .000005);
    }
    
    // Per frame: fade the canvas very slightly, then draw and step every line.
    void draw() {

      // Near-zero alpha clear leaves long trails behind the moving lines.
      background(0, .000005);

      // Render each bouncer, then advance its endpoints.
      for (Bouncer bouncer : c) {
        bouncer.display();
        bouncer.bounce();
      }
    }
    
    // ======================================================
    
    // A line segment whose two endpoints bounce around the canvas
    // independently, each with its own velocity.
    class Bouncer {

      float r, g, b,      // stroke color channels
        s,                // margin used in the edge-bounce test
        xpos, ypos,       // first endpoint
        xpos2, ypos2,     // second endpoint
        xspeed, yspeed,   // velocity of the first endpoint
        // Second endpoint gets its OWN random velocity so the line can rotate
        // instead of translating rigidly.
        xspeed2 = random(-8, 8), yspeed2 = random(-8, 8);

      Bouncer(float tempxspeed, float tempyspeed, float tempr) {
        xspeed = tempxspeed;
        yspeed = tempyspeed;

        // Endpoints start at fixed fractions of the canvas.
        xpos = width/2;
        ypos = height/2;
        xpos2 = width/4;
        ypos2 = height/4;

        r = tempr;
        g = random(255);
        b = 255;
        s = 15;
      }

      // Draw the segment in this bouncer's color.
      void display() {
        stroke(r, g, b);
        line(xpos, ypos, xpos2, ypos2);
      }

      // Reflect each endpoint's velocity at the canvas edges, then advance
      // both endpoints by their own speeds.
      void bounce() {
        if (xpos > width - s/2 || xpos < s/2) {
          xspeed = -xspeed;
        }
        if (ypos > height - s/2 || ypos < s/2) {
          yspeed = -yspeed;
        }
        if (xpos2 > width - s/2 || xpos2 < s/2) {
          xspeed2 = -xspeed2;
        }
        if (ypos2 > height - s/2 || ypos2 < s/2) {
          yspeed2 = -yspeed2;
        }

        xpos += xspeed;
        ypos += yspeed;
        xpos2 += xspeed2;
        ypos2 += yspeed2;
      }
    }//class
    //
    
  • Trying To Figure Out How to Make My Lines Move Independently

    Hi Processing World!

    I'm still a newbie here, and never have coded something substantial in my life. I'm wondering if I can get some help in figuring out how to make my lines move independently from each other and go off of different angles, instead of just bouncing back off the screen in the same direction. Either I need to have them rotate as they hit the side of the screen, or I need point X to follow point Y. Not sure what my solution would be but I hope someone here can help.

    Thank you,

    Clinton

    Bouncer a; Bouncer []c= new Bouncer [150]; import processing.sound.*; AudioIn in; Amplitude amp; void setup(){

    size( 1500,1500); for (int i=0; i<c.length;i++){ c[i] = new Bouncer(random(-8,8), random (-8,8), random(255)); }

    }

    void draw(){

    background(0,.000005);

    for (int i=0; i<c.length;i++){ c[i].display(); c[i].bounce(); }

    }

    class Bouncer {

    float r, g, b, s, xpos, ypos, xpos2, ypos2, xspeed, yspeed; Bouncer(float tempxspeed, float tempyspeed, float tempr) {

    xspeed = tempxspeed;
    yspeed = tempyspeed;
    xpos = width/2;
    ypos = height/2;
    xpos2 = width/4;
    ypos2 = height/4;
    r = tempr;
    g = random(255);
    b = 255;
    s = 5;
    

    }

    void display() {

    stroke(255);
    fill(255);
    line(xpos, ypos, xpos2, ypos2);
    

    }

    void bounce() {

    if ( xpos > width - s/2 || xpos < s/2 ) {
      xspeed = xspeed * -1;
    }
    
    if ( ypos > height - s/2 || ypos < s/2 ) {
      yspeed = yspeed * -1;
    }
    
    if ( xpos2 > width - s/2 || xpos2 < s/2 ) {
      xspeed = xspeed * -1;
    }
    
    if ( ypos2 > height - s/2 || ypos2 < s/2 ) {
      yspeed = yspeed * -1;
    }
    
    xpos = xpos + xspeed;
    ypos = ypos + yspeed;
    xpos2 = xpos2 + xspeed;
    ypos2 = ypos2 + yspeed;
    

    } }

  • Making a Sound Meter

    Please format your code. Edit your post (gear on top right side of any of your posts), select your code and hit ctrl+o. Leave an empty line above and below your block of code. Details here: https://forum.processing.org/two/discussion/15473/readme-how-to-format-code-and-text

    Demo below.

    Kf

    import processing.sound.*; 
    
    final int STEPX=4;
    final int STEPY=30;
    
    AudioIn in; 
    Amplitude amp; 
    float x=50; 
    float y=STEPY; 
    float c; 
    PVector prevPoint;
    
    
    // One-time setup: open the default mic (channel 0), route it into an
    // amplitude analyzer, and seed the first point of the waveform trace.
    void setup() { 
      size (displayWidth, 500); 
      amp=new Amplitude(this);   // RMS-level analyzer
      in=new AudioIn(this, 0);   // first audio input channel
      in.start();                // start capturing before patching the analyzer
      amp.input(in);             // feed the mic stream into the analyzer
      background(255);
      prevPoint=new PVector(0, y);  // previous trace point; y starts at STEPY
    }
    
    // Per frame: sample the mic level, extend the trace by one segment, and
    // wrap to the next row once the trace runs off the right edge.
    void draw() { 
      // Scaled RMS amplitude — vertical deflection of the trace.
      c=amp.analyze()*50; 

      // BUG FIX: removed a stray beginShape() that had no vertices and no
      // matching endShape(); it drew nothing and left shape state dangling.
      stroke(25);

      // Connect the previous point to this frame's point.
      line(prevPoint.x,prevPoint.y, x+STEPX, y+c);

      // Start a new row when the trace reaches the right edge.
      if (x>width) { 
        y=y+STEPY; 
        x=0;
      }

      // Advance x one step and remember the point just drawn to.
      // NOTE(review): y grows without bound — rows eventually leave the
      // window; acceptable for a demo, but worth a reset if reused.
      prevPoint.set( x=x+STEPX, y+c);
    }
    

    P.S. The call to the line() function above could be replaced with:

      beginShape();
      vertex(prevPoint.x, prevPoint.y);
      vertex( x+STEPX, y+c);
      endShape();
    
  • Making a Sound Meter

    Hello!

    I have a problem. I am a newbie with Processing and I'm trying to make a sound meter similar to one that you would see on a lie detector test — or a Joy Division album cover. I've only been using Processing for a month now, and the problem I'm running into is that every time I make a sound my image jumps and looks like squares instead of curved lines.

    Hoping I could get some help....

    -Clinton

    // NOTE(review): the forum collapsed this whole sketch onto a few lines;
    // the statements below are unchanged. The "squares instead of curves"
    // symptom comes from drawing two disconnected vertical ticks per frame
    // (line(x,y,x,y-c) / line(x,y,x,y+c)) instead of connecting consecutive
    // samples — see the demo reply in this thread for a point-to-point fix.
    import processing.sound.*; AudioIn in; Amplitude amp; float x=50; float y=10; float c; void setup() { frameRate (300); size (1500, 500); amp=new Amplitude(this); in=new AudioIn(this, 0); in.start(); amp.input(in); background(255); }

    // Per frame: c is the scaled mic level; x advances and wraps, y steps
    // down one row per wrap. beginShape() here is stray (never closed).
    void draw() { c=amp.analyze()*50; beginShape(); stroke(25);

    line(x, y, x, y-c); line(x, y, x, y+c); x++; if (x>width) { y=y+10; x=0; } }

  • Interactive video with sound input

    So I tried to stick together two codes - one for webcam input --> black&white output, other from library - the Audio Input, that draws colorful circle depending on the sound input.

    It's not the main idea, but I just wanted to see whether it would work — drawing something colorful onto a video along with the sound. And no surprise, the code didn't work. What should I change? Or isn't it even possible to draw over a video that is being captured at the same time?

    So this is it (I bolded the parts of sound input, without them the code is working fine )

    import processing.video.*; **import processing.sound.*;

    AudioIn input; Amplitude rms; int scale=1;**

    color black = color(0); color white = color(255); int numPixels; Capture video;

    void setup() { size(640, 480); // Change size to 320 x 240 if too slow at 640 x 480 strokeWeight(5);

    // This the default video input, see the GettingStartedCapture // example if it creates an error video = new Capture(this, width, height);

    // Start capturing the images from the camera video.start();

    numPixels = video.width * video.height; noCursor(); smooth();

    **// FOR SOUND //Create an Audio input and grab the 1st channel input = new AudioIn(this, 0);

    // start the Audio Input
    input.start();
    
    // create a new Amplitude analyzer
    rms = new Amplitude(this);
    
    // Patch the input to an volume analyzer
    rms.input(input);**
    

    }

    void draw() { if (video.available()) { video.read(); video.loadPixels(); int threshold = 120; // Set the threshold value float pixelBrightness; // Declare variable to store a pixel's color // Turn each pixel in the video frame black or white depending on its brightness loadPixels(); for (int i = 0; i < numPixels; i++) { pixelBrightness = brightness(video.pixels[i]); if (pixelBrightness > threshold) { // If the pixel is brighter than the pixels[i] = white; // threshold value, make it white } else { // Otherwise, pixels[i] = black; // make it black } } updatePixels();

    **// FOR SOUND // adjust the volume of the audio input input.amp(map(mouseY, 0, height, 0.0, 1.0));

    // rms.analyze() return a value between 0 and 1. To adjust
    // the scaling and mapping of an ellipse we scale from 0 to 0.5
    scale=int(map(rms.analyze(), 0, 0.5, 1, 350));
    noStroke();
    
    fill(255,0,150);
    // We draw an ellispe coupled to the audio analysis
    ellipse(width/2, height/2, 1*scale, 1*scale);**
    

    }

  • Get Audio Input with minim?

    Hi This is code that I got from the internet and I modified it a bit.

    The original code was using mp3 files to draw.
    However I want to get the sound and fft from Audio Input.

    I'm trying to get the sound from audio but I think the original code using minim library so I can't get the sound right.

    How can I get the sound from mic like I get the sound from mp3?

    import ddf.minim.*;
    import ddf.minim.analysis.*;
    import processing.sound.*;
    
    FFT fft;
    AudioIn song;

    // Spectrum "zones": bass = first 3% of bins, mids up to 12.5%,
    // highs up to 20% of the spectrum.
    float specLow = 0.03; // 3%
    float specMid = 0.125;  // 12.5%
    float specHi = 0.20;   // 20%

    // The remaining 80% of the spectrum is not used —
    // those frequencies are usually too high for the human ear anyway.

    // Accumulated power ("score") per zone.
    float scoreLow = 0;
    float scoreMid = 0;
    float scoreHi = 0;

    // Previous frame's scores, kept to soften downward jumps.
    float oldScoreLow = scoreLow;
    float oldScoreMid = scoreMid;
    float oldScoreHi = scoreHi;

    // Maximum amount a score may drop per frame.
    float scoreDecreaseRate = 25;

    // Cubes appearing in space.
    int nbCubes;
    Cube[] cubes;

    // Lines that appear on the screen edges.
    int nbWalls = 500;
    // BUG FIX: this was declared `Wall[] murs;` (French for "walls") while
    // every use in setup() and draw() refers to `walls` — a compile error.
    Wall[] walls;
    
    // Sketch setup: audio input, FFT analyzer, and the cube/wall scenery.
    void setup()
    {
      //Display in 3D on the entire screen
      fullScreen(P3D);

      // BUG FIX: the original constructed the FFT from `song.sampleRate()`
      // BEFORE `song` was assigned — a guaranteed NullPointerException.
      // Create and start the input first, then the analyzer.
      song = new AudioIn(this, 0);
      song.start();

      // NOTE(review): processing.sound's FFT constructor takes (parent, bands)
      // — a band count, not a sample rate. 512 is the library's default band
      // count. Be aware the draw() in this post still uses Minim-style calls
      // (forward()/getBand()); the two libraries' FFT APIs are not compatible.
      fft = new FFT(this, 512);
      fft.input(song);

      //One cube per analyzed high-frequency band
      nbCubes = (int)(fft.specSize()*specHi);
      cubes = new Cube[nbCubes];

      //As many walls as we want
      walls = new Wall[nbWalls];

      //Create cube objects
      for (int i = 0; i < nbCubes; i++) {
       cubes[i] = new Cube(); 
      }

      // Wall objects hug the four screen edges, interleaved by index mod 4:
      //walls LEFT
      for (int i = 0; i < nbWalls; i+=4) {
       walls[i] = new Wall(0, height/2, 10, height); 
      }

      //walls RIGHT
      for (int i = 1; i < nbWalls; i+=4) {
       walls[i] = new Wall(width, height/2, 10, height); 
      }

      //walls DOWN
      for (int i = 2; i < nbWalls; i+=4) {
       walls[i] = new Wall(width/2, height, width, 10); 
      }

      //walls UP
      for (int i = 3; i < nbWalls; i+=4) {
       walls[i] = new Wall(width/2, 0, width, 10); 
      }

      //Black background
      background(0);

      // BUG FIX: removed `song.play(0)` — AudioIn is a live input stream with
      // no play(int) method; it was already started above with start().
    }
    
    
    
    // Per-frame analysis and rendering.
    // NOTE(review): forward(), getBand(), specSize() and `song.mix` below are
    // Minim's FFT API, while `fft`/`song` are declared as processing.sound
    // types — the two libraries are not interchangeable (processing.sound's
    // FFT uses analyze()/a spectrum array), so this body cannot run as-is.
    void draw()
    {  
      //Forward the song. One draw () for each "frame" of the song ...
      fft.forward(song.mix);

      //Calculation of "scores" (power) for three categories of sound
      //First, save old values so drops can be softened below
      oldScoreLow = scoreLow;
      oldScoreMid = scoreMid;
      oldScoreHi = scoreHi;

      //Reset values
      scoreLow = 0;
      scoreMid = 0;
      scoreHi = 0;

      //Sum band magnitudes per zone: [0,3%) bass, [3%,12.5%) mids, [12.5%,20%) highs
      for(int i = 0; i < fft.specSize()*specLow; i++)
      {
        scoreLow += fft.getBand(i);
      }

      for(int i = (int)(fft.specSize()*specLow); i < fft.specSize()*specMid; i++)
      {
        scoreMid += fft.getBand(i);
      }

      for(int i = (int)(fft.specSize()*specMid); i < fft.specSize()*specHi; i++)
      {
        scoreHi += fft.getBand(i);
      }

      //Soften the descent: never drop by more than scoreDecreaseRate per frame
      if (oldScoreLow > scoreLow) {
        scoreLow = oldScoreLow - scoreDecreaseRate;
      }

      if (oldScoreMid > scoreMid) {
        scoreMid = oldScoreMid - scoreDecreaseRate;
      }

      if (oldScoreHi > scoreHi) {
        scoreHi = oldScoreHi - scoreDecreaseRate;
      }

      //Overall volume, weighted so the high frequencies dominate —
      //this makes the animation speed up noticeably on high-pitched sounds
      float scoreGlobal = 0.66*scoreLow + 0.8*scoreMid + 1*scoreHi;

      //Subtle color of background
      background(scoreLow/100, scoreMid/100, scoreHi/100);

      //Cube for each frequency band
      for(int i = 0; i < nbCubes; i++)
      {
        //Value of the frequency band
        float bandValue = fft.getBand(i);

        //The color is represented as: red for bass, green for medium sounds and blue for high.
        //The opacity is determined by the volume of the band and the overall volume.
        cubes[i].display(scoreLow, scoreMid, scoreHi, bandValue, scoreGlobal);
      }

      //Edge lines: keep the previous band's value so consecutive points connect
      float previousBandValue = fft.getBand(0);

      //Distance between each line point, negative because on the z dimension
      float dist = -25;

      //Multiply the height by this constant
      float heightMult = 2;

      //For each band
      for(int i = 1; i < fft.specSize(); i++)
      {
        //Amplify the farther bands to keep them visible
        //(note: i/50 is integer division, so the boost only kicks in at i >= 50)
        float bandValue = fft.getBand(i)*(1 + (i/50));

        //Color mixes the three zone scores; alpha fades with the band index
        stroke(100+scoreLow, 100+scoreMid, 100+scoreHi, 255-i);
        strokeWeight(1 + (scoreGlobal/100));

        //lower left lines
        line(0, height-(previousBandValue*heightMult), dist*(i-1), 0, height-(bandValue*heightMult), dist*i);
        line((previousBandValue*heightMult), height, dist*(i-1), (bandValue*heightMult), height, dist*i);
        line(0, height-(previousBandValue*heightMult), dist*(i-1), (bandValue*heightMult), height, dist*i);

        //upper left lines
        line(0, (previousBandValue*heightMult), dist*(i-1), 0, (bandValue*heightMult), dist*i);
        line((previousBandValue*heightMult), 0, dist*(i-1), (bandValue*heightMult), 0, dist*i);
        line(0, (previousBandValue*heightMult), dist*(i-1), (bandValue*heightMult), 0, dist*i);

        //lower right lines (was "ligne inferieure droite")
        line(width, height-(previousBandValue*heightMult), dist*(i-1), width, height-(bandValue*heightMult), dist*i);
        line(width-(previousBandValue*heightMult), height, dist*(i-1), width-(bandValue*heightMult), height, dist*i);
        line(width, height-(previousBandValue*heightMult), dist*(i-1), width-(bandValue*heightMult), height, dist*i);

        //upper right lines (original comment said "lower right", but these use y=0)
        line(width, (previousBandValue*heightMult), dist*(i-1), width, (bandValue*heightMult), dist*i);
        line(width-(previousBandValue*heightMult), 0, dist*(i-1), width-(bandValue*heightMult), 0, dist*i);
        line(width, (previousBandValue*heightMult), dist*(i-1), width-(bandValue*heightMult), 0, dist*i);

        //Save the value for the next loop round
        previousBandValue = bandValue;
      }

      //Rectangular walls
      for(int i = 0; i < nbWalls; i++)
      {
        //Each wall is assigned a band (cycled via modulo), and receives its strength
        float intensity = fft.getBand(i%((int)(fft.specSize()*specHi)));
        walls[i].display(scoreLow, scoreMid, scoreHi, intensity, scoreGlobal);
      }
    }
    
    //Class for cubes floating in space
    //A cube drifting toward the camera, spinning according to its band's volume.
    class Cube {
      //Spawn depth and the depth at which a cube gets recycled.
      float startingZ = -10000;
      float maxZ = 1000;

      //Current position, per-axis rotation rates, and accumulated angles.
      float x, y, z;
      float rotX, rotY, rotZ;
      float sumRotX, sumRotY, sumRotZ;

      Cube() {
        //Spawn anywhere on screen, at a random depth.
        x = random(0, width);
        y = random(0, height);
        z = random(startingZ, maxZ);

        //Random rotation rate for each axis.
        rotX = random(0, 1);
        rotY = random(0, 1);
        rotZ = random(0, 1);
      }

      void display(float scoreLow, float scoreMid, float scoreHi, float intensity, float scoreGlobal) {
        //Fill: the zone scores drive RGB, this cube's band intensity drives opacity.
        color displayColor = color(scoreLow*0.67, scoreMid*0.67, scoreHi*0.67, intensity*5);
        fill(displayColor, 255);

        //Edges fade as the cube's own band gets louder.
        color strokeColor = color(255, 150-(20*intensity));
        stroke(strokeColor);
        strokeWeight(1 + (scoreGlobal/300));

        //Isolate this cube's transform.
        pushMatrix();
        translate(x, y, z);

        //Accumulate rotation proportional to the band intensity, then apply it.
        sumRotX += intensity*(rotX/1000);
        sumRotY += intensity*(rotY/1000);
        sumRotZ += intensity*(rotZ/1000);
        rotateX(sumRotX);
        rotateY(sumRotY);
        rotateZ(sumRotZ);

        //The box grows slightly with intensity.
        box(100+(intensity/2));
        popMatrix();

        //Fly toward the camera; faster when loud overall.
        z += (1+(intensity/5)+(pow((scoreGlobal/150), 2)));

        //Once past the camera, respawn at the back in a new spot.
        if (z >= maxZ) {
          x = random(0, width);
          y = random(0, height);
          z = startingZ;
        }
      }
    }
    
    //Class to display the lines on the sides
    //An edge "wall": a thin box hugging one screen border, scaled by its band.
    class Wall {
      //Spawn depth and recycle depth.
      float startingZ = -10000;
      float maxZ = 50;

      //Anchor position and footprint.
      float x, y, z;
      float sizeX, sizeY;

      Wall(float x, float y, float sizeX, float sizeY) {
        //Anchor point on the chosen border.
        this.x = x;
        this.y = y;
        //Random depth along the tunnel.
        this.z = random(startingZ, maxZ);

        //Floor/ceiling walls get a wide footprint; side walls get a tall one.
        this.sizeX = sizeX;
        this.sizeY = sizeY;
      }

      //Draw both layers of the wall and advance it toward the camera.
      void display(float scoreLow, float scoreMid, float scoreHi, float intensity, float scoreGlobal) {
        //Zone scores give the hue; overall volume gives the base opacity.
        color displayColor = color(scoreLow*0.67, scoreMid*0.67, scoreHi*0.67, scoreGlobal);

        //Fade with depth to fake fog in the distance.
        fill(displayColor, ((scoreGlobal-5)/1000)*(255+(z/25)));
        noStroke();

        //First box: pulses with this wall's band intensity (clamped at 100).
        pushMatrix();
        translate(x, y, z);
        if (intensity > 100) intensity = 100;
        scale(sizeX*(intensity/100), sizeY*(intensity/100), 20);
        box(1);
        popMatrix();

        //Second box: constant size, dimmer color.
        displayColor = color(scoreLow*0.5, scoreMid*0.5, scoreHi*0.5, scoreGlobal);
        fill(displayColor, (scoreGlobal/5000)*(255+(z/25)));
        pushMatrix();
        translate(x, y, z);
        scale(sizeX, sizeY, 10);
        box(1);
        popMatrix();

        //Drift toward the camera with the overall volume; recycle at maxZ.
        z += (pow((scoreGlobal/150), 2));
        if (z >= maxZ) {
          z = startingZ;
        }
      }
    }
    
  • Sketch will only use 1 audio input

    To preface, I'm using a Focusrite Scarlett 2i2 audio interface with 2 mics plugged into it for the audio inputs. I'm also running on Mac OS Sierra (not sure if this matters). Whenever I try to use two AudioIn variables, my program will only use one of them and applies that channel to both AudioIns.

    To clarify, this works:

    AudioIn inputP1 void setup(){ inputP1 = new AudioIn(this, 0); }

    But this one takes channel 1 and applies it to both inputP1 and inputP2:

    AudioIn inputP1 AudioIn inputP2 void setup(){ inputP1 = new AudioIn(this, 0); inputP2 = new AudioIn(this, 1); }

    It detects both channels just fine, as long as there is only one AudioIn variable. Is there any way I can use both channels as separate variables in the same sketch?

  • (SOLVED)(Minim/Beads/Sound) How can I play back the LineIn signal (LineIn to LineOut)

    I wrote a music visualization which so far worked using the Minim library playing back mp3 files. Now I tried modifying it so that it would visualize an audio signal in real time. That way, I ran into a latency problem - the visuals are slightly out of sync with the sound (the problem gets less pronounced with buffer sizes as small as 128bits, however still very noticeable.)

    Can I use an audio source other than my PC, connect it via LineIn to the processing sketch and play it back from there, so that the visualization and the playback have the same delay?

    I tried achieving this using Beads, this solution using Minim as well as its LiveInput class, but had issues with very glitchy audio playback in all cases.

    The processing.sound library seems to handle that better, this bit of code seems to do exactly what I want:

    import processing.sound.*;

    AudioIn input;

    void setup() {
    input = new AudioIn(this, 0); input.play(); }

    void draw() { background(0); }

    However, I have experienced it to be very prone to crashing on Windows as soon as the code becomes slightly more complex than that (Target VM fails to initialize, reproducable with some of the included examples, the one most closely related to what I am trying to achieve would be FFTSpectrum), and it does not seem to be in active development anymore.

    Is there any way to accomplish this only using Minim or is there a way to combine both libraries - using minim for the analysis and processing.sound for the playback?

  • sound library in Python Mode

    Hi All,

    Apologies right off the bat for what may be a stupid question - but I'm working in Python mode and would like to be able to use the sound library. Specifically, I have an FFT visualization script from Java mode that I'd like to be able to adapt for something I'm working on in Python mode. I'm not sure how to go about translating the code that references the library, though (I'm pasting some for example here). Are there any resources out there that might help with this? I've looked for documentation on the sound library within Python mode but haven't found anything yet. Any other suggestions? Thanks!

    // Create an Input stream which is routed into the Amplitude analyzer fft = new FFT(this, bands); in = new AudioIn(this, 0);

    // start the Audio Input in.start();

    // patch the AudioIn fft.input(in);

  • how can my lines become thicker (bold)when mouse is over?
    import processing.sound.*;
    
    Amplitude amp;
    AudioIn in;
    
    float z = 0; 
    int value = 0;
    // One-time setup: canvas, white background, and mic-to-analyzer wiring.
    void setup() {
      size(800, 800);
      background(255);

      // Route the default mic (channel 0) into an amplitude analyzer.
      amp = new Amplitude(this);
      in = new AudioIn(this, 0);
      in.start();
      amp.input(in);
    }      
    
    // Each frame: if the mic level is loud enough, scatter a grid of
    // noise-displaced points; otherwise wipe the canvas white.
    void draw()
    {
      noStroke();
      fill(0, 10);
      stroke(0, 100, value);

      // Log the current level (note: analyze() is called again below, so the
      // printed value and the tested value may differ slightly).
      println(amp.analyze());

      if (amp.analyze() > 0.03) {
        // Rows every 20px; a point per pixel column, displaced by Perlin noise.
        for (float row = 0; row < height; row = row + 20) {
          for (float col = 0; col < width; col = col + 1) {
            point(col, row + map(noise(col/150, row/150, z), 0, 1, -100, 100));
          }
        }
        // Scroll through noise space a little each frame.
        z = z + 0.02;
      } else {
        background(255);
      }
    }
    
  • Identifying Parameter Passed to Draw Function

    @GoToLoop can you elaborate more on line 14. Every time mouseIsPressed, the canvas is being drawn but passed the parameter of latitude? Also, can you explain text(draw.lat + '\n' + draw.lon, width>>1, height>>1);

    I understand the text function but I do not understand the function of draw.lat and draw.lon. The API I am using is here, http://api.open-notify.org/iss-now.json. I am attempting to have the JSON data refresh every second and update to the text function. What I have is below.

    HTML

    <!DOCTYPE html>
    <html>
        <head>
            <!-- p5 core is loaded first, then its dom/sound add-ons, then the sketch. -->
            <script src="Resources/p5.js"></script>
            <script language="javascript" type="text/javascript" src="Resources/p5.dom.js"></script>
            <script language="javascript" type="text/javascript" src="Resources/p5.sound.js"></script>
            <script language="javascript" type="text/javascript" src="Sketches/Painthead/Painthead.js"></script>
            <title>painthead</title>
        </head>
        <body>
        </body>
    </html>
    

    JS (Painthead.js)

    // p5 instance-mode sketch: shows the ISS position while the mouse is held,
    // jittered by the microphone level.
    var PH13 = function ( p ) {
        var myCanvas;
        var mic;
        var micLevel;
        // Closure state updated asynchronously by GetValue(); placeholders are
        // shown until the first fetch completes.
        var lat = "...";
        var long = "...";

        // Fetch the current ISS position into `lat`/`long`.
        // BUG FIX: the original declared `var lat`/`var long` INSIDE the
        // callback (shadowing the outer variables) and passed them as
        // arguments to p.draw(lat, long). p5 itself calls draw() with NO
        // arguments every frame, so after the first press the parameters were
        // undefined — which is exactly the reported
        // "Cannot read property 'toString' of undefined" error.
        p.GetValue = function() {
            var xhr = new XMLHttpRequest();
            xhr.onreadystatechange = function() {
                    if (this.readyState == 4 && this.status == 200) {
                        var issInfo = JSON.parse(xhr.responseText);
                        lat = issInfo.iss_position.latitude;    // no `var`: update the closure
                        long = issInfo.iss_position.longitude;
                    }
                };
                xhr.open("GET", "http://api.open-notify.org/iss-now.json", true);
                xhr.send();
        }
        p.centerCanvas = function() {
            var x = (p.windowWidth - p.width) / 2;
            myCanvas.position(x, 8450);
            }

        p.setup = function() {
            myCanvas = p.createCanvas(1000, 600);
            p.centerCanvas();
            mic = new p5.AudioIn();  // BUG FIX: missing semicolon
            mic.start();
            p.GetValue();
            }

        p.windowResized = function() {
            p.centerCanvas();
            } 

        // Runs every frame; reads lat/long from the closure instead of taking
        // parameters (p5 never passes any).
        p.draw = function() {
            var vol, volspec;  // BUG FIX: these were implicit globals
            micLevel = mic.getLevel();   
            if (p.mouseIsPressed) {
                if ((micLevel >= 0) && (micLevel <.003)) {
                    vol = 0.0001;
                    volspec = 0.1;
                } else if ((micLevel >= 0.003) && (micLevel <= .4)){
                    vol = 1.4;
                    volspec = 0.06;
                } else {
                    vol = 2000;
                    volspec = 0;
                }
                p.GetValue();  // refresh the position while the mouse is held
                p.background('#fff1e1');
                p.text("IS", 3000*volspec*micLevel+400, 3000*volspec*micLevel+300);
                p.text("S", 4000*vol*micLevel+411, -2000*vol*micLevel+300);
                p.text("POSITION", -2000*vol*micLevel+ 425, -3500*vol*micLevel+300);
                p.text("Lat: ", -6000*vol*micLevel+412, -4000*vol*micLevel+350);
                // BUG FIX: `Lat`/`Long` (capitalized) were undefined identifiers.
                p.text(lat, 6000*volspec*micLevel+435, -4000*volspec*micLevel+350);
                p.text("Long: ", -3000*vol*micLevel+412, 4000*vol*micLevel+390);
                p.text(long, 3000*volspec*micLevel+443, -4000*volspec*micLevel+390);
            } else {
                p.background(255);
            }
        }
    }
    myp5 = new p5(PH13);
    

    This works the first time the user's mouseIsPressed. The lat and long update. If the user lets go of mouse and attempts to press their mouse again, to reveal the lat and long values, an error

    Uncaught TypeError: Cannot read property 'toString' of undefined
        at p5.Renderer2D.text (p5.js:13140)
        at p5.text (p5.js:26611)
        at PH13.p.draw (Painthead.js:688)
        at p5.redraw (p5.js:14113)
        at p5.<anonymous> (p5.js:9147)
    

    is received. I am assuming this is because at some point the draw function is attempting to print the vals of lat and long, but they are not defined because the callback from p.GetValue has not had time to execute, or there is some discontinuity there. I am aware of the loadJSON function here https://p5js.org/reference/#/p5/loadJSON but am not sure how to update this information every second the draw loop is executed, as I want this information updated while the user is viewing. Sorry for the long post. Thanks for the help @GoToLoop and @kfrajer

  • Sound reactive visual, swapping movie clips

    Instead of playing the mp3 file I suppose I need to change it to audio in from mic.

    I know there is AudioIn() and I need to start the audio in stream, but I don't know where to begin... Do I need to set up a function like this?

    var mic; function setup(){ mic = new AudioIn() mic.start(); }

    Please advise. Thank you a bunch,

  • Keystone Library error

    I am trying to keystone a generative diagram using the keystone library with Processing 3.1. The keystone example code, when input into my design, executes fine but doesn't bring the graphics into the keystone area. I am not sure what's wrong — is it because my sketch calls the graphic via a class and a parented sketch? Is there anything I should know to get this running okay?

    /*
    
    import deadpixel.keystone.*;
     import megamu.mesh.*;
    
    Keystone ks;
    CornerPinSurface surface;
    
    PGraphics offscreen;
    
    
    // Menu GUI. Bolleans to change visualizations
    boolean debug = false;
    boolean view = true;
    boolean info=true;
    boolean voronoi=false;
    boolean lines=true;
    
    
    
    FlowField flowfield; // Flowfield object
    ArrayList<Vehicle> vehicles; // An ArrayList of vehicles
    int nrParticles = 100; // number of elements/particles
    //float[][] points = new float[nrParticles][2]; // Array for the VORONOI cells          //   #############   HACK
    
    
    
      void setup() {
      size(1280,720, P3D);
    
    
    
    
      ks = new Keystone(this);
      surface = ks.createCornerPinSurface(640, 360, 20);
      offscreen = createGraphics(640,360, P3D);
    
    
    
    
      // Resolution of the flowfield. nr of cells
      flowfield = new FlowField(50);
    
      // create the elements in a random position
      vehicles = new ArrayList<Vehicle>();
      for (int i = 0; i < nrParticles; i++) {
        vehicles.add(new Vehicle(new PVector(random(width), random(height)), random(2, 15), random(0.1, 1.5)));
      }
    
    
      //Audiostuff      
      input = new AudioIn(this, 0);  //Create an Audio input and grab the 1st channel
      input.start();// start the Audio Input
    
      rms = new Amplitude(this); // create a new Amplitude analyze
      rms.input(input);  // Patch the input to an volume analyzer
      input.amp(2.0);
    
      smooth();
    
    
    }
    
    void draw() {
    
    
      PVector surfaceMouse = surface.getTransformedMouse();
    
    
      //amplitude stuff
      float analise = map(rms.analyze(), 0, 0.5, 0.0, 50.0);
      audioIn+= (analise-audioIn)*0.01; //smoothing the audioIn vall
    
      background(0);
    
      flowfield.update(); // Flowfield update and display 
      if (debug) flowfield.display(); // If debug mode True, display flowfield 
    
      // Tell all the vehicles to follow the flow field
      for (Vehicle v : vehicles) {
        v.follow(flowfield);
        v.run();
      }
    
      // DRAWING VORONOI
      nrParticles=vehicles.size();                                                   //   #############   HACK
      int nrVoronois=int(map(720, 0, height, 0, nrParticles));    
    
      float[][] points = new float[nrParticles][2]; // Array for the VORONOI cells   //   #############   HACK
    
      //GETTING VEHICLES POSITION TO VORONOI'S POINTS
      for (int i=0; i<vehicles.size(); i++) {   
        points[i][0]= vehicles.get(i).location.x;
        points[i][1]= vehicles.get(i).location.y;
      }
    
      Voronoi myVoronoi = new Voronoi(points);
      MPolygon[] myRegions = myVoronoi.getRegions();
    
      for (int i=0; i<nrVoronois; i++)
      {
        // an array of points
       float[][] regionCoordinates = myRegions[i].getCoords();
    
       fill(int(map(i*255.0, 147, i*255.0/nrParticles, 130 * (i % 2), 255)));
    
         //fill(int(map(i*255.0/nrParticles, 130 * (i % 2), 225 * (i % 2),0,0)));
    
    
    
    
    //fill(255,int(map(sum[i],0,10,255,0)));  // dar valor do FFT ao interior do voronoi
        if (voronoi) myRegions[i].draw(this); // draw this shape
      }
    
      float[][] myEdges = myVoronoi.getEdges();
    
      for (int i=0; i<myEdges.length; i++)
    
      {
    
         float startX = myEdges[i][0];
        float startY = myEdges[i][1];
        float endX = myEdges[i][2];
        float endY = myEdges[i][3];
        stroke(255);
        if (lines) line( startX, startY, endX, endY );
    
    
    }
    
    // Draw the scene, offscreen
      offscreen.beginDraw();
      offscreen.background(255);
      offscreen.fill(0, 255, 0);
      offscreen.ellipse(surfaceMouse.x, surfaceMouse.y, 75, 75);
      offscreen.endDraw();
    
      // most likely, you'll want a black background to minimize
      // bleeding around your projection area
      background(0);
    
      // render the scene, transformed using the corner pin surface
      surface.render(offscreen);
    
    
    
    
    
    }
    
    // Keyboard Interaction
    // Keyboard interaction: every shortcut dispatched from one switch.
    // (key can only match a single case, so folding the original
    // switch + if-chain into one switch leaves behavior unchanged.)
    void keyPressed() {
      switch(key) {
      case 'c':
        // enter/leave calibration mode, where surfaces can be warped
        // and moved
        ks.toggleCalibration();
        break;
      case 'o':
        // loads the saved layout
        ks.load();
        break;
      case 's':
        // saves the layout
        ks.save();
        break;
      case ' ':
        debug = !debug;     // toggle flowfield debug view
        break;
      case 'i':
        info = !info;       // toggle info overlay
        break;
      case 'p':
        view = !view;       // toggle particle view
        break;
      case 'v':
        voronoi = !voronoi; // toggle voronoi regions
        break;
      case 'l':
        lines = !lines;     // toggle voronoi edges
        break;
      case 'r':
        // remove the oldest vehicle and report the new count
        vehicles.remove(0);
        println(vehicles.size());
        break;
      case 'a':
        // spawn a new vehicle just above the top edge
        vehicles.add(new Vehicle(new PVector(random(width), 0-3), random(2, 5), random(0.1, 0.5)));
        println(vehicles.size());
        break;
      }
    }
    
  • Printing the location of the ISS onto an instance of a p5.js canvas

    I am attempting to print the location of the International Space Station in an instance of a p5.js sketch.

    HTML

    <!DOCTYPE html>
    <html>
        <head>
            <link rel="stylesheet" type="text/css" href="PaintStyle.css">.
            <script src="Resources/p5.js"></script>
            <script language="javascript" type="text/javascript" src="Resources/p5.dom.js"></script>
            <script language="javascript" type="text/javascript" src="Resources/p5.sound.js"></script>
            <script language="javascript" type="text/javascript" src="Sketches/Painthead/Painthead.js"></script>
            <script language="javascript" type="text/javascript" src="Resources/ISSTrack.js"></script>
            <title>painthead</title>
        </head>
        <body>
            <div id="back">
                <a href="/projects.html">back</a>
            </div>
        </body>
    </html>
    

    ISSTrack.js

    // Fetches the current ISS position from the open-notify API via an
    // asynchronous XMLHttpRequest. On success (readyState 4, HTTP 200) the
    // parsed latitude/longitude are passed to callback() (console logging)
    // and ReverseGeoCode() (human-readable place name).
    // NOTE(review): the endpoint is plain http — a page served over https
    // will block this as mixed content; confirm the hosting scheme.
    function GetValue() {
                var xhr = new XMLHttpRequest();
                // Handler installed before send(), so it is already in
                // place whenever the response arrives.
                xhr.onreadystatechange = function() {
                    if (this.readyState == 4 && this.status == 200) {
                        var requestResponse = xhr.responseText;
                        var issInfo = JSON.parse(requestResponse);
                        var Lat = issInfo.iss_position.latitude;
                        var Long = issInfo.iss_position.longitude;
                        callback(Lat, Long); //Calls function to print to console
                        ReverseGeoCode(Lat, Long); //Call to reverse geocode
                    }
                };
                // third argument true => asynchronous request
                xhr.open("GET", "http://api.open-notify.org/iss-now.json", true);
                xhr.send();
            }
    
    //****PRINT LAT AND LONG TO CONSOLE*****
                // Debug sink: logs the latest ISS coordinates as received.
                function callback(Lat, Long) {
                    console.log(Lat); 
                    console.log(Long);
                }
    //
                // Poll once immediately, then every 5 seconds (open-notify
                // asks clients not to poll faster than 1 Hz).
                GetValue(); //initial call
                setInterval(GetValue, 5000); //Repeat every 5 seconds
    
    // *******REVERSE GEOCODING TO FIND LOCATION*******            
    // Reverse-geocodes (Lat, Long) through locationiq.org and logs a
    // human-readable place name; falls back to "somewhere over an ocean"
    // when the service returns an error object (e.g. open water).
    // NOTE(review): "APIKEYHERE" is a placeholder — a real key is required
    // for the request to succeed.
    function ReverseGeoCode (Lat, Long) {        
                var xhr = new XMLHttpRequest();
                xhr.open("GET", "http://"+"locationiq.org/v1/reverse.php?format=json&key=APIKEYHERE&lat="+Lat+"&lon="+Long, true);
                xhr.send();
                // Assigning the handler after send() is safe here: JS is
                // single-threaded, so the callback cannot fire before this
                // function returns.
                xhr.onreadystatechange = function() {
                    if (this.readyState == 4 && this.status == 200) {
                        var requestResponse = xhr.responseText; //Store full response in var
                        var geoCodeInfo = JSON.parse(requestResponse); //turn received information into a javascript object
                        if (geoCodeInfo.error) {
                            var Display = "somewhere over an ocean"; 
                        } else {
                            var Display = geoCodeInfo.display_name; //Set display name to var
                        }
                        console.log(Display);
                    }
                };
    }
    

    Painthead.js (the important instance)

    // p5.js instance-mode sketch: a mic-reactive canvas positioned on the page.
    var PH13 = function ( p ) {
        var myCanvas;
        var mic; // p5.AudioIn microphone capture
    
        // Horizontally centers the canvas in the window; the fixed y offset
        // (8450) pins it to a specific spot far down the page.
        p.centerCanvas = function() {
            var x = (p.windowWidth - p.width) / 2;
            myCanvas.position(x, 8450);
            }
    
        p.setup = function() {
            myCanvas = p.createCanvas(1000, 600);
            p.centerCanvas();
            mic = new p5.AudioIn()
            mic.start();
            p.centerCanvas();
            }
    
        // Keep the canvas centered when the browser window is resized.
        p.windowResized = function() {
            p.centerCanvas();
            } 
    
        p.draw = function() {
            if (p.mouseIsPressed) {
                // NOTE(review): micLevel, vol and volspec are never declared,
                // so they become implicit globals — presumably intended to be
                // shared with other scripts on the page; verify.
                micLevel = mic.getLevel();
                // Bucket the mic level into three loudness bands.
                if ((micLevel >= 0) && (micLevel <.003)) {
                    vol = 0.0001;
                    volspec = 0.1;
                } else if ((micLevel >= 0.003) && (micLevel <= .4)){
                    vol = 1.4;
                    volspec = 0.06;
                } else {
                    vol = 1000;
                    volspec = 0;
                }
                p.background('#fff1e1');
               //print Lat & Long of ISS as well as Display var from ISSTrack.js
            }
            else {
                p.background(255);
            }
        }
    }
    myp5 = new p5(PH13);
    

    I want to be able to make the strings Lat, Long, and Display move based on the input mic from the user's computer. I know how to make the text responsive; I just do not know how to pass the string to p.draw() and print it through the p5.js call text(). The only fix I can think of would be to call GetValue within p.draw(), but I do not want to run the function that often, as open-notify (the ISS tracking API) states "So polling more than 1 Hz would be useless except to add unnecessary strain to the servers." So in total I want to be able to poll the location of the ISS, have it go through the reverse geocoding function once every 5 seconds, send those variables into the correct instance, and print them on the canvas.

  • [PAID] Help to debug patch

    The problem is that you have points defined based on nrParticles, but this variable is never updated when you add or remove vehicles. Notice that the 2D array of points also depends on this variable. A quick solution is this below. I am presenting the code of only the main file.

    It is a hacked solution and it works.

    Kf

    /*
    Code for Reduction/Reflection's audioreactive visuals.
     Made by Rodrigo Carvalho / Visiophone (2016)
     www.visiophone-lab.com
    
     Code build over the "Flow Field" example by
     Daniel Shiffman on "The Nature of Code"
     http://natureofcode.com
    
     Needs Mesh library by Lee Byron http://leebyron.com/mesh/
     Works on Processing 3.0.1
     */
    
     import megamu.mesh.*;
    
    
    // Menu GUI. Bolleans to change visualizations
    boolean debug = false;
    boolean view = true;
    boolean info=true;
    boolean voronoi=false;
    boolean lines=true;
    
    FlowField flowfield; // Flowfield object
    ArrayList<Vehicle> vehicles; // An ArrayList of vehicles
    int nrParticles = 30; // number of elements/particles
    //float[][] points = new float[nrParticles][2]; // Array for the VORONOI cells          //   #############   HACK
    
    
    
      void setup() {
    
    
    
    
      size(1280, 720, P3D);
      PJOGL.profile=1;
    
    
    
      // Resolution of the flowfield. nr of cells
      flowfield = new FlowField(50);
    
      // create the elements in a random position
      vehicles = new ArrayList<Vehicle>();
      for (int i = 0; i < nrParticles; i++) {
        vehicles.add(new Vehicle(new PVector(random(width), random(height)), random(2, 15), random(0.1, 1.5)));
      }
    
      noCursor(); // hide the mouse
    
      //// INICIATE VORONOY STUFF                       //   #############   HACK
      //for (int i=0; i<nrParticles; i++) {
      //  points[i][0]= random(800);
      //  points[i][1]= random(800);
      //}
    
      //Audiostuff      
      input = new AudioIn(this, 0);  //Create an Audio input and grab the 1st channel
      input.start();// start the Audio Input
    
      rms = new Amplitude(this); // create a new Amplitude analyze
      rms.input(input);  // Patch the input to an volume analyzer
      input.amp(1.0);
    
      smooth();
    
    
    
    }
    
    void draw() {
    
      //amplitude stuff
      float analise = map(rms.analyze(), 0, 0.5, 0.0, 50.0);
      audioIn+= (analise-audioIn)*0.01; //smoothing the audioIn vall
    
      background(0);
    
      flowfield.update(); // Flowfield update and display 
      if (debug) flowfield.display(); // If debug mode True, display flowfield 
    
      // Tell all the vehicles to follow the flow field
      for (Vehicle v : vehicles) {
        v.follow(flowfield);
        v.run();
      }
    
      // DRAWING VORONOI
      nrParticles=vehicles.size();                                                   //   #############   HACK
      int nrVoronois=int(map(720, 0, height, 0, nrParticles));    
    
      float[][] points = new float[nrParticles][2]; // Array for the VORONOI cells   //   #############   HACK
    
      //GETTING VEHICLES POSITION TO VORONOI'S POINTS
      for (int i=0; i<vehicles.size(); i++) {   
        points[i][0]= vehicles.get(i).location.x;
        points[i][1]= vehicles.get(i).location.y;
      }
    
      Voronoi myVoronoi = new Voronoi(points);
      MPolygon[] myRegions = myVoronoi.getRegions();
    
      for (int i=0; i<nrVoronois; i++)
      {
        // an array of points
       float[][] regionCoordinates = myRegions[i].getCoords();
    
       fill(int(map(i*255.0, 147, i*255.0/nrParticles, 130 * (i % 2), 255)));
    
         //fill(int(map(i*255.0/nrParticles, 130 * (i % 2), 225 * (i % 2),0,0)));
    
    
    
    
    //fill(255,int(map(sum[i],0,10,255,0)));  // dar valor do FFT ao interior do voronoi
        if (voronoi) myRegions[i].draw(this); // draw this shape
      }
    
      float[][] myEdges = myVoronoi.getEdges();
    
      for (int i=0; i<myEdges.length; i++)
    
      {
        float startX = myEdges[i][0];
        float startY = myEdges[i][1];
        float endX = myEdges[i][2];
        float endY = myEdges[i][3];
        stroke(255);
        if (lines) line( startX, startY, endX, endY );
    
    
    }
    
    
    
    
    // Menu GUI
      //if (info) {
        // Instructions
       // fill(0, 220);
       // stroke(180);
       // rect(10, 10, 220, 150);
       // fill(255);
       // text("FPS: "+frameRate, 20, 30);
       // text("VIEW INFO ('i'): "+info, 20, 45);
       // text("VIEW FLOWFIELD ('SPACE'): "+debug, 20, 60);
       // text("VIEW PARTICLE ('p'): "+view, 20, 75);
       // text("VIEW VORONOI REGIONS ('v'):" +voronoi, 20, 90);
       // text("VIEW VORONOI LINES ('l'):" +lines, 20, 105);
       // text("NR ELEMENTS :"+nrParticles, 20, 135);
     // }
    }
    
    // Keyboard Interaction
    // Keyboard interaction. A single switch replaces the original chain of
    // independent ifs; key matches at most one case, so behavior is identical.
    void keyPressed() {
      switch(key) {
      case ' ': debug = !debug; break;       // toggle flowfield debug view
      case 'i': info = !info; break;         // toggle info overlay
      case 'p': view = !view; break;         // toggle particle view
      case 'v': voronoi = !voronoi; break;   // toggle voronoi regions
      case 'l': lines = !lines; break;       // toggle voronoi edges
      case 'r':
        // remove the oldest vehicle and report the new count
        vehicles.remove(0);
        println(vehicles.size());
        break;
      case 'a':
        // spawn a new vehicle just above the top edge
        vehicles.add(new Vehicle(new PVector(random(width), 0-3), random(2, 5), random(0.1, 0.5)));
        println(vehicles.size());
        break;
      }
    }
    
  • Array Index Out Of Bounds Exception:0

    I am having a problem with the below code -

    It is happening at line 72 — this is the line that gets highlighted when the sketch crashes.

    Voronoi myVoronoi = new Voronoi( points );

    Has anyone any ideas what the issue is? Thank you!

    import megamu.mesh.*;
    
    // Menu GUI. Bolleans to change visualizations
    boolean debug = false;
    boolean view = true;
    boolean info=true;
    boolean voronoi=false;
    boolean lines=true;
    
    FlowField flowfield; // Flowfield object
    ArrayList<Vehicle> vehicles; // An ArrayList of vehicles
    int nrParticles = 300; // number of elements/particles
    float[][] points = new float[nrParticles][2]; // Array for the VORONOI cells
    
      void setup() {
      size(1280, 720);
    
      // Resolution of the flowfield. nr of cells
      flowfield = new FlowField(50);
    
      // create the elements in a random position
      vehicles = new ArrayList<Vehicle>();
      for (int i = 0; i < nrParticles; i++) {
        vehicles.add(new Vehicle(new PVector(random(width), random(height)), random(2, 15), random(0.1, 1.5)));
      }
    
      noCursor(); // hide the mouse
    
      // INICIATE VORONOY STUFF
      for (int i=0; i<nrParticles; i++) {
        points[i][0]= random(800);
        points[i][1]= random(800);
      }
    
      //Audiostuff      
      input = new AudioIn(this, 0);  //Create an Audio input and grab the 1st channel
      input.start();// start the Audio Input
    
      rms = new Amplitude(this); // create a new Amplitude analyze
      rms.input(input);  // Patch the input to an volume analyzer
      input.amp(1.0);
    
      smooth();
    }
    
    void draw() {
    
      //amplitude stuff
      float analise = map(rms.analyze(), 0, 0.5, 0.0, 50.0);
      audioIn+= (analise-audioIn)*0.01; //smoothing the audioIn vall
    
      background(0);
    
      flowfield.update(); // Flowfield update and display 
      if (debug) flowfield.display(); // If debug mode True, display flowfield 
    
      // Tell all the vehicles to follow the flow field
      for (Vehicle v : vehicles) {
        v.follow(flowfield);
        v.run();
      }
    
      // DRAWING VORONOI
      int nrVoronois=int(map(mouseY, 0, height, 0, nrParticles));
    
      //GETTING VEHICLES POSITION TO VORONOI'S POINTS
      for (int i=0; i<vehicles.size(); i++) {   
        points[i][0]= vehicles.get(i).location.x;
        points[i][1]= vehicles.get(i).location.y;
      }
    
      Voronoi myVoronoi = new Voronoi( points );
      MPolygon[] myRegions = myVoronoi.getRegions();
    
      for (int i=0; i<nrVoronois; i++)
      {
        // an array of points
        float[][] regionCoordinates = myRegions[i].getCoords();
    
        fill(int(map(i*255.0, 147, i*255.0/nrParticles, 130 * (i % 2), 255)));
    
         //fill(int(i*255.0/nrParticles, 130 * (i % 2), 225 * (i % 2)));
    
    //fill(255,int(map(sum[i],0,10,255,0)));  // dar valor do FFT ao interior do voronoi
        if (voronoi) myRegions[i].draw(this); // draw this shape
      }
    
      float[][] myEdges = myVoronoi.getEdges();
    
      for (int i=0; i<myEdges.length; i++)
    
      {
        float startX = myEdges[i][0];
        float startY = myEdges[i][1];
        float endX = myEdges[i][2];
        float endY = myEdges[i][3];
        stroke(255);
        if (lines) line( startX, startY, endX, endY  
      }
    
  • [video] Play random video sequence

    Hi everyone, I'm new to Processing and I've been working on this video-art / installation project for my graduation.

    My question is: How can I set a random video to be played next?

    I managed to get the videos playing, but I can't set another random video to follow. My intention is to have a sequence of videos (maybe 10 or more) that will play in a shuffled order. I have looked around this and some other forums but couldn't find a specific answer. Hope you guys can help me out. Thanks!

    ps. if something doesn't make sense in my code please tell me :)

    import processing.video.*;
    import processing.sound.*;
    
    AudioIn input;
    Amplitude analyzer;
    int scale=4;
    
    Movie mov1, mov2;
    Capture cam;
    
    int n = 5; //total number of videos
    
    float vidN = random(1, n+1);
    int x = int (vidN);
    
    float vidN2 = random(1, n+1);
    int x2 = int (vidN2);
    
    void setup() {
      //frameRate (30);
      size(1280, 720);
      colorMode (HSB);
      //fullScreen();
      //background(0);
    
      mov1 = new Movie(this, nf(x, 2)+".mp4");
      mov1.loop();
      mov1.volume(100);
    
      mov2 = new Movie(this, nf(x2, 2)+".mp4");
      mov2.loop();
      mov2.volume(00);
    
      cam = new Capture(this, width, height);
      cam.start();  
    
      blendMode(DIFFERENCE);
    
      imageMode(CENTER);
    
      //Create an Audio input and grab the 1st channel
      input = new AudioIn(this, 0);
    
      // start the Audio Input
      input.start();
    
      // create a new Amplitude analyzer
      analyzer = new Amplitude(this);
    
      // Patch the input to an volume analyzer
      analyzer.input(input);
    }
    
    // Called by the video library whenever a movie has a new frame available;
    // read() copies that frame into the matching Movie object so the next
    // image() call draws it.
    void movieEvent(Movie m) {
      if (m == mov1) {
        mov1.read();
      } else if (m == mov2) {
        mov2.read();
      }
    }
    
    void draw() {    
      // Current input loudness from the Amplitude analyzer
      // (assumed roughly 0.0–1.0 — TODO confirm against the Sound library docs)
      float vol = analyzer.analyze();
    
      noStroke ();
    
      tint (255, 80);
      // Only redraw when the webcam has a fresh frame.
      if (cam.available()) {
        cam.read();
        image(cam, width/2, height/2, width, height); // Draw the webcam cam onto the screen
    
        // Scale the raw level so quiet input still has a visible effect
        vol = vol*scale;
    
        // Louder input => lower alpha, fading both movie layers out
        // (blendMode(DIFFERENCE) from setup mixes them with the cam image)
        tint (255, 100-vol*100);
        image(mov1, width/2, height/2, width, height);
    
        tint (255, 100-vol*100);
        image(mov2, width/2, height/2, width, height);
      }
    }  
    
  • Changing Color Over Time With MIDI

    This example uses the input from the mic.

    Kf

    float ampt;    // last analyzed mic amplitude
    PImage photo;  // image that gets tinted/scrambled by the mic level
    // Sound-library objects. These were used below but never declared in the
    // posted snippet, so it could not compile; declaring them fixes that.
    Amplitude amp;
    AudioIn in;
    
    // Sets up the canvas, routes the first audio-input channel into an
    // Amplitude analyzer, and loads the photo to be displayed.
    void setup() {
      size(1200, 800);
      background(255);
    
      // Create an Input stream which is routed into the Amplitude analyzer
      amp = new Amplitude(this);
      in = new AudioIn(this, 0);
      in.start();
      amp.input(in);
      // "fig.jpg" is expected in the sketch's data folder
      photo = loadImage("fig.jpg");
    }
    
    // Each frame: tint the photo by the current mic level, then scramble the
    // red channel of the bottom half of the canvas proportionally to it.
    void draw() {
      background(255);
      ampt = amp.analyze();  // current mic amplitude (level printed for debugging)
      println(ampt);
    
      // Louder input => brighter tint (ampt scaled into the 0–255 color range)
      float myColor = ampt*256; 
      tint((myColor), (myColor), (myColor));
    
      image(photo, 0, 0, width, height);
      loadPixels(); 
      // Copy each top-half pixel into the bottom half, randomizing its red
      // channel by a factor driven by the mic level; green/blue pass through.
      int halfImage = width*height/2;
      for (int i = 0; i < halfImage; i++) {
        pixels[i+halfImage] = color(constrain((pixels[i]>>16)*round(random(ampt)*100), 0, 255) & 0xff, pixels[i]>>8 & 0xff, pixels[i]&0xff);
      }
      updatePixels();
    }