We are about to switch to a new forum software. Until then we have removed the registration on this forum.
This code takes as input a WAV file (for now, only WAV files!), but unlike Minim it works in deferred time, so you can save every frame without the problem of a non-constant frameRate, and we will have no trouble synchronizing with the audio later.
the code generates points approximating the waveform of the buffer read in the for loop. the points are connected by lines, and the rangeMax parameter is used to define the maximum distance for which a line joins two points. the other modifiable parameters are: start - this parameter determines from which sample to start reading the audio buffer. end - this parameter determines which sample to stop reading the audio buffer. buffer - the size of the buffer read at each cycle
Note: the fps is determined by the buffer size, because one second of audio corresponds to 44100 samples, so the fps is computed as 44100 / buffer. If the buffer is 1024, the corresponding fps is 44100/1024 = 43.06640625. A larger buffer means more samples are read per cycle, and therefore a lower fps. To synchronize everything you will need to import the frames into a program like Adobe Premiere (or similar) and re-map the display time according to end minus start. So if we have saved 30 seconds of audio, we have to remap the total number of generated frames onto that duration. Or, if you prefer, you can generate at 24 fps by setting buffer = 44100/24.
On my PC it works perfectly, but if I try it here: https://www.openprocessing.org/sketch/556158 it doesn't work and gives me this error: $this_1.str is not a function
can someone help me please? thank you very much!
here the code:
// Offline (deferred-time) waveform renderer: globals.
Wav wav;                          // custom WAV reader, defined below
ArrayList<PVector> pos;           // points of the current buffer's waveform
int start=0, end=200;             // playback window, in seconds
int rangeMax=300, sampleStart=44100*start, sampleFinal=44100*end, buffer=1024; // rangeMax: max px distance to join two points with a line; buffer: samples rendered per frame
float max, min, sample_Max;       // sample_Max: peak absolute amplitude, used for normalization
color back=0, line=50, ball=color(0, 0, 255); // background, line and point colours
float[] r;                        // decoded mono/right-channel samples
// Load the WAV file, find its peak amplitude, and prepare the point list.
void setup() {
  size(1920, 1080, P2D);
  wav = new Wav();
  wav.read("ikeda_01.wav");       // decode the whole file into r[]
  r=wav.getSampleArray();
  // BUGFIX: the original wrote `float max=max(r);` / `float min=min(r);`,
  // declaring locals that shadowed the global fields, which therefore stayed
  // at 0. Assign the fields instead.
  max=max(r);
  min=min(r);
  sample_Max=max(abs(min), abs(max)); // peak absolute amplitude for normalization
  println(sample_Max);
  pos=new ArrayList<PVector>();
  frameRate(1000);                // render as fast as possible; timing is deferred
  println(buffer);
}
// Render one buffer of samples as points, join nearby points with lines,
// and save the frame to disk (one frame per `buffer` samples).
void draw() {
  // Robustness: stop cleanly if the requested window runs past the file
  // (sampleFinal = 44100*end may exceed the actual sample count).
  if (sampleStart >= r.length) {
    exit();
    return;
  }

  // Drop last frame's points. clear() replaces the old remove-in-a-loop
  // idiom, which skipped every other element per pass and needed an outer
  // while to finish — O(n^2) for no benefit.
  pos.clear();

  // Quiet passages (below 1/3 of the peak) use the dark colour scheme.
  if (abs(r[sampleStart])<sample_Max/3) {
    back=color(0);
    line=color(255, 50);
    ball=color(10, 100, 255);
  } else {
    back=color(255);
    line=color(0);
    ball=color(255, 0, 0);
  }
  background(back);

  // Plot every 3rd sample of this frame's buffer across the full width.
  stroke(ball);
  strokeWeight(12);
  int bufferEnd = min(sampleStart+buffer, r.length); // don't read past r[]
  for (int i=sampleStart; i<bufferEnd; i+=3) {
    int si=(int)map(i, sampleStart, (sampleStart+buffer), 0, width);
    float val=height/2+(r[i]/sample_Max)*(height/2); // amplitude -> y position
    point(si, val);
    pos.add(new PVector(si, val));
  }
  sampleStart+=buffer;

  // Join every pair of points closer than rangeMax. Starting y at i+1 avoids
  // drawing each line twice and drawing 0-length self-lines — the rendered
  // image is identical, just cheaper.
  stroke(line);
  strokeWeight(1);
  for (int i=0; i<pos.size(); i+=1) {
    for (int y=i+1; y<pos.size(); y+=1) {
      if (pos.get(i).dist(pos.get(y))<rangeMax) {
        line(pos.get(i).x, pos.get(i).y, pos.get(y).x, pos.get(y).y);
      }
    }
  }

  if (sampleStart>=sampleFinal) {
    println(sampleStart);
    exit();
  }

  // nf() zero-pads the frame number (asterion0001.png, ...), replacing the
  // four if/str() branches; avoiding str() here also sidesteps the
  // "$this_1.str is not a function" error in JS mode (see Wav.outName).
  saveFrame("E:/MediaWork/ProcesampleStarting/Sketch/pointline_audio_offline/frame/asterion" + nf(frameCount, 4) + ".png");
  println("save image n:", frameCount);
}
/*
Method of Class:
void read(String fileName)
void write(float[] r_, String fileName)
void write(float[] r_, float[] l_, String fileName)
void write(float[] r_, int numbit_, int samplingrate_, String fileName)
void write(float[] r_, float[] l_, int numbit_, int samplingrate_, String fileName)
int getSampleLength()
float getSampleValueLeft(int n)
float getSampleValueRight(int n)
float getSampleValue(int n)
float getSampleArray()
float getSampleArrayLeft()
float getSampleArrayRight()
int getBit()
int getSamplingRate()
*/
/*
 * Minimal WAV (RIFF) reader/writer for 16-bit mono or stereo PCM.
 * read() decodes a file into float arrays r (mono/right) and l (left);
 * the write() overloads normalize and save float data back to disk.
 * NOTE(review): assumes a canonical 44-byte header (fmt at offset 12,
 * data at offset 36); files with extra chunks (LIST, fact, ...) will be
 * misparsed — confirm against your source material.
 */
class Wav {
  int sampleLength, stereomono, numbit;   // stereomono: 1=mono, 2=stereo; numbit: bits/sample
  int b1, b2, b3, b4, x=0;                // scratch bytes for little-endian decoding
  int samplingrate;
  int Dim_file_dati;                      // size of the "data" chunk, in bytes
  float [] r, l;                          // right(/mono) and left channel samples
  int c1, c2, j;                          // j: running byte cursor
  byte[] b;                               // raw file bytes
  // Default output file name (timestamp).
  // BUGFIX: this field was named `str`, which shadows Processing's str()
  // when the sketch is transpiled to JavaScript (openprocessing) and caused
  // the reported "$this_1.str is not a function" error. Renamed to outName.
  String outName=str(hour())+"_"+str(minute())+"_"+str(second());

  Wav() {
  }

  // Load fileName and decode its header + samples into r (and l if stereo).
  void read(String fileName) {
    b=loadBytes(fileName);
    stereomono=b[22];                 // channel count from the fmt chunk
    // Sample rate: bytes 24..27, little endian; (x+256)%256 undoes Java's
    // signed bytes.
    b1=(b[24]+ 256) % 256;
    b2=(b[25]+ 256) % 256;
    b3=(b[26]+ 256) % 256;
    b4=(b[27]+ 256) % 256;
    samplingrate = b4*16777216+b3*65536+b2*256+b1;
    // Bits per sample: bytes 34..35.
    b1=(b[34] + 256) % 256;
    b2=(b[35] + 256) % 256;
    numbit=b2*256+b1;
    // Data chunk size: bytes 40..43.
    b1=(b[40]+ 256) % 256;
    b2=(b[41]+ 256) % 256;
    b3=(b[42]+ 256) % 256;
    b4=(b[43]+ 256) % 256;
    Dim_file_dati=b4*16777216+b3*65536+b2*256+b1;
    sampleLength=Dim_file_dati/(stereomono*(numbit/8));
    r = new float [sampleLength];
    l = new float [sampleLength];
    btf();
  }

  // Byte-to-float: convert raw little-endian 16-bit PCM into r (and l).
  void btf() {
    if (stereomono==1 && numbit==16) {
      j=44;   // samples start right after the canonical 44-byte header
      for (int i=0; i<sampleLength; i++)
      {
        c1=(b[j]+256) % 256;
        j++;
        c2=(b[j]+256) % 256;
        j++;
        r[i]= int(c2*256+c1);
        // BUGFIX: was `> 32768`, which left 32768 positive; two's complement
        // maps 32768..65535 to -32768..-1, so the test must be >=.
        if (r[i] >= 32768) r[i]=r[i]-65536;
      }
    }
    if (stereomono==2 && numbit==16) {
      j=44;
      for (int i=0; i<sampleLength; i++) {
        // Interleaved frames: right/first channel then left channel.
        c1=(b[j]+256) % 256;
        j++;
        c2=(b[j]+256) % 256;
        j++;
        r[i]= int(c2*256+c1);
        if (r[i] >= 32768) r[i]=r[i]-65536;
        c1=(b[j]+256) % 256;
        j++;
        c2=(b[j]+256) % 256;
        j++;
        l[i]= int(c2*256+c1);
        if (l[i] >= 32768) l[i]=l[i]-65536;
      }
    }
  }

  int getBit() {
    return numbit;
  }
  int getSamplingRate() {
    return samplingrate;
  }
  int getSampleLength() {
    return sampleLength;
  }
  // Mono/right-channel accessors.
  float getSampleValue(int n) {
    return r[n];
  }
  float[] getSampleArray() {
    return r;
  }
  float getSampleValueLeft(int n) {
    return l[n];
  }
  float getSampleValueRight(int n) {
    return r[n];
  }
  float[] getSampleArrayLeft() {
    return l;
  }
  float[] getSampleArrayRight() {
    return r;
  }

  // Convenience overloads: default to 16 bit / 44100 Hz / timestamp name.
  void write(float[] r) {
    write(r, outName);
  }
  void write(float[] r, float[] l) {
    write(r, l, outName);
  }
  void write(float[] r, String fileName) {
    write(r, 16, 44100, fileName);
  }
  void write(float[] r, float[] l, String fileName) {
    write(r, l, 16, 44100, fileName);
  }
  // Mono write: normalize to 100% of full scale, then emit header + data.
  void write(float[] r, int numbit, int samplingrate, String fileName) {
    outName=fileName;
    sampleLength=r.length;
    this.numbit=numbit;
    stereomono=1;
    this.samplingrate=samplingrate;
    this.r =r;
    normaLize(100);
    header();
  }
  // Stereo write.
  void write(float[] r, float[] l, int numbit, int samplingrate, String fileName) {
    outName=fileName;
    sampleLength=r.length;
    this.numbit=numbit;
    stereomono=2;
    this.samplingrate=samplingrate;
    this.r =r;
    this.l=l;
    normaLize(100);
    header();
  }

  // Scale each channel so its peak equals gain% of full scale (32767).
  void normaLize(float gain) {
    float maxSampleValueFinal=0;
    float maxSample=max(r);
    float minSample=min(r);
    float maxSampleValue=max(abs(minSample), abs(maxSample));
    if (stereomono==2) {
      // BUGFIX: the left-channel peak was computed from r again; use l.
      maxSample=max(l);
      minSample=min(l);
      maxSampleValueFinal=max(abs(minSample), abs(maxSample));
    }
    gain=gain*32767/100;   // reuse gain as the target peak amplitude
    for (int i=0; i<sampleLength; i++)
    {
      r[i]=gain*r[i]/maxSampleValue;
      if (stereomono==2) l[i]=gain*l[i]/maxSampleValueFinal;
    }
  }

  // Build the canonical 44-byte WAV header, append the sample data, and
  // save everything to <outName>.wav.
  void header() {
    int aux=sampleLength;
    int DimFile, dimFileData, sr = samplingrate;
    byte Stereomono = byte(stereomono), Numbit = byte(numbit);
    dimFileData = aux * Stereomono * (Numbit / 8);
    DimFile = dimFileData + 44;
    byte[] f=new byte[DimFile];
    f[0]='R';
    f[1]='I';
    f[2]='F';
    f[3]='F';
    byte f1, f2, f3, f4;
    // RIFF chunk size = file size - 8, little endian.
    f1=byte((DimFile-8)/16777216);
    f2=byte(((DimFile-8)- f1 * 16777216) / 65536);
    f3= byte(((DimFile-8) - f1 * 16777216 - f2 * 65536) / 256);
    f4 = byte((DimFile-8) % 256);
    f[4]=f4;
    f[5]=f3;
    f[6]=f2;
    f[7]=f1;
    f[8]='W';
    f[9]='A';
    f[10]='V';
    f[11]='E';
    f[12]='f';
    f[13]='m';
    f[14]='t';
    f[15]=' ';
    f[16]=16;           // fmt chunk length
    f[17]=0;
    f[18]=0;
    f[19]=0;
    f[20]=1;            // audio format 1 = PCM
    f[21]=0;
    f[22]=Stereomono;   // channel count
    f[23]=0;
    // Sample rate, little endian.
    f1=byte(sr/16777216);
    f2=byte((sr - f1 * 16777216) / 65536);
    f3= byte((sr - f1 * 16777216 - f2 * 65536) / 256);
    f4 = byte(sr % 256);
    // BUGFIX: these four bytes were hard-coded to 44100 (68, 172, 0, 0),
    // silently ignoring the samplingrate argument; write the computed bytes.
    f[24]=f4;
    f[25]=f3;
    f[26]=f2;
    f[27]=f1;
    // Byte rate = sr * channels * bytes per sample, little endian.
    int byte_per_secondo= sr * Stereomono * Numbit / 8;
    f1=byte(byte_per_secondo/16777216);
    f2=byte((byte_per_secondo- f1 * 16777216) / 65536);
    f3= byte((byte_per_secondo - f1 * 16777216 - f2 * 65536) / 256);
    f4 = byte(byte_per_secondo % 256);
    f[28]=f4;
    f[29]=f3;
    f[30]=f2;
    f[31]=f1;
    // Block align = bytes per sample frame across all channels.
    int byte_da_leggere_in_ogni_istante = Stereomono * Numbit / 8;
    f[32]=byte(byte_da_leggere_in_ogni_istante);
    f[33]=0;
    f[34]=Numbit;
    f[35]=0;
    f[36]='d';
    f[37]='a';
    f[38]='t';
    f[39]='a';
    // Data chunk size, little endian.
    f1=byte(dimFileData/16777216);
    f2=byte((dimFileData- f1 * 16777216) / 65536);
    f3= byte((dimFileData - f1 * 16777216 - f2 * 65536) / 256);
    f4 = byte(dimFileData % 256);
    f[40]=f4;
    f[41]=f3;
    f[42]=f2;
    f[43]=f1;
    byte[] out=new byte[stereomono*2*sampleLength+44];
    int d1, d2;
    for (int i=0; i<44; i++) {
      out[i]=f[i];
    }
    // BUGFIX: the sample loop below uses the FIELD j as its byte cursor but
    // never reset it — after read() it pointed past the old data, and on a
    // fresh Wav it was 0 (overwriting the header). Samples start at byte 44.
    j = 44;
    for (int i=0; i<sampleLength; i++) {
      // Offset by 65536 so negative samples wrap to their two's-complement
      // low 16 bits. Done in a local so write() no longer corrupts the
      // caller's arrays (BUGFIX: the old code did r[i]=r[i]+65536 in place).
      float vr = r[i] + 65536;
      d1=byte(vr/256);
      d2= byte(int(vr) % 256);
      out[j]=byte(d2);
      j++;
      out[j]=byte(d1);
      j++;
      if (stereomono==2) {
        float vl = l[i] + 65536;
        d1=byte(vl/256);
        d2= byte(int(vl) % 256);
        out[j]=byte(d2);
        j++;
        out[j]=byte(d1);
        j++;
      }
    }
    saveBytes(outName+".wav", out);
    println(outName);
  }
}
It's only worth having threads if you have a task that can be properly parallelized. So, maybe a useful learning exercise, but you need to work out what bits of your design are dependent on each other, and which can happen independently.
So, if you're doing lots of calculations in your updates, it might be worth parallelizing those, but they all have to complete before you draw()
It's only worth parallelizing update() and draw() if you can design in such a way that update() can change data at the same time as draw() is rendering data (the cause of your earlier problems). One way is to have two complete state models - both update() and draw() can read from one model at the same time, while update() is writing to the other model - at the end of draw() you synchronize and switch the models, and so on.
There's lots of resources online around parallelizing game loops. Good to get your head around, if not easy reading. :-)
My best advice to you at this stage is not too use threading. Either use delta time (change between frames) or update logic multiple times per draw(). Threading is hard! You'll get a lot of advice on here that might not work correctly, and likely won't fix your issue, and probably won't perform any better than doing this single threaded in the first place. Don't prematurely optimize as they say - get it working properly, then figure out whether you have a performance issue.
One problem you have is that with the issues you're talking about you'd probably want to synchronize your update()? and draw() methods, because otherwise you'll end up drawing in the middle of changing state. But if you just do that, they're effectively waiting for each other, and therefore you get no benefit from threading, just the overhead.
You want to draw the state at the end of completing the game loop before the game loop starts calculating the next state. There are various approaches to doing that in parallel, but they all involve needing a way to calculate the updates in one step while drawing the previous - eg. perhaps two logic states A & B, with the ability to update A while drawing B, and swapping to draw A while updating B.
Note also that depending on the renderer, doing things with PShape, etc in a different thread might cause issues no matter what you do.
What code should I put in Processing to synchronize a PIR sensor to open the webcam with an effect and an audio track?.
Look at the examples of arduino.
should be something like
arduino.digitalRead(....
connect this with if....
I decided to have a look at your code logically (so far the issue has been a structural one - you have large sections of code that aren't inside functions, which is clearly wrong).
Anyway, actually looking at the code, one thing jumps out at me right away:
void draw(){
// ...
delay(1000); // <-- blocks the entire sketch for a full second on every call
// ...
}
OH DEAR. This is probably not good. You are using delay(). You probably do not want to be using delay(). It does not do what you think it does. It is a terrible function and likely the cause of many of your issues.
What does delay() do? It BLOCKS EVERYTHING ELSE IN THE SKETCH FROM DOING ANYTHING for some amount of time. In this case, 1 second. It's not a nice way to wait. It's not a good way to make two events happen some time apart. It is the absolutely wrongest function for this.
Worse, you have it inside draw(). The draw() function is supposed to be re-rendering your sketch's graphics - and normally it calls draw() about 60 times every second. So these two delay() calls in draw() mean that your sketch is only rendering a single frame of animation every 2 seconds! This is probably not what you want!
Let's forget about the code you have. Let's just talk about your project. What are you trying to do? What is the sketch? You keep talking about wanting to synchronize sound and video to some sort of sensor, but that's not telling us what the sketch should do!
In simple terms, list the steps of things that the sketch does in order. An example might be:
Once we have a plain English description of what your goal is, we might be able to help you better.
How Can I synchronize it with Arduino?, Is not via port 9600?.
Hi again,
I am trying to connect Arduino with Processing using a PIR Sensor. The code in Arduino is alright, however I can not synchronize both to detect the movement and open the webcam and the sound. Please, Can you help me?.
Arduino:
//the time we give the sensor to calibrate (10-60 secs according to the datasheet)
int calibrationTime = 30;
//the time when the sensor outputs a low impulse
long unsigned int lowIn;
//the amount of milliseconds the sensor has to be low
//before we assume all motion has stopped
long unsigned int pause = 5000;
boolean lockLow = true;    // true until a HIGH is seen; gates the "motion detected" message
boolean takeLowTime;       // set when HIGH, so the next LOW records its timestamp
int pirPin = 3; //the digital pin connected to the PIR sensor's output
int ledPin = 13;           // on-board LED mirrors the PIR state
/////////////////////////////
//SETUP
// One-time initialization: open the serial link, configure the pins, and
// give the PIR sensor time to settle before reporting motion.
void setup(){
  Serial.begin(9600);
  pinMode(pirPin, INPUT);
  pinMode(ledPin, OUTPUT);
  digitalWrite(pirPin, LOW);

  // The PIR needs 10-60 s to calibrate (per its datasheet);
  // print one dot per second while we wait.
  Serial.print("calibrating sensor ");
  int secondsWaited = 0;
  while (secondsWaited < calibrationTime) {
    Serial.print(".");
    delay(1000);
    secondsWaited++;
  }
  Serial.println(" done");
  Serial.println("SENSOR ACTIVE");
  delay(50);
}
////////////////////////////
//LOOP
// Poll the PIR each pass: report the start of motion exactly once (gated by
// lockLow), then report motion as ended only after the output has stayed
// LOW for `pause` milliseconds.
void loop(){
  if(digitalRead(pirPin) == HIGH){
    digitalWrite(ledPin, HIGH);   //the led visualizes the sensors output pin state
    if(lockLow){
      //makes sure we wait for a transition to LOW before any further output is made:
      lockLow = false;
      Serial.println("---");
      Serial.print("motion detected at ");
      Serial.print(millis()/1000);
      Serial.println(" sec");
      delay(50);
    }
    takeLowTime = true;
  }
  if(digitalRead(pirPin) == LOW){
    digitalWrite(ledPin, LOW);    //the led visualizes the sensors output pin state
    if(takeLowTime){
      lowIn = millis();           //save the time of the transition from high to LOW
      takeLowTime = false;        //make sure this is only done at the start of a LOW phase
    }
    //if the sensor is low for more than the given pause,
    //we assume that no more motion is going to happen
    if(!lockLow && millis() - lowIn > pause){
      //makes sure this block of code is only executed again after
      //a new motion sequence has been detected
      lockLow = true;
      Serial.print("motion ended at ");   //output
      Serial.print((millis() - pause)/1000);
      Serial.println(" sec");
      delay(50);
    }
  }
}
Processing:
import processing.serial.*;
import processing.video.*;
import ddf.minim.*;
import ddf.minim.AudioPlayer;
// Size of each cell in the grid
int cellSize = 20;
// Number of columns and rows in our system
int cols, rows;
// Variable for capture device
Capture inputCam01;
Movie topLayer;        // overlay movie, fed by movieEvent()
Minim minim;
AudioPlayer song;      // looping background track
Serial myPort;         // serial link to the Arduino PIR sketch
void setup()
{
size(1280, 720);
frameRate(30);
cols = width / cellSize;
rows = height / cellSize;
colorMode(RGB, 255, 255, 255, 100);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
inputCam01 = new Capture(this, width, height);
// Start capturing the images from the camera
inputCam01.start();
background(0);
// we pass this to Minim so that it can load files from the data directory
minim = new Minim(this);
// loadFile will look in all the same places as loadImage does.
// this means you can find files that are in the data folder and the
// sketch folder. you can also pass an absolute path, or a URL.
song = minim.loadFile("data/untitled.wav");
song.play();
song.loop();
}
{
// I know that the first port in the serial list on my mac
// is Serial.list()[0].
// On Windows machines, this generally opens COM1.
// Open whatever port is the one you're using.
String portName = Serial.list()[0]; //change the 0 to a 1 or 2 etc. to match your port
myPort = new Serial(this, portName, 9600);
}
// Called by the video library whenever a new movie frame is available.
void movieEvent(Movie topLayer)
{
  topLayer.read();
}
// Render the mirrored webcam feed as a grid of rotated, red-tinted squares.
void draw()
{
  if (!inputCam01.available()) return;   // no new camera frame yet

  inputCam01.read();
  inputCam01.loadPixels();
  image(inputCam01, 0, 0);

  for (int col = 0; col < cols; col++) {
    for (int row = 0; row < rows; row++) {
      // Pixel coordinates of this cell's top-left corner.
      int x = col * cellSize;
      int y = row * cellSize;
      // Sample from the opposite side of the frame to mirror the image.
      int loc = (inputCam01.width - x - 1) + y * inputCam01.width;
      float r = red(inputCam01.pixels[loc]);
      // Red-driven tint with some transparency.
      color c = color(r, 50, 50, 75);

      // Draw one square, translated so rotation happens about its centre.
      pushMatrix();
      translate(x + cellSize/2, y + cellSize/2);
      rotate(2 * PI * brightness(c) / 255.0);  // brighter cells rotate further
      rectMode(CENTER);
      fill(c);
      noStroke();
      rect(0, 0, cellSize + 6, cellSize + 6);  // oversized for a little overlap
      popMatrix();
    }
  }
}
one way that i've found to synchronize serial data (coming from an arduino) is putting a timestamp on the serial data
actually perhaps part of my problem is the Arduino part?
I've changed the way the teensys feed to panels from the default format... im breaking up the video vertically into 5 strips, placing my x offset 20 percent over each time (I think) ... I assumed this was a percentage, maybe its not?
I've used this on the teensys:
/** OctoWS2811 VideoDisplay.ino - Video on LEDs, from a PC, Mac, Raspberry Pi
www.pjrc.com/teensy/td_libs_OctoWS2811.html
Copyright (c) 2013 Paul Stoffregen, PJRC.COM, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Update: The movie2serial program which transmit data has moved to "extras"
github.com/PaulStoffregen/OctoWS2811/tree/master/extras
Required Connections
--------------------
pin 2: LED Strip #1 OctoWS2811 drives 8 LED Strips.
pin 14: LED strip #2 All 8 are the same length.
pin 7: LED strip #3
pin 8: LED strip #4 A 100 to 220 ohm resistor should used
pin 6: LED strip #5 between each Teensy pin and the
pin 20: LED strip #6 wire to the LED strip, to minimize
pin 21: LED strip #7 high frequency ringining & noise.
pin 5: LED strip #8
pin 15 & 16 - Connect together, but do not use
pin 4: Do not use
pin 3: Do not use as PWM. Normal use is ok.
pin 12: Frame Sync
When using more than 1 Teensy to display a video image, connect
the Frame Sync signal between every board. All boards will
synchronize their WS2811 update using this signal.
Beware of image distortion from long LED strip lengths. During
the WS2811 update, the LEDs update in sequence, not all at the
same instant! The first pixel updates after 30 microseconds,
the second pixel after 60 us, and so on. A strip of 120 LEDs
updates in 3.6 ms, which is 10.8% of a 30 Hz video frame time.
Doubling the strip length to 240 LEDs increases the lag to 21.6%
of a video frame. For best results, use shorter length strips.
Multiple boards linked by the frame sync signal provides superior
video timing accuracy.
A Multi-TT USB hub should be used if 2 or more Teensy boards
are connected. The Multi-TT feature allows proper USB bandwidth
allocation. Single-TT hubs, or direct connection to multiple
ports on the same motherboard, may give poor performance.
*/
#include <OctoWS2811.h>
// The actual arrangement of the LEDs connected to this Teensy 3.0 board.
// LED_HEIGHT *must* be a multiple of 8. When 16, 24, 32 are used, each
// strip spans 2, 3, 4 rows. LED_LAYOUT indicates the direction the strips
// are arranged. If 0, each strip begins on the left for its first row,
// then goes right to left for its second row, then left to right,
// zig-zagging for each successive row.
#define LED_WIDTH 8 // number of LEDs horizontally
#define LED_HEIGHT 104 // number of LEDs vertically (must be multiple of 8)
#define LED_LAYOUT 0 // 0 = even rows left->right, 1 = even rows right->left
// The portion of the video image to show on this set of LEDs. All 4 numbers
// are percentages, from 0 to 100. For a large LED installation with many
// Teensy 3.0 boards driving groups of LEDs, these parameters allow you to
// program each Teensy to tell the video application which portion of the
// video it displays. By reading these numbers, the video application can
// automatically configure itself, regardless of which serial port COM number
// or device names are assigned to each Teensy 3.0 by your operating system.
//#define VIDEO_XOFFSET 0
//#define VIDEO_YOFFSET 0 // display entire image
//#define VIDEO_WIDTH 100
//#define VIDEO_HEIGHT 100
//#define VIDEO_XOFFSET 0
//#define VIDEO_YOFFSET 0 // display upper half
//#define VIDEO_WIDTH 100
//#define VIDEO_HEIGHT 50
//#define VIDEO_XOFFSET 0
//#define VIDEO_YOFFSET 50 // display lower half
//#define VIDEO_WIDTH 100
//#define VIDEO_HEIGHT 50
//#define VIDEO_XOFFSET 0
//#define VIDEO_YOFFSET 0 // display left half?
//#define VIDEO_WIDTH 50
//#define VIDEO_HEIGHT 100
//#define VIDEO_XOFFSET 50
//#define VIDEO_YOFFSET 0 // display right half?
//#define VIDEO_WIDTH 50
//#define VIDEO_HEIGHT 100
#define VIDEO_XOFFSET 60
#define VIDEO_YOFFSET 0 // display third vertical fifth?
#define VIDEO_WIDTH 20
#define VIDEO_HEIGHT 100
// Per-strip LED count for the OctoWS2811 driver.
// NOTE(review): the stock VideoDisplay example divides by 8 (one share per
// strip); the /4 here looks customized — confirm it matches the wiring.
const int ledsPerStrip = LED_WIDTH * LED_HEIGHT / 4;
DMAMEM int displayMemory[ledsPerStrip*6];          // DMA buffer used by the library
int drawingMemory[ledsPerStrip*6];                 // frame data received over USB serial
elapsedMicros elapsedUsecSinceLastFrameSync = 0;   // pacing clock for '$' frames
const int config = WS2811_800kHz; // color config is on the PC side
OctoWS2811 leds(ledsPerStrip, displayMemory, drawingMemory, config);
void setup() {
  pinMode(12, INPUT_PULLUP); // Frame Sync
  Serial.setTimeout(50);     // don't block forever on a short/corrupt frame
  leds.begin();
  leds.show();               // start with all LEDs cleared
}
// Serial video protocol: each message begins with a single command byte,
// most are followed by a 16-bit little-endian parameter and one full frame
// of raw drawingMemory bytes. Pin 12 carries the inter-board frame sync.
void loop() {
  //
  // wait for a Start-Of-Message character:
  //
  //   '*' = Frame of image data, with frame sync pulse to be sent
  //         a specified number of microseconds after reception of
  //         the first byte (typically at 75% of the frame time, to
  //         allow other boards to fully receive their data).
  //         Normally '*' is used when the sender controls the pace
  //         of playback by transmitting each frame as it should
  //         appear.
  //
  //   '$' = Frame of image data, with frame sync pulse to be sent
  //         a specified number of microseconds after the previous
  //         frame sync.  Normally this is used when the sender
  //         transmits each frame as quickly as possible, and we
  //         control the pacing of video playback by updating the
  //         LEDs based on time elapsed from the previous frame.
  //
  //   '%' = Frame of image data, to be displayed with a frame sync
  //         pulse is received from another board.  In a multi-board
  //         system, the sender would normally transmit one '*' or '$'
  //         message and '%' messages to all other boards, so every
  //         Teensy 3.0 updates at the exact same moment.
  //
  //   '@' = Reset the elapsed time, used for '$' messages.  This
  //         should be sent before the first '$' message, so many
  //         frames are not played quickly if time as elapsed since
  //         startup or prior video playing.
  //
  //   '?' = Query LED and Video parameters.  Teensy 3.0 responds
  //         with a comma delimited list of information.
  //
  int startChar = Serial.read();

  if (startChar == '*') {
    // receive a "master" frame - we send the frame sync to other boards
    // the sender is controlling the video pace.  The 16 bit number is
    // how far into this frame to send the sync to other boards.
    unsigned int startAt = micros();
    unsigned int usecUntilFrameSync = 0;
    int count = Serial.readBytes((char *)&usecUntilFrameSync, 2);
    if (count != 2) return;
    count = Serial.readBytes((char *)drawingMemory, sizeof(drawingMemory));
    if (count == sizeof(drawingMemory)) {
      unsigned int endAt = micros();
      unsigned int usToWaitBeforeSyncOutput = 100;
      if (endAt - startAt < usecUntilFrameSync) {
        usToWaitBeforeSyncOutput = usecUntilFrameSync - (endAt - startAt);
      }
      // Drive the sync line ourselves (master role), then show.
      digitalWrite(12, HIGH);
      pinMode(12, OUTPUT);
      delayMicroseconds(usToWaitBeforeSyncOutput);
      digitalWrite(12, LOW);
      // WS2811 update begins immediately after falling edge of frame sync
      digitalWrite(13, HIGH);   // on-board LED marks the update window
      leds.show();
      digitalWrite(13, LOW);
    }
  } else if (startChar == '$') {
    // receive a "master" frame - we send the frame sync to other boards
    // we are controlling the video pace.  The 16 bit number is how long
    // after the prior frame sync to wait until showing this frame
    unsigned int usecUntilFrameSync = 0;
    int count = Serial.readBytes((char *)&usecUntilFrameSync, 2);
    if (count != 2) return;
    count = Serial.readBytes((char *)drawingMemory, sizeof(drawingMemory));
    if (count == sizeof(drawingMemory)) {
      digitalWrite(12, HIGH);
      pinMode(12, OUTPUT);
      // Busy-wait until the requested inter-frame interval has elapsed.
      while (elapsedUsecSinceLastFrameSync < usecUntilFrameSync) /* wait */ ;
      elapsedUsecSinceLastFrameSync -= usecUntilFrameSync;
      digitalWrite(12, LOW);
      // WS2811 update begins immediately after falling edge of frame sync
      digitalWrite(13, HIGH);
      leds.show();
      digitalWrite(13, LOW);
    }
  } else if (startChar == '%') {
    // receive a "slave" frame - wait to show it until the frame sync arrives
    pinMode(12, INPUT_PULLUP);
    unsigned int unusedField = 0;
    int count = Serial.readBytes((char *)&unusedField, 2);
    if (count != 2) return;
    count = Serial.readBytes((char *)drawingMemory, sizeof(drawingMemory));
    if (count == sizeof(drawingMemory)) {
      elapsedMillis wait = 0;   // 30 ms timeout in case the sync never comes
      while (digitalRead(12) != HIGH && wait < 30) ; // wait for sync high
      while (digitalRead(12) != LOW && wait < 30) ;  // wait for sync high->low
      // WS2811 update begins immediately after falling edge of frame sync
      if (wait < 30) {
        digitalWrite(13, HIGH);
        leds.show();
        digitalWrite(13, LOW);
      }
    }
  } else if (startChar == '@') {
    // reset the elapsed frame time, for startup of '$' message playing
    elapsedUsecSinceLastFrameSync = 0;
  } else if (startChar == '?') {
    // when the video application asks, give it all our info
    // for easy and automatic configuration
    Serial.print(LED_WIDTH);
    Serial.write(',');
    Serial.print(LED_HEIGHT);
    Serial.write(',');
    Serial.print(LED_LAYOUT);
    Serial.write(',');
    Serial.print(0);
    Serial.write(',');
    Serial.print(0);
    Serial.write(',');
    Serial.print(VIDEO_XOFFSET);
    Serial.write(',');
    Serial.print(VIDEO_YOFFSET);
    Serial.write(',');
    Serial.print(VIDEO_WIDTH);
    Serial.write(',');
    Serial.print(VIDEO_HEIGHT);
    Serial.write(',');
    Serial.print(0);
    Serial.write(',');
    Serial.print(0);
    Serial.write(',');
    Serial.print(0);
    Serial.println();
  } else if (startChar >= 0) {
    // discard unknown characters
  }
}
`
Hi Guys!
So I am working on a serial grapher for my diploma thesis and I stumbled over a problem. The serial data I am receiving is being written into the standard serial buffer, but my data visualization is extremely deferred! I've tried clearing the buffer every time I put data into the graph, but I am using a serial protocol called SvVis, which also sends the ID of the channel. And now it messes some of my channels up, because the IDs no longer match when some data is discarded each time.
// Fragment of an SvVis message dispatcher: the first byte is a channel/type
// ID that selects how the following payload is decoded.
cha = serialPort.readChar();
ch = cha;
if(ch == 10){ //ID = 10 -> String: end marked with 0
  String message=SvVisReadString(serialPort);
  println(message);
}
else if(ch>0 && ch< 10)//2 Byte Data in 3.13 format
{
  println("3.13 Format");
}
else if(ch>10 && ch<21){ //2 Byte Data (short Little Endian)
  println("------------------");
  data=SvVisRead2Bytes(serialPort);
  println("Port "+ch+ " Data: " + data);
  String datastring = Integer.toString(data);
  nums[ch-11]=datastring;
}
else if(ch>20 && ch<31){ //4 Byte Data (float Little Endian)
  println("------------------");
  float_data=SvVisRead4Bytes(serialPort);
  // NOTE(review): this prints `id`, but the branch keys on `ch` — the
  // channel in the message is probably meant to be `ch` here.
  println("ID" + id + " Data: " + float_data);
  String datastring = Float.toString(float_data);
  nums[ch-21]=datastring;
}
// NOTE(review): clear() throws away any bytes already buffered, which can
// drop parts of in-flight SvVis messages — a likely cause of the channel
// mismatch described in this thread.
serialPort.clear();
updateCharts(nums,vis);
Is there a working way to synchronize my serial data input with my visualization? I've tried different solutions with lastChar() and buffer() but i didn't get it to work.
I was thinking about how to apply synchronized
on Python Mode. ~O)
And "Listen Up" seems a nice opportunity to try that out. :-bd
And it turns out we can annotate Jython functions w/ @make_synchronized
\m/
Whose monitor object becomes their 1st parameter.
So its monitor is dynamic in nature, determined by the moment of invocation.
Besides synchronization, we can have Java style arrays on Jython as well via module "jarray". <:-P
And we can use arrayCopy() on them too! :bz
So here it is "Listen Up" for Python Mode for completeness' sake.
Even though it's impractical due to its über slowness! :o3
"""
Listen Up (v3.2.2)
by Darwexter (2015/May/19)
mod GoToLoop (2015/May/20)
https://Forum.Processing.org/two/discussion/10900/
stitch-together-audio-samples-from-microphone-to-get-continuous-signal#Item_10
"""
add_library('Minim')
from synchronize import make_synchronized
from jarray import zeros
from copy import copy as clone
from java.util import Arrays
class ListenUp(AudioListener):
    """Minim AudioListener keeping a scrolling history of input buffers.

    The flat `waves` array holds ROWS buffers of COLS samples; samples()
    shifts the history one row and stores the newest buffer at the front.
    Access is guarded with @make_synchronized because Minim calls samples()
    from its own audio thread while draw() reads the data.
    """
    COLS, ROWS = 1024, 16
    DIM = COLS * ROWS
    DIM_COLS = DIM - COLS          # size of the region shifted on each callback
    COLS_RANGE, ROWS_RANGE = tuple(range(COLS)), tuple(range(ROWS))
    RATE, BITS = 44100, 16         # line-in sample rate and depth
    AMP, GAP, FPS = 50, 40, 120    # waveform gain (px), row height (px), target fps
    INK, BG = 0xffF0A000, PImage.ALPHA_MASK  # trace and background colours

    def __init__(self, ARR_TYPE='f'): self.waves = zeros(ListenUp.DIM, ARR_TYPE)

    @make_synchronized
    def getWavesClone(self): return clone(self.waves)

    @make_synchronized
    def copyWavesInto(self, w):
        # Copy under the lock so the reader never sees a half-written buffer.
        arrayCopy(self.waves, w)
        return w

    @make_synchronized
    def samples(self, sampL, sampR=zeros(0, 'f')):
        # Minim audio-thread callback: shift history one row, newest first.
        w = self.waves
        arrayCopy(w, 0, w, ListenUp.COLS, ListenUp.DIM_COLS)
        arrayCopy(sampL, w)
        redraw()
listener = ListenUp()             # shared between the audio thread and draw()
waves = listener.getWavesClone()  # draw()'s private snapshot buffer

def settings():
    # One window column per sample, GAP pixels of height per waveform row.
    size(ListenUp.COLS, ListenUp.ROWS * ListenUp.GAP, JAVA2D)
    noSmooth()
    # noLoop()
    # Route the mono line-in straight into our listener.
    Minim(this)\
    .getLineIn(Minim.MONO, ListenUp.COLS, ListenUp.RATE, ListenUp.BITS)\
    .addListener(listener)
def setup(): frameRate(ListenUp.FPS), loadPixels()

def draw():
    # Paint the background by filling the pixel array directly (fast path).
    Arrays.fill(pixels, ListenUp.BG)
    listener.copyWavesInto(waves)   # take a thread-safe snapshot of the history
    offset = height / ListenUp.ROWS # pixel height of one waveform row
    offhalf = offset >> 1           # its vertical centre line
    len1 = len(pixels) - 1
    for y in ListenUp.ROWS_RANGE:
        buf = y * ListenUp.COLS     # start of row y's samples in the snapshot
        gap = y * offset + offhalf  # baseline y-coordinate for this row
        for x in ListenUp.COLS_RANGE:
            h = gap + PApplet.round(ListenUp.AMP * waves[x + buf])
            idx = constrain(x + h*width, 0, len1)  # clamp into the pixel array
            pixels[idx] = ListenUp.INK
    updatePixels()
    # Backticks are Python 2 repr(): show the current fps in the title bar.
    this.surface.title = `round(frameRate, 1)`
Hi guys, as many of you know, exporting multimedia content that contains both audio and video from Processing can be very difficult because of synchronization issues. I've created a class capable of reading and saving WAV files, copying the file into a float array, and implementing methods for FFT analysis and various audio filters. What I would like to do now is turn these classes into a library that works much like Minim, only deferred, so we can choose the fps and anything else useful to synchronize audio and video perfectly. Would anyone familiar with useful libraries like to give me a hand?
I tried to synchronize just the start time of both timers.
setTimeout(() => makeTimer(timer2, 500), 500);
Hello all,
I recently started learning p5js about a month ago, by following Daniel Shiffman's video tutorials on youtube. These videos are great and he makes the learning process much more painless. I can not appreciate enough. Thank you and thank you again.
Today I was studying closures and coding along 9.6: JavaScript Closure - p5.js Tutorial. I always try to go and code something a bit extra than the tutorials do.
Program creates two paragraph elements and names them as timer1 and timer2 at setup, then changes them to counters at different rates. Without setTimeout(), timer1 counts with seconds and timer2 goes with half seconds. So timer2 starts half a second earlier than timer1. Here is the code:
var timer1;
var timer2;
// p5.js setup: create two paragraph elements and start a counter on
// each — timer1 ticking every second, timer2 every half second but
// delayed by one second.
function setup() {
  noCanvas();
  timer1 = createP('timer1');
  timer2 = createP('timer2');
  makeTimer(timer1, 1000);
  // BUG FIX: the original passed makeTimer(timer2, 500) directly,
  // which CALLS makeTimer immediately and hands its return value
  // (undefined) to setTimeout — so the delay never applied. Wrapping
  // the call in a function defers it until the timeout fires.
  setTimeout(() => makeTimer(timer2, 500), 1000);
}
// Starts a repeating counter on the given element: every `time` ms the
// element's HTML is set to the current count, which then increments.
// Each call closes over its own `count`, so timers are independent.
function makeTimer(elt, time) {
  let count = 0;
  setInterval(() => {
    elt.html(count);
    count += 1;
  }, time);
}
I tried to synchronize just the start time of both timers. Interestingly, the program is working but setTimeout for timer2 doesn't seem to work, it still starts half a second earlier than timer1. Could not figure out why it is not working.
Previous relevant posts: https://forum.processing.org/two/search?Search=synchronize
Kf
Please don't keep posting almost the exact same question and code over and over.
If you have follow-up questions, are stuck, or made a change that isn't working, post a new comment (optionally, with updated code) on your original question. It will jump to the top of the discussion list and previous participants will be notified.
Posting new questions makes it impossible to know what you have already been told, or see what you have already tried, and it confuses later people looking for solutions who find your many almost-identical not-quite-solved questions.
Hey there, could someone please tell me how to make these falling ellipses synchronize with the audio clip? For example, when the sound is louder, there should be more ellipses. I'm new to the minim library so I have no idea — thank you!!
import ddf.minim.*;
Minim minim;        // audio library entry point
AudioPlayer player; // playback handle for the loaded file
int numero = 1000; // raindrops (gotas de chuva): size of the particle pool
Rain[] rains = new Rain[numero]; // pool of raindrop particles
// Sketch initialization: window, frame rate, raindrop pool, and audio.
void setup()
{
  size(1280, 720);
  frameRate(40);
  noStroke();

  // Populate the particle pool with freshly spawned drops.
  for (int i = 0; i < numero; i++) {
    rains[i] = new Rain();
  }

  // Minim needs the sketch instance to resolve the data directory.
  minim = new Minim(this);

  // loadFile searches the same locations as loadImage: the data
  // folder, the sketch folder, absolute paths, and URLs.
  player = minim.loadFile("rain.mp3");

  // Play once through; rewind() must be called before replaying.
  player.play();
}
// Per-frame render: grey background, advance every raindrop, then
// overlay the left/right channel waveforms of the playing file.
void draw()
{
  background(147, 147, 147);
  stroke(255);

  // Advance and render every raindrop.
  for (int i = 0; i < numero; i++) {
    rains[i].update();
  }

  // Draw the waveforms. Values returned by left.get()/right.get() are
  // in [-1, 1], so they are scaled by 50 px to be visible; for a MONO
  // file both channels return the same value.
  // BUG FIX: the original computed x1/x2 but never drew anything (the
  // line() calls were missing), and x1 was mapped from 30 while x2 was
  // mapped from 0 — both now map from 0 so the segments line up.
  for (int i = 0; i < player.bufferSize() - 1; i++)
  {
    float x1 = map(i, 0, player.bufferSize(), 0, width);
    float x2 = map(i + 1, 0, player.bufferSize(), 0, width);
    line(x1, 50 + player.left.get(i) * 50, x2, 50 + player.left.get(i + 1) * 50);
    line(x1, 150 + player.right.get(i) * 50, x2, 150 + player.right.get(i + 1) * 50);
  }
}
// One falling raindrop: position, size, speed, and respawn logic.
class Rain {
  float x = random(0, 1280);   // horizontal spawn position
  float y = random(0, 1280);   // vertical position
  float size = 1;              // size of raindrop
  float speed = random(5, 30); // speed range (pixels per frame)

  // Move the drop down and draw its trail and head; respawn it once it
  // falls past the bottom of the screen.
  void update()
  {
    y += speed;
    fill(185, 197, 209);
    ellipse(x, y - 20, size, size * 6); // trail (rasto)
    fill(255 - (100 - speed));          // faster drops render brighter
    ellipse(x, y, size, size * 6);      // head (inicio)
    // BUG FIX: the original if-statement had no braces, so
    // y = random(0,1280) executed unconditionally on every frame and
    // the drops never fell smoothly (compare the braced version in the
    // other sketch). Braces make the respawn conditional as intended.
    if (y > height) {
      x = random(0, 1280);
      y = random(0, 1280);
    }
  }
}
Hello there. I'm trying to make the rain particles fall in sync with the sound using the minim library, but I know nothing about this and I have no idea how to make it happen. If someone could explain how to do it, that would be great. This is what I have right now. Thank you, and sorry for the slightly dumb question!
import ddf.minim.*;
Minim minim;        // audio library entry point
AudioPlayer player; // playback handle for the loaded file
int numero = 200; // how many rain drops on your screen??
Rain[] rains = new Rain[numero]; // pool of raindrop particles
// Sketch initialization: window, frame rate, raindrop pool, and audio.
void setup()
{
  size(512, 200, P3D);
  frameRate(40);
  noStroke();

  // Populate the particle pool with freshly spawned drops.
  for (int i = 0; i < numero; i++) {
    rains[i] = new Rain();
  }

  // Minim needs the sketch instance to resolve the data directory.
  minim = new Minim(this);

  // loadFile searches the same locations as loadImage: the data
  // folder, the sketch folder, absolute paths, and URLs.
  player = minim.loadFile("sound.mp3");

  // Play once through; rewind() must be called before replaying.
  player.play();
}
// Per-frame render: black background, advance every raindrop, then
// overlay the left/right channel waveforms of the playing file.
void draw()
{
  background(0);
  stroke(255);

  // Advance and render every raindrop.
  for (int i = 0; i < numero; i++) {
    rains[i].update();
  }

  // Draw the waveforms. Samples from left.get()/right.get() are in
  // [-1, 1], so scale them by 50 px to make them visible; for a MONO
  // file both channels return the same value.
  for (int i = 0; i < player.bufferSize() - 1; i++)
  {
    float xA = map(i, 0, player.bufferSize(), 0, width);
    float xB = map(i + 1, 0, player.bufferSize(), 0, width);
    line(xA, 50 + player.left.get(i) * 50, xB, 50 + player.left.get(i + 1) * 50);
    line(xA, 150 + player.right.get(i) * 50, xB, 150 + player.right.get(i + 1) * 50);
  }
}
// One falling raindrop: shape, motion, and respawn logic.
class Rain {
  float x = random(0, 600);      // horizontal spawn position
  float y = random(-1000, 0);    // start above the visible area
  float size = random(3, 7);     // drop thickness
  float speed = random(20, 80);  // fall speed (pixels per frame)

  // Move the drop down, draw its trail and head, and recycle it once
  // it leaves the bottom of the screen.
  void update()
  {
    y += speed;

    // Trail, drawn slightly above the head in a lighter blue-grey.
    fill(185, 197, 209);
    ellipse(x, y - 20, size, size * 6);

    // Head; faster drops render brighter.
    fill(255 - (100 - speed));
    ellipse(x, y, size, size * 6);

    // Respawn above the canvas after passing the bottom edge.
    if (y > height)
    {
      x = random(0, 600);
      y = random(-1200, 0);
    }
  }
}
Hello there. I'm trying to make the rain particles fall in sync with the sound using the minim library, but I know nothing about this and I have no idea how to make it happen. If someone could explain how to do it, that would be great. This is what I have right now. Thank you, and sorry for the slightly dumb question!
import ddf.minim.*;
Minim minim;        // audio library entry point
AudioPlayer player; // playback handle for the loaded file
int numero = 200; // how many rain drops on your screen??
Rain[] rains = new Rain[numero]; // pool of raindrop particles
// Sketch initialization: window, frame rate, raindrop pool, and audio.
void setup()
{
size(512, 200, P3D);
frameRate(40);
noStroke();
// Populate the particle pool with freshly spawned drops.
for(int i=0; i<numero ;i=i+1){
rains[i]=new Rain();
}
// we pass this to Minim so that it can load files from the data directory
minim = new Minim(this);
// loadFile will look in all the same places as loadImage does.
// this means you can find files that are in the data folder and the
// sketch folder. you can also pass an absolute path, or a URL.
player = minim.loadFile("sound.mp3");
// play the file from start to finish.
// if you want to play the file again,
// you need to call rewind() first.
player.play();
}
// Per-frame render: black background, advance every raindrop, then
// overlay the left/right channel waveforms of the playing file.
void draw()
{
background(0);
stroke(255);
// Advance and render every raindrop.
for(int i=0 ; i<numero ; i=i+1){
rains[i].update();
}
// draw the waveforms
// the values returned by left.get() and right.get() will be between -1 and 1,
// so we need to scale them up to see the waveform
// note that if the file is MONO, left.get() and right.get() will return the same value
for(int i = 0; i < player.bufferSize() - 1; i++)
{
float x1 = map( i, 0, player.bufferSize(), 0, width );
float x2 = map( i+1, 0, player.bufferSize(), 0, width );
// Left channel at y=50, right channel at y=150, each scaled by 50 px.
line( x1, 50 + player.left.get(i)*50, x2, 50 + player.left.get(i+1)*50 );
line( x1, 150 + player.right.get(i)*50, x2, 150 + player.right.get(i+1)*50 );
}
}
class Rain { //this class setups the shape and movement of raindrop.
float x = random(0,600); // horizontal spawn position
float y = random(-1000,0); // start above the visible area
float size = random(3,7); // size of raindrop
float speed = random(20,80); // speed range
// Move the drop down, draw its trail and head, and recycle it once
// it leaves the bottom of the screen.
void update()
{
y += speed;
fill(185,197,209);
ellipse(x, y-20, size, size*6); // tail of raindrop
fill(255-(100-speed)); // faster drops render brighter
ellipse(x, y, size, size*6); //head of raindrop
if (y> height) //initialize raindrop which arrives bottom.
{
x = random(0,600);
y = random(-1200,0);
}
}
}
@jeremydouglass I think you're right that the scans are all periodically hitting the same region. I tried dropping in your code, and I still got the same effect, so I think changing the increment size and scan direction still isn't enough.
What I ended up doing was throwing a second iterator into the mix at a different rate so the two scans are more free running. I still see the points synchronize every once in a while but it's less noticeable:
// Advance the two noise offsets at different rates so the two scans
// run out of phase and only rarely synchronize.
a += 0.003; b+=0.0001;
for (var i = 0; i < 80; i++) {
// Sample 2-D Perlin noise for x and y; the +2000 offsets decorrelate
// the two axes so points don't collapse onto a diagonal.
ellipse(noise(i+b, a)*w*1.3,noise(2000+i+b, 2000+a+b)*w*1.3,5, 5);
}
I'm not sure if it's just that all the points are hitting large zero regions, or if it's actually a repetition in the Perlin noise; I guess the former is more likely. It's a bit fuzzy as to what's going on, but this seems to work.