Processing Forum
Marco's Profile

    Hi! I want to convert the output below to a UNIX timestamp, but I can't figure out how. It's the current system date with zero hours/minutes/seconds. Can someone help me with an example? It would be very appreciated! Best regards!

    int day = day();
    int month = month();
    int year = year();
    int hour = 0; //hour();
    int minute = 0; //minute();
    int second = 0; //second();

    println(day + "/" + month + "/" + year);
    println(hour + ":" + minute + ":" + second);
    Hi! I'm a student working on my thesis project, using OpenPaths. I'm having a problem when trying to use the data returned from the API. In the "blprintOpenPathsExample.pde" example there is an attempt to convert response.getBody() into a JSONArray, which gives me the error "JSONArray is ambiguous". After looking at the Javadocs, I found out that response.getBody() returns a String. So, as a workaround, I'm saving that string as a local "data.json" file and then loading it back as a JSONArray. But I would like to convert it directly, without having to save a local file. I'm sure it's possible... Am I doing something wrong? Can someone help me? It would be very appreciated!
    Best regards!

    Here is my code:

    final String ACCESS = "";
    final String SECRET = "";
    final String URL    = "https://openpaths.cc/api/1";

    void openPaths()
    {
      OAuthService service = new ServiceBuilder()
        .provider(OpenPathsApi.class)
        .apiKey(ACCESS)
        .apiSecret(SECRET)
        .build();

      OAuthRequest request = new OAuthRequest(Verb.GET, URL);
      Token token = new Token("", "");
      service.signRequest(token, request);

      // Ask for the last 30 days of locations (times are UNIX seconds).
      request.addQuerystringParameter("start_time", String.valueOf(System.currentTimeMillis() / 1000 - 30*24*60*60));
      request.addQuerystringParameter("end_time", String.valueOf(System.currentTimeMillis() / 1000));

      Response response = request.send();
      //println(response.getBody());

      // Workaround: save the body string to a file, then load it back as JSON.
      String[] receivedJSONBody = { response.getBody() };
      saveStrings("openpaths.json", receivedJSONBody);

      JSONArray loadedJSONObjects = loadJSONArray("openpaths.json");

      for (int i = 0; i < loadedJSONObjects.size(); i++)
      {
        JSONObject location = loadedJSONObjects.getJSONObject(i);

        double longitude = location.getFloat("lon");
        double latitude  = location.getFloat("lat");
        double altitude  = location.getFloat("alt");
        long timeStamp   = location.getInt("t");

        println("Longitude: " + longitude);
        println("Latitude: " + latitude);
        println("Altitude: " + altitude);
        println("Time Stamp: " + timeStamp);
        println("");
      }
    }
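
    One possible direct route, assuming Processing 2.0's built-in parseJSONArray() (the "JSONArray is ambiguous" error suggests another library in the sketch also defines a JSONArray class, which is why Processing's classes are fully qualified below):

    // Parse the response body directly; no temporary file needed.
    processing.data.JSONArray locations = parseJSONArray(response.getBody());

    for (int i = 0; i < locations.size(); i++) {
      processing.data.JSONObject location = locations.getJSONObject(i);
      println("Longitude: " + location.getFloat("lon"));
      println("Time Stamp: " + location.getInt("t"));
    }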
    My problem is that I want to use a PS3 Eye, as I did in the past, on Mac OS X 10.6.8 with Processing 2.0b8.

    Before, with Processing 1.5, I used Macam (macam.component). As I understand it, Processing 2.0+ stopped using QuickTime, but, if I am not wrong, macam.component relies on QuickTime... So now I don't know how I am going to use my PS3 Eye in my latest project with Processing 2.0b8.

    Does anybody know how I can use the PS3 Eye camera with Processing 2.0b8?

    It's getting kind of urgent now :(...

    Thanks
    Hi!

    I'm working on a sketch where I'm using an Arduino as an interface and want to trigger some sounds with it.

    My problem is that when I trigger the sound before I start the Arduino port, in 'void setup', it works fine, but when I try to trigger the sound after I start the Arduino port, as in 'void draw', it won't make any sound.

    Is there any incompatibility when using these two libraries together?
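
    For reference, a minimal sketch of the setup described, assuming the Minim sound library and the Firmata-based Arduino library (the post names neither, so the libraries, the pin, and the file name are all assumptions):

    import ddf.minim.*;
    import cc.arduino.*;
    import processing.serial.*;

    Minim minim;
    AudioPlayer player;
    Arduino arduino;

    void setup() {
      size(200, 200);
      minim = new Minim(this);
      player = minim.loadFile("trigger.wav");                 // hypothetical sound file
      // player.play();                                       // triggering here works fine...
      arduino = new Arduino(this, Arduino.list()[0], 57600);  // open the Arduino port
    }

    void draw() {
      // ...but triggering after the port is open stays silent.
      if (arduino.digitalRead(2) == Arduino.HIGH && !player.isPlaying()) {
        player.rewind();
        player.play();
      }
    }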

    Thanks!
    Hi,

    I'm using breakShape() to draw a rectangle that has a circular hole in the middle, and it works just fine!
    My problem is when I want that rectangle to have a pattern instead of a plain fill(color).
    If somebody could help me with this... maybe there is a workaround I'm not seeing...
    My goal is to save a .png image of a square pattern with a transparent hole in the middle.

    The code I'm posting here led me to a dead end, because breakShape() only works with JAVA2D, and textures don't work with JAVA2D.

    Need help....



    int r = 65;          // hole inset from the edges
    int a = 15;          // bezier control-point offset for the circle
    PGraphics pg, pg2;   // pg holds the stripe pattern, pg2 the shape with the hole

    void setup() {
      size(200, 200, P2D);
      pg = createGraphics(width, height, P2D);
      pg2 = createGraphics(width, height, JAVA2D);   // breakShape() only works in JAVA2D
    }

    void draw() {

      // Draw a vertical stripe pattern into pg.
      pg.beginDraw();
        pg.smooth();
        pg.noStroke();
        for (int i=0; i<=width; i+=6) {

          pg.beginShape();
            pg.fill(175, 0, 0);
            pg.vertex(i, 0);
            pg.vertex(i+1, 0);
            pg.vertex(i+1, height);
            pg.vertex(i, height);
            pg.vertex(i, 0);
          pg.endShape(CLOSE);

          pg.beginShape();
            pg.fill(255, 0, 0);
            pg.vertex(i+1, 0);
            pg.vertex(i+6, 0);
            pg.vertex(i+6, height);
            pg.vertex(i+1, height);
            pg.vertex(i+1, 0);
          pg.endShape(CLOSE);
        }
      pg.endDraw();

      // Texture a full-size rectangle with pg, then cut the circular hole
      // with breakShape(). This is the dead end: JAVA2D ignores texture().
      pg2.beginDraw();
        pg2.smooth();
        pg2.noStroke();
        pg2.noFill();
        pg2.textureMode(NORMALIZED);
        pg2.beginShape();
          pg2.texture(pg);
          pg2.vertex(0, 0, 0, 0);
          pg2.vertex(width, 0, 1, 0);
          pg2.vertex(width, height, 1, 1);
          pg2.vertex(0, height, 0, 1);
          pg2.vertex(0, 0, 0, 0);
        pg2.breakShape();
          pg2.vertex(width/2, r);
          pg2.bezierVertex(r+a, r, r, r+a, r, height/2);
          pg2.bezierVertex(r, (height-r)-a, r+a, (height-r), width/2, height-r);
          pg2.bezierVertex((width-r)-a, (height-r), (width-r), (height-r)-a, width-r, height/2);
          pg2.bezierVertex((width-r), r+a, (width-r)-a, r, width/2, r);
          pg2.vertex(width/2, r);
        pg2.endShape(CLOSE);
      pg2.endDraw();

      image(pg2, 0, 0, width, height);
    }

    void mousePressed() {
      pg2.save("data/circleHole.png");
    }
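
    In case it helps, one possible workaround, sketched here as an assumption rather than a confirmed fix: skip breakShape() entirely and cut the hole with an alpha mask. mask() keeps pixels where the mask image is white and makes them transparent where it is black, and PNG preserves the alpha channel on save:

    PGraphics pattern, maskImg;

    void setup() {
      size(200, 200);
      pattern = createGraphics(width, height, JAVA2D);
      maskImg = createGraphics(width, height, JAVA2D);

      // Same vertical stripe pattern as above, drawn with rect().
      pattern.beginDraw();
      pattern.noStroke();
      for (int i = 0; i <= width; i += 6) {
        pattern.fill(175, 0, 0);
        pattern.rect(i, 0, 1, height);
        pattern.fill(255, 0, 0);
        pattern.rect(i + 1, 0, 5, height);
      }
      pattern.endDraw();

      // White = keep, black = transparent.
      maskImg.beginDraw();
      maskImg.background(255);
      maskImg.noStroke();
      maskImg.fill(0);
      maskImg.ellipse(width/2, height/2, 70, 70);   // 70 = hole diameter (width - 2*r above)
      maskImg.endDraw();

      PImage result = pattern.get();
      result.mask(maskImg.get());                   // punch the transparent hole
      result.save("data/circleHole.png");
    }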




    Thanks!
    Hi! I am using NyARToolkit.
    When I flip the video capture from my webcam to get a mirror effect, the overlaid objects don't "flip": they move the opposite way, the way they originally went before flipping. I know this happens because I am feeding the library's detect function the webcam instance and not the flipped frame. The problem is that when I try to feed the function the flipped frame, it doesn't recognize the marker.

    Please help, it's for a school project.
    Thanks for your time.
    Here is my code:



    import processing.video.*;
    import jp.nyatla.nyar4psg.*;
    import processing.opengl.*;
    Capture webcam;
    SingleARTKMarker realidade_aumentada;

    Cubo cubo1;

    void setup() {
      
      try {
        quicktime.QTSession.open();
      } 
      catch (quicktime.QTException qte) {
        qte.printStackTrace();
      }
      
      size(640, 480, OPENGL);
      
      String[] devices = Capture.list();
      //println(devices);
      webcam = new Capture(this, width, height, devices[4], 30);
      //webcam.settings();
      
      realidade_aumentada = new SingleARTKMarker (this, width, height, "camera_para.dat", SingleARTKMarker.CS_LEFT);
      //println (realidade_aumentada.VERSION);
      String[] marker= { "patt.volcom" };
      realidade_aumentada.setARCodes (marker, 80);
      realidade_aumentada.setConfidenceThreshold (0.6, 0.5);
      
      cubo1 = new Cubo(20);
    }

    void draw() {
      
      if (webcam.available()) {
        
        colorMode(RGB, 255, 255, 255, 100);
        
        // Build a horizontally mirrored copy of the current frame.
        PImage frame = new PImage(webcam.width, webcam.height, RGB);
        
        webcam.read();
        webcam.loadPixels();
        
        for (int x=0; x<webcam.width; x++) {
          for (int y=0; y<webcam.height; y++) {
            int loc = (webcam.width - x - 1) + y*webcam.width;   // mirrored source column
            frame.pixels[x + y*webcam.width] = color(red(webcam.pixels[loc]), green(webcam.pixels[loc]), blue(webcam.pixels[loc]));
          }
        }
        
        frame.updatePixels();
        
        hint (DISABLE_DEPTH_TEST);
          image(frame, 0, 0);
        hint (ENABLE_DEPTH_TEST);
        
        
        switch (realidade_aumentada.detect(webcam)) { // with the flipped frame it doesn't recognize the marker
          
          case SingleARTKMarker.ST_NOMARKER:
            break;
        
          case SingleARTKMarker.ST_NEWMARKER:
            println("Apareceu o Maracador");
            break;
        
          case SingleARTKMarker.ST_UPDATEMARKER:
            PGraphicsOpenGL pgl = (PGraphicsOpenGL) g;
            realidade_aumentada.beginTransform (pgl);
      
              pushMatrix();
                translate (0, 0, 20);
                cubo1.desenha();
              popMatrix();
              
            realidade_aumentada.endTransform();
            break;
        
          case SingleARTKMarker.ST_REMOVEMARKER:
            println("Desapareceu o Maracador");
            break;
        }
      
      }
    }

    class Cubo {
      
      private float tamanho = 0.0;
      private color cor = color (0);
      
      Cubo (float tamanhoexterior) {
        tamanho = tamanhoexterior;
        cor = color ( random(0, 255), random(0, 255), random(0, 255) );
      }
        
      void desenha () {
        noStroke ();
        fill (cor);
        box(tamanho);
      }
      
    }
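
    One possible direction, offered as an assumption rather than a tested NyARToolkit recipe: keep feeding detect() the raw capture (the frame the tracking demonstrably works on) and mirror the finished composite at display time instead of mirroring the input. Inside draw(), after the switch block, something like:

    PImage composed = get();   // grab everything rendered this frame
    pushMatrix();
      translate(width, 0);
      scale(-1, 1);            // horizontal mirror of video and overlay together
      image(composed, 0, 0);
    popMatrix();

    That way the video and the overlaid cube flip the same way, while the detector still sees the unflipped pixels it recognizes.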