diff --git a/build/build.xml b/build/build.xml
index 4d0f0b233..f848cdb44 100755
--- a/build/build.xml
+++ b/build/build.xml
@@ -151,7 +151,7 @@
-
+
@@ -226,7 +226,6 @@
-
@@ -237,7 +236,6 @@
-
@@ -495,9 +493,6 @@
-
-
-
-
-
-
-
@@ -679,7 +670,7 @@
-
+
@@ -790,16 +781,6 @@
-
-
-
-
-
-
-
-
-
-
-
+
diff --git a/core/src/processing/core/PVector.java b/core/src/processing/core/PVector.java
index 49e54a0d0..a99f427f7 100644
--- a/core/src/processing/core/PVector.java
+++ b/core/src/processing/core/PVector.java
@@ -119,6 +119,7 @@ public class PVector implements Serializable {
/** Array so that this can be temporarily used in an array context */
transient protected float[] array;
+
/**
* Constructor for an empty vector: x, y, and z are set to 0.
*/
@@ -149,6 +150,7 @@ public class PVector implements Serializable {
this.z = 0;
}
+
/**
* ( begin auto-generated from PVector_set.xml )
*
@@ -163,28 +165,33 @@ public class PVector implements Serializable {
* @param z the z component of the vector
* @brief Set the components of the vector
*/
- public void set(float x, float y, float z) {
+ public PVector set(float x, float y, float z) {
this.x = x;
this.y = y;
this.z = z;
+ return this;
}
+
/**
* @param x the x component of the vector
* @param y the y component of the vector
*/
- public void set(float x, float y) {
+ public PVector set(float x, float y) {
this.x = x;
this.y = y;
+ return this;
}
+
/**
* @param v any variable of type PVector
*/
- public void set(PVector v) {
+ public PVector set(PVector v) {
x = v.x;
y = v.y;
z = v.z;
+ return this;
}
@@ -192,7 +199,7 @@ public class PVector implements Serializable {
* Set the x, y (and maybe z) coordinates using a float[] array as the source.
* @param source array to copy from
*/
- public void set(float[] source) {
+ public PVector set(float[] source) {
if (source.length >= 2) {
x = source[0];
y = source[1];
@@ -200,6 +207,7 @@ public class PVector implements Serializable {
if (source.length >= 3) {
z = source[2];
}
+ return this;
}
@@ -217,9 +225,10 @@ public class PVector implements Serializable {
* @see PVector#random3D()
*/
static public PVector random2D() {
- return random2D(null,null);
+ return random2D(null, null);
}
+
/**
* Make a new 2D unit vector with a random direction
* using Processing's current random number generator
@@ -227,7 +236,7 @@ public class PVector implements Serializable {
* @return the random PVector
*/
static public PVector random2D(PApplet parent) {
- return random2D(null,parent);
+ return random2D(null, parent);
}
/**
@@ -236,18 +245,23 @@ public class PVector implements Serializable {
* @return the random PVector
*/
static public PVector random2D(PVector target) {
- return random2D(target,null);
+ return random2D(target, null);
}
+
/**
- * Make a new 2D unit vector with a random direction
+ * Make a new 2D unit vector with a random direction. Pass in the parent
+ * PApplet if you want randomSeed() to work (and be predictable). Or leave
+ * it null and be... random.
* @return the random PVector
*/
static public PVector random2D(PVector target, PApplet parent) {
- if (parent == null) return fromAngle((float)(Math.random()*Math.PI*2),target);
- else return fromAngle(parent.random(PConstants.TWO_PI),target);
+ return (parent == null) ?
+ fromAngle((float) (Math.random() * Math.PI*2), target) :
+ fromAngle(parent.random(PConstants.TAU), target);
}
+
/**
* ( begin auto-generated from PVector_random3D.xml )
*
@@ -262,9 +276,10 @@ public class PVector implements Serializable {
* @see PVector#random2D()
*/
static public PVector random3D() {
- return random3D(null,null);
+ return random3D(null, null);
}
+
/**
* Make a new 3D unit vector with a random direction
* using Processing's current random number generator
@@ -272,18 +287,20 @@ public class PVector implements Serializable {
* @return the random PVector
*/
static public PVector random3D(PApplet parent) {
- return random3D(null,parent);
+ return random3D(null, parent);
}
+
/**
* Set a 3D vector to a random unit vector with a random direction
* @param target the target vector (if null, a new vector will be created)
* @return the random PVector
*/
static public PVector random3D(PVector target) {
- return random3D(target,null);
+ return random3D(target, null);
}
+
/**
* Make a new 3D unit vector with a random direction
* @return the random PVector
@@ -309,6 +326,7 @@ public class PVector implements Serializable {
return target;
}
+
/**
* ( begin auto-generated from PVector_sub.xml )
*
@@ -342,6 +360,12 @@ public class PVector implements Serializable {
return target;
}
+
+ public PVector copy() {
+ return new PVector(x, y, z);
+ }
+
+
/**
* ( begin auto-generated from PVector_get.xml )
*
@@ -353,10 +377,12 @@ public class PVector implements Serializable {
* @usage web_application
* @brief Get a copy of the vector
*/
+ @Deprecated
public PVector get() {
- return new PVector(x, y, z);
+ return copy();
}
+
/**
* @param target
*/
@@ -393,6 +419,7 @@ public class PVector implements Serializable {
return (float) Math.sqrt(x*x + y*y + z*z);
}
+
/**
* ( begin auto-generated from PVector_mag.xml )
*
@@ -413,6 +440,7 @@ public class PVector implements Serializable {
return (x*x + y*y + z*z);
}
+
/**
* ( begin auto-generated from PVector_add.xml )
*
@@ -429,21 +457,24 @@ public class PVector implements Serializable {
* @param v the vector to be added
* @brief Adds x, y, and z components to a vector, one vector to another, or two independent vectors
*/
- public void add(PVector v) {
+ public PVector add(PVector v) {
x += v.x;
y += v.y;
z += v.z;
+ return this;
}
+
/**
* @param x x component of the vector
* @param y y component of the vector
* @param z z component of the vector
*/
- public void add(float x, float y, float z) {
+ public PVector add(float x, float y, float z) {
this.x += x;
this.y += y;
this.z += z;
+ return this;
}
@@ -487,21 +518,24 @@ public class PVector implements Serializable {
* @param v any variable of type PVector
* @brief Subtract x, y, and z components from a vector, one vector from another, or two independent vectors
*/
- public void sub(PVector v) {
+ public PVector sub(PVector v) {
x -= v.x;
y -= v.y;
z -= v.z;
+ return this;
}
+
/**
* @param x the x component of the vector
* @param y the y component of the vector
* @param z the z component of the vector
*/
- public void sub(float x, float y, float z) {
+ public PVector sub(float x, float y, float z) {
this.x -= x;
this.y -= y;
this.z -= z;
+ return this;
}
@@ -542,10 +576,11 @@ public class PVector implements Serializable {
* @brief Multiply a vector by a scalar
* @param n the number to multiply with the vector
*/
- public void mult(float n) {
+ public PVector mult(float n) {
x *= n;
y *= n;
z *= n;
+ return this;
}
@@ -571,7 +606,6 @@ public class PVector implements Serializable {
}
-
/**
* ( begin auto-generated from PVector_div.xml )
*
@@ -584,10 +618,11 @@ public class PVector implements Serializable {
* @brief Divide a vector by a scalar
* @param n the number by which to divide the vector
*/
- public void div(float n) {
+ public PVector div(float n) {
x /= n;
y /= n;
z /= n;
+ return this;
}
@@ -600,6 +635,7 @@ public class PVector implements Serializable {
return div(v, n, null);
}
+
/**
* Divide a vector by a scalar and store the result in another vector.
* @param target PVector in which to store the result
@@ -665,6 +701,7 @@ public class PVector implements Serializable {
return x*v.x + y*v.y + z*v.z;
}
+
/**
* @param x x component of the vector
* @param y y component of the vector
@@ -674,6 +711,7 @@ public class PVector implements Serializable {
return this.x*x + this.y*y + this.z*z;
}
+
/**
* @param v1 any variable of type PVector
* @param v2 any variable of type PVector
@@ -717,6 +755,7 @@ public class PVector implements Serializable {
return target;
}
+
/**
* @param v1 any variable of type PVector
* @param v2 any variable of type PVector
@@ -747,11 +786,12 @@ public class PVector implements Serializable {
* @usage web_application
* @brief Normalize the vector to a length of 1
*/
- public void normalize() {
+ public PVector normalize() {
float m = mag();
if (m != 0 && m != 1) {
div(m);
}
+ return this;
}
@@ -785,13 +825,15 @@ public class PVector implements Serializable {
* @param max the maximum magnitude for the vector
* @brief Limit the magnitude of the vector
*/
- public void limit(float max) {
+ public PVector limit(float max) {
if (magSq() > max*max) {
normalize();
mult(max);
}
+ return this;
}
+
/**
* ( begin auto-generated from PVector_setMag.xml )
*
@@ -804,11 +846,13 @@ public class PVector implements Serializable {
* @param len the new length for this vector
* @brief Set the magnitude of the vector
*/
- public void setMag(float len) {
+ public PVector setMag(float len) {
normalize();
mult(len);
+ return this;
}
+
/**
* Sets the magnitude of this vector, storing the result in another vector.
* @param target Set to null to create a new vector
@@ -821,6 +865,7 @@ public class PVector implements Serializable {
return target;
}
+
/**
* ( begin auto-generated from PVector_setMag.xml )
*
@@ -857,11 +902,12 @@ public class PVector implements Serializable {
* @brief Rotate the vector by an angle (2D only)
* @param theta the angle of rotation
*/
- public void rotate(float theta) {
- float xTemp = x;
+ public PVector rotate(float theta) {
+ float temp = x;
// Might need to check for rounding errors like with angleBetween function?
x = x*PApplet.cos(theta) - y*PApplet.sin(theta);
- y = xTemp*PApplet.sin(theta) + y*PApplet.cos(theta);
+ y = temp*PApplet.sin(theta) + y*PApplet.cos(theta);
+ return this;
}
@@ -879,35 +925,40 @@ public class PVector implements Serializable {
* @param amt The amount of interpolation; some value between 0.0 (old vector) and 1.0 (new vector). 0.1 is very near the new vector. 0.5 is halfway in between.
* @see PApplet#lerp(float, float, float)
*/
- public void lerp(PVector v, float amt) {
- x = PApplet.lerp(x,v.x,amt);
- y = PApplet.lerp(y,v.y,amt);
- z = PApplet.lerp(z,v.z,amt);
+ public PVector lerp(PVector v, float amt) {
+ x = PApplet.lerp(x, v.x, amt);
+ y = PApplet.lerp(y, v.y, amt);
+ z = PApplet.lerp(z, v.z, amt);
+ return this;
}
+
/**
* Linear interpolate between two vectors (returns a new PVector object)
* @param v1 the vector to start from
* @param v2 the vector to lerp to
*/
public static PVector lerp(PVector v1, PVector v2, float amt) {
- PVector v = v1.get();
+ PVector v = v1.copy();
v.lerp(v2, amt);
return v;
}
+
/**
* Linear interpolate the vector to x,y,z values
* @param x the x component to lerp to
* @param y the y component to lerp to
* @param z the z component to lerp to
*/
- public void lerp(float x, float y, float z, float amt) {
- this.x = PApplet.lerp(this.x,x,amt);
- this.y = PApplet.lerp(this.y,y,amt);
- this.z = PApplet.lerp(this.z,z,amt);
+ public PVector lerp(float x, float y, float z, float amt) {
+ this.x = PApplet.lerp(this.x, x, amt);
+ this.y = PApplet.lerp(this.y, y, amt);
+ this.z = PApplet.lerp(this.z, z, amt);
+ return this;
}
+
/**
* ( begin auto-generated from PVector_angleBetween.xml )
*
@@ -976,14 +1027,17 @@ public class PVector implements Serializable {
return array;
}
+
@Override
public boolean equals(Object obj) {
- if (!(obj instanceof PVector))
+ if (!(obj instanceof PVector)) {
return false;
+ }
final PVector p = (PVector) obj;
return x == p.x && y == p.y && z == p.z;
}
+
@Override
public int hashCode() {
int result = 1;
diff --git a/core/todo.txt b/core/todo.txt
index ea362845a..c2774207f 100644
--- a/core/todo.txt
+++ b/core/todo.txt
@@ -1,7 +1,19 @@
0228 core
+X add copy() method to PVector
+X modify PVector to include better methods for chaining operations
+X http://code.google.com/p/processing/issues/detail?id=218
+X https://github.com/processing/processing/issues/257
+X PVector discussion with Dan
+o Jer and Dan will look at their code, plus toxiclibs
+
+_ bring back chaining in JSON (and add to XML)
high
+_ Closing opengl sketch from the PDE doesn't stop java process on windows
+_ https://github.com/processing/processing/issues/2335
+_ StringList.insert() error (should be an easy fix)
+_ https://github.com/processing/processing/issues/2548
_ pull for image resize and alpha issues
_ https://github.com/processing/processing/pull/2324
_ dataPath() not working when app is not run from app dir on Linux
@@ -430,13 +442,8 @@ _ https://github.com/processing/processing/issues/1596
CORE / PVector
-_ PVector discussion with Dan
-_ Jer and Dan will look at their code, plus toxiclibs
-_ modify PVector to include better methods for chaining operations
-_ http://code.google.com/p/processing/issues/detail?id=218
_ add screen(PVector), model(PVector) and world(PVector)?
_ maybe screenVec()? or screenXYZ()?
-_ PVector chaining -> Dan looking into this
CORE / OpenGL (Andres)
diff --git a/java/libraries/video/.classpath b/java/libraries/video/.classpath
deleted file mode 100644
index 80820b4c9..000000000
--- a/java/libraries/video/.classpath
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
-
-
-
-
-
diff --git a/java/libraries/video/.gitignore b/java/libraries/video/.gitignore
deleted file mode 100644
index ba077a403..000000000
--- a/java/libraries/video/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-bin
diff --git a/java/libraries/video/.project b/java/libraries/video/.project
deleted file mode 100644
index aa59004d2..000000000
--- a/java/libraries/video/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
- processing-video
-
-
-
-
-
- org.eclipse.jdt.core.javabuilder
-
-
-
-
-
- org.eclipse.jdt.core.javanature
-
-
diff --git a/java/libraries/video/.settings/org.eclipse.jdt.core.prefs b/java/libraries/video/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100755
index 1b3d9a205..000000000
--- a/java/libraries/video/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,12 +0,0 @@
-#Sat Nov 12 10:54:16 CST 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.debug.lineNumber=generate
-org.eclipse.jdt.core.compiler.debug.localVariable=generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=generate
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/java/libraries/video/build.xml b/java/libraries/video/build.xml
deleted file mode 100755
index 202242085..000000000
--- a/java/libraries/video/build.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/java/libraries/video/examples/Capture/AsciiVideo/AsciiVideo.pde b/java/libraries/video/examples/Capture/AsciiVideo/AsciiVideo.pde
deleted file mode 100644
index 59364288b..000000000
--- a/java/libraries/video/examples/Capture/AsciiVideo/AsciiVideo.pde
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * ASCII Video
- * by Ben Fry.
- *
- *
- * Text characters have been used to represent images since the earliest computers.
- * This sketch is a simple homage that re-interprets live video as ASCII text.
- * See the keyPressed function for more options, like changing the font size.
- */
-
-import processing.video.*;
-
-Capture video;
-boolean cheatScreen;
-
-// All ASCII characters, sorted according to their visual density
-String letterOrder =
- " .`-_':,;^=+/\"|)\\<>)iv%xclrs{*}I?!][1taeo7zjLu" +
- "nT#JCwfy325Fp6mqSghVd4EgXPGZbYkOA&8U$@KHDBWNMR0Q";
-char[] letters;
-
-float[] bright;
-char[] chars;
-
-PFont font;
-float fontSize = 1.5;
-
-
-void setup() {
- size(640, 480);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, 160, 120);
-
- // Start capturing the images from the camera
- video.start();
-
- int count = video.width * video.height;
- //println(count);
-
- font = loadFont("UniversLTStd-Light-48.vlw");
-
- // for the 256 levels of brightness, distribute the letters across
- // the an array of 256 elements to use for the lookup
- letters = new char[256];
- for (int i = 0; i < 256; i++) {
- int index = int(map(i, 0, 256, 0, letterOrder.length()));
- letters[i] = letterOrder.charAt(index);
- }
-
- // current characters for each position in the video
- chars = new char[count];
-
- // current brightness for each point
- bright = new float[count];
- for (int i = 0; i < count; i++) {
- // set each brightness at the midpoint to start
- bright[i] = 128;
- }
-}
-
-
-void captureEvent(Capture c) {
- c.read();
-}
-
-
-void draw() {
- background(0);
-
- pushMatrix();
-
- float hgap = width / float(video.width);
- float vgap = height / float(video.height);
-
- scale(max(hgap, vgap) * fontSize);
- textFont(font, fontSize);
-
- int index = 0;
- video.loadPixels();
- for (int y = 1; y < video.height; y++) {
-
- // Move down for next line
- translate(0, 1.0 / fontSize);
-
- pushMatrix();
- for (int x = 0; x < video.width; x++) {
- int pixelColor = video.pixels[index];
- // Faster method of calculating r, g, b than red(), green(), blue()
- int r = (pixelColor >> 16) & 0xff;
- int g = (pixelColor >> 8) & 0xff;
- int b = pixelColor & 0xff;
-
- // Another option would be to properly calculate brightness as luminance:
- // luminance = 0.3*red + 0.59*green + 0.11*blue
- // Or you could instead red + green + blue, and make the the values[] array
- // 256*3 elements long instead of just 256.
- int pixelBright = max(r, g, b);
-
- // The 0.1 value is used to damp the changes so that letters flicker less
- float diff = pixelBright - bright[index];
- bright[index] += diff * 0.1;
-
- fill(pixelColor);
- int num = int(bright[index]);
- text(letters[num], 0, 0);
-
- // Move to the next pixel
- index++;
-
- // Move over for next character
- translate(1.0 / fontSize, 0);
- }
- popMatrix();
- }
- popMatrix();
-
- if (cheatScreen) {
- //image(video, 0, height - video.height);
- // set() is faster than image() when drawing untransformed images
- set(0, height - video.height, video);
- }
-}
-
-
-/**
- * Handle key presses:
- * 'c' toggles the cheat screen that shows the original image in the corner
- * 'g' grabs an image and saves the frame to a tiff image
- * 'f' and 'F' increase and decrease the font size
- */
-void keyPressed() {
- switch (key) {
- case 'g': saveFrame(); break;
- case 'c': cheatScreen = !cheatScreen; break;
- case 'f': fontSize *= 1.1; break;
- case 'F': fontSize *= 0.9; break;
- }
-}
diff --git a/java/libraries/video/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw b/java/libraries/video/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw
deleted file mode 100644
index 0d624969b..000000000
Binary files a/java/libraries/video/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw and /dev/null differ
diff --git a/java/libraries/video/examples/Capture/BackgroundSubtraction/BackgroundSubtraction.pde b/java/libraries/video/examples/Capture/BackgroundSubtraction/BackgroundSubtraction.pde
deleted file mode 100644
index 01f16016f..000000000
--- a/java/libraries/video/examples/Capture/BackgroundSubtraction/BackgroundSubtraction.pde
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Background Subtraction
- * by Golan Levin.
- *
- * Detect the presence of people and objects in the frame using a simple
- * background-subtraction technique. To initialize the background, press a key.
- */
-
-
-import processing.video.*;
-
-int numPixels;
-int[] backgroundPixels;
-Capture video;
-
-void setup() {
- size(640, 480);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- //video = new Capture(this, 160, 120);
- video = new Capture(this, width, height);
-
- // Start capturing the images from the camera
- video.start();
-
- numPixels = video.width * video.height;
- // Create array to store the background image
- backgroundPixels = new int[numPixels];
- // Make the pixels[] array available for direct manipulation
- loadPixels();
-}
-
-void draw() {
- if (video.available()) {
- video.read(); // Read a new video frame
- video.loadPixels(); // Make the pixels of video available
- // Difference between the current frame and the stored background
- int presenceSum = 0;
- for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
- // Fetch the current color in that location, and also the color
- // of the background in that spot
- color currColor = video.pixels[i];
- color bkgdColor = backgroundPixels[i];
- // Extract the red, green, and blue components of the current pixel's color
- int currR = (currColor >> 16) & 0xFF;
- int currG = (currColor >> 8) & 0xFF;
- int currB = currColor & 0xFF;
- // Extract the red, green, and blue components of the background pixel's color
- int bkgdR = (bkgdColor >> 16) & 0xFF;
- int bkgdG = (bkgdColor >> 8) & 0xFF;
- int bkgdB = bkgdColor & 0xFF;
- // Compute the difference of the red, green, and blue values
- int diffR = abs(currR - bkgdR);
- int diffG = abs(currG - bkgdG);
- int diffB = abs(currB - bkgdB);
- // Add these differences to the running tally
- presenceSum += diffR + diffG + diffB;
- // Render the difference image to the screen
- pixels[i] = color(diffR, diffG, diffB);
- // The following line does the same thing much faster, but is more technical
- //pixels[i] = 0xFF000000 | (diffR << 16) | (diffG << 8) | diffB;
- }
- updatePixels(); // Notify that the pixels[] array has changed
- println(presenceSum); // Print out the total amount of movement
- }
-}
-
-// When a key is pressed, capture the background image into the backgroundPixels
-// buffer, by copying each of the current frame's pixels into it.
-void keyPressed() {
- video.loadPixels();
- arraycopy(video.pixels, backgroundPixels);
-}
diff --git a/java/libraries/video/examples/Capture/BrightnessThresholding/BrightnessThresholding.pde b/java/libraries/video/examples/Capture/BrightnessThresholding/BrightnessThresholding.pde
deleted file mode 100644
index 0902784ec..000000000
--- a/java/libraries/video/examples/Capture/BrightnessThresholding/BrightnessThresholding.pde
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Brightness Thresholding
- * by Golan Levin.
- *
- * Determines whether a test location (such as the cursor) is contained within
- * the silhouette of a dark object.
- */
-
-
-import processing.video.*;
-
-color black = color(0);
-color white = color(255);
-int numPixels;
-Capture video;
-
-void setup() {
- size(640, 480); // Change size to 320 x 240 if too slow at 640 x 480
- strokeWeight(5);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, width, height);
-
- // Start capturing the images from the camera
- video.start();
-
- numPixels = video.width * video.height;
- noCursor();
- smooth();
-}
-
-void draw() {
- if (video.available()) {
- video.read();
- video.loadPixels();
- int threshold = 127; // Set the threshold value
- float pixelBrightness; // Declare variable to store a pixel's color
- // Turn each pixel in the video frame black or white depending on its brightness
- loadPixels();
- for (int i = 0; i < numPixels; i++) {
- pixelBrightness = brightness(video.pixels[i]);
- if (pixelBrightness > threshold) { // If the pixel is brighter than the
- pixels[i] = white; // threshold value, make it white
- }
- else { // Otherwise,
- pixels[i] = black; // make it black
- }
- }
- updatePixels();
- // Test a location to see where it is contained. Fetch the pixel at the test
- // location (the cursor), and compute its brightness
- int testValue = get(mouseX, mouseY);
- float testBrightness = brightness(testValue);
- if (testBrightness > threshold) { // If the test location is brighter than
- fill(black); // the threshold set the fill to black
- }
- else { // Otherwise,
- fill(white); // set the fill to white
- }
- ellipse(mouseX, mouseY, 20, 20);
- }
-}
diff --git a/java/libraries/video/examples/Capture/BrightnessTracking/BrightnessTracking.pde b/java/libraries/video/examples/Capture/BrightnessTracking/BrightnessTracking.pde
deleted file mode 100644
index b1a1b5677..000000000
--- a/java/libraries/video/examples/Capture/BrightnessTracking/BrightnessTracking.pde
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Brightness Tracking
- * by Golan Levin.
- *
- * Tracks the brightest pixel in a live video signal.
- */
-
-
-import processing.video.*;
-
-Capture video;
-
-void setup() {
- size(640, 480);
- // Uses the default video input, see the reference if this causes an error
- video = new Capture(this, width, height);
- video.start();
- noStroke();
- smooth();
-}
-
-void draw() {
- if (video.available()) {
- video.read();
- image(video, 0, 0, width, height); // Draw the webcam video onto the screen
- int brightestX = 0; // X-coordinate of the brightest video pixel
- int brightestY = 0; // Y-coordinate of the brightest video pixel
- float brightestValue = 0; // Brightness of the brightest video pixel
- // Search for the brightest pixel: For each row of pixels in the video image and
- // for each pixel in the yth row, compute each pixel's index in the video
- video.loadPixels();
- int index = 0;
- for (int y = 0; y < video.height; y++) {
- for (int x = 0; x < video.width; x++) {
- // Get the color stored in the pixel
- int pixelValue = video.pixels[index];
- // Determine the brightness of the pixel
- float pixelBrightness = brightness(pixelValue);
- // If that value is brighter than any previous, then store the
- // brightness of that pixel, as well as its (x,y) location
- if (pixelBrightness > brightestValue) {
- brightestValue = pixelBrightness;
- brightestY = y;
- brightestX = x;
- }
- index++;
- }
- }
- // Draw a large, yellow circle at the brightest pixel
- fill(255, 204, 0, 128);
- ellipse(brightestX, brightestY, 200, 200);
- }
-}
diff --git a/java/libraries/video/examples/Capture/ColorSorting/ColorSorting.pde b/java/libraries/video/examples/Capture/ColorSorting/ColorSorting.pde
deleted file mode 100644
index e040a1923..000000000
--- a/java/libraries/video/examples/Capture/ColorSorting/ColorSorting.pde
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
- * Color Sorting
- * by Ben Fry.
- *
- * Example that sorts all colors from the incoming video
- * and arranges them into vertical bars.
- */
-
-
-import processing.video.*;
-
-Capture video;
-boolean cheatScreen;
-
-Tuple[] captureColors;
-Tuple[] drawColors;
-int[] bright;
-
-// How many pixels to skip in either direction
-int increment = 5;
-
-void setup() {
- size(800, 600);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, 160, 120);
-
- // Start capturing the images from the camera
- video.start();
-
- int count = (video.width * video.height) / (increment * increment);
- bright = new int[count];
- captureColors = new Tuple[count];
- drawColors = new Tuple[count];
- for (int i = 0; i < count; i++) {
- captureColors[i] = new Tuple();
- drawColors[i] = new Tuple(0.5, 0.5, 0.5);
- }
-}
-
-
-void draw() {
- if (video.available()) {
- video.read();
- video.loadPixels();
-
- background(0);
- noStroke();
-
- int index = 0;
- for (int j = 0; j < video.height; j += increment) {
- for (int i = 0; i < video.width; i += increment) {
- int pixelColor = video.pixels[j*video.width + i];
-
- int r = (pixelColor >> 16) & 0xff;
- int g = (pixelColor >> 8) & 0xff;
- int b = pixelColor & 0xff;
-
- // Technically would be sqrt of the following, but no need to do
- // sqrt before comparing the elements since we're only ordering
- bright[index] = r*r + g*g + b*b;
- captureColors[index].set(r, g, b);
-
- index++;
- }
- }
- sort(index, bright, captureColors);
-
- beginShape(QUAD_STRIP);
- for (int i = 0; i < index; i++) {
- drawColors[i].target(captureColors[i], 0.1);
- drawColors[i].phil();
-
- float x = map(i, 0, index, 0, width);
- vertex(x, 0);
- vertex(x, height);
- }
- endShape();
-
- if (cheatScreen) {
- //image(video, 0, height - video.height);
- // Faster method of displaying pixels array on screen
- set(0, height - video.height, video);
- }
- }
-}
-
-
-void keyPressed() {
- if (key == 'g') {
- saveFrame();
- } else if (key == 'c') {
- cheatScreen = !cheatScreen;
- }
-}
-
-
-// Functions to handle sorting the color data
-
-
-void sort(int length, int[] a, Tuple[] stuff) {
- sortSub(a, stuff, 0, length - 1);
-}
-
-
-void sortSwap(int[] a, Tuple[] stuff, int i, int j) {
- int T = a[i];
- a[i] = a[j];
- a[j] = T;
-
- Tuple v = stuff[i];
- stuff[i] = stuff[j];
- stuff[j] = v;
-}
-
-
-void sortSub(int[] a, Tuple[] stuff, int lo0, int hi0) {
- int lo = lo0;
- int hi = hi0;
- int mid;
-
- if (hi0 > lo0) {
- mid = a[(lo0 + hi0) / 2];
-
- while (lo <= hi) {
- while ((lo < hi0) && (a[lo] < mid)) {
- ++lo;
- }
- while ((hi > lo0) && (a[hi] > mid)) {
- --hi;
- }
- if (lo <= hi) {
- sortSwap(a, stuff, lo, hi);
- ++lo;
- --hi;
- }
- }
-
- if (lo0 < hi)
- sortSub(a, stuff, lo0, hi);
-
- if (lo < hi0)
- sortSub(a, stuff, lo, hi0);
- }
-}
diff --git a/java/libraries/video/examples/Capture/ColorSorting/Tuple.pde b/java/libraries/video/examples/Capture/ColorSorting/Tuple.pde
deleted file mode 100644
index c3d8b5900..000000000
--- a/java/libraries/video/examples/Capture/ColorSorting/Tuple.pde
+++ /dev/null
@@ -1,29 +0,0 @@
-// Simple vector class that holds an x,y,z position.
-
-class Tuple {
- float x, y, z;
-
- Tuple() { }
-
- Tuple(float x, float y, float z) {
- set(x, y, z);
- }
-
- void set(float x, float y, float z) {
- this.x = x;
- this.y = y;
- this.z = z;
- }
-
- void target(Tuple another, float amount) {
- float amount1 = 1.0 - amount;
- x = x*amount1 + another.x*amount;
- y = y*amount1 + another.y*amount;
- z = z*amount1 + another.z*amount;
- }
-
- void phil() {
- fill(x, y, z);
- }
-}
-
diff --git a/java/libraries/video/examples/Capture/FrameDifferencing/FrameDifferencing.pde b/java/libraries/video/examples/Capture/FrameDifferencing/FrameDifferencing.pde
deleted file mode 100644
index 78869cd6d..000000000
--- a/java/libraries/video/examples/Capture/FrameDifferencing/FrameDifferencing.pde
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Frame Differencing
- * by Golan Levin.
- *
- * Quantify the amount of movement in the video frame using frame-differencing.
- */
-
-
-import processing.video.*;
-
-int numPixels;
-int[] previousFrame;
-Capture video;
-
-void setup() {
- size(640, 480);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, width, height);
-
- // Start capturing the images from the camera
- video.start();
-
- numPixels = video.width * video.height;
- // Create an array to store the previously captured frame
- previousFrame = new int[numPixels];
- loadPixels();
-}
-
-void draw() {
- if (video.available()) {
- // When using video to manipulate the screen, use video.available() and
- // video.read() inside the draw() method so that it's safe to draw to the screen
- video.read(); // Read the new frame from the camera
- video.loadPixels(); // Make its pixels[] array available
-
- int movementSum = 0; // Amount of movement in the frame
- for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
- color currColor = video.pixels[i];
- color prevColor = previousFrame[i];
- // Extract the red, green, and blue components from current pixel
- int currR = (currColor >> 16) & 0xFF; // Like red(), but faster
- int currG = (currColor >> 8) & 0xFF;
- int currB = currColor & 0xFF;
- // Extract red, green, and blue components from previous pixel
- int prevR = (prevColor >> 16) & 0xFF;
- int prevG = (prevColor >> 8) & 0xFF;
- int prevB = prevColor & 0xFF;
- // Compute the difference of the red, green, and blue values
- int diffR = abs(currR - prevR);
- int diffG = abs(currG - prevG);
- int diffB = abs(currB - prevB);
- // Add these differences to the running tally
- movementSum += diffR + diffG + diffB;
- // Render the difference image to the screen
- pixels[i] = color(diffR, diffG, diffB);
- // The following line is much faster, but more confusing to read
- //pixels[i] = 0xff000000 | (diffR << 16) | (diffG << 8) | diffB;
- // Save the current color into the 'previous' buffer
- previousFrame[i] = currColor;
- }
- // To prevent flicker from frames that are all black (no movement),
- // only update the screen if the image has changed.
- if (movementSum > 0) {
- updatePixels();
- println(movementSum); // Print the total amount of movement to the console
- }
- }
-}
diff --git a/java/libraries/video/examples/Capture/Framingham/Framingham.pde b/java/libraries/video/examples/Capture/Framingham/Framingham.pde
deleted file mode 100644
index 70d775d72..000000000
--- a/java/libraries/video/examples/Capture/Framingham/Framingham.pde
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Framingham
- * by Ben Fry.
- *
- * Show subsequent frames from video input as a grid. Also fun with movie files.
- */
-
-
-import processing.video.*;
-
-Capture video;
-int column;
-int columnCount;
-int lastRow;
-
-// Buffer used to move all the pixels up
-int[] scoot;
-
-
-void setup() {
- size(640, 480);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, 160, 120);
-
- // Start capturing the images from the camera
- video.start();
-
- column = 0;
- columnCount = width / video.width;
- int rowCount = height / video.height;
- lastRow = rowCount - 1;
-
- scoot = new int[lastRow*video.height * width];
- background(0);
-}
-
-
-void draw() {
- // By using video.available, only the frame rate need be set inside setup()
- if (video.available()) {
- video.read();
- video.loadPixels();
- image(video, video.width*column, video.height*lastRow);
- column++;
- if (column == columnCount) {
- loadPixels();
-
- // Scoot everybody up one row
- arrayCopy(pixels, video.height*width, scoot, 0, scoot.length);
- arrayCopy(scoot, 0, pixels, 0, scoot.length);
-
- // Set the moved row to black
- for (int i = scoot.length; i < width*height; i++) {
- pixels[i] = #000000;
- }
- column = 0;
- updatePixels();
- }
- }
-}
diff --git a/java/libraries/video/examples/Capture/GettingStartedCapture/GettingStartedCapture.pde b/java/libraries/video/examples/Capture/GettingStartedCapture/GettingStartedCapture.pde
deleted file mode 100644
index 517ba8786..000000000
--- a/java/libraries/video/examples/Capture/GettingStartedCapture/GettingStartedCapture.pde
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Getting Started with Capture.
- *
- * Reading and displaying an image from an attached Capture device.
- */
-
-import processing.video.*;
-
-Capture cam;
-
-void setup() {
- size(640, 480);
-
- String[] cameras = Capture.list();
-
- if (cameras == null) {
- println("Failed to retrieve the list of available cameras, will try the default...");
- cam = new Capture(this, 640, 480);
- } if (cameras.length == 0) {
- println("There are no cameras available for capture.");
- exit();
- } else {
- println("Available cameras:");
- for (int i = 0; i < cameras.length; i++) {
- println(cameras[i]);
- }
-
- // The camera can be initialized directly using an element
- // from the array returned by list():
- cam = new Capture(this, cameras[0]);
- // Or, the settings can be defined based on the text in the list
- //cam = new Capture(this, 640, 480, "Built-in iSight", 30);
-
- // Start capturing the images from the camera
- cam.start();
- }
-}
-
-void draw() {
- if (cam.available() == true) {
- cam.read();
- }
- image(cam, 0, 0);
- // The following does the same as the above image() line, but
- // is faster when just drawing the image without any additional
- // resizing, transformations, or tint.
- //set(0, 0, cam);
-}
-
diff --git a/java/libraries/video/examples/Capture/HsvSpace/HsvSpace.pde b/java/libraries/video/examples/Capture/HsvSpace/HsvSpace.pde
deleted file mode 100644
index c77208410..000000000
--- a/java/libraries/video/examples/Capture/HsvSpace/HsvSpace.pde
+++ /dev/null
@@ -1,213 +0,0 @@
-/**
- * HSV Space
- * by Ben Fry.
- *
- * Arrange the pixels from live video into the HSV Color Cone.
- */
-
-import processing.video.*;
-import java.awt.Color;
-
-Capture video;
-int count;
-boolean cheatScreen = true;
-
-static final float BOX_SIZE = 0.75;
-static final float CONE_HEIGHT = 1.2;
-static final float MAX_RADIUS = 10;
-static final float ROT_INCREMENT = 3.0;
-static final float TRANS_INCREMENT = 1;
-static final float STEP_AMOUNT = 0.1;
-
-Tuple[] farbe;
-Tuple[] trans;
-
-float[] hsb = new float[3];
-
-float leftRightAngle;
-float upDownAngle;
-float fwdBackTrans;
-float upDownTrans;
-float leftRightTrans;
-boolean motion;
-
-boolean blobby = false;
-
-
-void setup() {
- size(640, 480, P3D);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, 160, 120);
-
- // Start capturing the images from the camera
- video.start();
-
- count = video.width * video.height;
-
- sphereDetail(60);
-
- upDownTrans = 0;
- leftRightTrans = 0;
- motion = false;
-
- leftRightAngle = 101.501297;
- upDownAngle = -180.098694;
- fwdBackTrans = 14.800003;
-
- farbe = new Tuple[count];
- trans = new Tuple[count];
- for (int i = 0; i < count; i++) {
- farbe[i] = new Tuple();
- trans[i] = new Tuple();
- }
-}
-
-
-void draw() {
- background(0);
-
- if (!blobby) {
- lights();
- }
-
- pushMatrix();
- translate(width/2, height/2);
- scale(min(width, height) / 10.0);
-
- translate(0, 0, -20 + fwdBackTrans);
- rotateY(radians(36 + leftRightAngle)); //, 0, 1, 0);
- rotateX(radians(-228 + upDownAngle)); //, 1, 0, 0);
-
- strokeWeight(0.1);
- if (blobby) {
- stroke(0.35, 0.35, 0.25, 0.15);
- wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 18, 18);
- }
- else {
- stroke(0.35, 0.35, 0.25, 0.25);
- wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 180, 18);
- }
-
- noStroke();
- video.loadPixels();
- for (int i = 0; i < count; i++) {
- int pixelColor = video.pixels[i];
- int r = (pixelColor >> 16) & 0xff;
- int g = (pixelColor >> 8) & 0xff;
- int b = pixelColor & 0xff;
- Color.RGBtoHSB(r, g, b, hsb);
-
- float radius = hsb[1] * hsb[2];
- float angle = hsb[0] * 360.0 * DEG_TO_RAD;
- float nx = MAX_RADIUS * radius * cos(angle);
- float ny = MAX_RADIUS * radius * sin(angle);
- float nz = hsb[2] * MAX_RADIUS * CONE_HEIGHT;
-
- trans[i].set(trans[i].x - (trans[i].x - nx)*STEP_AMOUNT,
- trans[i].y - (trans[i].y - ny)*STEP_AMOUNT,
- trans[i].z - (trans[i].z - nz)*STEP_AMOUNT);
-
- farbe[i].set(farbe[i].x - (farbe[i].x - r)*STEP_AMOUNT,
- farbe[i].y - (farbe[i].y - g)*STEP_AMOUNT,
- farbe[i].z - (farbe[i].z - b)*STEP_AMOUNT);
-
- pushMatrix();
- farbe[i].phil();
- trans[i].tran();
-
- rotate(radians(45), 1, 1, 0);
- if (blobby) {
- sphere(BOX_SIZE * 2); //, 20, 20);
- } else {
- box(BOX_SIZE);
- }
-
- popMatrix();
- }
- popMatrix();
-
- if (motion) {
- upDownAngle--;
- leftRightAngle--;
- }
-
- if (cheatScreen) {
- image(video, 0, height - video.height);
- }
-}
-
-
-void captureEvent(Capture c) {
- c.read();
-}
-
-
-void keyPressed() {
- switch (key) {
- case 'g':
- saveFrame();
- break;
- case 'c':
- cheatScreen = !cheatScreen;
- break;
-
- case 'm':
- motion = !motion;
- break;
- case '=':
- fwdBackTrans += TRANS_INCREMENT;
- break;
- case '-':
- fwdBackTrans -= TRANS_INCREMENT;
- break;
- case 'b':
- blobby = !blobby;
- break;
- }
-}
-
-
-void mouseDragged() {
- float dX, dY;
-
- switch (mouseButton) {
- case LEFT: // left right up down
- dX = pmouseX - mouseX;
- dY = pmouseY - mouseY;
- leftRightAngle -= dX * 0.2;
- upDownAngle += dY * 0.4;
- break;
-
- case CENTER:
- dX = pmouseX - mouseX;
- dY = pmouseY - mouseY;
- leftRightTrans -= TRANS_INCREMENT * dX;
- upDownTrans -= TRANS_INCREMENT * dY;
- break;
-
- case RIGHT: // in and out
- dY = (float) (pmouseY - mouseY);
- fwdBackTrans -= TRANS_INCREMENT * dY;
- break;
- }
-}
-
-
-void wireCone(float radius, float height, int stepX, int stepY) {
- int steps = 10;
- stroke(40);
- for (int i = 0; i < steps; i++) {
- float angle = map(i, 0, steps, 0, TWO_PI);
- float x = radius * cos(angle);
- float y = radius * sin(angle);
- line(x, y, height, 0, 0, 0);
- }
- noFill();
- pushMatrix();
- translate(0, 0, height);
- ellipseMode(CENTER);
- ellipse(0, 0, radius, radius);
- popMatrix();
-}
diff --git a/java/libraries/video/examples/Capture/HsvSpace/Tuple.pde b/java/libraries/video/examples/Capture/HsvSpace/Tuple.pde
deleted file mode 100644
index 19c1507aa..000000000
--- a/java/libraries/video/examples/Capture/HsvSpace/Tuple.pde
+++ /dev/null
@@ -1,33 +0,0 @@
-// Simple vector class that holds an x,y,z position.
-
-class Tuple {
- float x, y, z;
-
- Tuple() { }
-
- Tuple(float x, float y, float z) {
- set(x, y, z);
- }
-
- void set(float x, float y, float z) {
- this.x = x;
- this.y = y;
- this.z = z;
- }
-
- void target(Tuple another, float amount) {
- float amount1 = 1.0 - amount;
- x = x*amount1 + another.x*amount;
- y = y*amount1 + another.y*amount;
- z = z*amount1 + another.z*amount;
- }
-
- void phil() {
- fill(x, y, z);
- }
-
- void tran() {
- translate(x, y, z);
- }
-}
-
diff --git a/java/libraries/video/examples/Capture/LivePocky/LivePocky.pde b/java/libraries/video/examples/Capture/LivePocky/LivePocky.pde
deleted file mode 100644
index 64b4b6db5..000000000
--- a/java/libraries/video/examples/Capture/LivePocky/LivePocky.pde
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Live Pocky
- * by Ben Fry.
- *
- * Unwrap each frame of live video into a single line of pixels.
- */
-
-import processing.video.*;
-
-Capture video;
-int count;
-int writeRow;
-int maxRows;
-int topRow;
-int buffer[];
-
-
-void setup() {
- size(600, 400);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, 320, 240);
-
- // Start capturing the images from the camera
- video.start();
-
- maxRows = height * 2;
- buffer = new int[width * maxRows];
- writeRow = height - 1;
- topRow = 0;
-
- background(0);
- loadPixels();
-}
-
-
-void draw() {
- video.loadPixels();
- arraycopy(video.pixels, 0, buffer, writeRow * width, width);
- writeRow++;
- if (writeRow == maxRows) {
- writeRow = 0;
- }
- topRow++;
-
- for (int y = 0; y < height; y++) {
- int row = (topRow + y) % maxRows;
- arraycopy(buffer, row * width, g.pixels, y*width, width);
- }
- updatePixels();
-}
-
-
-void captureEvent(Capture c) {
- c.read();
-}
diff --git a/java/libraries/video/examples/Capture/Mirror/Mirror.pde b/java/libraries/video/examples/Capture/Mirror/Mirror.pde
deleted file mode 100644
index 0c527c802..000000000
--- a/java/libraries/video/examples/Capture/Mirror/Mirror.pde
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Mirror
- * by Daniel Shiffman.
- *
- * Each pixel from the video source is drawn as a rectangle with rotation based on brightness.
- */
-
-import processing.video.*;
-
-
-// Size of each cell in the grid
-int cellSize = 20;
-// Number of columns and rows in our system
-int cols, rows;
-// Variable for capture device
-Capture video;
-
-
-void setup() {
- size(640, 480);
- frameRate(30);
- cols = width / cellSize;
- rows = height / cellSize;
- colorMode(RGB, 255, 255, 255, 100);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, width, height);
-
- // Start capturing the images from the camera
- video.start();
-
- background(0);
-}
-
-
-void draw() {
- if (video.available()) {
- video.read();
- video.loadPixels();
-
- // Begin loop for columns
- for (int i = 0; i < cols; i++) {
- // Begin loop for rows
- for (int j = 0; j < rows; j++) {
-
- // Where are we, pixel-wise?
- int x = i*cellSize;
- int y = j*cellSize;
- int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
-
- float r = red(video.pixels[loc]);
- float g = green(video.pixels[loc]);
- float b = blue(video.pixels[loc]);
- // Make a new color with an alpha component
- color c = color(r, g, b, 75);
-
- // Code for drawing a single rect
- // Using translate in order for rotation to work properly
- pushMatrix();
- translate(x+cellSize/2, y+cellSize/2);
- // Rotation formula based on brightness
- rotate((2 * PI * brightness(c) / 255.0));
- rectMode(CENTER);
- fill(c);
- noStroke();
- // Rects are larger than the cell for some overlap
- rect(0, 0, cellSize+6, cellSize+6);
- popMatrix();
- }
- }
- }
-}
diff --git a/java/libraries/video/examples/Capture/Mirror2/Mirror2.pde b/java/libraries/video/examples/Capture/Mirror2/Mirror2.pde
deleted file mode 100644
index 242e55a10..000000000
--- a/java/libraries/video/examples/Capture/Mirror2/Mirror2.pde
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Mirror 2
- * by Daniel Shiffman.
- *
- * Each pixel from the video source is drawn as a rectangle with size based on brightness.
- */
-
-import processing.video.*;
-
-// Size of each cell in the grid
-int cellSize = 15;
-// Number of columns and rows in our system
-int cols, rows;
-// Variable for capture device
-Capture video;
-
-
-void setup() {
- size(640, 480);
- // Set up columns and rows
- cols = width / cellSize;
- rows = height / cellSize;
- colorMode(RGB, 255, 255, 255, 100);
- rectMode(CENTER);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, width, height);
-
- // Start capturing the images from the camera
- video.start();
-
- background(0);
-}
-
-
-void draw() {
- if (video.available()) {
- video.read();
- video.loadPixels();
-
- background(0, 0, 255);
-
- // Begin loop for columns
- for (int i = 0; i < cols;i++) {
- // Begin loop for rows
- for (int j = 0; j < rows;j++) {
-
- // Where are we, pixel-wise?
- int x = i * cellSize;
- int y = j * cellSize;
- int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
-
- // Each rect is colored white with a size determined by brightness
- color c = video.pixels[loc];
- float sz = (brightness(c) / 255.0) * cellSize;
- fill(255);
- noStroke();
- rect(x + cellSize/2, y + cellSize/2, sz, sz);
- }
- }
- }
-}
diff --git a/java/libraries/video/examples/Capture/RadialPocky/RadialPocky.pde b/java/libraries/video/examples/Capture/RadialPocky/RadialPocky.pde
deleted file mode 100644
index ed4ca728c..000000000
--- a/java/libraries/video/examples/Capture/RadialPocky/RadialPocky.pde
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Radial Pocky
- * by Ben Fry.
- *
- * Unwrap each frame of live video into a single line of pixels along a circle
- */
-
-import processing.video.*;
-
-Capture video;
-int videoCount;
-int currentAngle;
-int pixelCount;
-int angleCount = 200; // how many divisions
-
-int radii[];
-int angles[];
-
-
-void setup() {
- // size must be set to video.width*video.height*2 in both directions
- size(600, 600);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, 160, 120);
-
- // Start capturing the images from the camera
- video.start();
-
- videoCount = video.width * video.height;
-
- pixelCount = width*height;
- int centerX = width / 2;
- int centerY = height / 2;
- radii = new int[pixelCount];
- angles = new int[pixelCount];
-
- int offset = 0;
- for (int y = 0; y < height; y++) {
- for (int x = 0; x < width; x++) {
- int dx = centerX - x;
- int dy = centerY - y;
-
- float angle = atan2(dy, dx);
- if (angle < 0) angle += TWO_PI;
- angles[offset] = (int) (angleCount * (angle / TWO_PI));
-
- int radius = (int) mag(dx, dy);
- if (radius >= videoCount) {
- radius = -1;
- angles[offset] = -1;
- }
- radii[offset] = radius;
-
- offset++;
- }
- }
- background(0);
-}
-
-
-void draw() {
- if (video.available()) {
- video.read();
- video.loadPixels();
-
- loadPixels();
- for (int i = 0; i < pixelCount; i++) {
- if (angles[i] == currentAngle) {
- pixels[i] = video.pixels[radii[i]];
- }
- }
- updatePixels();
-
- currentAngle++;
- if (currentAngle == angleCount) {
- currentAngle = 0;
- }
- }
-}
diff --git a/java/libraries/video/examples/Capture/SlitScan/SlitScan.pde b/java/libraries/video/examples/Capture/SlitScan/SlitScan.pde
deleted file mode 100644
index 8f4e06a02..000000000
--- a/java/libraries/video/examples/Capture/SlitScan/SlitScan.pde
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Simple Real-Time Slit-Scan Program.
- * By Golan Levin.
- *
- * This demonstration depends on the canvas height being equal
- * to the video capture height. If you would prefer otherwise,
- * consider using the image copy() function rather than the
- * direct pixel-accessing approach I have used here.
- */
-
-
-import processing.video.*;
-
-Capture video;
-
-int videoSliceX;
-int drawPositionX;
-
-void setup() {
- size(600, 240);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this,320, 240);
-
- // Start capturing the images from the camera
- video.start();
-
- videoSliceX = video.width / 2;
- drawPositionX = width - 1;
- background(0);
-}
-
-
-void draw() {
- if (video.available()) {
- video.read();
- video.loadPixels();
-
- // Copy a column of pixels from the middle of the video
- // To a location moving slowly across the canvas.
- loadPixels();
- for (int y = 0; y < video.height; y++){
- int setPixelIndex = y*width + drawPositionX;
- int getPixelIndex = y*video.width + videoSliceX;
- pixels[setPixelIndex] = video.pixels[getPixelIndex];
- }
- updatePixels();
-
- drawPositionX--;
- // Wrap the position back to the beginning if necessary.
- if (drawPositionX < 0) {
- drawPositionX = width - 1;
- }
- }
-}
diff --git a/java/libraries/video/examples/Capture/Spatiotemporal/Spatiotemporal.pde b/java/libraries/video/examples/Capture/Spatiotemporal/Spatiotemporal.pde
deleted file mode 100644
index 24a1a83d4..000000000
--- a/java/libraries/video/examples/Capture/Spatiotemporal/Spatiotemporal.pde
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Spatiotemporal
- * by David Muth
- *
- * Records a number of video frames into memory, then plays back the video
- * buffer by turning the time axis into the x-axis and vice versa
- */
-
-import processing.video.*;
-
-Capture video;
-int signal = 0;
-
-//the buffer for storing video frames
-ArrayList frames;
-
-//different program modes for recording and playback
-int mode = 0;
-int MODE_NEWBUFFER = 0;
-int MODE_RECORDING = 1;
-int MODE_PLAYBACK = 2;
-
-int currentX = 0;
-
-void setup() {
- size(640, 480);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, width, height);
-
- // Start capturing the images from the camera
- video.start();
-}
-
-void captureEvent(Capture c) {
- c.read();
-
- //create a new buffer in case one is needed
- if (mode == MODE_NEWBUFFER) {
- frames = new ArrayList();
- mode = MODE_RECORDING;
- }
-
- //record into the buffer until there are enough frames
- if (mode == MODE_RECORDING) {
- //copy the current video frame into an image, so it can be stored in the buffer
- PImage img = createImage(width, height, RGB);
- video.loadPixels();
- arrayCopy(video.pixels, img.pixels);
-
- frames.add(img);
-
- //in case enough frames have been recorded, switch to playback mode
- if (frames.size() >= width) {
- mode = MODE_PLAYBACK;
- }
- }
-}
-
-void draw() {
- loadPixels();
-
- //code for the recording mode
- if (mode == MODE_RECORDING) {
- //set the image counter to 0
- int currentImage = 0;
-
- //begin a loop for displaying pixel columns
- for (int x = 0; x < video.width; x++) {
- //go through the frame buffer and pick an image using the image counter
- if (currentImage < frames.size()) {
- PImage img = (PImage)frames.get(currentImage);
-
- //display a pixel column of the current image
- if (img != null) {
- img.loadPixels();
-
- for (int y = 0; y < video.height; y++) {
- pixels[x + y * width] = img.pixels[x + y * video.width];
- }
- }
-
- //increase the image counter
- currentImage++;
-
- }
- else {
- break;
- }
- }
- }
-
- //code for displaying the spatiotemporal transformation
- if (mode == MODE_PLAYBACK) {
-
- //begin a loop for displaying pixel columns
- for (int x = 0; x < video.width; x++) {
- //get an image from the buffer using loopcounter x as the index
- PImage img = (PImage)frames.get(x);
-
- if (img != null) {
- img.loadPixels();
-
- //pick the same column from each image for display,
- //then distribute the columns over the x-axis on the screen
- for(int y = 0; y < video.height; y++) {
- pixels[x + y * width] = img.pixels[currentX + y * video.width];
- }
- }
- }
-
- //a different column shall be used next time draw() is being called
- currentX++;
-
- //if the end of the buffer is reached
- if(currentX >= video.width) {
- //create a new buffer when the next video frame arrives
- mode = MODE_NEWBUFFER;
- //reset the column counter
- currentX = 0;
- }
- }
-
- updatePixels();
-}
-
-
-
-
-
diff --git a/java/libraries/video/examples/Capture/TimeDisplacement/TimeDisplacement.pde b/java/libraries/video/examples/Capture/TimeDisplacement/TimeDisplacement.pde
deleted file mode 100644
index 460f8adcf..000000000
--- a/java/libraries/video/examples/Capture/TimeDisplacement/TimeDisplacement.pde
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Time Displacement
- * by David Muth
- *
- * Keeps a buffer of video frames in memory and displays pixel rows
- * taken from consecutive frames distributed over the y-axis
- */
-
-import processing.video.*;
-
-Capture video;
-int signal = 0;
-
-//the buffer for storing video frames
-ArrayList frames = new ArrayList();
-
-void setup() {
- size(640, 480);
-
- // This the default video input, see the GettingStartedCapture
- // example if it creates an error
- video = new Capture(this, width, height);
-
- // Start capturing the images from the camera
- video.start();
-}
-
-void captureEvent(Capture camera) {
- camera.read();
-
- // Copy the current video frame into an image, so it can be stored in the buffer
- PImage img = createImage(width, height, RGB);
- video.loadPixels();
- arrayCopy(video.pixels, img.pixels);
-
- frames.add(img);
-
- // Once there are enough frames, remove the oldest one when adding a new one
- if (frames.size() > height/4) {
- frames.remove(0);
- }
-}
-
-void draw() {
- // Set the image counter to 0
- int currentImage = 0;
-
- loadPixels();
-
- // Begin a loop for displaying pixel rows of 4 pixels height
- for (int y = 0; y < video.height; y+=4) {
- // Go through the frame buffer and pick an image, starting with the oldest one
- if (currentImage < frames.size()) {
- PImage img = (PImage)frames.get(currentImage);
-
- if (img != null) {
- img.loadPixels();
-
- // Put 4 rows of pixels on the screen
- for (int x = 0; x < video.width; x++) {
- pixels[x + y * width] = img.pixels[x + y * video.width];
- pixels[x + (y + 1) * width] = img.pixels[x + (y + 1) * video.width];
- pixels[x + (y + 2) * width] = img.pixels[x + (y + 2) * video.width];
- pixels[x + (y + 3) * width] = img.pixels[x + (y + 3) * video.width];
- }
- }
-
- // Increase the image counter
- currentImage++;
-
- } else {
- break;
- }
- }
-
- updatePixels();
-
- // For recording an image sequence
- //saveFrame("frame-####.jpg");
-}
-
-
-
-
diff --git a/java/libraries/video/examples/Movie/Frames/Frames.pde b/java/libraries/video/examples/Movie/Frames/Frames.pde
deleted file mode 100644
index 04fbf81fb..000000000
--- a/java/libraries/video/examples/Movie/Frames/Frames.pde
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Frames
- * by Andres Colubri.
- *
- * Moves through the video one frame at the time by using the
- * arrow keys. It estimates the frame counts using the framerate
- * of the movie file, so it might not be exact in some cases.
- */
-
-import processing.video.*;
-
-Movie mov;
-int newFrame = 0;
-int movFrameRate = 30;
-
-void setup() {
- size(640, 360);
- background(0);
- // Load and set the video to play. Setting the video
- // in play mode is needed so at least one frame is read
- // and we can get duration, size and other information from
- // the video stream.
- mov = new Movie(this, "transit.mov");
-
- // Pausing the video at the first frame.
- mov.play();
- mov.jump(0);
- mov.pause();
-}
-
-void movieEvent(Movie m) {
- m.read();
-}
-
-void draw() {
- background(0);
- image(mov, 0, 0, width, height);
- fill(255);
- text(getFrame() + " / " + (getLength() - 1), 10, 30);
-}
-
-void keyPressed() {
- if (key == CODED) {
- if (keyCode == LEFT) {
- if (0 < newFrame) newFrame--;
- } else if (keyCode == RIGHT) {
- if (newFrame < getLength() - 1) newFrame++;
- }
- }
- setFrame(newFrame);
-}
-
-int getFrame() {
- return ceil(mov.time() * 30) - 1;
-}
-
-void setFrame(int n) {
- mov.play();
-
- // The duration of a single frame:
- float frameDuration = 1.0 / movFrameRate;
-
- // We move to the middle of the frame by adding 0.5:
- float where = (n + 0.5) * frameDuration;
-
- // Taking into account border effects:
- float diff = mov.duration() - where;
- if (diff < 0) {
- where += diff - 0.25 * frameDuration;
- }
-
- mov.jump(where);
- mov.pause();
-}
-
-int getLength() {
- return int(mov.duration() * movFrameRate);
-}
-
diff --git a/java/libraries/video/examples/Movie/Loop/Loop.pde b/java/libraries/video/examples/Movie/Loop/Loop.pde
deleted file mode 100644
index 05383b620..000000000
--- a/java/libraries/video/examples/Movie/Loop/Loop.pde
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Loop.
- *
- * Shows how to load and play a QuickTime movie file.
- *
- */
-
-import processing.video.*;
-
-Movie movie;
-
-void setup() {
- size(640, 360);
- background(0);
- // Load and play the video in a loop
- movie = new Movie(this, "transit.mov");
- movie.loop();
-}
-
-void movieEvent(Movie m) {
- m.read();
-}
-
-void draw() {
- //if (movie.available() == true) {
- // movie.read();
- //}
- image(movie, 0, 0, width, height);
-}
diff --git a/java/libraries/video/examples/Movie/Pixelate/Pixelate.pde b/java/libraries/video/examples/Movie/Pixelate/Pixelate.pde
deleted file mode 100644
index 1ac791581..000000000
--- a/java/libraries/video/examples/Movie/Pixelate/Pixelate.pde
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Pixelate
- * by Hernando Barragan.
- *
- * Load a QuickTime file and display the video signal
- * using rectangles as pixels by reading the values stored
- * in the current video frame pixels array.
- */
-
-import processing.video.*;
-
-int numPixelsWide, numPixelsHigh;
-int blockSize = 10;
-Movie mov;
-color movColors[];
-
-void setup() {
- size(640, 360);
- noStroke();
- mov = new Movie(this, "transit.mov");
- mov.loop();
- numPixelsWide = width / blockSize;
- numPixelsHigh = height / blockSize;
- println(numPixelsWide);
- movColors = new color[numPixelsWide * numPixelsHigh];
-}
-
-// Display values from movie
-void draw() {
- if (mov.available() == true) {
- mov.read();
- mov.loadPixels();
- int count = 0;
- for (int j = 0; j < numPixelsHigh; j++) {
- for (int i = 0; i < numPixelsWide; i++) {
- movColors[count] = mov.get(i*blockSize, j*blockSize);
- count++;
- }
- }
- }
-
- background(255);
- for (int j = 0; j < numPixelsHigh; j++) {
- for (int i = 0; i < numPixelsWide; i++) {
- fill(movColors[j*numPixelsWide + i]);
- rect(i*blockSize, j*blockSize, blockSize, blockSize);
- }
- }
-
-}
-
diff --git a/java/libraries/video/examples/Movie/Reverse/Reverse.pde b/java/libraries/video/examples/Movie/Reverse/Reverse.pde
deleted file mode 100644
index 817368aa8..000000000
--- a/java/libraries/video/examples/Movie/Reverse/Reverse.pde
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Reverse playback example.
- *
- * The Movie.speed() method allows to change the playback speed.
- * Use negative values for backwards playback. Note that not all
- * video formats support backwards playback. This depends on the
- * underlying gstreamer plugins used by gsvideo. For example, the
- * theora codec does support backward playback, but not so the H264
- * codec, at least in its current version.
- *
- */
-
-import processing.video.*;
-
-Movie mov;
-boolean speedSet = false;
-boolean once = true;
-
-void setup() {
- size(640, 360);
- background(0);
- mov = new Movie(this, "transit.mkv");
- mov.play();
-}
-
-void movieEvent(Movie m) {
- m.read();
- if (speedSet == true) {
- speedSet = false;
- }
-}
-
-void draw() {
- if (speedSet == false && once == true) {
- // Setting the speed should be done only once,
- // this is the reason for the if statement.
- speedSet = true;
- once = false;
- mov.jump(mov.duration());
- // -1 means backward playback at normal speed.
- mov.speed(-1.0);
- // Setting to play again, since the movie stop
- // playback once it reached the end.
- mov.play();
- }
- image(mov, 0, 0, width, height);
-}
-
diff --git a/java/libraries/video/examples/Movie/Scratch/Scratch.pde b/java/libraries/video/examples/Movie/Scratch/Scratch.pde
deleted file mode 100644
index dbd9fdcec..000000000
--- a/java/libraries/video/examples/Movie/Scratch/Scratch.pde
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Scratch
- * by Andres Colubri.
- *
- * Move the cursor horizontally across the screen to set
- * the position in the movie file.
- */
-
-import processing.video.*;
-
-Movie mov;
-
-void setup() {
- size(640, 360);
- background(0);
-
- mov = new Movie(this, "transit.mov");
-
- // Pausing the video at the first frame.
- mov.play();
- mov.jump(0);
- mov.pause();
-}
-
-void draw() {
-
- if (mov.available()) {
- mov.read();
- // A new time position is calculated using the current mouse location:
- float f = map(mouseX, 0, width, 0, 1);
- float t = mov.duration() * f;
- mov.play();
- mov.jump(t);
- mov.pause();
- }
-
- image(mov, 0, 0);
-}
-
diff --git a/java/libraries/video/examples/Movie/Speed/Speed.pde b/java/libraries/video/examples/Movie/Speed/Speed.pde
deleted file mode 100644
index c08137651..000000000
--- a/java/libraries/video/examples/Movie/Speed/Speed.pde
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Speed.
- *
- * Use the Movie.speed() method to change
- * the playback speed.
- *
- */
-
-import processing.video.*;
-
-Movie mov;
-
-void setup() {
- size(640, 360);
- background(0);
- mov = new Movie(this, "transit.mov");
- mov.loop();
-}
-
-void movieEvent(Movie movie) {
- mov.read();
-}
-
-void draw() {
- image(mov, 0, 0);
-
- float newSpeed = map(mouseX, 0, width, 0.1, 2);
- mov.speed(newSpeed);
-
- fill(255);
- text(nfc(newSpeed, 2) + "X", 10, 30);
-}
-
diff --git a/java/libraries/video/library/.gitignore b/java/libraries/video/library/.gitignore
deleted file mode 100644
index 374cc6ccf..000000000
--- a/java/libraries/video/library/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-video.jar
diff --git a/java/libraries/video/library/export.txt b/java/libraries/video/library/export.txt
deleted file mode 100644
index 189254a96..000000000
--- a/java/libraries/video/library/export.txt
+++ /dev/null
@@ -1 +0,0 @@
-name = Video
diff --git a/java/libraries/video/src/processing/video/Capture.java b/java/libraries/video/src/processing/video/Capture.java
deleted file mode 100644
index 5d4221fb5..000000000
--- a/java/libraries/video/src/processing/video/Capture.java
+++ /dev/null
@@ -1,1227 +0,0 @@
-/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
-
-/*
- Part of the Processing project - http://processing.org
-
- Copyright (c) 2004-12 Ben Fry and Casey Reas
- The previous version of this code was developed by Hernando Barragan
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General
- Public License along with this library; if not, write to the
- Free Software Foundation, Inc., 59 Temple Place, Suite 330,
- Boston, MA 02111-1307 USA
-*/
-
-package processing.video;
-
-import processing.core.*;
-
-import java.nio.*;
-import java.util.ArrayList;
-import java.io.File;
-import java.lang.reflect.*;
-
-import org.gstreamer.*;
-import org.gstreamer.Buffer;
-import org.gstreamer.elements.*;
-import org.gstreamer.interfaces.PropertyProbe;
-import org.gstreamer.interfaces.Property;
-
-/**
- * ( begin auto-generated from Capture.xml )
- *
- * Datatype for storing and manipulating video frames from an attached
- * capture device such as a camera. Use Capture.list() to show the
- * names of any attached devices. Using the version of the constructor
- * without name will attempt to use the last device used by a
- * QuickTime program.
- *
- * ( end auto-generated )
- *
- * Advanced
- * Class for storing and manipulating video frames from an attached capture
- * device such as a camera.
- * @webref video
- * @usage application
- */
-public class Capture extends PImage implements PConstants {
- protected static String sourceElementName;
- protected static String devicePropertyName;
- protected static String indexPropertyName;
- // Default gstreamer capture plugin for each platform, and property names.
- static {
- if (PApplet.platform == MACOSX) {
- sourceElementName = "qtkitvideosrc";
- devicePropertyName = "device-name";
- indexPropertyName = "device-index";
- } else if (PApplet.platform == WINDOWS) {
- sourceElementName = "ksvideosrc";
- devicePropertyName = "device-name";
- indexPropertyName = "device-index";
- } else if (PApplet.platform == LINUX) {
- sourceElementName = "v4l2src";
- // The "device" property in v4l2src expects the device location
- // (/dev/video0, etc). v4l2src has "device-name", which requires the
- // human-readable name... but how to query in linux?.
- devicePropertyName = "device";
- indexPropertyName = "device-fd";
- } else {}
- }
- protected static boolean useResMacHack = true;
-
- public float frameRate;
- public Pipeline pipeline;
-
- protected boolean capturing = false;
-
- protected String frameRateString;
- protected int bufWidth;
- protected int bufHeight;
-
- protected String sourceName;
- protected Element sourceElement;
-
- protected Method captureEventMethod;
- protected Object eventHandler;
-
- protected boolean available;
- protected boolean pipelineReady;
- protected boolean newFrame;
-
- protected RGBDataAppSink rgbSink = null;
- protected int[] copyPixels = null;
-
- protected boolean firstFrame = true;
-
- protected int reqWidth;
- protected int reqHeight;
-
- protected boolean useBufferSink = false;
- protected boolean outdatedPixels = true;
- protected Object bufferSink;
- protected Method sinkCopyMethod;
- protected Method sinkSetMethod;
- protected Method sinkDisposeMethod;
- protected Method sinkGetMethod;
- protected String copyMask;
- protected Buffer natBuffer = null;
- protected BufferDataAppSink natSink = null;
-
-
- public Capture(PApplet parent) {
- String[] configs = Capture.list();
- if (configs.length == 0) {
- throw new RuntimeException("There are no cameras available for capture");
- }
- String name = getName(configs[0]);
- int[] size = getSize(configs[0]);
- String fps = getFrameRate(configs[0]);
- String idName;
- Object idValue;
- if (devicePropertyName.equals("")) {
- // For plugins without device name property, the name is casted
- // as an index
- idName = indexPropertyName;
- idValue = new Integer(PApplet.parseInt(name));
- } else {
- idName = devicePropertyName;
- idValue = name;
- }
- initGStreamer(parent, size[0], size[1], sourceElementName,
- idName, idValue, fps);
- }
-
-
- public Capture(PApplet parent, String requestConfig) {
- String name = getName(requestConfig);
- int[] size = getSize(requestConfig);
- String fps = getFrameRate(requestConfig);
- String idName;
- Object idValue;
- if (devicePropertyName.equals("")) {
- // For plugins without device name property, the name is casted
- // as an index
- idName = indexPropertyName;
- idValue = new Integer(PApplet.parseInt(name));
- } else {
- idName = devicePropertyName;
- idValue = name;
- }
- initGStreamer(parent, size[0], size[1], sourceElementName,
- idName, idValue, fps);
- }
-
-
- /**
- * @param parent typically use "this"
- * @param requestWidth width of the frame
- * @param requestHeight height of the frame
- */
- public Capture(PApplet parent, int requestWidth, int requestHeight) {
- super(requestWidth, requestHeight, RGB);
- initGStreamer(parent, requestWidth, requestHeight, sourceElementName,
- null, null, "");
- }
-
-
- /**
- * Advanced
- * Constructor that takes resolution and framerate.
- *
- * @param frameRate number of frames to read per second
- */
- public Capture(PApplet parent, int requestWidth, int requestHeight,
- int frameRate) {
- super(requestWidth, requestHeight, RGB);
- initGStreamer(parent, requestWidth, requestHeight, sourceElementName,
- null, null, frameRate + "/1");
- }
-
-
- /**
- * Advanced
- * This constructor allows to specify resolution and camera name.
- *
- * @param cameraName name of the camera
- */
- public Capture(PApplet parent, int requestWidth, int requestHeight,
- String cameraName) {
- super(requestWidth, requestHeight, RGB);
- String idName;
- Object idValue;
- if (devicePropertyName.equals("")) {
- // For plugins without device name property, the name is casted
- // as an index
- idName = indexPropertyName;
- idValue = new Integer(PApplet.parseInt(cameraName));
- } else {
- idName = devicePropertyName;
- idValue = cameraName;
- }
- initGStreamer(parent, requestWidth, requestHeight, sourceElementName,
- idName, idValue, "");
- }
-
-
- /**
- * Advanced
- * This constructor allows to specify the camera name and the desired
- * framerate, in addition to the resolution.
- */
- public Capture(PApplet parent, int requestWidth, int requestHeight,
- String cameraName, int frameRate) {
- super(requestWidth, requestHeight, RGB);
- String idName;
- Object idValue;
- if (devicePropertyName.equals("")) {
- // For plugins without device name property, the name is casted
- // as an index
- idName = indexPropertyName;
- idValue = new Integer(PApplet.parseInt(cameraName));
- } else {
- idName = devicePropertyName;
- idValue = cameraName;
- }
- initGStreamer(parent, requestWidth, requestHeight, sourceElementName,
- idName, idValue, frameRate + "/1");
- }
-
-
- /**
- * Disposes all the native resources associated to this capture device.
- *
- * NOTE: This is not official API and may/will be removed at any time.
- */
- public void dispose() {
- if (pipeline != null) {
- try {
- if (pipeline.isPlaying()) {
- pipeline.stop();
- pipeline.getState();
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- pixels = null;
-
- copyPixels = null;
- if (rgbSink != null) {
- rgbSink.removeListener();
- rgbSink.dispose();
- rgbSink = null;
- }
-
- natBuffer = null;
- if (natSink != null) {
- natSink.removeListener();
- natSink.dispose();
- natSink = null;
- }
-
- pipeline.dispose();
- pipeline = null;
-
- parent.g.removeCache(this);
- parent.unregisterMethod("dispose", this);
- parent.unregisterMethod("post", this);
- }
- }
-
-
- /**
- * Finalizer of the class.
- */
- protected void finalize() throws Throwable {
- try {
- dispose();
- } finally {
- super.finalize();
- }
- }
-
-
- /**
- * ( begin auto-generated from Capture_available.xml )
- *
- * Returns "true" when a new video frame is available to read.
- *
- * ( end auto-generated )
- *
- * @webref capture
- * @brief Returns "true" when a new video frame is available to read
- */
- public boolean available() {
- return available;
- }
-
-
- /**
- * ( begin auto-generated from Capture_start.xml )
- *
- * Starts capturing frames from the selected device.
- *
- * ( end auto-generated )
- *
- * @webref capture
- * @brief Starts capturing frames from the selected device
- */
- public void start() {
- boolean init = false;
- if (!pipelineReady) {
- initPipeline();
- init = true;
- }
-
- capturing = true;
- pipeline.play();
-
- if (init) {
- checkResIsValid();
- }
- }
-
-
- /**
- * ( begin auto-generated from Capture_stop.xml )
- *
- * Stops capturing frames from an attached device.
- *
- * ( end auto-generated )
- *
- * @webref capture
- * @brief Stops capturing frames from an attached device
- */
- public void stop() {
- if (!pipelineReady) {
- initPipeline();
- }
-
- capturing = false;
- pipeline.stop();
- pipeline.getState();
- }
-
-
- /**
- * ( begin auto-generated from Capture_read.xml )
- *
- * Reads the current video frame.
- *
- * ( end auto-generated )
- *
- * Advanced
- * This method() and invokeEvent() are now synchronized, so that invokeEvent()
- * can't be called whilst we're busy reading. Problematic frame error
- * fixed by Charl P. Botha
- *
- * @webref capture
- * @brief Reads the current video frame
- */
- public synchronized void read() {
- if (frameRate < 0) {
- // Framerate not set yet, so we obtain from stream,
- // which is already playing since we are in read().
- frameRate = getSourceFrameRate();
- }
-
- if (useBufferSink) { // The native buffer from gstreamer is copied to the buffer sink.
- outdatedPixels = true;
- if (natBuffer == null) {
- return;
- }
-
- if (firstFrame) {
- super.init(bufWidth, bufHeight, ARGB);
- firstFrame = false;
- }
-
- if (bufferSink == null) {
- Object cache = parent.g.getCache(this);
- if (cache == null) {
- return;
- }
- setBufferSink(cache);
- getSinkMethods();
- }
-
- ByteBuffer byteBuffer = natBuffer.getByteBuffer();
-
- try {
- sinkCopyMethod.invoke(bufferSink,
- new Object[] { natBuffer, byteBuffer, bufWidth, bufHeight });
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- natBuffer = null;
- } else { // The pixels just read from gstreamer are copied to the pixels array.
- if (copyPixels == null) {
- return;
- }
-
- if (firstFrame) {
- super.init(bufWidth, bufHeight, RGB);
- firstFrame = false;
- }
-
- int[] temp = pixels;
- pixels = copyPixels;
- updatePixels();
- copyPixels = temp;
- }
-
- available = false;
- newFrame = true;
- }
-
-
- public synchronized void loadPixels() {
- super.loadPixels();
- if (useBufferSink) {
- if (natBuffer != null) {
- // This means that the OpenGL texture hasn't been created so far (the
- // video frame not drawn using image()), but the user wants to use the
- // pixel array, which we can just get from natBuffer.
- IntBuffer buf = natBuffer.getByteBuffer().asIntBuffer();
- buf.rewind();
- buf.get(pixels);
- Video.convertToARGB(pixels, width, height);
- } else if (sinkGetMethod != null) {
- try {
- // sinkGetMethod will copy the latest buffer to the pixels array,
- // and the pixels will be copied to the texture when the OpenGL
- // renderer needs to draw it.
- sinkGetMethod.invoke(bufferSink, new Object[] { pixels });
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- outdatedPixels = false;
- }
- }
-
-
- public int get(int x, int y) {
- if (outdatedPixels) loadPixels();
- return super.get(x, y);
- }
-
-
- protected void getImpl(int sourceX, int sourceY,
- int sourceWidth, int sourceHeight,
- PImage target, int targetX, int targetY) {
- if (outdatedPixels) loadPixels();
- super.getImpl(sourceX, sourceY, sourceWidth, sourceHeight,
- target, targetX, targetY);
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // List methods.
-
-
- /**
- * ( begin auto-generated from Capture_list.xml )
- *
- * Gets a list of all available capture devices such as a camera. Use
- * print() to write the information to the text window.
- *
- * ( end auto-generated )
- *
- * @webref capture
- * @brief Gets a list of all available capture devices such as a camera
- */
- static public String[] list() {
- if (devicePropertyName.equals("")) {
- return list(sourceElementName, indexPropertyName);
- } else {
- return list(sourceElementName, devicePropertyName);
- }
- }
-
-
- static protected String[] list(String sourceName, String propertyName) {
- Video.init();
- ArrayList devices = listDevices(sourceName, propertyName);
-
- ArrayList configList = new ArrayList();
- for (String device: devices) {
- ArrayList resolutions = listResolutions(sourceName, propertyName,
- device);
- if (0 < resolutions.size()) {
- for (String res: resolutions) {
- configList.add("name=" + device + "," + res);
- }
- } else {
- configList.add("name=" + device);
- }
- }
-
- String[] configs = new String[configList.size()];
- for (int i = 0; i < configs.length; i++) {
- configs[i] = configList.get(i);
- }
-
- return configs;
- }
-
-
- static protected ArrayList listDevices(String sourceName,
- String propertyName) {
- ArrayList devices = new ArrayList();
- try {
- // Using property-probe interface
- Element videoSource = ElementFactory.make(sourceName, "Source");
- PropertyProbe probe = PropertyProbe.wrap(videoSource);
- if (probe != null) {
- Property property = probe.getProperty(propertyName);
- if (property != null) {
- Object[] values = probe.getValues(property);
- if (values != null) {
- for (int i = 0; i < values.length; i++) {
- if (values[i] instanceof String) {
- devices.add((String)values[i]);
- } else if (values[i] instanceof Integer) {
- devices.add(((Integer)values[i]).toString());
- }
- }
- }
- }
- }
- } catch (IllegalArgumentException e) {
- if (PApplet.platform == LINUX) {
- // Linux hack to detect currently connected cameras
- // by looking for device files named /dev/video0, /dev/video1, etc.
- devices = new ArrayList();
- String dir = "/dev";
- File libPath = new File(dir);
- String[] files = libPath.list();
- if (files != null) {
- for (int i = 0; i < files.length; i++) {
- if (-1 < files[i].indexOf("video")) {
- devices.add("/dev/" + files[i]);
- }
- }
- }
- } else {
- PGraphics.showWarning("The capture plugin does not support " +
- "device query!");
- devices = new ArrayList();
- }
- }
- return devices;
- }
-
-
- static protected ArrayList listResolutions(String sourceName,
- String propertyName,
- Object propertyValue) {
- // Creating temporary pipeline so that we can query
- // the resolutions supported by the device.
- Pipeline testPipeline = new Pipeline("test");
- Element source = ElementFactory.make(sourceName, "source");
- source.set(propertyName, propertyValue);
-
- BufferDataAppSink sink = new BufferDataAppSink("sink", "",
- new BufferDataAppSink.Listener() {
- public void bufferFrame(int w, int h, Buffer buffer) { }
- });
- testPipeline.addMany(source, sink);
- Element.linkMany(source, sink);
-
- // Play/pause sequence (with getState() calls to to make sure
- // all async operations are done) to trigger the capture momentarily
- // for the device and obtain its supported resolutions.
- testPipeline.play();
- testPipeline.getState();
- testPipeline.pause();
- testPipeline.getState();
-
- ArrayList resolutions = new ArrayList();
- addResFromSource(resolutions, source);
-
- testPipeline.stop();
- testPipeline.getState();
-
- if (sink != null) {
- sink.removeListener();
- sink.dispose();
- }
-
- testPipeline.dispose();
- return resolutions;
- }
-
-
- static protected void addResFromSource(ArrayList res, Element src) {
- if (PApplet.platform == MACOSX && useResMacHack) {
- addResFromSourceMacHack(res, src);
- } else {
- addResFromSourceImpl(res, src);
- }
- }
-
-
- static protected void addResFromSourceImpl(ArrayList res,
- Element src) {
- for (Pad pad : src.getPads()) {
- Caps caps = pad.getCaps();
- int n = caps.size();
- for (int i = 0; i < n; i++) {
- Structure str = caps.getStructure(i);
-
- if (!str.hasIntField("width") || !str.hasIntField("height")) continue;
-
- int w = ((Integer)str.getValue("width")).intValue();
- int h = ((Integer)str.getValue("height")).intValue();
-
- if (PApplet.platform == WINDOWS) {
- // In Windows the getValueList() method doesn't seem to
- // return a valid list of fraction values, so working on
- // the string representation of the caps structure.
- addResFromString(res, str.toString(), w, h);
- } else {
- addResFromStructure(res, str, w, h);
- }
- }
- }
- }
-
-
- // The problem on OSX, at least when using qtkitvideosrc, is that it is only
- // possible to obtain a single supported caps, the native maximum, using
- // getNegotiatedCaps. getCaps() just gives the maximum possible ranges that
- // are useless to build a list of supported resolutions. Using the fact that
- // QTKit allows to capture streams at arbitrary resolutions, then the list is
- // faked by repeatedly dividing the maximum by 2 until the width becomes too
- // small (or not divisible by 2).
- static protected void addResFromSourceMacHack(ArrayList res,
- Element src) {
- for (Pad pad : src.getPads()) {
- Caps caps = pad.getNegotiatedCaps();
- int n = caps.size();
- if (0 < n) {
- Structure str = caps.getStructure(0);
-
- if (!str.hasIntField("width") || !str.hasIntField("height")) return;
-
- int w = ((Integer)str.getValue("width")).intValue();
- int h = ((Integer)str.getValue("height")).intValue();
- while (80 <= w) {
- int num = 30;
- int den = 1;
- try {
- Fraction fr = str.getFraction("framerate");
- num = fr.numerator;
- den = fr.denominator;
- } catch (Exception e) {
- }
-
- res.add(makeResolutionString(w, h, num, den));
- if (num == 30 && den == 1) {
- // Adding additional framerates to allow for slower capture. Again,
- // QTKit can output frames at arbitrary rates.
- res.add(makeResolutionString(w, h, 15, 1));
- res.add(makeResolutionString(w, h, 1, 1));
- }
-
- if (w % 2 == 0 && h % 2 == 0) {
- w /= 2;
- h /= 2;
- } else {
- break;
- }
- }
- }
- }
- }
-
-
- static protected void addResFromString(ArrayList res, String str,
- int w, int h) {
- int n0 = str.indexOf("framerate=(fraction)");
- if (-1 < n0) {
- String temp = str.substring(n0 + 20, str.length());
- int n1 = temp.indexOf("[");
- int n2 = temp.indexOf("]");
- if (-1 < n1 && -1 < n2) {
- // A list of fractions enclosed between '[' and ']'
- temp = temp.substring(n1 + 1, n2);
- String[] fractions = temp.split(",");
- for (int k = 0; k < fractions.length; k++) {
- String fpsStr = fractions[k].trim();
- res.add(makeResolutionString(w, h, fpsStr));
- }
- } else {
- // A single fraction
- int n3 = temp.indexOf(",");
- int n4 = temp.indexOf(";");
- if (-1 < n3 || -1 < n4) {
- int n5 = -1;
- if (n3 == -1) {
- n5 = n4;
- } else if (n4 == -1) {
- n5 = n3;
- } else {
- n5 = PApplet.min(n3, n4);
- }
-
- temp = temp.substring(0, n5);
- String fpsStr = temp.trim();
- res.add(makeResolutionString(w, h, fpsStr));
- }
- }
- }
- }
-
-
- static protected void addResFromStructure(ArrayList res,
- Structure str, int w, int h) {
- boolean singleFrac = false;
- try {
- Fraction fr = str.getFraction("framerate");
- res.add(makeResolutionString(w, h, fr.numerator, fr.denominator));
- singleFrac = true;
- } catch (Exception e) {
- }
-
- if (!singleFrac) {
- ValueList flist = null;
-
- try {
- flist = str.getValueList("framerate");
- } catch (Exception e) {
- }
-
- if (flist != null) {
- // All the framerates are put together, but this is not
- // entirely accurate since there might be some of them
- // that work only for certain resolutions.
- for (int k = 0; k < flist.getSize(); k++) {
- Fraction fr = flist.getFraction(k);
- res.add(makeResolutionString(w, h, fr.numerator, fr.denominator));
- }
- }
- }
- }
-
-
- static protected String makeResolutionString(int width, int height, int
- fpsNumerator,
- int fpsDenominator) {
- String res = "size=" + width + "x" + height + ",fps=" + fpsNumerator;
- if (fpsDenominator != 1) {
- res += "/" + fpsDenominator;
- }
- return res;
- }
-
-
- static protected String makeResolutionString(int width, int height,
- String fpsStr) {
- String res = "size=" + width + "x" + height;
- String[] parts = fpsStr.split("/");
- if (parts.length == 2) {
- int fpsNumerator = PApplet.parseInt(parts[0]);
- int fpsDenominator = PApplet.parseInt(parts[1]);
- res += ",fps=" + fpsNumerator;
- if (fpsDenominator != 1) {
- res += "/" + fpsDenominator;
- }
- }
- return res;
- }
-
-
- protected void checkResIsValid() {
- ArrayList resolutions = new ArrayList();
- addResFromSource(resolutions, sourceElement);
-
- boolean valid = resolutions.size() == 0;
- for (String res: resolutions) {
- if (validRes(res)) {
- valid = true;
- break;
- }
- }
-
- if (!valid) {
- String fpsStr = "";
- if (!frameRateString.equals("")) {
- fpsStr = ", " + frameRateString + "fps";
- }
- throw new RuntimeException("The requested resolution of " + reqWidth +
- "x" + reqHeight + fpsStr +
- " is not supported by the selected capture " +
- "device.\n");
- }
- }
-
-
- protected void checkValidDevices(String src) {
- ArrayList devices;
- if (devicePropertyName.equals("")) {
- devices = listDevices(src, indexPropertyName);
- } else {
- devices = listDevices(src, devicePropertyName);
- }
- if (devices.size() == 0) {
- throw new RuntimeException("There are no capture devices connected to " +
- "this computer.\n");
- }
- }
-
-
- protected boolean validRes(String res) {
- int[] size = getSize(res);
- String fps = getFrameRate(res);
- return (reqWidth == 0 || reqHeight == 0 ||
- (size[0] == reqWidth && size[1] == reqHeight)) &&
- (frameRateString.equals("") || frameRateString.equals(fps));
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Initialization methods.
-
-
- // The main initialization here.
- protected void initGStreamer(PApplet parent, int rw, int rh, String src,
- String idName, Object idValue,
- String fps) {
- this.parent = parent;
-
- Video.init();
- checkValidDevices(src);
-
- // register methods
- parent.registerMethod("dispose", this);
- parent.registerMethod("post", this);
-
- setEventHandlerObject(parent);
-
- pipeline = new Pipeline("Video Capture");
-
- frameRateString = fps;
- if (frameRateString.equals("")) {
- frameRate = -1;
- } else {
- String[] parts = frameRateString.split("/");
- if (parts.length == 2) {
- int fpsDenominator = PApplet.parseInt(parts[0]);
- int fpsNumerator = PApplet.parseInt(parts[1]);
- frameRate = (float)fpsDenominator / (float)fpsNumerator;
- } else if (parts.length == 1) {
- frameRateString += "/1";
- frameRate = PApplet.parseFloat(parts[0]);
- } else {
- frameRateString = "";
- frameRate = -1;
- }
- }
-
- reqWidth = rw;
- reqHeight = rh;
-
- sourceName = src;
- sourceElement = ElementFactory.make(src, "Source");
-
- if (idName != null && !idName.equals("")) {
- sourceElement.set(idName, idValue);
- }
-
- bufWidth = bufHeight = 0;
- pipelineReady = false;
- }
-
-
- protected void initPipeline() {
- String whStr = "";
- if (0 < reqWidth && 0 < reqHeight) {
- whStr = "width=" + reqWidth + ", height=" + reqHeight;
- } else {
- PGraphics.showWarning("Resolution information not available, attempting" +
- " to open the capture device at 320x240");
- whStr = "width=320, height=240";
- }
-
- String fpsStr = "";
- if (!frameRateString.equals("")) {
- // If the framerate string is empty we left the source element
- // to use the default value.
- fpsStr = ", framerate=" + frameRateString;
- }
-
- if (bufferSink != null || (Video.useGLBufferSink && parent.g.isGL())) {
- useBufferSink = true;
-
- if (bufferSink != null) {
- getSinkMethods();
- }
-
- if (copyMask == null || copyMask.equals("")) {
- initCopyMask();
- }
-
- String caps = whStr + fpsStr + ", " + copyMask;
-
- natSink = new BufferDataAppSink("nat", caps,
- new BufferDataAppSink.Listener() {
- public void bufferFrame(int w, int h, Buffer buffer) {
- invokeEvent(w, h, buffer);
- }
- });
-
- natSink.setAutoDisposeBuffer(false);
-
- // No need for rgbSink.dispose(), because the addMany() doesn't increment the
- // refcount of the videoSink object.
-
- pipeline.addMany(sourceElement, natSink);
- Element.linkMany(sourceElement, natSink);
-
- } else {
- Element conv = ElementFactory.make("ffmpegcolorspace", "ColorConverter");
-
- Element videofilter = ElementFactory.make("capsfilter", "ColorFilter");
- videofilter.setCaps(new Caps("video/x-raw-rgb, width=" + reqWidth +
- ", height=" + reqHeight +
- ", bpp=32, depth=24" + fpsStr));
-
- rgbSink = new RGBDataAppSink("rgb",
- new RGBDataAppSink.Listener() {
- public void rgbFrame(int w, int h, IntBuffer buffer) {
- invokeEvent(w, h, buffer);
- }
- });
-
- // Setting direct buffer passing in the video sink.
- rgbSink.setPassDirectBuffer(Video.passDirectBuffer);
-
- // No need for rgbSink.dispose(), because the addMany() doesn't increment
- // the refcount of the videoSink object.
-
- pipeline.addMany(sourceElement, conv, videofilter, rgbSink);
- Element.linkMany(sourceElement, conv, videofilter, rgbSink);
- }
-
- pipelineReady = true;
- newFrame = false;
- }
-
-
- /**
- * Uses a generic object as handler of the capture. This object should have a
- * captureEvent method that receives a Capture argument. This method will
- * be called upon a new frame read event.
- *
- */
- protected void setEventHandlerObject(Object obj) {
- eventHandler = obj;
-
- try {
- captureEventMethod = obj.getClass().getMethod("captureEvent", Capture.class);
- return;
- } catch (Exception e) {
- // no such method, or an error.. which is fine, just ignore
- }
-
- // The captureEvent method may be declared as receiving Object, rather
- // than Capture.
- try {
- captureEventMethod = obj.getClass().getMethod("captureEvent", Object.class);
- return;
- } catch (Exception e) {
- // no such method, or an error.. which is fine, just ignore
- }
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Stream event handling.
-
-
- /**
- * invokeEvent() and read() are synchronized so that they can not be
- * called simultaneously. when they were not synchronized, this caused
- * the infamous problematic frame crash.
- * found and fixed by Charl P. Botha
- */
- protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
- available = true;
- bufWidth = w;
- bufHeight = h;
- if (copyPixels == null) {
- copyPixels = new int[w * h];
- }
- buffer.rewind();
- try {
- buffer.get(copyPixels);
- } catch (BufferUnderflowException e) {
- e.printStackTrace();
- copyPixels = null;
- return;
- }
- fireCaptureEvent();
- }
-
-
- protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
- available = true;
- bufWidth = w;
- bufHeight = h;
- if (natBuffer != null) {
- // To handle the situation where read() is not called in the sketch,
- // so that the native buffers are not being sent to the sink,
- // and therefore, not disposed by it.
- natBuffer.dispose();
- }
- natBuffer = buffer;
- fireCaptureEvent();
- }
-
-
- private void fireCaptureEvent() {
- if (captureEventMethod != null) {
- try {
- captureEventMethod.invoke(eventHandler, this);
-
- } catch (Exception e) {
- System.err.println("error, disabling captureEvent()");
- e.printStackTrace();
- captureEventMethod = null;
- }
- }
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Stream query methods.
-
-
- protected float getSourceFrameRate() {
- for (Element sink : pipeline.getSinks()) {
- for (Pad pad : sink.getPads()) {
- Fraction frameRate = org.gstreamer.Video.getVideoFrameRate(pad);
- if (frameRate != null) {
- return (float)frameRate.toDouble();
- }
- }
- }
- return 0;
- }
-
-
- protected String getName(String config) {
- String name = "";
- String[] parts = PApplet.split(config, ',');
- for (String part: parts) {
- if (-1 < part.indexOf("name")) {
- String[] values = PApplet.split(part, '=');
- if (0 < values.length) {
- name = values[1];
- }
- }
- }
- return name;
- }
-
-
- protected int[] getSize(String config) {
- int[] wh = {0, 0};
- String[] parts = PApplet.split(config, ',');
- for (String part: parts) {
- if (-1 < part.indexOf("size")) {
- String[] values = PApplet.split(part, '=');
- if (0 < values.length) {
- String[] whstr = PApplet.split(values[1], 'x');
- if (whstr.length == 2) {
- wh[0] = PApplet.parseInt(whstr[0]);
- wh[1] = PApplet.parseInt(whstr[1]);
- }
- }
- }
- }
- return wh;
- }
-
-
- protected String getFrameRate(String config) {
- String fps = "";
- String[] parts = PApplet.split(config, ',');
- for (String part: parts) {
- if (-1 < part.indexOf("fps")) {
- String[] values = PApplet.split(part, '=');
- if (0 < values.length) {
- fps = values[1];
- if (fps.indexOf("/") == -1) {
- fps += "/1";
- }
- }
- }
- }
- return fps;
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Buffer source interface.
-
-
- /**
- * Sets the object to use as destination for the frames read from the stream.
- * The color conversion mask is automatically set to the one required to
- * copy the frames to OpenGL.
- *
- * NOTE: This is not official API and may/will be removed at any time.
- *
- * @param Object dest
- */
- public void setBufferSink(Object sink) {
- bufferSink = sink;
- initCopyMask();
- }
-
-
- /**
- * Sets the object to use as destination for the frames read from the stream.
- *
- * NOTE: This is not official API and may/will be removed at any time.
- *
- * @param Object dest
- * @param String mask
- */
- public void setBufferSink(Object sink, String mask) {
- bufferSink = sink;
- copyMask = mask;
- }
-
-
- /**
- * NOTE: This is not official API and may/will be removed at any time.
- */
- public boolean hasBufferSink() {
- return bufferSink != null;
- }
-
-
- /**
- * NOTE: This is not official API and may/will be removed at any time.
- */
- public synchronized void disposeBuffer(Object buf) {
- ((Buffer)buf).dispose();
- }
-
-
- protected void getSinkMethods() {
- try {
- sinkCopyMethod = bufferSink.getClass().getMethod("copyBufferFromSource",
- new Class[] { Object.class, ByteBuffer.class, int.class, int.class });
- } catch (Exception e) {
- throw new RuntimeException("Capture: provided sink object doesn't have " +
- "a copyBufferFromSource method.");
- }
-
- try {
- sinkSetMethod = bufferSink.getClass().getMethod("setBufferSource",
- new Class[] { Object.class });
- sinkSetMethod.invoke(bufferSink, new Object[] { this });
- } catch (Exception e) {
- throw new RuntimeException("Capture: provided sink object doesn't have "+
- "a setBufferSource method.");
- }
-
- try {
- sinkDisposeMethod = bufferSink.getClass().getMethod("disposeSourceBuffer",
- new Class[] { });
- } catch (Exception e) {
- throw new RuntimeException("Capture: provided sink object doesn't have " +
- "a disposeSourceBuffer method.");
- }
-
- try {
- sinkGetMethod = bufferSink.getClass().getMethod("getBufferPixels",
- new Class[] { int[].class });
- } catch (Exception e) {
- throw new RuntimeException("Capture: provided sink object doesn't have " +
- "a getBufferPixels method.");
- }
- }
-
-
- protected void initCopyMask() {
- if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
- copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
- } else {
- copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
- }
- }
-
-
- public synchronized void post() {
- if (useBufferSink && sinkDisposeMethod != null) {
- try {
- sinkDisposeMethod.invoke(bufferSink, new Object[] {});
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
-}
diff --git a/java/libraries/video/src/processing/video/LibraryLoader.java b/java/libraries/video/src/processing/video/LibraryLoader.java
deleted file mode 100644
index befa4c6d4..000000000
--- a/java/libraries/video/src/processing/video/LibraryLoader.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
-
-/*
- Part of the Processing project - http://processing.org
-
- Copyright (c) 2011-12 Ben Fry and Casey Reas
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General
- Public License along with this library; if not, write to the
- Free Software Foundation, Inc., 59 Temple Place, Suite 330,
- Boston, MA 02111-1307 USA
-*/
-
-package processing.video;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import com.sun.jna.Library;
-import com.sun.jna.Native;
-import com.sun.jna.Platform;
-
-/**
- * This class loads the gstreamer native libraries.
- * By Andres Colubri
- * Based on code by Tal Shalif
- *
- */
-public class LibraryLoader {
-
- public interface DummyLibrary extends Library {
- }
-
- private static LibraryLoader instance;
-
- static final Object[][] WINDOWS_DEPENDENCIES = {
- // glib libraries
- { "gio-2.0", new String[] {}, true },
- { "glib-2.0", new String[] {}, true },
- { "gmodule-2.0", new String[] {}, true },
- { "gobject-2.0", new String[] {}, true },
- { "gthread-2.0", new String[] {}, true },
-
- // Core gstreamer libraries
- { "gstapp-0.10", new String[] {}, true },
- { "gstaudio-0.10", new String[] {}, true },
- { "gstbase-0.10", new String[] {}, true },
- { "gstbasevideo-0.10", new String[] {}, true },
- { "gstcdda-0.10", new String[] {}, true },
- { "gstcontroller-0.10", new String[] {}, true },
- { "gstdataprotocol-0.10", new String[] {}, true },
- { "gstfft-0.10", new String[] {}, true },
- { "gstinterfaces-0.10", new String[] {}, true },
- { "gstnet-0.10", new String[] {}, true },
- { "gstnetbuffer-0.10", new String[] {}, true },
- { "gstpbutils-0.10", new String[] {}, true },
- { "gstphotography-0.10", new String[] {}, true },
- { "gstreamer-0.10", new String[] {}, true },
- { "gstriff-0.10", new String[] {}, true },
- { "gstrtp-0.10", new String[] {}, true },
- { "gstrtsp-0.10", new String[] {}, true },
- { "gstsdp-0.10", new String[] {}, true },
- { "gstsignalprocessor-0.10", new String[] {}, true },
- { "gsttag-0.10", new String[] {}, true },
- { "gstvideo-0.10", new String[] {}, true },
-
- // External libraries
- { "libiconv-2", new String[] {}, false },
- { "libintl-8", new String[] {}, false },
- { "libjpeg-8", new String[] {}, false },
- { "libogg-0", new String[] {}, false },
- { "liborc-0.4-0", new String[] {}, false },
- { "liborc-test-0.4-0", new String[] {}, false },
- { "libpng14-14", new String[] {}, false },
- { "libtheora-0", new String[] {}, false },
- { "libtheoradec-1", new String[] {}, false },
- { "libtheoraenc-1", new String[] {}, false },
- { "libvorbis-0", new String[] {}, false },
- { "libvorbisenc-2", new String[] {}, false },
- { "libvorbisfile-3", new String[] {}, false },
- { "libxml2-2", new String[] {}, false },
- { "zlib1", new String[] {}, false } };
-
- static final Object[][] MACOSX_DEPENDENCIES = {
- { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
- { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
- { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
- { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
- { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true } };
-
- static final Object[][] DEFAULT_DEPENDENCIES = {
- { "gstreamer-0.10", new String[] {}, true },
- { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
- { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
- { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
- { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
- { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true }, };
-
-
- static final Object[][] dependencies =
- Platform.isWindows() ? WINDOWS_DEPENDENCIES :
- Platform.isMac() ? MACOSX_DEPENDENCIES : DEFAULT_DEPENDENCIES;
-
-
- private static final Map loadedMap =
- new HashMap();
-
-
- private static final int RECURSIVE_LOAD_MAX_DEPTH = 5;
-
-
- private LibraryLoader() {
- }
-
-
- private void preLoadLibs() {
- for (Object[] a : dependencies) {
- load(a[0].toString(), DummyLibrary.class, true, 0, (Boolean) a[2]);
- }
- }
-
-
- private String[] findDeps(String name) {
-
- for (Object[] a : dependencies) {
- if (name.equals(a[0])) {
-
- return (String[]) a[1];
- }
- }
-
- return new String[] {}; // library dependancy load chain unspecified -
- // probably client call
- }
-
-
- public Object load(String name, Class> clazz, boolean reqLib) {
- return load(name, clazz, true, 0, reqLib);
- }
-
-
- private Object load(String name, Class> clazz, boolean forceReload,
- int depth, boolean reqLib) {
-
- assert depth < RECURSIVE_LOAD_MAX_DEPTH : String.format(
- "recursive max load depth %s has been exceeded", depth);
-
- Object library = loadedMap.get(name);
-
- if (null == library || forceReload) {
-
- // Logger.getAnonymousLogger().info(String.format("%" + ((depth + 1) * 2)
- // + "sloading %s", "->", name));
-
- try {
- String[] deps = findDeps(name);
-
- for (String lib : deps) {
- load(lib, DummyLibrary.class, false, depth + 1, reqLib);
- }
-
- library = loadLibrary(name, clazz, reqLib);
-
- if (library != null) {
- loadedMap.put(name, library);
- }
- } catch (Exception e) {
- if (reqLib)
- throw new RuntimeException(String.format(
- "can not load required library %s", name, e));
- else
- System.out.println(String.format("can not load library %s", name, e));
- }
- }
-
- return library;
- }
-
-
- private static Object loadLibrary(String name, Class> clazz,
- boolean reqLib) {
-
- // Logger.getAnonymousLogger().info(String.format("loading %s", name));
-
- String[] nameFormats;
- nameFormats = Platform.isWindows() ? new String[] { "lib%s", "lib%s-0",
- "%s" } : new String[] { "%s-0", "%s" };
-
- UnsatisfiedLinkError linkError = null;
-
- for (String fmt : nameFormats) {
- try {
- String s = String.format(fmt, name);
- //System.out.println("Trying to load library file " + s);
- Object obj = Native.loadLibrary(s, clazz);
- //System.out.println("Loaded library " + s + " succesfully!");
- return obj;
- } catch (UnsatisfiedLinkError ex) {
- linkError = ex;
- }
- }
-
- if (reqLib)
- throw new UnsatisfiedLinkError(
- String.format(
- "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with " +
- "-Djna.library.path=%s. Last error:%s",
- name, System.getProperty("jna.library.path"), linkError));
- else {
- System.out.println(String.format(
- "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with " +
- "-Djna.library.path=%s. Last error:%s",
- name, System.getProperty("jna.library.path"), linkError));
- return null;
- }
- }
-
-
- public static synchronized LibraryLoader getInstance() {
- if (null == instance) {
- instance = new LibraryLoader();
- instance.preLoadLibs();
- }
- return instance;
- }
-}
diff --git a/java/libraries/video/src/processing/video/LibraryPath.java b/java/libraries/video/src/processing/video/LibraryPath.java
deleted file mode 100644
index ccabe2467..000000000
--- a/java/libraries/video/src/processing/video/LibraryPath.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
-
-/*
- Part of the Processing project - http://processing.org
-
- Copyright (c) 2011-12 Ben Fry and Casey Reas
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General
- Public License along with this library; if not, write to the
- Free Software Foundation, Inc., 59 Temple Place, Suite 330,
- Boston, MA 02111-1307 USA
-*/
-
-package processing.video;
-
-import java.net.URL;
-
-import com.sun.jna.Platform;
-
-class LibraryPath {
- // This method returns the folder inside which the gstreamer library folder
- // is located.
- String get() {
- URL url = this.getClass().getResource("LibraryPath.class");
- if (url != null) {
- // Convert URL to string, taking care of spaces represented by the "%20"
- // string.
- String path = url.toString().replace("%20", " ");
- int n0 = path.indexOf('/');
-
- int n1 = -1;
-
- if (Platform.isLinux()) {
- return "";
- } else {
- n1 = path.indexOf("video.jar");
- if (Platform.isWindows()) {
- // In Windows, path string starts with "jar file/C:/..."
- // so the substring up to the first / is removed.
- n0++;
- }
- }
-
- if ((-1 < n0) && (-1 < n1)) {
- return path.substring(n0, n1);
- } else {
- return "";
- }
- }
- return "";
- }
-}
\ No newline at end of file
diff --git a/java/libraries/video/src/processing/video/Movie.java b/java/libraries/video/src/processing/video/Movie.java
deleted file mode 100644
index 958be1109..000000000
--- a/java/libraries/video/src/processing/video/Movie.java
+++ /dev/null
@@ -1,1026 +0,0 @@
-/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
-
-/*
- Part of the Processing project - http://processing.org
-
- Copyright (c) 2004-12 Ben Fry and Casey Reas
- The previous version of this code was developed by Hernando Barragan
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General
- Public License along with this library; if not, write to the
- Free Software Foundation, Inc., 59 Temple Place, Suite 330,
- Boston, MA 02111-1307 USA
-*/
-
-package processing.video;
-
-import processing.core.*;
-
-import java.awt.Dimension;
-import java.io.*;
-import java.net.URI;
-import java.nio.*;
-import java.util.concurrent.TimeUnit;
-import java.lang.reflect.*;
-
-import org.gstreamer.*;
-import org.gstreamer.Buffer;
-import org.gstreamer.elements.*;
-
-
-/**
- * ( begin auto-generated from Movie.xml )
- *
- * Datatype for storing and playing movies in Apple's QuickTime format.
- * Movies must be located in the sketch's data directory or an accessible
- * place on the network to load without an error.
- *
- * ( end auto-generated )
- *
- * @webref video
- * @usage application
- */
-public class Movie extends PImage implements PConstants {
- public static String[] supportedProtocols = { "http" };
- public float frameRate;
- public String filename;
- public PlayBin2 playbin;
-
- protected boolean playing = false;
- protected boolean paused = false;
- protected boolean repeat = false;
-
- protected float rate;
- protected int bufWidth;
- protected int bufHeight;
- protected float volume;
-
- protected Method movieEventMethod;
- protected Object eventHandler;
-
- protected boolean available;
- protected boolean sinkReady;
- protected boolean newFrame;
-
- protected RGBDataAppSink rgbSink = null;
- protected int[] copyPixels = null;
-
- protected boolean firstFrame = true;
- protected boolean seeking = false;
-
- protected boolean useBufferSink = false;
- protected boolean outdatedPixels = true;
- protected Object bufferSink;
- protected Method sinkCopyMethod;
- protected Method sinkSetMethod;
- protected Method sinkDisposeMethod;
- protected Method sinkGetMethod;
- protected String copyMask;
- protected Buffer natBuffer = null;
- protected BufferDataAppSink natSink = null;
-
-
- /**
- * Creates an instance of GSMovie loading the movie from filename.
- *
- * @param parent PApplet
- * @param filename String
- */
- public Movie(PApplet parent, String filename) {
- super(0, 0, RGB);
- initGStreamer(parent, filename);
- }
-
-
- /**
- * Disposes all the native resources associated to this movie.
- *
- * NOTE: This is not official API and may/will be removed at any time.
- */
- public void dispose() {
- if (playbin != null) {
- try {
- if (playbin.isPlaying()) {
- playbin.stop();
- playbin.getState();
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- pixels = null;
-
- copyPixels = null;
- if (rgbSink != null) {
- rgbSink.removeListener();
- rgbSink.dispose();
- rgbSink = null;
- }
-
- natBuffer = null;
- if (natSink != null) {
- natSink.removeListener();
- natSink.dispose();
- natSink = null;
- }
-
- playbin.dispose();
- playbin = null;
-
- parent.g.removeCache(this);
- parent.unregisterMethod("dispose", this);
- parent.unregisterMethod("post", this);
- }
- }
-
-
- /**
- * Finalizer of the class.
- */
- protected void finalize() throws Throwable {
- try {
- dispose();
- } finally {
- super.finalize();
- }
- }
-
-
- /**
- * ( begin auto-generated from Movie_frameRate.xml )
- *
- * Sets how often frames are read from the movie. Setting the fps
- * parameter to 4, for example, will cause 4 frames to be read per second.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @param ifps speed of the movie in frames per second
- * @brief Sets the target frame rate
- */
- public void frameRate(float ifps) {
- if (seeking) return;
-
- // We calculate the target ratio in the case both the
- // current and target framerates are valid (greater than
- // zero), otherwise we leave it as 1.
- float f = (0 < ifps && 0 < frameRate) ? ifps / frameRate : 1;
-
- if (playing) {
- playbin.pause();
- playbin.getState();
- }
-
- long t = playbin.queryPosition(TimeUnit.NANOSECONDS);
-
- boolean res;
- long start, stop;
- if (rate > 0) {
- start = t;
- stop = -1;
- } else {
- start = 0;
- stop = t;
- }
-
- res = playbin.seek(rate * f, Format.TIME, SeekFlags.FLUSH,
- SeekType.SET, start, SeekType.SET, stop);
- playbin.getState();
-
- if (!res) {
- PGraphics.showWarning("Seek operation failed.");
- }
-
- if (playing) {
- playbin.play();
- }
-
- frameRate = ifps;
-
- // getState() will wait until any async state change
- // (like seek in this case) has completed
- seeking = true;
- playbin.getState();
- seeking = false;
- }
-
-
- /**
- * ( begin auto-generated from Movie_speed.xml )
- *
- * Sets the relative playback speed of the movie. The rate
- * parameters sets the speed where 2.0 will play the movie twice as fast,
- * 0.5 will play at half the speed, and -1 will play the movie in normal
- * speed in reverse.
- *
- * ( end auto-generated )
- *
-
- * @webref movie
-
- * @usage web_application
- * @param irate speed multiplier for movie playback
- * @brief Sets the relative playback speed
- */
- public void speed(float irate) {
- // If the frameRate() method is called continuously with very similar
- // rate values, playback might become sluggish. This condition attempts
- // to take care of that.
- if (PApplet.abs(rate - irate) > 0.1) {
- rate = irate;
- frameRate(frameRate); // The framerate is the same, but the rate (speed) could be different.
- }
- }
-
-
- /**
- * ( begin auto-generated from Movie_duration.xml )
- *
- * Returns the length of the movie in seconds. If the movie is 1 minute and
- * 20 seconds long the value returned will be 80.0.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Returns length of movie in seconds
- */
- public float duration() {
- float sec = playbin.queryDuration().toSeconds();
- float nanosec = playbin.queryDuration().getNanoSeconds();
- return sec + Video.nanoSecToSecFrac(nanosec);
- }
-
-
- /**
- * ( begin auto-generated from Movie_time.xml )
- *
- * Returns the location of the playback head in seconds. For example, if
- * the movie has been playing for 4 seconds, the number 4.0 will be returned.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Returns location of playback head in units of seconds
- */
- public float time() {
- float sec = playbin.queryPosition().toSeconds();
- float nanosec = playbin.queryPosition().getNanoSeconds();
- return sec + Video.nanoSecToSecFrac(nanosec);
- }
-
-
- /**
- * ( begin auto-generated from Movie_jump.xml )
- *
- * Jumps to a specific location within a movie. The parameter where
- * is in terms of seconds. For example, if the movie is 12.2 seconds long,
- * calling jump(6.1) would go to the middle of the movie.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @param where position to jump to specified in seconds
- * @brief Jumps to a specific location
- */
- public void jump(float where) {
- if (seeking) return;
-
- if (!sinkReady) {
- initSink();
- }
-
- // Round the time to a multiple of the source framerate, in
- // order to eliminate stutter. Suggested by Daniel Shiffman
- float fps = getSourceFrameRate();
- int frame = (int)(where * fps);
- where = frame / fps;
-
- boolean res;
- long pos = Video.secToNanoLong(where);
-
- res = playbin.seek(rate, Format.TIME, SeekFlags.FLUSH,
- SeekType.SET, pos, SeekType.NONE, -1);
-
- if (!res) {
- PGraphics.showWarning("Seek operation failed.");
- }
-
- // getState() will wait until any async state change
- // (like seek in this case) has completed
- seeking = true;
- playbin.getState();
- seeking = false;
- /*
- if (seeking) return; // don't seek again until the current seek operation is done.
-
- if (!sinkReady) {
- initSink();
- }
-
- // Round the time to a multiple of the source framerate, in
- // order to eliminate stutter. Suggested by Daniel Shiffman
- float fps = getSourceFrameRate();
- int frame = (int)(where * fps);
- final float seconds = frame / fps;
-
- // Put the seek operation inside a thread to avoid blocking the main
- // animation thread
- Thread seeker = new Thread() {
- @Override
- public void run() {
- long pos = Video.secToNanoLong(seconds);
- boolean res = playbin.seek(rate, Format.TIME, SeekFlags.FLUSH,
- SeekType.SET, pos, SeekType.NONE, -1);
- if (!res) {
- PGraphics.showWarning("Seek operation failed.");
- }
-
- // getState() will wait until any async state change
- // (like seek in this case) has completed
- seeking = true;
- playbin.getState();
- seeking = false;
- }
- };
- seeker.start();
- */
- }
-
-
- /**
- * ( begin auto-generated from Movie_available.xml )
- *
- * Returns "true" when a new movie frame is available to read.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Returns "true" when a new movie frame is available to read.
- */
- public boolean available() {
- return available;
- }
-
-
- /**
- * ( begin auto-generated from Movie_play.xml )
- *
- * Plays a movie one time and stops at the last frame.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Plays movie one time and stops at the last frame
- */
- public void play() {
- if (seeking) return;
-
- if (!sinkReady) {
- initSink();
- }
-
- playing = true;
- paused = false;
- playbin.play();
- playbin.getState();
- }
-
-
- /**
- * ( begin auto-generated from Movie_loop.xml )
- *
- * Plays a movie continuously, restarting it when it's over.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Plays a movie continuously, restarting it when it's over.
- */
- public void loop() {
- if (seeking) return;
-
- repeat = true;
- play();
- }
-
-
- /**
- * ( begin auto-generated from Movie_noLoop.xml )
- *
- * If a movie is looping, calling noLoop() will cause it to play until the
- * end and then stop on the last frame.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Stops the movie from looping
- */
- public void noLoop() {
- if (seeking) return;
-
- if (!sinkReady) {
- initSink();
- }
-
- repeat = false;
- }
-
-
- /**
- * ( begin auto-generated from Movie_pause.xml )
- *
- * Pauses a movie during playback. If a movie is started again with play(),
- * it will continue from where it was paused.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Pauses the movie
- */
- public void pause() {
- if (seeking) return;
-
- if (!sinkReady) {
- initSink();
- }
-
- playing = false;
- paused = true;
- playbin.pause();
- playbin.getState();
- }
-
-
- /**
- * ( begin auto-generated from Movie_stop.xml )
- *
- * Stops a movie from continuing. The playback returns to the beginning so
- * when a movie is played, it will begin from the beginning.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Stops the movie
- */
- public void stop() {
- if (seeking) return;
-
- if (!sinkReady) {
- initSink();
- }
-
- if (playing) {
- jump(0);
- playing = false;
- }
- paused = false;
- playbin.stop();
- playbin.getState();
- }
-
-
- /**
- * ( begin auto-generated from Movie_read.xml )
- *
- * Reads the current frame of the movie.
- *
- * ( end auto-generated )
- *
- * @webref movie
- * @usage web_application
- * @brief Reads the current frame
- */
- public synchronized void read() {
- if (frameRate < 0) {
- // Framerate not set yet, so we obtain from stream,
- // which is already playing since we are in read().
- frameRate = getSourceFrameRate();
- }
- if (volume < 0) {
- // Idem for volume
- volume = (float)playbin.getVolume();
- }
-
- if (useBufferSink) { // The native buffer from gstreamer is copied to the buffer sink.
- outdatedPixels = true;
- if (natBuffer == null) {
- return;
- }
-
- if (firstFrame) {
- super.init(bufWidth, bufHeight, ARGB);
- firstFrame = false;
- }
-
- if (bufferSink == null) {
- Object cache = parent.g.getCache(this);
- if (cache == null) {
- return;
- }
- setBufferSink(cache);
- getSinkMethods();
- }
-
- ByteBuffer byteBuffer = natBuffer.getByteBuffer();
-
- try {
- sinkCopyMethod.invoke(bufferSink, new Object[] { natBuffer, byteBuffer, bufWidth, bufHeight });
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- natBuffer = null;
- } else { // The pixels just read from gstreamer are copied to the pixels array.
- if (copyPixels == null) {
- return;
- }
-
- if (firstFrame) {
- super.init(bufWidth, bufHeight, RGB);
- firstFrame = false;
- }
-
- int[] temp = pixels;
- pixels = copyPixels;
- updatePixels();
- copyPixels = temp;
- }
-
- available = false;
- newFrame = true;
- }
-
-
- /**
- * Change the volume. Values are from 0 to 1.
- *
- * @param float v
- */
- public void volume(float v) {
- if (playing && PApplet.abs(volume - v) > 0.001f) {
- playbin.setVolume(v);
- volume = v;
- }
- }
-
-
- public synchronized void loadPixels() {
- super.loadPixels();
- if (useBufferSink) {
- if (natBuffer != null) {
- // This means that the OpenGL texture hasn't been created so far (the
- // video frame not drawn using image()), but the user wants to use the
- // pixel array, which we can just get from natBuffer.
- IntBuffer buf = natBuffer.getByteBuffer().asIntBuffer();
- buf.rewind();
- buf.get(pixels);
- Video.convertToARGB(pixels, width, height);
- } else if (sinkGetMethod != null) {
- try {
- // sinkGetMethod will copy the latest buffer to the pixels array,
- // and the pixels will be copied to the texture when the OpenGL
- // renderer needs to draw it.
- sinkGetMethod.invoke(bufferSink, new Object[] { pixels });
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- outdatedPixels = false;
- }
- }
-
-
- public int get(int x, int y) {
- if (outdatedPixels) loadPixels();
- return super.get(x, y);
- }
-
-
- protected void getImpl(int sourceX, int sourceY,
- int sourceWidth, int sourceHeight,
- PImage target, int targetX, int targetY) {
- if (outdatedPixels) loadPixels();
- super.getImpl(sourceX, sourceY, sourceWidth, sourceHeight,
- target, targetX, targetY);
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Initialization methods.
-
-
- protected void initGStreamer(PApplet parent, String filename) {
- this.parent = parent;
- playbin = null;
-
- File file;
-
- Video.init();
-
- // first check to see if this can be read locally from a file.
- try {
- try {
- // first try a local file using the dataPath. usually this will
- // work ok, but sometimes the dataPath is inside a jar file,
- // which is less fun, so this will crap out.
- file = new File(parent.dataPath(filename));
- if (file.exists()) {
- playbin = new PlayBin2("Movie Player");
- playbin.setInputFile(file);
- }
- } catch (Exception e) {
- } // ignored
-
- // read from a file just hanging out in the local folder.
- // this might happen when the video library is used with some
- // other application, or the person enters a full path name
- if (playbin == null) {
- try {
- file = new File(filename);
- if (file.exists()) {
- playbin = new PlayBin2("Movie Player");
- playbin.setInputFile(file);
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- if (playbin == null) {
- // Try network read...
- for (int i = 0; i < supportedProtocols.length; i++) {
- if (filename.startsWith(supportedProtocols[i] + "://")) {
- try {
- playbin = new PlayBin2("Movie Player");
- playbin.setURI(URI.create(filename));
- break;
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
- }
- } catch (SecurityException se) {
- // online, whups. catch the security exception out here rather than
- // doing it three times (or whatever) for each of the cases above.
- }
-
- if (playbin == null) {
- parent.die("Could not load movie file " + filename, null);
- }
-
- // we've got a valid movie! let's rock.
- try {
- this.filename = filename; // for error messages
-
- // register methods
- parent.registerMethod("dispose", this);
- parent.registerMethod("post", this);
-
- setEventHandlerObject(parent);
-
- rate = 1.0f;
- frameRate = -1;
- volume = -1;
- sinkReady = false;
- bufWidth = bufHeight = 0;
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
-
- /**
- * Uses a generic object as handler of the movie. This object should have a
- * movieEvent method that receives a GSMovie argument. This method will
- * be called upon a new frame read event.
- *
- */
- protected void setEventHandlerObject(Object obj) {
- eventHandler = obj;
-
- try {
- movieEventMethod = eventHandler.getClass().getMethod("movieEvent", Movie.class);
- return;
- } catch (Exception e) {
- // no such method, or an error... which is fine, just ignore
- }
-
- // movieEvent can alternatively be defined as receiving an Object, to allow
- // Processing mode implementors to support the video library without linking
- // to it at build-time.
- try {
- movieEventMethod = eventHandler.getClass().getMethod("movieEvent", Object.class);
- } catch (Exception e) {
- // no such method, or an error... which is fine, just ignore
- }
- }
-
-
- protected void initSink() {
- if (bufferSink != null || (Video.useGLBufferSink && parent.g.isGL())) {
- useBufferSink = true;
-
- if (bufferSink != null) {
- getSinkMethods();
- }
-
- if (copyMask == null || copyMask.equals("")) {
- initCopyMask();
- }
-
- natSink = new BufferDataAppSink("nat", copyMask,
- new BufferDataAppSink.Listener() {
- public void bufferFrame(int w, int h, Buffer buffer) {
- invokeEvent(w, h, buffer);
- }
- });
-
- natSink.setAutoDisposeBuffer(false);
- playbin.setVideoSink(natSink);
- // The setVideoSink() method sets the videoSink as a property of the
- // PlayBin, which increments the refcount of the videoSink element.
- // Disposing here once to decrement the refcount.
- natSink.dispose();
- } else {
- rgbSink = new RGBDataAppSink("rgb",
- new RGBDataAppSink.Listener() {
- public void rgbFrame(int w, int h, IntBuffer buffer) {
- invokeEvent(w, h, buffer);
- }
- });
-
- // Setting direct buffer passing in the video sink.
- rgbSink.setPassDirectBuffer(Video.passDirectBuffer);
- playbin.setVideoSink(rgbSink);
- // The setVideoSink() method sets the videoSink as a property of the
- // PlayBin, which increments the refcount of the videoSink element.
- // Disposing here once to decrement the refcount.
- rgbSink.dispose();
- }
-
- // Creating bus to handle end-of-stream event.
- Bus bus = playbin.getBus();
- bus.connect(new Bus.EOS() {
- public void endOfStream(GstObject element) {
- eosEvent();
- }
- });
-
- sinkReady = true;
- newFrame = false;
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Stream event handling.
-
-
- protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
- available = true;
- bufWidth = w;
- bufHeight = h;
-
- if (copyPixels == null) {
- copyPixels = new int[w * h];
- }
- buffer.rewind();
- try {
- buffer.get(copyPixels);
- } catch (BufferUnderflowException e) {
- e.printStackTrace();
- copyPixels = null;
- return;
- }
-
- if (playing) {
- fireMovieEvent();
- }
- }
-
- protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
- available = true;
- bufWidth = w;
- bufHeight = h;
- if (natBuffer != null) {
- // To handle the situation where read() is not called in the sketch, so
- // that the native buffers are not being sent to the sinke, and therefore, not disposed
- // by it.
- natBuffer.dispose();
- }
- natBuffer = buffer;
-
- if (playing) {
- fireMovieEvent();
- }
- }
-
- private void fireMovieEvent() {
- // Creates a movieEvent.
- if (movieEventMethod != null) {
- try {
- movieEventMethod.invoke(eventHandler, this);
- } catch (Exception e) {
- System.err.println("error, disabling movieEvent() for " + filename);
- e.printStackTrace();
- movieEventMethod = null;
- }
- }
- }
-
- protected void eosEvent() {
- if (repeat) {
- if (0 < rate) {
- // Playing forward, so we return to the beginning
- jump(0);
- } else {
- // Playing backwards, so we go to the end.
- jump(duration());
- }
-
- // The rate is set automatically to 1 when restarting the
- // stream, so we need to call frameRate in order to reset
- // to the latest fps rate.
- frameRate(frameRate);
- } else {
- playing = false;
- }
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Stream query methods.
-
-
- /**
- * Get the height of the source video. Note: calling this method repeatedly
- * can slow down playback performance.
- *
- * @return int
- */
- protected int getSourceHeight() {
- Dimension dim = playbin.getVideoSize();
- if (dim != null) {
- return dim.height;
- } else {
- return 0;
- }
- }
-
-
- /**
- * Get the original framerate of the source video. Note: calling this method
- * repeatedly can slow down playback performance.
- *
- * @return float
- */
- protected float getSourceFrameRate() {
- return (float)playbin.getVideoSinkFrameRate();
- }
-
-
- /**
- * Get the width of the source video. Note: calling this method repeatedly
- * can slow down playback performance.
- *
- * @return int
- */
- protected int getSourceWidth() {
- Dimension dim = playbin.getVideoSize();
- if (dim != null) {
- return dim.width;
- } else {
- return 0;
- }
- }
-
-
- ////////////////////////////////////////////////////////////
-
- // Buffer source interface.
-
-
- /**
- * Sets the object to use as destination for the frames read from the stream.
- * The color conversion mask is automatically set to the one required to
- * copy the frames to OpenGL.
- *
- * NOTE: This is not official API and may/will be removed at any time.
- *
- * @param Object dest
- */
- public void setBufferSink(Object sink) {
- bufferSink = sink;
- initCopyMask();
- }
-
-
- /**
- * Sets the object to use as destination for the frames read from the stream.
- *
- * NOTE: This is not official API and may/will be removed at any time.
- *
- * @param Object dest
- * @param String mask
- */
- public void setBufferSink(Object sink, String mask) {
- bufferSink = sink;
- copyMask = mask;
- }
-
-
- /**
- * NOTE: This is not official API and may/will be removed at any time.
- */
- public boolean hasBufferSink() {
- return bufferSink != null;
- }
-
-
- /**
- * NOTE: This is not official API and may/will be removed at any time.
- */
- public synchronized void disposeBuffer(Object buf) {
- ((Buffer)buf).dispose();
- }
-
-
- protected void getSinkMethods() {
- try {
- sinkCopyMethod = bufferSink.getClass().getMethod("copyBufferFromSource",
- new Class[] { Object.class, ByteBuffer.class, int.class, int.class });
- } catch (Exception e) {
- throw new RuntimeException("Movie: provided sink object doesn't have a " +
- "copyBufferFromSource method.");
- }
-
- try {
- sinkSetMethod = bufferSink.getClass().getMethod("setBufferSource",
- new Class[] { Object.class });
- sinkSetMethod.invoke(bufferSink, new Object[] { this });
- } catch (Exception e) {
- throw new RuntimeException("Movie: provided sink object doesn't have a " +
- "setBufferSource method.");
- }
-
- try {
- sinkDisposeMethod = bufferSink.getClass().getMethod("disposeSourceBuffer",
- new Class[] { });
- } catch (Exception e) {
- throw new RuntimeException("Movie: provided sink object doesn't have " +
- "a disposeSourceBuffer method.");
- }
-
- try {
- sinkGetMethod = bufferSink.getClass().getMethod("getBufferPixels",
- new Class[] { int[].class });
- } catch (Exception e) {
- throw new RuntimeException("Movie: provided sink object doesn't have " +
- "a getBufferPixels method.");
- }
- }
-
-
- protected void initCopyMask() {
- if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
- copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
- } else {
- copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
- }
- }
-
-
- public synchronized void post() {
- if (useBufferSink && sinkDisposeMethod != null) {
- try {
- sinkDisposeMethod.invoke(bufferSink, new Object[] {});
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
-}
diff --git a/java/libraries/video/src/processing/video/Video.java b/java/libraries/video/src/processing/video/Video.java
deleted file mode 100644
index 1eaa0e090..000000000
--- a/java/libraries/video/src/processing/video/Video.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
-
-/*
- Part of the Processing project - http://processing.org
-
- Copyright (c) 2011-12 Ben Fry and Casey Reas
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General
- Public License along with this library; if not, write to the
- Free Software Foundation, Inc., 59 Temple Place, Suite 330,
- Boston, MA 02111-1307 USA
-*/
-
-package processing.video;
-
-import org.gstreamer.*;
-import processing.core.PApplet;
-import processing.core.PConstants;
-
-import java.io.File;
-import java.nio.ByteOrder;
-import java.util.List;
-
-/**
- * This class contains some basic functions used by the rest of the classes in
- * this library.
- */
-public class Video implements PConstants {
- // Path that the video library will use to load the GStreamer base libraries
- // and plugins from. They can be passed from the application using the
- // gstreamer.library.path and gstreamer.plugin.path system variables (see
- // comments in initImpl() below).
- protected static String gstreamerLibPath = "";
- protected static String gstreamerPluginPath = "";
-
- // Direct buffer pass enabled by default. With this mode enabled, no new
- // buffers are created and disposed by the GC in each frame (thanks to Octavi
- // Estape for suggesting this improvement) which should help performance in
- // most situations.
- protected static boolean passDirectBuffer = true;
-
- // OpenGL texture used as buffer sink by default, when the renderer is
- // GL-based. This can improve performance significantly, since the video
- // frames are automatically copied into the texture without passing through
- // the pixels arrays, as well as having the color conversion into RGBA handled
- // natively by GStreamer.
- protected static boolean useGLBufferSink = true;
-
- protected static boolean defaultGLibContext = false;
-
- protected static long INSTANCES_COUNT = 0;
-
- protected static int bitsJVM;
- static {
- bitsJVM = PApplet.parseInt(System.getProperty("sun.arch.data.model"));
- }
-
-
- static protected void init() {
- if (INSTANCES_COUNT == 0) {
- initImpl();
- }
- INSTANCES_COUNT++;
- }
-
-
- static protected void restart() {
- removePlugins();
- Gst.deinit();
- initImpl();
- }
-
-
- static protected void initImpl() {
- // The location of the GStreamer base libraries can be passed from the
- // application to the vide library via a system variable. In Eclipse, add to
- // "VM Arguments" in "Run Configurations" the following line:
- // -Dgstreamer.library.path=path
- String libPath = System.getProperty("gstreamer.library.path");
- if (libPath != null) {
- gstreamerLibPath = libPath;
-
- // If the GStreamer installation referred by gstreamer.library.path is not
- // a system installation, then the path containing the plugins needs to be
- // specified separately, otherwise the plugins will be automatically
- // loaded from the default location. The system property for the plugin
- // path is "gstreamer.plugin.path"
- String pluginPath = System.getProperty("gstreamer.plugin.path");
- if (pluginPath != null) {
- gstreamerPluginPath = pluginPath;
- }
- } else {
- // Paths are build automatically from the curren location of the video
- // library.
- if (PApplet.platform == LINUX) {
- buildLinuxPaths();
- } else if (PApplet.platform == WINDOWS) {
- buildWindowsPaths();
- } else if (PApplet.platform == MACOSX) {
- buildMacOSXPaths();
- }
- }
-
- if (!gstreamerLibPath.equals("")) {
- System.setProperty("jna.library.path", gstreamerLibPath);
- }
-
- if (PApplet.platform == WINDOWS) {
- LibraryLoader loader = LibraryLoader.getInstance();
- if (loader == null) {
- System.err.println("Cannot load local version of GStreamer libraries.");
- }
- }
-
- String[] args = { "" };
- Gst.setUseDefaultContext(defaultGLibContext);
- Gst.init("Processing core video", args);
-
- addPlugins();
- }
-
-
- static protected void addPlugins() {
- if (!gstreamerPluginPath.equals("")) {
- Registry reg = Registry.getDefault();
- boolean res;
- res = reg.scanPath(gstreamerPluginPath);
- if (!res) {
- System.err.println("Cannot load GStreamer plugins from " +
- gstreamerPluginPath);
- }
- }
- }
-
-
- static protected void removePlugins() {
- Registry reg = Registry.getDefault();
- List list = reg.getPluginList();
- for (Plugin plg : list) {
- reg.removePlugin(plg);
- }
- }
-
-
- static protected void buildLinuxPaths() {
- gstreamerLibPath = "";
- gstreamerPluginPath = "";
- }
-
-
- static protected void buildWindowsPaths() {
- LibraryPath libPath = new LibraryPath();
- String path = libPath.get();
- gstreamerLibPath = buildGStreamerLibPath(path, "\\windows" + bitsJVM);
- gstreamerPluginPath = gstreamerLibPath + "\\plugins";
- }
-
-
- static protected void buildMacOSXPaths() {
- LibraryPath libPath = new LibraryPath();
- String path = libPath.get();
- gstreamerLibPath = buildGStreamerLibPath(path, "/macosx" + bitsJVM);
- gstreamerPluginPath = gstreamerLibPath + "/plugins";
- }
-
-
- static protected String buildGStreamerLibPath(String base, String os) {
- File path = new File(base + os);
- if (path.exists()) {
- return base + os;
- } else {
- return base;
- }
- }
-
-
- static protected float nanoSecToSecFrac(float nanosec) {
- for (int i = 0; i < 3; i++)
- nanosec /= 1E3;
- return nanosec;
- }
-
-
- static protected long secToNanoLong(float sec) {
- Float f = new Float(sec * 1E9);
- return f.longValue();
- }
-
-
- /**
- * Reorders an OpenGL pixel array (RGBA) into ARGB. The array must be
- * of size width * height.
- * @param pixels int[]
- */
- static protected void convertToARGB(int[] pixels, int width, int height) {
- int t = 0;
- int p = 0;
- if (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN) {
- // RGBA to ARGB conversion: shifting RGB 8 bits to the right,
- // and placing A 24 bits to the left.
- for (int y = 0; y < height; y++) {
- for (int x = 0; x < width; x++) {
- int pixel = pixels[p++];
- pixels[t++] = (pixel >>> 8) | ((pixel << 24) & 0xFF000000);
- }
- }
- } else {
- // We have to convert ABGR into ARGB, so R and B must be swapped,
- // A and G just brought back in.
- for (int y = 0; y < height; y++) {
- for (int x = 0; x < width; x++) {
- int pixel = pixels[p++];
- pixels[t++] = ((pixel & 0xFF) << 16) | ((pixel & 0xFF0000) >> 16) |
- (pixel & 0xFF00FF00);
- }
- }
- }
- }
-}
diff --git a/todo.txt b/todo.txt
index 340c441f5..00ea7707d 100644
--- a/todo.txt
+++ b/todo.txt
@@ -1,5 +1,22 @@
0228 pde
+_ shouldn't write sketch.properties unless it's a non-default mode
+_ https://github.com/processing/processing/issues/2531
+_ huge i18n patch
+_ https://github.com/processing/processing/pull/2084
+_ make ant fail when trying to delete JRE files that don't exist
+_ some aren't being removed properly
+earlier
+X for() loop with nothing inside parens crashes Auto Format
+X https://github.com/processing/processing/issues/2141
+
+gsoc
+_ `return` keyword not treated as such when followed by a bracket
+_ https://github.com/processing/processing/issues/2099
+_ IllegalArgumentException when clicking between editor windows
+_ https://github.com/processing/processing/issues/2530
+_ "String index out of range" error
+_ https://github.com/processing/processing/issues/1940
medium
_ possible to open a sketch multiple times
@@ -42,8 +59,6 @@ _ the Find window (also the save windows) also have the same problem
_ move old Google Code SVN back to processing.org
_ then cull out the old branches/tags from the Github repo
_ and/or start bundling separate source downloads
-_ "String index out of range" error
-_ https://github.com/processing/processing/issues/1940
_ look through all isPopupTrigger() code
_ make sure both press/release are implemented
_ emacs style errors in commander aren't quite right
@@ -692,8 +707,6 @@ _ update will update classes from shared in the current folder
TOOLS / Auto Format
-_ for() loop with nothing inside parens crashes Auto Format
-_ https://github.com/processing/processing/issues/2141
_ extra indent found
_ https://github.com/processing/processing/issues/1041
_ Switch block cases not indented
@@ -861,6 +874,8 @@ find YOUR_APP/Contents/ -type f \
DIST / Linux
+_ Processing is named processing-app-Base in Gnome 3
+_ https://github.com/processing/processing/issues/2534
_ how to run "headless" from user Batuff
_ sudo apt-get install xvfb
_ Xvfb :2 -screen 0 1024x768x24 &