diff --git a/core/todo.txt b/core/todo.txt
index d21190f67..5e7ad8897 100644
--- a/core/todo.txt
+++ b/core/todo.txt
@@ -11,10 +11,11 @@ A (was only happening once b/c was drawing first in perspective)
A seems to be mapping to 0, 0 - width/2, height/2
A fix 3D > OrthoVsPerspective example once ortho works properly
A there's a depth problem in addition to the ortho weirdness
-
A using createGraphics() image repeatedly runs out of memory with OPENGL
A http://code.google.com/p/processing/issues/detail?id=483
X works with OPENGL2, so no problem
+A finish OPENGL2 renderer
+A http://code.google.com/p/processing/issues/detail?id=495
_ Potential race condition when resizing sketches
_ http://code.google.com/p/processing/issues/detail?id=697
diff --git a/java/libraries/video/.classpath b/java/libraries/video/.classpath
new file mode 100644
index 000000000..41e779422
--- /dev/null
+++ b/java/libraries/video/.classpath
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
diff --git a/java/libraries/video/.project b/java/libraries/video/.project
new file mode 100644
index 000000000..c53a8c5b2
--- /dev/null
+++ b/java/libraries/video/.project
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>GSVideo</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>
diff --git a/java/libraries/video/build.xml b/java/libraries/video/build.xml
new file mode 100644
index 000000000..16c148945
--- /dev/null
+++ b/java/libraries/video/build.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/java/libraries/video/examples/Capture/AsciiVideo/AsciiVideo.pde b/java/libraries/video/examples/Capture/AsciiVideo/AsciiVideo.pde
new file mode 100644
index 000000000..2faf42c20
--- /dev/null
+++ b/java/libraries/video/examples/Capture/AsciiVideo/AsciiVideo.pde
@@ -0,0 +1,139 @@
+/**
+ * ASCII Video
+ * by Ben Fry.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Text characters have been used to represent images since the earliest computers.
+ * This sketch is a simple homage that re-interprets live video as ASCII text.
+ * See the keyPressed function for more options, like changing the font size.
+ */
+
+import processing.opengl.*;
+import codeanticode.gsvideo.*;
+
+GSCapture video;
+boolean cheatScreen;
+
+// All ASCII characters, sorted according to their visual density
+String letterOrder =
+ " .`-_':,;^=+/\"|)\\<>)iv%xclrs{*}I?!][1taeo7zjLu" +
+ "nT#JCwfy325Fp6mqSghVd4EgXPGZbYkOA&8U$@KHDBWNMR0Q";
+char[] letters;
+
+float[] bright;
+char[] chars;
+
+PFont font;
+float fontSize = 1.5;
+
+
+public void setup() {
+ size(640, 480, P3D);
+ // Or run full screen, more fun! Use with Sketch -> Present
+ //size(screen.width, screen.height, OPENGL);
+
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, 160, 120);
+ video.play();
+ int count = video.width * video.height;
+ println(count);
+
+ font = loadFont("UniversLTStd-Light-48.vlw");
+
+  // for the 256 levels of brightness, distribute the letters across
+  // an array of 256 elements to use for the lookup
+ letters = new char[256];
+ for (int i = 0; i < 256; i++) {
+ int index = int(map(i, 0, 256, 0, letterOrder.length()));
+ letters[i] = letterOrder.charAt(index);
+ }
+
+ // current characters for each position in the video
+ chars = new char[count];
+
+ // current brightness for each point
+ bright = new float[count];
+ for (int i = 0; i < count; i++) {
+ // set each brightness at the midpoint to start
+ bright[i] = 128;
+ }
+}
+
+
+public void captureEvent(GSCapture c) {
+ c.read();
+}
+
+
+void draw() {
+ background(0);
+
+ pushMatrix();
+
+ float hgap = width / float(video.width);
+ float vgap = height / float(video.height);
+
+ scale(max(hgap, vgap) * fontSize);
+ textFont(font, fontSize);
+
+ int index = 0;
+ for (int y = 1; y < video.height; y++) {
+
+ // Move down for next line
+ translate(0, 1.0 / fontSize);
+
+ pushMatrix();
+ for (int x = 0; x < video.width; x++) {
+ int pixelColor = video.pixels[index];
+ // Faster method of calculating r, g, b than red(), green(), blue()
+ int r = (pixelColor >> 16) & 0xff;
+ int g = (pixelColor >> 8) & 0xff;
+ int b = pixelColor & 0xff;
+
+ // Another option would be to properly calculate brightness as luminance:
+ // luminance = 0.3*red + 0.59*green + 0.11*blue
+      // Or you could instead use red + green + blue, and make the values[] array
+ // 256*3 elements long instead of just 256.
+ int pixelBright = max(r, g, b);
+
+ // The 0.1 value is used to damp the changes so that letters flicker less
+ float diff = pixelBright - bright[index];
+ bright[index] += diff * 0.1;
+
+ fill(pixelColor);
+ int num = int(bright[index]);
+ text(letters[num], 0, 0);
+
+ // Move to the next pixel
+ index++;
+
+ // Move over for next character
+ translate(1.0 / fontSize, 0);
+ }
+ popMatrix();
+ }
+ popMatrix();
+
+ if (cheatScreen) {
+ //image(video, 0, height - video.height);
+ // set() is faster than image() when drawing untransformed images
+ set(0, height - video.height, video);
+ }
+}
+
+
+/**
+ * Handle key presses:
+ * 'c' toggles the cheat screen that shows the original image in the corner
+ * 'g' grabs an image and saves the frame to a tiff image
+ * 'f' and 'F' increase and decrease the font size
+ */
+public void keyPressed() {
+ switch (key) {
+ case 'g': saveFrame(); break;
+ case 'c': cheatScreen = !cheatScreen; break;
+ case 'f': fontSize *= 1.1; break;
+ case 'F': fontSize *= 0.9; break;
+ }
+}
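The luminance variant mentioned in the comments inside draw() is a one-line change; a minimal sketch of it, using the weights quoted in that comment (presented as a possible drop-in, not code the example ships with):

// Hypothetical replacement for the max(r, g, b) line in draw(),
// weighting the channels by perceived luminance instead.
int pixelBright = int(0.3 * r + 0.59 * g + 0.11 * b);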
diff --git a/java/libraries/video/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw b/java/libraries/video/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw
new file mode 100644
index 000000000..0d624969b
Binary files /dev/null and b/java/libraries/video/examples/Capture/AsciiVideo/data/UniversLTStd-Light-48.vlw differ
diff --git a/java/libraries/video/examples/Capture/BackgroundSubtraction/BackgroundSubtraction.pde b/java/libraries/video/examples/Capture/BackgroundSubtraction/BackgroundSubtraction.pde
new file mode 100644
index 000000000..efc09e8c1
--- /dev/null
+++ b/java/libraries/video/examples/Capture/BackgroundSubtraction/BackgroundSubtraction.pde
@@ -0,0 +1,71 @@
+/**
+ * Background Subtraction
+ * by Golan Levin.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Detect the presence of people and objects in the frame using a simple
+ * background-subtraction technique. To initialize the background, press a key.
+ */
+
+
+import codeanticode.gsvideo.*;
+
+int numPixels;
+int[] backgroundPixels;
+GSCapture video;
+
+void setup() {
+ // Change size to 320 x 240 if too slow at 640 x 480
+ size(640, 480);
+
+ video = new GSCapture(this, width, height);
+ video.play();
+ numPixels = video.width * video.height;
+ // Create array to store the background image
+ backgroundPixels = new int[numPixels];
+ // Make the pixels[] array available for direct manipulation
+ loadPixels();
+}
+
+void draw() {
+ if (video.available()) {
+ video.read(); // Read a new video frame
+ video.loadPixels(); // Make the pixels of video available
+ // Difference between the current frame and the stored background
+ int presenceSum = 0;
+ for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
+ // Fetch the current color in that location, and also the color
+ // of the background in that spot
+ color currColor = video.pixels[i];
+ color bkgdColor = backgroundPixels[i];
+      // Extract the red, green, and blue components of the current pixel's color
+ int currR = (currColor >> 16) & 0xFF;
+ int currG = (currColor >> 8) & 0xFF;
+ int currB = currColor & 0xFF;
+      // Extract the red, green, and blue components of the background pixel's color
+ int bkgdR = (bkgdColor >> 16) & 0xFF;
+ int bkgdG = (bkgdColor >> 8) & 0xFF;
+ int bkgdB = bkgdColor & 0xFF;
+ // Compute the difference of the red, green, and blue values
+ int diffR = abs(currR - bkgdR);
+ int diffG = abs(currG - bkgdG);
+ int diffB = abs(currB - bkgdB);
+ // Add these differences to the running tally
+ presenceSum += diffR + diffG + diffB;
+ // Render the difference image to the screen
+ pixels[i] = color(diffR, diffG, diffB);
+ // The following line does the same thing much faster, but is more technical
+ //pixels[i] = 0xFF000000 | (diffR << 16) | (diffG << 8) | diffB;
+ }
+ updatePixels(); // Notify that the pixels[] array has changed
+ println(presenceSum); // Print out the total amount of movement
+ }
+}
+
+// When a key is pressed, capture the background image into the backgroundPixels
+// buffer, by copying each of the current frame's pixels into it.
+void keyPressed() {
+ video.loadPixels();
+ arraycopy(video.pixels, backgroundPixels);
+}
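The inner loop above is pure bit arithmetic on packed ARGB ints; a sketch of the same computation factored into a helper (the helper name is illustrative, not part of the example):

// Hypothetical helper: sum of absolute per-channel differences
// between two packed ARGB colors, as computed inline in draw().
int channelDiff(color a, color b) {
  int dr = abs(((a >> 16) & 0xFF) - ((b >> 16) & 0xFF));
  int dg = abs(((a >> 8) & 0xFF) - ((b >> 8) & 0xFF));
  int db = abs((a & 0xFF) - (b & 0xFF));
  return dr + dg + db;
}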
diff --git a/java/libraries/video/examples/Capture/BrightnessThresholding/BrightnessThresholding.pde b/java/libraries/video/examples/Capture/BrightnessThresholding/BrightnessThresholding.pde
new file mode 100644
index 000000000..c83c9826f
--- /dev/null
+++ b/java/libraries/video/examples/Capture/BrightnessThresholding/BrightnessThresholding.pde
@@ -0,0 +1,60 @@
+/**
+ * Brightness Thresholding
+ * by Golan Levin.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Determines whether a test location (such as the cursor) is contained within
+ * the silhouette of a dark object.
+ */
+
+
+import codeanticode.gsvideo.*;
+
+color black = color(0);
+color white = color(255);
+int numPixels;
+GSCapture video;
+
+void setup() {
+ size(640, 480); // Change size to 320 x 240 if too slow at 640 x 480
+ strokeWeight(5);
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, width, height);
+ video.play();
+ numPixels = video.width * video.height;
+ noCursor();
+ smooth();
+}
+
+void draw() {
+ if (video.available()) {
+ video.read();
+ video.loadPixels();
+ int threshold = 127; // Set the threshold value
+ float pixelBrightness; // Declare variable to store a pixel's color
+ // Turn each pixel in the video frame black or white depending on its brightness
+ loadPixels();
+ for (int i = 0; i < numPixels; i++) {
+ pixelBrightness = brightness(video.pixels[i]);
+ if (pixelBrightness > threshold) { // If the pixel is brighter than the
+ pixels[i] = white; // threshold value, make it white
+ }
+ else { // Otherwise,
+ pixels[i] = black; // make it black
+ }
+ }
+ updatePixels();
+    // Test a location to see whether it is contained. Fetch the pixel at the test
+ // location (the cursor), and compute its brightness
+ int testValue = get(mouseX, mouseY);
+ float testBrightness = brightness(testValue);
+ if (testBrightness > threshold) { // If the test location is brighter than
+ fill(black); // the threshold set the fill to black
+ }
+ else { // Otherwise,
+ fill(white); // set the fill to white
+ }
+ ellipse(mouseX, mouseY, 20, 20);
+ }
+}
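The per-pixel decision in draw() reduces to a single comparison; a sketch of it as a helper (hypothetical name; brightness() is Processing's HSB brightness, effectively the largest of the three channels):

// Hypothetical helper: map a pixel to pure black or white
// around a cutoff, as done pixel-by-pixel in draw().
color thresholded(color c, float cutoff) {
  return brightness(c) > cutoff ? white : black;
}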
diff --git a/java/libraries/video/examples/Capture/BrightnessTracking/BrightnessTracking.pde b/java/libraries/video/examples/Capture/BrightnessTracking/BrightnessTracking.pde
new file mode 100644
index 000000000..d06044f1a
--- /dev/null
+++ b/java/libraries/video/examples/Capture/BrightnessTracking/BrightnessTracking.pde
@@ -0,0 +1,55 @@
+/**
+ * Brightness Tracking
+ * by Golan Levin.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Tracks the brightest pixel in a live video signal.
+ */
+
+
+import codeanticode.gsvideo.*;
+
+GSCapture video;
+
+void setup() {
+ size(640, 480); // Change size to 320 x 240 if too slow at 640 x 480
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, width, height);
+ video.play();
+ noStroke();
+ smooth();
+}
+
+void draw() {
+ if (video.available()) {
+ video.read();
+ image(video, 0, 0, width, height); // Draw the webcam video onto the screen
+ int brightestX = 0; // X-coordinate of the brightest video pixel
+ int brightestY = 0; // Y-coordinate of the brightest video pixel
+ float brightestValue = 0; // Brightness of the brightest video pixel
+ // Search for the brightest pixel: For each row of pixels in the video image and
+ // for each pixel in the yth row, compute each pixel's index in the video
+ video.loadPixels();
+ int index = 0;
+ for (int y = 0; y < video.height; y++) {
+ for (int x = 0; x < video.width; x++) {
+ // Get the color stored in the pixel
+ int pixelValue = video.pixels[index];
+ // Determine the brightness of the pixel
+ float pixelBrightness = brightness(pixelValue);
+ // If that value is brighter than any previous, then store the
+ // brightness of that pixel, as well as its (x,y) location
+ if (pixelBrightness > brightestValue) {
+ brightestValue = pixelBrightness;
+ brightestY = y;
+ brightestX = x;
+ }
+ index++;
+ }
+ }
+ // Draw a large, yellow circle at the brightest pixel
+ fill(255, 204, 0, 128);
+ ellipse(brightestX, brightestY, 200, 200);
+ }
+}
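The running index in the nested loops is just the row-major offset; a sketch of the equivalence (assuming the usual y-down pixels[] layout):

// At every step of the loops above, index == y * video.width + x.
// Hypothetical helper computing that offset directly:
int pixelIndex(int x, int y, int w) {
  return y * w + x;
}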
diff --git a/java/libraries/video/examples/Capture/ColorSorting/ColorSorting.pde b/java/libraries/video/examples/Capture/ColorSorting/ColorSorting.pde
new file mode 100644
index 000000000..5efbff70e
--- /dev/null
+++ b/java/libraries/video/examples/Capture/ColorSorting/ColorSorting.pde
@@ -0,0 +1,146 @@
+/**
+ * Color Sorting
+ * by Ben Fry.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Example that sorts all colors from the incoming video
+ * and arranges them into vertical bars.
+ */
+
+
+import codeanticode.gsvideo.*;
+
+GSCapture video;
+boolean cheatScreen;
+
+Tuple[] captureColors;
+Tuple[] drawColors;
+int[] bright;
+
+// How many pixels to skip in either direction
+int increment = 5;
+
+
+public void setup() {
+ size(800, 600, P3D);
+
+ noCursor();
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, 160, 120);
+ video.play();
+
+ int count = (video.width * video.height) / (increment * increment);
+ bright = new int[count];
+ captureColors = new Tuple[count];
+ drawColors = new Tuple[count];
+ for (int i = 0; i < count; i++) {
+ captureColors[i] = new Tuple();
+ drawColors[i] = new Tuple(0.5, 0.5, 0.5);
+ }
+}
+
+
+public void draw() {
+ if (video.available()) {
+ video.read();
+
+ background(0);
+ noStroke();
+
+ int index = 0;
+ for (int j = 0; j < video.height; j += increment) {
+ for (int i = 0; i < video.width; i += increment) {
+ int pixelColor = video.pixels[j*video.width + i];
+
+ int r = (pixelColor >> 16) & 0xff;
+ int g = (pixelColor >> 8) & 0xff;
+ int b = pixelColor & 0xff;
+
+        // Technically this would be the sqrt of the following, but there's
+        // no need to take the sqrt before comparing, since we're only ordering
+ bright[index] = r*r + g*g + b*b;
+ captureColors[index].set(r, g, b);
+
+ index++;
+ }
+ }
+ sort(index, bright, captureColors);
+
+ beginShape(QUAD_STRIP);
+ for (int i = 0; i < index; i++) {
+ drawColors[i].target(captureColors[i], 0.1);
+ drawColors[i].phil();
+
+ float x = map(i, 0, index, 0, width);
+ vertex(x, 0);
+ vertex(x, height);
+ }
+ endShape();
+
+ if (cheatScreen) {
+ //image(video, 0, height - video.height);
+ // Faster method of displaying pixels array on screen
+ set(0, height - video.height, video);
+ }
+ }
+}
+
+
+public void keyPressed() {
+ if (key == 'g') {
+ saveFrame();
+ } else if (key == 'c') {
+ cheatScreen = !cheatScreen;
+ }
+}
+
+
+// Functions to handle sorting the color data
+
+
+void sort(int length, int[] a, Tuple[] stuff) {
+ sortSub(a, stuff, 0, length - 1);
+}
+
+
+void sortSwap(int[] a, Tuple[] stuff, int i, int j) {
+ int T = a[i];
+ a[i] = a[j];
+ a[j] = T;
+
+ Tuple v = stuff[i];
+ stuff[i] = stuff[j];
+ stuff[j] = v;
+}
+
+
+void sortSub(int[] a, Tuple[] stuff, int lo0, int hi0) {
+ int lo = lo0;
+ int hi = hi0;
+ int mid;
+
+ if (hi0 > lo0) {
+ mid = a[(lo0 + hi0) / 2];
+
+ while (lo <= hi) {
+ while ((lo < hi0) && (a[lo] < mid)) {
+ ++lo;
+ }
+ while ((hi > lo0) && (a[hi] > mid)) {
+ --hi;
+ }
+ if (lo <= hi) {
+ sortSwap(a, stuff, lo, hi);
+ ++lo;
+ --hi;
+ }
+ }
+
+ if (lo0 < hi)
+ sortSub(a, stuff, lo0, hi);
+
+ if (lo < hi0)
+ sortSub(a, stuff, lo, hi0);
+ }
+}
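sort() above is a quicksort keyed on the int array that swaps the Tuple array in lockstep, so each color stays attached to its brightness key; a minimal usage sketch (the values are made up for illustration):

// Hypothetical standalone use of sort(): afterwards keys[] is
// ascending and vals[] has been permuted the same way.
int[] keys = { 9, 1, 5 };
Tuple[] vals = { new Tuple(9, 0, 0), new Tuple(1, 0, 0), new Tuple(5, 0, 0) };
sort(keys.length, keys, vals);
// keys is now {1, 5, 9}; vals[0] is the tuple that had key 1.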
diff --git a/java/libraries/video/examples/Capture/ColorSorting/Tuple.pde b/java/libraries/video/examples/Capture/ColorSorting/Tuple.pde
new file mode 100644
index 000000000..fe6d1d9af
--- /dev/null
+++ b/java/libraries/video/examples/Capture/ColorSorting/Tuple.pde
@@ -0,0 +1,29 @@
+// Simple vector class that holds an x,y,z position.
+
+class Tuple {
+ float x, y, z;
+
+ public Tuple() { }
+
+ public Tuple(float x, float y, float z) {
+ set(x, y, z);
+ }
+
+ public void set(float x, float y, float z) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ }
+
+ public void target(Tuple another, float amount) {
+ float amount1 = 1.0 - amount;
+ x = x*amount1 + another.x*amount;
+ y = y*amount1 + another.y*amount;
+ z = z*amount1 + another.z*amount;
+ }
+
+ public void phil() {
+ fill(x, y, z);
+ }
+}
+
diff --git a/java/libraries/video/examples/Capture/Disgrand/Disgrand.pde b/java/libraries/video/examples/Capture/Disgrand/Disgrand.pde
new file mode 100644
index 000000000..9b1eda976
--- /dev/null
+++ b/java/libraries/video/examples/Capture/Disgrand/Disgrand.pde
@@ -0,0 +1,146 @@
+/**
+ * Disgrand
+ * by Ben Fry.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Example that sorts all colors from the incoming video
+ * and arranges them into vertical bars.
+ */
+
+
+import codeanticode.gsvideo.*;
+
+GSCapture video;
+boolean cheatScreen;
+
+Tuple[] captureColors;
+Tuple[] drawColors;
+int[] bright;
+
+// How many pixels to skip in either direction
+int increment = 5;
+
+
+public void setup() {
+ size(800, 600, P3D);
+
+ noCursor();
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, 160, 120);
+ video.play();
+
+ int count = (video.width * video.height) / (increment * increment);
+ bright = new int[count];
+ captureColors = new Tuple[count];
+ drawColors = new Tuple[count];
+ for (int i = 0; i < count; i++) {
+ captureColors[i] = new Tuple();
+ drawColors[i] = new Tuple(0.5, 0.5, 0.5);
+ }
+}
+
+
+public void draw() {
+ if (video.available()) {
+ video.read();
+
+ background(0);
+ noStroke();
+
+ int index = 0;
+ for (int j = 0; j < video.height; j += increment) {
+ for (int i = 0; i < video.width; i += increment) {
+ int pixelColor = video.pixels[j*video.width + i];
+
+ int r = (pixelColor >> 16) & 0xff;
+ int g = (pixelColor >> 8) & 0xff;
+ int b = pixelColor & 0xff;
+
+        // Technically this would be the sqrt of the following, but there's
+        // no need to take the sqrt before comparing, since we're only ordering
+ bright[index] = r*r + g*g + b*b;
+ captureColors[index].set(r, g, b);
+
+ index++;
+ }
+ }
+ sort(index, bright, captureColors);
+
+ beginShape(QUAD_STRIP);
+ for (int i = 0; i < index; i++) {
+ drawColors[i].target(captureColors[i], 0.1);
+ drawColors[i].phil();
+
+ float x = map(i, 0, index, 0, width);
+ vertex(x, 0);
+ vertex(x, height);
+ }
+ endShape();
+
+ if (cheatScreen) {
+ //image(video, 0, height - video.height);
+ // Faster method of displaying pixels array on screen
+ set(0, height - video.height, video);
+ }
+ }
+}
+
+
+public void keyPressed() {
+ if (key == 'g') {
+ saveFrame();
+ } else if (key == 'c') {
+ cheatScreen = !cheatScreen;
+ }
+}
+
+
+// Functions to handle sorting the color data
+
+
+void sort(int length, int[] a, Tuple[] stuff) {
+ sortSub(a, stuff, 0, length - 1);
+}
+
+
+void sortSwap(int[] a, Tuple[] stuff, int i, int j) {
+ int T = a[i];
+ a[i] = a[j];
+ a[j] = T;
+
+ Tuple v = stuff[i];
+ stuff[i] = stuff[j];
+ stuff[j] = v;
+}
+
+
+void sortSub(int[] a, Tuple[] stuff, int lo0, int hi0) {
+ int lo = lo0;
+ int hi = hi0;
+ int mid;
+
+ if (hi0 > lo0) {
+ mid = a[(lo0 + hi0) / 2];
+
+ while (lo <= hi) {
+ while ((lo < hi0) && (a[lo] < mid)) {
+ ++lo;
+ }
+ while ((hi > lo0) && (a[hi] > mid)) {
+ --hi;
+ }
+ if (lo <= hi) {
+ sortSwap(a, stuff, lo, hi);
+ ++lo;
+ --hi;
+ }
+ }
+
+ if (lo0 < hi)
+ sortSub(a, stuff, lo0, hi);
+
+ if (lo < hi0)
+ sortSub(a, stuff, lo, hi0);
+ }
+}
diff --git a/java/libraries/video/examples/Capture/Disgrand/Tuple.pde b/java/libraries/video/examples/Capture/Disgrand/Tuple.pde
new file mode 100644
index 000000000..fe6d1d9af
--- /dev/null
+++ b/java/libraries/video/examples/Capture/Disgrand/Tuple.pde
@@ -0,0 +1,29 @@
+// Simple vector class that holds an x,y,z position.
+
+class Tuple {
+ float x, y, z;
+
+ public Tuple() { }
+
+ public Tuple(float x, float y, float z) {
+ set(x, y, z);
+ }
+
+ public void set(float x, float y, float z) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ }
+
+ public void target(Tuple another, float amount) {
+ float amount1 = 1.0 - amount;
+ x = x*amount1 + another.x*amount;
+ y = y*amount1 + another.y*amount;
+ z = z*amount1 + another.z*amount;
+ }
+
+ public void phil() {
+ fill(x, y, z);
+ }
+}
+
diff --git a/java/libraries/video/examples/Capture/FrameDifferencing/FrameDifferencing.pde b/java/libraries/video/examples/Capture/FrameDifferencing/FrameDifferencing.pde
new file mode 100644
index 000000000..7452203c8
--- /dev/null
+++ b/java/libraries/video/examples/Capture/FrameDifferencing/FrameDifferencing.pde
@@ -0,0 +1,67 @@
+/**
+ * Frame Differencing
+ * by Golan Levin.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Quantify the amount of movement in the video frame using frame-differencing.
+ */
+
+
+import codeanticode.gsvideo.*;
+
+int numPixels;
+int[] previousFrame;
+GSCapture video;
+
+void setup() {
+ size(640, 480); // Change size to 320 x 240 if too slow at 640 x 480
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, width, height);
+ video.play();
+ numPixels = video.width * video.height;
+ // Create an array to store the previously captured frame
+ previousFrame = new int[numPixels];
+ loadPixels();
+}
+
+void draw() {
+ if (video.available()) {
+ // When using video to manipulate the screen, use video.available() and
+ // video.read() inside the draw() method so that it's safe to draw to the screen
+ video.read(); // Read the new frame from the camera
+ video.loadPixels(); // Make its pixels[] array available
+
+ int movementSum = 0; // Amount of movement in the frame
+ for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
+ color currColor = video.pixels[i];
+ color prevColor = previousFrame[i];
+ // Extract the red, green, and blue components from current pixel
+ int currR = (currColor >> 16) & 0xFF; // Like red(), but faster
+ int currG = (currColor >> 8) & 0xFF;
+ int currB = currColor & 0xFF;
+ // Extract red, green, and blue components from previous pixel
+ int prevR = (prevColor >> 16) & 0xFF;
+ int prevG = (prevColor >> 8) & 0xFF;
+ int prevB = prevColor & 0xFF;
+ // Compute the difference of the red, green, and blue values
+ int diffR = abs(currR - prevR);
+ int diffG = abs(currG - prevG);
+ int diffB = abs(currB - prevB);
+ // Add these differences to the running tally
+ movementSum += diffR + diffG + diffB;
+ // Render the difference image to the screen
+ pixels[i] = color(diffR, diffG, diffB);
+ // The following line is much faster, but more confusing to read
+ //pixels[i] = 0xff000000 | (diffR << 16) | (diffG << 8) | diffB;
+ // Save the current color into the 'previous' buffer
+ previousFrame[i] = currColor;
+ }
+ // To prevent flicker from frames that are all black (no movement),
+ // only update the screen if the image has changed.
+ if (movementSum > 0) {
+ updatePixels();
+ println(movementSum); // Print the total amount of movement to the console
+ }
+ }
+}
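The commented-out "much faster" line above packs the three differences straight back into one ARGB int; a sketch of that packing as a helper (equivalent to color(r, g, b) for opaque colors; the name is illustrative):

// Hypothetical helper: pack three 0-255 channel values into an
// opaque ARGB int, the same trick as the commented-out line.
int packRGB(int r, int g, int b) {
  return 0xFF000000 | (r << 16) | (g << 8) | b;
}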
diff --git a/java/libraries/video/examples/Capture/Framingham/Framingham.pde b/java/libraries/video/examples/Capture/Framingham/Framingham.pde
new file mode 100644
index 000000000..b7ae30c99
--- /dev/null
+++ b/java/libraries/video/examples/Capture/Framingham/Framingham.pde
@@ -0,0 +1,61 @@
+/**
+ * Framingham
+ * by Ben Fry.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Show subsequent frames from video input as a grid. Also fun with movie files.
+ */
+
+
+import codeanticode.gsvideo.*;
+
+GSCapture video;
+int column;
+int columnCount;
+int lastRow;
+
+// Buffer used to move all the pixels up
+int[] scoot;
+
+
+void setup() {
+ size(640, 480, P3D);
+
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, 160, 120);
+ video.play();
+ // Also try with other video sizes
+
+ column = 0;
+ columnCount = width / video.width;
+ int rowCount = height / video.height;
+ lastRow = rowCount - 1;
+
+ scoot = new int[lastRow*video.height * width];
+ background(0);
+}
+
+
+void draw() {
+ // By using video.available, only the frame rate need be set inside setup()
+ if (video.available()) {
+ video.read();
+ set(video.width*column, video.height*lastRow, video);
+ column++;
+ if (column == columnCount) {
+ loadPixels();
+
+ // Scoot everybody up one row
+ arraycopy(pixels, video.height*width, scoot, 0, scoot.length);
+ arraycopy(scoot, 0, pixels, 0, scoot.length);
+
+ // Set the moved row to black
+ for (int i = scoot.length; i < width*height; i++) {
+ pixels[i] = #000000;
+ }
+ column = 0;
+ updatePixels();
+ }
+ }
+}
diff --git a/java/libraries/video/examples/Capture/GettingStartedCaptureLinux/GettingStartedCaptureLinux.pde b/java/libraries/video/examples/Capture/GettingStartedCaptureLinux/GettingStartedCaptureLinux.pde
new file mode 100644
index 000000000..370db7722
--- /dev/null
+++ b/java/libraries/video/examples/Capture/GettingStartedCaptureLinux/GettingStartedCaptureLinux.pde
@@ -0,0 +1,68 @@
+/**
+ * Getting Started with Capture.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Reading and displaying an image from an attached Capture device.
+ */
+import codeanticode.gsvideo.*;
+
+GSCapture cam;
+
+void setup() {
+ size(640, 480);
+
+/*
+ // List functionality still not ready on Linux
+ String[] cameras = GSCapture.list();
+
+ if (cameras.length == 0)
+ {
+ println("There are no cameras available for capture.");
+ exit();
+ } else {
+ println("Available cameras:");
+ for (int i = 0; i < cameras.length; i++)
+ println(cameras[i]);
+ cam = new GSCapture(this, 640, 480, cameras[0]);
+ }
+
+ However, different cameras can be selected by using their device file:
+ cam = new GSCapture(this, 640, 480, "/dev/video0");
+ cam = new GSCapture(this, 640, 480, "/dev/video1");
+ etc.
+ */
+
+ cam = new GSCapture(this, 640, 480);
+ cam.play();
+
+ /*
+ // You can get the resolutions supported by the
+ // capture device using the resolutions() method.
+ // It must be called after creating the capture
+ // object.
+ int[][] res = cam.resolutions();
+ for (int i = 0; i < res.length; i++) {
+ println(res[i][0] + "x" + res[i][1]);
+ }
+ */
+
+ /*
+ // You can also get the framerates supported by the
+ // capture device:
+ String[] fps = cam.framerates();
+ for (int i = 0; i < fps.length; i++) {
+ println(fps[i]);
+ }
+ */
+}
+
+void draw() {
+ if (cam.available() == true) {
+ cam.read();
+ image(cam, 0, 0);
+ // The following does the same, and is faster when just drawing the image
+ // without any additional resizing, transformations, or tint.
+ //set(0, 0, cam);
+ }
+}
diff --git a/java/libraries/video/examples/Capture/GettingStartedCaptureMac/GettingStartedCaptureMac.pde b/java/libraries/video/examples/Capture/GettingStartedCaptureMac/GettingStartedCaptureMac.pde
new file mode 100644
index 000000000..2fa79694e
--- /dev/null
+++ b/java/libraries/video/examples/Capture/GettingStartedCaptureMac/GettingStartedCaptureMac.pde
@@ -0,0 +1,63 @@
+/**
+ * Getting Started with Capture.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Reading and displaying an image from an attached Capture device.
+ */
+import codeanticode.gsvideo.*;
+
+GSCapture cam;
+
+void setup() {
+ size(640, 480);
+
+ /*
+ // List functionality hasn't been tested on Mac OSX. Uncomment this
+ // code to try it out.
+ String[] cameras = GSCapture.list();
+
+ if (cameras.length == 0)
+ {
+ println("There are no cameras available for capture.");
+ exit();
+ } else {
+ println("Available cameras:");
+ for (int i = 0; i < cameras.length; i++)
+ println(cameras[i]);
+ cam = new GSCapture(this, 320, 240, cameras[0]);
+ }
+ */
+ cam = new GSCapture(this, 640, 480);
+ cam.play();
+
+ /*
+ // You can get the resolutions supported by the
+ // capture device using the resolutions() method.
+ // It must be called after creating the capture
+ // object.
+ int[][] res = cam.resolutions();
+ for (int i = 0; i < res.length; i++) {
+ println(res[i][0] + "x" + res[i][1]);
+ }
+ */
+
+ /*
+ // You can also get the framerates supported by the
+ // capture device:
+ String[] fps = cam.framerates();
+ for (int i = 0; i < fps.length; i++) {
+ println(fps[i]);
+ }
+ */
+}
+
+void draw() {
+ if (cam.available() == true) {
+ cam.read();
+ image(cam, 0, 0);
+ // The following does the same, and is faster when just drawing the image
+ // without any additional resizing, transformations, or tint.
+ //set(0, 0, cam);
+ }
+}
diff --git a/java/libraries/video/examples/Capture/GettingStartedCaptureWin/GettingStartedCaptureWin.pde b/java/libraries/video/examples/Capture/GettingStartedCaptureWin/GettingStartedCaptureWin.pde
new file mode 100644
index 000000000..1d7dcf937
--- /dev/null
+++ b/java/libraries/video/examples/Capture/GettingStartedCaptureWin/GettingStartedCaptureWin.pde
@@ -0,0 +1,59 @@
+/**
+ * Getting Started with Capture.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Reading and displaying an image from an attached Capture device.
+ */
+import codeanticode.gsvideo.*;
+
+GSCapture cam;
+
+void setup() {
+ size(640, 480);
+
+ String[] cameras = GSCapture.list();
+
+ if (cameras.length == 0)
+ {
+ println("There are no cameras available for capture.");
+ exit();
+ } else {
+ println("Available cameras:");
+ for (int i = 0; i < cameras.length; i++) {
+ println(cameras[i]);
+ }
+ cam = new GSCapture(this, 640, 480, cameras[0]);
+ cam.play();
+
+ /*
+ // You can get the resolutions supported by the
+ // capture device using the resolutions() method.
+ // It must be called after creating the capture
+ // object.
+ int[][] res = cam.resolutions();
+ for (int i = 0; i < res.length; i++) {
+ println(res[i][0] + "x" + res[i][1]);
+ }
+ */
+
+ /*
+ // You can also get the framerates supported by the
+ // capture device:
+ String[] fps = cam.framerates();
+ for (int i = 0; i < fps.length; i++) {
+ println(fps[i]);
+ }
+ */
+ }
+}
+
+void draw() {
+ if (cam.available() == true) {
+ cam.read();
+ image(cam, 0, 0);
+ // The following does the same, and is faster when just drawing the image
+ // without any additional resizing, transformations, or tint.
+ //set(0, 0, cam);
+ }
+}
diff --git a/java/libraries/video/examples/Capture/HsvSpace/HsvSpace.pde b/java/libraries/video/examples/Capture/HsvSpace/HsvSpace.pde
new file mode 100644
index 000000000..712486b11
--- /dev/null
+++ b/java/libraries/video/examples/Capture/HsvSpace/HsvSpace.pde
@@ -0,0 +1,208 @@
+/**
+ * HSV Space
+ * by Ben Fry.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Arrange the pixels from live video into the HSV Color Cone.
+ */
+import processing.opengl.*;
+import codeanticode.gsvideo.*;
+import java.awt.Color;
+
+GSCapture video;
+int count;
+boolean cheatScreen = true;
+
+static final float BOX_SIZE = 0.75;
+static final float CONE_HEIGHT = 1.2;
+static final float MAX_RADIUS = 10;
+static final float ROT_INCREMENT = 3.0;
+static final float TRANS_INCREMENT = 1;
+static final float STEP_AMOUNT = 0.1;
+
+Tuple[] farbe;
+Tuple[] trans;
+
+float[] hsb = new float[3];
+
+float leftRightAngle;
+float upDownAngle;
+float fwdBackTrans;
+float upDownTrans;
+float leftRightTrans;
+boolean motion;
+
+boolean blobby = false;
+
+
+public void setup() {
+ size(640, 480, P3D);
+ //size(screen.width, screen.height, OPENGL);
+
+ video = new GSCapture(this, 160, 120);
+ video.play();
+ count = video.width * video.height;
+
+ sphereDetail(60);
+
+ upDownTrans = 0;
+ leftRightTrans = 0;
+ motion = false;
+
+ leftRightAngle = 101.501297;
+ upDownAngle = -180.098694;
+ fwdBackTrans = 14.800003;
+
+ farbe = new Tuple[count];
+ trans = new Tuple[count];
+ for (int i = 0; i < count; i++) {
+ farbe[i] = new Tuple();
+ trans[i] = new Tuple();
+ }
+}
+
+
+void draw() {
+ background(0);
+
+ if (!blobby) lights();
+
+ pushMatrix();
+ translate(width/2, height/2);
+ scale(min(width, height) / 10.0);
+
+ translate(0, 0, -20 + fwdBackTrans);
+ rotateY(radians(36 + leftRightAngle)); //, 0, 1, 0);
+ rotateX(radians(-228 + upDownAngle)); //, 1, 0, 0);
+
+ if (blobby) {
+ stroke(0.35, 0.35, 0.25, 0.15);
+ wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 18, 18);
+ }
+ else {
+ stroke(0.35, 0.35, 0.25, 0.25);
+ wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 180, 18);
+ }
+
+ noStroke();
+ for (int i = 0; i < count; i++) {
+ int pixelColor = video.pixels[i];
+ int r = (pixelColor >> 16) & 0xff;
+ int g = (pixelColor >> 8) & 0xff;
+ int b = pixelColor & 0xff;
+ Color.RGBtoHSB(r, g, b, hsb);
+
+ float radius = hsb[1] * hsb[2];
+ float angle = hsb[0] * 360.0 * DEG_TO_RAD;
+ float nx = MAX_RADIUS * radius * cos(angle);
+ float ny = MAX_RADIUS * radius * sin(angle);
+ float nz = hsb[2] * MAX_RADIUS * CONE_HEIGHT;
+
+ trans[i].set(trans[i].x - (trans[i].x - nx)*STEP_AMOUNT,
+ trans[i].y - (trans[i].y - ny)*STEP_AMOUNT,
+ trans[i].z - (trans[i].z - nz)*STEP_AMOUNT);
+
+ farbe[i].set(farbe[i].x - (farbe[i].x - r)*STEP_AMOUNT,
+ farbe[i].y - (farbe[i].y - g)*STEP_AMOUNT,
+ farbe[i].z - (farbe[i].z - b)*STEP_AMOUNT);
+
+ pushMatrix();
+ farbe[i].phil();
+ trans[i].tran();
+
+ rotate(radians(45), 1, 1, 0);
+ if (blobby) {
+ sphere(BOX_SIZE * 2); //, 20, 20);
+ } else {
+ box(BOX_SIZE);
+ }
+
+ popMatrix();
+ }
+ popMatrix();
+
+ if (motion) {
+ upDownAngle--;
+ leftRightAngle--;
+ }
+
+ if (cheatScreen) {
+ image(video, 0, height - video.height);
+ }
+}
+
+
+void captureEvent(GSCapture c) {
+ c.read();
+ c.loadPixels();
+}
+
+
+void keyPressed() {
+ switch (key) {
+ case 'g':
+ saveFrame();
+ break;
+ case 'c':
+ cheatScreen = !cheatScreen;
+ break;
+
+ case 'm':
+ motion = !motion;
+ break;
+ case '=':
+ fwdBackTrans += TRANS_INCREMENT;
+ break;
+ case '-':
+ fwdBackTrans -= TRANS_INCREMENT;
+ break;
+ case 'b':
+ blobby = !blobby;
+ break;
+ }
+}
+
+
+void mouseDragged() {
+ float dX, dY;
+
+ switch (mouseButton) {
+ case LEFT: // left right up down
+ dX = pmouseX - mouseX;
+ dY = pmouseY - mouseY;
+ leftRightAngle -= dX * 0.2;
+ upDownAngle += dY * 0.4;
+ break;
+
+ case CENTER:
+ dX = pmouseX - mouseX;
+ dY = pmouseY - mouseY;
+ leftRightTrans -= TRANS_INCREMENT * dX;
+ upDownTrans -= TRANS_INCREMENT * dY;
+ break;
+
+ case RIGHT: // in and out
+ dY = (float) (pmouseY - mouseY);
+ fwdBackTrans -= TRANS_INCREMENT * dY;
+ break;
+ }
+}
+
+
+void wireCone(float radius, float height, int stepX, int stepY) {
+ int steps = 10;
+ stroke(40);
+ for (int i = 0; i < steps; i++) {
+ float angle = map(i, 0, steps, 0, TWO_PI);
+ float x = radius * cos(angle);
+ float y = radius * sin(angle);
+ line(x, y, height, 0, 0, 0);
+ }
+ noFill();
+ pushMatrix();
+ translate(0, 0, height);
+ ellipseMode(CENTER_RADIUS);
+ ellipse(0, 0, radius, radius);
+ popMatrix();
+}
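The cone placement in draw() maps hue to an angle, saturation times value to a radius, and value to height; a sketch of that mapping in isolation (hypothetical helper; constants and Tuple as defined in this sketch):

// Position of a color in the HSV cone, using the same formulas as
// draw(). hsb[] holds the output of Color.RGBtoHSB().
Tuple conePosition(float[] hsb) {
  float radius = hsb[1] * hsb[2];   // saturation pulls toward the axis
  float angle = hsb[0] * TWO_PI;    // hue sweeps around the cone
  return new Tuple(MAX_RADIUS * radius * cos(angle),
                   MAX_RADIUS * radius * sin(angle),
                   hsb[2] * MAX_RADIUS * CONE_HEIGHT);
}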
diff --git a/java/libraries/video/examples/Capture/HsvSpace/Tuple.pde b/java/libraries/video/examples/Capture/HsvSpace/Tuple.pde
new file mode 100644
index 000000000..f4c898a33
--- /dev/null
+++ b/java/libraries/video/examples/Capture/HsvSpace/Tuple.pde
@@ -0,0 +1,33 @@
+// Simple vector class that holds an x,y,z position.
+
+class Tuple {
+ float x, y, z;
+
+ public Tuple() { }
+
+ public Tuple(float x, float y, float z) {
+ set(x, y, z);
+ }
+
+ public void set(float x, float y, float z) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ }
+
+ public void target(Tuple another, float amount) {
+ float amount1 = 1.0 - amount;
+ x = x*amount1 + another.x*amount;
+ y = y*amount1 + another.y*amount;
+ z = z*amount1 + another.z*amount;
+ }
+
+ public void phil() {
+ fill(x, y, z);
+ }
+
+ public void tran() {
+ translate(x, y, z);
+ }
+}
+
diff --git a/java/libraries/video/examples/Capture/LivePocky/LivePocky.pde b/java/libraries/video/examples/Capture/LivePocky/LivePocky.pde
new file mode 100644
index 000000000..d632cd059
--- /dev/null
+++ b/java/libraries/video/examples/Capture/LivePocky/LivePocky.pde
@@ -0,0 +1,56 @@
+/**
+ * Live Pocky
+ * by Ben Fry.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Unwrap each frame of live video into a single line of pixels.
+ */
+
+import codeanticode.gsvideo.*;
+
+GSCapture video;
+int count;
+int writeRow;
+int maxRows;
+int topRow;
+int buffer[];
+
+
+void setup() {
+ size(600, 400);
+
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, 160, 120);
+ video.play();
+
+ maxRows = height * 2;
+ buffer = new int[width * maxRows];
+ writeRow = height - 1;
+ topRow = 0;
+
+ //frameRate(10);
+ background(0);
+ loadPixels();
+}
+
+
+void draw() {
+ for (int y = 0; y < height; y++) {
+ int row = (topRow + y) % maxRows;
+ arraycopy(buffer, row * width, g.pixels, y*width, width);
+ }
+ updatePixels();
+}
+
+
+void captureEvent(GSCapture c) {
+ c.read();
+ c.loadPixels();
+ arraycopy(c.pixels, 0, buffer, writeRow * width, width);
+ writeRow++;
+ if (writeRow == maxRows) {
+ writeRow = 0;
+ }
+ topRow++;
+}
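buffer above is a ring of maxRows pixel rows: captureEvent() writes one row per new frame while draw() reads height consecutive rows starting at topRow; a sketch of the wrap-around used for the read (hypothetical helper):

// Logical row y on screen maps to this physical row in buffer,
// exactly as computed at the top of draw().
int physicalRow(int y, int topRow, int maxRows) {
  return (topRow + y) % maxRows;
}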
diff --git a/java/libraries/video/examples/Capture/Mirror/Mirror.pde b/java/libraries/video/examples/Capture/Mirror/Mirror.pde
new file mode 100644
index 000000000..a385e1d7f
--- /dev/null
+++ b/java/libraries/video/examples/Capture/Mirror/Mirror.pde
@@ -0,0 +1,75 @@
+/**
+ * Mirror
+ * by Daniel Shiffman.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Each pixel from the video source is drawn as a rectangle with rotation based on brightness.
+ */
+
+import codeanticode.gsvideo.*;
+
+
+// Size of each cell in the grid
+int cellSize = 20;
+// Number of columns and rows in our system
+int cols, rows;
+// Variable for capture device
+GSCapture video;
+
+
+void setup() {
+ size(640, 480, P3D);
+ frameRate(30);
+ cols = width / cellSize;
+ rows = height / cellSize;
+ colorMode(RGB, 255, 255, 255, 100);
+
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, width, height);
+ video.play();
+
+ background(0);
+}
+
+
+void draw() {
+ if (video.available()) {
+ video.read();
+ video.loadPixels();
+
+ // Not bothering to clear background
+ // background(0);
+
+ // Begin loop for columns
+ for (int i = 0; i < cols; i++) {
+ // Begin loop for rows
+ for (int j = 0; j < rows; j++) {
+
+ // Where are we, pixel-wise?
+ int x = i*cellSize;
+ int y = j*cellSize;
+ int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
+
+ float r = red(video.pixels[loc]);
+ float g = green(video.pixels[loc]);
+ float b = blue(video.pixels[loc]);
+ // Make a new color with an alpha component
+ color c = color(r, g, b, 75);
+
+ // Code for drawing a single rect
+ // Using translate in order for rotation to work properly
+ pushMatrix();
+ translate(x+cellSize/2, y+cellSize/2);
+ // Rotation formula based on brightness
+ rotate((2 * PI * brightness(c) / 255.0));
+ rectMode(CENTER);
+ fill(c);
+ noStroke();
+ // Rects are larger than the cell for some overlap
+ rect(0, 0, cellSize+6, cellSize+6);
+ popMatrix();
+ }
+ }
+ }
+}
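The mirroring happens entirely in the index arithmetic: x is reflected across the frame before the row offset is added; a sketch of that lookup (hypothetical helper name):

// pixels[] index of the horizontally mirrored pixel, as computed
// for loc in draw() above.
int mirroredLoc(int x, int y, int w) {
  return (w - x - 1) + y * w;
}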
diff --git a/java/libraries/video/examples/Capture/Mirror2/Mirror2.pde b/java/libraries/video/examples/Capture/Mirror2/Mirror2.pde
new file mode 100644
index 000000000..d8128b209
--- /dev/null
+++ b/java/libraries/video/examples/Capture/Mirror2/Mirror2.pde
@@ -0,0 +1,62 @@
+/**
+ * Mirror 2
+ * by Daniel Shiffman.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Each pixel from the video source is drawn as a rectangle with size based on brightness.
+ */
+
+import codeanticode.gsvideo.*;
+
+// Size of each cell in the grid
+int cellSize = 15;
+// Number of columns and rows in our system
+int cols, rows;
+// Variable for capture device
+GSCapture video;
+
+
+void setup() {
+ size(640, 480, P3D);
+ //set up columns and rows
+ cols = width / cellSize;
+ rows = height / cellSize;
+ colorMode(RGB, 255, 255, 255, 100);
+ rectMode(CENTER);
+
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, width, height);
+ video.play();
+
+ background(0);
+}
+
+
+void draw() {
+ if (video.available()) {
+ video.read();
+ video.loadPixels();
+
+ background(0, 0, 255);
+
+ // Begin loop for columns
+ for (int i = 0; i < cols;i++) {
+ // Begin loop for rows
+ for (int j = 0; j < rows;j++) {
+
+ // Where are we, pixel-wise?
+ int x = i * cellSize;
+ int y = j * cellSize;
+ int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
+
+ // Each rect is colored white with a size determined by brightness
+ color c = video.pixels[loc];
+ float sz = (brightness(c) / 255.0) * cellSize;
+ fill(255);
+ noStroke();
+ rect(x + cellSize/2, y + cellSize/2, sz, sz);
+ }
+ }
+ }
+}
diff --git a/java/libraries/video/examples/Capture/RadialPocky/RadialPocky.pde b/java/libraries/video/examples/Capture/RadialPocky/RadialPocky.pde
new file mode 100644
index 000000000..eddac3636
--- /dev/null
+++ b/java/libraries/video/examples/Capture/RadialPocky/RadialPocky.pde
@@ -0,0 +1,78 @@
+/**
+ * Radial Pocky
+ * by Ben Fry.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * Unwrap each frame of live video into a single line of pixels along a circle
+ */
+
+import codeanticode.gsvideo.*;
+
+
+GSCapture video;
+int videoCount;
+int currentAngle;
+int pixelCount;
+int angleCount = 200; // how many divisions
+
+int radii[];
+int angles[];
+
+
+void setup() {
+ // size must be set to video.width*video.height*2 in both directions
+ size(600, 600);
+
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, 160, 120);
+ video.play();
+ videoCount = video.width * video.height;
+
+ pixelCount = width*height;
+ int centerX = width / 2;
+ int centerY = height / 2;
+ radii = new int[pixelCount];
+ angles = new int[pixelCount];
+
+ int offset = 0;
+ for (int y = 0; y < height; y++) {
+ for (int x = 0; x < width; x++) {
+ int dx = centerX - x;
+ int dy = centerY - y;
+
+ float angle = atan2(dy, dx);
+ if (angle < 0) angle += TWO_PI;
+ angles[offset] = (int) (angleCount * (angle / TWO_PI));
+
+ int radius = (int) mag(dx, dy);
+ if (radius >= videoCount) {
+ radius = -1;
+ angles[offset] = -1;
+ }
+ radii[offset] = radius;
+
+ offset++;
+ }
+ }
+ background(0);
+}
+
+
+void draw() {
+ if (video.available()) {
+ video.read();
+ video.loadPixels();
+
+ loadPixels();
+ for (int i = 0; i < pixelCount; i++) {
+ if (angles[i] == currentAngle) {
+ pixels[i] = video.pixels[radii[i]];
+ }
+ }
+ updatePixels();
+
+ currentAngle++;
+ if (currentAngle == angleCount) currentAngle = 0;
+ }
+}
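setup() precomputes a polar lookup per screen pixel: the angle bucket decides on which pass a pixel is refreshed, and the radius selects the source pixel; a sketch of the bucket computation on its own (hypothetical helper):

// Angle bucket in [0, angleCount) for the vector (dx, dy), matching
// the values stored into angles[] during setup().
int angleBucket(int dx, int dy, int angleCount) {
  float a = atan2(dy, dx);
  if (a < 0) a += TWO_PI;
  return int(angleCount * (a / TWO_PI));
}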
diff --git a/java/libraries/video/examples/Capture/SlitScan/SlitScan.pde b/java/libraries/video/examples/Capture/SlitScan/SlitScan.pde
new file mode 100644
index 000000000..20353a0d0
--- /dev/null
+++ b/java/libraries/video/examples/Capture/SlitScan/SlitScan.pde
@@ -0,0 +1,57 @@
+/**
+ * Simple Real-Time Slit-Scan Program.
+ * By Golan Levin.
+ *
+ * GSVideo version by Andres Colubri.
+ *
+ * This demonstration depends on the canvas height being equal
+ * to the video capture height. If you would prefer otherwise,
+ * consider using the image copy() function rather than the
+ * direct pixel-accessing approach I have used here.
+ *
+ * Created December 2006.
+ * Updated June 2007 by fry.
+ */
+import codeanticode.gsvideo.*;
+
+GSCapture video;
+
+int videoSliceX;
+int drawPositionX;
+
+
+void setup() {
+ size(600, 240);
+
+ // Uses the default video input, see the reference if this causes an error
+ video = new GSCapture(this, 320, 240);
+ video.play();
+
+ videoSliceX = video.width / 2;
+ drawPositionX = width - 1;
+ background(0);
+}
+
+
+void draw() {
+ if (video.available()) {
+ video.read();
+ video.loadPixels();
+
+ // Copy a column of pixels from the middle of the video
+    // to a location moving slowly across the canvas.
+ loadPixels();
+ for (int y = 0; y < video.height; y++){
+ int setPixelIndex = y*width + drawPositionX;
+ int getPixelIndex = y*video.width + videoSliceX;
+ pixels[setPixelIndex] = video.pixels[getPixelIndex];
+ }
+ updatePixels();
+
+ drawPositionX--;
+ // Wrap the position back to the beginning if necessary.
+ if (drawPositionX < 0) {
+ drawPositionX = width - 1;
+ }
+ }
+}
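The copy()-based alternative suggested in the header would let the canvas and capture heights differ, since copy() rescales; a sketch of what that single call might look like (a guess, not code from the example):

// Hypothetical replacement for the pixel loop in draw(): grab a
// 1-pixel-wide column and let copy() stretch it to the canvas height.
copy(video, videoSliceX, 0, 1, video.height, drawPositionX, 0, 1, height);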
diff --git a/java/libraries/video/examples/GLGraphics/Capture/Capture.pde b/java/libraries/video/examples/GLGraphics/Capture/Capture.pde
new file mode 100644
index 000000000..a03c35e59
--- /dev/null
+++ b/java/libraries/video/examples/GLGraphics/Capture/Capture.pde
@@ -0,0 +1,58 @@
+// Using integration with GLGraphics for fast video playback.
+// All the decoding stages, until the color conversion from YUV
+// to RGB are handled by gstreamer, and the video frames are
+// directly transfered over to the OpenGL texture encapsulated
+// by the GLTexture object.
+// You need the GLGraphics library (0.99+) to use this functionality:
+// http://glgraphics.sourceforge.net/
+
+import processing.opengl.*;
+import codeanticode.glgraphics.*;
+import codeanticode.gsvideo.*;
+
+GSCapture cam;
+GLTexture tex;
+
+void setup() {
+ size(640, 480, GLConstants.GLGRAPHICS);
+
+ cam = new GSCapture(this, 640, 480);
+
+ // Use texture tex as the destination for the camera pixels.
+ tex = new GLTexture(this);
+ cam.setPixelDest(tex);
+ cam.play();
+
+ /*
+ // You can get the resolutions supported by the
+ // capture device using the resolutions() method.
+ // It must be called after creating the capture
+ // object.
+ int[][] res = cam.resolutions();
+ for (int i = 0; i < res.length; i++) {
+ println(res[i][0] + "x" + res[i][1]);
+ }
+ */
+
+ /*
+ // You can also get the framerates supported by the
+ // capture device:
+ String[] fps = cam.framerates();
+ for (int i = 0; i < fps.length; i++) {
+ println(fps[i]);
+ }
+ */
+}
+
+void captureEvent(GSCapture cam) {
+ cam.read();
+}
+
+void draw() {
+ // If there is a new frame available from the camera, the
+ // putPixelsIntoTexture() function will copy it to the
+ // video card and will return true.
+ if (tex.putPixelsIntoTexture()) {
+ image(tex, 0, 0, width, height);
+ }
+}
diff --git a/java/libraries/video/examples/GLGraphics/HDMovie/HDMovie.pde b/java/libraries/video/examples/GLGraphics/HDMovie/HDMovie.pde
new file mode 100644
index 000000000..11ae9a05b
--- /dev/null
+++ b/java/libraries/video/examples/GLGraphics/HDMovie/HDMovie.pde
@@ -0,0 +1,80 @@
+// Using integration with GLGraphics for fast video playback.
+// All the decoding stages, until the color conversion from YUV
+// to RGB are handled by gstreamer, and the video frames are
+// directly transfered over to the OpenGL texture encapsulated
+// by the GLTexture object.
+// You need the GLGraphics library (0.99+) to use this functionality:
+// http://glgraphics.sourceforge.net/
+
+import processing.opengl.*;
+import codeanticode.glgraphics.*;
+import codeanticode.gsvideo.*;
+
+GSMovie mov;
+GLTexture tex;
+
+int fcount, lastm;
+float frate;
+int fint = 3;
+
+void setup() {
+ size(1280, 800, GLConstants.GLGRAPHICS);
+ frameRate(90);
+
+ mov = new GSMovie(this, "movie.avi");
+
+ // Use texture tex as the destination for the movie pixels.
+ tex = new GLTexture(this);
+ mov.setPixelDest(tex);
+
+ // This is the size of the buffer where frames are stored
+ // when they are not rendered quickly enough.
+ tex.setPixelBufferSize(10);
+ // New frames put into the texture when the buffer is full
+  // are deleted forever, so this could lead to dropped frames:
+ tex.delPixelsWhenBufferFull(false);
+ // Otherwise, they are kept by gstreamer and will be sent
+  // again later. This avoids losing any frames, but increases
+ // the memory used by the application.
+
+ mov.loop();
+
+ background(0);
+ noStroke();
+}
+
+void draw() {
+  // Using the available() method and reading the new frame inside draw()
+  // instead of movieEvent() is the most effective way to keep the
+  // audio and video in sync.
+ if (mov.available()) {
+ mov.read();
+    // putPixelsIntoTexture() copies the frame pixels to the OpenGL
+    // texture encapsulated by the GLTexture object.
+ if (tex.putPixelsIntoTexture()) {
+
+ // Calculating height to keep aspect ratio.
+ float h = width * tex.height / tex.width;
+ float b = 0.5 * (height - h);
+
+ image(tex, 0, b, width, h);
+
+ String info = "Resolution: " + mov.width + "x" + mov.height +
+ " , framerate: " + nfc(frate, 2) +
+ " , number of buffered frames: " + tex.getPixelBufferUse();
+
+ fill(0);
+ rect(0, 0, textWidth(info), b);
+ fill(255);
+ text(info, 0, 15);
+
+ fcount += 1;
+ int m = millis();
+ if (m - lastm > 1000 * fint) {
+ frate = float(fcount) / fint;
+ fcount = 0;
+ lastm = m;
+ }
+ }
+ }
+}
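The letterboxing in draw() derives the drawn height from the canvas width to preserve the movie's aspect ratio, then centers it with equal bars; a sketch of that arithmetic (hypothetical helpers):

// Height that fills the canvas width at the texture's aspect ratio,
// and the size of each horizontal bar above and below it.
float fittedHeight(float canvasW, float texW, float texH) {
  return canvasW * texH / texW;
}
float barSize(float canvasH, float fittedH) {
  return 0.5 * (canvasH - fittedH);
}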
diff --git a/java/libraries/video/examples/GLGraphics/Movie/Movie.pde b/java/libraries/video/examples/GLGraphics/Movie/Movie.pde
new file mode 100644
index 000000000..b4cf42de0
--- /dev/null
+++ b/java/libraries/video/examples/GLGraphics/Movie/Movie.pde
@@ -0,0 +1,40 @@
+// Using integration with GLGraphics for fast video playback.
+// All the decoding stages, until the color conversion from YUV
+// to RGB are handled by gstreamer, and the video frames are
+// directly transfered over to the OpenGL texture encapsulated
+// by the GLTexture object.
+// You need the GLGraphics library (0.99+) to use this functionality:
+// http://glgraphics.sourceforge.net/
+
+import processing.opengl.*;
+import codeanticode.glgraphics.*;
+import codeanticode.gsvideo.*;
+
+GSMovie movie;
+GLTexture tex;
+
+void setup() {
+ size(640, 480, GLConstants.GLGRAPHICS);
+ background(0);
+
+ movie = new GSMovie(this, "station.mov");
+
+ // Use texture tex as the destination for the movie pixels.
+ tex = new GLTexture(this);
+ movie.setPixelDest(tex);
+ movie.loop();
+}
+
+void movieEvent(GSMovie movie) {
+ movie.read();
+}
+
+void draw() {
+ // If there is a new frame available from the movie, the
+ // putPixelsIntoTexture() function will copy it to the
+ // video card and will return true.
+ if (tex.putPixelsIntoTexture()) {
+ tint(255, 20);
+ image(tex, mouseX-movie.width/2, mouseY-movie.height/2);
+ }
+}
diff --git a/java/libraries/video/examples/GLGraphics/Pipeline/Pipeline.pde b/java/libraries/video/examples/GLGraphics/Pipeline/Pipeline.pde
new file mode 100644
index 000000000..9af8668d6
--- /dev/null
+++ b/java/libraries/video/examples/GLGraphics/Pipeline/Pipeline.pde
@@ -0,0 +1,38 @@
+// Using integration with GLGraphics for fast video playback.
+// All the decoding stages, until the color conversion from YUV
+// to RGB are handled by gstreamer, and the video frames are
+// directly transfered over to the OpenGL texture encapsulated
+// by the GLTexture object.
+// You need the GLGraphics library (0.99+) to use this functionality:
+// http://glgraphics.sourceforge.net/
+
+import processing.opengl.*;
+import codeanticode.glgraphics.*;
+import codeanticode.gsvideo.*;
+
+GSPipeline pipeline;
+GLTexture tex;
+
+void setup() {
+ size(320, 240, GLConstants.GLGRAPHICS);
+
+ pipeline = new GSPipeline(this, "videotestsrc");
+
+ // Use texture tex as the destination for the pipeline pixels.
+ tex = new GLTexture(this);
+ pipeline.setPixelDest(tex);
+ pipeline.play();
+}
+
+void pipelineEvent(GSPipeline pipeline) {
+ pipeline.read();
+}
+
+void draw() {
+ // If there is a new frame available from the pipeline, the
+ // putPixelsIntoTexture() function will copy it to the
+ // video card and will return true.
+ if (tex.putPixelsIntoTexture()) {
+ image(tex, 0, 0, width, height);
+ }
+}
diff --git a/java/libraries/video/examples/Movie/Frames/Frames.pde b/java/libraries/video/examples/Movie/Frames/Frames.pde
new file mode 100644
index 000000000..26ece2f61
--- /dev/null
+++ b/java/libraries/video/examples/Movie/Frames/Frames.pde
@@ -0,0 +1,54 @@
+/**
+ * Frames.
+ * by Andres Colubri
+ *
+ * Moves through the video one frame at a time by using the
+ * arrow keys.
+ */
+
+import codeanticode.gsvideo.*;
+
+GSMovie movie;
+int newFrame = 0;
+PFont font;
+
+void setup() {
+ size(320, 240);
+ background(0);
+ // Load and set the video to play. Setting the video
+ // in play mode is needed so at least one frame is read
+ // and we can get duration, size and other information from
+ // the video stream.
+ movie = new GSMovie(this, "station.mov");
+ movie.play();
+
+ font = loadFont("DejaVuSans-24.vlw");
+ textFont(font, 24);
+}
+
+void movieEvent(GSMovie movie) {
+ movie.read();
+}
+
+void draw() {
+ if (newFrame != movie.frame()) {
+ // The movie stream must be in play mode in order to jump to another
+ // position along the stream. Otherwise it won't work.
+ movie.play();
+ movie.jump(newFrame);
+ movie.pause();
+ }
+ image(movie, 0, 0, width, height);
+ fill(240, 20, 30);
+ text(movie.frame() + " / " + (movie.length() - 1), 10, 30);
+}
+
+void keyPressed() {
+ if (key == CODED) {
+ if (keyCode == LEFT) {
+ if (0 < newFrame) newFrame--;
+ } else if (keyCode == RIGHT) {
+ if (newFrame < movie.length() - 1) newFrame++;
+ }
+ }
+}
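The play/jump/pause sequence in draw() is the essential trick for frame stepping: jump() only takes effect while the stream is playing; a sketch of it wrapped as a helper (a hypothetical convenience, using only methods shown above):

// Jump a paused movie to an arbitrary frame, as done in draw().
void jumpToFrame(GSMovie m, int frame) {
  m.play();
  m.jump(frame);
  m.pause();
}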
diff --git a/java/libraries/video/examples/Movie/Frames/data/DejaVuSans-24.vlw b/java/libraries/video/examples/Movie/Frames/data/DejaVuSans-24.vlw
new file mode 100644
index 000000000..d05a95cb6
Binary files /dev/null and b/java/libraries/video/examples/Movie/Frames/data/DejaVuSans-24.vlw differ
diff --git a/java/libraries/video/examples/Movie/Loop/Loop.pde b/java/libraries/video/examples/Movie/Loop/Loop.pde
new file mode 100644
index 000000000..39fe9bee6
--- /dev/null
+++ b/java/libraries/video/examples/Movie/Loop/Loop.pde
@@ -0,0 +1,37 @@
+/**
+ * Loop.
+ * Built-in video library replaced with gsvideo by Andres Colubri
+ *
+ * Move the cursor across the screen to draw.
+ * Shows how to load and play a QuickTime movie file.
+ *
+ * Note: GSVideo uses GStreamer as the underlying multimedia library
+ * for reading media files, decoding, encoding, etc.
+ * It is based on a set of Java bindings for GStreamer called
+ * gstreamer-java originally created by Wayne Meissner and currently
+ * mantained by a small team of volunteers. GStreamer-java can be
+ * used from any Java program, and it is available for download at
+ * the following website:
+ * http://code.google.com/p/gstreamer-java/
+ */
+
+import codeanticode.gsvideo.*;
+
+GSMovie movie;
+
+void setup() {
+ size(640, 480);
+ background(0);
+ // Load and play the video in a loop
+ movie = new GSMovie(this, "station.mov");
+ movie.loop();
+}
+
+void movieEvent(GSMovie movie) {
+ movie.read();
+}
+
+void draw() {
+ tint(255, 20);
+ image(movie, mouseX-movie.width/2, mouseY-movie.height/2);
+}
diff --git a/java/libraries/video/examples/Movie/Pixelate/Pixelate.pde b/java/libraries/video/examples/Movie/Pixelate/Pixelate.pde
new file mode 100644
index 000000000..f7ae5ac87
--- /dev/null
+++ b/java/libraries/video/examples/Movie/Pixelate/Pixelate.pde
@@ -0,0 +1,50 @@
+/**
+ * Pixelate
+ * by Hernando Barragan.
+ * Built-in video library replaced with gsvideo by Andres Colubri
+ *
+ * Load a QuickTime file and display the video signal
+ * using rectangles as pixels by reading the values stored
+ * in the current video frame pixels array.
+ */
+
+import codeanticode.gsvideo.*;
+
+int numPixels;
+int blockSize = 10;
+GSMovie myMovie;
+color myMovieColors[];
+
+void setup() {
+ size(640, 480);
+ noStroke();
+ background(0);
+ myMovie = new GSMovie(this, "station.mov");
+ myMovie.loop();
+ numPixels = width / blockSize;
+ myMovieColors = new color[numPixels * numPixels];
+}
+
+
+// Read new values from movie
+void movieEvent(GSMovie m) {
+ m.read();
+ m.loadPixels();
+
+ for (int j = 0; j < numPixels; j++) {
+ for (int i = 0; i < numPixels; i++) {
+ myMovieColors[j*numPixels + i] = m.get(i, j);
+ }
+ }
+}
+
+
+// Display values from movie
+void draw() {
+ for (int j = 0; j < numPixels; j++) {
+ for (int i = 0; i < numPixels; i++) {
+ fill(myMovieColors[j*numPixels + i]);
+ rect(i*blockSize, j*blockSize, blockSize-1, blockSize-1);
+ }
+ }
+}
diff --git a/java/libraries/video/examples/Movie/Reverse/Reverse.pde b/java/libraries/video/examples/Movie/Reverse/Reverse.pde
new file mode 100644
index 000000000..a34fc39e9
--- /dev/null
+++ b/java/libraries/video/examples/Movie/Reverse/Reverse.pde
@@ -0,0 +1,44 @@
+/**
+ * GSVideo movie reverse example.
+ *
+ * The GSMovie.speed() method allows you to
+ * change the playback speed. Use negative
+ * values for backwards playback. Note that
+ * not all video formats support backwards
+ * playback. This depends on the underlying
+ * gstreamer plugins used by gsvideo. For
+ * example, the theora codec does support
+ * backward playback, but the H264 codec
+ * does not, at least in its current version.
+ *
+ */
+
+import codeanticode.gsvideo.*;
+
+GSMovie myMovie;
+boolean speedSet = false;
+
+public void setup() {
+ size(320, 240);
+ background(0);
+ myMovie = new GSMovie(this, "balloon.ogg");
+ myMovie.play();
+}
+
+public void movieEvent(GSMovie myMovie) {
+ myMovie.read();
+}
+
+public void draw() {
+ if (myMovie.ready()) {
+ if (!speedSet) {
+ // Setting the speed should be done only once,
+      // which is the reason for the if statement.
+ speedSet = true;
+ myMovie.goToEnd();
+ // -1 means backward playback at normal speed,
+ myMovie.speed(-1.0);
+ }
+ }
+ image(myMovie, 0, 0, width, height);
+}
diff --git a/java/libraries/video/examples/Movie/Scratch/Scratch.pde b/java/libraries/video/examples/Movie/Scratch/Scratch.pde
new file mode 100644
index 000000000..450835079
--- /dev/null
+++ b/java/libraries/video/examples/Movie/Scratch/Scratch.pde
@@ -0,0 +1,49 @@
+/**
+ * Scratch.
+ * by Andres Colubri
+ *
+ * Move the cursor horizontally across the screen to set
+ * the position in the movie file.
+ */
+
+import codeanticode.gsvideo.*;
+
+GSMovie movie;
+
+void setup() {
+ size(640, 480);
+ background(0);
+  // Load the video and set it to play. Putting the video
+  // in play mode is needed so that at least one frame is read
+  // and we can get the duration, size and other information from
+  // the video stream.
+ movie = new GSMovie(this, "station.mov");
+ movie.play();
+}
+
+void movieEvent(GSMovie movie) {
+ movie.read();
+}
+
+void draw() {
+ // A new time position is calculated using the current mouse location:
+ float f = constrain((float)mouseX / width, 0, 1);
+ float t = movie.duration() * f;
+
+ // If the new time is different enough from the current position,
+ // then we jump to the new position. But how different? Here the
+ // difference has been set to 0.1 (1 tenth of a second), but it can
+ // be smaller. My guess is that the smallest value should correspond
+ // to the duration of a single frame (for instance 1/24 if the frame rate
+  // of the video file is 24fps). Setting even smaller values seems to lead
+ // to choppiness. This will become trickier once the GSMovie.speed()
+ // and GSMovie.frameRate() methods become functional.
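+  // As a rough sketch of that idea, the threshold could be derived from an
+  // assumed frame rate instead of being hardcoded (24fps is hypothetical here):
+  //float minDiff = 1.0 / 24.0; // duration of one frame at 24fps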
+ if (0.1 < abs(t - movie.time())) {
+ // The movie stream must be in play mode in order to jump to another
+ // position along the stream. Otherwise it won't work.
+ movie.play();
+ movie.jump(t);
+ movie.pause();
+ }
+ image(movie, 0, 0, width, height);
+}
diff --git a/java/libraries/video/examples/Movie/Speed/Speed.pde b/java/libraries/video/examples/Movie/Speed/Speed.pde
new file mode 100644
index 000000000..cc269772b
--- /dev/null
+++ b/java/libraries/video/examples/Movie/Speed/Speed.pde
@@ -0,0 +1,35 @@
+/**
+ * GSVideo movie speed example.
+ *
+ * Use the GSMovie.speed() method to change
+ * the playback speed.
+ *
+ */
+
+import codeanticode.gsvideo.*;
+
+GSMovie movie;
+
+public void setup() {
+ size(320, 240);
+ background(0);
+ movie = new GSMovie(this, "balloon.ogg");
+ movie.loop();
+
+ PFont font = loadFont("DejaVuSans-24.vlw");
+ textFont(font, 24);
+}
+
+public void movieEvent(GSMovie movie) {
+ movie.read();
+}
+
+public void draw() {
+ image(movie, 0, 0, width, height);
+
+ float newSpeed = map(mouseX, 0, width, 0.1, 2);
+ movie.speed(newSpeed);
+ fill(240, 20, 30);
+ text(nfc(newSpeed, 2) + "X", width - 80, 30);
+}
+
diff --git a/java/libraries/video/examples/Movie/Speed/data/DejaVuSans-24.vlw b/java/libraries/video/examples/Movie/Speed/data/DejaVuSans-24.vlw
new file mode 100644
index 000000000..d05a95cb6
Binary files /dev/null and b/java/libraries/video/examples/Movie/Speed/data/DejaVuSans-24.vlw differ
diff --git a/java/libraries/video/examples/MovieMaker/DrawingMovie/DrawingMovie.pde b/java/libraries/video/examples/MovieMaker/DrawingMovie/DrawingMovie.pde
new file mode 100644
index 000000000..243e97826
--- /dev/null
+++ b/java/libraries/video/examples/MovieMaker/DrawingMovie/DrawingMovie.pde
@@ -0,0 +1,116 @@
+/**
+ * GSVideo drawing movie example.
+ *
+ * Adapted from Daniel Shiffman's original Drawing Movie
+ * example by Andres Colubri
+ * Makes a movie of a line drawn by the mouse. Press
+ * the spacebar to finish and save the movie.
+ */
+
+import codeanticode.gsvideo.*;
+
+GSMovieMaker mm;
+int fps = 30;
+
+void setup() {
+ size(320, 240);
+ frameRate(fps);
+
+ PFont font = createFont("Courier", 24);
+ textFont(font, 24);
+
+  // Save as THEORA in an OGG file at MEDIUM quality (the available quality settings
+  // are WORST, LOW, MEDIUM, HIGH and BEST):
+ mm = new GSMovieMaker(this, width, height, "drawing.ogg", GSMovieMaker.THEORA, GSMovieMaker.MEDIUM, fps);
+
+ // Available codecs are:
+ // THEORA
+ // XVID
+ // X264
+ // DIRAC
+ // MJPEG
+ // MJPEG2K
+ // As for the file formats, the following are autodetected from the filename extension:
+ // .ogg: OGG
+ // .avi: Microsoft's AVI
+ // .mov: Quicktime's MOV
+ // .flv: Flash Video
+ // .mkv: Matroska container
+ // .mp4: MPEG-4
+  // .3gp: 3GPP video
+ // .mpg: MPEG-1
+ // .mj2: Motion JPEG 2000
+ // Please note that some of the codecs/containers might not work as expected, depending
+ // on which gstreamer plugins are installed. Also, some codec/container combinations
+ // don't seem to be compatible, for example THEORA+AVI or X264+OGG.
+
+ // Encoding with DIRAC codec into an avi file:
+ //mm = new GSMovieMaker(this, width, height, "drawing.avi", GSMovieMaker.DIRAC, GSMovieMaker.BEST, fps);
+
+  // Important: Be sure to use the same framerate as the one set with frameRate().
+  // If the sketch's framerate is higher than the speed with which GSMovieMaker
+  // can compress frames and save them to file, then the computer's RAM will start to become
+  // clogged with unprocessed frames waiting on gstreamer's queue. If all the physical RAM
+  // is exhausted, then the whole system might become extremely slow and unresponsive.
+  // Using the same framerate as in the frameRate() function seems to be a reasonable choice,
+  // assuming that the CPU can keep up with encoding at the same pace with which Processing
+  // sends frames (which might not be the case if the CPU is slow). As the resolution increases,
+  // encoding becomes more costly and the risk of clogging the computer's RAM increases.
+
+  // The movie maker can also be initialized by explicitly specifying the names of the desired
+  // gstreamer encoder and muxer elements. Arrays with property names and values for the encoder
+  // can also be passed. In the following code, the DIRAC encoder (schroenc) and the Ogg muxer
+  // (oggmux) are selected, with an encoding quality of 9.0 (schroenc accepts quality values
+  // between 0 and 10). The property arrays can be set to null in order to use default property
+  // values.
+ //String[] propName = { "quality" };
+ //Float f = 9.0f;
+ //Object[] propValue = { f };
+ //mm = new GSMovieMaker(this, width, height, "drawing.ogg", "schroenc", "oggmux", propName, propValue, fps);
+
+  // There are two queues in the movie recording process: a pre-encoding queue and an encoding
+  // queue. The former is stored on the Java side and the latter inside gstreamer. When the
+  // encoding queue is full, frames start to accumulate in the pre-encoding queue until its
+  // maximum size is reached. After that point, new frames are dropped. To remove the limit on
+  // the size of the pre-encoding queue, set it to zero.
+  // The size of both is set with the following function (the first argument is the size of the
+  // pre-encoding queue):
+ mm.setQueueSize(50, 10);
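+  // For instance, to leave the pre-encoding queue unbounded (at the risk of
+  // filling up RAM if encoding can't keep up), set the first argument to zero:
+  //mm.setQueueSize(0, 10);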
+
+ mm.start();
+
+ background(160, 32, 32);
+}
+
+void draw() {
+ stroke(7, 146, 168);
+ strokeWeight(4);
+
+ // Draw if mouse is pressed
+  if (mousePressed && pmouseX != 0 && pmouseY != 0) {
+ line(pmouseX, pmouseY, mouseX, mouseY);
+ }
+
+  // Draw the frame count.
+ String s = "Frame " + frameCount;
+ fill(160, 32, 32);
+ noStroke();
+ rect(10, 6, textWidth(s), 24);
+ fill(255);
+ text(s, 10, 30);
+
+ loadPixels();
+ // Add window's pixels to movie
+ mm.addFrame(pixels);
+
+ println("Number of queued frames : " + mm.getQueuedFrames());
+ println("Number of dropped frames: " + mm.getDroppedFrames());
+}
+
+void keyPressed() {
+ if (key == ' ') {
+ // Finish the movie if space bar is pressed
+ mm.finish();
+ // Quit running the sketch once the file is written
+ exit();
+ }
+}
\ No newline at end of file
diff --git a/java/libraries/video/examples/Pipelines/Audio/Audio.pde b/java/libraries/video/examples/Pipelines/Audio/Audio.pde
new file mode 100644
index 000000000..d9d492a64
--- /dev/null
+++ b/java/libraries/video/examples/Pipelines/Audio/Audio.pde
@@ -0,0 +1,34 @@
+/**
+ * Audio pipeline.
+ * By Andres Colubri
+ *
+ */
+
+import codeanticode.gsvideo.*;
+
+GSPipeline pipeline;
+
+void setup() {
+ size(100, 100);
+
+ // An audio-only pipeline can be specified by setting the type parameter to GSVideo.AUDIO.
+ // In this way, GSVideo doesn't try to copy the stream to the Processing window.
+ // The other two possible types are GSVideo.VIDEO (default) and GSVideo.DATA.
+ // Linux:
+ pipeline = new GSPipeline(this, "audiotestsrc ! audioconvert ! alsasink", GSVideo.AUDIO);
+ // Windows:
+ //pipeline = new GSPipeline(this, "audiotestsrc ! audioconvert ! directsoundsink", GSVideo.AUDIO);
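+  // Mac OS X (untested here, assuming the osxaudiosink element is installed):
+  //pipeline = new GSPipeline(this, "audiotestsrc ! audioconvert ! osxaudiosink", GSVideo.AUDIO);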
+
+ // The pipeline starts in paused state, so a call to the play()
+  // method is needed to get things rolling.
+ pipeline.play();
+}
+
+void draw() {
+ // No need to draw anything on the screen. The audio gets
+ // automatically directed to the sound card.
+}
+
+
+
+
diff --git a/java/libraries/video/examples/Pipelines/Capture/Capture.pde b/java/libraries/video/examples/Pipelines/Capture/Capture.pde
new file mode 100644
index 000000000..574221d8d
--- /dev/null
+++ b/java/libraries/video/examples/Pipelines/Capture/Capture.pde
@@ -0,0 +1,58 @@
+/**
+ * Camera capture pipelines.
+ * By Andres Colubri
+ *
+ */
+
+import codeanticode.gsvideo.*;
+
+GSPipeline pipeline;
+
+void setup() {
+ size(640, 480);
+
+  // The ksvideosrc element allows you to select a capture device by index (0, 1, 2, etc).
+ //pipeline = new GSPipeline(this, "ksvideosrc device-index=0 ! decodebin2");
+
+ // DirectShow capture pipelines:
+  // Uses the first available capture device.
+  //pipeline = new GSPipeline(this, "dshowvideosrc ! decodebin2");
+  // This one allows you to choose the device based on its name property.
+ //pipeline = new GSPipeline(this, "dshowvideosrc device-name=\"Sony Visual Communication Camera VGP-VCC7\" ! decodebin2");
+
+  // Capture pipeline for 64-bit Mac OS X. It uses the qtkitvideosrc element, based on the
+  // newer QTKit framework. The input device can be set using the device-index property, which
+  // expects an integer value, like ksvideosrc above.
+ //pipeline = new GSPipeline(this, "qtkitvideosrc");
+
+  // Video4Linux2 capture pipeline.
+ pipeline = new GSPipeline(this, "v4l2src");
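+  // A caps filter can be appended to request a specific capture resolution,
+  // assuming the device supports it (the values below are hypothetical):
+  //pipeline = new GSPipeline(this, "v4l2src ! video/x-raw-yuv, width=640, height=480");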
+
+  // The full pipeline that GSVideo passes to GStreamer can be
+ // obtained with the getPipeline() method:
+ println("Pipeline string:");
+ println(pipeline.getPipeline());
+
+  // Tentative dv1394 capture pipeline. This thread on the Processing discourse board:
+  // http://processing.org/discourse/yabb2/YaBB.pl?num=1210072258/30
+  // could be very useful for setting up DV capture.
+ //pipeline = new GSPipeline(this, "dv1394src port=0 ! queue ! dvdemux ! ffdec_dvvideo ! ffmpegcolorspace ! video/x-raw-yuv, width=720");
+
+ // The pipeline starts in paused state, so a call to the play()
+  // method is needed to get things rolling.
+ pipeline.play();
+}
+
+void draw() {
+ // When the GSPipeline.available() method returns true,
+ // it means that a new frame is ready to be read.
+ if (pipeline.available()) {
+ pipeline.read();
+ image(pipeline, 0, 0);
+ }
+}
+
+
+
+
+
diff --git a/java/libraries/video/examples/Pipelines/Raw/Raw.pde b/java/libraries/video/examples/Pipelines/Raw/Raw.pde
new file mode 100644
index 000000000..c86c057a2
--- /dev/null
+++ b/java/libraries/video/examples/Pipelines/Raw/Raw.pde
@@ -0,0 +1,48 @@
+/**
+ * Raw pipeline.
+ * By Andres Colubri
+ *
+ */
+
+import codeanticode.gsvideo.*;
+
+GSPipeline pipeline;
+
+void setup() {
+ size(200, 200);
+
+  // A raw pipeline can be used to retrieve the data frames from the stream right after they
+  // have been decoded from the file.
+
+ // Reading audio frames from mp3 file. Note we need to add the decoding element (mad):
+ pipeline = new GSPipeline(this, "filesrc location=" + dataPath("groove.mp3") + " ! mad", GSVideo.RAW);
+
+ // Test audio signal generated by the audiotestsrc element. Here we don't need any decoding, as the
+ // frames coming out of audiotestsrc already contain valid audio data:
+ //pipeline = new GSPipeline(this, "audiotestsrc", GSVideo.RAW);
+
+ pipeline.loop();
+}
+
+void pipelineEvent(GSPipeline p) {
+ p.read();
+}
+
+void draw() {
+ background(0);
+
+ if (pipeline.data != null) {
+ //println("Data size: " + pipeline.data.length);
+ //println("Data caps: " + pipeline.dataCaps);
+
+ // Mapping audio bytes to pixel color.
+ loadPixels();
+ byte[] data = pipeline.data;
+ for (int i = 0; i < data.length; i++) {
+ int k = int(map(i, 0, data.length - 1, 0, width * height - 1));
+ pixels[k] = color(data[i] + 128, 0, 0, 255);
+ }
+ updatePixels();
+ }
+}
+
diff --git a/java/libraries/video/examples/Pipelines/Raw/data/groove.mp3 b/java/libraries/video/examples/Pipelines/Raw/data/groove.mp3
new file mode 100644
index 000000000..0a91e6c71
Binary files /dev/null and b/java/libraries/video/examples/Pipelines/Raw/data/groove.mp3 differ
diff --git a/java/libraries/video/examples/Pipelines/Test/Test.pde b/java/libraries/video/examples/Pipelines/Test/Test.pde
new file mode 100644
index 000000000..1810295bd
--- /dev/null
+++ b/java/libraries/video/examples/Pipelines/Test/Test.pde
@@ -0,0 +1,40 @@
+/**
+ * Test.
+ * By Andres Colubri
+ *
+ * This example shows how to create GStreamer pipelines using the GSPipeline object.
+ * Pipelines allow you to connect different gstreamer elements (video sources, decoders, etc.)
+ * in order to construct a video or audio stream. The command line tool gst-launch can be used
+ * to launch pipelines, and most pipelines specified with gst-launch can be used in GSPipeline,
+ * as shown in this sketch.
+ * Some online material on GStreamer:
+ * http://www.cin.ufpe.br/~cinlug/wiki/index.php/Introducing_GStreamer
+ * http://www.twm-kd.com/computers/software/webcam-and-linux-gstreamer-tutorial/
+ */
+
+import codeanticode.gsvideo.*;
+
+GSPipeline pipeline;
+
+void setup() {
+ size(320, 240);
+
+ // VideoTestSrc pipeline. Note that there is no need to specify a
+ // video sink as the last element of the pipeline, because GSVideo
+ // automatically directs the video frames of the pipeline to
+ // Processing's drawing surface.
+ pipeline = new GSPipeline(this, "videotestsrc");
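+  // The videotestsrc element also accepts a pattern property; for example,
+  // pattern=snow produces random noise instead of the default color bars:
+  //pipeline = new GSPipeline(this, "videotestsrc pattern=snow");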
+
+ // The pipeline starts in paused state, so a call to the play()
+  // method is needed to get things rolling.
+ pipeline.play();
+}
+
+void draw() {
+ // When the GSPipeline.available() method returns true,
+ // it means that a new frame is ready to be read.
+ if (pipeline.available()) {
+ pipeline.read();
+ image(pipeline, 0, 0);
+ }
+}
\ No newline at end of file
diff --git a/java/libraries/video/examples/Player/Audio/Audio.pde b/java/libraries/video/examples/Player/Audio/Audio.pde
new file mode 100644
index 000000000..2cd46975b
--- /dev/null
+++ b/java/libraries/video/examples/Player/Audio/Audio.pde
@@ -0,0 +1,26 @@
+/**
+ * Audio.
+ * Audio playback using the GSPlayer object.
+ * By Ryan Kelln
+ *
+ * Move the cursor across the screen to change volume.
+ */
+
+import codeanticode.gsvideo.*;
+
+GSPlayer sample;
+
+void setup() {
+ size(100, 100);
+ // The last parameter is used to indicate the stream type:
+ // VIDEO (default), AUDIO or DATA.
+ sample = new GSPlayer(this, "groove.mp3", GSVideo.AUDIO);
+ sample.loop();
+}
+
+void draw() {
+ //sample.jump(float(mouseY) / height * sample.duration());
+
+ sample.volume(float(mouseX) / width);
+}
diff --git a/java/libraries/video/examples/Player/Audio/data/groove.mp3 b/java/libraries/video/examples/Player/Audio/data/groove.mp3
new file mode 100644
index 000000000..0a91e6c71
Binary files /dev/null and b/java/libraries/video/examples/Player/Audio/data/groove.mp3 differ
diff --git a/java/libraries/video/examples/Player/Raw/Raw.pde b/java/libraries/video/examples/Player/Raw/Raw.pde
new file mode 100644
index 000000000..af708159d
--- /dev/null
+++ b/java/libraries/video/examples/Player/Raw/Raw.pde
@@ -0,0 +1,29 @@
+/**
+ * Raw.
+ *
+ * Gets raw data frames from video stream, without any color conversion.
+ */
+
+import codeanticode.gsvideo.*;
+
+GSPlayer video;
+
+void setup() {
+ size(100, 100);
+ video = new GSPlayer(this, "station.mov", GSVideo.RAW);
+ video.loop();
+}
+
+void playerEvent(GSPlayer player) {
+ player.read();
+}
+
+void draw() {
+ // The raw frame data is stored in video.data, which is a byte array.
+ // video.dataCaps is a string containing info about the incoming data.
+
+ if (video.data != null) {
+ println("Data size: " + video.data.length);
+ println("Data caps: " + video.dataCaps);
+ }
+}
diff --git a/java/libraries/video/library/export.txt b/java/libraries/video/library/export.txt
new file mode 100644
index 000000000..189254a96
--- /dev/null
+++ b/java/libraries/video/library/export.txt
@@ -0,0 +1 @@
+name = Video
diff --git a/java/libraries/video/src/processing/video/CLibrary.java b/java/libraries/video/src/processing/video/CLibrary.java
new file mode 100644
index 000000000..069143a2e
--- /dev/null
+++ b/java/libraries/video/src/processing/video/CLibrary.java
@@ -0,0 +1,38 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import com.sun.jna.Library;
+import com.sun.jna.Native;
+
+/**
+ * This JNA interface provides access to the environment variable-related functions in the C library.
+ * How to use:
+ * CLibrary clib = CLibrary.INSTANCE;
+ * String s = clib.getenv("DYLD_LIBRARY_PATH");
+ */
+public interface CLibrary extends Library {
+ CLibrary INSTANCE = (CLibrary)Native.loadLibrary("c", CLibrary.class);
+
+ int setenv(String name, String value, int overwrite);
+ String getenv(String name);
+ int unsetenv(String name);
+ int putenv(String string);
+}
diff --git a/java/libraries/video/src/processing/video/Capture.java b/java/libraries/video/src/processing/video/Capture.java
new file mode 100644
index 000000000..01cea75b7
--- /dev/null
+++ b/java/libraries/video/src/processing/video/Capture.java
@@ -0,0 +1,824 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import processing.core.*;
+
+import java.nio.*;
+import java.util.ArrayList;
+import java.lang.reflect.*;
+
+import org.gstreamer.*;
+import org.gstreamer.Buffer;
+import org.gstreamer.elements.*;
+import org.gstreamer.interfaces.PropertyProbe;
+import org.gstreamer.interfaces.Property;
+
+/**
+ * Class for storing and manipulating video frames from an attached capture
+ * device such as a camera.
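+ *
+ * A minimal usage sketch (using the default device, as in the library examples):
+ *
+ * GSCapture cam = new GSCapture(this, 640, 480);
+ * cam.play();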
+ */
+public class GSCapture extends PImage implements PConstants {
+ protected String source;
+
+ protected boolean playing = false;
+ protected boolean paused = false;
+
+ protected String fps;
+ protected int bufWidth;
+ protected int bufHeight;
+
+ protected Pipeline gpipeline;
+ protected Element gsource;
+
+ protected Method captureEventMethod;
+ protected Method copyBufferMethod;
+
+ protected Object eventHandler;
+ protected Object copyHandler;
+
+ protected boolean available;
+ protected boolean pipelineReady;
+
+ protected RGBDataAppSink rgbSink = null;
+ protected int[] copyPixels = null;
+
+ protected BufferDataAppSink natSink = null;
+ protected Buffer natBuffer = null;
+ protected boolean copyBufferMode = false;
+ protected String copyMask;
+
+ protected boolean firstFrame = true;
+
+ protected ArrayList suppResList;
+ protected ArrayList suppFpsList;
+
+ protected int reqWidth;
+ protected int reqHeight;
+
+ /**
+ * Basic constructor: tries to auto-detect all the capture parameters,
+ * with the exception of the resolution.
+ */
+ public GSCapture(PApplet parent, int requestWidth, int requestHeight) {
+ super(requestWidth, requestHeight, RGB);
+ initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
+ new String[] {}, new String[] {}, "");
+ }
+
+ /**
+ * Constructor that takes resolution and framerate indicated as a single number.
+ */
+ public GSCapture(PApplet parent, int requestWidth, int requestHeight, int frameRate) {
+ super(requestWidth, requestHeight, RGB);
+ initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
+ new String[] {}, new String[] {}, frameRate + "/1");
+ }
+
+ /**
+   * This constructor allows you to specify the camera name. In Linux, for example, this
+ * should be a string of the form /dev/video0, /dev/video1, etc.
+ */
+ public GSCapture(PApplet parent, int requestWidth, int requestHeight, String cameraName) {
+ super(requestWidth, requestHeight, RGB);
+ initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
+ new String[] { devicePropertyName() }, new String[] { cameraName }, "");
+ }
+
+ /**
+   * This constructor allows you to specify the camera name and the desired framerate.
+ */
+ public GSCapture(PApplet parent, int requestWidth, int requestHeight, int frameRate,
+ String cameraName) {
+ super(requestWidth, requestHeight, RGB);
+ initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
+ new String[] { devicePropertyName() }, new String[] { cameraName },
+ frameRate + "/1");
+ }
+
+ /**
+   * This constructor lets you indicate which source element to use (e.g. v4l2src,
+   * osxvideosrc, dshowvideosrc, ksvideosrc, etc.).
+ */
+ public GSCapture(PApplet parent, int requestWidth, int requestHeight, int frameRate,
+ String sourceName, String cameraName) {
+ super(requestWidth, requestHeight, RGB);
+ initGStreamer(parent, requestWidth, requestHeight, sourceName, new String[] {}, new int[] {},
+ new String[] { devicePropertyName() }, new String[] { cameraName },
+ frameRate + "/1");
+ }
+
+ /**
+ * This constructor accepts an arbitrary list of string properties for the source element.
+ * The camera name could be one of these properties. The framerate must be specified
+ * as a fraction string: 30/1, 15/2, etc.
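+   * For example (assuming a v4l2 device available as /dev/video0):
+   *
+   * cam = new GSCapture(this, 640, 480, "30/1", "v4l2src",
+   *                     new String[] { "device" }, new String[] { "/dev/video0" });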
+ */
+ public GSCapture(PApplet parent, int requestWidth, int requestHeight, String frameRate,
+ String sourceName, String[] strPropNames, String[] strPropValues) {
+ super(requestWidth, requestHeight, RGB);
+ initGStreamer(parent, requestWidth, requestHeight, sourceName, new String[] {}, new int[] {},
+ strPropNames, strPropValues, frameRate);
+ }
+
+ /**
+ * This constructor accepts an arbitrary list of string properties for the source element,
+   * as well as a list of integer properties. This could be useful if a camera cannot be
+   * specified by name but only by index. The framerate must be a fraction string: 30/1, 15/2, etc.
+ */
+ public GSCapture(PApplet parent, int requestWidth, int requestHeight, String frameRate,
+ String sourceName, String[] strPropNames, String[] strPropValues,
+ String[] intPropNames, int[] intPropValues) {
+ super(requestWidth, requestHeight, RGB);
+ initGStreamer(parent, requestWidth, requestHeight, sourceName, intPropNames, intPropValues,
+ strPropNames, strPropValues, frameRate);
+ }
+
+ /**
+   * Releases the gstreamer resources associated with this capture object.
+   * The object shouldn't be used after calling this method.
+ */
+ public void delete() {
+ if (gpipeline != null) {
+ try {
+ if (gpipeline.isPlaying()) {
+ gpipeline.stop();
+ }
+ } catch (IllegalStateException e) {
+ System.err.println("error when deleting player, maybe some native resource is already disposed");
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ pixels = null;
+
+ copyPixels = null;
+ if (rgbSink != null) {
+ rgbSink.removeListener();
+ rgbSink.dispose();
+ rgbSink = null;
+ }
+
+ natBuffer = null;
+ if (natSink != null) {
+ natSink.removeListener();
+ natSink.dispose();
+ natSink = null;
+ }
+
+ gpipeline.dispose();
+ gpipeline = null;
+ }
+ }
+
+ /**
+ * Same as delete.
+ */
+ public void dispose() {
+ delete();
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ * The color conversion mask is automatically set to the one required to
+ * copy the frames to OpenGL.
+ *
+ * @param Object dest
+ */
+ public void setPixelDest(Object dest) {
+ copyHandler = dest;
+ if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
+ copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
+ } else {
+ copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
+ }
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ *
+ * @param Object dest
+ * @param String mask
+ */
+ public void setPixelDest(Object dest, String mask) {
+ copyHandler = dest;
+ copyMask = mask;
+ }
+
+ /**
+   * Uses a generic object as handler of the capture stream. This object should
+   * have a captureEvent method that receives a GSCapture argument. This method
+   * will be called upon a new frame read event.
+ *
+ */
+ public void setEventHandlerObject(Object obj) {
+ eventHandler = obj;
+
+ try {
+      captureEventMethod = eventHandler.getClass().getMethod("captureEvent",
+ new Class[] { GSCapture.class });
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ }
+ }
+
+ /**
+ * Returns true if the stream is already producing frames.
+ *
+ * @return boolean
+ */
+ public boolean ready() {
+ return 0 < bufWidth && 0 < bufHeight && pipelineReady;
+ }
+
+ /**
+ * Returns "true" when a new video frame is available to read.
+ *
+ * @return boolean
+ */
+ public boolean available() {
+ return available;
+ }
+
+ /**
+ * Returns whether the stream is playing or not.
+ *
+ * @return boolean
+ */
+ public boolean isPlaying() {
+ return playing;
+ }
+
+ /**
+ * Returns whether the stream is paused or not.
+ *
+ * @return boolean
+ */
+ public boolean isPaused() {
+ return paused;
+ }
+
+ /**
+ * Resumes the capture pipeline.
+ */
+ public void play() {
+ boolean init = false;
+ if (!pipelineReady) {
+ initPipeline();
+ init = true;
+ }
+
+ playing = true;
+ paused = false;
+ gpipeline.play();
+
+ if (init) {
+ // Resolution and FPS initialization needs to be done after the
+ // pipeline is set to play.
+ initResAndFps();
+ }
+ }
+
+ /**
+ * Stops the capture pipeline.
+ */
+ public void pause() {
+ playing = false;
+ paused = true;
+ gpipeline.pause();
+ }
+
+ /**
+ * Reads the current video frame.
+ *
+ * This method() and invokeEvent() are now synchronized, so that invokeEvent()
+ * can't be called whilst we're busy reading. Problematic frame error
+ * fixed by Charl P. Botha
+ */
+ public synchronized void read() {
+ // We loadPixels() first to ensure that at least we always have a non-null
+ // pixels array, even if without any valid image inside.
+ loadPixels();
+
+ if (copyBufferMode) {
+      // The native buffer from gstreamer is copied to the destination object.
+ if (natBuffer == null || copyBufferMethod == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ super.init(bufWidth, bufHeight, RGB);
+ loadPixels();
+ firstFrame = false;
+ }
+
+ IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
+ try {
+ copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ natBuffer = null;
+ } else {
+ if (copyPixels == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ super.init(bufWidth, bufHeight, RGB);
+ loadPixels();
+ firstFrame = false;
+ }
+
+ int[] temp = pixels;
+ pixels = copyPixels;
+ updatePixels();
+ copyPixels = temp;
+ }
+
+ available = false;
+ }
+
+ /**
+ * Returns a list with the resolutions supported by the capture device.
+   * Each element of the list is in turn an array of two ints, the first being
+   * the width and the second the height.
+ *
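+   * For example, to print the supported resolutions (assuming cam is a
+   * GSCapture object that is already playing):
+   *
+   * int[][] res = cam.resolutions();
+   * for (int i = 0; i < res.length; i++) println(res[i][0] + "x" + res[i][1]);
+   *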
+ * @return int[][]
+ */
+ public int[][] resolutions() {
+ int n = suppResList.size();
+ int[][] res = new int[n][2];
+ for (int i = 0; i < n; i++) {
+ int[] wh = (int[])suppResList.get(i);
+ res[i] = new int[] {wh[0], wh[1]};
+ }
+ return res;
+ }
+
+ /**
+ * Returns a list with the framerates supported by the capture device,
+ * expressed as a string like: 30/1, 15/2, etc.
+ *
+ * @return String[]
+ */
+ public String[] framerates() {
+ int n = suppFpsList.size();
+ String[] res = new String[n];
+ for (int i = 0; i < n; i++) {
+ res[i] = (String)suppFpsList.get(i);
+ }
+ return res;
+ }
+
+ /**
+ * Returns a list of available capture devices.
+ *
+ * @return String[]
+ */
+ static public String[] list() {
+ if (PApplet.platform == LINUX) {
+ return list("v4l2src");
+ } else if (PApplet.platform == WINDOWS) {
+ return list("dshowvideosrc");
+ } else if (PApplet.platform == MACOSX) {
+ return list("osxvideosrc");
+ } else {
+ return null;
+ }
+ }
+
+ /**
+   * Get a list of all available capture devices as a String array, e.g.
+   * println(GSCapture.list()) will show you the goodies.
+ *
+ * @param sourceName String
+ * @return String[]
+ */
+ static public String[] list(String sourceName) {
+ return list(sourceName, devicePropertyName());
+ }
+
+ static protected String[] list(String sourceName, String propertyName) {
+ GSVideo.init();
+ String[] valuesListing = new String[0];
+ Element videoSource = ElementFactory.make(sourceName, "Source");
+ PropertyProbe probe = PropertyProbe.wrap(videoSource);
+ if (probe != null) {
+ Property property = probe.getProperty(propertyName);
+ if (property != null) {
+ Object[] values = probe.getValues(property);
+ if (values != null) {
+ valuesListing = new String[values.length];
+ for (int i = 0; i < values.length; i++)
+ if (values[i] instanceof String)
+ valuesListing[i] = (String) values[i];
+ }
+ }
+ }
+ return valuesListing;
+ }
+
+ /**
+ * invokeEvent() and read() are synchronized so that they can not be
+   * called simultaneously. When they were not synchronized, this caused
+   * the infamous problematic frame crash.
+   * Found and fixed by Charl P. Botha.
+ */
+ protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+ if (copyPixels == null) {
+ copyPixels = new int[w * h];
+ }
+ buffer.rewind();
+ try {
+ buffer.get(copyPixels);
+ } catch (BufferUnderflowException e) {
+ e.printStackTrace();
+ copyPixels = null;
+ return;
+ }
+
+    // Creates a captureEvent.
+ if (captureEventMethod != null) {
+ try {
+ captureEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling captureEvent() for capture object");
+ e.printStackTrace();
+ captureEventMethod = null;
+ }
+ }
+ }
+
+ protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+ natBuffer = buffer;
+
+    // Creates a captureEvent.
+ if (captureEventMethod != null) {
+ try {
+ captureEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling captureEvent() for capture object");
+ e.printStackTrace();
+ captureEventMethod = null;
+ }
+ }
+ }
+
+ /**
+ * Returns the name of the source element used for capture.
+ *
+ * @return String
+ */
+ public String getSource() {
+ return source;
+ }
+
+  // Tries to guess the correct source element for each platform.
+ protected void initPlatform(PApplet parent, int requestWidth, int requestHeight,
+ String[] intPropNames, int[] intPropValues,
+ String[] strPropNames, String[] strPropValues,
+ String frameRate) {
+ if (PApplet.platform == LINUX) {
+ initGStreamer(parent, requestWidth, requestHeight, "v4l2src", intPropNames, intPropValues,
+ strPropNames, strPropValues, frameRate);
+ } else if (PApplet.platform == WINDOWS) {
+ initGStreamer(parent, requestWidth, requestHeight, "ksvideosrc", intPropNames,
+ intPropValues, strPropNames, strPropValues, frameRate);
+ //init(requestWidth, requestHeight, "dshowvideosrc", intPropNames,
+ // intPropValues, strPropNames, strPropValues, frameRate, addDecoder, null, "");
+ } else if (PApplet.platform == MACOSX) {
+ initGStreamer(parent, requestWidth, requestHeight, "osxvideosrc", intPropNames,
+ intPropValues, strPropNames, strPropValues, frameRate);
+ } else {
+ parent.die("Error: unrecognized platform.", null);
+ }
+ }
+
+ // The main initialization here.
+ protected void initGStreamer(PApplet parent, int requestWidth, int requestHeight, String sourceName,
+ String[] intPropNames, int[] intPropValues,
+ String[] strPropNames, String[] strPropValues, String frameRate) {
+ this.parent = parent;
+
+ GSVideo.init();
+
+ // register methods
+ parent.registerDispose(this);
+
+ setEventHandlerObject(parent);
+
+ gpipeline = new Pipeline("GSCapture");
+
+ this.source = sourceName;
+
+ fps = frameRate;
+ reqWidth = requestWidth;
+ reqHeight = requestHeight;
+
+ gsource = ElementFactory.make(sourceName, "Source");
+
+ if (intPropNames.length != intPropValues.length) {
+ parent.die("Error: number of integer property names is different from number of values.",
+ null);
+ }
+
+ for (int i = 0; i < intPropNames.length; i++) {
+ gsource.set(intPropNames[i], intPropValues[i]);
+ }
+
+ if (strPropNames.length != strPropValues.length) {
+ parent.die("Error: number of string property names is different from number of values.",
+ null);
+ }
+
+ for (int i = 0; i < strPropNames.length; i++) {
+ gsource.set(strPropNames[i], strPropValues[i]);
+ }
+
+ bufWidth = bufHeight = 0;
+ pipelineReady = false;
+ }
+
+ protected void initPipeline() {
+ String fpsStr = "";
+ if (!fps.equals("")) {
+      // If the framerate string is empty we let the source element
+      // use its default value.
+ fpsStr = ", framerate=" + fps;
+ }
+
+ if (copyHandler != null) {
+ try {
+ copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
+ new Class[] { Object.class, IntBuffer.class, int.class, int.class });
+ copyBufferMode = true;
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+
+ try {
+ Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
+ meth.invoke(copyHandler, new Object[] { this });
+ } catch (Exception e) {
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+ String caps = "width=" + reqWidth + ", height=" + reqHeight + ", " + copyMask;
+
+ natSink = new BufferDataAppSink("nat", caps,
+ new BufferDataAppSink.Listener() {
+ public void bufferFrame(int w, int h, Buffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+
+ natSink.setAutoDisposeBuffer(false);
+
+        // No need for natSink.dispose(), because the addMany() doesn't increment the
+        // refcount of the sink object.
+
+ gpipeline.addMany(gsource, natSink);
+ Element.linkMany(gsource, natSink);
+ }
+ }
+ }
+
+ if (!copyBufferMode) {
+ Element conv = ElementFactory.make("ffmpegcolorspace", "ColorConverter");
+
+ Element videofilter = ElementFactory.make("capsfilter", "ColorFilter");
+ videofilter.setCaps(new Caps("video/x-raw-rgb, width=" + reqWidth + ", height=" + reqHeight +
+ ", bpp=32, depth=24" + fpsStr));
+
+ rgbSink = new RGBDataAppSink("rgb",
+ new RGBDataAppSink.Listener() {
+ public void rgbFrame(int w, int h, IntBuffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+ // Setting direct buffer passing in the video sink, so no new buffers are created
+ // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
+ // out this one).
+ rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
+
+ // No need for rgbSink.dispose(), because the addMany() doesn't increment the
+ // refcount of the videoSink object.
+
+ gpipeline.addMany(gsource, conv, videofilter, rgbSink);
+ Element.linkMany(gsource, conv, videofilter, rgbSink);
+ }
+
+ pipelineReady = true;
+ }
+
+ protected void initResAndFps() {
+ // The pipeline needs to be in playing state to be able to
+ // report the supported resolutions and framerates of the
+ // capture device.
+ getSuppResAndFpsList();
+
+ boolean suppRes = !(0 < suppResList.size()); // Default value is true if resolution list empty.
+ for (int i = 0; i < suppResList.size(); i++) {
+ int[] wh = (int[])suppResList.get(i);
+ if (reqWidth == wh[0] && reqHeight == wh[1]) {
+ suppRes = true;
+ break;
+ }
+ }
+
+ if (!suppRes) {
+ System.err.println("The requested resolution of " + reqWidth + "x" + reqHeight + " is not supported by the capture device.");
+ System.err.println("Use one of the following resolutions instead:");
+ for (int i = 0; i < suppResList.size(); i++) {
+ int[] wh = (int[])suppResList.get(i);
+ System.err.println(wh[0] + "x" + wh[1]);
+ }
+ }
+
+ boolean suppFps = !(0 < suppFpsList.size()); // Default value is true if fps list empty.
+ for (int i = 0; i < suppFpsList.size(); i++) {
+ String str = (String)suppFpsList.get(i);
+ if (fps.equals("") || fps.equals(str)) {
+ suppFps = true;
+ break;
+ }
+ }
+
+ if (!suppFps) {
+ System.err.println("The requested framerate of " + fps + " is not supported by the capture device.");
+ System.err.println("Use one of the following framerates instead:");
+ for (int i = 0; i < suppFpsList.size(); i++) {
+ String str = (String)suppFpsList.get(i);
+ System.err.println(str);
+ }
+ }
+ }
+
+ protected void getSuppResAndFpsList() {
+ suppResList = new ArrayList();
+ suppFpsList = new ArrayList();
+
+ for (Element src : gpipeline.getSources()) {
+ for (Pad pad : src.getPads()) {
+ Caps caps = pad.getCaps();
+ int n = caps.size();
+ for (int i = 0; i < n; i++) {
+ Structure str = caps.getStructure(i);
+
+ int w = ((Integer)str.getValue("width")).intValue();
+ int h = ((Integer)str.getValue("height")).intValue();
+
+ boolean newRes = true;
+ // Making sure we didn't add this resolution already.
+ // Different caps could have same resolution.
+ for (int j = 0; j < suppResList.size(); j++) {
+ int[] wh = (int[])suppResList.get(j);
+ if (w == wh[0] && h == wh[1]) {
+ newRes = false;
+ break;
+ }
+ }
+ if (newRes) {
+ suppResList.add(new int[] {w, h});
+ }
+
+ if (PApplet.platform == WINDOWS) {
+ // In Windows the getValueList() method doesn't seem to
+            // return a valid list of fraction values, so we work on
+ // the string representation of the caps structure.
+ String str2 = str.toString();
+
+ int n0 = str2.indexOf("framerate=(fraction)");
+ if (-1 < n0) {
+ String temp = str2.substring(n0 + 20, str2.length());
+ int n1 = temp.indexOf("[");
+ int n2 = temp.indexOf("]");
+ if (-1 < n1 && -1 < n2) {
+ // A list of fractions enclosed between '[' and ']'
+ temp = temp.substring(n1 + 1, n2);
+ String[] fractions = temp.split(",");
+ for (int k = 0; k < fractions.length; k++) {
+ addFpsStr(fractions[k].trim());
+ }
+ } else {
+ // A single fraction
+ int n3 = temp.indexOf(",");
+ int n4 = temp.indexOf(";");
+ if (-1 < n3 || -1 < n4) {
+ int n5 = -1;
+ if (n3 == -1) {
+ n5 = n4;
+ } else if (n4 == -1) {
+ n5 = n3;
+ } else {
+ n5 = PApplet.min(n3, n4);
+ }
+
+ temp = temp.substring(0, n5);
+ addFpsStr(temp.trim());
+ }
+ }
+ }
+ } else {
+          boolean singleFrac = false;
+          try {
+            Fraction fr = str.getFraction("framerate");
+            addFps(fr);
+            singleFrac = true;
+          } catch (Exception e) {
+          }
+
+          if (!singleFrac) {
+ ValueList flist = str.getValueList("framerate");
+ // All the framerates are put together, but this is not
+            // entirely accurate since there might be some of them
+ // that work only for certain resolutions.
+ for (int k = 0; k < flist.getSize(); k++) {
+ Fraction fr = flist.getFraction(k);
+ addFps(fr);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ protected void addFps(Fraction fr) {
+ int frn = fr.numerator;
+ int frd = fr.denominator;
+ addFpsStr(frn + "/" + frd);
+ }
+
+ protected void addFpsStr(String frstr) {
+ boolean newFps = true;
+ for (int j = 0; j < suppFpsList.size(); j++) {
+ String frstr0 = (String)suppFpsList.get(j);
+ if (frstr.equals(frstr0)) {
+ newFps = false;
+ break;
+ }
+ }
+ if (newFps) {
+ suppFpsList.add(frstr);
+ }
+ }
+
+ static protected String devicePropertyName() {
+ // TODO: Check the property names
+ if (PApplet.platform == LINUX) {
+ return "device"; // Is this correct?
+ } else if (PApplet.platform == WINDOWS) {
+ return "device-name";
+ } else if (PApplet.platform == MACOSX) {
+ return "device";
+ } else {
+ return "";
+ }
+ }
+
+ static protected String indexPropertyName() {
+ // TODO: Check the property names
+ if (PApplet.platform == LINUX) {
+ return "device-index"; // Is this correct? Probably not.
+ } else if (PApplet.platform == WINDOWS) {
+ return "device-index";
+ } else if (PApplet.platform == MACOSX) {
+ return "device-index"; // Is this correct? Probably not.
+ } else {
+ return "";
+ }
+ }
+
+ public synchronized void disposeBuffer(Object buf) {
+ ((Buffer)buf).dispose();
+ }
+}
diff --git a/java/libraries/video/src/processing/video/GSLibraryLoader.java b/java/libraries/video/src/processing/video/GSLibraryLoader.java
new file mode 100644
index 000000000..6736569ad
--- /dev/null
+++ b/java/libraries/video/src/processing/video/GSLibraryLoader.java
@@ -0,0 +1,263 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import com.sun.jna.Library;
+import com.sun.jna.Native;
+import com.sun.jna.Platform;
+
+// Library loader class by Tal Shalif
+public class GSLibraryLoader {
+
+ public interface DummyLibrary extends Library {
+ }
+
+ private static GSLibraryLoader instance;
+
+ // These dependencies correspond to gstreamer-winbuilds 0.10.6
+ static final Object[][] WIN32_DEPENDENCIES = {
+ { "SDL", new String[] {}, false }, { "glew32", new String[] {}, false },
+ { "iconv-2", new String[] {}, false },
+ { "liba52-0", new String[] {}, false },
+ { "libbz2", new String[] {}, false },
+ { "libcairo-2", new String[] {}, false },
+ { "libdca-0", new String[] {}, false },
+ { "libdvdcss-2", new String[] {}, false },
+ { "libdvdnav-4", new String[] {}, false },
+ { "libdvdnavmini-4", new String[] {}, false },
+ { "libdvdread-4", new String[] {}, false },
+ { "libfaac-0", new String[] {}, false },
+ { "libfaad-2", new String[] {}, false },
+ { "libfontconfig-1", new String[] {}, false },
+ { "libfreetype-6", new String[] {}, false },
+ { "libgcrypt-11", new String[] {}, false },
+ { "libgnutls-26", new String[] {}, false },
+ { "libgnutls-extra-26", new String[] {}, false },
+ { "libgnutls-openssl-26", new String[] {}, false },
+ { "libgpg-error-0", new String[] {}, false },
+ { "libid3tag-0", new String[] {}, false },
+ { "libjpeg-8", new String[] {}, false },
+ { "libmad-0", new String[] {}, false },
+ { "libmms-0", new String[] {}, false },
+ { "libmp3lame-0", new String[] {}, false },
+ { "libmpeg2-0", new String[] {}, false },
+ { "libmpeg2convert-0", new String[] {}, false },
+ { "libneon-27", new String[] {}, false },
+ { "libnice-0", new String[] {}, false },
+ { "libogg-0", new String[] {}, false },
+ { "liboil-0.3-0", new String[] {}, false },
+ { "libopenjpeg-2", new String[] {}, false },
+ { "libpango-1.0-0", new String[] {}, false },
+ { "libpangocairo-1.0-0", new String[] {}, false },
+ { "libpangoft2-1.0-0", new String[] {}, false },
+ { "libpangowin32-1.0-0", new String[] {}, false },
+ { "libpixman-1-0", new String[] {}, true },
+ { "libpng14-14", new String[] {}, false },
+ { "liborc-0.4-0", new String[] {}, false },
+ { "libschroedinger-1.0-0", new String[] {}, false },
+ { "libsoup-2.4-1", new String[] {}, false },
+ { "libspeex-1", new String[] {}, false },
+ { "libtheora-0", new String[] {}, false },
+ { "libtheoradec-1", new String[] {}, false },
+ { "libtheoraenc-1", new String[] {}, false },
+ { "libvorbis-0", new String[] {}, false },
+ { "libvorbisenc-2", new String[] {}, false },
+ { "libvorbisfile-3", new String[] {}, false },
+ { "libwavpack-1", new String[] {}, false },
+ { "libx264-107", new String[] {}, false },
+ { "libxml2-2", new String[] {}, false },
+ { "pthreadGC2", new String[] {}, false },
+ { "xvidcore", new String[] {}, false },
+ { "z", new String[] {}, false },
+ { "avutil-gpl-50", new String[] {}, false },
+ { "avformat-gpl-52", new String[] {}, false },
+ { "avcodec-gpl-52", new String[] {}, false },
+ { "swscale-gpl-0", new String[] {}, false },
+ { "libcelt-0", new String[] {}, false },
+ { "libgdk_pixbuf-2.0-0", new String[] {}, false },
+ { "librsvg-2-2", new String[] {}, false },
+ { "libflac-8", new String[] {}, false },
+
+ { "gio-2.0", new String[] {}, true },
+ { "glib-2.0", new String[] {}, true },
+ { "gmodule-2.0", new String[] {}, true },
+ { "gobject-2.0", new String[] {}, true },
+ { "gthread-2.0", new String[] {}, true },
+
+ { "gstapp-0.10", new String[] {}, true },
+ { "gstaudio-0.10", new String[] {}, true },
+ { "gstbase-0.10", new String[] {}, true },
+ { "gstcdda-0.10", new String[] {}, true },
+ { "gstcontroller-0.10", new String[] {}, true },
+ { "gstdataprotocol-0.10", new String[] {}, true },
+ { "gstfarsight-0.10", new String[] {}, true },
+ { "gstfft-0.10", new String[] {}, true },
+ { "gstgl-0.10", new String[] {}, true },
+ { "gstinterfaces-0.10", new String[] {}, true },
+ { "gstnet-0.10", new String[] {}, true },
+ { "gstnetbuffer-0.10", new String[] {}, true },
+ { "gstpbutils-0.10", new String[] {}, true },
+ { "gstphotography-0.10", new String[] {}, true },
+ { "gstreamer-0.10", new String[] {}, true },
+ { "gstriff-0.10", new String[] {}, true },
+ { "gstrtp-0.10", new String[] {}, true },
+ { "gstrtsp-0.10", new String[] {}, true },
+ { "gstsdp-0.10", new String[] {}, true },
+ { "gsttag-0.10", new String[] {}, true },
+ { "gstvideo-0.10", new String[] {}, true },
+ { "gstbasevideo-0.10", new String[] {}, true } };
+
+ static final Object[][] OSX_DEPENDENCIES = {
+ { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
+ { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
+ { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
+ { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
+ { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true } };
+
+ static final Object[][] DEFAULT_DEPENDENCIES = {
+ { "gstreamer-0.10", new String[] {}, true },
+ { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
+ { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
+ { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
+ { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
+ { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true }, };
+
+ static final Object[][] dependencies = Platform.isWindows() ? WIN32_DEPENDENCIES
+ : Platform.isMac() ? OSX_DEPENDENCIES : DEFAULT_DEPENDENCIES;
+
+  private static final Map<String, Object> loadedMap = new HashMap<String, Object>();
+
+ private static final int RECURSIVE_LOAD_MAX_DEPTH = 5;
+
+ private GSLibraryLoader() {
+ }
+
+ private void preLoadLibs() {
+ for (Object[] a : dependencies) {
+ load(a[0].toString(), DummyLibrary.class, true, 0, (Boolean) a[2]);
+ }
+ }
+
+ private String[] findDeps(String name) {
+
+ for (Object[] a : dependencies) {
+ if (name.equals(a[0])) {
+
+ return (String[]) a[1];
+ }
+ }
+
+    return new String[] {}; // library dependency load chain unspecified -
+ // probably client call
+ }
+
+  public Object load(String name, Class<?> clazz, boolean reqLib) {
+ return load(name, clazz, true, 0, reqLib);
+ }
+
+  private Object load(String name, Class<?> clazz, boolean forceReload,
+ int depth, boolean reqLib) {
+
+ assert depth < RECURSIVE_LOAD_MAX_DEPTH : String.format(
+ "recursive max load depth %s has been exceeded", depth);
+
+ Object library = loadedMap.get(name);
+
+ if (null == library || forceReload) {
+
+ // Logger.getAnonymousLogger().info(String.format("%" + ((depth + 1) * 2)
+ // + "sloading %s", "->", name));
+
+ try {
+ String[] deps = findDeps(name);
+
+ for (String lib : deps) {
+ load(lib, DummyLibrary.class, false, depth + 1, reqLib);
+ }
+
+ library = loadLibrary(name, clazz, reqLib);
+
+ if (library != null) {
+ loadedMap.put(name, library);
+ }
+ } catch (Exception e) {
+ if (reqLib)
+          throw new RuntimeException(String.format("can not load library %s",
+              name), e);
+        else
+          System.out.println(String.format("can not load library %s: %s", name, e));
+ }
+ }
+
+ return library;
+ }
+
+  private static Object loadLibrary(String name, Class<?> clazz, boolean reqLib) {
+
+ // Logger.getAnonymousLogger().info(String.format("loading %s", name));
+
+ String[] nameFormats;
+ nameFormats = Platform.isWindows() ? new String[] { "lib%s", "lib%s-0",
+ "%s" } : new String[] { "%s-0", "%s" };
+
+ UnsatisfiedLinkError linkError = null;
+
+ for (String fmt : nameFormats) {
+ try {
+ String s = String.format(fmt, name);
+ //System.out.println("Trying to load library file " + s);
+ Object obj = Native.loadLibrary(s, clazz);
+ //System.out.println("Loaded library " + s + " succesfully!");
+ return obj;
+ } catch (UnsatisfiedLinkError ex) {
+ linkError = ex;
+ }
+ }
+
+ if (reqLib)
+ throw new UnsatisfiedLinkError(
+ String
+ .format(
+ "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with -Djna.library.path=%s. Last error:%s",
+ name, System.getProperty("jna.library.path"), linkError));
+ else {
+ System.out
+ .println(String
+ .format(
+ "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with -Djna.library.path=%s. Last error:%s",
+ name, System.getProperty("jna.library.path"), linkError));
+ return null;
+ }
+ }
+
+ public static synchronized GSLibraryLoader getInstance() {
+
+ if (null == instance) {
+ instance = new GSLibraryLoader();
+ instance.preLoadLibs();
+ }
+
+ return instance;
+ }
+
+}
diff --git a/java/libraries/video/src/processing/video/GSLibraryPath.java b/java/libraries/video/src/processing/video/GSLibraryPath.java
new file mode 100644
index 000000000..ad7bc4ac4
--- /dev/null
+++ b/java/libraries/video/src/processing/video/GSLibraryPath.java
@@ -0,0 +1,64 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import java.net.URL;
+
+import com.sun.jna.Platform;
+
+class GSLibraryPath {
+ // This method returns the folder inside which the gstreamer library folder
+ // should be located.
+ String get() {
+ URL url = this.getClass().getResource("GSLibraryPath.class");
+ if (url != null) {
+ // Convert URL to string, taking care of spaces represented by the "%20"
+ // string.
+ String path = url.toString().replace("%20", " ");
+ int n0 = path.indexOf('/');
+
+ int n1 = -1;
+ if (Platform.isWindows()) {
+ n1 = path.indexOf("/lib/GSVideo.jar"); // location of GSVideo.jar in
+ // exported apps.
+ if (n1 == -1)
+ n1 = path.indexOf("/GSVideo.jar"); // location of GSVideo.jar in
+ // library folder.
+
+ // In Windows, path string starts with "jar file/C:/..."
+ // so the substring up to the first / is removed.
+ n0++;
+ } else if (Platform.isMac()) {
+ // In Mac, getting the index of GSVideo.jar is enough in the case of sketches running from the PDE
+ // as well as exported applications.
+ n1 = path.indexOf("GSVideo.jar");
+ } else if (Platform.isLinux()) {
+ // TODO: what's up?
+ }
+
+ if ((-1 < n0) && (-1 < n1)) {
+ return path.substring(n0, n1);
+ } else {
+ return "";
+ }
+ }
+ return "";
+ }
+}
\ No newline at end of file
diff --git a/java/libraries/video/src/processing/video/GSPipeline.java b/java/libraries/video/src/processing/video/GSPipeline.java
new file mode 100644
index 000000000..8dd426afc
--- /dev/null
+++ b/java/libraries/video/src/processing/video/GSPipeline.java
@@ -0,0 +1,718 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import processing.core.*;
+
+import java.nio.*;
+import java.lang.reflect.*;
+
+import org.gstreamer.*;
+import org.gstreamer.Buffer;
+import org.gstreamer.elements.*;
+
+/**
+ * This class allows you to create a custom GStreamer pipeline.
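+ *
+ * A minimal usage sketch (assuming the videotestsrc element is available,
+ * as in the library's Test example):
+ *
+ * GSPipeline pipeline = new GSPipeline(this, "videotestsrc");
+ * pipeline.play();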
+ */
+public class GSPipeline extends PImage implements PConstants {
+ protected int streamType;
+ protected String pipeline;
+
+ protected boolean playing = false;
+ protected boolean paused = false;
+ protected boolean repeat = false;
+
+ protected int bufWidth;
+ protected int bufHeight;
+ protected int bufSize;
+
+ protected Pipeline gpipeline;
+
+ protected Method pipelineEventMethod;
+ protected Method copyBufferMethod;
+
+ protected Object eventHandler;
+ protected Object copyHandler;
+
+ protected boolean available;
+ protected boolean pipelineReady;
+
+ protected RGBDataSink rgbSink = null;
+ protected int[] copyPixels = null;
+
+ protected BufferDataSink natSink = null;
+ protected Buffer natBuffer = null;
+ protected boolean copyBufferMode = false;
+ protected String copyMask;
+
+ protected ByteDataSink dataSink = null;
+ protected byte[] copyData = null;
+ public byte[] data = null;
+
+ public String dataCaps;
+ protected String tempDataCaps;
+
+ protected boolean firstFrame = true;
+
+ /**
+ * Creates an instance of GSPipeline using the provided pipeline
+ * string.
+ *
+ * @param parent PApplet
+ * @param pstr String
+ */
+ public GSPipeline(PApplet parent, String pstr) {
+ super(0, 0, RGB);
+ initGStreamer(parent, pstr, GSVideo.VIDEO);
+ }
+
+ /**
+ * Creates an instance of GSPipeline using the provided pipeline
+ * string.
+ *
+ * @param parent PApplet
+ * @param pstr String
+ * @param type int
+ */
+ public GSPipeline(PApplet parent, String pstr, int type) {
+ super(0, 0, RGB);
+ initGStreamer(parent, pstr, type);
+ }
+
+ /**
+   * Releases the gstreamer resources associated with this pipeline object.
+   * The object shouldn't be used after calling this method.
+ */
+ public void delete() {
+ if (gpipeline != null) {
+ try {
+ if (gpipeline.isPlaying()) {
+ gpipeline.stop();
+ }
+ } catch (IllegalStateException e) {
+ System.err.println("error when deleting player, maybe some native resource is already disposed");
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ pixels = null;
+ data = null;
+
+ copyPixels = null;
+ if (rgbSink != null) {
+ rgbSink.removeListener();
+ rgbSink.dispose();
+ rgbSink = null;
+ }
+
+ copyData = null;
+ if (dataSink != null) {
+ dataSink.removeListener();
+ dataSink.dispose();
+ dataSink = null;
+ }
+
+ natBuffer = null;
+ if (natSink != null) {
+ natSink.removeListener();
+ natSink.dispose();
+ natSink = null;
+ }
+
+ gpipeline.dispose();
+ gpipeline = null;
+ }
+ }
+
+ /**
+ * Same as delete.
+ */
+ public void dispose() {
+ delete();
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ * The color conversion mask is automatically set to the one required to
+ * copy the frames to OpenGL.
+ *
+ * @param Object dest
+ */
+ public void setPixelDest(Object dest) {
+ copyHandler = dest;
+ if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
+ copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
+ } else {
+ copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
+ }
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ *
+ * @param Object dest
+ * @param String mask
+ */
+ public void setPixelDest(Object dest, String mask) {
+ copyHandler = dest;
+ copyMask = mask;
+ }
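+
+  /*
+   * The destination object is inspected via reflection when the pipeline is
+   * initialized. A sketch of the expected contract (hypothetical class and
+   * field names, for illustration only):
+   *
+   *   public class PixelDest {
+   *     GSPipeline source;
+   *
+   *     public void setPixelSource(Object src) {
+   *       source = (GSPipeline) src;
+   *     }
+   *
+   *     public void addPixelsToBuffer(Object nat, IntBuffer rgb, int w, int h) {
+   *       // ... copy rgb out to a texture or array, then release the buffer:
+   *       source.disposeBuffer(nat);
+   *     }
+   *   }
+   */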
+
+ /**
+   * Uses a generic object as the handler of the pipeline. This object should
+   * have a public pipelineEvent method that receives a GSPipeline argument.
+   * This method will be called upon a new frame read event.
+ *
+ */
+ public void setEventHandlerObject(Object obj) {
+ eventHandler = obj;
+
+ try {
+ pipelineEventMethod = eventHandler.getClass().getMethod("pipelineEvent",
+ new Class[] { GSPipeline.class });
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ }
+ }
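+
+  /*
+   * The parent sketch is the handler by default, so in a Processing sketch it
+   * is enough to define (sketch code, for illustration):
+   *
+   *   void pipelineEvent(GSPipeline p) {
+   *     p.read();
+   *   }
+   */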
+
+ /**
+   * Get the full length of the stream (in seconds).
+ *
+ * @return float
+ */
+ public float duration() {
+ float sec = gpipeline.queryDuration().toSeconds();
+ float nanosec = gpipeline.queryDuration().getNanoSeconds();
+ return sec + GSVideo.nanoSecToSecFrac(nanosec);
+ }
+
+ /**
+ * Return the current time in seconds.
+ *
+ * @return float
+ */
+ public float time() {
+ float sec = gpipeline.queryPosition().toSeconds();
+ float nanosec = gpipeline.queryPosition().getNanoSeconds();
+ return sec + GSVideo.nanoSecToSecFrac(nanosec);
+ }
+
+ /**
+ * Jump to a specific location (in seconds). The number is a float so
+ * fractions of seconds can be used.
+ *
+ * @param float where
+ */
+ public void jump(float where) {
+ if (playing) {
+ gpipeline.pause();
+ }
+
+ boolean res;
+ long start = GSVideo.secToNanoLong(where);
+    long stop = -1; // -1 means no explicit stop position (play to the end)
+
+ res = gpipeline.seek(1.0, Format.TIME, SeekFlags.FLUSH,
+ SeekType.SET, start, SeekType.SET, stop);
+
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+
+ if (playing) {
+ gpipeline.play();
+ }
+ }
+
+ /**
+ * Returns true if the stream is already producing frames.
+ *
+ * @return boolean
+ */
+ public boolean ready() {
+ return 0 < bufSize && pipelineReady;
+ }
+
+ /**
+   * Returns true or false depending on whether there is a new frame ready to
+ * be read.
+ *
+ * @return boolean
+ */
+ public boolean available() {
+ return available;
+ }
+
+ /**
+ * Returns whether the stream is playing or not.
+ *
+ * @return boolean
+ */
+ public boolean isPlaying() {
+ return playing;
+ }
+
+ /**
+ * Returns whether the stream is paused or not. If isPlaying() and isPaused()
+ * both return false it means that the stream is stopped.
+ *
+ * @return boolean
+ */
+ public boolean isPaused() {
+ return paused;
+ }
+
+ /**
+ * Returns whether the stream is looping or not.
+ *
+ * @return boolean
+ */
+ public boolean isLooping() {
+ return repeat;
+ }
+
+ /**
+ * Begin playing the stream, with no repeat.
+ */
+ public void play() {
+ if (!pipelineReady) {
+ initPipeline();
+ }
+
+ playing = true;
+ paused = false;
+ gpipeline.play();
+ }
+
+ /**
+ * Begin playing the stream, with repeat.
+ */
+ public void loop() {
+ repeat = true;
+ play();
+ }
+
+ /**
+ * Shut off the repeating loop.
+ */
+ public void noLoop() {
+ repeat = false;
+ }
+
+ /**
+ * Pause the stream at its current time.
+ */
+ public void pause() {
+ playing = false;
+ paused = true;
+ gpipeline.pause();
+ }
+
+ /**
+ * Stop the stream, and rewind.
+ */
+ public void stop() {
+ if (playing) {
+ goToBeginning();
+ playing = false;
+ }
+ paused = false;
+ gpipeline.stop();
+ }
+
+ /**
+ * Reads the current video frame.
+ */
+ public synchronized void read() {
+ if (streamType == GSVideo.VIDEO) {
+      // We loadPixels() first to ensure that at least we always have a non-null
+      // pixels array, even if it doesn't contain a valid image yet.
+ loadPixels();
+
+ if (copyBufferMode) {
+        // The native buffer from gstreamer is copied to the destination object.
+ if (natBuffer == null || copyBufferMethod == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ super.init(bufWidth, bufHeight, RGB);
+ loadPixels();
+ firstFrame = false;
+ }
+
+ IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
+ try {
+ copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ natBuffer = null;
+ } else {
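+        // Normal operation mode: the pixels just read from gstreamer
+        // are copied to the pixels array.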
+ if (copyPixels == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ super.init(bufWidth, bufHeight, RGB);
+ loadPixels();
+ firstFrame = false;
+ }
+
+ int[] temp = pixels;
+ pixels = copyPixels;
+ updatePixels();
+ copyPixels = temp;
+ }
+ } else if (streamType == GSVideo.RAW) {
+ if (copyData == null) {
+ return;
+ }
+
+ dataCaps = tempDataCaps;
+ if (data == null) {
+ data = new byte[copyData.length];
+ }
+
+ byte[] temp = data;
+ data = copyData;
+ copyData = temp;
+ }
+
+ available = false;
+ }
+
+ /**
+ * Goes to the first frame of the stream.
+ */
+ public void goToBeginning() {
+ boolean res = gpipeline.seek(ClockTime.fromNanos(0));
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+ }
+
+ /**
+ * Goes to the last frame of the stream.
+ */
+ public void goToEnd() {
+ long nanos = gpipeline.queryDuration().getNanoSeconds();
+ boolean res = gpipeline.seek(ClockTime.fromNanos(nanos));
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+ }
+
+ /**
+ * Get a float-value property from the pipeline.
+ *
+ * @param String name
+   * @return float
+ */
+ public float getProperty(String name) {
+ if (playing) {
+ return ((Number)gpipeline.get(name)).floatValue();
+ }
+ return 0;
+ }
+
+ /**
+ * Set a float-value property in the pipeline.
+ *
+ * @param String name
+ * @param float v
+ */
+ public void setProperty(String name, float v) {
+ if (playing) {
+ gpipeline.set(name, v);
+ }
+ }
+
+ /**
+ * Change the volume. Values are from 0 to 1. It will fail
+ * if the pipeline doesn't have a volume property available.
+ *
+ * @param float v
+ */
+ public void volume(float v) {
+ setProperty("volume", v);
+ }
+
+ /**
+ * Returns the text string used to build the pipeline.
+ *
+ * @return String
+ */
+ public String getPipeline() {
+ return pipeline;
+ }
+
+ protected void initGStreamer(PApplet parent, String pstr, int type) {
+ this.parent = parent;
+
+ gpipeline = null;
+
+ GSVideo.init();
+
+ // register methods
+ parent.registerDispose(this);
+
+ setEventHandlerObject(parent);
+
+ pipeline = pstr;
+
+ streamType = type;
+ bufWidth = bufHeight = bufSize = 0;
+ pipelineReady = false;
+ }
+
+ protected void initPipeline() {
+ // Determining if the last element is fakesink or filesink.
+ int idx;
+ String lastElem, lastElemName;
+ String[] parts;
+
+ idx = pipeline.lastIndexOf('!');
+ lastElem = pipeline.substring(idx + 1, pipeline.length()).trim();
+
+ parts = lastElem.split(" ");
+ if (0 < parts.length)
+ lastElemName = parts[0];
+ else
+ lastElemName = "";
+
+ boolean fakeSink = lastElemName.equals("fakesink");
+ boolean fileSink = lastElemName.equals("filesink");
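+    // For example, "videotestsrc ! fakesink" already ends in a sink element,
+    // so the pipeline is launched as is, without appending an RGB sink below.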
+
+ if (PApplet.platform == WINDOWS) {
+      // Single backslashes are replaced by double backslashes; otherwise
+      // gstreamer won't understand file paths.
+ pipeline = pipeline.replace("\\", "\\\\");
+ }
+
+ if (fakeSink || fileSink) {
+ // If the pipeline ends in a fakesink or filesink element, the RGBDataSink
+ // is not added at the end of it...
+ gpipeline = Pipeline.launch(pipeline);
+
+ } else {
+ if (streamType == GSVideo.VIDEO) {
+        // For video pipelines, we add an RGBDataSink or BufferDataSink element at the end.
+
+ if (copyHandler != null) {
+ try {
+ copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
+ new Class[] { Object.class, IntBuffer.class, int.class, int.class });
+ copyBufferMode = true;
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+ try {
+ Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
+ meth.invoke(copyHandler, new Object[] { this });
+ } catch (Exception e) {
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+ String caps = " ! ffmpegcolorspace ! video/x-raw-rgb, bpp=32, depth=24, endianness=(int)4321, ";
+ caps += copyMask;
+
+ StringBuilder finalPipeStr = new StringBuilder(pipeline);
+ finalPipeStr.append(caps);
+ finalPipeStr.append(" ! fakesink name=nat");
+
+ pipeline = finalPipeStr.toString();
+ gpipeline = Pipeline.launch(pipeline);
+ natSink = new BufferDataSink("nat", gpipeline,
+ new BufferDataSink.Listener() {
+ public void bufferFrame(int w, int h, Buffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+
+ natSink.setAutoDisposeBuffer(false);
+ }
+ }
+ }
+
+ if (!copyBufferMode) {
+ // Making sure we are using the right color space and color masks:
+ String caps = " ! ffmpegcolorspace ! video/x-raw-rgb, bpp=32, depth=24, endianness=(int)4321, ";
+ // JNA creates ByteBuffer using native byte order, set masks according to that.
+ if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN)
+ caps += "red_mask=(int)0xFF00, green_mask=(int)0xFF0000, blue_mask=(int)0xFF000000";
+ else
+ caps += "red_mask=(int)0xFF0000, green_mask=(int)0xFF00, blue_mask=(int)0xFF";
+
+ StringBuilder finalPipeStr = new StringBuilder(pipeline);
+ finalPipeStr.append(caps);
+ finalPipeStr.append(" ! fakesink name=rgb");
+
+ pipeline = finalPipeStr.toString();
+ gpipeline = Pipeline.launch(pipeline);
+ rgbSink = new RGBDataSink("rgb", gpipeline, new RGBDataSink.Listener() {
+ public void rgbFrame(boolean pre, int w, int h, IntBuffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+
+ // Setting direct buffer passing in the video sink, so no new buffers are created
+ // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
+ // out this one).
+ rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
+
+ // No need for videoSink.dispose(), because the append() doesn't increment the
+ // refcount of the videoSink object.
+ }
+
+ } else if (streamType == GSVideo.AUDIO) {
+ // For audio pipelines, we launch the pipeline as it is.
+ gpipeline = Pipeline.launch(pipeline);
+ } else if (streamType == GSVideo.RAW) {
+ StringBuilder finalPipeStr = new StringBuilder(pipeline);
+ finalPipeStr.append(" ! fakesink name=data");
+
+ pipeline = finalPipeStr.toString();
+ gpipeline = Pipeline.launch(pipeline);
+ dataSink = new ByteDataSink("data", gpipeline,
+ new ByteDataSink.Listener() {
+ public void byteFrame(boolean pre, Caps caps, int size, ByteBuffer buffer) {
+ invokeEvent(caps, size, buffer);
+ }
+ });
+ dataSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
+ } else {
+ System.err.println("Unrecognized stream type: Please use VIDEO, AUDIO, or RAW.");
+ return;
+ }
+ }
+
+ // Creating bus to handle end-of-stream event.
+ Bus bus = gpipeline.getBus();
+ bus.connect(new Bus.EOS() {
+ public void endOfStream(GstObject element) {
+ eosEvent();
+ }
+ });
+
+ pipelineReady = true;
+ }
+
+ protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+ bufSize = w * h;
+
+ if (copyPixels == null) {
+ copyPixels = new int[w * h];
+ }
+ buffer.rewind();
+ try {
+ buffer.get(copyPixels);
+ } catch (BufferUnderflowException e) {
+ e.printStackTrace();
+ copyPixels = null;
+ return;
+ }
+
+ // Creates a pipelineEvent.
+ if (pipelineEventMethod != null) {
+ try {
+ pipelineEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling pipelineEvent() for " + pipeline);
+ e.printStackTrace();
+ pipelineEventMethod = null;
+ }
+ }
+ }
+
+ protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+ bufSize = w * h;
+ natBuffer = buffer;
+
+ if (playing) {
+      // Creates a pipelineEvent.
+ if (pipelineEventMethod != null) {
+ try {
+ pipelineEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling movieEvent() for " + pipeline);
+ e.printStackTrace();
+ pipelineEventMethod = null;
+ }
+ }
+ }
+ }
+
+ protected synchronized void invokeEvent(Caps caps, int n, ByteBuffer buffer) {
+ available = true;
+ bufSize = n;
+
+ tempDataCaps = caps.toString();
+
+ if (copyData == null) {
+ copyData = new byte[n];
+ }
+ buffer.rewind();
+ try {
+ buffer.get(copyData);
+ } catch (BufferUnderflowException e) {
+ e.printStackTrace();
+ copyData = null;
+ return;
+ }
+
+ if (playing) {
+      // Creates a pipelineEvent.
+ if (pipelineEventMethod != null) {
+ try {
+ pipelineEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling pipelineEvent() for " + pipeline);
+ e.printStackTrace();
+ pipelineEventMethod = null;
+ }
+ }
+ }
+ }
+
+ public synchronized void disposeBuffer(Object buf) {
+ ((Buffer)buf).dispose();
+ }
+
+ protected void eosEvent() {
+ if (repeat) {
+ goToBeginning();
+ } else {
+ playing = false;
+ }
+ }
+}
diff --git a/java/libraries/video/src/processing/video/GSPlayer.java b/java/libraries/video/src/processing/video/GSPlayer.java
new file mode 100644
index 000000000..66775fb57
--- /dev/null
+++ b/java/libraries/video/src/processing/video/GSPlayer.java
@@ -0,0 +1,833 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import processing.core.*;
+
+import java.awt.Dimension;
+import java.io.*;
+import java.nio.*;
+import java.util.concurrent.TimeUnit;
+import java.lang.reflect.*;
+
+import org.gstreamer.*;
+import org.gstreamer.Buffer;
+import org.gstreamer.elements.*;
+
+/**
+ * This class makes it possible to load and play generic media content through
+ * playbin (not only movies, but also audio files, etc).
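+ * <p>
+ * A minimal audio playback sketch (illustrative only; the filename is just a
+ * placeholder):
+ * <pre>
+ * import codeanticode.gsvideo.*;
+ *
+ * GSPlayer player;
+ *
+ * void setup() {
+ *   player = new GSPlayer(this, "groove.mp3", GSVideo.AUDIO);
+ *   player.loop();
+ * }
+ * </pre>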
+ */
+public class GSPlayer extends PImage implements PConstants {
+ protected int streamType;
+ protected String filename;
+
+ protected boolean playing = false;
+ protected boolean paused = false;
+ protected boolean repeat = false;
+
+ protected float fps;
+ protected float rate;
+ protected int bufWidth;
+ protected int bufHeight;
+ protected int bufSize;
+
+ protected PlayBin2 gplayer;
+
+ protected Method playerEventMethod;
+ protected Method copyBufferMethod;
+
+ protected Object eventHandler;
+ protected Object copyHandler;
+
+ protected boolean available;
+ protected boolean sinkReady;
+
+ protected RGBDataAppSink rgbSink = null;
+ protected int[] copyPixels = null;
+
+ protected BufferDataAppSink natSink = null;
+ protected Buffer natBuffer = null;
+ protected boolean copyBufferMode = false;
+ protected String copyMask;
+
+ protected ByteDataAppSink dataSink = null;
+ protected byte[] copyData = null;
+ public byte[] data = null;
+
+ public String dataCaps;
+ protected String tempDataCaps;
+
+ protected boolean firstFrame = true;
+
+ /**
+ * Creates an instance of GSPlayer loading the media file from filename,
+ * assuming that it is a video file.
+ *
+ * @param parent PApplet
+ * @param filename String
+ */
+ public GSPlayer(PApplet parent, String filename) {
+ super(0, 0, RGB);
+ initGStreamer(parent, filename, GSVideo.VIDEO);
+ }
+
+ /**
+ * Creates an instance of GSPlayer loading the media file from filename,
+ * and trying to understand it as the indicated type.
+ *
+ * @param parent PApplet
+ * @param filename String
+ * @param type int
+ */
+ public GSPlayer(PApplet parent, String filename, int type) {
+ super(0, 0, RGB);
+ initGStreamer(parent, filename, type);
+ }
+
+ /**
+   * Releases the gstreamer resources associated with this player object.
+   * The object shouldn't be used after calling this method.
+ */
+ public void delete() {
+ if (gplayer != null) {
+ try {
+ if (gplayer.isPlaying()) {
+ gplayer.stop();
+ }
+ } catch (IllegalStateException e) {
+ System.err.println("error when deleting player, maybe some native resource is already disposed");
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ pixels = null;
+ data = null;
+
+ copyPixels = null;
+ if (rgbSink != null) {
+ rgbSink.removeListener();
+ rgbSink.dispose();
+ rgbSink = null;
+ }
+
+ copyData = null;
+ if (dataSink != null) {
+ dataSink.removeListener();
+ dataSink.dispose();
+ dataSink = null;
+ }
+
+ natBuffer = null;
+ if (natSink != null) {
+ natSink.removeListener();
+ natSink.dispose();
+ natSink = null;
+ }
+
+ gplayer.dispose();
+ gplayer = null;
+ }
+ }
+
+ /**
+ * Same as delete.
+ */
+ public void dispose() {
+ delete();
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ * The color conversion mask is automatically set to the one required to
+ * copy the frames to OpenGL.
+ *
+ * @param Object dest
+ */
+ public void setPixelDest(Object dest) {
+ copyHandler = dest;
+ if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
+ copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
+ } else {
+ copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
+ }
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ *
+ * @param Object dest
+ * @param String mask
+ */
+ public void setPixelDest(Object dest, String mask) {
+ copyHandler = dest;
+ copyMask = mask;
+ }
+
+ /**
+   * Uses a generic object as the handler of the media file. This object should
+   * have a public playerEvent method that receives a GSPlayer argument.
+   * This method will be called upon a new frame read event.
+ *
+ */
+ public void setEventHandlerObject(Object obj) {
+ eventHandler = obj;
+
+ try {
+ playerEventMethod = eventHandler.getClass().getMethod("playerEvent",
+ new Class[] { GSPlayer.class });
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ }
+ }
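+
+  /*
+   * The parent sketch is the handler by default, so in a Processing sketch it
+   * is enough to define (sketch code, for illustration):
+   *
+   *   void playerEvent(GSPlayer p) {
+   *     p.read();
+   *   }
+   */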
+
+ /**
+ * Get the width of the source video. Note: calling this method repeatedly
+ * can slow down playback performance.
+ *
+ * @return int
+ */
+ public int getSourceWidth() {
+ Dimension dim = gplayer.getVideoSize();
+ if (dim != null) {
+ return dim.width;
+ } else {
+ return 0;
+ }
+ }
+
+ /**
+ * Get the height of the source video. Note: calling this method repeatedly
+ * can slow down playback performance.
+ *
+ * @return int
+ */
+ public int getSourceHeight() {
+ Dimension dim = gplayer.getVideoSize();
+ if (dim != null) {
+ return dim.height;
+ } else {
+ return 0;
+ }
+ }
+
+ /**
+ * Get the original framerate of the source video. Note: calling this method repeatedly
+ * can slow down playback performance.
+ *
+ * @return float
+ */
+ public float getSourceFrameRate() {
+ return (float)gplayer.getVideoSinkFrameRate();
+ }
+
+ /**
+   * Set how often new frames are to be read from the stream. Does not actually
+   * set the speed of the playback; that's handled by the speed() method.
+   *
+   * @param float ifps
+ * @see speed
+ */
+ public void frameRate(float ifps) {
+    // We calculate the target ratio only when both the
+    // current and target framerates are valid (greater than
+    // zero); otherwise we leave it as 1.
+ float f = (0 < ifps && 0 < fps) ? ifps / fps : 1;
+
+ if (playing) {
+ gplayer.pause();
+ }
+
+ long t = gplayer.queryPosition(TimeUnit.NANOSECONDS);
+
+ boolean res;
+ long start, stop;
+ if (rate > 0) {
+ start = t;
+ stop = -1;
+ } else {
+ start = 0;
+ stop = t;
+ }
+
+ res = gplayer.seek(rate * f, Format.TIME, SeekFlags.FLUSH,
+ SeekType.SET, start, SeekType.SET, stop);
+
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+
+ if (playing) {
+ gplayer.play();
+ }
+
+ fps = ifps;
+ }
+
+ /**
+ * Set a multiplier for how fast/slow the movie should be run. The default is
+ * 1.0. speed(2) will play the movie at double speed (2x). speed(0.5) will
+ * play at half speed. speed(-1) will play backwards at regular speed.
+ *
+ * @param float irate
+ */
+ public void speed(float irate) {
+    // If speed() is called continuously with very similar rate values, the
+    // resulting seeks can make playback sluggish. This condition attempts
+    // to take care of that.
+ if (PApplet.abs(rate - irate) > 0.1) {
+ rate = irate;
+ frameRate(fps); // The framerate is the same, but the rate (speed) could be different.
+ }
+ }
+
+ /**
+ * Get the full length of the current stream (in seconds).
+ *
+ * @return float
+ */
+ public float duration() {
+ float sec = gplayer.queryDuration().toSeconds();
+ float nanosec = gplayer.queryDuration().getNanoSeconds();
+ return sec + GSVideo.nanoSecToSecFrac(nanosec);
+ }
+
+ /**
+ * Return the current time in seconds.
+ *
+ * @return float
+ */
+ public float time() {
+ float sec = gplayer.queryPosition().toSeconds();
+ float nanosec = gplayer.queryPosition().getNanoSeconds();
+ return sec + GSVideo.nanoSecToSecFrac(nanosec);
+ }
+
+ /**
+   * Get the full length of the current stream (in frames).
+   *
+   * @return long
+ */
+ public long length() {
+    return (long)(duration() * getSourceFrameRate());
+ }
+
+ /**
+ * Return the current frame.
+ *
+ * @return int
+ */
+ public int frame() {
+ return (int)(time() * getSourceFrameRate());
+ }
+
+ /**
+ * Jump to a specific location (in seconds). The number is a float so
+ * fractions of seconds can be used.
+ *
+ * @param float where
+ */
+ public void jump(float where) {
+ if (playing) {
+ gplayer.pause();
+ }
+
+ boolean res;
+ long start = GSVideo.secToNanoLong(where);
+    long stop = -1; // -1 means no explicit stop position (play to the end)
+
+ res = gplayer.seek(1.0, Format.TIME, SeekFlags.FLUSH,
+ SeekType.SET, start, SeekType.SET, stop);
+
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+
+ if (playing) {
+ gplayer.play();
+ }
+ }
+
+ /**
+ * Jump to a specific frame.
+ *
+ * @param frame int
+ */
+ public void jump(int frame) {
+ float srcFramerate = getSourceFrameRate();
+
+ // The duration of a single frame:
+ float frameDuration = 1 / srcFramerate;
+
+ // We move to the middle of the frame by adding 0.5:
+ float where = (frame + 0.5f) * frameDuration;
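+    // For example, at 30 fps, jump(90) gives where = 90.5 / 30, about 3.017 seconds.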
+
+ // Taking into account border effects:
+ float diff = duration() - where;
+ if (diff < 0) {
+ where += diff - 0.25f * frameDuration;
+ }
+
+ jump(where);
+ }
+
+ /**
+ * Returns true if the stream is already producing frames.
+ *
+ * @return boolean
+ */
+ public boolean ready() {
+ return 0 < bufSize && sinkReady;
+ }
+
+ /**
+   * Returns true or false depending on whether there is a new frame ready to
+ * be read.
+ *
+ * @return boolean
+ */
+ public boolean available() {
+ return available;
+ }
+
+ /**
+ * Returns whether the media is playing or not.
+ *
+ * @return boolean
+ */
+ public boolean isPlaying() {
+ return playing;
+ }
+
+ /**
+ * Returns whether the media is paused or not. If isPlaying() and isPaused()
+ * both return false it means that the media is stopped.
+ *
+ * @return boolean
+ */
+ public boolean isPaused() {
+ return paused;
+ }
+
+ /**
+ * Returns whether the media is looping or not.
+ *
+ * @return boolean
+ */
+ public boolean isLooping() {
+ return repeat;
+ }
+
+ /**
+ * Begin playing the stream, with no repeat.
+ */
+ public void play() {
+ if (!sinkReady) {
+ initSink();
+ }
+
+ playing = true;
+ paused = false;
+ gplayer.play();
+ }
+
+ /**
+ * Begin playing the stream, with repeat.
+ */
+ public void loop() {
+ repeat = true;
+ play();
+ }
+
+ /**
+ * Shut off the repeating loop.
+ */
+ public void noLoop() {
+ repeat = false;
+ }
+
+ /**
+ * Pause the stream at its current time.
+ */
+ public void pause() {
+ playing = false;
+ paused = true;
+ gplayer.pause();
+ }
+
+ /**
+ * Stop the stream, and rewind.
+ */
+ public void stop() {
+ if (playing) {
+ goToBeginning();
+ playing = false;
+ }
+ paused = false;
+ gplayer.stop();
+ }
+
+ /**
+ * Reads the current video frame or data buffer.
+ */
+ public synchronized void read() {
+ if (fps <= 0) {
+      // Framerate not set yet, so we obtain it from the stream,
+      // which is already playing since we are in read().
+ fps = getSourceFrameRate();
+ }
+
+ if (streamType == GSVideo.VIDEO) {
+      // We loadPixels() first to ensure that at least we always have a non-null
+      // pixels array, even if it doesn't contain a valid image yet.
+ loadPixels();
+
+ if (copyBufferMode) {
+      // The native buffer from gstreamer is copied to the destination object.
+ if (natBuffer == null || copyBufferMethod == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ super.init(bufWidth, bufHeight, RGB);
+ loadPixels();
+ firstFrame = false;
+ }
+
+ IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
+ try {
+ copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ natBuffer = null;
+ } else {
+ // Normal operation mode: the pixels just read from gstreamer
+ // are copied to the pixels array.
+ if (copyPixels == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ super.init(bufWidth, bufHeight, RGB);
+ loadPixels();
+ firstFrame = false;
+ }
+
+ int[] temp = pixels;
+ pixels = copyPixels;
+ updatePixels();
+ copyPixels = temp;
+ }
+ } else if (streamType == GSVideo.RAW) {
+ if (copyData == null) {
+ return;
+ }
+
+ dataCaps = tempDataCaps;
+ if (data == null) {
+ data = new byte[copyData.length];
+ }
+
+ byte[] temp = data;
+ data = copyData;
+ copyData = temp;
+ }
+
+ available = false;
+ }
+
+ /**
+ * Goes to the first frame of the stream.
+ */
+  public void goToBeginning() {
+    boolean res = gplayer.seek(ClockTime.fromNanos(0));
+    if (!res) {
+      System.err.println("Seek operation failed.");
+    }
+  }
+
+ /**
+ * Change the volume. Values are from 0 to 1.
+ *
+ * @param float v
+ */
+ public void volume(float v) {
+ if (playing) {
+ gplayer.setVolume(v);
+ }
+ }
+
+ /**
+ * Returns the text string containing the filename of the media loaded.
+ *
+ * @return String
+ */
+ public String getFilename() {
+ return filename;
+ }
+
+ protected void initGStreamer(PApplet parent, String filename, int type) {
+ this.parent = parent;
+ gplayer = null;
+
+ File file;
+
+ GSVideo.init();
+
+ // first check to see if this can be read locally from a file.
+ try {
+ try {
+ // first try a local file using the dataPath. usually this will
+ // work ok, but sometimes the dataPath is inside a jar file,
+ // which is less fun, so this will crap out.
+ file = new File(parent.dataPath(filename));
+ if (file.exists()) {
+ gplayer = new PlayBin2("GSPlayer");
+ gplayer.setInputFile(file);
+ }
+ } catch (Exception e) {
+ } // ignored
+
+ // read from a file just hanging out in the local folder.
+ // this might happen when the video library is used with some
+ // other application, or the person enters a full path name
+ if (gplayer == null) {
+ try {
+ file = new File(filename);
+ if (file.exists()) {
+ gplayer = new PlayBin2("GSPlayer");
+ gplayer.setInputFile(file);
+ } else {
+ System.err.println("File " + filename + " does not exist. Please check location.");
+ }
+ } catch (Exception e) {
+ }
+ }
+ // Network read needs to be implemented...
+ } catch (SecurityException se) {
+ // online, whups. catch the security exception out here rather than
+ // doing it three times (or whatever) for each of the cases above.
+ }
+
+ if (gplayer == null) {
+ parent.die("Could not load media file " + filename, null);
+ }
+
+ // we've got a valid media file! let's rock.
+ try {
+ this.filename = filename; // for error messages
+
+ // register methods
+ parent.registerDispose(this);
+
+ setEventHandlerObject(parent);
+
+ rate = 1.0f;
+ fps = -1;
+ sinkReady = false;
+ bufWidth = bufHeight = bufSize = 0;
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ streamType = type;
+ }
+
+ protected void initSink() {
+ if (streamType == GSVideo.VIDEO) {
+
+ if (copyHandler != null) {
+ try {
+ copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
+ new Class[] { Object.class, IntBuffer.class, int.class, int.class });
+ copyBufferMode = true;
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+
+ try {
+ Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
+ meth.invoke(copyHandler, new Object[] { this });
+ } catch (Exception e) {
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+ natSink = new BufferDataAppSink("nat", copyMask,
+ new BufferDataAppSink.Listener() {
+ public void bufferFrame(int w, int h, Buffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+
+ natSink.setAutoDisposeBuffer(false);
+ gplayer.setVideoSink(natSink);
+ // The setVideoSink() method sets the videoSink as a property of the PlayBin,
+ // which increments the refcount of the videoSink element. Disposing here once
+ // to decrement the refcount.
+ natSink.dispose();
+ }
+ }
+ }
+
+ if (!copyBufferMode) {
+ rgbSink = new RGBDataAppSink("rgb",
+ new RGBDataAppSink.Listener() {
+ public void rgbFrame(int w, int h, IntBuffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+ // Setting direct buffer passing in the video sink, so no new buffers are created
+ // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
+ // out this one).
+ rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
+ gplayer.setVideoSink(rgbSink);
+ // The setVideoSink() method sets the videoSink as a property of the PlayBin,
+ // which increments the refcount of the videoSink element. Disposing here once
+ // to decrement the refcount.
+ rgbSink.dispose();
+ }
+ } else if (streamType == GSVideo.AUDIO) {
+ gplayer.setVideoSink(ElementFactory.make("fakesink", "audiosink"));
+ } else if (streamType == GSVideo.RAW) {
+ dataSink = new ByteDataAppSink("data",
+ new ByteDataAppSink.Listener() {
+ public void byteFrame(Caps caps, int size, ByteBuffer buffer) {
+ invokeEvent(caps, size, buffer);
+ }
+ });
+ dataSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
+ gplayer.setVideoSink(dataSink);
+ dataSink.dispose();
+ } else {
+ System.err.println("Unrecognized stream type: Please use VIDEO, AUDIO, or RAW.");
+ return;
+ }
+
+ // Creating bus to handle end-of-stream event.
+ Bus bus = gplayer.getBus();
+ bus.connect(new Bus.EOS() {
+ public void endOfStream(GstObject element) {
+ eosEvent();
+ }
+ });
+
+ sinkReady = true;
+ }
+
+ protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+ bufSize = w * h;
+
+ if (copyPixels == null) {
+ copyPixels = new int[w * h];
+ }
+ buffer.rewind();
+ try {
+ buffer.get(copyPixels);
+ } catch (BufferUnderflowException e) {
+ e.printStackTrace();
+ copyPixels = null;
+ return;
+ }
+
+ if (playing) {
+ // Creates a playerEvent.
+ if (playerEventMethod != null) {
+ try {
+ playerEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling playerEvent() for " + filename);
+ e.printStackTrace();
+ playerEventMethod = null;
+ }
+ }
+ }
+ }
+
+ protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+ bufSize = w * h;
+ natBuffer = buffer;
+
+ if (playing) {
+ // Creates a playerEvent.
+ if (playerEventMethod != null) {
+ try {
+ playerEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling movieEvent() for " + filename);
+ e.printStackTrace();
+ playerEventMethod = null;
+ }
+ }
+ }
+ }
+
+ protected synchronized void invokeEvent(Caps caps, int n, ByteBuffer buffer) {
+ available = true;
+ bufSize = n;
+
+ tempDataCaps = caps.toString();
+
+ if (copyData == null) {
+ copyData = new byte[n];
+ }
+ buffer.rewind();
+ try {
+ buffer.get(copyData);
+ } catch (BufferUnderflowException e) {
+ e.printStackTrace();
+ copyData = null;
+ return;
+ }
+
+ if (playing) {
+ // Creates a playerEvent.
+ if (playerEventMethod != null) {
+ try {
+ playerEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling playerEvent() for " + filename);
+ e.printStackTrace();
+ playerEventMethod = null;
+ }
+ }
+ }
+ }
+
+ public synchronized void disposeBuffer(Object buf) {
+ ((Buffer)buf).dispose();
+ }
+
+ protected void eosEvent() {
+ if (repeat) {
+ goToBeginning();
+ } else {
+ playing = false;
+ }
+ }
+}
diff --git a/java/libraries/video/src/processing/video/GSVideo.java b/java/libraries/video/src/processing/video/GSVideo.java
new file mode 100644
index 000000000..c2a06679f
--- /dev/null
+++ b/java/libraries/video/src/processing/video/GSVideo.java
@@ -0,0 +1,249 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import org.gstreamer.*;
+
+// TODO: update to latest gstreamer on windows, jmcvideo on mac, seeking in gspipeline, check sf tracker
+
+import processing.core.PApplet;
+import processing.core.PConstants;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * This class contains some basic functions used by the rest of the classes in
+ * this library.
+ */
+public class GSVideo implements PConstants {
+ protected static String VERSION_STRING = "0.9";
+ protected static long INSTANCES_COUNT = 0;
+
+ protected static String gstreamerBinPath = "";
+ protected static String gstreamerPluginsPath = "";
+
+ protected static boolean defaultGLibContext = false;
+
+  // Priority is given to the global install of GStreamer if this is set to true.
+ public static boolean globalGStreamer = true;
+
+ // Direct buffer pass enabled by default.
+ public static boolean passDirectBuffer = true;
+
+ public static String globalGStreamerPath;
+ public static String globalPluginsFolder = "gstreamer-0.10";
+ // Default locations of the global install of gstreamer for each platform:
+ static {
+ if (PApplet.platform == MACOSX) {
+ globalGStreamerPath = "/System/Library/Frameworks/GStreamer.framework/Versions/Current/lib";
+ } else if (PApplet.platform == WINDOWS) {
+ globalGStreamerPath = "";
+ //globalGStreamerPath = "C://Program Files (x86)//OSSBuild//GStreamer//v0.10.7//lib";
+ } else if (PApplet.platform == LINUX) {
+ globalGStreamerPath = "/usr/lib";
+    }
+ }
+
+ // Default location of the local install of gstreamer. Suggested by Charles Bourasseau.
+  // When it is left as an empty string, GSVideo will attempt to use the path from
+  // GSLibraryPath.get(); otherwise it will be used as the path to the folder where
+  // libgstreamer.dylib and the other library files are located.
+ public static String localGStreamerPath = "";
+ public static String localPluginsFolder = "plugins";
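+
+  // For example, a sketch can point GSVideo to a custom local install before
+  // creating any video object (not supported on Linux; the path below is only
+  // a placeholder):
+  //
+  //   GSVideo.globalGStreamer = false;
+  //   GSVideo.localGStreamerPath = "/opt/gstreamer/lib";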
+
+ // Some constants to identify AUDIO, VIDEO and RAW streams.
+ static public final int AUDIO = 0;
+ static public final int VIDEO = 1;
+ static public final int RAW = 2;
+
+ public static void init() {
+ if (INSTANCES_COUNT == 0) {
+ PApplet.println("GSVideo version: " + VERSION_STRING);
+ initImpl();
+ }
+ INSTANCES_COUNT++;
+ }
+
+ public static void restart() {
+ removePlugins();
+ Gst.deinit();
+ initImpl();
+ }
+
+ protected static void initImpl() {
+ if (PApplet.platform == LINUX) {
+ // Linux only supports global gstreamer for now.
+ globalGStreamer = true;
+ setLinuxPath();
+ } else if (PApplet.platform == WINDOWS) {
+ setWindowsPath();
+ } else if (PApplet.platform == MACOSX) {
+ setMacOSXPath();
+ }
+
+ if (!gstreamerBinPath.equals("")) {
+ System.setProperty("jna.library.path", gstreamerBinPath);
+ }
+
+ if ((PApplet.platform == LINUX) && !globalGStreamer) {
+ System.err.println("Loading local version of GStreamer not supported in Linux at this time.");
+ }
+
+ if ((PApplet.platform == WINDOWS) && !globalGStreamer) {
+ GSLibraryLoader loader = GSLibraryLoader.getInstance();
+ if (loader == null) {
+ System.err.println("Cannot load local version of GStreamer libraries.");
+ }
+ }
+
+ if ((PApplet.platform == MACOSX) && !globalGStreamer) {
+ // Nothing to do here, since the dylib mechanism in OSX doesn't require the
+ // library loader.
+ }
+
+ String[] args = { "" };
+ Gst.setUseDefaultContext(defaultGLibContext);
+ Gst.init("GSVideo", args);
+
+ addPlugins();
+ }
+
+ protected static void addPlugins() {
+ if (!gstreamerPluginsPath.equals("")) {
+ Registry reg = Registry.getDefault();
+ boolean res;
+ res = reg.scanPath(gstreamerPluginsPath);
+ if (!res) {
+ System.err.println("Cannot load GStreamer plugins from " + gstreamerPluginsPath);
+ }
+ }
+ }
+
+ protected static void removePlugins() {
+ Registry reg = Registry.getDefault();
+    List<Plugin> list = reg.getPluginList();
+    for (int i = 0; i < list.size(); i++) {
+      Plugin plg = list.get(i);
+      reg.removePlugin(plg);
+    }
+ }
+
+ protected static void setLinuxPath() {
+ if (globalGStreamer && lookForGlobalGStreamer()) {
+ gstreamerBinPath = "";
+ gstreamerPluginsPath = "";
+ } else {
+ globalGStreamer = false;
+ if (localGStreamerPath.equals("")) {
+ GSLibraryPath libPath = new GSLibraryPath();
+ String path = libPath.get();
+ gstreamerBinPath = path + "/gstreamer/linux";
+ gstreamerPluginsPath = path + "/gstreamer/linux/" + localPluginsFolder;
+ } else {
+ gstreamerBinPath = localGStreamerPath;
+ gstreamerPluginsPath = localGStreamerPath + "/" + localPluginsFolder;
+ }
+ }
+ }
+
+ protected static void setWindowsPath() {
+ if (globalGStreamer && lookForGlobalGStreamer()) {
+ gstreamerBinPath = "";
+ gstreamerPluginsPath = "";
+ } else {
+ globalGStreamer = false;
+ if (localGStreamerPath.equals("")) {
+ GSLibraryPath libPath = new GSLibraryPath();
+ String path = libPath.get();
+ gstreamerBinPath = path + "\\gstreamer\\win";
+ gstreamerPluginsPath = path + "\\gstreamer\\win\\" + localPluginsFolder;
+ } else {
+ gstreamerBinPath = localGStreamerPath;
+ gstreamerPluginsPath = localGStreamerPath + "\\" + localPluginsFolder;
+ }
+ }
+ }
+
+ protected static void setMacOSXPath() {
+ if (globalGStreamer && lookForGlobalGStreamer()) {
+ gstreamerBinPath = globalGStreamerPath;
+ gstreamerPluginsPath = globalGStreamerPath + "/" + globalPluginsFolder;
+ } else {
+ globalGStreamer = false;
+ if (localGStreamerPath.equals("")) {
+ GSLibraryPath libPath = new GSLibraryPath();
+ String path = libPath.get();
+ gstreamerBinPath = path + "/gstreamer/macosx";
+ gstreamerPluginsPath = path + "/gstreamer/macosx/" + localPluginsFolder;
+ } else {
+ gstreamerBinPath = localGStreamerPath;
+ gstreamerPluginsPath = localGStreamerPath + "/" + localPluginsFolder;
+ }
+ }
+ }
+
+ protected static boolean lookForGlobalGStreamer() {
+ String[] searchPaths = null;
+ if (!globalGStreamerPath.equals("")) {
+ searchPaths = new String[] {globalGStreamerPath};
+ }
+
+ if (searchPaths == null) {
+ String lpaths = System.getProperty("java.library.path");
+ String pathsep = System.getProperty("path.separator");
+ searchPaths = lpaths.split(pathsep);
+ }
+
+ for (int i = 0; i < searchPaths.length; i++) {
+ String path = searchPaths[i];
+ if (libgstreamerPresent(path, "libgstreamer")) {
+ globalGStreamerPath = path;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ protected static boolean libgstreamerPresent(String dir, String file) {
+ File libPath = new File(dir);
+ String[] files = libPath.list();
+ if (files != null) {
+ for (int i = 0; i < files.length; i++) {
+ if (-1 < files[i].indexOf(file)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ static protected float nanoSecToSecFrac(float nanosec) {
+    // Dividing by 1e9 converts nanoseconds to fractional seconds.
+    return nanosec / 1E9f;
+ }
+
+ static protected long secToNanoLong(float sec) {
+    // Multiplying by 1e9 converts seconds to nanoseconds.
+    return (long)(sec * 1E9);
+ }
+}
diff --git a/java/libraries/video/src/processing/video/Movie.java b/java/libraries/video/src/processing/video/Movie.java
new file mode 100644
index 000000000..6cf8b770f
--- /dev/null
+++ b/java/libraries/video/src/processing/video/Movie.java
@@ -0,0 +1,750 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import processing.core.*;
+
+import java.awt.Dimension;
+import java.io.*;
+import java.nio.*;
+import java.util.concurrent.TimeUnit;
+import java.lang.reflect.*;
+
+import org.gstreamer.*;
+import org.gstreamer.Buffer;
+import org.gstreamer.elements.*;
+
+/**
+ * This class makes it possible to load movies and to play them back in many
+ * ways including looping, pausing, and changing speed.
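+ * <p>
+ * A minimal playback sketch (illustrative only; the movie filename is just a
+ * placeholder):
+ * <pre>
+ * import codeanticode.gsvideo.*;
+ *
+ * GSMovie movie;
+ *
+ * void setup() {
+ *   size(320, 240);
+ *   movie = new GSMovie(this, "station.mov");
+ *   movie.loop();
+ * }
+ *
+ * void movieEvent(GSMovie m) {
+ *   m.read();
+ * }
+ *
+ * void draw() {
+ *   image(movie, 0, 0);
+ * }
+ * </pre>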
+ */
+public class GSMovie extends PImage implements PConstants {
+ protected String filename;
+
+ protected boolean playing = false;
+ protected boolean paused = false;
+ protected boolean repeat = false;
+
+ protected float fps;
+ protected float rate;
+ protected int bufWidth;
+ protected int bufHeight;
+
+ protected PlayBin2 gplayer;
+
+ protected Method movieEventMethod;
+ protected Method copyBufferMethod;
+
+ protected Object eventHandler;
+ protected Object copyHandler;
+
+ protected boolean available;
+ protected boolean sinkReady;
+
+ protected RGBDataAppSink rgbSink = null;
+ protected int[] copyPixels = null;
+
+ protected BufferDataAppSink natSink = null;
+ protected Buffer natBuffer = null;
+ protected boolean copyBufferMode = false;
+ protected String copyMask;
+
+ protected boolean firstFrame = true;
+
+ /**
+ * Creates an instance of GSMovie loading the movie from filename.
+ *
+ * @param parent PApplet
+ * @param filename String
+ */
+ public GSMovie(PApplet parent, String filename) {
+ super(0, 0, RGB);
+ initGStreamer(parent, filename);
+ }
+
+ /**
+   * Releases the gstreamer resources associated with this movie object.
+   * The object shouldn't be used after calling this method.
+ */
+ public void delete() {
+ if (gplayer != null) {
+ try {
+ if (gplayer.isPlaying()) {
+ gplayer.stop();
+ }
+ } catch (IllegalStateException e) {
+ System.err.println("error when deleting player, maybe some native resource is already disposed");
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ pixels = null;
+
+ copyPixels = null;
+ if (rgbSink != null) {
+ rgbSink.removeListener();
+ rgbSink.dispose();
+ rgbSink = null;
+ }
+
+ natBuffer = null;
+ if (natSink != null) {
+ natSink.removeListener();
+ natSink.dispose();
+ natSink = null;
+ }
+
+ gplayer.dispose();
+ gplayer = null;
+ }
+ }
+
+ /**
+ * Same as delete.
+ */
+ public void dispose() {
+ delete();
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ * The color conversion mask is automatically set to the one required to
+ * copy the frames to OpenGL.
+ *
+ * @param Object dest
+ */
+ public void setPixelDest(Object dest) {
+ copyHandler = dest;
+ if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
+ copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
+ } else {
+ copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
+ }
+ }
+
+ /**
+ * Sets the object to use as destination for the frames read from the stream.
+ *
+ * @param Object dest
+ * @param String mask
+ */
+ public void setPixelDest(Object dest, String mask) {
+ copyHandler = dest;
+ copyMask = mask;
+ }
+
+ /**
+   * Uses a generic object as the handler of the movie. This object should
+   * have a public movieEvent method that receives a GSMovie argument.
+   * This method will be called upon a new frame read event.
+ *
+ */
+ public void setEventHandlerObject(Object obj) {
+ eventHandler = obj;
+
+ try {
+ movieEventMethod = eventHandler.getClass().getMethod("movieEvent",
+ new Class[] { GSMovie.class });
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ }
+ }
+
+ /**
+ * Get the width of the source video. Note: calling this method repeatedly
+ * can slow down playback performance.
+ *
+ * @return int
+ */
+ public int getSourceWidth() {
+ Dimension dim = gplayer.getVideoSize();
+ if (dim != null) {
+ return dim.width;
+ } else {
+ return 0;
+ }
+ }
+
+ /**
+ * Get the height of the source video. Note: calling this method repeatedly
+ * can slow down playback performance.
+ *
+ * @return int
+ */
+ public int getSourceHeight() {
+ Dimension dim = gplayer.getVideoSize();
+ if (dim != null) {
+ return dim.height;
+ } else {
+ return 0;
+ }
+ }
+
+ /**
+ * Get the original framerate of the source video. Note: calling this method repeatedly
+ * can slow down playback performance.
+ *
+ * @return float
+ */
+ public float getSourceFrameRate() {
+ return (float)gplayer.getVideoSinkFrameRate();
+ }
+
+ /**
+ * Set how often new frames are to be read from the movie. Does not actually
+   * set the speed of the movie playback; that's handled by the speed() method.
+ *
+ * @param float ifps
+ * @see speed
+ */
+ public void frameRate(float ifps) {
+    // We calculate the target ratio only when both the
+    // current and target framerates are valid (greater than
+    // zero); otherwise we leave it as 1.
+ float f = (0 < ifps && 0 < fps) ? ifps / fps : 1;
+
+ if (playing) {
+ gplayer.pause();
+ }
+
+ long t = gplayer.queryPosition(TimeUnit.NANOSECONDS);
+
+ boolean res;
+ long start, stop;
+ if (rate > 0) {
+ start = t;
+ stop = -1;
+ } else {
+ start = 0;
+ stop = t;
+ }
+
+ res = gplayer.seek(rate * f, Format.TIME, SeekFlags.FLUSH,
+ SeekType.SET, start, SeekType.SET, stop);
+
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+
+ if (playing) {
+ gplayer.play();
+ }
+
+ fps = ifps;
+ }
+
+ /**
+ * Set a multiplier for how fast/slow the movie should be run. The default is
+ * 1.0. speed(2) will play the movie at double speed (2x). speed(0.5) will
+ * play at half speed. speed(-1) will play backwards at regular speed.
+ *
+ * @param float irate
+ */
+ public void speed(float irate) {
+    // If speed() is called continuously with very similar rate values, the
+    // resulting seeks can make playback sluggish. This condition attempts
+    // to take care of that.
+ if (PApplet.abs(rate - irate) > 0.1) {
+ rate = irate;
+ frameRate(fps); // The framerate is the same, but the rate (speed) could be different.
+ }
+ }
+
+ /**
+ * Get the full length of this movie (in seconds).
+ *
+ * @return float
+ */
+ public float duration() {
+ float sec = gplayer.queryDuration().toSeconds();
+ float nanosec = gplayer.queryDuration().getNanoSeconds();
+ return sec + GSVideo.nanoSecToSecFrac(nanosec);
+ }
+
+ /**
+ * Return the current time in seconds.
+ *
+ * @return float
+ */
+ public float time() {
+ float sec = gplayer.queryPosition().toSeconds();
+ float nanosec = gplayer.queryPosition().getNanoSeconds();
+ return sec + GSVideo.nanoSecToSecFrac(nanosec);
+ }
+
+ /**
+ * Get the full length of this movie (in frames).
+ *
+   * @return long
+ */
+ public long length() {
+    return (long)(duration() * getSourceFrameRate());
+ }
+
+ /**
+ * Return the current frame.
+ *
+ * @return int
+ */
+ public int frame() {
+ return (int)(time() * getSourceFrameRate());
+ }
+
+ /**
+ * Jump to a specific location (in seconds). The number is a float so
+ * fractions of seconds can be used.
+ *
+ * @param float where
+ */
+ public void jump(float where) {
+ if (playing) {
+ gplayer.pause();
+ }
+
+ boolean res;
+ long start = GSVideo.secToNanoLong(where);
+    long stop = -1; // -1 means no explicit stop position (play to the end)
+
+ res = gplayer.seek(1.0, Format.TIME, SeekFlags.FLUSH,
+ SeekType.SET, start, SeekType.SET, stop);
+
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+
+ if (playing) {
+ gplayer.play();
+ }
+ }
+
+ /**
+ * Jump to a specific frame.
+ *
+ * @param frame int
+ */
+ public void jump(int frame) {
+ float srcFramerate = getSourceFrameRate();
+
+ // The duration of a single frame:
+ float frameDuration = 1 / srcFramerate;
+
+ // We move to the middle of the frame by adding 0.5:
+ float where = (frame + 0.5f) * frameDuration;
+
+ // Taking into account border effects:
+ float diff = duration() - where;
+ if (diff < 0) {
+ where += diff - 0.25f * frameDuration;
+ }
+
+ jump(where);
+ }
+
+ /**
+ * Returns true if the stream is already producing frames.
+ *
+ * @return boolean
+ */
+ public boolean ready() {
+ return 0 < bufWidth && 0 < bufHeight && sinkReady;
+ }
+
+ /**
+   * Returns true or false depending on whether there is a new frame ready to
+ * be read.
+ *
+ * @return boolean
+ */
+ public boolean available() {
+ return available;
+ }
+
+ /**
+ * Returns whether the movie is playing or not.
+ *
+ * @return boolean
+ */
+ public boolean isPlaying() {
+ return playing;
+ }
+
+ /**
+ * Returns whether the movie is paused or not. If isPlaying() and isPaused()
+ * both return false it means that the movie is stopped.
+ *
+ * @return boolean
+ */
+ public boolean isPaused() {
+ return paused;
+ }
+
+ /**
+ * Returns whether the movie is looping or not.
+ *
+ * @return boolean
+ */
+ public boolean isLooping() {
+ return repeat;
+ }
+
+ /**
+ * Begin playing the movie, with no repeat.
+ */
+ public void play() {
+ if (!sinkReady) {
+ initSink();
+ }
+
+ playing = true;
+ paused = false;
+ gplayer.play();
+ }
+
+ /**
+ * Begin playing the movie, with repeat.
+ */
+ public void loop() {
+ repeat = true;
+ play();
+ }
+
+ /**
+ * Shut off the repeating loop.
+ */
+ public void noLoop() {
+ repeat = false;
+ }
+
+ /**
+ * Pause the movie at its current time.
+ */
+ public void pause() {
+ playing = false;
+ paused = true;
+ gplayer.pause();
+ }
+
+ /**
+ * Stop the movie, and rewind.
+ */
+ public void stop() {
+ if (playing) {
+ goToBeginning();
+ playing = false;
+ }
+ paused = false;
+ gplayer.stop();
+ }
+
+ /**
+ * Reads the current video frame.
+ */
+ public synchronized void read() {
+ if (fps <= 0) {
+      // Framerate not set yet, so we obtain it from the stream,
+      // which is already playing since we are in read().
+ fps = getSourceFrameRate();
+ }
+
+    // We loadPixels() first to ensure that at least we always have a non-null
+    // pixels array, even if it doesn't contain a valid image yet.
+ loadPixels();
+
+ if (copyBufferMode) {
+      // The native buffer from gstreamer is copied to the destination object.
+ if (natBuffer == null || copyBufferMethod == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ super.init(bufWidth, bufHeight, RGB);
+ loadPixels();
+ firstFrame = false;
+ }
+
+ IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
+ try {
+ copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ natBuffer = null;
+ } else {
+ // Normal operation mode: the pixels just read from gstreamer
+ // are copied to the pixels array.
+ if (copyPixels == null) {
+ return;
+ }
+
+ if (firstFrame) {
+ resize(bufWidth, bufHeight);
+ firstFrame = false;
+ }
+
+ int[] temp = pixels;
+ pixels = copyPixels;
+ updatePixels();
+ copyPixels = temp;
+ }
+
+ available = false;
+ }
+
+ /**
+ * Goes to the first frame of the movie.
+ */
+ public void goToBeginning() {
+ boolean res = gplayer.seek(ClockTime.fromNanos(0));
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+ }
+
+ /**
+ * Goes to the last frame of the movie.
+ */
+ public void goToEnd() {
+ long nanos = gplayer.queryDuration().getNanoSeconds();
+ boolean res = gplayer.seek(ClockTime.fromNanos(nanos));
+ if (!res) {
+ System.err.println("Seek operation failed.");
+ }
+ }
+
+ /**
+ * Change the volume. Values are from 0 to 1.
+ *
+ * @param float v
+ */
+ public void volume(float v) {
+ if (playing) {
+ gplayer.setVolume(v);
+ }
+ }
+
+ /**
+ * Returns the text string containing the filename of the video loaded.
+ *
+ * @return String
+ */
+ public String getFilename() {
+ return filename;
+ }
+
+ protected void initGStreamer(PApplet parent, String filename) {
+ this.parent = parent;
+ gplayer = null;
+
+ File file;
+
+ GSVideo.init();
+
+ // first check to see if this can be read locally from a file.
+ try {
+ try {
+ // first try a local file using the dataPath. usually this will
+ // work ok, but sometimes the dataPath is inside a jar file,
+ // which is less fun, so this will crap out.
+ file = new File(parent.dataPath(filename));
+ if (file.exists()) {
+ gplayer = new PlayBin2("GSMovie Player");
+ gplayer.setInputFile(file);
+ }
+ } catch (Exception e) {
+ } // ignored
+
+ // read from a file just hanging out in the local folder.
+ // this might happen when the video library is used with some
+ // other application, or the person enters a full path name
+ if (gplayer == null) {
+ try {
+ file = new File(filename);
+ if (file.exists()) {
+ gplayer = new PlayBin2("GSMovie Player");
+ gplayer.setInputFile(file);
+ } else {
+ System.err.println("File " + filename + " does not exist. Please check location.");
+ }
+ } catch (Exception e) {
+ PApplet.println("Shit coming...");
+ e.printStackTrace();
+ }
+ }
+ // Network read needs to be implemented...
+ } catch (SecurityException se) {
+ // online, whups. catch the security exception out here rather than
+ // doing it three times (or whatever) for each of the cases above.
+ }
+
+ if (gplayer == null) {
+ parent.die("Could not load movie file " + filename, null);
+ }
+
+ // we've got a valid movie! let's rock.
+ try {
+ this.filename = filename; // for error messages
+
+ // register methods
+ parent.registerDispose(this);
+
+ setEventHandlerObject(parent);
+
+ rate = 1.0f;
+ fps = -1;
+ sinkReady = false;
+ bufWidth = bufHeight = 0;
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ protected void initSink() {
+ if (copyHandler != null) {
+ try {
+ copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
+ new Class[] { Object.class, IntBuffer.class, int.class, int.class });
+ copyBufferMode = true;
+ } catch (Exception e) {
+ // no such method, or an error.. which is fine, just ignore
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+
+ try {
+ Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
+ meth.invoke(copyHandler, new Object[] { this });
+ } catch (Exception e) {
+ copyBufferMode = false;
+ }
+
+ if (copyBufferMode) {
+ natSink = new BufferDataAppSink("nat", copyMask,
+ new BufferDataAppSink.Listener() {
+ public void bufferFrame(int w, int h, Buffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+
+ natSink.setAutoDisposeBuffer(false);
+ gplayer.setVideoSink(natSink);
+ // The setVideoSink() method sets the videoSink as a property of the PlayBin,
+ // which increments the refcount of the videoSink element. Disposing here once
+ // to decrement the refcount.
+ natSink.dispose();
+ }
+ }
+ }
+
+ if (!copyBufferMode) {
+ rgbSink = new RGBDataAppSink("rgb",
+ new RGBDataAppSink.Listener() {
+ public void rgbFrame(int w, int h, IntBuffer buffer) {
+ invokeEvent(w, h, buffer);
+ }
+ });
+
+ // Setting direct buffer passing in the video sink, so no new buffers are created
+ // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
+ // out this one).
+ rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
+ gplayer.setVideoSink(rgbSink);
+ // The setVideoSink() method sets the videoSink as a property of the PlayBin,
+ // which increments the refcount of the videoSink element. Disposing here once
+ // to decrement the refcount.
+ rgbSink.dispose();
+ }
+
+ // Creating bus to handle end-of-stream event.
+ Bus bus = gplayer.getBus();
+ bus.connect(new Bus.EOS() {
+ public void endOfStream(GstObject element) {
+ eosEvent();
+ }
+ });
+
+ sinkReady = true;
+ }
+
+ protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+
+ if (copyPixels == null) {
+ copyPixels = new int[w * h];
+ }
+ buffer.rewind();
+ try {
+ buffer.get(copyPixels);
+ } catch (BufferUnderflowException e) {
+ e.printStackTrace();
+ copyPixels = null;
+ return;
+ }
+
+ if (playing) {
+ // Creates a movieEvent.
+ if (movieEventMethod != null) {
+ try {
+ movieEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling movieEvent() for " + filename);
+ e.printStackTrace();
+ movieEventMethod = null;
+ }
+ }
+ }
+ }
+
+ protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
+ available = true;
+ bufWidth = w;
+ bufHeight = h;
+ natBuffer = buffer;
+
+ if (playing) {
+ // Creates a movieEvent.
+ if (movieEventMethod != null) {
+ try {
+ movieEventMethod.invoke(eventHandler, new Object[] { this });
+ } catch (Exception e) {
+ System.err.println("error, disabling movieEvent() for " + filename);
+ e.printStackTrace();
+ movieEventMethod = null;
+ }
+ }
+ }
+ }
+
+ public synchronized void disposeBuffer(Object buf) {
+ ((Buffer)buf).dispose();
+ }
+
+ protected void eosEvent() {
+ if (repeat) {
+ goToBeginning();
+ } else {
+ playing = false;
+ }
+ }
+}
diff --git a/java/libraries/video/src/processing/video/MovieMaker.java b/java/libraries/video/src/processing/video/MovieMaker.java
new file mode 100644
index 000000000..e91d68d5f
--- /dev/null
+++ b/java/libraries/video/src/processing/video/MovieMaker.java
@@ -0,0 +1,369 @@
+/**
+ * Part of the GSVideo library: http://gsvideo.sourceforge.net/
+ * Copyright (c) 2008-11 Andres Colubri
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation, version 2.1.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
+ * Boston, MA 02111-1307 USA
+ */
+
+package codeanticode.gsvideo;
+
+import processing.core.*;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+
+import org.gstreamer.Buffer;
+import org.gstreamer.elements.RGBDataFileSink;
+
+/**
+ * This class makes movies from a running program.
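+ * <p>
+ * A minimal usage sketch (hypothetical; the output filename is just an
+ * example, and the default THEORA codec at MEDIUM quality and 30 fps is
+ * assumed):
+ * <pre>
+ * GSMovieMaker mm;
+ *
+ * void setup() {
+ *   size(320, 240);
+ *   mm = new GSMovieMaker(this, width, height, "output.ogg");
+ *   mm.start();
+ * }
+ *
+ * void draw() {
+ *   // draw something...
+ *   loadPixels();
+ *   mm.addFrame(pixels);
+ * }
+ *
+ * void keyPressed() {
+ *   mm.finish();  // stop recording and close the file
+ * }
+ * </pre>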
+ */
+public class GSMovieMaker {
+ protected PApplet parent;
+ protected boolean recording;
+ protected RGBDataFileSink recorder;
+ protected int width, height;
+
+ public static final int THEORA = 0;
+ public static final int XVID = 1;
+ public static final int X264 = 2;
+ public static final int DIRAC = 3;
+ public static final int MJPEG = 4;
+ public static final int MJPEG2K = 5;
+
+ public static final int WORST = 0;
+ public static final int LOW = 1;
+ public static final int MEDIUM = 2;
+ public static final int HIGH = 3;
+ public static final int BEST = 4;
+
+ /**
+ * Constructor that sets the codec to THEORA, MEDIUM quality and 30 fps.
+ *
+ */
+ public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
+ String filename) {
+ init(parent, requestWidth, requestHeight, filename, THEORA, MEDIUM, 30);
+ }
+
+ /**
+   * Constructor that allows setting the codec type and fps.
+ *
+ */
+ public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
+ String filename, int codecType, int ifps) {
+ init(parent, requestWidth, requestHeight, filename, codecType, MEDIUM, ifps);
+ }
+
+ /**
+   * Constructor that allows setting the codec type, encoding quality, and fps.
+ *
+ */
+ public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
+ String filename, int codecType, int codecQuality, int ifps) {
+ init(parent, requestWidth, requestHeight, filename, codecType,
+ codecQuality, ifps);
+ }
+
+ /**
+   * Constructor that allows setting the GStreamer encoder and muxer by name.
+   * Encoder and muxer properties are left at their default values.
+ *
+ */
+ public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
+ String filename, String encoder, String muxer, int ifps) {
+ init(parent, requestWidth, requestHeight, filename, encoder, muxer, null, null, ifps);
+ }
+
+ /**
+   * Constructor that allows setting the GStreamer encoder and muxer by name,
+   * as well as their properties.
+ *
+ */
+ public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
+ String filename, String encoder, String muxer, String[] propNames, Object[] propValues, int ifps) {
+ init(parent, requestWidth, requestHeight, filename, encoder, muxer, propNames, propValues, ifps);
+ }
+
+ /**
+   * Releases the GStreamer resources associated with this movie maker object.
+   * The object shouldn't be used after calling this method.
+ */
+ public void delete() {
+ recorder.stop();
+ recorder.dispose();
+ }
+
+ /**
+ * Same as delete.
+ */
+ public void dispose() {
+ delete();
+ }
+
+ /**
+   * Adds a new frame to the video being recorded.
+ *
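+   * The array must contain width * height pixels; frames of any other size
+   * are silently dropped. A typical source is the sketch's own pixels
+   * array after calling loadPixels() (mm being a hypothetical instance):
+   * <pre>
+   * loadPixels();
+   * mm.addFrame(pixels);
+   * </pre>
+   *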
+ * @param pixels
+ * int[]
+ */
+ public void addFrame(int[] pixels) {
+ if (recording && pixels.length == width * height) {
+ Buffer srcBuffer = new Buffer(width * height * 4);
+
+ ByteBuffer tmpBuffer = srcBuffer.getByteBuffer();
+ tmpBuffer.clear();
+ tmpBuffer.asIntBuffer().put(pixels);
+
+ recorder.pushRGBFrame(srcBuffer);
+ }
+ }
+
+ /**
+ * Starts recording.
+ *
+ */
+ public void start() {
+ recorder.start();
+ recording = true;
+ }
+
+ /**
+ * Finishes recording.
+ *
+ */
+ public void finish() {
+ recording = false;
+ recorder.stop();
+ }
+
+ /**
+ * Returns the number of frames currently in the pre-encoding queue,
+ * waiting to be encoded.
+ *
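+   * This can be used, for instance, to throttle a sketch until the encoder
+   * catches up (mm being a hypothetical instance):
+   * <pre>
+   * while (mm.getQueuedFrames() > 0) delay(10);
+   * </pre>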
+ */
+ public int getQueuedFrames() {
+ return recorder.getNumQueuedFrames();
+ }
+
+ /**
+   * Returns the number of frames dropped so far.
+ *
+ */
+ public int getDroppedFrames() {
+ return recorder.getNumDroppedFrames();
+ }
+
+ /**
+   * Sets the maximum size of the pre-encoding and encoding queues.
+   * When the encoding queue is full, frames start to accumulate in
+   * the pre-encoding queue. Setting the size of the pre-encoding
+   * queue to zero allows it to grow arbitrarily large.
+ *
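+   * For example, an unbounded pre-encoding queue combined with an encoding
+   * queue of 30 frames (mm being a hypothetical instance):
+   * <pre>
+   * mm.setQueueSize(0, 30);
+   * </pre>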
+ */
+ public void setQueueSize(int npre, int nenc) {
+ recorder.setPreQueueSize(npre);
+ recorder.setSrcQueueSize(nenc);
+ }
+
+ /**
+   * Returns whether recording is currently in progress.
+   *
+   * @return boolean
+ */
+ public boolean isRecording() {
+ return recording;
+ }
+
+ protected void init(PApplet iparent, int requestWidth, int requestHeight,
+ String filename, int codecType, int codecQuality, int ifps) {
+ this.parent = iparent;
+
+ GSVideo.init();
+
+ // register methods
+ parent.registerDispose(this);
+
+ width = requestWidth;
+ height = requestHeight;
+
+ String[] propNames = null;
+ Object[] propValues = null;
+
+ String encoder = "";
+ String muxer = "";
+
+ // Determining container based on the filename extension.
+ String fn = filename.toLowerCase();
+ if (fn.endsWith(".ogg")) {
+ muxer = "oggmux";
+ } else if (fn.endsWith(".avi")) {
+ muxer = "avimux";
+ } else if (fn.endsWith(".mov")) {
+ muxer = "qtmux";
+ } else if (fn.endsWith(".flv")) {
+ muxer = "flvmux";
+ } else if (fn.endsWith(".mkv")) {
+ muxer = "matroskamux";
+ } else if (fn.endsWith(".mp4")) {
+ muxer = "mp4mux";
+ } else if (fn.endsWith(".3gp")) {
+ muxer = "gppmux";
+ } else if (fn.endsWith(".mpg")) {
+ muxer = "ffmux_mpeg";
+ } else if (fn.endsWith(".mj2")) {
+ muxer = "mj2mux";
+ } else {
+ parent.die("Unrecognized video container", null);
+ }
+
+ // Configuring encoder.
+ if (codecType == THEORA) {
+ encoder = "theoraenc";
+
+ propNames = new String[1];
+ propValues = new Object[1];
+
+ propNames[0] = "quality";
+ Integer q = 31;
+ if (codecQuality == WORST) {
+ q = 0;
+ } else if (codecQuality == LOW) {
+ q = 15;
+ } else if (codecQuality == MEDIUM) {
+ q = 31;
+ } else if (codecQuality == HIGH) {
+ q = 47;
+ } else if (codecQuality == BEST) {
+ q = 63;
+ }
+ propValues[0] = q;
+ } else if (codecType == DIRAC) {
+ encoder = "schroenc";
+
+ propNames = new String[1];
+ propValues = new Object[1];
+
+ propNames[0] = "quality";
+ Double q = 5.0d;
+ if (codecQuality == WORST) {
+ q = 0.0d;
+ } else if (codecQuality == LOW) {
+ q = 2.5d;
+ } else if (codecQuality == MEDIUM) {
+ q = 5.0d;
+ } else if (codecQuality == HIGH) {
+ q = 7.5d;
+ } else if (codecQuality == BEST) {
+ q = 10.0d;
+ }
+ propValues[0] = q;
+ } else if (codecType == XVID) {
+ encoder = "xvidenc";
+
+      // TODO: set properties of xvidenc.
+ } else if (codecType == X264) {
+ encoder = "x264enc";
+
+ propNames = new String[2];
+ propValues = new Object[2];
+
+ // The pass property can take the following values:
+ // (0): cbr - Constant Bitrate Encoding (default)
+ // (4): quant - Constant Quantizer
+ // (5): qual - Constant Quality
+ // (17): pass1 - VBR Encoding - Pass 1
+ // (18): pass2 - VBR Encoding - Pass 2
+ // (19): pass3 - VBR Encoding - Pass 3
+ propNames[0] = "pass";
+ Integer p = 5;
+ propValues[0] = p;
+
+ // When Constant Quality is specified for pass, then
+ // the property quantizer is interpreted as the quality
+ // level.
+ propNames[1] = "quantizer";
+ Integer q = 21;
+ if (codecQuality == WORST) {
+ q = 50;
+ } else if (codecQuality == LOW) {
+ q = 35;
+ } else if (codecQuality == MEDIUM) {
+ q = 21;
+ } else if (codecQuality == HIGH) {
+ q = 15;
+ } else if (codecQuality == BEST) {
+ q = 1;
+ }
+ propValues[1] = q;
+
+      // The bitrate can be set with the bitrate property, an integer with
+      // range 1 - 102400 (default: 2048).
+ // This probably doesn't have any effect unless we set pass to cbr.
+ } else if (codecType == MJPEG) {
+ encoder = "jpegenc";
+
+ propNames = new String[1];
+ propValues = new Object[1];
+
+ propNames[0] = "quality";
+ Integer q = 85;
+ if (codecQuality == WORST) {
+ q = 0;
+ } else if (codecQuality == LOW) {
+ q = 30;
+ } else if (codecQuality == MEDIUM) {
+ q = 50;
+ } else if (codecQuality == HIGH) {
+ q = 85;
+ } else if (codecQuality == BEST) {
+ q = 100;
+ }
+ propValues[0] = q;
+ } else if (codecType == MJPEG2K) {
+ encoder = "jp2kenc";
+ } else {
+ parent.die("Unrecognized video codec", null);
+ }
+
+ initRecorder(filename, ifps, encoder, muxer, propNames, propValues);
+ }
+
+ protected void init(PApplet iparent, int requestWidth, int requestHeight, String filename,
+ String encoder, String muxer, String[] propNames, Object[] propValues, int ifps) {
+ this.parent = iparent;
+
+ GSVideo.init();
+
+ // register methods
+ parent.registerDispose(this);
+
+ width = requestWidth;
+ height = requestHeight;
+
+ initRecorder(filename, ifps, encoder, muxer, propNames, propValues);
+ }
+
+ protected void initRecorder(String filename, int ifps, String encoder, String muxer,
+ String[] propNames, Object[] propValues) {
+
+ File file = new File(parent.savePath(filename));
+ recorder = new RGBDataFileSink("MovieMaker", width, height, ifps, encoder,
+ propNames, propValues, muxer, file);
+ recording = false;
+ setQueueSize(60, 30);
+ }
+}