Merge remote-tracking branch 'remotes/upstream/master'

This commit is contained in:
Sean McKenna
2014-06-05 20:30:20 -04:00
41 changed files with 125 additions and 4676 deletions

View File

@@ -151,7 +151,7 @@
</condition>
<!-- Set the version of Java that must be present to build. -->
<property name="jdk.update.macosx" value="55" />
<property name="jdk.update.macosx" value="60" />
<property name="jdk.path.macosx" value="/Library/Java/JavaVirtualMachines/jdk1.7.0_${jdk.update.macosx}.jdk" />
<available file="${jdk.path.macosx}" property="macosx_jdk_found" />
@@ -226,7 +226,6 @@
<subant buildpath="../java/libraries/net" target="clean"/>
<subant buildpath="../java/libraries/pdf" target="clean"/>
<subant buildpath="../java/libraries/serial" target="clean"/>
<subant buildpath="../java/libraries/video" target="clean"/>
<subant buildpath="shared/tools/MovieMaker" target="clean"/>
</target>
@@ -237,7 +236,6 @@
<subant buildpath="../java/libraries/net" target="build"/>
<subant buildpath="../java/libraries/pdf" target="build"/>
<subant buildpath="../java/libraries/serial" target="build"/>
<subant buildpath="../java/libraries/video" target="build"/>
<subant buildpath="shared/tools/MovieMaker" target="build"/>
</target>
@@ -495,9 +493,6 @@
<fileset dir="${launch4j.dir}/bin" includes="ld-*" />
<fileset dir="${launch4j.dir}/bin" includes="windres-*" />
</delete>
<delete dir="${target.path}/modes/java/libraries/video/library/windows32" />
<delete dir="${target.path}/modes/java/libraries/video/library/windows64" />
</target>
<target name="macosx-run" depends="macosx-build"
@@ -623,10 +618,6 @@
<copy todir="linux/work/lib" flatten="true">
<fileset refid="runtime.jars" />
</copy>
<delete dir="${target.path}/modes/java/libraries/video/library/macosx64" />
<delete dir="${target.path}/modes/java/libraries/video/library/windows32" />
<delete dir="${target.path}/modes/java/libraries/video/library/windows64" />
<copy file="linux/processing" todir="linux/work" />
<chmod perm="ugo+x" file="linux/work/processing" />
@@ -679,7 +670,7 @@
<delete dir="linux/jre1.7.0_40" />
<!-- Remove unused JRE bloat. -->
<delete>
<delete failonerror="true">
<fileset refid="javafx-basics" />
<fileset refid="javafx-linux-${sun.arch.data.model}" />
<fileset refid="jre-optional-linux" />
@@ -790,16 +781,6 @@
<fileset dir="${launch4j.dir}/bin" includes="ld-*" />
<fileset dir="${launch4j.dir}/bin" includes="windres-*" />
</delete>
<delete dir="${target.path}/modes/java/libraries/video/library/macosx64" />
<condition property="video.delete" value="32">
<equals arg1="${sun.arch.data.model}" arg2="64" />
</condition>
<condition property="video.delete" value="64">
<equals arg1="${sun.arch.data.model}" arg2="32" />
</condition>
<delete dir="${target.path}/modes/java/libraries/video/library/windows${video.delete}" />
<taskdef name="launch4j"
classname="net.sf.launch4j.ant.Launch4jTask"
@@ -832,7 +813,7 @@
<move file="windows/work/jre1.7.0_40" tofile="windows/work/java" />
<!-- Remove space-wasting JavaFX garbage. -->
<delete>
<delete failonerror="true">
<fileset refid="javafx-basics" />
<fileset refid="javafx-windows" />
<fileset refid="jre-optional-windows" />

View File

@@ -119,6 +119,7 @@ public class PVector implements Serializable {
/** Array so that this can be temporarily used in an array context */
transient protected float[] array;
/**
* Constructor for an empty vector: x, y, and z are set to 0.
*/
@@ -149,6 +150,7 @@ public class PVector implements Serializable {
this.z = 0;
}
/**
* ( begin auto-generated from PVector_set.xml )
*
@@ -163,28 +165,33 @@ public class PVector implements Serializable {
* @param z the z component of the vector
* @brief Set the components of the vector
*/
public void set(float x, float y, float z) {
public PVector set(float x, float y, float z) {
this.x = x;
this.y = y;
this.z = z;
return this;
}
/**
* @param x the x component of the vector
* @param y the y component of the vector
*/
public void set(float x, float y) {
public PVector set(float x, float y) {
this.x = x;
this.y = y;
return this;
}
/**
* @param v any variable of type PVector
*/
public void set(PVector v) {
public PVector set(PVector v) {
x = v.x;
y = v.y;
z = v.z;
return this;
}
@@ -192,7 +199,7 @@ public class PVector implements Serializable {
* Set the x, y (and maybe z) coordinates using a float[] array as the source.
* @param source array to copy from
*/
public void set(float[] source) {
public PVector set(float[] source) {
if (source.length >= 2) {
x = source[0];
y = source[1];
@@ -200,6 +207,7 @@ public class PVector implements Serializable {
if (source.length >= 3) {
z = source[2];
}
return this;
}
@@ -217,9 +225,10 @@ public class PVector implements Serializable {
* @see PVector#random3D()
*/
static public PVector random2D() {
return random2D(null,null);
return random2D(null, null);
}
/**
* Make a new 2D unit vector with a random direction
* using Processing's current random number generator
@@ -227,7 +236,7 @@ public class PVector implements Serializable {
* @return the random PVector
*/
static public PVector random2D(PApplet parent) {
return random2D(null,parent);
return random2D(null, parent);
}
/**
@@ -236,18 +245,23 @@ public class PVector implements Serializable {
* @return the random PVector
*/
static public PVector random2D(PVector target) {
return random2D(target,null);
return random2D(target, null);
}
/**
* Make a new 2D unit vector with a random direction
* Make a new 2D unit vector with a random direction. Pass in the parent
* PApplet if you want randomSeed() to work (and be predictable). Or leave
* it null and be... random.
* @return the random PVector
*/
static public PVector random2D(PVector target, PApplet parent) {
if (parent == null) return fromAngle((float)(Math.random()*Math.PI*2),target);
else return fromAngle(parent.random(PConstants.TWO_PI),target);
return (parent == null) ?
fromAngle((float) (Math.random() * Math.PI*2), target) :
fromAngle(parent.random(PConstants.TAU), target);
}
/**
* ( begin auto-generated from PVector_random3D.xml )
*
@@ -262,9 +276,10 @@ public class PVector implements Serializable {
* @see PVector#random2D()
*/
static public PVector random3D() {
return random3D(null,null);
return random3D(null, null);
}
/**
* Make a new 3D unit vector with a random direction
* using Processing's current random number generator
@@ -272,18 +287,20 @@ public class PVector implements Serializable {
* @return the random PVector
*/
static public PVector random3D(PApplet parent) {
return random3D(null,parent);
return random3D(null, parent);
}
/**
* Set a 3D vector to a random unit vector with a random direction
* @param target the target vector (if null, a new vector will be created)
* @return the random PVector
*/
static public PVector random3D(PVector target) {
return random3D(target,null);
return random3D(target, null);
}
/**
* Make a new 3D unit vector with a random direction
* @return the random PVector
@@ -309,6 +326,7 @@ public class PVector implements Serializable {
return target;
}
/**
* ( begin auto-generated from PVector_sub.xml )
*
@@ -342,6 +360,12 @@ public class PVector implements Serializable {
return target;
}
public PVector copy() {
return new PVector(x, y, z);
}
/**
* ( begin auto-generated from PVector_get.xml )
*
@@ -353,10 +377,12 @@ public class PVector implements Serializable {
* @usage web_application
* @brief Get a copy of the vector
*/
@Deprecated
public PVector get() {
return new PVector(x, y, z);
return copy();
}
/**
* @param target
*/
@@ -393,6 +419,7 @@ public class PVector implements Serializable {
return (float) Math.sqrt(x*x + y*y + z*z);
}
/**
* ( begin auto-generated from PVector_mag.xml )
*
@@ -413,6 +440,7 @@ public class PVector implements Serializable {
return (x*x + y*y + z*z);
}
/**
* ( begin auto-generated from PVector_add.xml )
*
@@ -429,21 +457,24 @@ public class PVector implements Serializable {
* @param v the vector to be added
* @brief Adds x, y, and z components to a vector, one vector to another, or two independent vectors
*/
public void add(PVector v) {
public PVector add(PVector v) {
x += v.x;
y += v.y;
z += v.z;
return this;
}
/**
* @param x x component of the vector
* @param y y component of the vector
* @param z z component of the vector
*/
public void add(float x, float y, float z) {
public PVector add(float x, float y, float z) {
this.x += x;
this.y += y;
this.z += z;
return this;
}
@@ -487,21 +518,24 @@ public class PVector implements Serializable {
* @param v any variable of type PVector
* @brief Subtract x, y, and z components from a vector, one vector from another, or two independent vectors
*/
public void sub(PVector v) {
public PVector sub(PVector v) {
x -= v.x;
y -= v.y;
z -= v.z;
return this;
}
/**
* @param x the x component of the vector
* @param y the y component of the vector
* @param z the z component of the vector
*/
public void sub(float x, float y, float z) {
public PVector sub(float x, float y, float z) {
this.x -= x;
this.y -= y;
this.z -= z;
return this;
}
@@ -542,10 +576,11 @@ public class PVector implements Serializable {
* @brief Multiply a vector by a scalar
* @param n the number to multiply with the vector
*/
public void mult(float n) {
public PVector mult(float n) {
x *= n;
y *= n;
z *= n;
return this;
}
@@ -571,7 +606,6 @@ public class PVector implements Serializable {
}
/**
* ( begin auto-generated from PVector_div.xml )
*
@@ -584,10 +618,11 @@ public class PVector implements Serializable {
* @brief Divide a vector by a scalar
* @param n the number by which to divide the vector
*/
public void div(float n) {
public PVector div(float n) {
x /= n;
y /= n;
z /= n;
return this;
}
@@ -600,6 +635,7 @@ public class PVector implements Serializable {
return div(v, n, null);
}
/**
* Divide a vector by a scalar and store the result in another vector.
* @param target PVector in which to store the result
@@ -665,6 +701,7 @@ public class PVector implements Serializable {
return x*v.x + y*v.y + z*v.z;
}
/**
* @param x x component of the vector
* @param y y component of the vector
@@ -674,6 +711,7 @@ public class PVector implements Serializable {
return this.x*x + this.y*y + this.z*z;
}
/**
* @param v1 any variable of type PVector
* @param v2 any variable of type PVector
@@ -717,6 +755,7 @@ public class PVector implements Serializable {
return target;
}
/**
* @param v1 any variable of type PVector
* @param v2 any variable of type PVector
@@ -747,11 +786,12 @@ public class PVector implements Serializable {
* @usage web_application
* @brief Normalize the vector to a length of 1
*/
public void normalize() {
public PVector normalize() {
float m = mag();
if (m != 0 && m != 1) {
div(m);
}
return this;
}
@@ -785,13 +825,15 @@ public class PVector implements Serializable {
* @param max the maximum magnitude for the vector
* @brief Limit the magnitude of the vector
*/
public void limit(float max) {
public PVector limit(float max) {
if (magSq() > max*max) {
normalize();
mult(max);
}
return this;
}
/**
* ( begin auto-generated from PVector_setMag.xml )
*
@@ -804,11 +846,13 @@ public class PVector implements Serializable {
* @param len the new length for this vector
* @brief Set the magnitude of the vector
*/
public void setMag(float len) {
public PVector setMag(float len) {
normalize();
mult(len);
return this;
}
/**
* Sets the magnitude of this vector, storing the result in another vector.
* @param target Set to null to create a new vector
@@ -821,6 +865,7 @@ public class PVector implements Serializable {
return target;
}
/**
* ( begin auto-generated from PVector_setMag.xml )
*
@@ -857,11 +902,12 @@ public class PVector implements Serializable {
* @brief Rotate the vector by an angle (2D only)
* @param theta the angle of rotation
*/
public void rotate(float theta) {
float xTemp = x;
public PVector rotate(float theta) {
float temp = x;
// Might need to check for rounding errors like with angleBetween function?
x = x*PApplet.cos(theta) - y*PApplet.sin(theta);
y = xTemp*PApplet.sin(theta) + y*PApplet.cos(theta);
y = temp*PApplet.sin(theta) + y*PApplet.cos(theta);
return this;
}
@@ -879,35 +925,40 @@ public class PVector implements Serializable {
* @param amt The amount of interpolation; some value between 0.0 (old vector) and 1.0 (new vector). 0.1 is very near the new vector. 0.5 is halfway in between.
* @see PApplet#lerp(float, float, float)
*/
public void lerp(PVector v, float amt) {
x = PApplet.lerp(x,v.x,amt);
y = PApplet.lerp(y,v.y,amt);
z = PApplet.lerp(z,v.z,amt);
public PVector lerp(PVector v, float amt) {
x = PApplet.lerp(x, v.x, amt);
y = PApplet.lerp(y, v.y, amt);
z = PApplet.lerp(z, v.z, amt);
return this;
}
/**
* Linear interpolate between two vectors (returns a new PVector object)
* @param v1 the vector to start from
* @param v2 the vector to lerp to
*/
public static PVector lerp(PVector v1, PVector v2, float amt) {
PVector v = v1.get();
PVector v = v1.copy();
v.lerp(v2, amt);
return v;
}
/**
* Linear interpolate the vector to x,y,z values
* @param x the x component to lerp to
* @param y the y component to lerp to
* @param z the z component to lerp to
*/
public void lerp(float x, float y, float z, float amt) {
this.x = PApplet.lerp(this.x,x,amt);
this.y = PApplet.lerp(this.y,y,amt);
this.z = PApplet.lerp(this.z,z,amt);
public PVector lerp(float x, float y, float z, float amt) {
this.x = PApplet.lerp(this.x, x, amt);
this.y = PApplet.lerp(this.y, y, amt);
this.z = PApplet.lerp(this.z, z, amt);
return this;
}
/**
* ( begin auto-generated from PVector_angleBetween.xml )
*
@@ -976,14 +1027,17 @@ public class PVector implements Serializable {
return array;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof PVector))
if (!(obj instanceof PVector)) {
return false;
}
final PVector p = (PVector) obj;
return x == p.x && y == p.y && z == p.z;
}
@Override
public int hashCode() {
int result = 1;

View File

@@ -1,7 +1,19 @@
0228 core
X add copy() method to PVector
X modify PVector to include better methods for chaining operations
X http://code.google.com/p/processing/issues/detail?id=218
X https://github.com/processing/processing/issues/257
X PVector discussion with Dan
o Jer and Dan will look at their code, plus toxiclibs
_ bring back chaining in JSON (and add to XML)
high
_ Closing opengl sketch from the PDE doesn't stop java process on windows
_ https://github.com/processing/processing/issues/2335
_ StringList.insert() error (should be an easy fix)
_ https://github.com/processing/processing/issues/2548
_ pull for image resize and alpha issues
_ https://github.com/processing/processing/pull/2324
_ dataPath() not working when app is not run from app dir on Linux
@@ -430,13 +442,8 @@ _ https://github.com/processing/processing/issues/1596
CORE / PVector
_ PVector discussion with Dan
_ Jer and Dan will look at their code, plus toxiclibs
_ modify PVector to include better methods for chaining operations
_ http://code.google.com/p/processing/issues/detail?id=218
_ add screen(PVector), model(PVector) and world(PVector)?
_ maybe screenVec()? or screenXYZ()?
_ PVector chaining -> Dan looking into this
CORE / OpenGL (Andres)

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/processing-core"/>
<classpathentry kind="lib" path="library/gstreamer-java.jar"/>
<classpathentry kind="lib" path="library/jna.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -1 +0,0 @@
bin

View File

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>processing-video</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View File

@@ -1,12 +0,0 @@
#Sat Nov 12 10:54:16 CST 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6

View File

@@ -1,31 +0,0 @@
<?xml version="1.0"?>
<project name="Processing Video Library" default="build">
<target name="clean" description="Clean the build directories">
<delete dir="bin" />
<delete file="library/video.jar" />
</target>
<target name="compile" description="Compile sources">
<condition property="core-built">
<available file="../../../core/library/core.jar" />
</condition>
<fail unless="core-built" message="Please build the core library first and make sure it sits in ../../../core/library/core.jar" />
<mkdir dir="bin" />
<javac source="1.6"
target="1.6"
srcdir="src" destdir="bin"
encoding="UTF-8"
includeAntRuntime="false"
classpath="../../../core/library/core.jar; library/gstreamer-java.jar; library/jna.jar"
nowarn="true"
compiler="org.eclipse.jdt.core.JDTCompilerAdapter">
<compilerclasspath path="../../mode/ecj.jar" />
</javac>
</target>
<target name="build" depends="compile" description="Build video library">
<jar basedir="bin" destfile="library/video.jar" />
</target>
</project>

View File

@@ -1,140 +0,0 @@
/**
* ASCII Video
* by Ben Fry.
*
*
* Text characters have been used to represent images since the earliest computers.
* This sketch is a simple homage that re-interprets live video as ASCII text.
* See the keyPressed function for more options, like changing the font size.
*/
import processing.video.*;
Capture video;
boolean cheatScreen;
// All ASCII characters, sorted according to their visual density
String letterOrder =
" .`-_':,;^=+/\"|)\\<>)iv%xclrs{*}I?!][1taeo7zjLu" +
"nT#JCwfy325Fp6mqSghVd4EgXPGZbYkOA&8U$@KHDBWNMR0Q";
char[] letters;
float[] bright;
char[] chars;
PFont font;
float fontSize = 1.5;
// Open the capture device, load the display font, and build the
// brightness-to-letter lookup tables used by draw().
void setup() {
  size(640, 480);
  // This is the default video input; see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, 160, 120);
  // Start capturing the images from the camera
  video.start();
  // Total number of pixels in one incoming video frame
  int count = video.width * video.height;
  //println(count);
  font = loadFont("UniversLTStd-Light-48.vlw");
  // For the 256 levels of brightness, distribute the letters across
  // an array of 256 elements to use for the lookup
  letters = new char[256];
  for (int i = 0; i < 256; i++) {
    int index = int(map(i, 0, 256, 0, letterOrder.length()));
    letters[i] = letterOrder.charAt(index);
  }
  // current characters for each position in the video
  chars = new char[count];
  // current brightness for each point
  bright = new float[count];
  for (int i = 0; i < count; i++) {
    // set each brightness at the midpoint to start
    bright[i] = 128;
  }
}
// Called by the video library whenever a new frame arrives; reading it
// here refreshes video.pixels for the next draw() pass.
void captureEvent(Capture c) {
  c.read();
}
// Render the current video frame as a grid of ASCII characters, one
// letter per video pixel, choosing each letter by (smoothed) brightness.
void draw() {
  background(0);
  pushMatrix();
  float hgap = width / float(video.width);
  float vgap = height / float(video.height);
  scale(max(hgap, vgap) * fontSize);
  textFont(font, fontSize);
  int index = 0;
  video.loadPixels();
  for (int y = 1; y < video.height; y++) {
    // Move down for next line
    translate(0, 1.0 / fontSize);
    pushMatrix();
    for (int x = 0; x < video.width; x++) {
      int pixelColor = video.pixels[index];
      // Faster method of calculating r, g, b than red(), green(), blue()
      int r = (pixelColor >> 16) & 0xff;
      int g = (pixelColor >> 8) & 0xff;
      int b = pixelColor & 0xff;
      // Another option would be to properly calculate brightness as luminance:
      // luminance = 0.3*red + 0.59*green + 0.11*blue
      // Or you could instead use red + green + blue, and make the values[]
      // array 256*3 elements long instead of just 256.
      int pixelBright = max(r, g, b);
      // The 0.1 value is used to damp the changes so that letters flicker less
      float diff = pixelBright - bright[index];
      bright[index] += diff * 0.1;
      fill(pixelColor);
      int num = int(bright[index]);
      text(letters[num], 0, 0);
      // Move to the next pixel
      index++;
      // Move over for next character
      translate(1.0 / fontSize, 0);
    }
    popMatrix();
  }
  popMatrix();
  if (cheatScreen) {
    //image(video, 0, height - video.height);
    // set() is faster than image() when drawing untransformed images
    set(0, height - video.height, video);
  }
}
/**
 * Keyboard controls:
 *   'c' toggles the cheat screen that shows the original image in the corner
 *   'g' grabs the current frame and saves it to an image file
 *   'f' and 'F' increase and decrease the font size
 */
void keyPressed() {
  if (key == 'g') {
    saveFrame();
  } else if (key == 'c') {
    cheatScreen = !cheatScreen;
  } else if (key == 'f') {
    fontSize *= 1.1;
  } else if (key == 'F') {
    fontSize *= 0.9;
  }
}

View File

@@ -1,74 +0,0 @@
/**
* Background Subtraction
* by Golan Levin.
*
* Detect the presence of people and objects in the frame using a simple
* background-subtraction technique. To initialize the background, press a key.
*/
import processing.video.*;
int numPixels;
int[] backgroundPixels;
Capture video;
// Open a full-window capture device and allocate the buffer that will
// hold the reference background frame for the subtraction in draw().
void setup() {
  size(640, 480);
  // This is the default video input; see the GettingStartedCapture
  // example if it creates an error
  //video = new Capture(this, 160, 120);
  video = new Capture(this, width, height);
  // Start capturing the images from the camera
  video.start();
  numPixels = video.width * video.height;
  // Create array to store the background image
  backgroundPixels = new int[numPixels];
  // Make the pixels[] array available for direct manipulation
  loadPixels();
}
// Each frame: show the per-channel absolute difference between the live
// frame and the stored background, and print a sum that quantifies how
// much of the frame differs (i.e. "presence").
void draw() {
  if (video.available()) {
    video.read(); // Read a new video frame
    video.loadPixels(); // Make the pixels of video available
    // Difference between the current frame and the stored background
    int presenceSum = 0;
    for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
      // Fetch the current color in that location, and also the color
      // of the background in that spot
      color currColor = video.pixels[i];
      color bkgdColor = backgroundPixels[i];
      // Extract the red, green, and blue components of the current pixel's color
      int currR = (currColor >> 16) & 0xFF;
      int currG = (currColor >> 8) & 0xFF;
      int currB = currColor & 0xFF;
      // Extract the red, green, and blue components of the background pixel's color
      int bkgdR = (bkgdColor >> 16) & 0xFF;
      int bkgdG = (bkgdColor >> 8) & 0xFF;
      int bkgdB = bkgdColor & 0xFF;
      // Compute the difference of the red, green, and blue values
      int diffR = abs(currR - bkgdR);
      int diffG = abs(currG - bkgdG);
      int diffB = abs(currB - bkgdB);
      // Add these differences to the running tally
      presenceSum += diffR + diffG + diffB;
      // Render the difference image to the screen
      pixels[i] = color(diffR, diffG, diffB);
      // The following line does the same thing much faster, but is more technical
      //pixels[i] = 0xFF000000 | (diffR << 16) | (diffG << 8) | diffB;
    }
    updatePixels(); // Notify that the pixels[] array has changed
    println(presenceSum); // Print out the total amount of movement
  }
}
// When a key is pressed, capture the background image into the backgroundPixels
// buffer, by copying each of the current frame's pixels into it. The next
// draw() calls will then diff against this snapshot.
void keyPressed() {
  video.loadPixels();
  arraycopy(video.pixels, backgroundPixels);
}

View File

@@ -1,63 +0,0 @@
/**
* Brightness Thresholding
* by Golan Levin.
*
* Determines whether a test location (such as the cursor) is contained within
* the silhouette of a dark object.
*/
import processing.video.*;
color black = color(0);
color white = color(255);
int numPixels;
Capture video;
// Open a full-window capture device and prepare drawing state for the
// thresholded display (thick stroke, hidden cursor, antialiasing).
void setup() {
  size(640, 480); // Change size to 320 x 240 if too slow at 640 x 480
  strokeWeight(5);
  // This is the default video input; see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, width, height);
  // Start capturing the images from the camera
  video.start();
  numPixels = video.width * video.height;
  noCursor();
  smooth();
}
// Binarize the live frame around a fixed brightness threshold, then draw
// a circle at the cursor whose fill (black/white) reports which side of
// the threshold the pixel under the cursor falls on.
void draw() {
  if (video.available()) {
    video.read();
    video.loadPixels();
    int threshold = 127; // Set the threshold value
    float pixelBrightness; // Declare variable to store a pixel's color
    // Turn each pixel in the video frame black or white depending on its brightness
    loadPixels();
    for (int i = 0; i < numPixels; i++) {
      pixelBrightness = brightness(video.pixels[i]);
      if (pixelBrightness > threshold) { // If the pixel is brighter than the
        pixels[i] = white; // threshold value, make it white
      }
      else { // Otherwise,
        pixels[i] = black; // make it black
      }
    }
    updatePixels();
    // Test a location to see where it is contained. Fetch the pixel at the test
    // location (the cursor), and compute its brightness
    int testValue = get(mouseX, mouseY);
    float testBrightness = brightness(testValue);
    if (testBrightness > threshold) { // If the test location is brighter than
      fill(black); // the threshold set the fill to black
    }
    else { // Otherwise,
      fill(white); // set the fill to white
    }
    ellipse(mouseX, mouseY, 20, 20);
  }
}

View File

@@ -1,53 +0,0 @@
/**
* Brightness Tracking
* by Golan Levin.
*
* Tracks the brightest pixel in a live video signal.
*/
import processing.video.*;
Capture video;
// Open a full-window capture device and set drawing state for the
// brightest-pixel marker.
void setup() {
  size(640, 480);
  // Uses the default video input; see the reference if this causes an error
  video = new Capture(this, width, height);
  video.start();
  noStroke();
  smooth();
}
// Scan the current frame for its brightest pixel and mark it with a
// translucent yellow circle over the live video.
void draw() {
  if (video.available()) {
    video.read();
    image(video, 0, 0, width, height); // Draw the webcam video onto the screen
    int brightestX = 0; // X-coordinate of the brightest video pixel
    int brightestY = 0; // Y-coordinate of the brightest video pixel
    float brightestValue = 0; // Brightness of the brightest video pixel
    // Search for the brightest pixel: For each row of pixels in the video image and
    // for each pixel in the yth row, compute each pixel's index in the video
    video.loadPixels();
    int index = 0;
    for (int y = 0; y < video.height; y++) {
      for (int x = 0; x < video.width; x++) {
        // Get the color stored in the pixel
        int pixelValue = video.pixels[index];
        // Determine the brightness of the pixel
        float pixelBrightness = brightness(pixelValue);
        // If that value is brighter than any previous, then store the
        // brightness of that pixel, as well as its (x,y) location
        if (pixelBrightness > brightestValue) {
          brightestValue = pixelBrightness;
          brightestY = y;
          brightestX = x;
        }
        index++;
      }
    }
    // Draw a large, yellow circle at the brightest pixel
    fill(255, 204, 0, 128);
    ellipse(brightestX, brightestY, 200, 200);
  }
}

View File

@@ -1,146 +0,0 @@
/**
* Color Sorting
* by Ben Fry.
*
* Example that sorts all colors from the incoming video
* and arranges them into vertical bars.
*/
import processing.video.*;
Capture video;
boolean cheatScreen;
Tuple[] captureColors;
Tuple[] drawColors;
int[] bright;
// How many pixels to skip in either direction
int increment = 5;
// Open the capture device and allocate the parallel arrays used for
// sorting: one brightness key plus a captured and a displayed color
// Tuple for every sampled pixel (sampled every 'increment' pixels).
void setup() {
  size(800, 600);
  // This is the default video input; see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, 160, 120);
  // Start capturing the images from the camera
  video.start();
  int count = (video.width * video.height) / (increment * increment);
  bright = new int[count];
  captureColors = new Tuple[count];
  drawColors = new Tuple[count];
  for (int i = 0; i < count; i++) {
    captureColors[i] = new Tuple();
    // start the displayed colors at mid-gray
    drawColors[i] = new Tuple(0.5, 0.5, 0.5);
  }
}
// Sample the frame on a grid, sort the sampled colors by squared
// magnitude, and draw them as a strip of vertical bars; displayed
// colors ease toward the captured ones to smooth flicker.
void draw() {
  if (video.available()) {
    video.read();
    video.loadPixels();
    background(0);
    noStroke();
    int index = 0;
    for (int j = 0; j < video.height; j += increment) {
      for (int i = 0; i < video.width; i += increment) {
        int pixelColor = video.pixels[j*video.width + i];
        int r = (pixelColor >> 16) & 0xff;
        int g = (pixelColor >> 8) & 0xff;
        int b = pixelColor & 0xff;
        // Technically would be sqrt of the following, but no need to do
        // sqrt before comparing the elements since we're only ordering
        bright[index] = r*r + g*g + b*b;
        captureColors[index].set(r, g, b);
        index++;
      }
    }
    sort(index, bright, captureColors);
    beginShape(QUAD_STRIP);
    for (int i = 0; i < index; i++) {
      // ease each displayed color 10% of the way toward its target
      drawColors[i].target(captureColors[i], 0.1);
      drawColors[i].phil();
      float x = map(i, 0, index, 0, width);
      vertex(x, 0);
      vertex(x, height);
    }
    endShape();
    if (cheatScreen) {
      //image(video, 0, height - video.height);
      // Faster method of displaying pixels array on screen
      set(0, height - video.height, video);
    }
  }
}
// Keyboard controls: 'g' saves the current frame, 'c' toggles the
// cheat screen showing the raw video in the corner.
void keyPressed() {
  switch (key) {
    case 'g':
      saveFrame();
      break;
    case 'c':
      cheatScreen = !cheatScreen;
      break;
  }
}
// Functions to handle sorting the color data

// Sort the first 'length' entries of a[] ascending, keeping the parallel
// stuff[] array in step (each Tuple rides along with its key).
void sort(int length, int[] a, Tuple[] stuff) {
  sortSub(a, stuff, 0, length - 1);
}
// Exchange elements i and j in both parallel arrays so keys and their
// associated Tuples stay aligned.
void sortSwap(int[] a, Tuple[] stuff, int i, int j) {
  int tmpKey = a[i];
  a[i] = a[j];
  a[j] = tmpKey;

  Tuple tmpVal = stuff[i];
  stuff[i] = stuff[j];
  stuff[j] = tmpVal;
}
// Recursive quicksort of a[lo0..hi0], mirroring every swap into stuff[]
// so the two arrays stay parallel. Uses the middle element as the pivot.
void sortSub(int[] a, Tuple[] stuff, int lo0, int hi0) {
  int lo = lo0;
  int hi = hi0;
  int mid;
  if (hi0 > lo0) {
    // pivot value taken from the middle of the current range
    mid = a[(lo0 + hi0) / 2];
    while (lo <= hi) {
      // scan from the left for an element >= pivot
      while ((lo < hi0) && (a[lo] < mid)) {
        ++lo;
      }
      // scan from the right for an element <= pivot
      while ((hi > lo0) && (a[hi] > mid)) {
        --hi;
      }
      // out-of-place pair found: swap and keep scanning inward
      if (lo <= hi) {
        sortSwap(a, stuff, lo, hi);
        ++lo;
        --hi;
      }
    }
    // recurse into the two partitions on either side of the pivot
    if (lo0 < hi)
      sortSub(a, stuff, lo0, hi);
    if (lo < hi0)
      sortSub(a, stuff, lo, hi0);
  }
}

View File

@@ -1,29 +0,0 @@
// Simple vector class that holds an x,y,z position. In this sketch the
// three components also double as r,g,b values when passed to fill().
class Tuple {
  float x, y, z;

  Tuple() { }

  Tuple(float x, float y, float z) {
    set(x, y, z);
  }

  // Assign all three components at once.
  void set(float x, float y, float z) {
    this.x = x;
    this.y = y;
    this.z = z;
  }

  // Move this tuple a fraction 'amount' (0..1) of the way toward
  // 'another' — linear interpolation used to smooth color changes.
  void target(Tuple another, float amount) {
    float amount1 = 1.0 - amount;
    x = x*amount1 + another.x*amount;
    y = y*amount1 + another.y*amount;
    z = z*amount1 + another.z*amount;
  }

  // Use this tuple's components as the current fill color.
  void phil() {
    fill(x, y, z);
  }
}

View File

@@ -1,70 +0,0 @@
/**
* Frame Differencing
* by Golan Levin.
*
* Quantify the amount of movement in the video frame using frame-differencing.
*/
import processing.video.*;
int numPixels;
int[] previousFrame;
Capture video;
// Open a full-window capture device and allocate the buffer that holds
// the previous frame for the differencing in draw().
void setup() {
  size(640, 480);
  // This is the default video input; see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, width, height);
  // Start capturing the images from the camera
  video.start();
  numPixels = video.width * video.height;
  // Create an array to store the previously captured frame
  previousFrame = new int[numPixels];
  loadPixels();
}
// Display the per-channel absolute difference between consecutive frames
// and print a sum that quantifies the total movement in the frame.
void draw() {
  if (video.available()) {
    // When using video to manipulate the screen, use video.available() and
    // video.read() inside the draw() method so that it's safe to draw to the screen
    video.read(); // Read the new frame from the camera
    video.loadPixels(); // Make its pixels[] array available
    int movementSum = 0; // Amount of movement in the frame
    for (int i = 0; i < numPixels; i++) { // For each pixel in the video frame...
      color currColor = video.pixels[i];
      color prevColor = previousFrame[i];
      // Extract the red, green, and blue components from current pixel
      int currR = (currColor >> 16) & 0xFF; // Like red(), but faster
      int currG = (currColor >> 8) & 0xFF;
      int currB = currColor & 0xFF;
      // Extract red, green, and blue components from previous pixel
      int prevR = (prevColor >> 16) & 0xFF;
      int prevG = (prevColor >> 8) & 0xFF;
      int prevB = prevColor & 0xFF;
      // Compute the difference of the red, green, and blue values
      int diffR = abs(currR - prevR);
      int diffG = abs(currG - prevG);
      int diffB = abs(currB - prevB);
      // Add these differences to the running tally
      movementSum += diffR + diffG + diffB;
      // Render the difference image to the screen
      pixels[i] = color(diffR, diffG, diffB);
      // The following line is much faster, but more confusing to read
      //pixels[i] = 0xff000000 | (diffR << 16) | (diffG << 8) | diffB;
      // Save the current color into the 'previous' buffer
      previousFrame[i] = currColor;
    }
    // To prevent flicker from frames that are all black (no movement),
    // only update the screen if the image has changed.
    if (movementSum > 0) {
      updatePixels();
      println(movementSum); // Print the total amount of movement to the console
    }
  }
}

View File

@@ -1,62 +0,0 @@
/**
* Framingham
* by Ben Fry.
*
* Show subsequent frames from video input as a grid. Also fun with movie files.
*/
import processing.video.*;
Capture video;
int column;
int columnCount;
int lastRow;
// Buffer used to move all the pixels up
int[] scoot;
// Sets up the capture device and the geometry of the frame grid:
// new frames are tiled across the bottom row, older rows scroll upward.
void setup() {
  size(640, 480);

  // This the default video input, see the GettingStartedCapture
  // example if it creates an error
  video = new Capture(this, 160, 120);

  // Start capturing the images from the camera
  video.start();

  column = 0;
  columnCount = width / video.width;      // how many frames fit per row
  int rowCount = height / video.height;   // how many rows fit on screen
  lastRow = rowCount - 1;                 // fresh frames are drawn into this row
  // Buffer sized to hold every row except the bottom one; used in draw()
  // to shift the whole canvas up by one video-row.
  scoot = new int[lastRow*video.height * width];
  background(0);
}
// Draws each new frame into the next cell of the bottom row; when the row is
// full, scrolls the canvas up one video-row via the scoot buffer.
void draw() {
  // By using video.available, only the frame rate need be set inside setup()
  if (video.available()) {
    video.read();
    video.loadPixels();
    // Place the new frame in the current column of the bottom row.
    image(video, video.width*column, video.height*lastRow);
    column++;
    if (column == columnCount) {
      loadPixels();
      // Scoot everybody up one row: copy everything below the first
      // video-row into the buffer, then copy it back starting at the top.
      arrayCopy(pixels, video.height*width, scoot, 0, scoot.length);
      arrayCopy(scoot, 0, pixels, 0, scoot.length);
      // Set the moved row to black
      for (int i = scoot.length; i < width*height; i++) {
        pixels[i] = #000000;
      }
      column = 0;
      updatePixels();
    }
  }
}

View File

@@ -1,49 +0,0 @@
/**
* Getting Started with Capture.
*
* Reading and displaying an image from an attached Capture device.
*/
import processing.video.*;
Capture cam;
// Enumerates the available cameras, initializes 'cam' from the first one
// (or the default device when enumeration fails), and starts capture.
void setup() {
  size(640, 480);

  String[] cameras = Capture.list();

  if (cameras == null) {
    // Enumeration failed entirely; fall back to the default device.
    // (Previously this branch fell through to cameras.length and threw a
    // NullPointerException, and the fallback camera was never started.)
    println("Failed to retrieve the list of available cameras, will try the default...");
    cam = new Capture(this, 640, 480);
    cam.start();
  } else if (cameras.length == 0) {
    // Enumeration worked but no device is attached; nothing to capture.
    println("There are no cameras available for capture.");
    exit();
  } else {
    println("Available cameras:");
    for (int i = 0; i < cameras.length; i++) {
      println(cameras[i]);
    }

    // The camera can be initialized directly using an element
    // from the array returned by list():
    cam = new Capture(this, cameras[0]);
    // Or, the settings can be defined based on the text in the list
    //cam = new Capture(this, 640, 480, "Built-in iSight", 30);

    // Start capturing the images from the camera
    cam.start();
  }
}
// Grab the latest frame (when one is ready) and paint it at the origin.
void draw() {
  if (cam.available()) {
    cam.read();
  }
  image(cam, 0, 0);
  // set(0, 0, cam) would do the same thing faster, as long as no resizing,
  // transformation, or tint is applied to the image.
  //set(0, 0, cam);
}

View File

@@ -1,213 +0,0 @@
/**
* HSV Space
* by Ben Fry.
*
* Arrange the pixels from live video into the HSV Color Cone.
*/
import processing.video.*;
import java.awt.Color;
Capture video;
int count;
boolean cheatScreen = true;
static final float BOX_SIZE = 0.75;
static final float CONE_HEIGHT = 1.2;
static final float MAX_RADIUS = 10;
static final float ROT_INCREMENT = 3.0;
static final float TRANS_INCREMENT = 1;
static final float STEP_AMOUNT = 0.1;
Tuple[] farbe;
Tuple[] trans;
float[] hsb = new float[3];
float leftRightAngle;
float upDownAngle;
float fwdBackTrans;
float upDownTrans;
float leftRightTrans;
boolean motion;
boolean blobby = false;
void setup() {
size(640, 480, P3D);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this, 160, 120);
// Start capturing the images from the camera
video.start();
count = video.width * video.height;
sphereDetail(60);
upDownTrans = 0;
leftRightTrans = 0;
motion = false;
leftRightAngle = 101.501297;
upDownAngle = -180.098694;
fwdBackTrans = 14.800003;
farbe = new Tuple[count];
trans = new Tuple[count];
for (int i = 0; i < count; i++) {
farbe[i] = new Tuple();
trans[i] = new Tuple();
}
}
void draw() {
background(0);
if (!blobby) {
lights();
}
pushMatrix();
translate(width/2, height/2);
scale(min(width, height) / 10.0);
translate(0, 0, -20 + fwdBackTrans);
rotateY(radians(36 + leftRightAngle)); //, 0, 1, 0);
rotateX(radians(-228 + upDownAngle)); //, 1, 0, 0);
strokeWeight(0.1);
if (blobby) {
stroke(0.35, 0.35, 0.25, 0.15);
wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 18, 18);
}
else {
stroke(0.35, 0.35, 0.25, 0.25);
wireCone(MAX_RADIUS, MAX_RADIUS * CONE_HEIGHT, 180, 18);
}
noStroke();
video.loadPixels();
for (int i = 0; i < count; i++) {
int pixelColor = video.pixels[i];
int r = (pixelColor >> 16) & 0xff;
int g = (pixelColor >> 8) & 0xff;
int b = pixelColor & 0xff;
Color.RGBtoHSB(r, g, b, hsb);
float radius = hsb[1] * hsb[2];
float angle = hsb[0] * 360.0 * DEG_TO_RAD;
float nx = MAX_RADIUS * radius * cos(angle);
float ny = MAX_RADIUS * radius * sin(angle);
float nz = hsb[2] * MAX_RADIUS * CONE_HEIGHT;
trans[i].set(trans[i].x - (trans[i].x - nx)*STEP_AMOUNT,
trans[i].y - (trans[i].y - ny)*STEP_AMOUNT,
trans[i].z - (trans[i].z - nz)*STEP_AMOUNT);
farbe[i].set(farbe[i].x - (farbe[i].x - r)*STEP_AMOUNT,
farbe[i].y - (farbe[i].y - g)*STEP_AMOUNT,
farbe[i].z - (farbe[i].z - b)*STEP_AMOUNT);
pushMatrix();
farbe[i].phil();
trans[i].tran();
rotate(radians(45), 1, 1, 0);
if (blobby) {
sphere(BOX_SIZE * 2); //, 20, 20);
} else {
box(BOX_SIZE);
}
popMatrix();
}
popMatrix();
if (motion) {
upDownAngle--;
leftRightAngle--;
}
if (cheatScreen) {
image(video, 0, height - video.height);
}
}
// Capture callback: pull each new frame from the camera as it arrives.
void captureEvent(Capture camera) {
  camera.read();
}
// Keyboard controls: g = save a frame, c = toggle the video inset,
// m = toggle auto-rotation, = / - = zoom in/out, b = toggle blobby rendering.
void keyPressed() {
  if (key == 'g') {
    saveFrame();
  } else if (key == 'c') {
    cheatScreen = !cheatScreen;
  } else if (key == 'm') {
    motion = !motion;
  } else if (key == '=') {
    fwdBackTrans += TRANS_INCREMENT;
  } else if (key == '-') {
    fwdBackTrans -= TRANS_INCREMENT;
  } else if (key == 'b') {
    blobby = !blobby;
  }
}
void mouseDragged() {
float dX, dY;
switch (mouseButton) {
case LEFT: // left right up down
dX = pmouseX - mouseX;
dY = pmouseY - mouseY;
leftRightAngle -= dX * 0.2;
upDownAngle += dY * 0.4;
break;
case CENTER:
dX = pmouseX - mouseX;
dY = pmouseY - mouseY;
leftRightTrans -= TRANS_INCREMENT * dX;
upDownTrans -= TRANS_INCREMENT * dY;
break;
case RIGHT: // in and out
dY = (float) (pmouseY - mouseY);
fwdBackTrans -= TRANS_INCREMENT * dY;
break;
}
}
// Draws a wireframe cone outline: straight lines from points on the rim to
// the apex, plus the rim circle itself.
// NOTE(review): the stepX/stepY parameters are accepted but never used — a
// fixed 10 divisions is always drawn; also the 'height' parameter shadows
// Processing's global height. Confirm both are intentional.
void wireCone(float radius, float height, int stepX, int stepY) {
  int steps = 10;  // number of rim-to-apex lines actually drawn
  stroke(40);
  for (int i = 0; i < steps; i++) {
    float angle = map(i, 0, steps, 0, TWO_PI);
    float x = radius * cos(angle);
    float y = radius * sin(angle);
    // Line from this rim point (at z = height) to the apex at the origin.
    line(x, y, height, 0, 0, 0);
  }
  noFill();
  pushMatrix();
  translate(0, 0, height);
  ellipseMode(CENTER);
  ellipse(0, 0, radius, radius);
  popMatrix();
}

View File

@@ -1,33 +0,0 @@
// Simple vector class that holds an x,y,z position.
// The components are also reused as R,G,B when the tuple is used as a color.
class Tuple {
  float x, y, z;

  // No-arg constructor leaves the components at their default of 0.
  Tuple() { }

  Tuple(float x, float y, float z) {
    set(x, y, z);
  }

  // Overwrites all three components at once.
  void set(float x, float y, float z) {
    this.x = x;
    this.y = y;
    this.z = z;
  }

  // Moves this tuple toward 'another' by linear interpolation:
  // amount = 0 keeps the current value, amount = 1 jumps all the way there.
  void target(Tuple another, float amount) {
    float amount1 = 1.0 - amount;
    x = x*amount1 + another.x*amount;
    y = y*amount1 + another.y*amount;
    z = z*amount1 + another.z*amount;
  }

  // Uses the components as an RGB fill color.
  void phil() {
    fill(x, y, z);
  }

  // Uses the components as a 3D translation.
  void tran() {
    translate(x, y, z);
  }
}

View File

@@ -1,57 +0,0 @@
/**
* Live Pocky
* by Ben Fry.
*
* Unwrap each frame of live video into a single line of pixels.
*/
import processing.video.*;
Capture video;
int count;
int writeRow;
int maxRows;
int topRow;
int buffer[];
void setup() {
size(600, 400);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this, 320, 240);
// Start capturing the images from the camera
video.start();
maxRows = height * 2;
buffer = new int[width * maxRows];
writeRow = height - 1;
topRow = 0;
background(0);
loadPixels();
}
void draw() {
video.loadPixels();
arraycopy(video.pixels, 0, buffer, writeRow * width, width);
writeRow++;
if (writeRow == maxRows) {
writeRow = 0;
}
topRow++;
for (int y = 0; y < height; y++) {
int row = (topRow + y) % maxRows;
arraycopy(buffer, row * width, g.pixels, y*width, width);
}
updatePixels();
}
void captureEvent(Capture c) {
c.read();
}

View File

@@ -1,73 +0,0 @@
/**
* Mirror
* by Daniel Shiffman.
*
* Each pixel from the video source is drawn as a rectangle with rotation based on brightness.
*/
import processing.video.*;
// Size of each cell in the grid
int cellSize = 20;
// Number of columns and rows in our system
int cols, rows;
// Variable for capture device
Capture video;
void setup() {
size(640, 480);
frameRate(30);
cols = width / cellSize;
rows = height / cellSize;
colorMode(RGB, 255, 255, 255, 100);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this, width, height);
// Start capturing the images from the camera
video.start();
background(0);
}
void draw() {
if (video.available()) {
video.read();
video.loadPixels();
// Begin loop for columns
for (int i = 0; i < cols; i++) {
// Begin loop for rows
for (int j = 0; j < rows; j++) {
// Where are we, pixel-wise?
int x = i*cellSize;
int y = j*cellSize;
int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
float r = red(video.pixels[loc]);
float g = green(video.pixels[loc]);
float b = blue(video.pixels[loc]);
// Make a new color with an alpha component
color c = color(r, g, b, 75);
// Code for drawing a single rect
// Using translate in order for rotation to work properly
pushMatrix();
translate(x+cellSize/2, y+cellSize/2);
// Rotation formula based on brightness
rotate((2 * PI * brightness(c) / 255.0));
rectMode(CENTER);
fill(c);
noStroke();
// Rects are larger than the cell for some overlap
rect(0, 0, cellSize+6, cellSize+6);
popMatrix();
}
}
}
}

View File

@@ -1,63 +0,0 @@
/**
* Mirror 2
* by Daniel Shiffman.
*
* Each pixel from the video source is drawn as a rectangle with size based on brightness.
*/
import processing.video.*;
// Size of each cell in the grid
int cellSize = 15;
// Number of columns and rows in our system
int cols, rows;
// Variable for capture device
Capture video;
void setup() {
size(640, 480);
// Set up columns and rows
cols = width / cellSize;
rows = height / cellSize;
colorMode(RGB, 255, 255, 255, 100);
rectMode(CENTER);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this, width, height);
// Start capturing the images from the camera
video.start();
background(0);
}
void draw() {
if (video.available()) {
video.read();
video.loadPixels();
background(0, 0, 255);
// Begin loop for columns
for (int i = 0; i < cols;i++) {
// Begin loop for rows
for (int j = 0; j < rows;j++) {
// Where are we, pixel-wise?
int x = i * cellSize;
int y = j * cellSize;
int loc = (video.width - x - 1) + y*video.width; // Reversing x to mirror the image
// Each rect is colored white with a size determined by brightness
color c = video.pixels[loc];
float sz = (brightness(c) / 255.0) * cellSize;
fill(255);
noStroke();
rect(x + cellSize/2, y + cellSize/2, sz, sz);
}
}
}
}

View File

@@ -1,81 +0,0 @@
/**
* Radial Pocky
* by Ben Fry.
*
* Unwrap each frame of live video into a single line of pixels along a circle
*/
import processing.video.*;
Capture video;
int videoCount;
int currentAngle;
int pixelCount;
int angleCount = 200; // how many divisions
int radii[];
int angles[];
void setup() {
// size must be set to video.width*video.height*2 in both directions
size(600, 600);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this, 160, 120);
// Start capturing the images from the camera
video.start();
videoCount = video.width * video.height;
pixelCount = width*height;
int centerX = width / 2;
int centerY = height / 2;
radii = new int[pixelCount];
angles = new int[pixelCount];
int offset = 0;
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
int dx = centerX - x;
int dy = centerY - y;
float angle = atan2(dy, dx);
if (angle < 0) angle += TWO_PI;
angles[offset] = (int) (angleCount * (angle / TWO_PI));
int radius = (int) mag(dx, dy);
if (radius >= videoCount) {
radius = -1;
angles[offset] = -1;
}
radii[offset] = radius;
offset++;
}
}
background(0);
}
// For each new frame, paints only the screen pixels lying on the current
// angular wedge, sampling the video's pixel array by distance from center.
void draw() {
  if (video.available()) {
    video.read();
    video.loadPixels();

    loadPixels();
    // NOTE(review): radii[] was clamped in setup() against videoCount
    // (video.width * video.height = 19200), which no on-screen radius can
    // ever reach on a 600x600 canvas — presumably a smaller bound such as
    // the video width was intended; confirm against the setup() math.
    for (int i = 0; i < pixelCount; i++) {
      if (angles[i] == currentAngle) {
        pixels[i] = video.pixels[radii[i]];
      }
    }
    updatePixels();

    // Advance to the next wedge, wrapping after a full revolution.
    currentAngle++;
    if (currentAngle == angleCount) {
      currentAngle = 0;
    }
  }
}

View File

@@ -1,56 +0,0 @@
/**
* Simple Real-Time Slit-Scan Program.
* By Golan Levin.
*
* This demonstration depends on the canvas height being equal
* to the video capture height. If you would prefer otherwise,
* consider using the image copy() function rather than the
* direct pixel-accessing approach I have used here.
*/
import processing.video.*;
Capture video;
int videoSliceX;
int drawPositionX;
void setup() {
size(600, 240);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this,320, 240);
// Start capturing the images from the camera
video.start();
videoSliceX = video.width / 2;
drawPositionX = width - 1;
background(0);
}
// Slit-scan: copies the middle column of each new video frame into a column
// of the canvas that sweeps slowly from right to left.
void draw() {
  if (video.available()) {
    video.read();
    video.loadPixels();

    // Copy a column of pixels from the middle of the video
    // To a location moving slowly across the canvas.
    // (Assumes the canvas height equals the capture height; see header.)
    loadPixels();
    for (int y = 0; y < video.height; y++){
      int setPixelIndex = y*width + drawPositionX;
      int getPixelIndex = y*video.width + videoSliceX;
      pixels[setPixelIndex] = video.pixels[getPixelIndex];
    }
    updatePixels();

    drawPositionX--;
    // Wrap the position back to the beginning if necessary.
    if (drawPositionX < 0) {
      drawPositionX = width - 1;
    }
  }
}

View File

@@ -1,131 +0,0 @@
/**
* Spatiotemporal
* by David Muth
*
* Records a number of video frames into memory, then plays back the video
* buffer by turning the time axis into the x-axis and vice versa
*/
import processing.video.*;
Capture video;
int signal = 0;
//the buffer for storing video frames
ArrayList frames;
//different program modes for recording and playback
int mode = 0;
int MODE_NEWBUFFER = 0;
int MODE_RECORDING = 1;
int MODE_PLAYBACK = 2;
int currentX = 0;
void setup() {
size(640, 480);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this, width, height);
// Start capturing the images from the camera
video.start();
}
void captureEvent(Capture c) {
c.read();
//create a new buffer in case one is needed
if (mode == MODE_NEWBUFFER) {
frames = new ArrayList();
mode = MODE_RECORDING;
}
//record into the buffer until there are enough frames
if (mode == MODE_RECORDING) {
//copy the current video frame into an image, so it can be stored in the buffer
PImage img = createImage(width, height, RGB);
video.loadPixels();
arrayCopy(video.pixels, img.pixels);
frames.add(img);
//in case enough frames have been recorded, switch to playback mode
if (frames.size() >= width) {
mode = MODE_PLAYBACK;
}
}
}
void draw() {
loadPixels();
//code for the recording mode
if (mode == MODE_RECORDING) {
//set the image counter to 0
int currentImage = 0;
//begin a loop for displaying pixel columns
for (int x = 0; x < video.width; x++) {
//go through the frame buffer and pick an image using the image counter
if (currentImage < frames.size()) {
PImage img = (PImage)frames.get(currentImage);
//display a pixel column of the current image
if (img != null) {
img.loadPixels();
for (int y = 0; y < video.height; y++) {
pixels[x + y * width] = img.pixels[x + y * video.width];
}
}
//increase the image counter
currentImage++;
}
else {
break;
}
}
}
//code for displaying the spatiotemporal transformation
if (mode == MODE_PLAYBACK) {
//begin a loop for displaying pixel columns
for (int x = 0; x < video.width; x++) {
//get an image from the buffer using loopcounter x as the index
PImage img = (PImage)frames.get(x);
if (img != null) {
img.loadPixels();
//pick the same column from each image for display,
//then distribute the columns over the x-axis on the screen
for(int y = 0; y < video.height; y++) {
pixels[x + y * width] = img.pixels[currentX + y * video.width];
}
}
}
//a different column shall be used next time draw() is being called
currentX++;
//if the end of the buffer is reached
if(currentX >= video.width) {
//create a new buffer when the next video frame arrives
mode = MODE_NEWBUFFER;
//reset the column counter
currentX = 0;
}
}
updatePixels();
}

View File

@@ -1,84 +0,0 @@
/**
* Time Displacement
* by David Muth
*
* Keeps a buffer of video frames in memory and displays pixel rows
* taken from consecutive frames distributed over the y-axis
*/
import processing.video.*;
Capture video;
int signal = 0;
//the buffer for storing video frames
ArrayList frames = new ArrayList();
void setup() {
size(640, 480);
// This the default video input, see the GettingStartedCapture
// example if it creates an error
video = new Capture(this, width, height);
// Start capturing the images from the camera
video.start();
}
void captureEvent(Capture camera) {
camera.read();
// Copy the current video frame into an image, so it can be stored in the buffer
PImage img = createImage(width, height, RGB);
video.loadPixels();
arrayCopy(video.pixels, img.pixels);
frames.add(img);
// Once there are enough frames, remove the oldest one when adding a new one
if (frames.size() > height/4) {
frames.remove(0);
}
}
void draw() {
// Set the image counter to 0
int currentImage = 0;
loadPixels();
// Begin a loop for displaying pixel rows of 4 pixels height
for (int y = 0; y < video.height; y+=4) {
// Go through the frame buffer and pick an image, starting with the oldest one
if (currentImage < frames.size()) {
PImage img = (PImage)frames.get(currentImage);
if (img != null) {
img.loadPixels();
// Put 4 rows of pixels on the screen
for (int x = 0; x < video.width; x++) {
pixels[x + y * width] = img.pixels[x + y * video.width];
pixels[x + (y + 1) * width] = img.pixels[x + (y + 1) * video.width];
pixels[x + (y + 2) * width] = img.pixels[x + (y + 2) * video.width];
pixels[x + (y + 3) * width] = img.pixels[x + (y + 3) * video.width];
}
}
// Increase the image counter
currentImage++;
} else {
break;
}
}
updatePixels();
// For recording an image sequence
//saveFrame("frame-####.jpg");
}

View File

@@ -1,79 +0,0 @@
/**
* Frames
* by Andres Colubri.
*
* Moves through the video one frame at the time by using the
* arrow keys. It estimates the frame counts using the framerate
* of the movie file, so it might not be exact in some cases.
*/
import processing.video.*;
Movie mov;
int newFrame = 0;
int movFrameRate = 30;
void setup() {
size(640, 360);
background(0);
// Load and set the video to play. Setting the video
// in play mode is needed so at least one frame is read
// and we can get duration, size and other information from
// the video stream.
mov = new Movie(this, "transit.mov");
// Pausing the video at the first frame.
mov.play();
mov.jump(0);
mov.pause();
}
// Called by the video library whenever a new movie frame is available.
void movieEvent(Movie source) {
  source.read();
}
void draw() {
background(0);
image(mov, 0, 0, width, height);
fill(255);
text(getFrame() + " / " + (getLength() - 1), 10, 30);
}
// Steps one frame backward/forward with the LEFT/RIGHT arrow keys,
// clamped to the valid frame range.
void keyPressed() {
  if (key == CODED) {
    if (keyCode == LEFT) {
      if (0 < newFrame) newFrame--;
    } else if (keyCode == RIGHT) {
      if (newFrame < getLength() - 1) newFrame++;
    }
  }
  // NOTE(review): setFrame() runs for every key press, not just the arrow
  // keys, so any key re-seeks to the current frame — confirm intended.
  setFrame(newFrame);
}
// Estimates the current frame index from the playback time.
// Uses movFrameRate (previously a hard-coded 30) so the frame math stays
// consistent with setFrame() and getLength() if the rate is changed.
int getFrame() {
  return ceil(mov.time() * movFrameRate) - 1;
}
// Seeks the movie to frame n by converting the frame index into a time
// offset, then pauses so the frame stays on screen.
void setFrame(int n) {
  mov.play();

  // The duration of a single frame:
  float frameDuration = 1.0 / movFrameRate;

  // We move to the middle of the frame by adding 0.5:
  float where = (n + 0.5) * frameDuration;

  // Taking into account border effects:
  float diff = mov.duration() - where;
  if (diff < 0) {
    where += diff - 0.25 * frameDuration;
  }

  mov.jump(where);
  mov.pause();
}
// Total number of frames, estimated from the duration and the assumed rate.
int getLength() {
  float totalFrames = mov.duration() * movFrameRate;
  return int(totalFrames);
}

View File

@@ -1,29 +0,0 @@
/**
* Loop.
*
* Shows how to load and play a QuickTime movie file.
*
*/
import processing.video.*;
Movie movie;
void setup() {
size(640, 360);
background(0);
// Load and play the video in a loop
movie = new Movie(this, "transit.mov");
movie.loop();
}
// Frame-ready callback: read the new frame so image() can display it.
void movieEvent(Movie source) {
  source.read();
}
// Frames are delivered through movieEvent(), so draw() only has to paint
// the most recent one, scaled to fill the sketch window.
void draw() {
  image(movie, 0, 0, width, height);
}

View File

@@ -1,51 +0,0 @@
/**
* Pixelate
* by Hernando Barragan.
*
* Load a QuickTime file and display the video signal
* using rectangles as pixels by reading the values stored
* in the current video frame pixels array.
*/
import processing.video.*;
int numPixelsWide, numPixelsHigh;
int blockSize = 10;
Movie mov;
color movColors[];
void setup() {
size(640, 360);
noStroke();
mov = new Movie(this, "transit.mov");
mov.loop();
numPixelsWide = width / blockSize;
numPixelsHigh = height / blockSize;
println(numPixelsWide);
movColors = new color[numPixelsWide * numPixelsHigh];
}
// Display values from movie
void draw() {
if (mov.available() == true) {
mov.read();
mov.loadPixels();
int count = 0;
for (int j = 0; j < numPixelsHigh; j++) {
for (int i = 0; i < numPixelsWide; i++) {
movColors[count] = mov.get(i*blockSize, j*blockSize);
count++;
}
}
}
background(255);
for (int j = 0; j < numPixelsHigh; j++) {
for (int i = 0; i < numPixelsWide; i++) {
fill(movColors[j*numPixelsWide + i]);
rect(i*blockSize, j*blockSize, blockSize, blockSize);
}
}
}

View File

@@ -1,48 +0,0 @@
/**
* Reverse playback example.
*
* The Movie.speed() method allows to change the playback speed.
* Use negative values for backwards playback. Note that not all
* video formats support backwards playback. This depends on the
* underlying gstreamer plugins used by gsvideo. For example, the
* theora codec does support backward playback, but not so the H264
* codec, at least in its current version.
*
*/
import processing.video.*;
Movie mov;
boolean speedSet = false;
boolean once = true;
void setup() {
size(640, 360);
background(0);
mov = new Movie(this, "transit.mkv");
mov.play();
}
// Frame-ready callback. Also clears the speedSet latch once the first frame
// after the speed change has been read (see draw() for how it is set).
void movieEvent(Movie m) {
  m.read();
  if (speedSet == true) {
    speedSet = false;
  }
}
// On the first frame only, jumps to the end of the movie and switches to
// backward playback at normal speed; afterwards just displays frames.
void draw() {
  if (speedSet == false && once == true) {
    // Setting the speed should be done only once,
    // this is the reason for the if statement.
    speedSet = true;
    once = false;
    mov.jump(mov.duration());
    // -1 means backward playback at normal speed.
    mov.speed(-1.0);
    // Setting to play again, since the movie stop
    // playback once it reached the end.
    mov.play();
  }
  image(mov, 0, 0, width, height);
}

View File

@@ -1,39 +0,0 @@
/**
* Scratch
* by Andres Colubri.
*
* Move the cursor horizontally across the screen to set
* the position in the movie file.
*/
import processing.video.*;
Movie mov;
void setup() {
size(640, 360);
background(0);
mov = new Movie(this, "transit.mov");
// Pausing the video at the first frame.
mov.play();
mov.jump(0);
mov.pause();
}
// "Scratches" through the movie: maps the mouse's horizontal position to a
// time offset, seeks there, and pauses so the selected frame stays visible.
void draw() {
  if (mov.available()) {
    mov.read();
    // A new time position is calculated using the current mouse location:
    float f = map(mouseX, 0, width, 0, 1);
    float t = mov.duration() * f;
    // play() is required before jump(); pause() freezes on the new frame.
    mov.play();
    mov.jump(t);
    mov.pause();
  }
  image(mov, 0, 0);
}

View File

@@ -1,33 +0,0 @@
/**
* Speed.
*
* Use the Movie.speed() method to change
* the playback speed.
*
*/
import processing.video.*;
Movie mov;
void setup() {
size(640, 360);
background(0);
mov = new Movie(this, "transit.mov");
mov.loop();
}
// Frame-ready callback: read the new frame from the Movie object passed in.
// Previously this called read() on the global 'mov' instead of the 'movie'
// parameter — the same object here, but using the parameter keeps the
// callback self-contained and avoids a shadowing hazard.
void movieEvent(Movie movie) {
  movie.read();
}
// Displays the movie and continuously maps the mouse's horizontal position
// to a playback speed between 0.1X and 2X, overlaying the current value.
void draw() {
  image(mov, 0, 0);

  float speedFactor = map(mouseX, 0, width, 0.1, 2);
  mov.speed(speedFactor);

  fill(255);
  text(nfc(speedFactor, 2) + "X", 10, 30);
}

View File

@@ -1 +0,0 @@
video.jar

View File

@@ -1 +0,0 @@
name = Video

File diff suppressed because it is too large Load Diff

View File

@@ -1,236 +0,0 @@
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
Part of the Processing project - http://processing.org
Copyright (c) 2011-12 Ben Fry and Casey Reas
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
*/
package processing.video;
import java.util.HashMap;
import java.util.Map;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.Platform;
/**
* This class loads the gstreamer native libraries.
* By Andres Colubri
* Based on code by Tal Shalif
*
*/
/**
 * This class loads the gstreamer native libraries.
 * By Andres Colubri
 * Based on code by Tal Shalif
 */
public class LibraryLoader {

  /** Marker interface handed to JNA when the library only needs loading, not mapping. */
  public interface DummyLibrary extends Library {
  }

  // Lazily-created singleton instance; see getInstance().
  private static LibraryLoader instance;

  // Each entry: { library name, names of libraries it depends on, required? }
  static final Object[][] WINDOWS_DEPENDENCIES = {
      // glib libraries
      { "gio-2.0", new String[] {}, true },
      { "glib-2.0", new String[] {}, true },
      { "gmodule-2.0", new String[] {}, true },
      { "gobject-2.0", new String[] {}, true },
      { "gthread-2.0", new String[] {}, true },

      // Core gstreamer libraries
      { "gstapp-0.10", new String[] {}, true },
      { "gstaudio-0.10", new String[] {}, true },
      { "gstbase-0.10", new String[] {}, true },
      { "gstbasevideo-0.10", new String[] {}, true },
      { "gstcdda-0.10", new String[] {}, true },
      { "gstcontroller-0.10", new String[] {}, true },
      { "gstdataprotocol-0.10", new String[] {}, true },
      { "gstfft-0.10", new String[] {}, true },
      { "gstinterfaces-0.10", new String[] {}, true },
      { "gstnet-0.10", new String[] {}, true },
      { "gstnetbuffer-0.10", new String[] {}, true },
      { "gstpbutils-0.10", new String[] {}, true },
      { "gstphotography-0.10", new String[] {}, true },
      { "gstreamer-0.10", new String[] {}, true },
      { "gstriff-0.10", new String[] {}, true },
      { "gstrtp-0.10", new String[] {}, true },
      { "gstrtsp-0.10", new String[] {}, true },
      { "gstsdp-0.10", new String[] {}, true },
      { "gstsignalprocessor-0.10", new String[] {}, true },
      { "gsttag-0.10", new String[] {}, true },
      { "gstvideo-0.10", new String[] {}, true },

      // External libraries (optional; failures are only logged)
      { "libiconv-2", new String[] {}, false },
      { "libintl-8", new String[] {}, false },
      { "libjpeg-8", new String[] {}, false },
      { "libogg-0", new String[] {}, false },
      { "liborc-0.4-0", new String[] {}, false },
      { "liborc-test-0.4-0", new String[] {}, false },
      { "libpng14-14", new String[] {}, false },
      { "libtheora-0", new String[] {}, false },
      { "libtheoradec-1", new String[] {}, false },
      { "libtheoraenc-1", new String[] {}, false },
      { "libvorbis-0", new String[] {}, false },
      { "libvorbisenc-2", new String[] {}, false },
      { "libvorbisfile-3", new String[] {}, false },
      { "libxml2-2", new String[] {}, false },
      { "zlib1", new String[] {}, false } };

  static final Object[][] MACOSX_DEPENDENCIES = {
      { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
      { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true } };

  static final Object[][] DEFAULT_DEPENDENCIES = {
      { "gstreamer-0.10", new String[] {}, true },
      { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
      { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true }, };

  // Platform-specific dependency table selected once at class-load time.
  static final Object[][] dependencies =
    Platform.isWindows() ? WINDOWS_DEPENDENCIES :
    Platform.isMac() ? MACOSX_DEPENDENCIES : DEFAULT_DEPENDENCIES;

  // Cache of already-loaded libraries, keyed by library name.
  private static final Map<String, Object> loadedMap =
    new HashMap<String, Object>();

  private static final int RECURSIVE_LOAD_MAX_DEPTH = 5;

  private LibraryLoader() {
  }

  // Eagerly loads every library in the platform dependency table.
  private void preLoadLibs() {
    for (Object[] a : dependencies) {
      load(a[0].toString(), DummyLibrary.class, true, 0, (Boolean) a[2]);
    }
  }

  // Returns the declared dependencies of the named library, or an empty
  // array when the name is not in the table (probably a client call).
  private String[] findDeps(String name) {
    for (Object[] a : dependencies) {
      if (name.equals(a[0])) {
        return (String[]) a[1];
      }
    }
    return new String[] {}; // library dependancy load chain unspecified -
                            // probably client call
  }

  /**
   * Loads the named library (and its declared dependencies first),
   * returning the JNA library object or null when optional and missing.
   */
  public Object load(String name, Class<?> clazz, boolean reqLib) {
    return load(name, clazz, true, 0, reqLib);
  }

  private Object load(String name, Class<?> clazz, boolean forceReload,
      int depth, boolean reqLib) {
    assert depth < RECURSIVE_LOAD_MAX_DEPTH : String.format(
        "recursive max load depth %s has been exceeded", depth);

    Object library = loadedMap.get(name);

    if (null == library || forceReload) {
      try {
        // Load declared dependencies first so the dynamic linker can
        // resolve them when the requested library is loaded.
        String[] deps = findDeps(name);
        for (String lib : deps) {
          load(lib, DummyLibrary.class, false, depth + 1, reqLib);
        }
        library = loadLibrary(name, clazz, reqLib);
        if (library != null) {
          loadedMap.put(name, library);
        }
      } catch (Exception e) {
        if (reqLib) {
          // Attach the original exception as the cause; the previous code
          // passed it as an unused format argument, losing the stack trace.
          throw new RuntimeException(
              String.format("can not load required library %s", name), e);
        } else {
          // Fixed format string: 'e' was previously an unused extra
          // argument, so the failure reason was never printed.
          System.out.println(
              String.format("can not load library %s: %s", name, e));
        }
      }
    }
    return library;
  }

  // Tries the platform-specific library file name variants in order and
  // returns the first that links; throws (required) or logs (optional)
  // the last UnsatisfiedLinkError when none do.
  private static Object loadLibrary(String name, Class<?> clazz,
      boolean reqLib) {
    String[] nameFormats;
    nameFormats = Platform.isWindows() ? new String[] { "lib%s", "lib%s-0",
        "%s" } : new String[] { "%s-0", "%s" };

    UnsatisfiedLinkError linkError = null;
    for (String fmt : nameFormats) {
      try {
        String s = String.format(fmt, name);
        Object obj = Native.loadLibrary(s, clazz);
        return obj;
      } catch (UnsatisfiedLinkError ex) {
        linkError = ex;
      }
    }

    if (reqLib) {
      throw new UnsatisfiedLinkError(
          String.format(
              "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with " +
              "-Djna.library.path=%s. Last error:%s",
              name, System.getProperty("jna.library.path"), linkError));
    } else {
      System.out.println(String.format(
          "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with " +
          "-Djna.library.path=%s. Last error:%s",
          name, System.getProperty("jna.library.path"), linkError));
      return null;
    }
  }

  /** Returns the singleton, preloading all platform libraries on first use. */
  public static synchronized LibraryLoader getInstance() {
    if (null == instance) {
      instance = new LibraryLoader();
      instance.preLoadLibs();
    }
    return instance;
  }
}

View File

@@ -1,62 +0,0 @@
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
Part of the Processing project - http://processing.org
Copyright (c) 2011-12 Ben Fry and Casey Reas
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
*/
package processing.video;
import java.net.URL;
import com.sun.jna.Platform;
class LibraryPath {
  // This method returns the folder inside which the gstreamer library folder
  // is located. It works by locating this class's own .class resource URL
  // and stripping everything from "video.jar" onward; on Linux (system-wide
  // gstreamer) and on any failure it returns the empty string.
  String get() {
    URL url = this.getClass().getResource("LibraryPath.class");
    if (url != null) {
      // Convert URL to string, taking care of spaces represented by the "%20"
      // string.
      String path = url.toString().replace("%20", " ");
      int n0 = path.indexOf('/');
      int n1 = -1;

      if (Platform.isLinux()) {
        // Linux relies on the system-installed gstreamer, so no local path.
        return "";
      } else {
        n1 = path.indexOf("video.jar");
        if (Platform.isWindows()) {
          // In Windows, path string starts with "jar file/C:/..."
          // so the substring up to the first / is removed.
          n0++;
        }
      }

      if ((-1 < n0) && (-1 < n1)) {
        return path.substring(n0, n1);
      } else {
        return "";
      }
    }
    return "";
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,229 +0,0 @@
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
Part of the Processing project - http://processing.org
Copyright (c) 2011-12 Ben Fry and Casey Reas
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
*/
package processing.video;
import org.gstreamer.*;
import processing.core.PApplet;
import processing.core.PConstants;
import java.io.File;
import java.nio.ByteOrder;
import java.util.List;
/**
* This class contains some basic functions used by the rest of the classes in
* this library.
*/
/**
 * This class contains some basic functions used by the rest of the classes in
 * this library: GStreamer initialization, native library/plugin path
 * resolution, and a few time/pixel-format conversion helpers.
 */
public class Video implements PConstants {
  // Path that the video library will use to load the GStreamer base libraries
  // and plugins from. They can be passed from the application using the
  // gstreamer.library.path and gstreamer.plugin.path system variables (see
  // comments in initImpl() below).
  protected static String gstreamerLibPath = "";
  protected static String gstreamerPluginPath = "";

  // Direct buffer pass enabled by default. With this mode enabled, no new
  // buffers are created and disposed by the GC in each frame (thanks to Octavi
  // Estape for suggesting this improvement) which should help performance in
  // most situations.
  protected static boolean passDirectBuffer = true;

  // OpenGL texture used as buffer sink by default, when the renderer is
  // GL-based. This can improve performance significantly, since the video
  // frames are automatically copied into the texture without passing through
  // the pixels arrays, as well as having the color conversion into RGBA handled
  // natively by GStreamer.
  protected static boolean useGLBufferSink = true;

  protected static boolean defaultGLibContext = false;

  // Number of Video objects created so far; GStreamer is initialized lazily
  // when the first one appears.
  protected static long INSTANCES_COUNT = 0;

  // Bitness (32/64) of the running JVM, used to pick the native folder.
  protected static int bitsJVM;
  static {
    bitsJVM = PApplet.parseInt(System.getProperty("sun.arch.data.model"));
  }


  /**
   * Initializes GStreamer on first use and keeps a count of live instances.
   */
  static protected void init() {
    if (INSTANCES_COUNT == 0) {
      initImpl();
    }
    INSTANCES_COUNT++;
  }


  /**
   * Tears GStreamer down (unloading all plugins) and re-initializes it.
   */
  static protected void restart() {
    removePlugins();
    Gst.deinit();
    initImpl();
  }


  /**
   * Resolves the GStreamer library/plugin paths and initializes GStreamer.
   */
  static protected void initImpl() {
    // The location of the GStreamer base libraries can be passed from the
    // application to the video library via a system variable. In Eclipse, add
    // to "VM Arguments" in "Run Configurations" the following line:
    // -Dgstreamer.library.path=path
    String libPath = System.getProperty("gstreamer.library.path");
    if (libPath != null) {
      gstreamerLibPath = libPath;

      // If the GStreamer installation referred by gstreamer.library.path is not
      // a system installation, then the path containing the plugins needs to be
      // specified separately, otherwise the plugins will be automatically
      // loaded from the default location. The system property for the plugin
      // path is "gstreamer.plugin.path"
      String pluginPath = System.getProperty("gstreamer.plugin.path");
      if (pluginPath != null) {
        gstreamerPluginPath = pluginPath;
      }
    } else {
      // Paths are built automatically from the current location of the video
      // library.
      if (PApplet.platform == LINUX) {
        buildLinuxPaths();
      } else if (PApplet.platform == WINDOWS) {
        buildWindowsPaths();
      } else if (PApplet.platform == MACOSX) {
        buildMacOSXPaths();
      }
    }

    if (!gstreamerLibPath.isEmpty()) {
      System.setProperty("jna.library.path", gstreamerLibPath);
    }

    if (PApplet.platform == WINDOWS) {
      // On Windows the GStreamer DLLs must be pre-loaded explicitly.
      LibraryLoader loader = LibraryLoader.getInstance();
      if (loader == null) {
        System.err.println("Cannot load local version of GStreamer libraries.");
      }
    }

    String[] args = { "" };
    Gst.setUseDefaultContext(defaultGLibContext);
    Gst.init("Processing core video", args);

    addPlugins();
  }


  /**
   * Registers the plugin folder (if any) with the GStreamer registry.
   */
  static protected void addPlugins() {
    if (!gstreamerPluginPath.isEmpty()) {
      Registry reg = Registry.getDefault();
      boolean res;
      res = reg.scanPath(gstreamerPluginPath);
      if (!res) {
        System.err.println("Cannot load GStreamer plugins from " +
                           gstreamerPluginPath);
      }
    }
  }


  /**
   * Removes every plugin currently known to the GStreamer registry.
   */
  static protected void removePlugins() {
    Registry reg = Registry.getDefault();
    List<Plugin> list = reg.getPluginList();
    for (Plugin plg : list) {
      reg.removePlugin(plg);
    }
  }


  static protected void buildLinuxPaths() {
    // Empty paths mean the system-wide GStreamer install is used.
    gstreamerLibPath = "";
    gstreamerPluginPath = "";
  }


  static protected void buildWindowsPaths() {
    LibraryPath libPath = new LibraryPath();
    String path = libPath.get();
    gstreamerLibPath = buildGStreamerLibPath(path, "\\windows" + bitsJVM);
    gstreamerPluginPath = gstreamerLibPath + "\\plugins";
  }


  static protected void buildMacOSXPaths() {
    LibraryPath libPath = new LibraryPath();
    String path = libPath.get();
    gstreamerLibPath = buildGStreamerLibPath(path, "/macosx" + bitsJVM);
    gstreamerPluginPath = gstreamerLibPath + "/plugins";
  }


  /**
   * Returns base + os if that folder exists, otherwise falls back to base.
   */
  static protected String buildGStreamerLibPath(String base, String os) {
    File path = new File(base + os);
    if (path.exists()) {
      return base + os;
    } else {
      return base;
    }
  }


  /**
   * Converts a duration in nanoseconds to (fractional) seconds.
   */
  static protected float nanoSecToSecFrac(float nanosec) {
    // Single division by 1E9 replaces the old loop that divided by 1E3
    // three times, avoiding two intermediate rounding steps.
    return nanosec / 1E9f;
  }


  /**
   * Converts a duration in seconds to whole nanoseconds.
   */
  static protected long secToNanoLong(float sec) {
    // Truncate the double product directly; the previous new Float(...)
    // boxing narrowed to float first, needlessly losing precision.
    return (long) (sec * 1E9);
  }


  /**
   * Reorders an OpenGL pixel array (RGBA) into ARGB. The array must be
   * of size width * height.
   * @param pixels int[]
   */
  static protected void convertToARGB(int[] pixels, int width, int height) {
    int t = 0;
    int p = 0;
    if (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN) {
      // RGBA to ARGB conversion: shifting RGB 8 bits to the right,
      // and placing A 24 bits to the left.
      for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
          int pixel = pixels[p++];
          pixels[t++] = (pixel >>> 8) | ((pixel << 24) & 0xFF000000);
        }
      }
    } else {
      // We have to convert ABGR into ARGB, so R and B must be swapped,
      // A and G just brought back in.
      for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
          int pixel = pixels[p++];
          pixels[t++] = ((pixel & 0xFF) << 16) | ((pixel & 0xFF0000) >> 16) |
                        (pixel & 0xFF00FF00);
        }
      }
    }
  }
}

View File

@@ -1,5 +1,22 @@
0228 pde
_ shouldn't write sketch.properties unless it's a non-default mode
_ https://github.com/processing/processing/issues/2531
_ huge i18n patch
_ https://github.com/processing/processing/pull/2084
_ make ant fail when trying to delete JRE files that don't exist
_ some aren't being removed properly
earlier
X for() loop with nothing inside parens crashes Auto Format
X https://github.com/processing/processing/issues/2141
gsoc
_ `return` keyword not treated as such when followed by a bracket
_ https://github.com/processing/processing/issues/2099
_ IllegalArgumentException when clicking between editor windows
_ https://github.com/processing/processing/issues/2530
_ "String index out of range" error
_ https://github.com/processing/processing/issues/1940
medium
_ possible to open a sketch multiple times
@@ -42,8 +59,6 @@ _ the Find window (also the save windows) also have the same problem
_ move old Google Code SVN back to processing.org
_ then cull out the old branches/tags from the Github repo
_ and/or start bundling separate source downloads
_ "String index out of range" error
_ https://github.com/processing/processing/issues/1940
_ look through all isPopupTrigger() code
_ make sure both press/release are implemented
_ emacs style errors in commander aren't quite right
@@ -692,8 +707,6 @@ _ update will update classes from shared in the current folder
TOOLS / Auto Format
_ for() loop with nothing inside parens crashes Auto Format
_ https://github.com/processing/processing/issues/2141
_ extra indent found
_ https://github.com/processing/processing/issues/1041
_ Switch block cases not indented
@@ -861,6 +874,8 @@ find YOUR_APP/Contents/ -type f \
DIST / Linux
_ Processing is named processing-app-Base in Gnome 3
_ https://github.com/processing/processing/issues/2534
_ how to run "headless" from user Batuff
_ sudo apt-get install xvfb
_ Xvfb :2 -screen 0 1024x768x24 &