new gsvideo library

This commit is contained in:
benfry
2011-06-18 16:27:28 +00:00
parent 4dac2e0a71
commit 7d7db7d0d1
56 changed files with 6561 additions and 1 deletions

View File

@@ -0,0 +1,38 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import com.sun.jna.Library;
import com.sun.jna.Native;
/**
* This JNA interface provides access to the environment variable-related functions in the C library.
* How to use:
* CLibrary clib = CLibrary.INSTANCE;
* String s = clib.getenv("DYLD_LIBRARY_PATH");
*/
public interface CLibrary extends Library {
  // Shared binding to the standard C library ("c"), created once at class load.
  CLibrary INSTANCE = (CLibrary)Native.loadLibrary("c", CLibrary.class);

  // Maps to C setenv(3): sets name=value; if overwrite is nonzero an existing
  // value is replaced. Returns 0 on success, -1 on error.
  int setenv(String name, String value, int overwrite);

  // Maps to C getenv(3): returns the value of the variable, or null if unset.
  String getenv(String name);

  // Maps to C unsetenv(3): removes the variable. Returns 0 on success.
  int unsetenv(String name);

  // Maps to C putenv(3): takes a single "NAME=value" string.
  int putenv(String string);
}

View File

@@ -0,0 +1,824 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import processing.core.*;
import java.nio.*;
import java.util.ArrayList;
import java.lang.reflect.*;
import org.gstreamer.*;
import org.gstreamer.Buffer;
import org.gstreamer.elements.*;
import org.gstreamer.interfaces.PropertyProbe;
import org.gstreamer.interfaces.Property;
/**
* Class for storing and manipulating video frames from an attached capture
* device such as a camera.
*/
public class GSCapture extends PImage implements PConstants {
protected String source;
protected boolean playing = false;
protected boolean paused = false;
protected String fps;
protected int bufWidth;
protected int bufHeight;
protected Pipeline gpipeline;
protected Element gsource;
protected Method captureEventMethod;
protected Method copyBufferMethod;
protected Object eventHandler;
protected Object copyHandler;
protected boolean available;
protected boolean pipelineReady;
protected RGBDataAppSink rgbSink = null;
protected int[] copyPixels = null;
protected BufferDataAppSink natSink = null;
protected Buffer natBuffer = null;
protected boolean copyBufferMode = false;
protected String copyMask;
protected boolean firstFrame = true;
protected ArrayList<int[]> suppResList;
protected ArrayList<String> suppFpsList;
protected int reqWidth;
protected int reqHeight;
/**
 * Basic constructor: tries to auto-detect all the capture parameters
 * (source element, camera, framerate), with the exception of the resolution.
 */
public GSCapture(PApplet parent, int requestWidth, int requestHeight) {
  super(requestWidth, requestHeight, RGB);
  // Empty property arrays and empty framerate string: the platform-default
  // source element is used with its own defaults.
  initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
               new String[] {}, new String[] {}, "");
}

/**
 * Constructor that takes resolution and framerate indicated as a single number
 * (converted internally to the fraction string frameRate/1).
 */
public GSCapture(PApplet parent, int requestWidth, int requestHeight, int frameRate) {
  super(requestWidth, requestHeight, RGB);
  initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
               new String[] {}, new String[] {}, frameRate + "/1");
}

/**
 * This constructor allows specifying the camera name. In Linux, for example, this
 * should be a string of the form /dev/video0, /dev/video1, etc.
 * The property name used to select the camera is platform-dependent
 * (see devicePropertyName()).
 */
public GSCapture(PApplet parent, int requestWidth, int requestHeight, String cameraName) {
  super(requestWidth, requestHeight, RGB);
  initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
               new String[] { devicePropertyName() }, new String[] { cameraName }, "");
}

/**
 * This constructor allows specifying both the camera name and the desired framerate.
 */
public GSCapture(PApplet parent, int requestWidth, int requestHeight, int frameRate,
                 String cameraName) {
  super(requestWidth, requestHeight, RGB);
  initPlatform(parent, requestWidth, requestHeight, new String[] {}, new int[] {},
               new String[] { devicePropertyName() }, new String[] { cameraName },
               frameRate + "/1");
}

/**
 * This constructor lets the caller indicate which source element to use (i.e.: v4l2src,
 * osxvideosrc, dshowvideosrc, ksvideosrc, etc). Note that this bypasses the
 * platform auto-detection done by initPlatform() and calls initGStreamer() directly.
 */
public GSCapture(PApplet parent, int requestWidth, int requestHeight, int frameRate,
                 String sourceName, String cameraName) {
  super(requestWidth, requestHeight, RGB);
  initGStreamer(parent, requestWidth, requestHeight, sourceName, new String[] {}, new int[] {},
                new String[] { devicePropertyName() }, new String[] { cameraName },
                frameRate + "/1");
}

/**
 * This constructor accepts an arbitrary list of string properties for the source element.
 * The camera name could be one of these properties. The framerate must be specified
 * as a fraction string: 30/1, 15/2, etc.
 */
public GSCapture(PApplet parent, int requestWidth, int requestHeight, String frameRate,
                 String sourceName, String[] strPropNames, String[] strPropValues) {
  super(requestWidth, requestHeight, RGB);
  initGStreamer(parent, requestWidth, requestHeight, sourceName, new String[] {}, new int[] {},
                strPropNames, strPropValues, frameRate);
}

/**
 * This constructor accepts an arbitrary list of string properties for the source element,
 * as well as a list of integer properties. This could be useful if a camera cannot be
 * specified by name but by index. Framerate must be a fraction string: 30/1, 15/2, etc.
 */
public GSCapture(PApplet parent, int requestWidth, int requestHeight, String frameRate,
                 String sourceName, String[] strPropNames, String[] strPropValues,
                 String[] intPropNames, int[] intPropValues) {
  super(requestWidth, requestHeight, RGB);
  initGStreamer(parent, requestWidth, requestHeight, sourceName, intPropNames, intPropValues,
                strPropNames, strPropValues, frameRate);
}
/**
 * Releases the gstreamer resources associated to this capture object.
 * It shouldn't be used after this.
 */
public void delete() {
  if (gpipeline != null) {
    try {
      if (gpipeline.isPlaying()) {
        gpipeline.stop();
      }
    } catch (IllegalStateException e) {
      // The underlying native object may already be disposed; not fatal.
      System.err.println("error when deleting player, maybe some native resource is already disposed");
    } catch (Exception e) {
      e.printStackTrace();
    }
    // Drop pixel storage so the GC can reclaim the frame buffers.
    pixels = null;
    copyPixels = null;
    if (rgbSink != null) {
      // Detach the listener first so no callback arrives while disposing.
      rgbSink.removeListener();
      rgbSink.dispose();
      rgbSink = null;
    }
    natBuffer = null;
    if (natSink != null) {
      natSink.removeListener();
      natSink.dispose();
      natSink = null;
    }
    // Dispose the pipeline last, after all sinks are torn down.
    gpipeline.dispose();
    gpipeline = null;
  }
}
/**
 * Same as delete. Called by Processing through the registerDispose()
 * mechanism when the sketch shuts down (see initGStreamer()).
 */
public void dispose() {
  delete();
}
/**
 * Sets the object to use as destination for the frames read from the stream.
 * The color conversion mask is automatically set to the one required to
 * copy the frames to OpenGL, depending on the native byte order.
 *
 * @param dest object that will receive the frame buffers
 */
public void setPixelDest(Object dest) {
  copyHandler = dest;
  boolean littleEndian = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN;
  copyMask = littleEndian
      ? "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00"
      : "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
}
/**
 * Sets the object to use as destination for the frames read from the stream,
 * using an explicit color conversion mask (a gstreamer caps fragment such as
 * "red_mask=(int)0xFF, ...").
 *
 * @param dest object that will receive the frame buffers
 * @param mask caps string fragment selecting the channel masks
 */
public void setPixelDest(Object dest, String mask) {
  copyHandler = dest;
  copyMask = mask;
}
/**
 * Uses a generic object as handler of the capture stream. This object should
 * have a captureEvent method that receives a GSCapture argument; it will be
 * called upon each new frame read event. By default the constructors install
 * the parent applet itself as the handler.
 *
 * @param obj handler object implementing captureEvent(GSCapture)
 */
public void setEventHandlerObject(Object obj) {
  eventHandler = obj;
  try {
    // Look the method up on the handler object itself, not on the parent
    // applet: the Method obtained here is later invoked on eventHandler
    // (see invokeEvent()), so both must refer to the same class. The
    // previous code used parent.getClass(), which broke custom handlers
    // other than the sketch itself.
    captureEventMethod = obj.getClass().getMethod("captureEvent",
      new Class[] { GSCapture.class });
  } catch (Exception e) {
    // no such method, or an error.. which is fine, just ignore
  }
}
/**
 * Returns true if the stream is already producing frames, i.e. the pipeline
 * has been built and at least one frame has reported its dimensions.
 *
 * @return boolean
 */
public boolean ready() {
  return 0 < bufWidth && 0 < bufHeight && pipelineReady;
}

/**
 * Returns "true" when a new video frame is available to read.
 * Cleared by read(), set by invokeEvent().
 *
 * @return boolean
 */
public boolean available() {
  return available;
}

/**
 * Returns whether the stream is playing or not.
 *
 * @return boolean
 */
public boolean isPlaying() {
  return playing;
}

/**
 * Returns whether the stream is paused or not.
 *
 * @return boolean
 */
public boolean isPaused() {
  return paused;
}
/**
 * Resumes the capture pipeline, lazily building it on the first call.
 */
public void play() {
  boolean init = false;
  if (!pipelineReady) {
    // First play(): the pipeline is constructed on demand so that
    // setPixelDest() can still be called after the constructor.
    initPipeline();
    init = true;
  }
  playing = true;
  paused = false;
  gpipeline.play();
  if (init) {
    // Resolution and FPS initialization needs to be done after the
    // pipeline is set to play.
    initResAndFps();
  }
}
/**
 * Stops the capture pipeline (pauses it; play() resumes).
 */
public void pause() {
  playing = false;
  paused = true;
  gpipeline.pause();
}
/**
 * Reads the current video frame.
 *
 * This method() and invokeEvent() are now synchronized, so that invokeEvent()
 * can't be called whilst we're busy reading. Problematic frame error
 * fixed by Charl P. Botha <charlbotha.com>
 */
public synchronized void read() {
  // We loadPixels() first to ensure that at least we always have a non-null
  // pixels array, even if without any valid image inside.
  loadPixels();
  if (copyBufferMode) {
    // The native buffer from gstreamer is copied to the destination object.
    if (natBuffer == null || copyBufferMethod == null) {
      return;
    }
    if (firstFrame) {
      // Re-init the PImage with the actual buffer size reported by the
      // pipeline, which may differ from the requested size.
      super.init(bufWidth, bufHeight, RGB);
      loadPixels();
      firstFrame = false;
    }
    IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
    try {
      copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
    } catch (Exception e) {
      e.printStackTrace();
    }
    natBuffer = null;
  } else {
    if (copyPixels == null) {
      return;
    }
    if (firstFrame) {
      super.init(bufWidth, bufHeight, RGB);
      loadPixels();
      firstFrame = false;
    }
    // Swap the arrays instead of copying: copyPixels becomes the scratch
    // buffer for the next invokeEvent() fill.
    int[] temp = pixels;
    pixels = copyPixels;
    updatePixels();
    copyPixels = temp;
  }
  available = false;
}
/**
 * Returns a list with the resolutions supported by the capture device.
 * Each element of the list is in turn an array of two int, first being
 * the width and second the height. The returned arrays are copies, so
 * callers may modify them freely.
 *
 * @return int[][]
 */
public int[][] resolutions() {
  int count = suppResList.size();
  int[][] result = new int[count][];
  for (int idx = 0; idx < count; idx++) {
    int[] pair = suppResList.get(idx);
    result[idx] = new int[] { pair[0], pair[1] };
  }
  return result;
}
/**
 * Returns a list with the framerates supported by the capture device,
 * expressed as a string like: 30/1, 15/2, etc.
 *
 * @return String[]
 */
public String[] framerates() {
  // toArray() with a sized array yields exactly the same elements in the
  // same order as copying them one by one.
  return suppFpsList.toArray(new String[suppFpsList.size()]);
}
/**
 * Returns a list of available capture devices, querying the default
 * source element of the current platform.
 *
 * @return String[] device names, or null on an unrecognized platform
 */
static public String[] list() {
  if (PApplet.platform == LINUX) {
    return list("v4l2src");
  } else if (PApplet.platform == WINDOWS) {
    return list("dshowvideosrc");
  } else if (PApplet.platform == MACOSX) {
    return list("osxvideosrc");
  } else {
    // Unknown platform: no default source element to query.
    return null;
  }
}
/**
 * Get a list of all available captures as a String array. i.e.
 * println(Capture.list()) will show you the goodies.
 * Uses the platform-specific device property name to probe the element.
 *
 * @param sourceName String
 * @return String[]
 */
static public String[] list(String sourceName) {
  return list(sourceName, devicePropertyName());
}
/**
 * Probes the given source element for the possible values of the given
 * property (typically the device name), returning them as strings.
 * Returns an empty array when the element cannot be probed.
 *
 * @param sourceName   gstreamer source element name (e.g. "v4l2src")
 * @param propertyName property to probe (e.g. "device")
 * @return String[] never null; non-String values leave null entries
 */
static protected String[] list(String sourceName, String propertyName) {
  GSVideo.init();
  String[] listing = new String[0];
  Element videoSource = ElementFactory.make(sourceName, "Source");
  PropertyProbe probe = PropertyProbe.wrap(videoSource);
  if (probe == null) {
    return listing;
  }
  Property property = probe.getProperty(propertyName);
  if (property == null) {
    return listing;
  }
  Object[] values = probe.getValues(property);
  if (values == null) {
    return listing;
  }
  listing = new String[values.length];
  for (int i = 0; i < values.length; i++) {
    if (values[i] instanceof String) {
      listing[i] = (String) values[i];
    }
  }
  return listing;
}
/**
 * invokeEvent() and read() are synchronized so that they can not be
 * called simultaneously. when they were not synchronized, this caused
 * the infamous problematic frame crash.
 * found and fixed by Charl P. Botha <charlbotha.com>
 *
 * Called by the RGBDataAppSink listener with a decoded RGB frame.
 */
protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
  available = true;
  bufWidth = w;
  bufHeight = h;
  if (copyPixels == null) {
    copyPixels = new int[w * h];
  }
  buffer.rewind();
  try {
    buffer.get(copyPixels);
  } catch (BufferUnderflowException e) {
    // The buffer held fewer than w*h ints; discard this frame.
    e.printStackTrace();
    copyPixels = null;
    return;
  }
  // Creates a movieEvent.
  if (captureEventMethod != null) {
    try {
      captureEventMethod.invoke(eventHandler, new Object[] { this });
    } catch (Exception e) {
      System.err.println("error, disabling captureEvent() for capture object");
      e.printStackTrace();
      captureEventMethod = null;
    }
  }
}
// Called by the BufferDataAppSink listener in copy-buffer mode: the raw
// gstreamer Buffer is kept until read() hands it to the copy handler.
protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
  available = true;
  bufWidth = w;
  bufHeight = h;
  natBuffer = buffer;
  // Creates a movieEvent.
  if (captureEventMethod != null) {
    try {
      captureEventMethod.invoke(eventHandler, new Object[] { this });
    } catch (Exception e) {
      System.err.println("error, disabling captureEvent() for capture object");
      e.printStackTrace();
      captureEventMethod = null;
    }
  }
}
/**
 * Returns the name of the source element used for capture
 * (e.g. "v4l2src", "ksvideosrc", "osxvideosrc").
 *
 * @return String
 */
public String getSource() {
  return source;
}
// Tries to guess the best correct source elements for each platform,
// then delegates to initGStreamer() with the chosen element name.
protected void initPlatform(PApplet parent, int requestWidth, int requestHeight,
                            String[] intPropNames, int[] intPropValues,
                            String[] strPropNames, String[] strPropValues,
                            String frameRate) {
  if (PApplet.platform == LINUX) {
    initGStreamer(parent, requestWidth, requestHeight, "v4l2src", intPropNames, intPropValues,
                  strPropNames, strPropValues, frameRate);
  } else if (PApplet.platform == WINDOWS) {
    // ksvideosrc is used instead of dshowvideosrc here; the commented-out
    // call below is the dshow alternative.
    initGStreamer(parent, requestWidth, requestHeight, "ksvideosrc", intPropNames,
                  intPropValues, strPropNames, strPropValues, frameRate);
    //init(requestWidth, requestHeight, "dshowvideosrc", intPropNames,
    //     intPropValues, strPropNames, strPropValues, frameRate, addDecoder, null, "");
  } else if (PApplet.platform == MACOSX) {
    initGStreamer(parent, requestWidth, requestHeight, "osxvideosrc", intPropNames,
                  intPropValues, strPropNames, strPropValues, frameRate);
  } else {
    parent.die("Error: unrecognized platform.", null);
  }
}
// The main initialization here: creates the pipeline and the source element
// and applies the user-supplied properties. The sinks are NOT linked yet;
// that is deferred to initPipeline(), triggered by the first play().
protected void initGStreamer(PApplet parent, int requestWidth, int requestHeight, String sourceName,
                             String[] intPropNames, int[] intPropValues,
                             String[] strPropNames, String[] strPropValues, String frameRate) {
  this.parent = parent;
  GSVideo.init();
  // register methods
  parent.registerDispose(this);
  // The sketch itself is the default captureEvent() handler.
  setEventHandlerObject(parent);
  gpipeline = new Pipeline("GSCapture");
  this.source = sourceName;
  fps = frameRate;
  reqWidth = requestWidth;
  reqHeight = requestHeight;
  gsource = ElementFactory.make(sourceName, "Source");
  if (intPropNames.length != intPropValues.length) {
    parent.die("Error: number of integer property names is different from number of values.",
               null);
  }
  for (int i = 0; i < intPropNames.length; i++) {
    gsource.set(intPropNames[i], intPropValues[i]);
  }
  if (strPropNames.length != strPropValues.length) {
    parent.die("Error: number of string property names is different from number of values.",
               null);
  }
  for (int i = 0; i < strPropNames.length; i++) {
    gsource.set(strPropNames[i], strPropValues[i]);
  }
  bufWidth = bufHeight = 0;
  pipelineReady = false;
}
// Builds and links the sink side of the pipeline. Two modes:
// - copy-buffer mode: raw gstreamer Buffers are handed to the copyHandler
//   (which must expose addPixelsToBuffer() and setPixelSource());
// - RGB mode (default): frames are converted to 32-bpp RGB and delivered
//   to invokeEvent() as IntBuffers.
protected void initPipeline() {
  String fpsStr = "";
  if (!fps.equals("")) {
    // If the framerate string is empty we left the source element
    // to use the default value.
    fpsStr = ", framerate=" + fps;
  }
  if (copyHandler != null) {
    try {
      // Copy-buffer mode requires this exact method signature on the handler.
      copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
        new Class[] { Object.class, IntBuffer.class, int.class, int.class });
      copyBufferMode = true;
    } catch (Exception e) {
      // no such method, or an error.. which is fine, just ignore
      copyBufferMode = false;
    }
    if (copyBufferMode) {
      try {
        // Hand ourselves to the handler so it can dispose buffers back
        // through disposeBuffer().
        Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
        meth.invoke(copyHandler, new Object[] { this });
      } catch (Exception e) {
        copyBufferMode = false;
      }
      if (copyBufferMode) {
        String caps = "width=" + reqWidth + ", height=" + reqHeight + ", " + copyMask;
        natSink = new BufferDataAppSink("nat", caps,
          new BufferDataAppSink.Listener() {
            public void bufferFrame(int w, int h, Buffer buffer) {
              invokeEvent(w, h, buffer);
            }
          });
        // Buffers are disposed manually via disposeBuffer(), not by the sink.
        natSink.setAutoDisposeBuffer(false);
        // No need for rgbSink.dispose(), because the addMany() doesn't increment the
        // refcount of the videoSink object.
        gpipeline.addMany(gsource, natSink);
        Element.linkMany(gsource, natSink);
      }
    }
  }
  if (!copyBufferMode) {
    Element conv = ElementFactory.make("ffmpegcolorspace", "ColorConverter");
    Element videofilter = ElementFactory.make("capsfilter", "ColorFilter");
    videofilter.setCaps(new Caps("video/x-raw-rgb, width=" + reqWidth + ", height=" + reqHeight +
                                 ", bpp=32, depth=24" + fpsStr));
    rgbSink = new RGBDataAppSink("rgb",
      new RGBDataAppSink.Listener() {
        public void rgbFrame(int w, int h, IntBuffer buffer) {
          invokeEvent(w, h, buffer);
        }
      });
    // Setting direct buffer passing in the video sink, so no new buffers are created
    // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
    // out this one).
    rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
    // No need for rgbSink.dispose(), because the addMany() doesn't increment the
    // refcount of the videoSink object.
    gpipeline.addMany(gsource, conv, videofilter, rgbSink);
    Element.linkMany(gsource, conv, videofilter, rgbSink);
  }
  pipelineReady = true;
}
// Validates the requested resolution and framerate against what the device
// reports, printing warnings (but not failing) when they are unsupported.
protected void initResAndFps() {
  // The pipeline needs to be in playing state to be able to
  // report the supported resolutions and framerates of the
  // capture device.
  getSuppResAndFpsList();
  boolean suppRes = !(0 < suppResList.size()); // Default value is true if resolution list empty.
  for (int i = 0; i < suppResList.size(); i++) {
    int[] wh = (int[])suppResList.get(i);
    if (reqWidth == wh[0] && reqHeight == wh[1]) {
      suppRes = true;
      break;
    }
  }
  if (!suppRes) {
    System.err.println("The requested resolution of " + reqWidth + "x" + reqHeight + " is not supported by the capture device.");
    System.err.println("Use one of the following resolutions instead:");
    for (int i = 0; i < suppResList.size(); i++) {
      int[] wh = (int[])suppResList.get(i);
      System.err.println(wh[0] + "x" + wh[1]);
    }
  }
  boolean suppFps = !(0 < suppFpsList.size()); // Default value is true if fps list empty.
  for (int i = 0; i < suppFpsList.size(); i++) {
    String str = (String)suppFpsList.get(i);
    // An empty fps request means "device default", which is always accepted.
    if (fps.equals("") || fps.equals(str)) {
      suppFps = true;
      break;
    }
  }
  if (!suppFps) {
    System.err.println("The requested framerate of " + fps + " is not supported by the capture device.");
    System.err.println("Use one of the following framerates instead:");
    for (int i = 0; i < suppFpsList.size(); i++) {
      String str = (String)suppFpsList.get(i);
      System.err.println(str);
    }
  }
}
// Walks the caps of every source pad in the pipeline, collecting the
// supported resolutions (deduplicated) into suppResList and the supported
// framerates into suppFpsList. On Windows the framerates are parsed out of
// the caps' string representation; elsewhere the typed Fraction API is used.
protected void getSuppResAndFpsList() {
  suppResList = new ArrayList<int[]>();
  suppFpsList = new ArrayList<String>();
  for (Element src : gpipeline.getSources()) {
    for (Pad pad : src.getPads()) {
      Caps caps = pad.getCaps();
      int n = caps.size();
      for (int i = 0; i < n; i++) {
        Structure str = caps.getStructure(i);
        int w = ((Integer)str.getValue("width")).intValue();
        int h = ((Integer)str.getValue("height")).intValue();
        boolean newRes = true;
        // Making sure we didn't add this resolution already.
        // Different caps could have same resolution.
        for (int j = 0; j < suppResList.size(); j++) {
          int[] wh = (int[])suppResList.get(j);
          if (w == wh[0] && h == wh[1]) {
            newRes = false;
            break;
          }
        }
        if (newRes) {
          suppResList.add(new int[] {w, h});
        }
        if (PApplet.platform == WINDOWS) {
          // In Windows the getValueList() method doesn't seem to
          // return a valid list of fraction values, so working on
          // the string representation of the caps structure.
          String str2 = str.toString();
          int n0 = str2.indexOf("framerate=(fraction)");
          if (-1 < n0) {
            // 20 == "framerate=(fraction)".length(); skip past the marker.
            String temp = str2.substring(n0 + 20, str2.length());
            int n1 = temp.indexOf("[");
            int n2 = temp.indexOf("]");
            if (-1 < n1 && -1 < n2) {
              // A list of fractions enclosed between '[' and ']'
              temp = temp.substring(n1 + 1, n2);
              String[] fractions = temp.split(",");
              for (int k = 0; k < fractions.length; k++) {
                addFpsStr(fractions[k].trim());
              }
            } else {
              // A single fraction, terminated by either ',' or ';'
              // (whichever comes first).
              int n3 = temp.indexOf(",");
              int n4 = temp.indexOf(";");
              if (-1 < n3 || -1 < n4) {
                int n5 = -1;
                if (n3 == -1) {
                  n5 = n4;
                } else if (n4 == -1) {
                  n5 = n3;
                } else {
                  n5 = PApplet.min(n3, n4);
                }
                temp = temp.substring(0, n5);
                addFpsStr(temp.trim());
              }
            }
          }
        } else {
          boolean sigleFrac = false;
          try {
            // First try reading a single fraction value; an exception here
            // means the framerate field holds a list instead.
            Fraction fr = str.getFraction("framerate");
            addFps(fr);
            sigleFrac = true;
          } catch (Exception e) {
          }
          if (!sigleFrac) {
            ValueList flist = str.getValueList("framerate");
            // All the framerates are put together, but this is not
            // entirely accurate since there might be some of them
            // that work only for certain resolutions.
            for (int k = 0; k < flist.getSize(); k++) {
              Fraction fr = flist.getFraction(k);
              addFps(fr);
            }
          }
        }
      }
    }
  }
}
// Adds a Fraction to the supported-framerate list, formatted as "num/den".
protected void addFps(Fraction fr) {
  addFpsStr(fr.numerator + "/" + fr.denominator);
}
// Adds a framerate string (e.g. "30/1") to suppFpsList unless it is
// already present. List.contains() performs the same String.equals()
// scan the original explicit loop did.
protected void addFpsStr(String frstr) {
  if (!suppFpsList.contains(frstr)) {
    suppFpsList.add(frstr);
  }
}
// Name of the source-element property used to select a camera by name.
// TODO: Check the property names ("device" on Linux/Mac is unconfirmed).
static protected String devicePropertyName() {
  if (PApplet.platform == WINDOWS) {
    return "device-name";
  }
  if (PApplet.platform == LINUX || PApplet.platform == MACOSX) {
    return "device";
  }
  return "";
}
// Name of the source-element property used to select a camera by index.
// TODO: Check the property names (probably not correct on Linux/Mac).
static protected String indexPropertyName() {
  boolean knownPlatform = PApplet.platform == LINUX
      || PApplet.platform == WINDOWS
      || PApplet.platform == MACOSX;
  return knownPlatform ? "device-index" : "";
}
// Disposes a native gstreamer Buffer previously handed out in copy-buffer
// mode (auto-dispose is off, see initPipeline()). Called back by the
// copy handler; synchronized against read()/invokeEvent().
public synchronized void disposeBuffer(Object buf) {
  ((Buffer)buf).dispose();
}
}

View File

@@ -0,0 +1,263 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import java.util.HashMap;
import java.util.Map;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.Platform;
// Library loader class by Tal Shalif
public class GSLibraryLoader {
  // Marker interface used when a native library only needs to be loaded
  // into the process (for its dependents), not actually called from Java.
  public interface DummyLibrary extends Library {
  }

  // Lazily-created singleton (see getInstance()).
  private static GSLibraryLoader instance;

  // Dependency tables. Each row is { library base name, String[] of names it
  // depends on, Boolean required }. Required libraries abort loading with an
  // exception when missing; optional ones only log.
  // These dependencies correspond to gstreamer-winbuilds 0.10.6
  static final Object[][] WIN32_DEPENDENCIES = {
      { "SDL", new String[] {}, false }, { "glew32", new String[] {}, false },
      { "iconv-2", new String[] {}, false },
      { "liba52-0", new String[] {}, false },
      { "libbz2", new String[] {}, false },
      { "libcairo-2", new String[] {}, false },
      { "libdca-0", new String[] {}, false },
      { "libdvdcss-2", new String[] {}, false },
      { "libdvdnav-4", new String[] {}, false },
      { "libdvdnavmini-4", new String[] {}, false },
      { "libdvdread-4", new String[] {}, false },
      { "libfaac-0", new String[] {}, false },
      { "libfaad-2", new String[] {}, false },
      { "libfontconfig-1", new String[] {}, false },
      { "libfreetype-6", new String[] {}, false },
      { "libgcrypt-11", new String[] {}, false },
      { "libgnutls-26", new String[] {}, false },
      { "libgnutls-extra-26", new String[] {}, false },
      { "libgnutls-openssl-26", new String[] {}, false },
      { "libgpg-error-0", new String[] {}, false },
      { "libid3tag-0", new String[] {}, false },
      { "libjpeg-8", new String[] {}, false },
      { "libmad-0", new String[] {}, false },
      { "libmms-0", new String[] {}, false },
      { "libmp3lame-0", new String[] {}, false },
      { "libmpeg2-0", new String[] {}, false },
      { "libmpeg2convert-0", new String[] {}, false },
      { "libneon-27", new String[] {}, false },
      { "libnice-0", new String[] {}, false },
      { "libogg-0", new String[] {}, false },
      { "liboil-0.3-0", new String[] {}, false },
      { "libopenjpeg-2", new String[] {}, false },
      { "libpango-1.0-0", new String[] {}, false },
      { "libpangocairo-1.0-0", new String[] {}, false },
      { "libpangoft2-1.0-0", new String[] {}, false },
      { "libpangowin32-1.0-0", new String[] {}, false },
      { "libpixman-1-0", new String[] {}, true },
      { "libpng14-14", new String[] {}, false },
      { "liborc-0.4-0", new String[] {}, false },
      { "libschroedinger-1.0-0", new String[] {}, false },
      { "libsoup-2.4-1", new String[] {}, false },
      { "libspeex-1", new String[] {}, false },
      { "libtheora-0", new String[] {}, false },
      { "libtheoradec-1", new String[] {}, false },
      { "libtheoraenc-1", new String[] {}, false },
      { "libvorbis-0", new String[] {}, false },
      { "libvorbisenc-2", new String[] {}, false },
      { "libvorbisfile-3", new String[] {}, false },
      { "libwavpack-1", new String[] {}, false },
      { "libx264-107", new String[] {}, false },
      { "libxml2-2", new String[] {}, false },
      { "pthreadGC2", new String[] {}, false },
      { "xvidcore", new String[] {}, false },
      { "z", new String[] {}, false },
      { "avutil-gpl-50", new String[] {}, false },
      { "avformat-gpl-52", new String[] {}, false },
      { "avcodec-gpl-52", new String[] {}, false },
      { "swscale-gpl-0", new String[] {}, false },
      { "libcelt-0", new String[] {}, false },
      { "libgdk_pixbuf-2.0-0", new String[] {}, false },
      { "librsvg-2-2", new String[] {}, false },
      { "libflac-8", new String[] {}, false },
      { "gio-2.0", new String[] {}, true },
      { "glib-2.0", new String[] {}, true },
      { "gmodule-2.0", new String[] {}, true },
      { "gobject-2.0", new String[] {}, true },
      { "gthread-2.0", new String[] {}, true },
      { "gstapp-0.10", new String[] {}, true },
      { "gstaudio-0.10", new String[] {}, true },
      { "gstbase-0.10", new String[] {}, true },
      { "gstcdda-0.10", new String[] {}, true },
      { "gstcontroller-0.10", new String[] {}, true },
      { "gstdataprotocol-0.10", new String[] {}, true },
      { "gstfarsight-0.10", new String[] {}, true },
      { "gstfft-0.10", new String[] {}, true },
      { "gstgl-0.10", new String[] {}, true },
      { "gstinterfaces-0.10", new String[] {}, true },
      { "gstnet-0.10", new String[] {}, true },
      { "gstnetbuffer-0.10", new String[] {}, true },
      { "gstpbutils-0.10", new String[] {}, true },
      { "gstphotography-0.10", new String[] {}, true },
      { "gstreamer-0.10", new String[] {}, true },
      { "gstriff-0.10", new String[] {}, true },
      { "gstrtp-0.10", new String[] {}, true },
      { "gstrtsp-0.10", new String[] {}, true },
      { "gstsdp-0.10", new String[] {}, true },
      { "gsttag-0.10", new String[] {}, true },
      { "gstvideo-0.10", new String[] {}, true },
      { "gstbasevideo-0.10", new String[] {}, true } };

  static final Object[][] OSX_DEPENDENCIES = {
      { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
      { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true } };

  static final Object[][] DEFAULT_DEPENDENCIES = {
      { "gstreamer-0.10", new String[] {}, true },
      { "gstbase-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstinterfaces-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstcontroller-0.10", new String[] { "gstreamer-0.10" }, true },
      { "gstaudio-0.10", new String[] { "gstbase-0.10" }, true },
      { "gstvideo-0.10", new String[] { "gstbase-0.10" }, true }, };

  // Table selected once per JVM based on the host OS.
  static final Object[][] dependencies = Platform.isWindows() ? WIN32_DEPENDENCIES
      : Platform.isMac() ? OSX_DEPENDENCIES : DEFAULT_DEPENDENCIES;

  // Cache of already-loaded libraries, keyed by base name.
  private static final Map<String, Object> loadedMap = new HashMap<String, Object>();

  // Guard against dependency cycles in the tables above.
  private static final int RECURSIVE_LOAD_MAX_DEPTH = 5;

  // Singleton: use getInstance().
  private GSLibraryLoader() {
  }

  // Loads every library in the platform dependency table, in table order.
  private void preLoadLibs() {
    for (Object[] a : dependencies) {
      load(a[0].toString(), DummyLibrary.class, true, 0, (Boolean) a[2]);
    }
  }

  // Returns the declared dependencies of the named library, or an empty
  // array when the name is not in the table.
  private String[] findDeps(String name) {
    for (Object[] a : dependencies) {
      if (name.equals(a[0])) {
        return (String[]) a[1];
      }
    }
    return new String[] {}; // library dependency load chain unspecified -
                            // probably client call
  }
// Public entry point: loads the named library (and its table-declared
// dependencies), binding it to the given JNA interface. reqLib controls
// whether failure is fatal.
public Object load(String name, Class<?> clazz, boolean reqLib) {
  return load(name, clazz, true, 0, reqLib);
}
/**
 * Recursively loads a library: first its dependencies from the platform
 * table, then the library itself, caching the result in loadedMap.
 *
 * @param name        library base name
 * @param clazz       JNA interface class to bind the library to
 * @param forceReload when true, (re)load even if already cached
 * @param depth       current recursion depth; bounded by RECURSIVE_LOAD_MAX_DEPTH
 * @param reqLib      when true, a load failure throws; otherwise it only logs
 * @return the bound library object, or null for a missing optional library
 */
private Object load(String name, Class<?> clazz, boolean forceReload,
                    int depth, boolean reqLib) {
  assert depth < RECURSIVE_LOAD_MAX_DEPTH : String.format(
      "recursive max load depth %s has been exceeded", depth);
  Object library = loadedMap.get(name);
  if (null == library || forceReload) {
    try {
      // Dependencies must be present in the process before the library itself.
      String[] deps = findDeps(name);
      for (String lib : deps) {
        load(lib, DummyLibrary.class, false, depth + 1, reqLib);
      }
      library = loadLibrary(name, clazz, reqLib);
      if (library != null) {
        loadedMap.put(name, library);
      }
    } catch (Exception e) {
      // Bug fix: the original code passed 'e' as an extra (ignored)
      // String.format argument, so the underlying cause was silently
      // dropped. Attach it as the exception cause / include it in the log.
      if (reqLib)
        throw new RuntimeException(String.format("can not load library %s",
            name), e);
      else
        System.out.println(String.format("can not load library %s (%s)", name, e));
    }
  }
  return library;
}
// Attempts to load a single native library, trying several platform-specific
// file-name formats ("libfoo", "libfoo-0", "foo" on Windows; "foo-0", "foo"
// elsewhere). Returns the bound library, throws (required) or returns null
// (optional) when every format fails.
// Note: the %1$s specifiers in the error message re-reference the first
// argument (name); the remaining plain %s consume the path and the error.
private static Object loadLibrary(String name, Class<?> clazz, boolean reqLib) {
  // Logger.getAnonymousLogger().info(String.format("loading %s", name));
  String[] nameFormats;
  nameFormats = Platform.isWindows() ? new String[] { "lib%s", "lib%s-0",
      "%s" } : new String[] { "%s-0", "%s" };
  UnsatisfiedLinkError linkError = null;
  for (String fmt : nameFormats) {
    try {
      String s = String.format(fmt, name);
      //System.out.println("Trying to load library file " + s);
      Object obj = Native.loadLibrary(s, clazz);
      //System.out.println("Loaded library " + s + " succesfully!");
      return obj;
    } catch (UnsatisfiedLinkError ex) {
      // Remember only the last failure; it is reported below.
      linkError = ex;
    }
  }
  if (reqLib)
    throw new UnsatisfiedLinkError(
        String
            .format(
                "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with -Djna.library.path=%s. Last error:%s",
                name, System.getProperty("jna.library.path"), linkError));
  else {
    System.out
        .println(String
            .format(
                "can't load library %s (%1$s|lib%1$s|lib%1$s-0) with -Djna.library.path=%s. Last error:%s",
                name, System.getProperty("jna.library.path"), linkError));
    return null;
  }
}
// Synchronized lazy singleton accessor; the first call also preloads the
// whole platform dependency table.
public static synchronized GSLibraryLoader getInstance() {
  if (null == instance) {
    instance = new GSLibraryLoader();
    instance.preLoadLibs();
  }
  return instance;
}
}

View File

@@ -0,0 +1,64 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import java.net.URL;
import com.sun.jna.Platform;
class GSLibraryPath {
  // This method returns the folder inside which the gstreamer library folder
  // should be located.
  String get() {
    // Locate this very class file; its URL tells us where GSVideo.jar lives.
    URL url = this.getClass().getResource("GSLibraryPath.class");
    if (url != null) {
      // Convert URL to string, taking care of spaces represented by the "%20"
      // string.
      String path = url.toString().replace("%20", " ");
      // n0/n1 delimit the substring of the URL that is the folder path.
      int n0 = path.indexOf('/');
      int n1 = -1;
      if (Platform.isWindows()) {
        n1 = path.indexOf("/lib/GSVideo.jar"); // location of GSVideo.jar in
                                               // exported apps.
        if (n1 == -1)
          n1 = path.indexOf("/GSVideo.jar"); // location of GSVideo.jar in
                                             // library folder.
        // In Windows, path string starts with "jar file/C:/..."
        // so the substring up to the first / is removed.
        n0++;
      } else if (Platform.isMac()) {
        // In Mac, getting the index of GSVideo.jar is enough in the case of sketches running from the PDE
        // as well as exported applications.
        n1 = path.indexOf("GSVideo.jar");
      } else if (Platform.isLinux()) {
        // TODO: what's up?
        // NOTE(review): on Linux n1 stays -1, so this method always returns
        // "" there — confirm whether that is intentional.
      }
      if ((-1 < n0) && (-1 < n1)) {
        return path.substring(n0, n1);
      } else {
        return "";
      }
    }
    return "";
  }
}

View File

@@ -0,0 +1,718 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import processing.core.*;
import java.nio.*;
import java.lang.reflect.*;
import org.gstreamer.*;
import org.gstreamer.Buffer;
import org.gstreamer.elements.*;
/**
* This class allows to create a custom GStreamer pipeline.
*/
public class GSPipeline extends PImage implements PConstants {
protected int streamType;
protected String pipeline;
protected boolean playing = false;
protected boolean paused = false;
protected boolean repeat = false;
protected int bufWidth;
protected int bufHeight;
protected int bufSize;
protected Pipeline gpipeline;
protected Method pipelineEventMethod;
protected Method copyBufferMethod;
protected Object eventHandler;
protected Object copyHandler;
protected boolean available;
protected boolean pipelineReady;
protected RGBDataSink rgbSink = null;
protected int[] copyPixels = null;
protected BufferDataSink natSink = null;
protected Buffer natBuffer = null;
protected boolean copyBufferMode = false;
protected String copyMask;
protected ByteDataSink dataSink = null;
protected byte[] copyData = null;
public byte[] data = null;
public String dataCaps;
protected String tempDataCaps;
protected boolean firstFrame = true;
/**
* Creates an instance of GSPipeline using the provided pipeline
* string.
*
* @param parent PApplet
* @param pstr String
*/
  public GSPipeline(PApplet parent, String pstr) {
    // Size is unknown until the first frame arrives; start as a 0x0 RGB image.
    super(0, 0, RGB);
    initGStreamer(parent, pstr, GSVideo.VIDEO);
  }
/**
* Creates an instance of GSPipeline using the provided pipeline
* string.
*
* @param parent PApplet
* @param pstr String
* @param type int
*/
  public GSPipeline(PApplet parent, String pstr, int type) {
    // Size is unknown until the first frame arrives; start as a 0x0 RGB image.
    super(0, 0, RGB);
    initGStreamer(parent, pstr, type);
  }
/**
* Releases the gstreamer resources associated to this pipeline object.
* It shouldn't be used after this.
*/
  public void delete() {
    if (gpipeline != null) {
      // Stop playback first; the native pipeline may already have been
      // disposed, in which case stop() throws IllegalStateException.
      try {
        if (gpipeline.isPlaying()) {
          gpipeline.stop();
        }
      } catch (IllegalStateException e) {
        System.err.println("error when deleting player, maybe some native resource is already disposed");
      } catch (Exception e) {
        e.printStackTrace();
      }
      // Release Java-side buffers and detach/dispose each sink that may
      // have been created by initPipeline().
      pixels = null;
      data = null;
      copyPixels = null;
      if (rgbSink != null) {
        rgbSink.removeListener();
        rgbSink.dispose();
        rgbSink = null;
      }
      copyData = null;
      if (dataSink != null) {
        dataSink.removeListener();
        dataSink.dispose();
        dataSink = null;
      }
      natBuffer = null;
      if (natSink != null) {
        natSink.removeListener();
        natSink.dispose();
        natSink = null;
      }
      gpipeline.dispose();
      gpipeline = null;
    }
  }
/**
* Same as delete.
*/
  public void dispose() {
    // Called by Processing via registerDispose(); simply forwards to delete().
    delete();
  }
/**
* Sets the object to use as destination for the frames read from the stream.
* The color conversion mask is automatically set to the one required to
* copy the frames to OpenGL.
*
* @param Object dest
*/
public void setPixelDest(Object dest) {
copyHandler = dest;
if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
} else {
copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
}
}
/**
* Sets the object to use as destination for the frames read from the stream.
*
* @param Object dest
* @param String mask
*/
  public void setPixelDest(Object dest, String mask) {
    copyHandler = dest;
    // Caller supplies the gstreamer caps color-mask string explicitly.
    copyMask = mask;
  }
/**
* Uses a generic object as handler of the pipeline. This object should have a
* pipelineEvent method that receives a GSPipeline argument. This method will
* be called upon a new frame read event.
*
*/
  public void setEventHandlerObject(Object obj) {
    eventHandler = obj;
    try {
      // Look up "void pipelineEvent(GSPipeline)" reflectively on the handler.
      pipelineEventMethod = eventHandler.getClass().getMethod("pipelineEvent",
          new Class[] { GSPipeline.class });
    } catch (Exception e) {
      // no such method, or an error.. which is fine, just ignore
    }
  }
/**
* Get the full length of this movie (in seconds).
*
* @return float
*/
public float duration() {
float sec = gpipeline.queryDuration().toSeconds();
float nanosec = gpipeline.queryDuration().getNanoSeconds();
return sec + GSVideo.nanoSecToSecFrac(nanosec);
}
/**
* Return the current time in seconds.
*
* @return float
*/
public float time() {
float sec = gpipeline.queryPosition().toSeconds();
float nanosec = gpipeline.queryPosition().getNanoSeconds();
return sec + GSVideo.nanoSecToSecFrac(nanosec);
}
/**
* Jump to a specific location (in seconds). The number is a float so
* fractions of seconds can be used.
*
* @param float where
*/
  public void jump(float where) {
    // Pause while seeking, resume afterwards if we were playing.
    if (playing) {
      gpipeline.pause();
    }
    boolean res;
    long start = GSVideo.secToNanoLong(where);
    long stop = -1; // or whatever > new_pos
    // Flushing seek at rate 1.0 to the absolute position `start`.
    res = gpipeline.seek(1.0, Format.TIME, SeekFlags.FLUSH,
        SeekType.SET, start, SeekType.SET, stop);
    if (!res) {
      System.err.println("Seek operation failed.");
    }
    if (playing) {
      gpipeline.play();
    }
  }
/**
* Returns true if the stream is already producing frames.
*
* @return boolean
*/
public boolean ready() {
return 0 < bufSize && pipelineReady;
}
/**
* Return the true or false depending on whether there is a new frame ready to
* be read.
*
* @return boolean
*/
  public boolean available() {
    // Set by invokeEvent() on each new buffer; cleared by read().
    return available;
  }
/**
* Returns whether the stream is playing or not.
*
* @return boolean
*/
  public boolean isPlaying() {
    // Tracks the state requested through play()/pause()/stop().
    return playing;
  }
/**
* Returns whether the stream is paused or not. If isPlaying() and isPaused()
* both return false it means that the stream is stopped.
*
* @return boolean
*/
  public boolean isPaused() {
    // false together with isPlaying()==false means the stream is stopped.
    return paused;
  }
/**
* Returns whether the stream is looping or not.
*
* @return boolean
*/
  public boolean isLooping() {
    // Set by loop(), cleared by noLoop().
    return repeat;
  }
/**
* Begin playing the stream, with no repeat.
*/
  public void play() {
    // The gstreamer pipeline is built lazily on first playback.
    if (!pipelineReady) {
      initPipeline();
    }
    playing = true;
    paused = false;
    gpipeline.play();
  }
/**
* Begin playing the stream, with repeat.
*/
  public void loop() {
    // Playback restarts from the beginning on end-of-stream (see eosEvent).
    repeat = true;
    play();
  }
/**
* Shut off the repeating loop.
*/
  public void noLoop() {
    // Does not stop playback; only disables the restart-on-EOS behavior.
    repeat = false;
  }
/**
* Pause the stream at its current time.
*/
  public void pause() {
    playing = false;
    paused = true;
    gpipeline.pause();
  }
/**
* Stop the stream, and rewind.
*/
  public void stop() {
    // Rewind only when playback was actually in progress.
    if (playing) {
      goToBeginning();
      playing = false;
    }
    paused = false;
    gpipeline.stop();
  }
/**
* Reads the current video frame.
*/
  public synchronized void read() {
    if (streamType == GSVideo.VIDEO) {
      // We loadPixels() first to ensure that at least we always have a non-null
      // pixels array, even if without any valid image inside.
      loadPixels();
      if (copyBufferMode) {
        // The native buffer from gstreamer is copied to the destination object.
        if (natBuffer == null || copyBufferMethod == null) {
          return;
        }
        // On the first frame the PImage is resized to the real video size.
        if (firstFrame) {
          super.init(bufWidth, bufHeight, RGB);
          loadPixels();
          firstFrame = false;
        }
        IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
        try {
          copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
        } catch (Exception e) {
          e.printStackTrace();
        }
        natBuffer = null;
      } else {
        if (copyPixels == null) {
          return;
        }
        if (firstFrame) {
          super.init(bufWidth, bufHeight, RGB);
          loadPixels();
          firstFrame = false;
        }
        // Swap pixels and copyPixels instead of copying, so no per-frame
        // allocation or array copy is needed.
        int[] temp = pixels;
        pixels = copyPixels;
        updatePixels();
        copyPixels = temp;
      }
    } else if (streamType == GSVideo.RAW) {
      if (copyData == null) {
        return;
      }
      dataCaps = tempDataCaps;
      if (data == null) {
        data = new byte[copyData.length];
      }
      // Same swap trick for raw byte data.
      byte[] temp = data;
      data = copyData;
      copyData = temp;
    }
    available = false;
  }
/**
* Goes to the first frame of the stream.
*/
  public void goToBeginning() {
    // Seek to timestamp 0.
    boolean res = gpipeline.seek(ClockTime.fromNanos(0));
    if (!res) {
      System.err.println("Seek operation failed.");
    }
  }
/**
* Goes to the last frame of the stream.
*/
  public void goToEnd() {
    // Seek to the stream's total duration.
    long nanos = gpipeline.queryDuration().getNanoSeconds();
    boolean res = gpipeline.seek(ClockTime.fromNanos(nanos));
    if (!res) {
      System.err.println("Seek operation failed.");
    }
  }
  /**
   * Get a float-value property from the pipeline. Returns 0 when the
   * pipeline is not playing or the property cannot be read.
   *
   * @param String name
   * @return float
   */
  public float getProperty(String name) {
    if (playing) {
      return ((Number)gpipeline.get(name)).floatValue();
    }
    return 0;
  }
/**
* Set a float-value property in the pipeline.
*
* @param String name
* @param float v
*/
  public void setProperty(String name, float v) {
    // Silently ignored when the pipeline is not playing.
    if (playing) {
      gpipeline.set(name, v);
    }
  }
/**
* Change the volume. Values are from 0 to 1. It will fail
* if the pipeline doesn't have a volume property available.
*
* @param float v
*/
  public void volume(float v) {
    // Delegates to the pipeline's "volume" property, if it exposes one.
    setProperty("volume", v);
  }
/**
* Returns the text string used to build the pipeline.
*
* @return String
*/
  public String getPipeline() {
    // NOTE(review): initPipeline() appends sink elements to this field, so
    // after playback starts this is the effective (extended) pipeline string,
    // not necessarily the constructor argument verbatim.
    return pipeline;
  }
  // Stores the constructor arguments and registers with the parent sketch;
  // the actual gstreamer pipeline is built lazily in initPipeline().
  protected void initGStreamer(PApplet parent, String pstr, int type) {
    this.parent = parent;
    gpipeline = null;
    GSVideo.init();
    // register methods
    parent.registerDispose(this);
    setEventHandlerObject(parent);
    pipeline = pstr;
    streamType = type;
    bufWidth = bufHeight = bufSize = 0;
    pipelineReady = false;
  }
  // Builds the gstreamer pipeline from the user-supplied string, appending
  // the appropriate data sink for the stream type unless the string already
  // ends in a fakesink/filesink element. Mutates the `pipeline` field.
  protected void initPipeline() {
    // Determining if the last element is fakesink or filesink.
    int idx;
    String lastElem, lastElemName;
    String[] parts;
    idx = pipeline.lastIndexOf('!');
    lastElem = pipeline.substring(idx + 1, pipeline.length()).trim();
    parts = lastElem.split(" ");
    if (0 < parts.length)
      lastElemName = parts[0];
    else
      lastElemName = "";
    boolean fakeSink = lastElemName.equals("fakesink");
    boolean fileSink = lastElemName.equals("filesink");
    if (PApplet.platform == WINDOWS) {
      // Single backward slashes are replaced by double backward slashes,
      // otherwise gstreamer won't understand file paths.
      pipeline = pipeline.replace("\\", "\\\\");
    }
    if (fakeSink || fileSink) {
      // If the pipeline ends in a fakesink or filesink element, the RGBDataSink
      // is not added at the end of it...
      gpipeline = Pipeline.launch(pipeline);
    } else {
      if (streamType == GSVideo.VIDEO) {
        // For video pipelines, we add an RGBDataSink or NativeDataSink element at the end.
        if (copyHandler != null) {
          // Copy-buffer mode requires the handler to expose both
          // addPixelsToBuffer() and setPixelSource(); otherwise fall back
          // to the regular RGB sink below.
          try {
            copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
                new Class[] { Object.class, IntBuffer.class, int.class, int.class });
            copyBufferMode = true;
          } catch (Exception e) {
            // no such method, or an error.. which is fine, just ignore
            copyBufferMode = false;
          }
          if (copyBufferMode) {
            try {
              Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
              meth.invoke(copyHandler, new Object[] { this });
            } catch (Exception e) {
              copyBufferMode = false;
            }
            if (copyBufferMode) {
              String caps = " ! ffmpegcolorspace ! video/x-raw-rgb, bpp=32, depth=24, endianness=(int)4321, ";
              caps += copyMask;
              StringBuilder finalPipeStr = new StringBuilder(pipeline);
              finalPipeStr.append(caps);
              finalPipeStr.append(" ! fakesink name=nat");
              pipeline = finalPipeStr.toString();
              gpipeline = Pipeline.launch(pipeline);
              natSink = new BufferDataSink("nat", gpipeline,
                  new BufferDataSink.Listener() {
                    public void bufferFrame(int w, int h, Buffer buffer) {
                      invokeEvent(w, h, buffer);
                    }
                  });
              natSink.setAutoDisposeBuffer(false);
            }
          }
        }
        if (!copyBufferMode) {
          // Making sure we are using the right color space and color masks:
          String caps = " ! ffmpegcolorspace ! video/x-raw-rgb, bpp=32, depth=24, endianness=(int)4321, ";
          // JNA creates ByteBuffer using native byte order, set masks according to that.
          if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN)
            caps += "red_mask=(int)0xFF00, green_mask=(int)0xFF0000, blue_mask=(int)0xFF000000";
          else
            caps += "red_mask=(int)0xFF0000, green_mask=(int)0xFF00, blue_mask=(int)0xFF";
          StringBuilder finalPipeStr = new StringBuilder(pipeline);
          finalPipeStr.append(caps);
          finalPipeStr.append(" ! fakesink name=rgb");
          pipeline = finalPipeStr.toString();
          gpipeline = Pipeline.launch(pipeline);
          rgbSink = new RGBDataSink("rgb", gpipeline, new RGBDataSink.Listener() {
            public void rgbFrame(boolean pre, int w, int h, IntBuffer buffer) {
              invokeEvent(w, h, buffer);
            }
          });
          // Setting direct buffer passing in the video sink, so no new buffers are created
          // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
          // out this one).
          rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
          // No need for videoSink.dispose(), because the append() doesn't increment the
          // refcount of the videoSink object.
        }
      } else if (streamType == GSVideo.AUDIO) {
        // For audio pipelines, we launch the pipeline as it is.
        gpipeline = Pipeline.launch(pipeline);
      } else if (streamType == GSVideo.RAW) {
        StringBuilder finalPipeStr = new StringBuilder(pipeline);
        finalPipeStr.append(" ! fakesink name=data");
        pipeline = finalPipeStr.toString();
        gpipeline = Pipeline.launch(pipeline);
        dataSink = new ByteDataSink("data", gpipeline,
            new ByteDataSink.Listener() {
              public void byteFrame(boolean pre, Caps caps, int size, ByteBuffer buffer) {
                invokeEvent(caps, size, buffer);
              }
            });
        dataSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
      } else {
        System.err.println("Unrecognized stream type: Please use VIDEO, AUDIO, or RAW.");
        return;
      }
    }
    // Creating bus to handle end-of-stream event.
    Bus bus = gpipeline.getBus();
    bus.connect(new Bus.EOS() {
      public void endOfStream(GstObject element) {
        eosEvent();
      }
    });
    pipelineReady = true;
  }
  // Called from the gstreamer streaming thread on every RGB frame; copies the
  // frame into copyPixels and notifies the sketch's pipelineEvent() handler.
  protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
    available = true;
    bufWidth = w;
    bufHeight = h;
    bufSize = w * h;
    if (copyPixels == null) {
      copyPixels = new int[w * h];
    }
    buffer.rewind();
    try {
      buffer.get(copyPixels);
    } catch (BufferUnderflowException e) {
      // Incomplete buffer: drop this frame and let the next one reallocate.
      e.printStackTrace();
      copyPixels = null;
      return;
    }
    // Creates a pipelineEvent.
    if (pipelineEventMethod != null) {
      try {
        pipelineEventMethod.invoke(eventHandler, new Object[] { this });
      } catch (Exception e) {
        System.err.println("error, disabling pipelineEvent() for " + pipeline);
        e.printStackTrace();
        pipelineEventMethod = null;
      }
    }
  }
protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
available = true;
bufWidth = w;
bufHeight = h;
bufSize = w * h;
natBuffer = buffer;
if (playing) {
// Creates a movieEvent.
if (pipelineEventMethod != null) {
try {
pipelineEventMethod.invoke(eventHandler, new Object[] { this });
} catch (Exception e) {
System.err.println("error, disabling movieEvent() for " + pipeline);
e.printStackTrace();
pipelineEventMethod = null;
}
}
}
}
  // Called from the gstreamer streaming thread for RAW streams; copies the
  // bytes into copyData and notifies the sketch's pipelineEvent() handler.
  protected synchronized void invokeEvent(Caps caps, int n, ByteBuffer buffer) {
    available = true;
    bufSize = n;
    // The caps string is published to dataCaps by read(), on the sketch thread.
    tempDataCaps = caps.toString();
    if (copyData == null) {
      copyData = new byte[n];
    }
    buffer.rewind();
    try {
      buffer.get(copyData);
    } catch (BufferUnderflowException e) {
      // Incomplete buffer: drop this frame and let the next one reallocate.
      e.printStackTrace();
      copyData = null;
      return;
    }
    if (playing) {
      // Creates a pipelineEvent.
      if (pipelineEventMethod != null) {
        try {
          pipelineEventMethod.invoke(eventHandler, new Object[] { this });
        } catch (Exception e) {
          System.err.println("error, disabling pipelineEvent() for " + pipeline);
          e.printStackTrace();
          pipelineEventMethod = null;
        }
      }
    }
  }
  // Releases a native gstreamer buffer previously handed out in copy-buffer
  // mode (auto-dispose is off for natSink, so the consumer must call this).
  public synchronized void disposeBuffer(Object buf) {
    ((Buffer)buf).dispose();
  }
  // End-of-stream callback from the gstreamer bus: rewind when looping,
  // otherwise mark playback as finished.
  protected void eosEvent() {
    if (repeat) {
      goToBeginning();
    } else {
      playing = false;
    }
  }
}

View File

@@ -0,0 +1,833 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import processing.core.*;
import java.awt.Dimension;
import java.io.*;
import java.nio.*;
import java.util.concurrent.TimeUnit;
import java.lang.reflect.*;
import org.gstreamer.*;
import org.gstreamer.Buffer;
import org.gstreamer.elements.*;
/**
* This class makes it possible to load and play generic media content through
* playbin (not only movies, but also audio files, etc).
*/
public class GSPlayer extends PImage implements PConstants {
protected int streamType;
protected String filename;
protected boolean playing = false;
protected boolean paused = false;
protected boolean repeat = false;
protected float fps;
protected float rate;
protected int bufWidth;
protected int bufHeight;
protected int bufSize;
protected PlayBin2 gplayer;
protected Method playerEventMethod;
protected Method copyBufferMethod;
protected Object eventHandler;
protected Object copyHandler;
protected boolean available;
protected boolean sinkReady;
protected RGBDataAppSink rgbSink = null;
protected int[] copyPixels = null;
protected BufferDataAppSink natSink = null;
protected Buffer natBuffer = null;
protected boolean copyBufferMode = false;
protected String copyMask;
protected ByteDataAppSink dataSink = null;
protected byte[] copyData = null;
public byte[] data = null;
public String dataCaps;
protected String tempDataCaps;
protected boolean firstFrame = true;
/**
* Creates an instance of GSPlayer loading the media file from filename,
* assuming that it is a video file.
*
* @param parent PApplet
* @param filename String
*/
  public GSPlayer(PApplet parent, String filename) {
    // Size is unknown until the first frame arrives; start as a 0x0 RGB image.
    super(0, 0, RGB);
    initGStreamer(parent, filename, GSVideo.VIDEO);
  }
/**
* Creates an instance of GSPlayer loading the media file from filename,
* and trying to understand it as the indicated type.
*
* @param parent PApplet
* @param filename String
* @param type int
*/
  public GSPlayer(PApplet parent, String filename, int type) {
    // Size is unknown until the first frame arrives; start as a 0x0 RGB image.
    super(0, 0, RGB);
    initGStreamer(parent, filename, type);
  }
/**
* Releases the gstreamer resources associated to this player object.
* It shouldn't be used after this.
*/
  public void delete() {
    if (gplayer != null) {
      // Stop playback first; the native player may already have been
      // disposed, in which case stop() throws IllegalStateException.
      try {
        if (gplayer.isPlaying()) {
          gplayer.stop();
        }
      } catch (IllegalStateException e) {
        System.err.println("error when deleting player, maybe some native resource is already disposed");
      } catch (Exception e) {
        e.printStackTrace();
      }
      // Release Java-side buffers and detach/dispose each sink that may
      // have been created by initSink().
      pixels = null;
      data = null;
      copyPixels = null;
      if (rgbSink != null) {
        rgbSink.removeListener();
        rgbSink.dispose();
        rgbSink = null;
      }
      copyData = null;
      if (dataSink != null) {
        dataSink.removeListener();
        dataSink.dispose();
        dataSink = null;
      }
      natBuffer = null;
      if (natSink != null) {
        natSink.removeListener();
        natSink.dispose();
        natSink = null;
      }
      gplayer.dispose();
      gplayer = null;
    }
  }
/**
* Same as delete.
*/
  public void dispose() {
    // Called by Processing via registerDispose(); simply forwards to delete().
    delete();
  }
/**
* Sets the object to use as destination for the frames read from the stream.
* The color conversion mask is automatically set to the one required to
* copy the frames to OpenGL.
*
* @param Object dest
*/
public void setPixelDest(Object dest) {
copyHandler = dest;
if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
} else {
copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
}
}
/**
* Sets the object to use as destination for the frames read from the stream.
*
* @param Object dest
* @param String mask
*/
  public void setPixelDest(Object dest, String mask) {
    copyHandler = dest;
    // Caller supplies the gstreamer caps color-mask string explicitly.
    copyMask = mask;
  }
/**
* Uses a generic object as handler of the media file. This object should have a
* movieEvent method that receives a GSMovie argument. This method will
* be called upon a new frame read event.
*
*/
  public void setEventHandlerObject(Object obj) {
    eventHandler = obj;
    try {
      // Look up "void playerEvent(GSPlayer)" reflectively on the handler.
      playerEventMethod = eventHandler.getClass().getMethod("playerEvent",
          new Class[] { GSPlayer.class });
    } catch (Exception e) {
      // no such method, or an error.. which is fine, just ignore
    }
  }
/**
* Get the width of the source video. Note: calling this method repeatedly
* can slow down playback performance.
*
* @return int
*/
public int getSourceWidth() {
Dimension dim = gplayer.getVideoSize();
if (dim != null) {
return dim.width;
} else {
return 0;
}
}
/**
* Get the height of the source video. Note: calling this method repeatedly
* can slow down playback performance.
*
* @return int
*/
public int getSourceHeight() {
Dimension dim = gplayer.getVideoSize();
if (dim != null) {
return dim.height;
} else {
return 0;
}
}
/**
* Get the original framerate of the source video. Note: calling this method repeatedly
* can slow down playback performance.
*
* @return float
*/
  public float getSourceFrameRate() {
    // Queries the video sink for the negotiated framerate.
    return (float)gplayer.getVideoSinkFrameRate();
  }
  /**
   * Set how often new frames are to be read from the stream. Does not actually
   * set the speed of the playback, that's handled by the speed() method.
   *
   * @param float ifps
   * @see speed
   */
  public void frameRate(float ifps) {
    // We calculate the target ratio in the case both the
    // current and target framerates are valid (greater than
    // zero), otherwise we leave it as 1.
    float f = (0 < ifps && 0 < fps) ? ifps / fps : 1;
    if (playing) {
      gplayer.pause();
    }
    long t = gplayer.queryPosition(TimeUnit.NANOSECONDS);
    boolean res;
    long start, stop;
    // For a negative rate (backwards playback) the playable segment is
    // [0, current position]; otherwise [current position, end).
    if (rate > 0) {
      start = t;
      stop = -1;
    } else {
      start = 0;
      stop = t;
    }
    // Re-seek at the combined rate so the effective frame delivery changes.
    res = gplayer.seek(rate * f, Format.TIME, SeekFlags.FLUSH,
        SeekType.SET, start, SeekType.SET, stop);
    if (!res) {
      System.err.println("Seek operation failed.");
    }
    if (playing) {
      gplayer.play();
    }
    fps = ifps;
  }
/**
* Set a multiplier for how fast/slow the movie should be run. The default is
* 1.0. speed(2) will play the movie at double speed (2x). speed(0.5) will
* play at half speed. speed(-1) will play backwards at regular speed.
*
* @param float irate
*/
  public void speed(float irate) {
    // If the frameRate() method is called continuously with very similar
    // rate values, playback might become sluggish. This condition attempts
    // to take care of that.
    // NOTE(review): this also means rate changes smaller than 0.1 are
    // silently ignored — confirm that this granularity is acceptable.
    if (PApplet.abs(rate - irate) > 0.1) {
      rate = irate;
      frameRate(fps); // The framerate is the same, but the rate (speed) could be different.
    }
  }
/**
* Get the full length of the current stream (in seconds).
*
* @return float
*/
public float duration() {
float sec = gplayer.queryDuration().toSeconds();
float nanosec = gplayer.queryDuration().getNanoSeconds();
return sec + GSVideo.nanoSecToSecFrac(nanosec);
}
/**
* Return the current time in seconds.
*
* @return float
*/
public float time() {
float sec = gplayer.queryPosition().toSeconds();
float nanosec = gplayer.queryPosition().getNanoSeconds();
return sec + GSVideo.nanoSecToSecFrac(nanosec);
}
/**
* Get the full length of this movie (in frames).
*
* @return float
*/
public long length() {
return (int)(duration() * getSourceFrameRate());
}
/**
* Return the current frame.
*
* @return int
*/
  public int frame() {
    // Frame index derived from the current time and the source framerate.
    return (int)(time() * getSourceFrameRate());
  }
/**
* Jump to a specific location (in seconds). The number is a float so
* fractions of seconds can be used.
*
* @param float where
*/
  public void jump(float where) {
    // Pause while seeking, resume afterwards if we were playing.
    if (playing) {
      gplayer.pause();
    }
    boolean res;
    long start = GSVideo.secToNanoLong(where);
    long stop = -1; // or whatever > new_pos
    // Flushing seek at rate 1.0 to the absolute position `start`.
    // NOTE(review): this resets the playback rate set via speed() — confirm.
    res = gplayer.seek(1.0, Format.TIME, SeekFlags.FLUSH,
        SeekType.SET, start, SeekType.SET, stop);
    if (!res) {
      System.err.println("Seek operation failed.");
    }
    if (playing) {
      gplayer.play();
    }
  }
/**
* Jump to a specific frame.
*
* @param frame int
*/
  public void jump(int frame) {
    float srcFramerate = getSourceFrameRate();
    // The duration of a single frame:
    float frameDuration = 1 / srcFramerate;
    // We move to the middle of the frame by adding 0.5:
    float where = (frame + 0.5f) * frameDuration;
    // Taking into account border effects:
    float diff = duration() - where;
    if (diff < 0) {
      // Target lies past the end; pull it back inside the last frame.
      where += diff - 0.25f * frameDuration;
    }
    jump(where);
  }
/**
* Returns true if the stream is already producing frames.
*
* @return boolean
*/
public boolean ready() {
return 0 < bufSize && sinkReady;
}
/**
* Return the true or false depending on whether there is a new frame ready to
* be read.
*
* @return boolean
*/
  public boolean available() {
    // Set by invokeEvent() on each new buffer; cleared by read().
    return available;
  }
/**
* Returns whether the media is playing or not.
*
* @return boolean
*/
  public boolean isPlaying() {
    // Tracks the state requested through play()/pause()/stop().
    return playing;
  }
/**
* Returns whether the media is paused or not. If isPlaying() and isPaused()
* both return false it means that the media is stopped.
*
* @return boolean
*/
  public boolean isPaused() {
    // false together with isPlaying()==false means the media is stopped.
    return paused;
  }
/**
* Returns whether the media is looping or not.
*
* @return boolean
*/
  public boolean isLooping() {
    // Set by loop(), cleared by noLoop().
    return repeat;
  }
/**
* Begin playing the stream, with no repeat.
*/
  public void play() {
    // The data sink is attached lazily on first playback.
    if (!sinkReady) {
      initSink();
    }
    playing = true;
    paused = false;
    gplayer.play();
  }
/**
* Begin playing the stream, with repeat.
*/
  public void loop() {
    // Playback restarts from the beginning on end-of-stream (see eosEvent).
    repeat = true;
    play();
  }
/**
* Shut off the repeating loop.
*/
  public void noLoop() {
    // Does not stop playback; only disables the restart-on-EOS behavior.
    repeat = false;
  }
/**
* Pause the stream at its current time.
*/
  public void pause() {
    playing = false;
    paused = true;
    gplayer.pause();
  }
/**
* Stop the stream, and rewind.
*/
  public void stop() {
    // Rewind only when playback was actually in progress.
    if (playing) {
      goToBeginning();
      playing = false;
    }
    paused = false;
    gplayer.stop();
  }
/**
* Reads the current video frame or data buffer.
*/
  public synchronized void read() {
    if (fps <= 0) {
      // Framerate not set yet, so we obtain from stream,
      // which is already playing since we are in read().
      fps = getSourceFrameRate();
    }
    if (streamType == GSVideo.VIDEO) {
      // We loadPixels() first to ensure that at least we always have a non-null
      // pixels array, even if without any valid image inside.
      loadPixels();
      if (copyBufferMode) {
        // The native buffer from gstreamer is copied to the destination object.
        if (natBuffer == null || copyBufferMethod == null) {
          return;
        }
        // On the first frame the PImage is resized to the real video size.
        if (firstFrame) {
          super.init(bufWidth, bufHeight, RGB);
          loadPixels();
          firstFrame = false;
        }
        IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
        try {
          copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
        } catch (Exception e) {
          e.printStackTrace();
        }
        natBuffer = null;
      } else {
        // Normal operation mode: the pixels just read from gstreamer
        // are copied to the pixels array.
        if (copyPixels == null) {
          return;
        }
        if (firstFrame) {
          super.init(bufWidth, bufHeight, RGB);
          loadPixels();
          firstFrame = false;
        }
        // Swap pixels and copyPixels instead of copying, so no per-frame
        // allocation or array copy is needed.
        int[] temp = pixels;
        pixels = copyPixels;
        updatePixels();
        copyPixels = temp;
      }
    } else if (streamType == GSVideo.RAW) {
      if (copyData == null) {
        return;
      }
      dataCaps = tempDataCaps;
      if (data == null) {
        data = new byte[copyData.length];
      }
      // Same swap trick for raw byte data.
      byte[] temp = data;
      data = copyData;
      copyData = temp;
    }
    available = false;
  }
/**
* Goes to the first frame of the stream.
*/
public void goToBeginning() {
gplayer.seek(ClockTime.fromNanos(0));
}
/**
* Change the volume. Values are from 0 to 1.
*
* @param float v
*/
  public void volume(float v) {
    // Silently ignored when the media is not playing.
    if (playing) {
      gplayer.setVolume(v);
    }
  }
/**
* Returns the text string containing the filename of the media loaded.
*
* @return String
*/
  public String getFilename() {
    // The filename passed to the constructor; kept mainly for error messages.
    return filename;
  }
  // Locates the media file (sketch data folder first, then as a plain or
  // absolute path), builds the PlayBin2 around it, and registers with the
  // parent sketch. Kills the sketch via parent.die() if nothing was found.
  protected void initGStreamer(PApplet parent, String filename, int type) {
    this.parent = parent;
    gplayer = null;
    File file;
    GSVideo.init();
    // first check to see if this can be read locally from a file.
    try {
      try {
        // first try a local file using the dataPath. usually this will
        // work ok, but sometimes the dataPath is inside a jar file,
        // which is less fun, so this will crap out.
        file = new File(parent.dataPath(filename));
        if (file.exists()) {
          gplayer = new PlayBin2("GSPlayer");
          gplayer.setInputFile(file);
        }
      } catch (Exception e) {
      } // ignored
      // read from a file just hanging out in the local folder.
      // this might happen when the video library is used with some
      // other application, or the person enters a full path name
      if (gplayer == null) {
        try {
          file = new File(filename);
          if (file.exists()) {
            gplayer = new PlayBin2("GSPlayer");
            gplayer.setInputFile(file);
          } else {
            System.err.println("File " + filename + " does not exist. Please check location.");
          }
        } catch (Exception e) {
          // ignored, same best-effort lookup as above
        }
      }
      // Network read needs to be implemented...
    } catch (SecurityException se) {
      // online, whups. catch the security exception out here rather than
      // doing it three times (or whatever) for each of the cases above.
    }
    if (gplayer == null) {
      parent.die("Could not load media file " + filename, null);
    }
    // we've got a valid media file! let's rock.
    try {
      this.filename = filename; // for error messages
      // register methods
      parent.registerDispose(this);
      setEventHandlerObject(parent);
      rate = 1.0f;
      fps = -1; // negative means "not known yet"; resolved on first read()
      sinkReady = false;
      bufWidth = bufHeight = bufSize = 0;
    } catch (Exception e) {
      e.printStackTrace();
    }
    streamType = type;
  }
  // Attaches the sink matching the stream type to the PlayBin (RGB app sink,
  // native-buffer app sink, fakesink for audio-only, or byte app sink for
  // RAW), and hooks the end-of-stream bus handler.
  protected void initSink() {
    if (streamType == GSVideo.VIDEO) {
      if (copyHandler != null) {
        // Copy-buffer mode requires the handler to expose both
        // addPixelsToBuffer() and setPixelSource(); otherwise fall back to
        // the regular RGB sink below.
        try {
          copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
              new Class[] { Object.class, IntBuffer.class, int.class, int.class });
          copyBufferMode = true;
        } catch (Exception e) {
          // no such method, or an error.. which is fine, just ignore
          copyBufferMode = false;
        }
        if (copyBufferMode) {
          try {
            Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
            meth.invoke(copyHandler, new Object[] { this });
          } catch (Exception e) {
            copyBufferMode = false;
          }
          if (copyBufferMode) {
            natSink = new BufferDataAppSink("nat", copyMask,
                new BufferDataAppSink.Listener() {
                  public void bufferFrame(int w, int h, Buffer buffer) {
                    invokeEvent(w, h, buffer);
                  }
                });
            natSink.setAutoDisposeBuffer(false);
            gplayer.setVideoSink(natSink);
            // The setVideoSink() method sets the videoSink as a property of the PlayBin,
            // which increments the refcount of the videoSink element. Disposing here once
            // to decrement the refcount.
            natSink.dispose();
          }
        }
      }
      if (!copyBufferMode) {
        rgbSink = new RGBDataAppSink("rgb",
            new RGBDataAppSink.Listener() {
              public void rgbFrame(int w, int h, IntBuffer buffer) {
                invokeEvent(w, h, buffer);
              }
            });
        // Setting direct buffer passing in the video sink, so no new buffers are created
        // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
        // out this one).
        rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
        gplayer.setVideoSink(rgbSink);
        // The setVideoSink() method sets the videoSink as a property of the PlayBin,
        // which increments the refcount of the videoSink element. Disposing here once
        // to decrement the refcount.
        rgbSink.dispose();
      }
    } else if (streamType == GSVideo.AUDIO) {
      // Audio-only: discard any video frames with a fakesink.
      gplayer.setVideoSink(ElementFactory.make("fakesink", "audiosink"));
    } else if (streamType == GSVideo.RAW) {
      dataSink = new ByteDataAppSink("data",
          new ByteDataAppSink.Listener() {
            public void byteFrame(Caps caps, int size, ByteBuffer buffer) {
              invokeEvent(caps, size, buffer);
            }
          });
      dataSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
      gplayer.setVideoSink(dataSink);
      dataSink.dispose();
    } else {
      System.err.println("Unrecognized stream type: Please use VIDEO, AUDIO, or RAW.");
      return;
    }
    // Creating bus to handle end-of-stream event.
    Bus bus = gplayer.getBus();
    bus.connect(new Bus.EOS() {
      public void endOfStream(GstObject element) {
        eosEvent();
      }
    });
    sinkReady = true;
  }
/**
 * Called by the RGB sink when a new video frame arrives. Copies the incoming
 * pixel data into the copyPixels array, records the frame dimensions, marks a
 * frame as available and fires playerEvent() on the registered handler.
 */
protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
  final int numPixels = w * h;
  available = true;
  bufWidth = w;
  bufHeight = h;
  bufSize = numPixels;

  if (copyPixels == null) {
    copyPixels = new int[numPixels];
  }

  buffer.rewind();
  try {
    buffer.get(copyPixels);
  } catch (BufferUnderflowException e) {
    // Incoming buffer was smaller than expected; drop this frame.
    e.printStackTrace();
    copyPixels = null;
    return;
  }

  // Creates a playerEvent, but only while actually playing.
  if (playing && playerEventMethod != null) {
    try {
      playerEventMethod.invoke(eventHandler, new Object[] { this });
    } catch (Exception e) {
      System.err.println("error, disabling playerEvent() for " + filename);
      e.printStackTrace();
      playerEventMethod = null;
    }
  }
}
/**
 * Called by the native buffer sink when a new frame arrives in copy-buffer
 * mode. Stores the native gstreamer buffer (read later by the copy handler),
 * records the frame dimensions and fires playerEvent() on the handler.
 */
protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
  available = true;
  bufWidth = w;
  bufHeight = h;
  bufSize = w * h;
  natBuffer = buffer;
  if (playing) {
    // Creates a playerEvent.
    if (playerEventMethod != null) {
      try {
        playerEventMethod.invoke(eventHandler, new Object[] { this });
      } catch (Exception e) {
        // Fixed: this class dispatches playerEvent(), not movieEvent() — the
        // message now matches the sibling invokeEvent() overloads.
        System.err.println("error, disabling playerEvent() for " + filename);
        e.printStackTrace();
        playerEventMethod = null;
      }
    }
  }
}
/**
 * Called by the byte-data sink when a new raw data chunk arrives. Copies the
 * bytes into copyData, stores the caps string describing the data, and fires
 * playerEvent() on the registered handler.
 */
protected synchronized void invokeEvent(Caps caps, int n, ByteBuffer buffer) {
  available = true;
  bufSize = n;
  tempDataCaps = caps.toString();

  if (copyData == null) {
    copyData = new byte[n];
  }

  buffer.rewind();
  try {
    buffer.get(copyData);
  } catch (BufferUnderflowException e) {
    // Incoming buffer was smaller than expected; drop this chunk.
    e.printStackTrace();
    copyData = null;
    return;
  }

  // Creates a playerEvent, but only while actually playing.
  if (playing && playerEventMethod != null) {
    try {
      playerEventMethod.invoke(eventHandler, new Object[] { this });
    } catch (Exception e) {
      System.err.println("error, disabling playerEvent() for " + filename);
      e.printStackTrace();
      playerEventMethod = null;
    }
  }
}
/**
 * Disposes a native gstreamer buffer previously handed out by this object.
 * Called back by the copy handler once it is done with the buffer.
 *
 * @param buf the org.gstreamer.Buffer to release
 */
public synchronized void disposeBuffer(Object buf) {
  Buffer gstBuffer = (Buffer) buf;
  gstBuffer.dispose();
}
/**
 * Invoked from the bus handler when the end of the stream is reached:
 * either rewinds (when looping) or stops reporting as playing.
 */
protected void eosEvent() {
  if (!repeat) {
    playing = false;
    return;
  }
  goToBeginning();
}
}

View File

@@ -0,0 +1,249 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import org.gstreamer.*;
// TODO: update to latest gstreamer on windows, jmcvideo on mac, seeking in gspipeline, check sf tracker
import processing.core.PApplet;
import processing.core.PConstants;
import java.io.File;
import java.util.List;
/**
 * This class contains some basic functions used by the rest of the classes in
 * this library: library-wide initialization of gstreamer, location of the
 * global/local gstreamer installs, plugin registration and a couple of time
 * conversion helpers.
 */
public class GSVideo implements PConstants {
  protected static String VERSION_STRING = "0.9";
  // Number of GSVideo objects that have called init() so far; gstreamer is
  // initialized only on the first call.
  protected static long INSTANCES_COUNT = 0;

  protected static String gstreamerBinPath = "";
  protected static String gstreamerPluginsPath = "";

  protected static boolean defaultGLibContext = false;

  // Priority is given to global install of GStreamer if this is set to true.
  public static boolean globalGStreamer = true;

  // Direct buffer pass enabled by default.
  public static boolean passDirectBuffer = true;

  public static String globalGStreamerPath;
  public static String globalPluginsFolder = "gstreamer-0.10";

  // Default locations of the global install of gstreamer for each platform:
  static {
    if (PApplet.platform == MACOSX) {
      globalGStreamerPath = "/System/Library/Frameworks/GStreamer.framework/Versions/Current/lib";
    } else if (PApplet.platform == WINDOWS) {
      // Empty string means: search java.library.path (see lookForGlobalGStreamer).
      globalGStreamerPath = "";
      //globalGStreamerPath = "C://Program Files (x86)//OSSBuild//GStreamer//v0.10.7//lib";
    } else if (PApplet.platform == LINUX) {
      globalGStreamerPath = "/usr/lib";
    } else {}
  }

  // Default location of the local install of gstreamer. Suggested by Charles Bourasseau.
  // When it is left as empty string, GSVideo will attempt to use the path from GSLibraryPath.get(),
  // otherwise it will use it as the path to the folder where the libgstreamer.dylib and other
  // files are located.
  public static String localGStreamerPath = "";
  public static String localPluginsFolder = "plugins";

  // Some constants to identify AUDIO, VIDEO and RAW streams.
  static public final int AUDIO = 0;
  static public final int VIDEO = 1;
  static public final int RAW = 2;

  /**
   * Initializes gstreamer on the first call; subsequent calls only increment
   * the instance counter.
   */
  public static void init() {
    if (INSTANCES_COUNT == 0) {
      PApplet.println("GSVideo version: " + VERSION_STRING);
      initImpl();
    }
    INSTANCES_COUNT++;
  }

  /**
   * Tears down and re-initializes gstreamer, unloading all registered plugins
   * first. NOTE(review): INSTANCES_COUNT is intentionally left untouched here;
   * existing GSVideo objects are still alive after a restart.
   */
  public static void restart() {
    removePlugins();
    Gst.deinit();
    initImpl();
  }

  /**
   * Platform-specific gstreamer setup: resolves the binary/plugin paths,
   * points JNA at them, and calls Gst.init().
   */
  protected static void initImpl() {
    if (PApplet.platform == LINUX) {
      // Linux only supports global gstreamer for now.
      globalGStreamer = true;
      setLinuxPath();
    } else if (PApplet.platform == WINDOWS) {
      setWindowsPath();
    } else if (PApplet.platform == MACOSX) {
      setMacOSXPath();
    }

    if (!gstreamerBinPath.equals("")) {
      System.setProperty("jna.library.path", gstreamerBinPath);
    }

    if ((PApplet.platform == LINUX) && !globalGStreamer) {
      System.err.println("Loading local version of GStreamer not supported in Linux at this time.");
    }

    if ((PApplet.platform == WINDOWS) && !globalGStreamer) {
      GSLibraryLoader loader = GSLibraryLoader.getInstance();
      if (loader == null) {
        System.err.println("Cannot load local version of GStreamer libraries.");
      }
    }

    if ((PApplet.platform == MACOSX) && !globalGStreamer) {
      // Nothing to do here, since the dylib mechanism in OSX doesn't require the
      // library loader.
    }

    String[] args = { "" };
    Gst.setUseDefaultContext(defaultGLibContext);
    Gst.init("GSVideo", args);

    addPlugins();
  }

  /**
   * Registers the plugins found in gstreamerPluginsPath (no-op when the path
   * is empty, i.e. when a global install is used).
   */
  protected static void addPlugins() {
    if (!gstreamerPluginsPath.equals("")) {
      Registry reg = Registry.getDefault();
      boolean res;
      res = reg.scanPath(gstreamerPluginsPath);
      if (!res) {
        System.err.println("Cannot load GStreamer plugins from " + gstreamerPluginsPath);
      }
    }
  }

  /**
   * Removes every plugin currently registered with the default registry.
   */
  protected static void removePlugins() {
    Registry reg = Registry.getDefault();
    // The list is already typed as Plugin, so no index loop or cast is needed.
    List<Plugin> list = reg.getPluginList();
    for (Plugin plg : list) {
      reg.removePlugin(plg);
    }
  }

  /**
   * Resolves the gstreamer paths on Linux. Falls back to a local install when
   * no global one is found (though local loading is unsupported on Linux).
   */
  protected static void setLinuxPath() {
    if (globalGStreamer && lookForGlobalGStreamer()) {
      gstreamerBinPath = "";
      gstreamerPluginsPath = "";
    } else {
      globalGStreamer = false;
      if (localGStreamerPath.equals("")) {
        GSLibraryPath libPath = new GSLibraryPath();
        String path = libPath.get();
        gstreamerBinPath = path + "/gstreamer/linux";
        gstreamerPluginsPath = path + "/gstreamer/linux/" + localPluginsFolder;
      } else {
        gstreamerBinPath = localGStreamerPath;
        gstreamerPluginsPath = localGStreamerPath + "/" + localPluginsFolder;
      }
    }
  }

  /**
   * Resolves the gstreamer paths on Windows, preferring a global install.
   */
  protected static void setWindowsPath() {
    if (globalGStreamer && lookForGlobalGStreamer()) {
      gstreamerBinPath = "";
      gstreamerPluginsPath = "";
    } else {
      globalGStreamer = false;
      if (localGStreamerPath.equals("")) {
        GSLibraryPath libPath = new GSLibraryPath();
        String path = libPath.get();
        gstreamerBinPath = path + "\\gstreamer\\win";
        gstreamerPluginsPath = path + "\\gstreamer\\win\\" + localPluginsFolder;
      } else {
        gstreamerBinPath = localGStreamerPath;
        gstreamerPluginsPath = localGStreamerPath + "\\" + localPluginsFolder;
      }
    }
  }

  /**
   * Resolves the gstreamer paths on Mac OS X, preferring a global install.
   */
  protected static void setMacOSXPath() {
    if (globalGStreamer && lookForGlobalGStreamer()) {
      gstreamerBinPath = globalGStreamerPath;
      gstreamerPluginsPath = globalGStreamerPath + "/" + globalPluginsFolder;
    } else {
      globalGStreamer = false;
      if (localGStreamerPath.equals("")) {
        GSLibraryPath libPath = new GSLibraryPath();
        String path = libPath.get();
        gstreamerBinPath = path + "/gstreamer/macosx";
        gstreamerPluginsPath = path + "/gstreamer/macosx/" + localPluginsFolder;
      } else {
        gstreamerBinPath = localGStreamerPath;
        gstreamerPluginsPath = localGStreamerPath + "/" + localPluginsFolder;
      }
    }
  }

  /**
   * Looks for a file whose name contains "libgstreamer" either in the
   * configured global path or in every entry of java.library.path. On success
   * updates globalGStreamerPath and returns true.
   */
  protected static boolean lookForGlobalGStreamer() {
    String[] searchPaths = null;
    if (!globalGStreamerPath.equals("")) {
      searchPaths = new String[] {globalGStreamerPath};
    }

    if (searchPaths == null) {
      String lpaths = System.getProperty("java.library.path");
      String pathsep = System.getProperty("path.separator");
      searchPaths = lpaths.split(pathsep);
    }

    for (int i = 0; i < searchPaths.length; i++) {
      String path = searchPaths[i];
      if (libgstreamerPresent(path, "libgstreamer")) {
        globalGStreamerPath = path;
        return true;
      }
    }
    return false;
  }

  /**
   * Returns true when some filename in directory dir contains the given
   * substring (false when the directory cannot be listed).
   */
  protected static boolean libgstreamerPresent(String dir, String file) {
    File libPath = new File(dir);
    String[] files = libPath.list();
    if (files != null) {
      for (int i = 0; i < files.length; i++) {
        if (-1 < files[i].indexOf(file)) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Converts a nanosecond count into fractional seconds. Replaces the original
   * loop that divided by 1E3 three times with a single division by 1E9.
   */
  static protected float nanoSecToSecFrac(float nanosec) {
    return (float) (nanosec / 1E9);
  }

  /**
   * Converts seconds into nanoseconds. Replaces the original deprecated
   * {@code new Float(...).longValue()} boxing with a direct cast, which
   * truncates toward zero exactly like Float.longValue() did.
   */
  static protected long secToNanoLong(float sec) {
    return (long) (sec * 1E9);
  }
}

View File

@@ -0,0 +1,750 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import processing.core.*;
import java.awt.Dimension;
import java.io.*;
import java.nio.*;
import java.util.concurrent.TimeUnit;
import java.lang.reflect.*;
import org.gstreamer.*;
import org.gstreamer.Buffer;
import org.gstreamer.elements.*;
/**
 * This class makes it possible to load movies and to play them back in many
 * ways including looping, pausing, and changing speed.
 */
public class GSMovie extends PImage implements PConstants {
  protected String filename;              // for error messages

  protected boolean playing = false;
  protected boolean paused = false;
  protected boolean repeat = false;

  protected float fps;                    // target reading framerate, -1 until known
  protected float rate;                   // playback speed multiplier
  protected int bufWidth;                 // dimensions of the last frame received
  protected int bufHeight;

  protected PlayBin2 gplayer;             // the underlying gstreamer playbin

  protected Method movieEventMethod;      // movieEvent(GSMovie) on the handler, if any
  protected Method copyBufferMethod;      // addPixelsToBuffer(...) on the copy handler, if any

  protected Object eventHandler;
  protected Object copyHandler;

  protected boolean available;            // true when a new frame is waiting to be read
  protected boolean sinkReady;

  protected RGBDataAppSink rgbSink = null;
  protected int[] copyPixels = null;

  protected BufferDataAppSink natSink = null;
  protected Buffer natBuffer = null;
  protected boolean copyBufferMode = false;
  protected String copyMask;

  protected boolean firstFrame = true;    // the image is sized lazily on first read()

  /**
   * Creates an instance of GSMovie loading the movie from filename.
   *
   * @param parent PApplet
   * @param filename String
   */
  public GSMovie(PApplet parent, String filename) {
    super(0, 0, RGB);
    initGStreamer(parent, filename);
  }

  /**
   * Releases the gstreamer resources associated to this movie object.
   * It shouldn't be used after this.
   */
  public void delete() {
    if (gplayer != null) {
      try {
        if (gplayer.isPlaying()) {
          gplayer.stop();
        }
      } catch (IllegalStateException e) {
        System.err.println("error when deleting player, maybe some native resource is already disposed");
      } catch (Exception e) {
        e.printStackTrace();
      }

      pixels = null;
      copyPixels = null;

      if (rgbSink != null) {
        rgbSink.removeListener();
        rgbSink.dispose();
        rgbSink = null;
      }

      natBuffer = null;
      if (natSink != null) {
        natSink.removeListener();
        natSink.dispose();
        natSink = null;
      }

      gplayer.dispose();
      gplayer = null;
    }
  }

  /**
   * Same as delete.
   */
  public void dispose() {
    delete();
  }

  /**
   * Sets the object to use as destination for the frames read from the stream.
   * The color conversion mask is automatically set to the one required to
   * copy the frames to OpenGL.
   *
   * @param dest Object
   */
  public void setPixelDest(Object dest) {
    copyHandler = dest;
    if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
      copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
    } else {
      copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
    }
  }

  /**
   * Sets the object to use as destination for the frames read from the stream.
   *
   * @param dest Object
   * @param mask String
   */
  public void setPixelDest(Object dest, String mask) {
    copyHandler = dest;
    copyMask = mask;
  }

  /**
   * Uses a generic object as handler of the movie. This object should have a
   * movieEvent method that receives a GSMovie argument. This method will
   * be called upon a new frame read event.
   *
   */
  public void setEventHandlerObject(Object obj) {
    eventHandler = obj;
    try {
      movieEventMethod = eventHandler.getClass().getMethod("movieEvent",
          new Class[] { GSMovie.class });
    } catch (Exception e) {
      // no such method, or an error.. which is fine, just ignore
    }
  }

  /**
   * Get the width of the source video. Note: calling this method repeatedly
   * can slow down playback performance.
   *
   * @return int
   */
  public int getSourceWidth() {
    Dimension dim = gplayer.getVideoSize();
    if (dim != null) {
      return dim.width;
    } else {
      return 0;
    }
  }

  /**
   * Get the height of the source video. Note: calling this method repeatedly
   * can slow down playback performance.
   *
   * @return int
   */
  public int getSourceHeight() {
    Dimension dim = gplayer.getVideoSize();
    if (dim != null) {
      return dim.height;
    } else {
      return 0;
    }
  }

  /**
   * Get the original framerate of the source video. Note: calling this method repeatedly
   * can slow down playback performance.
   *
   * @return float
   */
  public float getSourceFrameRate() {
    return (float)gplayer.getVideoSinkFrameRate();
  }

  /**
   * Set how often new frames are to be read from the movie. Does not actually
   * set the speed of the movie playback, that's handled by the speed() method.
   *
   * @param ifps float
   * @see #speed(float)
   */
  public void frameRate(float ifps) {
    // We calculate the target ratio in the case both the
    // current and target framerates are valid (greater than
    // zero), otherwise we leave it as 1.
    float f = (0 < ifps && 0 < fps) ? ifps / fps : 1;

    if (playing) {
      gplayer.pause();
    }

    long t = gplayer.queryPosition(TimeUnit.NANOSECONDS);

    boolean res;
    long start, stop;
    if (rate > 0) {
      start = t;
      stop = -1;
    } else {
      // Negative rates play backwards: the segment runs from 0 up to here.
      start = 0;
      stop = t;
    }

    res = gplayer.seek(rate * f, Format.TIME, SeekFlags.FLUSH,
        SeekType.SET, start, SeekType.SET, stop);
    if (!res) {
      System.err.println("Seek operation failed.");
    }

    if (playing) {
      gplayer.play();
    }

    fps = ifps;
  }

  /**
   * Set a multiplier for how fast/slow the movie should be run. The default is
   * 1.0. speed(2) will play the movie at double speed (2x). speed(0.5) will
   * play at half speed. speed(-1) will play backwards at regular speed.
   *
   * @param irate float
   */
  public void speed(float irate) {
    // If the frameRate() method is called continuously with very similar
    // rate values, playback might become sluggish. This condition attempts
    // to take care of that.
    if (PApplet.abs(rate - irate) > 0.1) {
      rate = irate;
      frameRate(fps); // The framerate is the same, but the rate (speed) could be different.
    }
  }

  /**
   * Get the full length of this movie (in seconds).
   *
   * @return float
   */
  public float duration() {
    // NOTE(review): toSeconds() plus nanoSecToSecFrac(getNanoSeconds()) looks
    // like it could double-count if getNanoSeconds() returns the total rather
    // than the fractional part — verify against the ClockTime API.
    float sec = gplayer.queryDuration().toSeconds();
    float nanosec = gplayer.queryDuration().getNanoSeconds();
    return sec + GSVideo.nanoSecToSecFrac(nanosec);
  }

  /**
   * Return the current time in seconds.
   *
   * @return float
   */
  public float time() {
    float sec = gplayer.queryPosition().toSeconds();
    float nanosec = gplayer.queryPosition().getNanoSeconds();
    return sec + GSVideo.nanoSecToSecFrac(nanosec);
  }

  /**
   * Get the full length of this movie (in frames).
   *
   * @return long
   */
  public long length() {
    // Fixed: cast to long, matching the declared return type; the original
    // cast to int could overflow for very long movies.
    return (long)(duration() * getSourceFrameRate());
  }

  /**
   * Return the current frame.
   *
   * @return int
   */
  public int frame() {
    return (int)(time() * getSourceFrameRate());
  }

  /**
   * Jump to a specific location (in seconds). The number is a float so
   * fractions of seconds can be used.
   *
   * @param where float
   */
  public void jump(float where) {
    if (playing) {
      gplayer.pause();
    }

    boolean res;
    long start = GSVideo.secToNanoLong(where);
    long stop = -1; // or whatever > new_pos

    res = gplayer.seek(1.0, Format.TIME, SeekFlags.FLUSH,
        SeekType.SET, start, SeekType.SET, stop);
    if (!res) {
      System.err.println("Seek operation failed.");
    }

    if (playing) {
      gplayer.play();
    }
  }

  /**
   * Jump to a specific frame.
   *
   * @param frame int
   */
  public void jump(int frame) {
    float srcFramerate = getSourceFrameRate();
    // The duration of a single frame:
    float frameDuration = 1 / srcFramerate;
    // We move to the middle of the frame by adding 0.5:
    float where = (frame + 0.5f) * frameDuration;
    // Taking into account border effects:
    float diff = duration() - where;
    if (diff < 0) {
      where += diff - 0.25f * frameDuration;
    }
    jump(where);
  }

  /**
   * Returns true if the stream is already producing frames.
   *
   * @return boolean
   */
  public boolean ready() {
    return 0 < bufWidth && 0 < bufHeight && sinkReady;
  }

  /**
   * Return the true or false depending on whether there is a new frame ready to
   * be read.
   *
   * @return boolean
   */
  public boolean available() {
    return available;
  }

  /**
   * Returns whether the movie is playing or not.
   *
   * @return boolean
   */
  public boolean isPlaying() {
    return playing;
  }

  /**
   * Returns whether the movie is paused or not. If isPlaying() and isPaused()
   * both return false it means that the movie is stopped.
   *
   * @return boolean
   */
  public boolean isPaused() {
    return paused;
  }

  /**
   * Returns whether the movie is looping or not.
   *
   * @return boolean
   */
  public boolean isLooping() {
    return repeat;
  }

  /**
   * Begin playing the movie, with no repeat.
   */
  public void play() {
    if (!sinkReady) {
      initSink();
    }
    playing = true;
    paused = false;
    gplayer.play();
  }

  /**
   * Begin playing the movie, with repeat.
   */
  public void loop() {
    repeat = true;
    play();
  }

  /**
   * Shut off the repeating loop.
   */
  public void noLoop() {
    repeat = false;
  }

  /**
   * Pause the movie at its current time.
   */
  public void pause() {
    playing = false;
    paused = true;
    gplayer.pause();
  }

  /**
   * Stop the movie, and rewind.
   */
  public void stop() {
    if (playing) {
      goToBeginning();
      playing = false;
    }
    paused = false;
    gplayer.stop();
  }

  /**
   * Reads the current video frame.
   */
  public synchronized void read() {
    if (fps <= 0) {
      // Framerate not set yet, so we obtain from stream,
      // which is already playing since we are in read().
      fps = getSourceFrameRate();
    }

    // We loadPixels() first to ensure that at least we always have a non-null
    // pixels array, even if without any valid image inside.
    loadPixels();

    if (copyBufferMode) {
      // The native buffer from gstreamer is copied to the destination object.
      if (natBuffer == null || copyBufferMethod == null) {
        return;
      }

      if (firstFrame) {
        super.init(bufWidth, bufHeight, RGB);
        loadPixels();
        firstFrame = false;
      }

      IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
      try {
        copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
      } catch (Exception e) {
        e.printStackTrace();
      }

      natBuffer = null;
    } else {
      // Normal operation mode: the pixels just read from gstreamer
      // are copied to the pixels array.
      if (copyPixels == null) {
        return;
      }

      if (firstFrame) {
        resize(bufWidth, bufHeight);
        firstFrame = false;
      }

      // Swap the arrays instead of copying element by element.
      int[] temp = pixels;
      pixels = copyPixels;
      updatePixels();
      copyPixels = temp;
    }

    available = false;
  }

  /**
   * Goes to the first frame of the movie.
   */
  public void goToBeginning() {
    boolean res = gplayer.seek(ClockTime.fromNanos(0));
    if (!res) {
      System.err.println("Seek operation failed.");
    }
  }

  /**
   * Goes to the last frame of the movie.
   */
  public void goToEnd() {
    long nanos = gplayer.queryDuration().getNanoSeconds();
    boolean res = gplayer.seek(ClockTime.fromNanos(nanos));
    if (!res) {
      System.err.println("Seek operation failed.");
    }
  }

  /**
   * Change the volume. Values are from 0 to 1.
   *
   * @param v float
   */
  public void volume(float v) {
    if (playing) {
      gplayer.setVolume(v);
    }
  }

  /**
   * Returns the text string containing the filename of the video loaded.
   *
   * @return String
   */
  public String getFilename() {
    return filename;
  }

  /**
   * Creates the gstreamer playbin for the given file, trying the sketch's
   * data path first and then the filename as-is, and registers this object
   * with the parent applet.
   */
  protected void initGStreamer(PApplet parent, String filename) {
    this.parent = parent;
    gplayer = null;

    File file;

    GSVideo.init();

    // first check to see if this can be read locally from a file.
    try {
      try {
        // first try a local file using the dataPath. usually this will
        // work ok, but sometimes the dataPath is inside a jar file,
        // which is less fun, so this will crap out.
        file = new File(parent.dataPath(filename));
        if (file.exists()) {
          gplayer = new PlayBin2("GSMovie Player");
          gplayer.setInputFile(file);
        }
      } catch (Exception e) {
      } // ignored

      // read from a file just hanging out in the local folder.
      // this might happen when the video library is used with some
      // other application, or the person enters a full path name
      if (gplayer == null) {
        try {
          file = new File(filename);
          if (file.exists()) {
            gplayer = new PlayBin2("GSMovie Player");
            gplayer.setInputFile(file);
          } else {
            System.err.println("File " + filename + " does not exist. Please check location.");
          }
        } catch (Exception e) {
          // Fixed: replaced a leftover profanity debug println with a
          // meaningful error message.
          System.err.println("Error while opening movie file " + filename);
          e.printStackTrace();
        }
      }
      // Network read needs to be implemented...
    } catch (SecurityException se) {
      // online, whups. catch the security exception out here rather than
      // doing it three times (or whatever) for each of the cases above.
    }

    if (gplayer == null) {
      parent.die("Could not load movie file " + filename, null);
    }

    // we've got a valid movie! let's rock.
    try {
      this.filename = filename; // for error messages

      // register methods
      parent.registerDispose(this);

      setEventHandlerObject(parent);
      rate = 1.0f;
      fps = -1;
      sinkReady = false;
      bufWidth = bufHeight = 0;
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Creates and attaches the video sink to the playbin: either a native
   * buffer sink (copy-buffer mode, when the copy handler exposes
   * addPixelsToBuffer/setPixelSource) or a plain RGB sink, and installs the
   * end-of-stream handler. Sets sinkReady to true when done.
   */
  protected void initSink() {
    if (copyHandler != null) {
      try {
        copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
            new Class[] { Object.class, IntBuffer.class, int.class, int.class });
        copyBufferMode = true;
      } catch (Exception e) {
        // no such method, or an error.. which is fine, just ignore
        copyBufferMode = false;
      }

      if (copyBufferMode) {
        try {
          Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
          meth.invoke(copyHandler, new Object[] { this });
        } catch (Exception e) {
          copyBufferMode = false;
        }

        if (copyBufferMode) {
          natSink = new BufferDataAppSink("nat", copyMask,
              new BufferDataAppSink.Listener() {
                public void bufferFrame(int w, int h, Buffer buffer) {
                  invokeEvent(w, h, buffer);
                }
              });

          natSink.setAutoDisposeBuffer(false);
          gplayer.setVideoSink(natSink);
          // The setVideoSink() method sets the videoSink as a property of the PlayBin,
          // which increments the refcount of the videoSink element. Disposing here once
          // to decrement the refcount.
          natSink.dispose();
        }
      }
    }

    if (!copyBufferMode) {
      rgbSink = new RGBDataAppSink("rgb",
          new RGBDataAppSink.Listener() {
            public void rgbFrame(int w, int h, IntBuffer buffer) {
              invokeEvent(w, h, buffer);
            }
          });

      // Setting direct buffer passing in the video sink, so no new buffers are created
      // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
      // out this one).
      rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
      gplayer.setVideoSink(rgbSink);
      // The setVideoSink() method sets the videoSink as a property of the PlayBin,
      // which increments the refcount of the videoSink element. Disposing here once
      // to decrement the refcount.
      rgbSink.dispose();
    }

    // Creating bus to handle end-of-stream event.
    Bus bus = gplayer.getBus();
    bus.connect(new Bus.EOS() {
      public void endOfStream(GstObject element) {
        eosEvent();
      }
    });

    sinkReady = true;
  }

  /**
   * Called by the RGB sink when a new frame arrives. Copies the pixel data
   * into copyPixels and fires movieEvent() on the handler.
   */
  protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
    available = true;
    bufWidth = w;
    bufHeight = h;
    if (copyPixels == null) {
      copyPixels = new int[w * h];
    }
    buffer.rewind();
    try {
      buffer.get(copyPixels);
    } catch (BufferUnderflowException e) {
      e.printStackTrace();
      copyPixels = null;
      return;
    }

    if (playing) {
      // Creates a movieEvent.
      if (movieEventMethod != null) {
        try {
          movieEventMethod.invoke(eventHandler, new Object[] { this });
        } catch (Exception e) {
          System.err.println("error, disabling movieEvent() for " + filename);
          e.printStackTrace();
          movieEventMethod = null;
        }
      }
    }
  }

  /**
   * Called by the native buffer sink when a new frame arrives in copy-buffer
   * mode. Stores the native buffer and fires movieEvent() on the handler.
   */
  protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
    available = true;
    bufWidth = w;
    bufHeight = h;
    natBuffer = buffer;

    if (playing) {
      // Creates a movieEvent.
      if (movieEventMethod != null) {
        try {
          movieEventMethod.invoke(eventHandler, new Object[] { this });
        } catch (Exception e) {
          System.err.println("error, disabling movieEvent() for " + filename);
          e.printStackTrace();
          movieEventMethod = null;
        }
      }
    }
  }

  /**
   * Disposes a native gstreamer buffer previously handed out by this movie.
   * Called back by the copy handler once it is done with the buffer.
   */
  public synchronized void disposeBuffer(Object buf) {
    ((Buffer)buf).dispose();
  }

  /**
   * Invoked from the bus handler when the end of the stream is reached:
   * either rewinds (when looping) or stops reporting as playing.
   */
  protected void eosEvent() {
    if (repeat) {
      goToBeginning();
    } else {
      playing = false;
    }
  }
}

View File

@@ -0,0 +1,369 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package codeanticode.gsvideo;
import processing.core.*;
import java.io.File;
import java.nio.ByteBuffer;
import org.gstreamer.Buffer;
import org.gstreamer.elements.RGBDataFileSink;
/**
* This class makes movies from a running program.
*/
public class GSMovieMaker {
protected PApplet parent;            // sketch this movie maker is attached to
protected boolean recording;         // true between start() and finish()
protected RGBDataFileSink recorder;  // gstreamer pipeline that encodes and writes the file
protected int width, height;         // frame dimensions; addFrame() rejects other sizes

// Codec type constants, passed as codecType to the constructors/init().
public static final int THEORA = 0;
public static final int XVID = 1;
public static final int X264 = 2;
public static final int DIRAC = 3;
public static final int MJPEG = 4;
public static final int MJPEG2K = 5;

// Encoding quality presets, passed as codecQuality to the constructors/init().
public static final int WORST = 0;
public static final int LOW = 1;
public static final int MEDIUM = 2;
public static final int HIGH = 3;
public static final int BEST = 4;
/**
 * Constructor that sets the codec to THEORA, MEDIUM quality and 30 fps.
 *
 * @param parent sketch this movie maker is attached to
 * @param requestWidth frame width in pixels
 * @param requestHeight frame height in pixels
 * @param filename output file; its extension selects the container format
 */
public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
    String filename) {
  init(parent, requestWidth, requestHeight, filename, THEORA, MEDIUM, 30);
}
/**
 * Constructor that allows to set codec type and fps (quality defaults to MEDIUM).
 *
 * @param codecType one of THEORA, XVID, X264, DIRAC, MJPEG, MJPEG2K
 * @param ifps target framerate of the output movie
 */
public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
    String filename, int codecType, int ifps) {
  init(parent, requestWidth, requestHeight, filename, codecType, MEDIUM, ifps);
}
/**
 * Constructor that allows to set codec type, encoding quality and fps.
 *
 * @param codecType one of THEORA, XVID, X264, DIRAC, MJPEG, MJPEG2K
 * @param codecQuality one of WORST, LOW, MEDIUM, HIGH, BEST
 * @param ifps target framerate of the output movie
 */
public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
    String filename, int codecType, int codecQuality, int ifps) {
  init(parent, requestWidth, requestHeight, filename, codecType,
      codecQuality, ifps);
}
/**
 * Constructor that allows to set the gstreamer encoder and muxer by name.
 * Properties for encoder and muxer are left to whatever the default values are.
 *
 * @param encoder gstreamer encoder element name (e.g. "theoraenc")
 * @param muxer gstreamer muxer element name (e.g. "oggmux")
 */
public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
    String filename, String encoder, String muxer, int ifps) {
  init(parent, requestWidth, requestHeight, filename, encoder, muxer, null, null, ifps);
}
/**
 * Constructor that allows to set the gstreamer encoder and muxer by name, as
 * well as the encoder properties.
 *
 * @param propNames names of encoder properties to set (parallel to propValues)
 * @param propValues values for the properties listed in propNames
 */
public GSMovieMaker(PApplet parent, int requestWidth, int requestHeight,
    String filename, String encoder, String muxer, String[] propNames, Object[] propValues, int ifps) {
  init(parent, requestWidth, requestHeight, filename, encoder, muxer, propNames, propValues, ifps);
}
/**
 * Releases the gstreamer resources associated to this movie maker object.
 * It shouldn't be used after this. Stops the recorder first so the pipeline
 * is not disposed while running.
 */
public void delete() {
  recorder.stop();
  recorder.dispose();
}
/**
 * Same as delete. Called automatically by Processing on applet disposal
 * (registered via registerDispose in init()).
 */
public void dispose() {
  delete();
}
/**
 * Adds a new frame to the video being recorded. Frames are silently ignored
 * unless recording is in progress and the array length is exactly
 * width * height.
 *
 * @param pixels int[] pixel array, one int per pixel
 */
public void addFrame(int[] pixels) {
  // Guard clause: accept only correctly-sized frames while recording.
  if (!recording || pixels.length != width * height) {
    return;
  }
  // 4 bytes per pixel; the int pixels are written through an IntBuffer view.
  Buffer srcBuffer = new Buffer(width * height * 4);
  ByteBuffer byteView = srcBuffer.getByteBuffer();
  byteView.clear();
  byteView.asIntBuffer().put(pixels);
  recorder.pushRGBFrame(srcBuffer);
}
/**
 * Starts recording. Frames passed to addFrame() before this call are dropped.
 */
public void start() {
  recorder.start();
  recording = true;
}
/**
 * Finishes recording: stops accepting frames, then stops the recorder
 * pipeline.
 */
public void finish() {
  recording = false;
  recorder.stop();
}
/**
 * Returns the number of frames currently in the pre-encoding queue,
 * waiting to be encoded.
 *
 * @return int number of queued frames
 */
public int getQueuedFrames() {
  return recorder.getNumQueuedFrames();
}
/**
 * Returns the number of frames dropped until now.
 *
 * @return int number of dropped frames
 */
public int getDroppedFrames() {
  return recorder.getNumDroppedFrames();
}
/**
 * Sets the maximum size of the pre-encoding and encoding queues.
 * When the encoding queue is full, the frames start to be accumulated
 * in the pre-encoding queue. By setting the size of the pre-encoding
 * queue to zero, it can grow arbitrarily large.
 *
 * @param npre maximum size of the pre-encoding queue (0 = unbounded)
 * @param nenc maximum size of the encoding queue
 */
public void setQueueSize(int npre, int nenc) {
  recorder.setPreQueueSize(npre);
  recorder.setSrcQueueSize(nenc);
}
/**
 * Returns true or false depending on whether recording is going
 * on right now or not.
 *
 * @return boolean
 */
public boolean isRecording() {
  return recording;
}
/**
 * Initializes the recorder from a codec-type/quality-preset pair.
 * The container (muxer) is chosen from the filename extension and the
 * GStreamer encoder element plus its properties are derived from the
 * requested codec and quality constant.
 *
 * @param iparent       host applet; used for registration and file paths
 * @param requestWidth  frame width in pixels
 * @param requestHeight frame height in pixels
 * @param filename      output file name; extension selects the container
 * @param codecType     one of THEORA, DIRAC, XVID, X264, MJPEG, MJPEG2K
 * @param codecQuality  one of WORST, LOW, MEDIUM, HIGH, BEST
 * @param ifps          recording frame rate (frames per second)
 */
protected void init(PApplet iparent, int requestWidth, int requestHeight,
String filename, int codecType, int codecQuality, int ifps) {
this.parent = iparent;
GSVideo.init();
// register methods
parent.registerDispose(this);
width = requestWidth;
height = requestHeight;
String[] propNames = null;
Object[] propValues = null;
String encoder = "";
// Determining container based on the filename extension.
String muxer = muxerForFilename(filename);
// Configuring encoder.
if (codecType == THEORA) {
encoder = "theoraenc";
propNames = new String[] { "quality" };
// theoraenc quality range is 0..63 (values boxed as Integer,
// matching the GStreamer property type).
propValues = new Object[] { intQuality(codecQuality, 0, 15, 31, 47, 63, 31) };
} else if (codecType == DIRAC) {
encoder = "schroenc";
propNames = new String[] { "quality" };
// schroenc quality is a double in 0..10.
propValues = new Object[] { doubleQuality(codecQuality, 0.0d, 2.5d, 5.0d, 7.5d, 10.0d, 5.0d) };
} else if (codecType == XVID) {
encoder = "xvidenc";
// TODO: set Properties of xvidenc.
} else if (codecType == X264) {
encoder = "x264enc";
// The pass property can take the following values:
// (0): cbr - Constant Bitrate Encoding (default)
// (4): quant - Constant Quantizer
// (5): qual - Constant Quality
// (17): pass1 - VBR Encoding - Pass 1
// (18): pass2 - VBR Encoding - Pass 2
// (19): pass3 - VBR Encoding - Pass 3
// When Constant Quality (5) is specified for pass, the quantizer
// property is interpreted as the quality level (lower = better).
propNames = new String[] { "pass", "quantizer" };
propValues = new Object[] { 5, intQuality(codecQuality, 50, 35, 21, 15, 1, 21) };
// The bitrate can be set with the bitrate property, which is integer and
// has range: 1 - 102400. Default: 2048 Current: 2048.
// This probably doesn't have any effect unless we set pass to cbr.
} else if (codecType == MJPEG) {
encoder = "jpegenc";
propNames = new String[] { "quality" };
// jpegenc quality range is 0..100.
propValues = new Object[] { intQuality(codecQuality, 0, 30, 50, 85, 100, 85) };
} else if (codecType == MJPEG2K) {
encoder = "jp2kenc";
} else {
parent.die("Unrecognized video codec", null);
}
initRecorder(filename, ifps, encoder, muxer, propNames, propValues);
}

/**
 * Selects the GStreamer muxer element from the output filename's
 * extension. Calls parent.die() (and returns "") when the extension
 * is not a recognized container.
 *
 * @param filename output file name, any case
 * @return GStreamer muxer element name, or "" if unrecognized
 */
private String muxerForFilename(String filename) {
String fn = filename.toLowerCase();
if (fn.endsWith(".ogg")) {
return "oggmux";
} else if (fn.endsWith(".avi")) {
return "avimux";
} else if (fn.endsWith(".mov")) {
return "qtmux";
} else if (fn.endsWith(".flv")) {
return "flvmux";
} else if (fn.endsWith(".mkv")) {
return "matroskamux";
} else if (fn.endsWith(".mp4")) {
return "mp4mux";
} else if (fn.endsWith(".3gp")) {
return "gppmux";
} else if (fn.endsWith(".mpg")) {
return "ffmux_mpeg";
} else if (fn.endsWith(".mj2")) {
return "mj2mux";
}
parent.die("Unrecognized video container", null);
return "";
}

/**
 * Maps a quality preset constant to a codec-specific integer value.
 *
 * @param preset one of WORST, LOW, MEDIUM, HIGH, BEST
 * @param dflt   value returned for any unrecognized preset
 * @return the codec-specific quality value for the preset
 */
private static int intQuality(int preset, int worst, int low, int medium,
int high, int best, int dflt) {
if (preset == WORST) {
return worst;
} else if (preset == LOW) {
return low;
} else if (preset == MEDIUM) {
return medium;
} else if (preset == HIGH) {
return high;
} else if (preset == BEST) {
return best;
}
return dflt;
}

/**
 * Maps a quality preset constant to a codec-specific double value
 * (kept separate from intQuality so the boxed property type stays
 * Double, as required by double-typed GStreamer properties).
 *
 * @param preset one of WORST, LOW, MEDIUM, HIGH, BEST
 * @param dflt   value returned for any unrecognized preset
 * @return the codec-specific quality value for the preset
 */
private static double doubleQuality(int preset, double worst, double low,
double medium, double high, double best, double dflt) {
if (preset == WORST) {
return worst;
} else if (preset == LOW) {
return low;
} else if (preset == MEDIUM) {
return medium;
} else if (preset == HIGH) {
return high;
} else if (preset == BEST) {
return best;
}
return dflt;
}
/**
 * Initializes the recorder with an explicitly specified GStreamer
 * encoder and muxer element, plus an arbitrary list of encoder
 * properties — for callers that need full control instead of the
 * codec-type/quality presets.
 *
 * @param iparent       host applet; used for registration and file paths
 * @param requestWidth  frame width in pixels
 * @param requestHeight frame height in pixels
 * @param filename      output file name
 * @param encoder       GStreamer encoder element name
 * @param muxer         GStreamer muxer element name
 * @param propNames     encoder property names (may be null)
 * @param propValues    encoder property values, parallel to propNames
 * @param ifps          recording frame rate (frames per second)
 */
protected void init(PApplet iparent, int requestWidth, int requestHeight, String filename,
String encoder, String muxer, String[] propNames, Object[] propValues, int ifps) {
this.parent = iparent;
GSVideo.init();
// Register with the applet so dispose() is called on shutdown.
parent.registerDispose(this);
this.width = requestWidth;
this.height = requestHeight;
initRecorder(filename, ifps, encoder, muxer, propNames, propValues);
}
/**
 * Creates the RGBDataFileSink that performs the actual encoding and
 * muxing, resolving the output path through the sketch's save folder.
 * Recording starts paused; call start() to begin.
 *
 * @param filename   output file name, resolved via parent.savePath()
 * @param ifps       recording frame rate (frames per second)
 * @param encoder    GStreamer encoder element name
 * @param muxer      GStreamer muxer element name
 * @param propNames  encoder property names (may be null)
 * @param propValues encoder property values, parallel to propNames
 */
protected void initRecorder(String filename, int ifps, String encoder, String muxer,
String[] propNames, Object[] propValues) {
File outputFile = new File(parent.savePath(filename));
recorder = new RGBDataFileSink("MovieMaker", width, height, ifps, encoder,
propNames, propValues, muxer, outputFile);
recording = false;
// Default queue capacities: 60 pre-encoding, 30 encoding.
setQueueSize(60, 30);
}
}