Removed advanced classes from video library, fixed resolution query in Capture

This commit is contained in:
codeanticode
2011-07-09 00:14:28 +00:00
parent a52b3b67ab
commit 7fdf29baad
5 changed files with 4 additions and 1959 deletions

View File

@@ -1,38 +0,0 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package processing.video;
import com.sun.jna.Library;
import com.sun.jna.Native;
/**
 * This JNA interface provides access to the environment variable-related functions in the C library.
 * How to use:
 * CLibrary clib = CLibrary.INSTANCE;
 * String s = clib.getenv("DYLD_LIBRARY_PATH");
 */
public interface CLibrary extends Library {
  // Shared instance bound to the native C runtime library ("c") through JNA.
  CLibrary INSTANCE = (CLibrary)Native.loadLibrary("c", CLibrary.class);
  // POSIX setenv(3): sets name=value; a non-zero overwrite replaces an existing value.
  int setenv(String name, String value, int overwrite);
  // POSIX getenv(3): returns the variable's value, or null when it is not set.
  String getenv(String name);
  // POSIX unsetenv(3): removes the variable from the environment.
  int unsetenv(String name);
  // POSIX putenv(3): adds a "name=value" string to the environment.
  int putenv(String string);
}

View File

@@ -685,14 +685,17 @@ public class Capture extends PImage implements PConstants {
protected void getSuppResAndFpsList() {
suppResList = new ArrayList<int[]>();
suppFpsList = new ArrayList<String>();
for (Element src : gpipeline.getSources()) {
for (Pad pad : src.getPads()) {
Caps caps = pad.getCaps();
int n = caps.size();
for (int i = 0; i < n; i++) {
Structure str = caps.getStructure(i);
if (!str.hasIntField("width") || !str.hasIntField("height")) continue;
int w = ((Integer)str.getValue("width")).intValue();
int h = ((Integer)str.getValue("height")).intValue();

View File

@@ -1,718 +0,0 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package processing.video;
import processing.core.*;
import java.nio.*;
import java.lang.reflect.*;
import org.gstreamer.*;
import org.gstreamer.Buffer;
import org.gstreamer.elements.*;
/**
 * This class allows to create a custom GStreamer pipeline.
 */
public class GSPipeline extends PImage implements PConstants {
  // Stream kind: one of GSVideo.VIDEO, GSVideo.AUDIO or GSVideo.RAW.
  protected int streamType;
  // gst-launch style description string used to build the pipeline.
  protected String pipeline;
  protected boolean playing = false;
  protected boolean paused = false;
  // True when loop() was requested; consulted in eosEvent().
  protected boolean repeat = false;
  // Dimensions (and pixel count / byte count) of the last buffer received.
  protected int bufWidth;
  protected int bufHeight;
  protected int bufSize;
  protected Pipeline gpipeline;
  // Reflection handles for the optional user-side callbacks.
  protected Method pipelineEventMethod;
  protected Method copyBufferMethod;
  protected Object eventHandler;
  protected Object copyHandler;
  // Set by invokeEvent() when a new frame arrives; cleared by read().
  protected boolean available;
  protected boolean pipelineReady;
  // VIDEO mode, normal path: RGB frames land in copyPixels via rgbSink.
  protected RGBDataSink rgbSink = null;
  protected int[] copyPixels = null;
  // VIDEO mode, buffer-copy path: native buffers handed to copyHandler.
  protected BufferDataSink natSink = null;
  protected Buffer natBuffer = null;
  protected boolean copyBufferMode = false;
  protected String copyMask;
  // RAW mode: bytes copied into copyData and published through data/dataCaps.
  protected ByteDataSink dataSink = null;
  protected byte[] copyData = null;
  public byte[] data = null;
  public String dataCaps;
  protected String tempDataCaps;
  // True until the first frame arrives; used to size the PImage lazily.
  protected boolean firstFrame = true;

  /**
   * Creates an instance of GSPipeline using the provided pipeline
   * string.
   *
   * @param parent PApplet
   * @param pstr String
   */
  public GSPipeline(PApplet parent, String pstr) {
    super(0, 0, RGB);
    initGStreamer(parent, pstr, GSVideo.VIDEO);
  }

  /**
   * Creates an instance of GSPipeline using the provided pipeline
   * string.
   *
   * @param parent PApplet
   * @param pstr String
   * @param type int
   */
  public GSPipeline(PApplet parent, String pstr, int type) {
    super(0, 0, RGB);
    initGStreamer(parent, pstr, type);
  }
/**
 * Releases the gstreamer resources associated to this pipeline object.
 * It shouldn't be used after this.
 */
public void delete() {
  if (gpipeline != null) {
    try {
      if (gpipeline.isPlaying()) {
        gpipeline.stop();
      }
    } catch (IllegalStateException e) {
      // The native object may already be gone; stopping is best-effort.
      System.err.println("error when deleting player, maybe some native resource is already disposed");
    } catch (Exception e) {
      e.printStackTrace();
    }
    // Drop Java-side references first, then detach the listener from each
    // sink and dispose it, and finally dispose the pipeline itself.
    pixels = null;
    data = null;
    copyPixels = null;
    if (rgbSink != null) {
      rgbSink.removeListener();
      rgbSink.dispose();
      rgbSink = null;
    }
    copyData = null;
    if (dataSink != null) {
      dataSink.removeListener();
      dataSink.dispose();
      dataSink = null;
    }
    natBuffer = null;
    if (natSink != null) {
      natSink.removeListener();
      natSink.dispose();
      natSink = null;
    }
    gpipeline.dispose();
    gpipeline = null;
  }
}

/**
 * Same as delete.
 */
public void dispose() {
  delete();
}
/**
 * Sets the object to use as destination for the frames read from the stream.
 * The color conversion mask is automatically set to the one required to
 * copy the frames to OpenGL.
 *
 * @param Object dest
 */
public void setPixelDest(Object dest) {
  copyHandler = dest;
  // JNA exposes buffers in the platform's native byte order, so the RGB
  // channel masks must be chosen accordingly.
  if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
    copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
  } else {
    copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
  }
}

/**
 * Sets the object to use as destination for the frames read from the stream.
 *
 * @param Object dest
 * @param String mask
 */
public void setPixelDest(Object dest, String mask) {
  copyHandler = dest;
  copyMask = mask;
}

/**
 * Uses a generic object as handler of the pipeline. This object should have a
 * pipelineEvent method that receives a GSPipeline argument. This method will
 * be called upon a new frame read event.
 *
 */
public void setEventHandlerObject(Object obj) {
  eventHandler = obj;
  try {
    // Looked up reflectively so a handler without the method is still valid.
    pipelineEventMethod = eventHandler.getClass().getMethod("pipelineEvent",
      new Class[] { GSPipeline.class });
  } catch (Exception e) {
    // no such method, or an error.. which is fine, just ignore
  }
}
/**
 * Get the full length of this movie (in seconds).
 *
 * @return float
 */
public float duration() {
  // Query the pipeline once: the original issued two separate native
  // queryDuration() calls, so the whole-second and fractional parts could
  // come from different moments (and cost an extra native round-trip).
  ClockTime dur = gpipeline.queryDuration();
  float sec = dur.toSeconds();
  float nanosec = dur.getNanoSeconds();
  return sec + GSVideo.nanoSecToSecFrac(nanosec);
}

/**
 * Return the current time in seconds.
 *
 * @return float
 */
public float time() {
  // Same single-query fix as duration(): one queryPosition() call serves
  // both the seconds and the nanosecond components consistently.
  ClockTime pos = gpipeline.queryPosition();
  float sec = pos.toSeconds();
  float nanosec = pos.getNanoSeconds();
  return sec + GSVideo.nanoSecToSecFrac(nanosec);
}
/**
 * Jump to a specific location (in seconds). The number is a float so
 * fractions of seconds can be used.
 *
 * @param float where
 */
public void jump(float where) {
  // Pause around the seek; playback is resumed below if it was running.
  if (playing) {
    gpipeline.pause();
  }
  boolean res;
  long start = GSVideo.secToNanoLong(where);
  long stop = -1; // or whatever > new_pos
  res = gpipeline.seek(1.0, Format.TIME, SeekFlags.FLUSH,
    SeekType.SET, start, SeekType.SET, stop);
  if (!res) {
    System.err.println("Seek operation failed.");
  }
  if (playing) {
    gpipeline.play();
  }
}
/**
 * Returns true if the stream is already producing frames.
 *
 * @return boolean
 */
public boolean ready() {
  // bufSize becomes positive once the first frame has been delivered.
  return 0 < bufSize && pipelineReady;
}

/**
 * Return the true or false depending on whether there is a new frame ready to
 * be read.
 *
 * @return boolean
 */
public boolean available() {
  return available;
}

/**
 * Returns whether the stream is playing or not.
 *
 * @return boolean
 */
public boolean isPlaying() {
  return playing;
}

/**
 * Returns whether the stream is paused or not. If isPlaying() and isPaused()
 * both return false it means that the stream is stopped.
 *
 * @return boolean
 */
public boolean isPaused() {
  return paused;
}

/**
 * Returns whether the stream is looping or not.
 *
 * @return boolean
 */
public boolean isLooping() {
  return repeat;
}

/**
 * Begin playing the stream, with no repeat.
 */
public void play() {
  // The GStreamer pipeline is built lazily on first playback.
  if (!pipelineReady) {
    initPipeline();
  }
  playing = true;
  paused = false;
  gpipeline.play();
}

/**
 * Begin playing the stream, with repeat.
 */
public void loop() {
  repeat = true;
  play();
}

/**
 * Shut off the repeating loop.
 */
public void noLoop() {
  repeat = false;
}

/**
 * Pause the stream at its current time.
 */
public void pause() {
  playing = false;
  paused = true;
  gpipeline.pause();
}

/**
 * Stop the stream, and rewind.
 */
public void stop() {
  if (playing) {
    goToBeginning();
    playing = false;
  }
  paused = false;
  gpipeline.stop();
}
/**
 * Reads the current video frame.
 */
public synchronized void read() {
  if (streamType == GSVideo.VIDEO) {
    // We loadPixels() first to ensure that at least we always have a non-null
    // pixels array, even if without any valid image inside.
    loadPixels();
    if (copyBufferMode) {
      // The native buffer from gstreamer is copied to the destination object.
      if (natBuffer == null || copyBufferMethod == null) {
        return;
      }
      if (firstFrame) {
        // Lazily resize the underlying PImage to the stream resolution.
        super.init(bufWidth, bufHeight, RGB);
        loadPixels();
        firstFrame = false;
      }
      IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
      try {
        copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
      } catch (Exception e) {
        e.printStackTrace();
      }
      natBuffer = null;
    } else {
      if (copyPixels == null) {
        return;
      }
      if (firstFrame) {
        super.init(bufWidth, bufHeight, RGB);
        loadPixels();
        firstFrame = false;
      }
      // Swap rather than copy: the freshly filled array becomes pixels and
      // the previous pixels array is recycled for the next incoming frame.
      int[] temp = pixels;
      pixels = copyPixels;
      updatePixels();
      copyPixels = temp;
    }
  } else if (streamType == GSVideo.RAW) {
    if (copyData == null) {
      return;
    }
    dataCaps = tempDataCaps;
    if (data == null) {
      data = new byte[copyData.length];
    }
    // Same swap trick for raw byte buffers.
    byte[] temp = data;
    data = copyData;
    copyData = temp;
  }
  available = false;
}
/**
 * Goes to the first frame of the stream.
 */
public void goToBeginning() {
  boolean res = gpipeline.seek(ClockTime.fromNanos(0));
  if (!res) {
    System.err.println("Seek operation failed.");
  }
}

/**
 * Goes to the last frame of the stream.
 */
public void goToEnd() {
  // Seek to the total stream duration, i.e. the very end.
  long nanos = gpipeline.queryDuration().getNanoSeconds();
  boolean res = gpipeline.seek(ClockTime.fromNanos(nanos));
  if (!res) {
    System.err.println("Seek operation failed.");
  }
}
/**
 * Get a float-value property from the pipeline.
 *
 * @param String name
 * @return float
 */
public float getProperty(String name) {
  // Only queried while playing; returns 0 otherwise.
  if (playing) {
    return ((Number)gpipeline.get(name)).floatValue();
  }
  return 0;
}

/**
 * Set a float-value property in the pipeline.
 *
 * @param String name
 * @param float v
 */
public void setProperty(String name, float v) {
  // Silently ignored unless the pipeline is playing.
  if (playing) {
    gpipeline.set(name, v);
  }
}

/**
 * Change the volume. Values are from 0 to 1. It will fail
 * if the pipeline doesn't have a volume property available.
 *
 * @param float v
 */
public void volume(float v) {
  setProperty("volume", v);
}

/**
 * Returns the text string used to build the pipeline.
 *
 * @return String
 */
public String getPipeline() {
  return pipeline;
}
/**
 * Stores the construction parameters and registers this object with the
 * parent sketch. The actual GStreamer pipeline is built lazily by
 * initPipeline() on first play().
 *
 * @param parent PApplet
 * @param pstr String
 * @param type int
 */
protected void initGStreamer(PApplet parent, String pstr, int type) {
  this.parent = parent;
  gpipeline = null;
  GSVideo.init();
  // register methods
  parent.registerDispose(this);
  setEventHandlerObject(parent);
  pipeline = pstr;
  streamType = type;
  bufWidth = bufHeight = bufSize = 0;
  pipelineReady = false;
}
/**
 * Builds and launches the GStreamer pipeline, appending the sink element
 * appropriate for the stream type (VIDEO, AUDIO or RAW) unless the user's
 * pipeline string already ends in a fakesink or filesink.
 */
protected void initPipeline() {
  // Determining if the last element is fakesink or filesink.
  int idx;
  String lastElem, lastElemName;
  String[] parts;
  idx = pipeline.lastIndexOf('!');
  lastElem = pipeline.substring(idx + 1, pipeline.length()).trim();
  parts = lastElem.split(" ");
  if (0 < parts.length)
    lastElemName = parts[0];
  else
    lastElemName = "";
  boolean fakeSink = lastElemName.equals("fakesink");
  boolean fileSink = lastElemName.equals("filesink");
  if (PApplet.platform == WINDOWS) {
    // Single backward slashes are replaced by double backward slashes,
    // otherwise gstreamer won't understand file paths.
    pipeline = pipeline.replace("\\", "\\\\");
  }
  if (fakeSink || fileSink) {
    // If the pipeline ends in a fakesink or filesink element, the RGBDataSink
    // is not added at the end of it...
    gpipeline = Pipeline.launch(pipeline);
  } else {
    if (streamType == GSVideo.VIDEO) {
      // For video pipelines, we add an RGBDataSink or NativeDataSink element at the end.
      if (copyHandler != null) {
        // Buffer-copy mode requires the handler to expose both
        // addPixelsToBuffer and setPixelSource; if either reflection step
        // fails we fall back to the normal RGB path below.
        try {
          copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
            new Class[] { Object.class, IntBuffer.class, int.class, int.class });
          copyBufferMode = true;
        } catch (Exception e) {
          // no such method, or an error.. which is fine, just ignore
          copyBufferMode = false;
        }
        if (copyBufferMode) {
          try {
            Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
            meth.invoke(copyHandler, new Object[] { this });
          } catch (Exception e) {
            copyBufferMode = false;
          }
          if (copyBufferMode) {
            String caps = " ! ffmpegcolorspace ! video/x-raw-rgb, bpp=32, depth=24, endianness=(int)4321, ";
            caps += copyMask;
            StringBuilder finalPipeStr = new StringBuilder(pipeline);
            finalPipeStr.append(caps);
            finalPipeStr.append(" ! fakesink name=nat");
            pipeline = finalPipeStr.toString();
            gpipeline = Pipeline.launch(pipeline);
            natSink = new BufferDataSink("nat", gpipeline,
              new BufferDataSink.Listener() {
                public void bufferFrame(int w, int h, Buffer buffer) {
                  invokeEvent(w, h, buffer);
                }
              });
            // The buffer is released explicitly via disposeBuffer(), not
            // automatically by the sink.
            natSink.setAutoDisposeBuffer(false);
          }
        }
      }
      if (!copyBufferMode) {
        // Making sure we are using the right color space and color masks:
        String caps = " ! ffmpegcolorspace ! video/x-raw-rgb, bpp=32, depth=24, endianness=(int)4321, ";
        // JNA creates ByteBuffer using native byte order, set masks according to that.
        if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN)
          caps += "red_mask=(int)0xFF00, green_mask=(int)0xFF0000, blue_mask=(int)0xFF000000";
        else
          caps += "red_mask=(int)0xFF0000, green_mask=(int)0xFF00, blue_mask=(int)0xFF";
        StringBuilder finalPipeStr = new StringBuilder(pipeline);
        finalPipeStr.append(caps);
        finalPipeStr.append(" ! fakesink name=rgb");
        pipeline = finalPipeStr.toString();
        gpipeline = Pipeline.launch(pipeline);
        rgbSink = new RGBDataSink("rgb", gpipeline, new RGBDataSink.Listener() {
          public void rgbFrame(boolean pre, int w, int h, IntBuffer buffer) {
            invokeEvent(w, h, buffer);
          }
        });
        // Setting direct buffer passing in the video sink, so no new buffers are created
        // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
        // out this one).
        rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
        // No need for videoSink.dispose(), because the append() doesn't increment the
        // refcount of the videoSink object.
      }
    } else if (streamType == GSVideo.AUDIO) {
      // For audio pipelines, we launch the pipeline as it is.
      gpipeline = Pipeline.launch(pipeline);
    } else if (streamType == GSVideo.RAW) {
      // RAW streams get a ByteDataSink delivering unconverted bytes.
      StringBuilder finalPipeStr = new StringBuilder(pipeline);
      finalPipeStr.append(" ! fakesink name=data");
      pipeline = finalPipeStr.toString();
      gpipeline = Pipeline.launch(pipeline);
      dataSink = new ByteDataSink("data", gpipeline,
        new ByteDataSink.Listener() {
          public void byteFrame(boolean pre, Caps caps, int size, ByteBuffer buffer) {
            invokeEvent(caps, size, buffer);
          }
        });
      dataSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
    } else {
      System.err.println("Unrecognized stream type: Please use VIDEO, AUDIO, or RAW.");
      return;
    }
  }
  // Creating bus to handle end-of-stream event.
  Bus bus = gpipeline.getBus();
  bus.connect(new Bus.EOS() {
    public void endOfStream(GstObject element) {
      eosEvent();
    }
  });
  pipelineReady = true;
}
/**
 * Sink callback for the normal VIDEO path: copies the RGB frame into
 * copyPixels and notifies the user's pipelineEvent handler, if any.
 */
protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
  available = true;
  bufWidth = w;
  bufHeight = h;
  bufSize = w * h;
  if (copyPixels == null) {
    copyPixels = new int[w * h];
  }
  buffer.rewind();
  try {
    buffer.get(copyPixels);
  } catch (BufferUnderflowException e) {
    // Incomplete buffer: discard this frame and wait for the next one.
    e.printStackTrace();
    copyPixels = null;
    return;
  }
  // Creates a pipelineEvent.
  if (pipelineEventMethod != null) {
    try {
      pipelineEventMethod.invoke(eventHandler, new Object[] { this });
    } catch (Exception e) {
      // Disable the callback after the first failure so a broken handler
      // doesn't spam errors on every frame.
      System.err.println("error, disabling pipelineEvent() for " + pipeline);
      e.printStackTrace();
      pipelineEventMethod = null;
    }
  }
}

/**
 * Sink callback for the buffer-copy VIDEO path: stashes the native buffer
 * (consumed later by read()) and notifies the user's handler.
 */
protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
  available = true;
  bufWidth = w;
  bufHeight = h;
  bufSize = w * h;
  natBuffer = buffer;
  if (playing) {
    // Creates a movieEvent.
    if (pipelineEventMethod != null) {
      try {
        pipelineEventMethod.invoke(eventHandler, new Object[] { this });
      } catch (Exception e) {
        System.err.println("error, disabling movieEvent() for " + pipeline);
        e.printStackTrace();
        pipelineEventMethod = null;
      }
    }
  }
}

/**
 * Sink callback for the RAW path: copies the bytes into copyData, records
 * the caps string, and notifies the user's handler.
 */
protected synchronized void invokeEvent(Caps caps, int n, ByteBuffer buffer) {
  available = true;
  bufSize = n;
  tempDataCaps = caps.toString();
  if (copyData == null) {
    copyData = new byte[n];
  }
  buffer.rewind();
  try {
    buffer.get(copyData);
  } catch (BufferUnderflowException e) {
    e.printStackTrace();
    copyData = null;
    return;
  }
  if (playing) {
    // Creates a playerEvent.
    if (pipelineEventMethod != null) {
      try {
        pipelineEventMethod.invoke(eventHandler, new Object[] { this });
      } catch (Exception e) {
        System.err.println("error, disabling pipelineEvent() for " + pipeline);
        e.printStackTrace();
        pipelineEventMethod = null;
      }
    }
  }
}

/**
 * Releases a native buffer previously handed to the copy handler
 * (auto-dispose is off in buffer-copy mode).
 */
public synchronized void disposeBuffer(Object buf) {
  ((Buffer)buf).dispose();
}

/**
 * End-of-stream handler: rewinds when looping, otherwise stops playing.
 */
protected void eosEvent() {
  if (repeat) {
    goToBeginning();
  } else {
    playing = false;
  }
}
}

View File

@@ -1,833 +0,0 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package processing.video;
import processing.core.*;
import java.awt.Dimension;
import java.io.*;
import java.nio.*;
import java.util.concurrent.TimeUnit;
import java.lang.reflect.*;
import org.gstreamer.*;
import org.gstreamer.Buffer;
import org.gstreamer.elements.*;
/**
* This class makes it possible to load and play generic media content through
* playbin (not only movies, but also audio files, etc).
*/
public class GSPlayer extends PImage implements PConstants {
protected int streamType;
protected String filename;
protected boolean playing = false;
protected boolean paused = false;
protected boolean repeat = false;
protected float fps;
protected float rate;
protected int bufWidth;
protected int bufHeight;
protected int bufSize;
protected PlayBin2 gplayer;
protected Method playerEventMethod;
protected Method copyBufferMethod;
protected Object eventHandler;
protected Object copyHandler;
protected boolean available;
protected boolean sinkReady;
protected RGBDataAppSink rgbSink = null;
protected int[] copyPixels = null;
protected BufferDataAppSink natSink = null;
protected Buffer natBuffer = null;
protected boolean copyBufferMode = false;
protected String copyMask;
protected ByteDataAppSink dataSink = null;
protected byte[] copyData = null;
public byte[] data = null;
public String dataCaps;
protected String tempDataCaps;
protected boolean firstFrame = true;
/**
* Creates an instance of GSPlayer loading the media file from filename,
* assuming that it is a video file.
*
* @param parent PApplet
* @param filename String
*/
public GSPlayer(PApplet parent, String filename) {
super(0, 0, RGB);
initGStreamer(parent, filename, GSVideo.VIDEO);
}
/**
* Creates an instance of GSPlayer loading the media file from filename,
* and trying to understand it as the indicated type.
*
* @param parent PApplet
* @param filename String
* @param type int
*/
public GSPlayer(PApplet parent, String filename, int type) {
super(0, 0, RGB);
initGStreamer(parent, filename, type);
}
/**
* Releases the gstreamer resources associated to this player object.
* It shouldn't be used after this.
*/
public void delete() {
if (gplayer != null) {
try {
if (gplayer.isPlaying()) {
gplayer.stop();
}
} catch (IllegalStateException e) {
System.err.println("error when deleting player, maybe some native resource is already disposed");
} catch (Exception e) {
e.printStackTrace();
}
pixels = null;
data = null;
copyPixels = null;
if (rgbSink != null) {
rgbSink.removeListener();
rgbSink.dispose();
rgbSink = null;
}
copyData = null;
if (dataSink != null) {
dataSink.removeListener();
dataSink.dispose();
dataSink = null;
}
natBuffer = null;
if (natSink != null) {
natSink.removeListener();
natSink.dispose();
natSink = null;
}
gplayer.dispose();
gplayer = null;
}
}
/**
* Same as delete.
*/
public void dispose() {
delete();
}
/**
* Sets the object to use as destination for the frames read from the stream.
* The color conversion mask is automatically set to the one required to
* copy the frames to OpenGL.
*
* @param Object dest
*/
public void setPixelDest(Object dest) {
copyHandler = dest;
if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
copyMask = "red_mask=(int)0xFF000000, green_mask=(int)0xFF0000, blue_mask=(int)0xFF00";
} else {
copyMask = "red_mask=(int)0xFF, green_mask=(int)0xFF00, blue_mask=(int)0xFF0000";
}
}
/**
* Sets the object to use as destination for the frames read from the stream.
*
* @param Object dest
* @param String mask
*/
public void setPixelDest(Object dest, String mask) {
copyHandler = dest;
copyMask = mask;
}
/**
* Uses a generic object as handler of the media file. This object should have a
* movieEvent method that receives a GSMovie argument. This method will
* be called upon a new frame read event.
*
*/
public void setEventHandlerObject(Object obj) {
eventHandler = obj;
try {
playerEventMethod = eventHandler.getClass().getMethod("playerEvent",
new Class[] { GSPlayer.class });
} catch (Exception e) {
// no such method, or an error.. which is fine, just ignore
}
}
/**
 * Get the width of the source video. Note: calling this method repeatedly
 * can slow down playback performance.
 *
 * @return int
 */
public int getSourceWidth() {
  // getVideoSize() can return null before the stream is negotiated.
  Dimension dim = gplayer.getVideoSize();
  if (dim != null) {
    return dim.width;
  } else {
    return 0;
  }
}

/**
 * Get the height of the source video. Note: calling this method repeatedly
 * can slow down playback performance.
 *
 * @return int
 */
public int getSourceHeight() {
  Dimension dim = gplayer.getVideoSize();
  if (dim != null) {
    return dim.height;
  } else {
    return 0;
  }
}

/**
 * Get the original framerate of the source video. Note: calling this method repeatedly
 * can slow down playback performance.
 *
 * @return float
 */
public float getSourceFrameRate() {
  return (float)gplayer.getVideoSinkFrameRate();
}
/**
 * Set how often new frames are to be read from the stream. Does not actually
 * set the speed of the playback, that's handled by the speed() method.
 *
 * @param int ifps
 * @see speed
 */
public void frameRate(float ifps) {
  // We calculate the target ratio in the case both the
  // current and target framerates are valid (greater than
  // zero), otherwise we leave it as 1.
  float f = (0 < ifps && 0 < fps) ? ifps / fps : 1;
  if (playing) {
    gplayer.pause();
  }
  long t = gplayer.queryPosition(TimeUnit.NANOSECONDS);
  boolean res;
  long start, stop;
  // When playing backwards (rate < 0) the segment runs from 0 up to the
  // current position; forwards it runs from the current position to the end.
  if (rate > 0) {
    start = t;
    stop = -1;
  } else {
    start = 0;
    stop = t;
  }
  res = gplayer.seek(rate * f, Format.TIME, SeekFlags.FLUSH,
    SeekType.SET, start, SeekType.SET, stop);
  if (!res) {
    System.err.println("Seek operation failed.");
  }
  if (playing) {
    gplayer.play();
  }
  fps = ifps;
}

/**
 * Set a multiplier for how fast/slow the movie should be run. The default is
 * 1.0. speed(2) will play the movie at double speed (2x). speed(0.5) will
 * play at half speed. speed(-1) will play backwards at regular speed.
 *
 * @param float irate
 */
public void speed(float irate) {
  // If the frameRate() method is called continuously with very similar
  // rate values, playback might become sluggish. This condition attempts
  // to take care of that.
  if (PApplet.abs(rate - irate) > 0.1) {
    rate = irate;
    frameRate(fps); // The framerate is the same, but the rate (speed) could be different.
  }
}
/**
 * Get the full length of the current stream (in seconds).
 *
 * @return float
 */
public float duration() {
  // Query the player once: the original issued two separate native
  // queryDuration() calls, so the whole-second and fractional parts could
  // come from different moments (and cost an extra native round-trip).
  ClockTime dur = gplayer.queryDuration();
  float sec = dur.toSeconds();
  float nanosec = dur.getNanoSeconds();
  return sec + GSVideo.nanoSecToSecFrac(nanosec);
}

/**
 * Return the current time in seconds.
 *
 * @return float
 */
public float time() {
  // Same single-query fix as duration().
  ClockTime pos = gplayer.queryPosition();
  float sec = pos.toSeconds();
  float nanosec = pos.getNanoSeconds();
  return sec + GSVideo.nanoSecToSecFrac(nanosec);
}
/**
 * Get the full length of this movie (in frames).
 *
 * @return long
 */
public long length() {
  // Cast to long, matching the declared return type: the original cast to
  // int, silently truncating the frame count for very long media.
  return (long)(duration() * getSourceFrameRate());
}

/**
 * Return the current frame.
 *
 * @return int
 */
public int frame() {
  return (int)(time() * getSourceFrameRate());
}
/**
* Jump to a specific location (in seconds). The number is a float so
* fractions of seconds can be used.
*
* @param float where
*/
public void jump(float where) {
if (playing) {
gplayer.pause();
}
boolean res;
long start = GSVideo.secToNanoLong(where);
long stop = -1; // or whatever > new_pos
res = gplayer.seek(1.0, Format.TIME, SeekFlags.FLUSH,
SeekType.SET, start, SeekType.SET, stop);
if (!res) {
System.err.println("Seek operation failed.");
}
if (playing) {
gplayer.play();
}
}
/**
 * Jump to a specific frame.
 *
 * @param frame int
 */
public void jump(int frame) {
  float srcFramerate = getSourceFrameRate();
  // The duration of a single frame:
  float frameDuration = 1 / srcFramerate;
  // We move to the middle of the frame by adding 0.5:
  float where = (frame + 0.5f) * frameDuration;
  // Taking into account border effects:
  float diff = duration() - where;
  if (diff < 0) {
    where += diff - 0.25f * frameDuration;
  }
  jump(where);
}
/**
* Returns true if the stream is already producing frames.
*
* @return boolean
*/
public boolean ready() {
return 0 < bufSize && sinkReady;
}
/**
* Return the true or false depending on whether there is a new frame ready to
* be read.
*
* @return boolean
*/
public boolean available() {
return available;
}
/**
* Returns whether the media is playing or not.
*
* @return boolean
*/
public boolean isPlaying() {
return playing;
}
/**
* Returns whether the media is paused or not. If isPlaying() and isPaused()
* both return false it means that the media is stopped.
*
* @return boolean
*/
public boolean isPaused() {
return paused;
}
/**
* Returns whether the media is looping or not.
*
* @return boolean
*/
public boolean isLooping() {
return repeat;
}
/**
* Begin playing the stream, with no repeat.
*/
public void play() {
if (!sinkReady) {
initSink();
}
playing = true;
paused = false;
gplayer.play();
}
/**
* Begin playing the stream, with repeat.
*/
public void loop() {
repeat = true;
play();
}
/**
* Shut off the repeating loop.
*/
public void noLoop() {
repeat = false;
}
/**
* Pause the stream at its current time.
*/
public void pause() {
playing = false;
paused = true;
gplayer.pause();
}
/**
* Stop the stream, and rewind.
*/
public void stop() {
if (playing) {
goToBeginning();
playing = false;
}
paused = false;
gplayer.stop();
}
/**
* Reads the current video frame or data buffer.
*/
public synchronized void read() {
if (fps <= 0) {
// Framerate not set yet, so we obtain from stream,
// which is already playing since we are in read().
fps = getSourceFrameRate();
}
if (streamType == GSVideo.VIDEO) {
// We loadPixels() first to ensure that at least we always have a non-null
// pixels array, even if without any valid image inside.
loadPixels();
if (copyBufferMode) {
// The native buffer from gstreamer is copies to the destination object.
if (natBuffer == null || copyBufferMethod == null) {
return;
}
if (firstFrame) {
super.init(bufWidth, bufHeight, RGB);
loadPixels();
firstFrame = false;
}
IntBuffer rgbBuffer = natBuffer.getByteBuffer().asIntBuffer();
try {
copyBufferMethod.invoke(copyHandler, new Object[] { natBuffer, rgbBuffer, bufWidth, bufHeight });
} catch (Exception e) {
e.printStackTrace();
}
natBuffer = null;
} else {
// Normal operation mode: the pixels just read from gstreamer
// are copied to the pixels array.
if (copyPixels == null) {
return;
}
if (firstFrame) {
super.init(bufWidth, bufHeight, RGB);
loadPixels();
firstFrame = false;
}
int[] temp = pixels;
pixels = copyPixels;
updatePixels();
copyPixels = temp;
}
} else if (streamType == GSVideo.RAW) {
if (copyData == null) {
return;
}
dataCaps = tempDataCaps;
if (data == null) {
data = new byte[copyData.length];
}
byte[] temp = data;
data = copyData;
copyData = temp;
}
available = false;
}
/**
 * Goes to the first frame of the stream.
 */
public void goToBeginning() {
  // Report a failed seek instead of silently discarding the result,
  // consistently with GSPipeline.goToBeginning().
  boolean res = gplayer.seek(ClockTime.fromNanos(0));
  if (!res) {
    System.err.println("Seek operation failed.");
  }
}
/**
* Change the volume. Values are from 0 to 1.
*
* @param float v
*/
public void volume(float v) {
if (playing) {
gplayer.setVolume(v);
}
}
/**
* Returns the text string containing the filename of the media loaded.
*
* @return String
*/
public String getFilename() {
return filename;
}
protected void initGStreamer(PApplet parent, String filename, int type) {
this.parent = parent;
gplayer = null;
File file;
GSVideo.init();
// first check to see if this can be read locally from a file.
try {
try {
// first try a local file using the dataPath. usually this will
// work ok, but sometimes the dataPath is inside a jar file,
// which is less fun, so this will crap out.
file = new File(parent.dataPath(filename));
if (file.exists()) {
gplayer = new PlayBin2("GSPlayer");
gplayer.setInputFile(file);
}
} catch (Exception e) {
} // ignored
// read from a file just hanging out in the local folder.
// this might happen when the video library is used with some
// other application, or the person enters a full path name
if (gplayer == null) {
try {
file = new File(filename);
if (file.exists()) {
gplayer = new PlayBin2("GSPlayer");
gplayer.setInputFile(file);
} else {
System.err.println("File " + filename + " does not exist. Please check location.");
}
} catch (Exception e) {
}
}
// Network read needs to be implemented...
} catch (SecurityException se) {
// online, whups. catch the security exception out here rather than
// doing it three times (or whatever) for each of the cases above.
}
if (gplayer == null) {
parent.die("Could not load media file " + filename, null);
}
// we've got a valid media file! let's rock.
try {
this.filename = filename; // for error messages
// register methods
parent.registerDispose(this);
setEventHandlerObject(parent);
rate = 1.0f;
fps = -1;
sinkReady = false;
bufWidth = bufHeight = bufSize = 0;
} catch (Exception e) {
e.printStackTrace();
}
streamType = type;
}
/**
 * Creates and attaches the GStreamer sink matching the stream type selected
 * in the constructor (GSVideo.VIDEO, AUDIO or RAW), then installs the
 * end-of-stream listener on the pipeline bus. Sets sinkReady to true when a
 * sink has been wired up; prints an error and returns early on an
 * unrecognized stream type.
 *
 * NOTE(review): statement order matters here — each sink is disposed once
 * right after setVideoSink() to balance the refcount increment performed by
 * the PlayBin (see inline comments below).
 */
protected void initSink() {
  if (streamType == GSVideo.VIDEO) {
    if (copyHandler != null) {
      // "Copy buffer" mode: hand raw native buffers directly to the handler
      // object instead of converting frames to RGB. Enabled only when the
      // handler exposes addPixelsToBuffer(Object, IntBuffer, int, int)
      // (checked by reflection here) ...
      try {
        copyBufferMethod = copyHandler.getClass().getMethod("addPixelsToBuffer",
          new Class[] { Object.class, IntBuffer.class, int.class, int.class });
        copyBufferMode = true;
      } catch (Exception e) {
        // no such method, or an error.. which is fine, just ignore
        copyBufferMode = false;
      }
      if (copyBufferMode) {
        // ... and accepts this object as its pixel source via
        // setPixelSource(Object). If either reflective step fails, fall
        // back to RGB mode below.
        try {
          Method meth = copyHandler.getClass().getMethod("setPixelSource", new Class[] { Object.class});
          meth.invoke(copyHandler, new Object[] { this });
        } catch (Exception e) {
          copyBufferMode = false;
        }
        if (copyBufferMode) {
          natSink = new BufferDataAppSink("nat", copyMask,
            new BufferDataAppSink.Listener() {
              public void bufferFrame(int w, int h, Buffer buffer) {
                invokeEvent(w, h, buffer);
              }
            });
          // Buffers are disposed manually (see disposeBuffer) once the copy
          // handler is done with them.
          natSink.setAutoDisposeBuffer(false);
          gplayer.setVideoSink(natSink);
          // The setVideoSink() method sets the videoSink as a property of the PlayBin,
          // which increments the refcount of the videoSink element. Disposing here once
          // to decrement the refcount.
          natSink.dispose();
        }
      }
    }
    if (!copyBufferMode) {
      // Default path: decoded frames delivered as RGB IntBuffers.
      rgbSink = new RGBDataAppSink("rgb",
        new RGBDataAppSink.Listener() {
          public void rgbFrame(int w, int h, IntBuffer buffer) {
            invokeEvent(w, h, buffer);
          }
        });
      // Setting direct buffer passing in the video sink, so no new buffers are created
      // and disposed by the GC on each frame (thanks to Octavi Estape for pointing
      // out this one).
      rgbSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
      gplayer.setVideoSink(rgbSink);
      // The setVideoSink() method sets the videoSink as a property of the PlayBin,
      // which increments the refcount of the videoSink element. Disposing here once
      // to decrement the refcount.
      rgbSink.dispose();
    }
  } else if (streamType == GSVideo.AUDIO) {
    // Audio-only playback: discard any video with a fakesink.
    gplayer.setVideoSink(ElementFactory.make("fakesink", "audiosink"));
  } else if (streamType == GSVideo.RAW) {
    // Raw mode: undecoded byte frames plus their caps string.
    dataSink = new ByteDataAppSink("data",
      new ByteDataAppSink.Listener() {
        public void byteFrame(Caps caps, int size, ByteBuffer buffer) {
          invokeEvent(caps, size, buffer);
        }
      });
    dataSink.setPassDirectBuffer(GSVideo.passDirectBuffer);
    gplayer.setVideoSink(dataSink);
    dataSink.dispose();
  } else {
    System.err.println("Unrecognized stream type: Please use VIDEO, AUDIO, or RAW.");
    return;
  }
  // Creating bus to handle end-of-stream event.
  Bus bus = gplayer.getBus();
  bus.connect(new Bus.EOS() {
    public void endOfStream(GstObject element) {
      eosEvent();
    }
  });
  sinkReady = true;
}
/**
 * Called from the GStreamer streaming thread when a decoded RGB frame
 * arrives. Records the frame dimensions, copies the pixel data into
 * copyPixels, and — while playing — fires the sketch's playerEvent()
 * handler. Synchronized against the other frame-delivery paths.
 *
 * @param w frame width in pixels
 * @param h frame height in pixels
 * @param buffer decoded pixels, w * h ints
 */
protected synchronized void invokeEvent(int w, int h, IntBuffer buffer) {
  available = true;
  bufWidth = w;
  bufHeight = h;
  bufSize = w * h;

  // Lazily allocate the destination array on the first frame.
  if (copyPixels == null) copyPixels = new int[w * h];

  buffer.rewind();
  try {
    buffer.get(copyPixels);
  } catch (BufferUnderflowException e) {
    // Buffer was smaller than expected; drop this frame and retry allocation
    // on the next one.
    e.printStackTrace();
    copyPixels = null;
    return;
  }

  if (playing && playerEventMethod != null) {
    // Notify the sketch that a new frame is available.
    try {
      playerEventMethod.invoke(eventHandler, new Object[] { this });
    } catch (Exception e) {
      System.err.println("error, disabling playerEvent() for " + filename);
      e.printStackTrace();
      playerEventMethod = null;
    }
  }
}
/**
 * Called from the GStreamer streaming thread when a raw native frame arrives
 * in copy-buffer mode. Stores the buffer for the copy handler to consume and
 * — while playing — fires the sketch's playerEvent() handler.
 *
 * @param w frame width in pixels
 * @param h frame height in pixels
 * @param buffer native GStreamer buffer holding the frame data
 */
protected synchronized void invokeEvent(int w, int h, Buffer buffer) {
  available = true;
  bufWidth = w;
  bufHeight = h;
  bufSize = w * h;
  natBuffer = buffer;
  if (playing) {
    // Creates a playerEvent.
    if (playerEventMethod != null) {
      try {
        playerEventMethod.invoke(eventHandler, new Object[] { this });
      } catch (Exception e) {
        // Fixed: this message used to say "movieEvent()" (copy-paste from the
        // Movie class); the method being disabled is playerEvent(), matching
        // the message printed by the sibling overloads.
        System.err.println("error, disabling playerEvent() for " + filename);
        e.printStackTrace();
        playerEventMethod = null;
      }
    }
  }
}
/**
 * Called from the GStreamer streaming thread when a raw byte frame arrives
 * (RAW stream type). Saves the frame's caps description, copies the bytes
 * into copyData, and — while playing — fires the sketch's playerEvent()
 * handler.
 *
 * @param caps GStreamer caps describing the frame format
 * @param n number of bytes in the frame
 * @param buffer frame data, n bytes
 */
protected synchronized void invokeEvent(Caps caps, int n, ByteBuffer buffer) {
  available = true;
  bufSize = n;
  tempDataCaps = caps.toString();

  // Lazily allocate the destination array on the first frame.
  if (copyData == null) copyData = new byte[n];

  buffer.rewind();
  try {
    buffer.get(copyData);
  } catch (BufferUnderflowException e) {
    // Buffer was smaller than expected; drop this frame and retry allocation
    // on the next one.
    e.printStackTrace();
    copyData = null;
    return;
  }

  if (playing && playerEventMethod != null) {
    // Notify the sketch that a new frame is available.
    try {
      playerEventMethod.invoke(eventHandler, new Object[] { this });
    } catch (Exception e) {
      System.err.println("error, disabling playerEvent() for " + filename);
      e.printStackTrace();
      playerEventMethod = null;
    }
  }
}
/**
 * Releases a native GStreamer buffer previously handed out in copy-buffer
 * mode (auto-dispose is turned off on the nat sink, so the consumer must
 * return buffers here).
 *
 * @param buf an org.gstreamer.Buffer, passed as Object
 */
public synchronized void disposeBuffer(Object buf) {
  Buffer nativeBuffer = (Buffer) buf;
  nativeBuffer.dispose();
}
/**
 * Handles the end-of-stream signal from the pipeline bus: either loops back
 * to the beginning (when repeat is set) or stops playback.
 */
protected void eosEvent() {
  if (!repeat) {
    playing = false;
    return;
  }
  goToBeginning();
}
}

View File

@@ -1,369 +0,0 @@
/**
* Part of the GSVideo library: http://gsvideo.sourceforge.net/
* Copyright (c) 2008-11 Andres Colubri
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, version 2.1.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package processing.video;
import processing.core.*;
import java.io.File;
import java.nio.ByteBuffer;
import org.gstreamer.Buffer;
import org.gstreamer.elements.RGBDataFileSink;
/**
 * Records movies from a running sketch. Frames are pushed as ARGB pixel
 * arrays and encoded through a GStreamer pipeline (RGBDataFileSink). The
 * container is chosen from the filename extension and the codec either from
 * the predefined constants (THEORA, XVID, X264, DIRAC, MJPEG, MJPEG2K) or
 * from explicit GStreamer encoder/muxer element names.
 */
public class MovieMaker {
  protected PApplet parent;
  protected boolean recording;
  protected RGBDataFileSink recorder;
  protected int width, height;

  // Supported codec identifiers.
  public static final int THEORA = 0;
  public static final int XVID = 1;
  public static final int X264 = 2;
  public static final int DIRAC = 3;
  public static final int MJPEG = 4;
  public static final int MJPEG2K = 5;

  // Encoding-quality presets.
  public static final int WORST = 0;
  public static final int LOW = 1;
  public static final int MEDIUM = 2;
  public static final int HIGH = 3;
  public static final int BEST = 4;

  /**
   * Constructor that sets the codec to THEORA, MEDIUM quality and 30 fps.
   */
  public MovieMaker(PApplet parent, int requestWidth, int requestHeight,
                    String filename) {
    init(parent, requestWidth, requestHeight, filename, THEORA, MEDIUM, 30);
  }

  /**
   * Constructor that allows to set codec type and fps (quality is MEDIUM).
   */
  public MovieMaker(PApplet parent, int requestWidth, int requestHeight,
                    String filename, int codecType, int ifps) {
    init(parent, requestWidth, requestHeight, filename, codecType, MEDIUM, ifps);
  }

  /**
   * Constructor that allows to set codec type, encoding quality and fps.
   */
  public MovieMaker(PApplet parent, int requestWidth, int requestHeight,
                    String filename, int codecType, int codecQuality, int ifps) {
    init(parent, requestWidth, requestHeight, filename, codecType,
         codecQuality, ifps);
  }

  /**
   * Constructor that allows to set the gstreamer encoder and muxer by name.
   * Properties for encoder and muxer are left at their default values.
   */
  public MovieMaker(PApplet parent, int requestWidth, int requestHeight,
                    String filename, String encoder, String muxer, int ifps) {
    init(parent, requestWidth, requestHeight, filename, encoder, muxer, null, null, ifps);
  }

  /**
   * Constructor that allows to set the gstreamer encoder and muxer by name,
   * as well as the encoder properties (parallel name/value arrays).
   */
  public MovieMaker(PApplet parent, int requestWidth, int requestHeight,
                    String filename, String encoder, String muxer, String[] propNames, Object[] propValues, int ifps) {
    init(parent, requestWidth, requestHeight, filename, encoder, muxer, propNames, propValues, ifps);
  }

  /**
   * Releases the gstreamer resources associated to this movie maker object.
   * It shouldn't be used after this.
   */
  public void delete() {
    // Guard added: recorder may be null when initialization failed (e.g.
    // parent.die() was invoked for an unrecognized container or codec).
    if (recorder != null) {
      recorder.stop();
      recorder.dispose();
    }
  }

  /**
   * Same as delete. Called automatically by Processing at shutdown because
   * this object registers itself with registerDispose().
   */
  public void dispose() {
    delete();
  }

  /**
   * Adds a new frame to the video being recorded. The frame is silently
   * dropped unless recording is on and the array length matches
   * width * height exactly.
   *
   * @param pixels ARGB pixel array with width * height entries
   */
  public void addFrame(int[] pixels) {
    if (recording && pixels.length == width * height) {
      // 4 bytes per pixel; the int pixels are written into the native buffer.
      Buffer srcBuffer = new Buffer(width * height * 4);
      ByteBuffer tmpBuffer = srcBuffer.getByteBuffer();
      tmpBuffer.clear();
      tmpBuffer.asIntBuffer().put(pixels);
      recorder.pushRGBFrame(srcBuffer);
    }
  }

  /**
   * Starts recording.
   */
  public void start() {
    recorder.start();
    recording = true;
  }

  /**
   * Finishes recording.
   */
  public void finish() {
    recording = false;
    recorder.stop();
  }

  /**
   * Returns the number of frames currently in the pre-encoding queue,
   * waiting to be encoded.
   */
  public int getQueuedFrames() {
    return recorder.getNumQueuedFrames();
  }

  /**
   * Returns the number of frames dropped until now.
   */
  public int getDroppedFrames() {
    return recorder.getNumDroppedFrames();
  }

  /**
   * Sets the maximum size of the pre-encoding and encoding queues.
   * When the encoding queue is full, the frames start to be accumulated
   * in the pre-encoding queue. By setting the size of the pre-encoding
   * queue to zero, it can grow arbitrarily large.
   *
   * @param npre pre-encoding queue size (0 = unbounded)
   * @param nenc encoding queue size
   */
  public void setQueueSize(int npre, int nenc) {
    recorder.setPreQueueSize(npre);
    recorder.setSrcQueueSize(nenc);
  }

  /**
   * Returns true or false depending on whether recording is going
   * on right now or not.
   *
   * @return boolean
   */
  public boolean isRecording() {
    return recording;
  }

  /**
   * Shared initialization for the predefined-codec constructors: determines
   * the muxer from the filename extension, configures the encoder element
   * and its properties from codecType/codecQuality, and creates the
   * recorder.
   */
  protected void init(PApplet iparent, int requestWidth, int requestHeight,
                      String filename, int codecType, int codecQuality, int ifps) {
    this.parent = iparent;
    GSVideo.init();

    // register methods
    parent.registerDispose(this);

    width = requestWidth;
    height = requestHeight;

    // Determining container based on the filename extension.
    String muxer = muxerForFilename(filename);
    if (muxer == null) {
      muxer = "";
      parent.die("Unrecognized video container", null);
    }

    // Configuring encoder.
    String encoder = "";
    String[] propNames = null;
    Object[] propValues = null;
    if (codecType == THEORA) {
      encoder = "theoraenc";
      propNames = new String[] { "quality" };
      propValues = new Object[] { mapIntQuality(codecQuality, 31, 0, 15, 31, 47, 63) };
    } else if (codecType == DIRAC) {
      encoder = "schroenc";
      propNames = new String[] { "quality" };
      propValues = new Object[] { mapDoubleQuality(codecQuality, 5.0d, 0.0d, 2.5d, 5.0d, 7.5d, 10.0d) };
    } else if (codecType == XVID) {
      encoder = "xvidenc";
      // TODO: set Properties of xvidenc.
    } else if (codecType == X264) {
      encoder = "x264enc";
      // The pass property can take the following values:
      // (0): cbr - Constant Bitrate Encoding (default)
      // (4): quant - Constant Quantizer
      // (5): qual - Constant Quality
      // (17): pass1 - VBR Encoding - Pass 1
      // (18): pass2 - VBR Encoding - Pass 2
      // (19): pass3 - VBR Encoding - Pass 3
      // When Constant Quality is specified for pass, the quantizer property
      // is interpreted as the quality level.
      // The bitrate can be set with the bitrate property, which is integer
      // and has range: 1 - 102400. Default: 2048. It probably doesn't have
      // any effect unless pass is set to cbr.
      propNames = new String[] { "pass", "quantizer" };
      propValues = new Object[] { 5, mapIntQuality(codecQuality, 21, 50, 35, 21, 15, 1) };
    } else if (codecType == MJPEG) {
      encoder = "jpegenc";
      propNames = new String[] { "quality" };
      propValues = new Object[] { mapIntQuality(codecQuality, 85, 0, 30, 50, 85, 100) };
    } else if (codecType == MJPEG2K) {
      encoder = "jp2kenc";
    } else {
      parent.die("Unrecognized video codec", null);
    }

    initRecorder(filename, ifps, encoder, muxer, propNames, propValues);
  }

  /**
   * Shared initialization for the explicit encoder/muxer constructors.
   */
  protected void init(PApplet iparent, int requestWidth, int requestHeight, String filename,
                      String encoder, String muxer, String[] propNames, Object[] propValues, int ifps) {
    this.parent = iparent;
    GSVideo.init();

    // register methods
    parent.registerDispose(this);

    width = requestWidth;
    height = requestHeight;

    initRecorder(filename, ifps, encoder, muxer, propNames, propValues);
  }

  /**
   * Creates the RGBDataFileSink writing to the sketch's save path and applies
   * the default queue sizes. Recording does not start until start() is called.
   */
  protected void initRecorder(String filename, int ifps, String encoder, String muxer,
                              String[] propNames, Object[] propValues) {
    File file = new File(parent.savePath(filename));
    recorder = new RGBDataFileSink("MovieMaker", width, height, ifps, encoder,
                                   propNames, propValues, muxer, file);
    recording = false;
    setQueueSize(60, 30);
  }

  /**
   * Maps a filename extension to the GStreamer muxer element for that
   * container, or returns null when the extension is not recognized.
   */
  private static String muxerForFilename(String filename) {
    String fn = filename.toLowerCase();
    if (fn.endsWith(".ogg")) return "oggmux";
    if (fn.endsWith(".avi")) return "avimux";
    if (fn.endsWith(".mov")) return "qtmux";
    if (fn.endsWith(".flv")) return "flvmux";
    if (fn.endsWith(".mkv")) return "matroskamux";
    if (fn.endsWith(".mp4")) return "mp4mux";
    if (fn.endsWith(".3gp")) return "gppmux";
    if (fn.endsWith(".mpg")) return "ffmux_mpeg";
    if (fn.endsWith(".mj2")) return "mj2mux";
    return null;
  }

  /**
   * Translates a WORST..BEST preset into the codec-specific integer value,
   * or fallback when codecQuality is not one of the known presets.
   * (Replaces the four duplicated if-chains of the original code.)
   */
  private static int mapIntQuality(int codecQuality, int fallback,
                                   int worst, int low, int medium, int high, int best) {
    if (codecQuality == WORST) return worst;
    if (codecQuality == LOW) return low;
    if (codecQuality == MEDIUM) return medium;
    if (codecQuality == HIGH) return high;
    if (codecQuality == BEST) return best;
    return fallback;
  }

  /**
   * Double-valued variant of mapIntQuality, used by the DIRAC (schroenc)
   * encoder whose quality property is floating point.
   */
  private static double mapDoubleQuality(int codecQuality, double fallback,
                                         double worst, double low, double medium, double high, double best) {
    if (codecQuality == WORST) return worst;
    if (codecQuality == LOW) return low;
    if (codecQuality == MEDIUM) return medium;
    if (codecQuality == HIGH) return high;
    if (codecQuality == BEST) return best;
    return fallback;
  }
}