Removed examples

This commit is contained in:
codeanticode
2011-07-09 00:17:28 +00:00
parent 53852c2406
commit 9a3efc92d0
13 changed files with 0 additions and 567 deletions

View File

@@ -1,58 +0,0 @@
// Using integration with GLGraphics for fast video playback.
// All the decoding stages, until the color conversion from YUV
// to RGB are handled by gstreamer, and the video frames are
// directly transferred over to the OpenGL texture encapsulated
// by the GLTexture object.
// You need the GLGraphics library (0.99+) to use this functionality:
// http://glgraphics.sourceforge.net/
import processing.opengl.*;
import codeanticode.glgraphics.*;
import codeanticode.gsvideo.*;
// Capture stream coming from the camera device.
GSCapture cam;
// OpenGL texture that receives the decoded camera frames directly.
GLTexture tex;
// Opens the 640x480 capture device and wires its pixel output
// straight into an OpenGL texture, bypassing Processing's pixels[].
void setup() {
size(640, 480, GLConstants.GLGRAPHICS);
cam = new GSCapture(this, 640, 480);
// Use texture tex as the destination for the camera pixels.
tex = new GLTexture(this);
cam.setPixelDest(tex);
// Start streaming frames from the camera.
cam.play();
/*
// You can get the resolutions supported by the
// capture device using the resolutions() method.
// It must be called after creating the capture
// object.
int[][] res = cam.resolutions();
for (int i = 0; i < res.length; i++) {
println(res[i][0] + "x" + res[i][1]);
}
*/
/*
// You can also get the framerates supported by the
// capture device:
String[] fps = cam.framerates();
for (int i = 0; i < fps.length; i++) {
println(fps[i]);
}
*/
}
// Called by GSVideo whenever the capture device has a new frame;
// reading it forwards the frame to the texture destination.
void captureEvent(GSCapture cam) {
cam.read();
}
// Draws the latest camera frame, scaled to the sketch window.
void draw() {
  // putPixelsIntoTexture() uploads a newly arrived frame to the
  // video card and returns true only when one was available.
  if (!tex.putPixelsIntoTexture()) {
    return;
  }
  image(tex, 0, 0, width, height);
}

View File

@@ -1,80 +0,0 @@
// Using integration with GLGraphics for fast video playback.
// All the decoding stages, until the color conversion from YUV
// to RGB are handled by gstreamer, and the video frames are
// directly transferred over to the OpenGL texture encapsulated
// by the GLTexture object.
// You need the GLGraphics library (0.99+) to use this functionality:
// http://glgraphics.sourceforge.net/
import processing.opengl.*;
import codeanticode.glgraphics.*;
import codeanticode.gsvideo.*;
// Movie stream decoded by gstreamer.
GSMovie mov;
// OpenGL texture that receives the decoded movie frames.
GLTexture tex;
// Frame counter and timestamp used to measure the effective framerate.
int fcount, lastm;
// Measured framerate, recomputed every fint seconds.
float frate;
int fint = 3;
// Loads the movie, routes its pixels into an OpenGL texture and
// configures the frame-buffering behavior.
void setup() {
size(1280, 800, GLConstants.GLGRAPHICS);
frameRate(90);
mov = new GSMovie(this, "movie.avi");
// Use texture tex as the destination for the movie pixels.
tex = new GLTexture(this);
mov.setPixelDest(tex);
// This is the size of the buffer where frames are stored
// when they are not rendered quickly enough.
tex.setPixelBufferSize(10);
// If set to true, new frames put into the texture when the buffer is
// full are deleted forever, which can lead to dropped frames. With
// false (used here) they are kept by gstreamer and will be sent again
// later; this avoids losing any frames, but increases the memory
// used by the application.
tex.delPixelsWhenBufferFull(false);
mov.loop();
background(0);
noStroke();
}
// Reads newly decoded movie frames, draws them preserving the aspect
// ratio, and overlays resolution / framerate / buffer statistics.
void draw() {
  // Using the available() method and reading the new frame inside draw()
  // instead of movieEvent() is the most effective way to keep the
  // audio and video synchronization.
  if (mov.available()) {
    mov.read();
    // putPixelsIntoTexture() copies the frame pixels to the OpenGL
    // texture encapsulated by tex.
    if (tex.putPixelsIntoTexture()) {
      // Calculating height to keep aspect ratio. The float() cast
      // avoids the integer truncation that width * tex.height /
      // tex.width would otherwise produce.
      float h = width * float(tex.height) / tex.width;
      float b = 0.5 * (height - h);
      image(tex, 0, b, width, h);
      String info = "Resolution: " + mov.width + "x" + mov.height +
" , framerate: " + nfc(frate, 2) +
" , number of buffered frames: " + tex.getPixelBufferUse();
      // Black strip behind the info text so it stays readable.
      fill(0);
      rect(0, 0, textWidth(info), b);
      fill(255);
      text(info, 0, 15);
      // Measure the effective framerate over fint-second windows.
      fcount += 1;
      int m = millis();
      if (m - lastm > 1000 * fint) {
        frate = float(fcount) / fint;
        fcount = 0;
        lastm = m;
      }
    }
  }
}

View File

@@ -1,40 +0,0 @@
// Using integration with GLGraphics for fast video playback.
// All the decoding stages, until the color conversion from YUV
// to RGB are handled by gstreamer, and the video frames are
// directly transferred over to the OpenGL texture encapsulated
// by the GLTexture object.
// You need the GLGraphics library (0.99+) to use this functionality:
// http://glgraphics.sourceforge.net/
import processing.opengl.*;
import codeanticode.glgraphics.*;
import codeanticode.gsvideo.*;
// Movie stream decoded by gstreamer.
GSMovie movie;
// OpenGL texture that receives the decoded movie frames.
GLTexture tex;
// Loads the movie and routes its pixels directly into an OpenGL texture.
void setup() {
size(640, 480, GLConstants.GLGRAPHICS);
background(0);
movie = new GSMovie(this, "station.mov");
// Use texture tex as the destination for the movie pixels.
tex = new GLTexture(this);
movie.setPixelDest(tex);
movie.loop();
}
// Called by GSVideo whenever a new movie frame is ready; reading it
// forwards the frame to the texture destination.
void movieEvent(GSMovie movie) {
movie.read();
}
// Paints the latest movie frame centered on the mouse position with
// heavy transparency, accumulating a trail effect over the window.
void draw() {
  // putPixelsIntoTexture() copies a pending frame to the video card
  // and returns true only when a new frame was actually available.
  if (!tex.putPixelsIntoTexture()) {
    return;
  }
  tint(255, 20);
  image(tex, mouseX-movie.width/2, mouseY-movie.height/2);
}

View File

@@ -1,38 +0,0 @@
// Using integration with GLGraphics for fast video playback.
// All the decoding stages, until the color conversion from YUV
// to RGB are handled by gstreamer, and the video frames are
// directly transferred over to the OpenGL texture encapsulated
// by the GLTexture object.
// You need the GLGraphics library (0.99+) to use this functionality:
// http://glgraphics.sourceforge.net/
import processing.opengl.*;
import codeanticode.glgraphics.*;
import codeanticode.gsvideo.*;
// Custom gstreamer pipeline (videotestsrc test pattern).
GSPipeline pipeline;
// OpenGL texture that receives the pipeline's video frames.
GLTexture tex;
// Builds a test-signal pipeline and routes its output into a texture.
void setup() {
size(320, 240, GLConstants.GLGRAPHICS);
pipeline = new GSPipeline(this, "videotestsrc");
// Use texture tex as the destination for the pipeline pixels.
tex = new GLTexture(this);
pipeline.setPixelDest(tex);
// Pipelines start paused; play() gets the stream going.
pipeline.play();
}
// Called by GSVideo whenever the pipeline has a new frame; reading it
// forwards the frame to the texture destination.
void pipelineEvent(GSPipeline pipeline) {
pipeline.read();
}
// Draws the most recent pipeline frame, scaled to the window.
void draw() {
  // putPixelsIntoTexture() uploads a pending frame to the video card
  // and returns true only when a new frame was actually available.
  if (!tex.putPixelsIntoTexture()) {
    return;
  }
  image(tex, 0, 0, width, height);
}

View File

@@ -1,116 +0,0 @@
/**
* GSVideo drawing movie example.
*
* Adapted from Daniel Shiffman's original Drawing Movie
* example by Andres Colubri
* Makes a movie of a line drawn by the mouse. Press
* the spacebar to finish and save the movie.
*/
import codeanticode.gsvideo.*;
// Movie encoder that compresses window frames into a video file.
GSMovieMaker mm;
// Recording (and sketch) framerate; they must match, see below.
int fps = 30;
// Sets up the drawing surface and configures the movie encoder.
void setup() {
size(320, 240);
frameRate(fps);
PFont font = createFont("Courier", 24);
textFont(font, 24);
// Save as THEORA in an OGG file at MEDIUM quality (all quality settings are WORST, LOW,
// MEDIUM, HIGH and BEST):
mm = new GSMovieMaker(this, width, height, "drawing.ogg", GSMovieMaker.THEORA, GSMovieMaker.MEDIUM, fps);
// Available codecs are:
// THEORA
// XVID
// X264
// DIRAC
// MJPEG
// MJPEG2K
// As for the file formats, the following are autodetected from the filename extension:
// .ogg: OGG
// .avi: Microsoft's AVI
// .mov: Quicktime's MOV
// .flv: Flash Video
// .mkv: Matroska container
// .mp4: MPEG-4
// .3gp: 3GPP video
// .mpg: MPEG-1
// .mj2: Motion JPEG 2000
// Please note that some of the codecs/containers might not work as expected, depending
// on which gstreamer plugins are installed. Also, some codec/container combinations
// don't seem to be compatible, for example THEORA+AVI or X264+OGG.
// Encoding with DIRAC codec into an avi file:
//mm = new GSMovieMaker(this, width, height, "drawing.avi", GSMovieMaker.DIRAC, GSMovieMaker.BEST, fps);
// Important: Be sure of using the same framerate as the one set with frameRate().
// If the sketch's framerate is higher than the speed with which GSMovieMaker
// can compress frames and save them to file, then the computer's RAM will start to become
// clogged with unprocessed frames waiting on the gstreamer's queue. If all the physical RAM
// is exhausted, then the whole system might become extremely slow and unresponsive.
// Using the same framerate as in the frameRate() function seems to be a reasonable choice,
// assuming that CPU can keep up with encoding at the same pace with which Processing sends
// frames (which might not be the case if the CPU is slow). As the resolution increases,
// encoding becomes more costly and the risk of clogging the computer's RAM increases.
// The movie maker can also be initialized by explicitly specifying the name of the desired gstreamer's
// encoder and muxer elements. Also, arrays with property names and values for the encoder can be passed.
// In the following code, the DIRAC encoder (schroenc) and the Matroska muxer (matroskamux) are selected,
// with an encoding quality of 9.0 (schroenc accepts quality values between 0 and 10). The property arrays
// can be set to null in order to use default property values.
//String[] propName = { "quality" };
//Float f = 9.0f;
//Object[] propValue = { f };
//mm = new GSMovieMaker(this, width, height, "drawing.ogg", "schroenc", "oggmux", propName, propValue, fps);
// There are two queues in the movie recording process: a pre-encoding queue and an encoding
// queue. The former is stored in the Java side and the latter inside gstreamer. When the
// encoding queue is full, frames start to accumulate in the pre-encoding queue until its
// maximum size is reached. After that point, new frames are dropped. To have no limit in the
// size of the pre-encoding queue, set it to zero.
// The size of both is set with the following function (first argument is the size of pre-
// encoding queue):
mm.setQueueSize(50, 10);
mm.start();
background(160, 32, 32);
}
// Draws a line segment while the mouse is pressed, overlays the frame
// counter, and feeds the window's pixels to the movie maker.
void draw() {
  stroke(7, 146, 168);
  strokeWeight(4);
  // Draw if mouse is pressed. The pmouse checks skip the very first
  // frame, where pmouseX/pmouseY still default to 0 and would draw a
  // spurious line from the top-left corner. (The original condition
  // tested mouseY instead of pmouseY.)
  if (mousePressed && pmouseX != 0 && pmouseY != 0) {
    line(pmouseX, pmouseY, mouseX, mouseY);
  }
  // Drawing framecount.
  String s = "Frame " + frameCount;
  fill(160, 32, 32);
  noStroke();
  rect(10, 6, textWidth(s), 24);
  fill(255);
  text(s, 10, 30);
  loadPixels();
  // Add window's pixels to movie
  mm.addFrame(pixels);
  println("Number of queued frames : " + mm.getQueuedFrames());
  println("Number of dropped frames: " + mm.getDroppedFrames());
}
// Finishes the recording and quits when the spacebar is pressed.
void keyPressed() {
  if (key != ' ') {
    return;
  }
  // Flush any pending frames and close the movie file.
  mm.finish();
  // Quit running the sketch once the file is written
  exit();
}

View File

@@ -1,34 +0,0 @@
/**
* Audio pipeline.
* By Andres Colubri
*
*/
import codeanticode.gsvideo.*;
// Audio-only gstreamer pipeline.
GSPipeline pipeline;
// Builds a test-tone pipeline that plays straight to the sound card.
void setup() {
size(100, 100);
// An audio-only pipeline can be specified by setting the type parameter to GSVideo.AUDIO.
// In this way, GSVideo doesn't try to copy the stream to the Processing window.
// The other two possible types are GSVideo.VIDEO (default) and GSVideo.DATA.
// Linux:
pipeline = new GSPipeline(this, "audiotestsrc ! audioconvert ! alsasink", GSVideo.AUDIO);
// Windows:
//pipeline = new GSPipeline(this, "audiotestsrc ! audioconvert ! directsoundsink", GSVideo.AUDIO);
// The pipeline starts in paused state, so a call to the play()
// method is needed to get things rolling.
pipeline.play();
}
// Intentionally empty: the pipeline pushes the generated audio
// straight to the sound card, so nothing needs to be rendered.
void draw() {
// No need to draw anything on the screen. The audio gets
// automatically directed to the sound card.
}

View File

@@ -1,58 +0,0 @@
/**
* Camera capture pipelines.
* By Andres Colubri
*
*/
import codeanticode.gsvideo.*;
// Camera capture pipeline.
GSPipeline pipeline;
// Builds a platform-specific camera capture pipeline; commented
// alternatives are kept for Windows, MacOSX and dv1394 capture.
void setup() {
size(640, 480);
// The ksvideosrc element allows to select a capture device by index (0, 1, 2, etc).
//pipeline = new GSPipeline(this, "ksvideosrc device-index=0 ! decodebin2");
// DirectShow capture pipelines:
// Uses the first available capture device.
//pipeline = new GSPipeline(this, "dshowvideosrc ! decodebin2");
// This one allows to choose the device based on its name property.
//pipeline = new GSPipeline(this, "dshowvideosrc device-name=\"Sony Visual Communication Camera VGP-VCC7\" ! decodebin2");
// Capture pipeline in MacOSX 64 bits. It uses the qtkitvideosrc element based on the
// new QTkit. The input device can be set using the device-index property, which expects an
// integer value, like ksvideosrc above.
//pipeline = new GSPipeline(this, "qtkitvideosrc");
// Video4Linux2 capture pipeline.
pipeline = new GSPipeline(this, "v4l2src");
// The full pipeline that GSVideo passes to GStreamer can be
// obtained with the getPipeline() method:
println("Pipeline string:");
println(pipeline.getPipeline());
// Tentative dv1394 capture pipeline. This thread on the Processing's discourse:
// http://processing.org/discourse/yabb2/YaBB.pl?num=1210072258/30
// could be very useful to setup dv capture.
//pipeline = new GSPipeline(this, "dv1394src port=0 ! queue ! dvdemux ! ffdec_dvvideo ! ffmpegcolorspace ! video/x-raw-yuv, width=720");
// The pipeline starts in paused state, so a call to the play()
// method is needed to get things rolling.
pipeline.play();
}
// Reads and displays camera frames as they become available.
void draw() {
  // available() reports whether a new frame is ready to be read.
  if (!pipeline.available()) {
    return;
  }
  pipeline.read();
  image(pipeline, 0, 0);
}

View File

@@ -1,48 +0,0 @@
/**
* Raw pipeline.
* By Andres Colubri
*
*/
import codeanticode.gsvideo.*;
// Raw-data gstreamer pipeline: frames are delivered as bytes, with no
// color conversion or copying into the Processing window.
GSPipeline pipeline;
// Builds a raw audio pipeline decoding an mp3 from the data folder.
void setup() {
size(200, 200);
// A raw pipeline can be used to retrieve the data frames from the stream right after it has
// been decoded from the file.
// Reading audio frames from mp3 file. Note we need to add the decoding element (mad):
pipeline = new GSPipeline(this, "filesrc location=" + dataPath("groove.mp3") + " ! mad", GSVideo.RAW);
// Test audio signal generated by the audiotestsrc element. Here we don't need any decoding, as the
// frames coming out of audiotestsrc already contain valid audio data:
//pipeline = new GSPipeline(this, "audiotestsrc", GSVideo.RAW);
pipeline.loop();
}
// Called by GSVideo when a new raw frame is available; read() stores
// the bytes in pipeline.data.
void pipelineEvent(GSPipeline p) {
p.read();
}
// Visualizes the raw audio bytes by spreading them across the
// window's pixels as shades of red.
void draw() {
  background(0);
  byte[] frame = pipeline.data;
  // Nothing to show until the first raw frame has been read.
  if (frame == null) {
    return;
  }
  //println("Data size: " + pipeline.data.length);
  //println("Data caps: " + pipeline.dataCaps);
  // Mapping audio bytes to pixel color.
  loadPixels();
  for (int i = 0; i < frame.length; i++) {
    // Spread sample index i over the full pixel array.
    int k = int(map(i, 0, frame.length - 1, 0, width * height - 1));
    pixels[k] = color(frame[i] + 128, 0, 0, 255);
  }
  updatePixels();
}

View File

@@ -1,40 +0,0 @@
/**
* Test.
* By Andres Colubri
*
* This example shows how to create GStreamer pipelines using the GSPipeline object.
* Pipelines allow to connect different gstreamer elements (video sources, decoders, etc)
* in order to construct a video or audio stream. The command line tool gst-launch can be used
* to launch pipelines, and most pipelines specified with gst-launch can be used in GSPipeline,
* as the shown in this sketch.
* Some online material on GStreamer:
* http://www.cin.ufpe.br/~cinlug/wiki/index.php/Introducing_GStreamer
* http://www.twm-kd.com/computers/software/webcam-and-linux-gstreamer-tutorial/
*/
import codeanticode.gsvideo.*;
// Test pipeline built from gstreamer's videotestsrc element.
GSPipeline pipeline;
// Creates the test pipeline and starts playback.
void setup() {
size(320, 240);
// VideoTestSrc pipeline. Note that there is no need to specify a
// video sink as the last element of the pipeline, because GSVideo
// automatically directs the video frames of the pipeline to
// Processing's drawing surface.
pipeline = new GSPipeline(this, "videotestsrc");
// The pipeline starts in paused state, so a call to the play()
// method is needed to get things rolling.
pipeline.play();
}
// Reads and displays pipeline frames as they become available.
void draw() {
  // available() reports whether a new frame is ready to be read.
  if (!pipeline.available()) {
    return;
  }
  pipeline.read();
  image(pipeline, 0, 0);
}

View File

@@ -1,26 +0,0 @@
/**
* Audio.
* Audio playback using the GSPlayer object.
* By Ryan Kelln
*
* Move the cursor across the screen to change volume.
*/
import codeanticode.gsvideo.*;
// Audio player for the mp3 sample.
GSPlayer sample;
// Loads and loops the audio file.
void setup() {
size(100, 100);
// The last parameter is used to indicate the stream type:
// VIDEO (default), AUDIO or DATA.
sample = new GSPlayer(this, "groove.mp3", GSVideo.AUDIO);
sample.loop();
}
// Maps the horizontal mouse position to playback volume
// (0 at the left edge of the window, 1 at the right).
void draw() {
  //sample.jump(float(mouseY) / height * sample.duration());
  float vol = float(mouseX) / width;
  sample.volume(vol);
}

View File

@@ -1,29 +0,0 @@
/**
* Raw.
*
* Gets raw data frames from video stream, without any color conversion.
*/
import codeanticode.gsvideo.*;
// Player delivering raw, unconverted frame data from the movie.
GSPlayer video;
// Opens the movie in RAW mode and loops it.
void setup() {
size(100, 100);
video = new GSPlayer(this, "station.mov", GSVideo.RAW);
video.loop();
}
// Called by GSVideo when a new raw frame arrives; read() stores the
// bytes in video.data and their description in video.dataCaps.
void playerEvent(GSPlayer player) {
player.read();
}
// Prints size and caps information for each raw frame delivered by
// the player.
void draw() {
  // The raw frame data is stored in video.data (a byte array);
  // video.dataCaps is a string describing the incoming data.
  if (video.data == null) {
    return;
  }
  println("Data size: " + video.data.length);
  println("Data caps: " + video.dataCaps);
}