Initial checkin of veejay 1.4

git-svn-id: svn://code.dyne.org/veejay/trunk@1172 eb8d1916-c9e9-0310-b8de-cf0c9472ead5
This commit is contained in:
Niels Elburg
2008-11-10 20:16:24 +00:00
parent d81258c54c
commit d8e6f98d53
793 changed files with 244409 additions and 0 deletions

View File

@@ -0,0 +1,12 @@
# Makefile for veejay
MAINTAINERCLEANFILES = Makefile.in
AM_CFLAGS=$(OP_CFLAGS)
INCLUDES = -I$(top_srcdir) -I$(includedir) -I$(top_srcdir)/vjmem \
-I$(top_srcdir)/vjmsg \
-I$(top_srcdir)/libvjnet/ \
-I$(top_srcdir)/libhash \
$(XML2_CFLAGS) \
$(MJPEGTOOLS_CFLAGS) $(FFMPEG_CFLAGS) $(PTHREAD_CFLAGS) $(UNICAP_CFLAGS)
STREAM_LIB_FILE = libstream.la
noinst_LTLIBRARIES = $(STREAM_LIB_FILE)
libstream_la_SOURCES = vj-unicap.c vj-yuv4mpeg.c vj-net.c vj-dv1394.c vj-vloopback.c vj-tag.c

View File

@@ -0,0 +1,258 @@
/*
* dv1394.h - DV input/output over IEEE 1394 on OHCI chips
* Copyright (C)2001 Daniel Maas <dmaas@dcine.com>
* receive, proc_fs by Dan Dennedy <dan@dennedy.org>
*
* based on:
* video1394.h - driver for OHCI 1394 boards
* Copyright (C)1999,2000 Sebastien Rougeaux <sebastien.rougeaux@anu.edu.au>
* Peter Schlaile <udbz@rz.uni-karlsruhe.de>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#ifndef _DV_1394_H
#define _DV_1394_H
#include <sys/types.h>
#include <sys/ioctl.h>
/* This is the public user-space interface. Try not to break it. */
#define DV1394_API_VERSION 0x20011127
/* ********************
** **
** DV1394 API **
** **
********************
There are two methods of operating the DV1394 DV output device.
1)
The simplest is an interface based on write(): simply write
full DV frames of data to the device, and they will be transmitted
as quickly as possible. The FD may be set for non-blocking I/O,
in which case you can use select() or poll() to wait for output
buffer space.
To set the DV output parameters (e.g. whether you want NTSC or PAL
video), use the DV1394_INIT ioctl, passing in the parameters you
want in a struct dv1394_init.
Example 1:
To play a raw .DV file: cat foo.DV > /dev/dv1394
(cat will use write() internally)
Example 2:
static struct dv1394_init init = {
DV1394_API_VERSION, (the API version, defined above)
0x63, (broadcast channel)
4, (four-frame ringbuffer)
DV1394_NTSC, (send NTSC video)
0, 0 (default empty packet rate)
};
ioctl(fd, DV1394_INIT, &init);
while(1) {
read( <a raw DV file>, buf, DV1394_NTSC_FRAME_SIZE );
write( <the dv1394 FD>, buf, DV1394_NTSC_FRAME_SIZE );
}
2)
For more control over buffering, and to avoid unnecessary copies
of the DV data, you can use the more sophisticated mmap() interface.
First, call the DV1394_INIT ioctl to specify your parameters,
including the number of frames in the ringbuffer. Then, calling mmap()
on the dv1394 device will give you direct access to the ringbuffer
from which the DV card reads your frame data.
The ringbuffer is simply one large, contiguous region of memory
containing two or more frames of packed DV data. Each frame of DV data
is 120000 bytes (NTSC) or 144000 bytes (PAL).
Fill one or more frames in the ringbuffer, then use the DV1394_SUBMIT_FRAMES
ioctl to begin I/O. You can use either the DV1394_WAIT_FRAMES ioctl
or select()/poll() to wait until the frames are transmitted. Next, you'll
need to call the DV1394_GET_STATUS ioctl to determine which ringbuffer
frames are clear (ready to be filled with new DV data). Finally, use
DV1394_SUBMIT_FRAMES again to send the new data to the DV output.
Example: here is what a four-frame ringbuffer might look like
during DV transmission:
frame 0 frame 1 frame 2 frame 3
*--------------------------------------*
| CLEAR | DV data | DV data | CLEAR |
*--------------------------------------*
<ACTIVE>
transmission goes in this direction --->>>
The DV hardware is currently transmitting the data in frame 1.
Once frame 1 is finished, it will automatically transmit frame 2.
(if frame 2 finishes before frame 3 is submitted, the device
will continue to transmit frame 2, and will increase the dropped_frames
counter each time it repeats the transmission).
If you called DV1394_GET_STATUS at this instant, you would
receive the following values:
n_frames = 4
active_frame = 1
first_clear_frame = 3
n_clear_frames = 2
At this point, you should write new DV data into frame 3 and optionally
frame 0. Then call DV1394_SUBMIT_FRAMES to inform the device that
it may transmit the new frames.
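Example 3: a rough sketch of the mmap() transmit loop described above.
(The mmap flags and <the next PAL frame> are illustrative placeholders;
error handling is omitted.)
struct dv1394_init init = {
DV1394_API_VERSION, 0x63, 4, DV1394_PAL, 0, 0, 0
};
struct dv1394_status st;
unsigned char *ring;
int i, f;
ioctl( <the dv1394 FD>, DV1394_INIT, &init );
ring = mmap( NULL, init.n_frames * DV1394_PAL_FRAME_SIZE,
PROT_READ | PROT_WRITE, MAP_SHARED, <the dv1394 FD>, 0 );
for(;;) {
ioctl( <the dv1394 FD>, DV1394_GET_STATUS, &st );
for( i = 0; i < st.n_clear_frames; i++ ) {
f = (st.first_clear_frame + i) % init.n_frames;
memcpy( ring + f * DV1394_PAL_FRAME_SIZE,
<the next PAL frame>, DV1394_PAL_FRAME_SIZE );
}
ioctl( <the dv1394 FD>, DV1394_SUBMIT_FRAMES, st.n_clear_frames );
ioctl( <the dv1394 FD>, DV1394_WAIT_FRAMES, 1 ); (or select()/poll())
}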
*/
/* maximum number of frames in the ringbuffer */
#define DV1394_MAX_FRAMES 32
/* number of *full* isochronous packets per DV frame */
#define DV1394_NTSC_PACKETS_PER_FRAME 250
#define DV1394_PAL_PACKETS_PER_FRAME 300
/* size of one frame's worth of DV data, in bytes */
#define DV1394_NTSC_FRAME_SIZE (480 * DV1394_NTSC_PACKETS_PER_FRAME)
#define DV1394_PAL_FRAME_SIZE (480 * DV1394_PAL_PACKETS_PER_FRAME)
enum pal_or_ntsc {
DV1394_NTSC = 0,
DV1394_PAL
};
/* this is the argument to DV1394_INIT */
struct dv1394_init {
/* DV1394_API_VERSION */
unsigned int api_version;
/* isochronous transmission channel to use */
unsigned int channel;
/* number of frames in the ringbuffer. Must be at least 2
and at most DV1394_MAX_FRAMES. */
unsigned int n_frames;
/* send/receive PAL or NTSC video format */
enum pal_or_ntsc format;
/* the following are used only for transmission */
/* set these to zero unless you want a
non-default empty packet rate (see below) */
unsigned long cip_n;
unsigned long cip_d;
/* set this to zero unless you want a
non-default SYT cycle offset (default = 3 cycles) */
unsigned int syt_offset;
};
/* Q: What are cip_n and cip_d? */
/*
A: DV video streams do not utilize 100% of the potential bandwidth offered
by IEEE 1394 (FireWire). To achieve the correct rate of data transmission,
DV devices must periodically insert empty packets into the 1394 data stream.
Typically there is one empty packet per 14-16 data-carrying packets.
Some DV devices will accept a wide range of empty packet rates, while others
require a precise rate. If the dv1394 driver produces empty packets at
a rate that your device does not accept, you may see ugly patterns on the
DV output, or even no output at all.
The default empty packet insertion rate seems to work for many people; if
your DV output is stable, you can simply ignore this discussion. However,
we have exposed the empty packet rate as a parameter to support devices that
do not work with the default rate.
The decision to insert an empty packet is made with a numerator/denominator
algorithm. Empty packets are produced at an average rate of CIP_N / CIP_D.
You can alter the empty packet rate by passing non-zero values for cip_n
and cip_d to the INIT ioctl.
*/
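/* For example (values are illustrative, not required): to request roughly
one empty packet per 15 data packets, set init.cip_n = 1 and init.cip_d = 15
in the struct passed to the DV1394_INIT ioctl; leaving both at zero keeps
the driver default. */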
struct dv1394_status {
/* this embedded init struct returns the current dv1394
parameters in use */
struct dv1394_init init;
/* the ringbuffer frame that is currently being
displayed. (-1 if the device is not transmitting anything) */
int active_frame;
/* index of the first buffer (ahead of active_frame) that
is ready to be filled with data */
unsigned int first_clear_frame;
/* how many buffers, including first_clear_buffer, are
ready to be filled with data */
unsigned int n_clear_frames;
/* how many times the DV output has underflowed
since the last call to DV1394_GET_STATUS */
unsigned int dropped_frames;
/* N.B. The dropped_frames counter is only a lower bound on the actual
number of dropped frames, with the special case that if dropped_frames
is zero, then it is guaranteed that NO frames have been dropped
since the last call to DV1394_GET_STATUS.
*/
};
/* Get the driver ready to transmit video. pass a struct dv1394_init* as
* the parameter (see below), or NULL to get default parameters */
#define DV1394_INIT _IOW('#', 0x06, struct dv1394_init)
/* Stop transmitting video and free the ringbuffer */
#define DV1394_SHUTDOWN _IO ('#', 0x07)
/* Submit N new frames to be transmitted, where the index of the first new
* frame is first_clear_buffer, and the index of the last new frame is
* (first_clear_buffer + N) % n_frames */
#define DV1394_SUBMIT_FRAMES _IO ('#', 0x08)
/* Block until N buffers are clear (pass N as the parameter). Because we
* re-transmit the last frame on underrun, there will be at most n_frames - 1
* clear frames at any time */
#define DV1394_WAIT_FRAMES _IO ('#', 0x09)
/* Capture new frames that have been received, where the index of the
* first new frame is first_clear_buffer, and the index of the last new
* frame is (first_clear_buffer + N) % n_frames */
#define DV1394_RECEIVE_FRAMES _IO ('#', 0x0a)
/* Tell card to start receiving DMA */
#define DV1394_START_RECEIVE _IO ('#', 0x0b)
/* Pass a struct dv1394_status* as the parameter */
#define DV1394_GET_STATUS _IOR('#', 0x0c, struct dv1394_status)
#endif /* _DV_1394_H */

View File

@@ -0,0 +1,229 @@
/* veejay - Linux VeeJay
* (C) 2002-2004 Niels Elburg <nwelburg@gmail.com>
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
inspired by ffmpeg/ffmpeg/libavformat/dv1394.c
dv1394 has no audio apparently ...
*/
#include <config.h>
#include <stdint.h>
#include <libvjmem/vjmem.h>
#ifdef SUPPORT_READ_DV2
#include <stdint.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <libel/vj-el.h>
#include <libel/vj-dv.h>
#include <libvjmsg/vj-msg.h>
#include <libstream/vj-dv1394.h>
#include <libstream/dv1394.h>
#include <sys/mman.h>
#include <sys/poll.h>
#include <errno.h>
#define DV_PAL_SIZE 144000
#define DV_NTSC_SIZE 120000
#define DV1394_DEFAULT_CHANNEL 63
#define DV1394_DEFAULT_CARD 0
#define DV1394_RING_FRAMES 10
static int vj_dv1394_reset(vj_dv1394 *v )
{
struct dv1394_init init;
init.channel = v->channel;
init.api_version = DV1394_API_VERSION;
init.n_frames = DV1394_RING_FRAMES;
init.format = v->norm;
init.cip_n = init.cip_d = 0; /* keep the default empty packet rate */
init.syt_offset = 0; /* keep the default SYT offset */
if( ioctl( v->handle, DV1394_INIT, &init ) < 0 )
{
veejay_msg(VEEJAY_MSG_ERROR, "Cannot initialize ieee1394 device");
return 0;
}
v->done = v->avail = 0;
return 1;
}
static int vj_dv1394_start(vj_dv1394 *v )
{
/* enable receiver */
if( ioctl( v->handle, DV1394_START_RECEIVE, 0) < 0)
{
veejay_msg(VEEJAY_MSG_ERROR, "Cannot start receiver");
return 0;
}
return 1;
}
vj_dv1394 *vj_dv1394_init(void *e, int channel, int quality)
{
editlist *el = (editlist*)e;
if(el->video_width != 720 || ( el->video_height != 576 && el->video_height != 480) )
{
veejay_msg(VEEJAY_MSG_ERROR, "No software scaling to %d x %d",el->video_width,
el->video_height);
return NULL;
}
vj_dv1394 *v = (vj_dv1394*)vj_malloc(sizeof(vj_dv1394));
v->map_size = (el->video_norm == 'p' ? DV_PAL_SIZE: DV_NTSC_SIZE);
v->handle = -1;
v->width = el->video_width;
v->height = el->video_height;
v->norm = (el->video_norm == 'p' ? DV1394_PAL: DV1394_NTSC );
v->handle = open( "/dev/dv1394", O_RDONLY);
v->channel = channel == -1 ? DV1394_DEFAULT_CHANNEL : channel;
v->index = 0;
v->quality = quality;
if( v->handle <= 0 )
{
veejay_msg(VEEJAY_MSG_ERROR, "opening /dev/dv1394'");
if(v) free(v);
return NULL;
}
if( vj_dv1394_reset(v) <= 0 )
{
veejay_msg(VEEJAY_MSG_ERROR, "Failed to initialize DV interface");
close(v->handle);
if(v) free(v);
return NULL;
}
v->map = mmap( NULL, v->map_size * DV1394_RING_FRAMES, PROT_READ,
MAP_PRIVATE, v->handle, 0);
if(v->map == MAP_FAILED)
{
veejay_msg(VEEJAY_MSG_ERROR, "Failed to mmap dv ring buffer");
close(v->handle);
if(v)free(v);
return NULL;
}
if( vj_dv1394_start(v) <= 0)
{
veejay_msg(VEEJAY_MSG_ERROR, "Failed to start capturing");
if(v)free(v);
close(v->handle);
return NULL;
}
v->decoder = (void*)vj_dv_decoder_init( v->quality,v->width,v->height, el->pixel_format );
if(!v->decoder)
{
veejay_msg(VEEJAY_MSG_ERROR, "Failed to initailize DV decoder");
}
return v;
}
void vj_dv1394_close(vj_dv1394 *v)
{
if(v)
{
if( ioctl( v->handle, DV1394_SHUTDOWN, 0) < 0)
{
veejay_msg(VEEJAY_MSG_ERROR, "Failed to shutdown dv1394");
}
if( munmap( v->map, v->map_size * DV1394_RING_FRAMES ) < 0 )
{
veejay_msg(VEEJAY_MSG_ERROR, "Failed to munmap dv1394 ring buffer");
}
close(v->handle);
if(v->decoder)
vj_dv_free_decoder( (vj_dv_decoder*) v->decoder );
free(v);
}
}
int vj_dv1394_read_frame(vj_dv1394 *v, uint8_t *frame[3], uint8_t *audio, int fmt)
{
if( !v->avail )
{
struct dv1394_status s;
struct pollfd p;
if( v->done )
{
/* request more frames */
if( ioctl( v->handle, DV1394_RECEIVE_FRAMES, v->done ) < 0 )
{
veejay_msg(VEEJAY_MSG_DEBUG, "Ring buffer overflow,reset");
vj_dv1394_reset( v );
vj_dv1394_start( v );
}
v->done = 0;
}
restart_poll:
p.fd = v->handle;
p.events = POLLIN | POLLERR | POLLHUP;
if( poll(&p, 1, -1 ) < 0 )
{
if( errno == EAGAIN || errno == EINTR )
{
veejay_msg(VEEJAY_MSG_DEBUG, "Waiting for DV");
goto restart_poll;
}
veejay_msg(VEEJAY_MSG_ERROR, "Poll failed");
return 0;
}
if( ioctl( v->handle, DV1394_GET_STATUS, &s ) < 0 )
{
veejay_msg(VEEJAY_MSG_ERROR, "Failed to get status");
return 0;
}
v->avail = s.n_clear_frames;
v->index = s.first_clear_frame;
v->done = 0;
if( s.dropped_frames )
{
veejay_msg(VEEJAY_MSG_ERROR, "dv1394: frame drop detected %d",
s.dropped_frames);
// vj_dv1394_reset( v );
// vj_dv1394_start( v );
}
}
if(!vj_dv_decode_frame(
(vj_dv_decoder*) v->decoder,
v->map + (v->index * v->map_size),
frame[0],frame[1],frame[2],
v->width,
v->height, fmt ))
{
veejay_msg(VEEJAY_MSG_ERROR, "decoding DV frame");
return 0;
}
v->index = (v->index + 1) % DV1394_RING_FRAMES;
v->done ++ ;
v->avail --;
return 1;
}
#endif

View File

@@ -0,0 +1,46 @@
/* veejay - Linux VeeJay
* (C) 2002-2004 Niels Elburg <nwelburg@gmail.com>
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#ifndef VJDV1394
#define VJDV1394
typedef struct
{
int handle;
int map_size;
uint8_t *map;
int width;
int height;
int channel;
int norm;
int avail;
int done;
int index;
int quality;
void *decoder;
} vj_dv1394;
vj_dv1394* vj_dv1394_init(void *el, int channel_nr, int quality);
void vj_dv1394_close( vj_dv1394 *v );
int vj_dv1394_read_frame( vj_dv1394 *v, uint8_t *frame[3] , uint8_t *audio, int fmt );
#endif

View File

@@ -0,0 +1,383 @@
/*
* Linux VeeJay
*
* Copyright(C)2002-2006 Niels Elburg <nwelburg@gmail.com>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License , or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
*/
#define THREAD_START 0
#define THREAD_STOP 1
#include <config.h>
#include <string.h>
#include <stdint.h>
#include <libvjmem/vjmem.h>
#include <pthread.h>
#include <libstream/vj-tag.h>
#include <libvjnet/vj-client.h>
#include <veejay/vims.h>
#include <libyuv/yuvconv.h>
#include <libvjmsg/vj-msg.h>
#include <veejay/vims.h>
#include <libstream/vj-net.h>
#include <liblzo/lzo.h>
#define _POSIX_C_SOURCE 199309
#include <time.h>
#ifdef STRICT_CHECKING
#include <assert.h>
#endif
typedef struct
{
pthread_mutex_t mutex;
pthread_t thread;
vj_client *remote;
int state;
int have_frame;
int error;
int grab;
int repeat;
} threaded_t;
static void lock_(threaded_t *t, const char *f, int line)
{
// veejay_msg(0,"lock thread by %s, line %d",f,line);
pthread_mutex_lock( &(t->mutex ));
}
static void unlock_(threaded_t *t, const char *f, int line)
{
// veejay_msg(0,"unlock thread by %s, line %d",f,line);
pthread_mutex_unlock( &(t->mutex ));
}
#define lock( t ) lock_( t, __FUNCTION__, __LINE__ )
#define unlock( t ) unlock_( t, __FUNCTION__ , __LINE__ )
#define MS_TO_NANO(a) (a *= 1000000)
static void net_delay(long msec )
{
struct timespec ts;
ts.tv_sec = 0;
ts.tv_nsec = MS_TO_NANO( msec );
nanosleep( &ts, NULL );
}
void *reader_thread(void *data)
{
vj_tag *tag = (vj_tag*) data;
threaded_t *t = tag->priv;
vj_client *v = t->remote;
int ret = 0;
char buf[16];
#ifdef STRICT_CHECKING
assert( v != NULL );
#endif
sprintf(buf, "%03d:;", VIMS_GET_FRAME);
int retrieve = 0;
for( ;; )
{
int error = 0;
if( t->state == 0 )
{
error = 1;
}
lock(t);
if( t->grab && tag->source_type == VJ_TAG_TYPE_NET && retrieve== 0)
{
ret = vj_client_poll_w(v , V_CMD );
if( ret )
{
ret = vj_client_send( v, V_CMD, buf );
if( ret <= 0 )
{
error = 1;
}
else
{
t->grab = 0;
retrieve = 1;
}
}
}
if (tag->source_type == VJ_TAG_TYPE_MCAST )
{
error = 0;
retrieve = 1;
}
long wait_time = 20;
if(!error && retrieve)
{
if( vj_client_poll(v, V_CMD ) )
{
ret = vj_client_read_i ( v, tag->socket_frame,tag->socket_len );
if( ret <= 0 )
{
if( tag->source_type == VJ_TAG_TYPE_NET )
{
error = 1;
}
else
{
wait_time += 10;
}
ret = 0;
}
else
{
t->have_frame = ret;
t->grab = 1;
retrieve =0;
}
}
else
{
if(tag->source_type == VJ_TAG_TYPE_MCAST )
wait_time = 25;
}
}
unlock(t);
if( wait_time )
{
if ( wait_time > 40 )
wait_time = 25;
net_delay( wait_time );
// usleep(wait_time);
}
if( error )
{
veejay_msg(VEEJAY_MSG_ERROR,
"Closing connection with remote veejay,");
t->state = 0;
t->grab = 0;
pthread_exit( &(t->thread));
return NULL;
}
}
return NULL;
}
void *net_threader( )
{
threaded_t *t = (threaded_t*) vj_calloc(sizeof(threaded_t));
return (void*) t;
}
int net_thread_get_frame( vj_tag *tag, uint8_t *buffer[3] )
{
threaded_t *t = (threaded_t*) tag->priv;
vj_client *v = t->remote;
const uint8_t *buf = tag->socket_frame;
lock(t);
if( t->state == 0 || t->error )
{
if(t->repeat < 0)
veejay_msg(VEEJAY_MSG_INFO, "Connection closed with remote host");
t->repeat++;
unlock(t);
return 0;
}
//@ color space convert frame
int len = v->cur_width * v->cur_height;
int uv_len = len;
switch(v->cur_fmt)
{
case FMT_420:
case FMT_420F:
uv_len=len/4;
break;
default:
uv_len=len/2;
break;
}
if(t->have_frame == 1 )
{
veejay_memcpy(buffer[0], tag->socket_frame, len );
veejay_memcpy(buffer[1], tag->socket_frame+len, uv_len );
veejay_memcpy(buffer[2], tag->socket_frame+len+uv_len, uv_len );
t->grab = 1;
unlock(t);
return 1;
}
else if(t->have_frame == 2 )
{
int b_len = v->in_width * v->in_height;
int buvlen = b_len;
switch(v->in_fmt)
{
case FMT_420:
case FMT_420F:
buvlen = b_len/4;
break;
default:
buvlen = b_len/2;
break;
}
VJFrame *a = yuv_yuv_template( tag->socket_frame, tag->socket_frame + b_len, tag->socket_frame+b_len+buvlen,
v->in_width,v->in_height, get_ffmpeg_pixfmt( v->in_fmt ));
VJFrame *b = yuv_yuv_template( buffer[0],buffer[1], buffer[2],
v->cur_width,v->cur_height,get_ffmpeg_pixfmt(v->cur_fmt));
yuv_convert_any_ac(a,b, a->format,b->format );
free(a);
free(b);
}
t->grab = 1;
unlock(t);
return 1;
}
int net_thread_start(vj_client *v, vj_tag *tag)
{
int success = 0;
int res = 0;
if(tag->source_type == VJ_TAG_TYPE_MCAST )
success = vj_client_connect( v,NULL,tag->source_name,tag->video_channel );
else
success = vj_client_connect_dat( v, tag->source_name,tag->video_channel );
if( success <= 0 )
{
veejay_msg(VEEJAY_MSG_ERROR, "Unable to establish connection with %s on port %d",
tag->source_name, tag->video_channel + 5);
return 0;
}
else
{
veejay_msg(VEEJAY_MSG_INFO, "Connecton established with %s:%d",tag->source_name,
tag->video_channel + 5);
}
threaded_t *t = (threaded_t*)tag->priv;
pthread_mutex_init( &(t->mutex), NULL );
v->lzo = lzo_new();
t->repeat = 0;
t->have_frame = 0;
t->error = 0;
t->state = 1;
t->remote = v;
t->grab = 1;
if( tag->source_type == VJ_TAG_TYPE_MCAST )
{
char start_mcast[6];
sprintf(start_mcast, "%03d:;", VIMS_VIDEO_MCAST_START);
veejay_msg(VEEJAY_MSG_DEBUG, "Request mcast stream from %s port %d",
tag->source_name, tag->video_channel);
res = vj_client_send( v, V_CMD, start_mcast );
if( res <= 0 )
{
veejay_msg(VEEJAY_MSG_ERROR, "Unable to send to %s port %d",
tag->source_name, tag->video_channel );
return 0;
}
else
veejay_msg(VEEJAY_MSG_INFO, "Requested mcast stream from Veejay group %s port %d",
tag->source_name, tag->video_channel );
}
int p_err = pthread_create( &(t->thread), NULL, &reader_thread, (void*) tag );
if( p_err ==0)
{
veejay_msg(VEEJAY_MSG_INFO, "Created new %s threaded stream with Veejay host %s port %d",
tag->source_type == VJ_TAG_TYPE_MCAST ?
"multicast" : "unicast", tag->source_name,tag->video_channel);
return 1;
}
return 0;
}
void net_thread_stop(vj_tag *tag)
{
char mcast_stop[6];
threaded_t *t = (threaded_t*)tag->priv;
int ret = 0;
lock(t);
if(tag->source_type == VJ_TAG_TYPE_MCAST)
{
sprintf(mcast_stop, "%03d:;", VIMS_VIDEO_MCAST_STOP );
ret = vj_client_send( t->remote , V_CMD, mcast_stop);
if(ret)
veejay_msg(VEEJAY_MSG_INFO, "Stopped multicast stream");
}
if(tag->source_type == VJ_TAG_TYPE_NET)
{
sprintf(mcast_stop, "%03d:;", VIMS_CLOSE );
ret = vj_client_send( t->remote, V_CMD, mcast_stop);
if(ret)
veejay_msg(VEEJAY_MSG_INFO, "Stopped unicast stream");
}
t->state = 0;
unlock(t);
pthread_mutex_destroy( &(t->mutex));
veejay_msg(VEEJAY_MSG_INFO, "Disconnected from Veejay host %s:%d", tag->source_name,
tag->video_channel);
}
int net_already_opened(const char *filename, int n, int channel)
{
char sourcename[255];
int i;
for (i = 1; i < n; i++)
{
if (vj_tag_exists(i) )
{
vj_tag_get_source_name(i, sourcename);
if (strcasecmp(sourcename, filename) == 0)
{
vj_tag *tt = vj_tag_get( i );
if( tt->source_type == VJ_TAG_TYPE_NET || tt->source_type == VJ_TAG_TYPE_MCAST )
{
if( tt->video_channel == channel )
{
veejay_msg(VEEJAY_MSG_WARNING, "Already streaming from %s:%d in stream %d",
filename, channel, tt->id);
return 1;
}
}
}
}
}
return 0;
}

View File

@@ -0,0 +1,32 @@
/*
* Linux VeeJay
*
* Copyright(C)2002-2006 Niels Elburg <nwelburg@gmail.com>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License , or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
*/
#ifndef NETINSTR_HH
#define NETINSTR_HH
int net_already_opened(const char *filename, int n, int chan);
void net_thread_stop(vj_tag *tag);
int net_thread_start(vj_client *v, vj_tag *tag);
void net_thread_remote(void *priv, void *p );
int net_thread_get_frame( vj_tag *tag, uint8_t *buffer[3]);
void net_thread_exit(vj_tag *tag);
void *net_threader( );
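/* Rough usage sketch (illustrative only; the real wiring of tag->priv and
the playback loop lives in vj-tag.c and the veejay core). Here client,
running and planes stand in for the caller's vj_client, loop condition and
destination YUV planes:
tag->priv = net_threader();
if( net_thread_start( client, tag ) )
{
while( running )
net_thread_get_frame( tag, planes );
net_thread_stop( tag );
}
*/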
#endif

File diff suppressed because it is too large

View File

@@ -0,0 +1,324 @@
/* veejay - Linux VeeJay
* (C) 2002-2004 Niels Elburg <nwelburg@gmail.com>
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#ifndef VJ_TAG_H
#define VJ_TAG_H
#define VJ_TAG_TYPE_PICTURE 5
#define VJ_TAG_TYPE_COLOR 4
#define VJ_TAG_TYPE_VLOOPBACK 3
#define VJ_TAG_TYPE_V4L 2
#define VJ_TAG_TYPE_YUV4MPEG 1
#define VJ_TAG_TYPE_NONE 0
#define VJ_TAG_TYPE_SHM 11
#define VJ_TAG_TYPE_NET 13
#define VJ_TAG_TYPE_MCAST 14
#define VJ_TAG_MAX_V4L 16
#define VJ_TAG_MAX_STREAM_IN 255
#define VJ_TAG_TYPE_DV1394 17
#define VJ_TAG_TYPE_AVFORMAT 12
#define TAG_MAX_DESCR_LEN 150
#include <config.h>
#include <libsample/sampleadm.h>
#include <libstream/vj-yuv4mpeg.h>
#include <libvjnet/vj-client.h>
#include <libstream/vj-dv1394.h>
#ifdef USE_GDK_PIXBUF
typedef struct
{
void *pic;
} vj_picture;
#endif
typedef struct {
void *unicap[VJ_TAG_MAX_STREAM_IN];
vj_yuv *stream[VJ_TAG_MAX_STREAM_IN];
vj_client *net[VJ_TAG_MAX_STREAM_IN];
vj_dv1394 *dv1394[VJ_TAG_MAX_STREAM_IN];
#ifdef USE_GDK_PIXBUF
vj_picture *picture[VJ_TAG_MAX_STREAM_IN];
#endif
int width;
int height;
int depth;
int pix_fmt;
int uv_len;
} vj_tag_data;
typedef struct {
int id;
// char description[100];
sample_eff_chain *effect_chain[SAMPLE_MAX_EFFECTS];
int next_id;
int nframes;
int source_type;
char *source_name;
char *method_filename;
int index;
int depth;
int active;
int source;
int video_channel;
int encoder_active;
unsigned long sequence_num;
unsigned long rec_total_bytes;
// char *encoder_base;
unsigned long encoder_total_frames;
// char *encoder_destination;
char encoder_base[256];
char encoder_destination[256];
char descr[TAG_MAX_DESCR_LEN];
int encoder_format;
void *encoder;
lav_file_t *encoder_file;
long encoder_duration; /* in seconds */
long encoder_num_frames;
long encoder_succes_frames;
int encoder_width;
int encoder_height;
int encoder_max_size;
int color_r;
int color_g;
int color_b;
int opacity;
int fader_active;
int fader_direction;
float fader_val;
float fader_inc;
int selected_entry;
int effect_toggle;
int socket_ready;
int socket_len;
uint8_t *socket_frame;
int n_frames;
void *priv;
void *extra;
void *dict;
char padding[4];
int composite;
void *viewport_config;
void *viewport;
} vj_tag;
void *vj_tag_get_dict( int id );
int vj_tag_set_composite(void *compiz,int id, int n);
int vj_tag_get_composite(int t1);
int vj_tag_chain_malloc(int e);
int vj_tag_chain_free(int e);
int vj_tag_get_v4l_properties(int t1,int *brightness, int *contrast, int *hue,int *saturation,int *color, int *white );
int vj_tag_init(int w, int h, int pix_fmt);
int vj_tag_get_n_frames(int t1);
int vj_tag_set_n_frames(int t1, int n_frames);
int vj_tag_get_last_tag();
void vj_tag_free(void);
/* Change color of solid stream*/
int vj_tag_set_stream_color(int t1, int r, int g, int b);
int vj_tag_get_stream_color(int t1, int *r, int *g, int *b );
/* create a new tag, type is yuv4mpeg or v4l
stream_nr indicates which stream to take of the same type
*/
int vj_tag_new(int type, char *filename, int stream_nr, editlist * el,
int pix_fmt, int channel, int extra, int has_composite);
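/* An illustrative call (argument values are examples only):
vj_tag_new( VJ_TAG_TYPE_NET, "192.168.1.10", 0, el, el->pixel_format,
3490, 0, 0 );
*/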
/* return 1 if tag exists , 0 otherwise*/
int vj_tag_exists(int id);
/* return 1 if tag gets deleted, 0 on error */
int vj_tag_del(int id);
int vj_tag_verify_delete(int id, int type );
/* return -1 if there is no effect or if it is disabled, otherwise a positive value */
int vj_tag_get_effect(int t1, int position);
int vj_tag_size();
vj_tag *vj_tag_get(int id);
/* always return effect (-1 = empty) */
int vj_tag_get_effect_any(int t1, int position);
/* return -1 on error, otherwise argument gets updated */
int vj_tag_set_effect(int t1, int position, int effect_id);
/* return -1 on error, or return e_flag (effect enabled/disabled 1/0)*/
int vj_tag_get_chain_status(int t1, int position);
/* return -1 on error, otherwise set new status */
int vj_tag_set_chain_status(int t1, int position, int new_status);
/* return 0 on error, other value is trimmer (0 = no trim anyway) */
int vj_tag_get_trimmer(int t1, int position);
/* return -1 on error, or 1 on success */
int vj_tag_set_trimmer(int t1, int position, int value);
//int vj_tag_get_video_palette(int t1);
//int vj_tag_set_video_palette(int t1, int video_palette);
/* return -1 on error or 1 on success. tag's effect parameters get copied into args
args must be initialized.
*/
int vj_tag_get_all_effect_args(int t1, int position, int *args,
int arg_len, int n_frame);
int vj_tag_get_effect_arg(int t1, int p, int arg);
/* return -1 on error, 1 on success */
int vj_tag_set_effect_arg(int t1, int position, int argnr, int value);
/* return -1 on error, 1 on success */
int vj_tag_get_type(int t1);
/* returns number of tags */
int vj_tag_get_logical_index(int t1);
int vj_tag_clear_chain(int id);
int vj_tag_get_depth(int t1);
int vj_tag_set_depth(int t1, int depth);
int vj_tag_set_active(int t1, int active);
int vj_tag_get_active(int t1);
int vj_tag_chain_size(int t1);
int vj_tag_chain_remove(int t1, int index);
int vj_tag_set_chain_channel(int t1, int position, int channel);
int vj_tag_get_chain_channel(int t1, int position);
void vj_tag_get_source_name(int t1, char *dst);
int vj_tag_get_chain_source(int t1, int position);
int vj_tag_set_chain_source(int t1, int position, int source);
void vj_tag_get_descriptive(int type, char *dst);
int vj_tag_by_type(int type);
int vj_tag_get_offset(int t1, int entry);
int vj_tag_set_offset(int t1, int entry, int offset);
//int vj_tag_record_frame(int t1, uint8_t *buffer[3]);
int vj_tag_get_frame(int t1, uint8_t *buffer[3], uint8_t *abuf);
int vj_tag_get_audio_frame(int t1, uint8_t *dst );
int vj_tag_enable(int t1);
int vj_tag_disable(int t1);
int vj_tag_sprint_status(int tag_id, int cache,int sa, int ca, int r, int f, int m, int t, int macro,char *str );
//int vj_tag_init_encoder(int t1, char *filename, int format,
// int w, int h, double fps, long seconds, int autoplay);
int vj_tag_stop_encoder(int t1);
int vj_tag_set_brightness(int t1, int value);
int vj_tag_set_contrast(int t1, int value);
int vj_tag_set_color(int t1, int value);
int vj_tag_set_hue(int t1, int value);
int vj_tag_set_white(int t1, int value);
int vj_tag_set_saturation(int t1, int value);
void vj_tag_set_veejay_t(void *info);
int vj_tag_set_manual_fader(int t1, int value );
int vj_tag_get_fader_direction(int t1);
int vj_tag_set_fader_active(int t1, int nframes, int direction);
int vj_tag_set_fade_to_tag(int t1, int t2);
int vj_tag_set_fade_to_sample(int t1, int s1);
int vj_tag_set_fader_val(int t1, float val);
int vj_tag_apply_fader_inc(int t1);
int vj_tag_get_fader_active(int t1);
float vj_tag_get_fader_val(int t1);
float vj_tag_get_fader_inc(int t1);
int vj_tag_reset_fader(int t1);
int vj_tag_get_effect_status(int s1);
int vj_tag_get_selected_entry(int s1);
int vj_tag_set_effect_status(int s1, int status);
int vj_tag_set_selected_entry(int s1, int position);
void vj_tag_close_all();
int vj_tag_composite(int t1);
int vj_tag_init_encoder(int t1, char *filename, int format, long nframes);
int vj_tag_record_frame(int t1, uint8_t *buffer[3], uint8_t *abuff, int audio_size);
int vj_tag_get_encoded_frames(int t1);
long vj_tag_get_total_frames(int t1);
long vj_tag_get_duration(int t1);
int vj_tag_reset_autosplit(int t1);
int vj_tag_get_frames_left(int t1);
int vj_tag_encoder_active(int t1);
int vj_tag_get_num_encoded_files(int t1);
int vj_tag_get_encoder_format(int t1);
int vj_tag_get_sequenced_file( int t1, char *descr, int num, char *ext);
int vj_tag_try_filename(int t1, char *filename, int format);
int vj_tag_get_encoded_file(int t1, char *descr);
void vj_tag_reset_encoder( int t1 );
void vj_tag_record_init(int w, int h);
void vj_tag_get_method_filename(int t1, char *dst);
int vj_tag_get_last_tag(void);
int vj_tag_put( vj_tag *tag );
int vj_tag_is_deleted(int id);
void vj_tag_close_all();
int vj_tag_continue_record( int t1 );
int vj_tag_set_logical_index(int t1, int stream_nr);
int vj_tag_set_description(int t1, char *descr);
int vj_tag_get_description(int t1, char *descr);
void vj_tag_get_by_type( int type, char *descr );
int vj_tag_chain_set_kfs( int s1, int len, unsigned char *data );
unsigned char * vj_tag_chain_get_kfs( int s1, int entry, int parameter_id, int *len );
int vj_tag_get_kf_status(int t1, int entry);
int vj_tag_chain_set_kf_status( int s1, int entry, int status );
int vj_tag_chain_reset_kf( int s1, int entry );
int vj_tag_var(int t1, int *type, int *fader, int *fx_sta , int *rec_sta, int *active );
int vj_tag_true_size();
void *vj_tag_get_kf_port( int s1, int entry );
char *vj_tag_scan_devices( void );
int vj_tag_get_kf_tokens( int s1, int entry, int id, int *start,int *end, int *type);
int vj_tag_num_devices();
void vj_tag_reload_config( void *compiz, int t1, int mode );
void *vj_tag_get_composite_view(int t1);
int vj_tag_set_composite_view(int t1, void *v);
#ifdef HAVE_XML2
void tag_writeStream( char *file, int n, xmlNodePtr node, void *font, void *vp);
void tagCreateStream(xmlNodePtr node, vj_tag *tag, void *font, void *vp);
void tagCreateStreamFX(xmlNodePtr node, vj_tag *tag);
void tagParseStreamFX(char *file, xmlDocPtr doc, xmlNodePtr cur, void *font,
void *vp);
#endif
#endif

File diff suppressed because it is too large

View File

@@ -0,0 +1,54 @@
/* veejay - Linux VeeJay Unicap interface
* (C) 2002-2006 Niels Elburg <nwelburg@gmail.com>
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#ifndef VJUNICAPHH
#define VJUNICAPHH
void *vj_unicap_init(void);
void vj_unicap_deinit(void *dud );
int vj_unicap_num_capture_devices( void *dud );
char **vj_unicap_get_devices(void *unicap, int *n);
void *vj_unicap_new_device( void *ud, int device_id );
int vj_unicap_configure_device( void *ud, int pixel_format, int w, int h, int composite );
int vj_unicap_start_capture( void *vut );
int vj_unicap_grab_frame( void *vut, uint8_t *buffer[3], const int w, const int h );
int vj_unicap_stop_capture( void *vut );
int vj_unicap_composite_status(void *ud );
int vj_unicap_status(void *vut);
void vj_unicap_free_device( void *dud, void *vut );
char **vj_unicap_get_list( void *ud );
int vj_unicap_get_value( void *ud, char *key, int atom_type, void *value );
int vj_unicap_select_value( void *ud, int key, double );
void vj_unicap_set_pause( void *vut , int status );
int vj_unicap_get_pause( void *vut );
#define UNICAP_BRIGHTNESS 0
#define UNICAP_COLOR 1
#define UNICAP_SATURATION 2
#define UNICAP_HUE 3
#define UNICAP_CONTRAST 4
#define UNICAP_SOURCE0 5
#define UNICAP_SOURCE1 6
#define UNICAP_SOURCE2 7
#define UNICAP_SOURCE3 8
#define UNICAP_SOURCE4 9
#define UNICAP_PAL 10
#define UNICAP_NTSC 11
#define UNICAP_WHITE 12
#endif

View File

@@ -0,0 +1,492 @@
/* veejay - Linux VeeJay
* (C) 2002-2005 Niels Elburg <nwelburg@gmail.com>
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
Put vloopback back in place
Re-used large portions of dc1394_vloopback.c
from Dan Dennedy <dan@dennedy.org>
*/
/*
vloopback pusher (using pipes)
If someone wants to implement mmap, add SIGIO to the signal catcher
and use mutexes for asynchronously handling IO. I am too lazy.
*/
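/* A minimal sketch of the SIGIO hookup mentioned above (untested and purely
illustrative; the_vloop is a hypothetical global): install a handler, make
this process the owner of the vloopback fd and enable asynchronous
notification, then dispatch the client's ioctl requests from the handler.
static void vloop_sigio( int sig )
{
// vj_vloopback_signal_handler( the_vloop, sig );
}
signal( SIGIO, vloop_sigio );
fcntl( v->fd, F_SETOWN, getpid() );
fcntl( v->fd, F_SETFL, fcntl( v->fd, F_GETFL ) | O_ASYNC );
*/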
#include <config.h>
#ifdef HAVE_V4L
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/poll.h>
#include <signal.h>
#include <unistd.h>
#include <stdint.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <linux/videodev.h>
#include <libvjmem/vjmem.h>
#include <libvjmsg/vj-msg.h>
#define VLOOPBACK_MMAP 0 // commented out
#define VLOOPBACK_PIPE 1
#define VLOOPBACK_N_BUFS 2
typedef struct
{
char *dev_name; /* device name */
int palette; /* palette from vjframe */
int width;
int height;
int norm;
int mode; /* PAL or NTSC */
int fd;
int size; /* size of image out_buf */
uint8_t *out_buf;
uint8_t *out_map; /* mmap segment */
} vj_vloopback_t;
/* Open the vloopback device */
void *vj_vloopback_open(const char *device_name, int norm, int mode,
int w, int h, int pixel_format)
{
void *ret = NULL;
vj_vloopback_t *v = (vj_vloopback_t*) vj_malloc(sizeof(vj_vloopback_t));
if(!v) return ret;
memset(v , 0, sizeof(vj_vloopback_t ));
v->fd = open( device_name, O_RDWR );
v->norm = norm;
v->mode = mode;
v->width = w;
v->height = h;
v->palette = (pixel_format == 1 ? VIDEO_PALETTE_YUV422P :
VIDEO_PALETTE_YUV420P );
if(v->fd < 0)
{
if(v) free(v);
veejay_msg(VEEJAY_MSG_ERROR, "Cannot open vloopback %s",
device_name );
return ret;
}
v->dev_name = strdup( device_name );
ret = (void*) v;
veejay_msg(VEEJAY_MSG_DEBUG,
"Vloopback %s size %d x %d, palette YUV42%sP",
v->dev_name,
v->width,
v->height,
(pixel_format == 1 ? "2" : "0" ) );
return (void*) ret;
}
int vj_vloopback_get_mode( void *vloop )
{
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
return v->mode;
}
/* write mode*/
int vj_vloopback_start_pipe( void *vloop )
{
struct video_capability caps;
struct video_window win;
struct video_picture pic;
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
if(!v) return 0;
if(v->mode != VLOOPBACK_PIPE)
veejay_msg(VEEJAY_MSG_ERROR,"Program error");
/* the out_palette defines what format ! */
/* get capabilities */
if( ioctl( v->fd, VIDIOCGCAP, &caps ) < 0 )
{
veejay_msg(VEEJAY_MSG_DEBUG, "Cant get video capabilities");
return 0;
}
/* get picture */
if( ioctl( v->fd, VIDIOCGPICT, &pic ) < 0 )
{
veejay_msg(VEEJAY_MSG_DEBUG, "Cant get video picture");
return 0;
}
/* set palette */
pic.palette = v->palette;
if( ioctl( v->fd, VIDIOCSPICT, &pic ) < 0 )
{
veejay_msg(VEEJAY_MSG_DEBUG, "Cant set video picture (palette %d)",v->palette);
return 0;
}
/* set window */
win.width = v->width;
win.height = v->height;
if( ioctl( v->fd, VIDIOCSWIN, &win ) < 0 )
{
veejay_msg(VEEJAY_MSG_DEBUG ,"Can't set video window %d x %d",
v->width,v->height );
return 0;
}
int len = v->width * v->height ;
int vshift = (v->palette ==
VIDEO_PALETTE_YUV422P ? 0 : 1 );
int uv_len = (v->width >> 1 ) * (v->height >> vshift);
v->size = len + (2 * uv_len);
veejay_msg(VEEJAY_MSG_DEBUG,
"vloopback pipe (Y plane %d bytes, UV plane %d bytes) H=%d, V=%d",
len,uv_len,1,vshift );
v->out_buf = (uint8_t*) vj_malloc(sizeof(uint8_t) * v->size );
if(!v->out_buf)
{
veejay_msg(VEEJAY_MSG_ERROR, "Can't allocate sufficient memory for vloopback");
return 0;
}
memset(v->out_buf, 0 , v->size );
return 1;
}
int vj_vloopback_write_pipe( void *vloop )
{
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
if(!v) return 0;
int res = write( v->fd, v->out_buf, v->size );
if(res <= 0)
return 0;
return 1;
}
int vj_vloopback_fill_buffer( void *vloop, uint8_t **frame )
{
// write frame to v->out_buf (veejay_memcpy)
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
if(!v) return 0;
int len = v->width * v->height ;
int hshift = (v->palette ==
VIDEO_PALETTE_YUV422P ? 0 : 1 );
int uv_len = (v->width >> hshift ) * (v->height >> 1);
/* copy data to linear buffer */
veejay_memcpy( v->out_buf, frame[0], len );
veejay_memcpy( v->out_buf + len,
frame[1], uv_len );
veejay_memcpy( v->out_buf + len + uv_len,
frame[2], uv_len );
return 1;
}
/*
int vj_vloopback_start_mmap( void *vloop )
{
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
if(!v)
return 0;
int len = v->width * v->height ;
int hshift = (v->palette ==
VIDEO_PALETTE_YUV422P ? 0 : 1 );
int uv_len = (v->width >> hshift ) * (v->height >> 1);
v->size = len + (2 * uv_len);
v->out_buf = (uint8_t*) vj_malloc(
sizeof(uint8_t) * v->size * VLOOPBACK_N_BUFS );
if(!v->out_buf)
return 0;
v->out_map = mmap( 0, (v->size * VLOOPBACK_N_BUFS), PROT_READ| PROT_WRITE,
MAP_SHARED, v->fd , 0 );
if( v->out_map == (uint8_t*) -1 )
{
veejay_msg(VEEJAY_MSG_ERROR, "Cannot mmap memory");
return 0;
}
veejay_msg(VEEJAY_MSG_ERROR, "%s", __FUNCTION__ );
return 1;
}
int vj_vloopback_write_mmap( void *vloop, int frame )
{
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
veejay_memcpy( v->out_map + (v->size * frame), v->out_buf, v->size );
return 1;
}
int vj_vloopback_ioctl( void *vloop, unsigned long int cmd, void *arg )
{
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
veejay_msg(VEEJAY_MSG_INFO, "%s %d / %d",
__FUNCTION__, __LINE__ , cmd);
switch(cmd)
{
case VIDIOCGCAP:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_capability *cap = arg;
sprintf( cap->name, "Veejay Digital Sampler");
cap->type = VID_TYPE_CAPTURE;
cap->channels = 1;
cap->audios = 0;
cap->maxwidth = v->width;
cap->maxheight = v->height;
cap->minwidth = v->width;
cap->minheight = v->height;
break;
}
case VIDIOCGTUNER:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_tuner *tuner = arg;
sprintf( tuner->name, "Veejay Digital Sampler");
tuner->tuner = 0;
tuner->rangelow = 0;
tuner->rangehigh = 0;
tuner->flags = VIDEO_TUNER_PAL | VIDEO_TUNER_NTSC;
tuner->mode = (v->norm ? VIDEO_MODE_PAL : VIDEO_MODE_NTSC);
tuner->signal = 0;
break;
}
case VIDIOCGCHAN:
{
struct video_channel *vidchan=arg;
vidchan->channel = 0;
vidchan->flags = 0;
vidchan->tuners = 0;
vidchan->type = VIDEO_TYPE_CAMERA;
strcpy(vidchan->name, "Veejay Dummy channel");
break;
}
case VIDIOCGPICT:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_picture *vidpic=arg;
vidpic->colour = 0xffff;
vidpic->hue = 0xffff;
vidpic->brightness = 0xffff;
vidpic->contrast = 0xffff;
vidpic->whiteness = 0xffff;
vidpic->palette = v->palette;
vidpic->depth = (
v->palette == VIDEO_PALETTE_YUV420P ?
12 : 16 );
break;
}
case VIDIOCSPICT:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_picture *vidpic=arg;
if(vidpic->palette != v->palette )
veejay_msg(VEEJAY_MSG_ERROR,
"requested palette %d, but only using %d now",
vidpic->palette, v->palette );
return 1;
}
case VIDIOCCAPTURE:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
break;
}
case VIDIOCGWIN:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_window *vidwin=arg;
vidwin->x=0;
vidwin->y=0;
vidwin->width=v->width;
vidwin->height=v->height;
vidwin->chromakey=0;
vidwin->flags=0;
vidwin->samplecount=0;
break;
}
case VIDIOCSWIN:
{
veejay_msg(VEEJAY_MSG_ERROR, "Cannot change size ! ");
break;
}
case VIDIOCGMBUF:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_mbuf *vidmbuf=arg;
int i;
vidmbuf->size = v->size;
vidmbuf->frames = VLOOPBACK_N_BUFS;
for (i=0; i < VLOOPBACK_N_BUFS; i++)
vidmbuf->offsets[i] = i * vidmbuf->size;
vidmbuf->size *= vidmbuf->frames;
break;
}
case VIDIOCMCAPTURE:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_mmap *vidmmap=arg;
if ( vidmmap->format != v->palette )
{
veejay_msg(VEEJAY_MSG_ERROR, "capture palette not current palette!");
return 1;
}
if (vidmmap->height != v->height ||
vidmmap->width != v->width) {
veejay_msg(VEEJAY_MSG_ERROR, "caputure: invalid size %dx%d\n", vidmmap->width, vidmmap->height );
return 1;
}
break;
}
case VIDIOCSYNC:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
struct video_mmap *vidmmap=arg;
if(!vj_vloopback_write_mmap( vloop, vidmmap->frame ))
return 1;
break;
}
default:
{
veejay_msg(VEEJAY_MSG_INFO, "%s %d",
__FUNCTION__, __LINE__ );
veejay_msg(VEEJAY_MSG_ERROR, "ioctl %ld unhandled\n", cmd & 0xff);
break;
}
}
return 0;
}
*/
void vj_vloopback_close( void *vloop )
{
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
if(v)
{
if(v->fd)
close( v->fd );
if(v->out_buf)
free(v->out_buf);
/* if(v->out_map)
munmap( v->out_map,
v->size * VLOOPBACK_N_BUFS );*/
free(v);
}
}
/*
void vj_vloopback_signal_handler( void *vloop, int sig_no )
{
int size,ret;
unsigned long int cmd;
struct pollfd ufds;
char ioctlbuf[1024];
vj_vloopback_t *v = (vj_vloopback_t*) vloop;
if(sig_no != SIGIO )
return;
ufds.fd = v->fd;
ufds.events = POLLIN;
ufds.revents = 0;
poll( &ufds, 1, 10 ); // 10 ms too small ?
if( !(ufds.revents & POLLIN) )
{
veejay_msg(VEEJAY_MSG_ERROR,
"Received signal but got negative on poll");
return;
}
size = read( v->fd, ioctlbuf, 1024 );
if( size >= sizeof( unsigned long int ))
{
veejay_memcpy( &cmd, ioctlbuf, sizeof(unsigned long int));
if( cmd == 0 )
{
veejay_msg(VEEJAY_MSG_ERROR,
"Client closed device");
return;
}
ret = vj_vloopback_ioctl( vloop, cmd, ioctlbuf + sizeof( unsigned long int ));
if(ret)
{
memset( ioctlbuf + sizeof( unsigned long int ), 0xff, 1024 - sizeof( unsigned long int ));
veejay_msg(VEEJAY_MSG_ERROR,
"IOCTL %d unsuccessful", cmd & 0xff);
}
ioctl( v->fd, cmd, ioctlbuf + sizeof( unsigned long int ));
}
return ;
}
*/
#endif

View File

@@ -0,0 +1,42 @@
/* veejay - Linux VeeJay
* (C) 2002-2005 Niels Elburg <nwelburg@gmail.com>
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
void *vj_vloopback_open(const char *device_name, int norm, int mode,
int w, int h, int pixel_format);
// if using write mode
int vj_vloopback_start_pipe( void *vloop );
int vj_vloopback_write_pipe( void *vloop );
// refresh buffer every cycle
int vj_vloopback_fill_buffer( void *vloop, uint8_t **image );
// if using mmap mode
//int vj_vloopback_start_mmap( void *vloop );
//int vj_vloopback_ioctl( void *vloop, unsigned long int cmd, void *arg );
int vj_vloopback_get_mode( void *vloop );
void vj_vloopback_close( void *vloop );
//void vj_vloopback_signal_handler( void *vloop, int sig_no );

View File

@@ -0,0 +1,281 @@
/*
* Linux VeeJay
*
* Copyright(C)2002-2004 Niels Elburg <nwelburg@gmail.com>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License , or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
*/
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <libvjmsg/vj-msg.h>
#include <libstream/vj-yuv4mpeg.h>
#include <string.h>
/* the audio routines are placed here
because it's a feature I need; they should be removed or put elsewhere.
*/
#define L_FOURCC(a,b,c,d) ( (d<<24) | ((c&0xff)<<16) | ((b&0xff)<<8) | (a&0xff) )
#define L_FOURCC_RIFF L_FOURCC ('R', 'I', 'F', 'F')
#define L_FOURCC_WAVE L_FOURCC ('W', 'A', 'V', 'E')
#define L_FOURCC_FMT L_FOURCC ('f', 'm', 't', ' ')
#define L_FOURCC_DATA L_FOURCC ('d', 'a', 't', 'a')
typedef struct {
unsigned long rifftag;
unsigned long rifflen;
unsigned long wavetag;
unsigned long fmt_tag;
unsigned long fmt_len;
unsigned short wFormatTag;
unsigned short nChannels;
unsigned long nSamplesPerSec;
unsigned long nAvgBytesPerSec;
unsigned short nBlockAlign;
unsigned short wBitsPerSample;
unsigned long datatag;
unsigned long datalen;
} t_wave_hdr;
t_wave_hdr *wave_hdr;
int bytecount = 0;
vj_yuv *vj_yuv4mpeg_alloc(editlist * el, int w, int h)
{
vj_yuv *yuv4mpeg = (vj_yuv *) malloc(sizeof(vj_yuv));
if(!yuv4mpeg) return NULL;
yuv4mpeg->sar = y4m_sar_UNKNOWN;
yuv4mpeg->dar = y4m_dar_4_3;
y4m_init_stream_info(&(yuv4mpeg->streaminfo));
y4m_init_frame_info(&(yuv4mpeg->frameinfo));
yuv4mpeg->width = w;
yuv4mpeg->height = h;
yuv4mpeg->audio_rate = el->audio_rate;
yuv4mpeg->video_fps = el->video_fps;
yuv4mpeg->has_audio = el->has_audio;
yuv4mpeg->audio_bits = el->audio_bps;
return yuv4mpeg;
}
void vj_yuv4mpeg_free(vj_yuv *v) {
}
int vj_yuv_stream_start_read(vj_yuv * yuv4mpeg, char *filename, int width,
int height)
{
int i, w, h;
yuv4mpeg->fd = open(filename,O_RDONLY);
if (yuv4mpeg->fd < 0) {
veejay_msg(VEEJAY_MSG_ERROR, "Unable to open video stream %s\n",
filename);
return -1;
}
i = y4m_read_stream_header(yuv4mpeg->fd, &(yuv4mpeg->streaminfo));
if (i != Y4M_OK) {
veejay_msg(VEEJAY_MSG_ERROR, "yuv4mpeg: %s", y4m_strerr(i));
return -1;
}
w = y4m_si_get_width(&(yuv4mpeg->streaminfo));
h = y4m_si_get_height(&(yuv4mpeg->streaminfo));
if( w != width || h != height )
{
veejay_msg(VEEJAY_MSG_ERROR,
"Video dimensions: %d x %d must match %d x %d. Stream cannot be opened", w, h,
width, height);
return -1;
}
veejay_msg(VEEJAY_MSG_DEBUG, "YUV4MPEG: stream header ok");
return 0;
}
int vj_yuv_stream_write_header(vj_yuv * yuv4mpeg, editlist * el)
{
int i = 0;
y4m_si_set_width(&(yuv4mpeg->streaminfo), yuv4mpeg->width);
y4m_si_set_height(&(yuv4mpeg->streaminfo), yuv4mpeg->height);
y4m_si_set_interlace(&(yuv4mpeg->streaminfo), el->video_inter);
y4m_si_set_framerate(&(yuv4mpeg->streaminfo),
mpeg_conform_framerate(el->video_fps));
if (!Y4M_RATIO_EQL(yuv4mpeg->sar, y4m_sar_UNKNOWN)) {
y4m_si_set_sampleaspect(&(yuv4mpeg->streaminfo), yuv4mpeg->sar);
yuv4mpeg->sar.n = el->video_sar_width;
yuv4mpeg->sar.d = el->video_sar_height;
y4m_si_set_sampleaspect(&(yuv4mpeg->streaminfo), yuv4mpeg->sar);
} else {
y4m_ratio_t dar2 = y4m_guess_sar(yuv4mpeg->width,
yuv4mpeg->height,
yuv4mpeg->dar);
y4m_si_set_sampleaspect(&(yuv4mpeg->streaminfo), dar2);
}
i = y4m_write_stream_header(yuv4mpeg->fd, &(yuv4mpeg->streaminfo));
if (i != Y4M_OK)
return -1;
y4m_log_stream_info(2, "vj-yuv4mpeg", &(yuv4mpeg->streaminfo));
return 0;
}
int vj_yuv_stream_open_pipe(vj_yuv *yuv4mpeg, char *filename,editlist *el)
{
yuv4mpeg->fd = open(filename,O_WRONLY,0600);
if(yuv4mpeg->fd < 0) return 0;
return 1;
}
int vj_yuv_stream_header_pipe(vj_yuv *yuv4mpeg,editlist *el)
{
yuv4mpeg->has_audio = el->has_audio;
vj_yuv_stream_write_header(yuv4mpeg, el);
//if (el->has_audio) {
// if (!vj_yuv_write_wave_header(el, filename))
// return 0;
// }
return 1;
}
int vj_yuv_stream_start_write(vj_yuv * yuv4mpeg, char *filename,
editlist * el)
{
//if(mkfifo( filename, 0600)!=0) return -1;
/* if the file exists gamble and simply append,
if it does not exist write header
*/
struct stat sstat;
if(strncasecmp( filename, "stdout", 6) == 0)
{
yuv4mpeg->fd = 1;
}
else
{
if(strncasecmp(filename, "stderr", 6) == 0)
{
yuv4mpeg->fd = 2;
}
else
{
if (stat(filename, &sstat) == 0)
{
if (S_ISREG(sstat.st_mode))
{
/* the file is a regular file */
yuv4mpeg->fd = open(filename, O_APPEND | O_WRONLY, 0600);
if (yuv4mpeg->fd < 0)
return -1;
}
else
{
if (S_ISFIFO(sstat.st_mode))
veejay_msg(VEEJAY_MSG_INFO, "Destination file is a FIFO");
return 1; // pipe needs handling
}
}
else
{
veejay_msg(VEEJAY_MSG_INFO, "Creating YUV4MPEG regular file %s\n",
filename);
yuv4mpeg->fd = open(filename, O_CREAT | O_WRONLY, 0600);
if (yuv4mpeg->fd < 0)
return -1;
}
}
}
vj_yuv_stream_write_header(yuv4mpeg, el);
yuv4mpeg->has_audio = el->has_audio;
return 0;
}
void vj_yuv_stream_stop_write(vj_yuv * yuv4mpeg)
{
y4m_fini_stream_info(&(yuv4mpeg->streaminfo));
y4m_fini_frame_info(&(yuv4mpeg->frameinfo));
close(yuv4mpeg->fd);
}
void vj_yuv_stream_stop_read(vj_yuv * yuv4mpeg)
{
y4m_fini_stream_info(&(yuv4mpeg->streaminfo));
y4m_fini_frame_info(&(yuv4mpeg->frameinfo));
close(yuv4mpeg->fd);
yuv4mpeg->sar = y4m_sar_UNKNOWN;
yuv4mpeg->dar = y4m_dar_4_3;
}
int vj_yuv_get_frame(vj_yuv * yuv4mpeg, uint8_t * dst[3])
{
int i;
i = y4m_read_frame(yuv4mpeg->fd, &(yuv4mpeg->streaminfo),
&(yuv4mpeg->frameinfo), dst);
if (i != Y4M_OK)
{
veejay_msg(VEEJAY_MSG_ERROR, "yuv4mpeg %s", y4m_strerr(i));
return -1;
}
return 0;
}
int vj_yuv_get_aframe(vj_yuv * yuv4mpeg, uint8_t * audio)
{
return 0; /* unused */
}
int vj_yuv_put_frame(vj_yuv * vjyuv, uint8_t ** src)
{
int i;
if (!vjyuv->fd) {
veejay_msg(VEEJAY_MSG_ERROR, "Invalid file descriptor for y4m stream");
return -1;
}
i = y4m_write_frame(vjyuv->fd, &(vjyuv->streaminfo),
&(vjyuv->frameinfo), src);
if (i != Y4M_OK) {
veejay_msg(VEEJAY_MSG_ERROR, "Yuv4Mpeg : [%s]", y4m_strerr(i));
return -1;
}
return 0;
}
int vj_yuv_put_aframe(uint8_t * audio, editlist * el, int len)
{
int i = 0;
return i;
}

View File

@@ -0,0 +1,66 @@
/* veejay - Linux VeeJay
* (C) 2002-2004 Niels Elburg <nwelburg@gmail.com>
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#ifndef VJ_YUV4MPEG_H
#define VJ_YUV4MPEG_H
#include <libel/vj-el.h>
#include <mjpegtools/mpegconsts.h>
typedef struct {
y4m_stream_info_t streaminfo;
y4m_frame_info_t frameinfo;
y4m_ratio_t sar;
y4m_ratio_t dar;
int width;
int height;
int fd;
int has_audio;
int audio_bits;
float video_fps;
long audio_rate;
} vj_yuv;
vj_yuv *vj_yuv4mpeg_alloc(editlist * el, int dst_w, int dst_h);
void vj_yuv4mpeg_free(vj_yuv *v) ;
int vj_yuv_stream_start_read(vj_yuv *, char *, int width, int height);
int vj_yuv_stream_write_header(vj_yuv * yuv4mpeg, editlist * el);
int vj_yuv_stream_start_write(vj_yuv *, char *, editlist *);
void vj_yuv_stream_stop_read(vj_yuv * yuv4mpeg);
void vj_yuv_stream_stop_write(vj_yuv * yuv4mpeg);
int vj_yuv_get_frame(vj_yuv *, uint8_t **);
int vj_yuv_put_frame(vj_yuv * vjyuv, uint8_t **);
int vj_yuv_get_aframe(vj_yuv * vjyuv, uint8_t * audio);
int vj_yuv_put_aframe(uint8_t * audio, editlist *el, int len);
int vj_yuv_write_wave_header(editlist * el, char *outfile);
int vj_yuv_stream_open_pipe(vj_yuv *, char *, editlist *el);
int vj_yuv_stream_header_pipe( vj_yuv *, editlist *el );
#endif