use threaded capture by default

added double-buffered capture
attempted to fix pixelformat negotiation
niels
2011-04-13 23:58:47 +02:00
parent 630cd10cea
commit cab87d71a4
3 changed files with 141 additions and 125 deletions
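The double-buffered hand-off introduced by this commit follows a standard producer/consumer pattern: the grabber thread converts the captured image into one of two frame slots, marks the slot done, and signals a condition variable; the host thread waits until a slot is ready, copies it out, and releases it. Below is a minimal, self-contained sketch of that pattern, assuming simplified names — capture_t, frame_produced and frame_consume are illustrative only, not the veejay API (the actual code does this in v4l2_pull_frame_intern and v4l2_thread_pull).

/* Sketch of a double-buffered producer/consumer hand-off with a pthread
 * condition variable. The mutex and cond must be initialised first with
 * pthread_mutex_init()/pthread_cond_init(), as the commit does in
 * v4l2_thread_new(). */
#include <pthread.h>
#include <string.h>
#include <stdint.h>
#include <stddef.h>

#define N_FRAMES 2

typedef struct {
    uint8_t        *data[N_FRAMES];   /* two capture slots */
    int             done[N_FRAMES];   /* slot holds a finished frame */
    int             ready;            /* index of most recently finished slot */
    int             idx;              /* slot the grabber writes next */
    size_t          size;             /* bytes per frame */
    pthread_mutex_t mutex;
    pthread_cond_t  cond;
} capture_t;

/* producer: called from the grabber thread after a frame was converted */
static void frame_produced(capture_t *c)
{
    pthread_mutex_lock(&c->mutex);
    c->done[c->idx] = 1;
    c->ready        = c->idx;
    c->idx          = (c->idx + 1) % N_FRAMES;   /* flip to the other buffer */
    pthread_mutex_unlock(&c->mutex);
    pthread_cond_signal(&c->cond);               /* wake a waiting consumer */
}

/* consumer: called from the host thread; blocks until a frame is ready */
static void frame_consume(capture_t *c, uint8_t *dst)
{
    pthread_mutex_lock(&c->mutex);
    while (!c->done[c->ready])                   /* guard against spurious wakeups */
        pthread_cond_wait(&c->cond, &c->mutex);
    int slot = c->ready;
    pthread_mutex_unlock(&c->mutex);

    memcpy(dst, c->data[slot], c->size);         /* copy outside the lock */

    pthread_mutex_lock(&c->mutex);
    c->done[slot] = 0;                           /* hand the slot back to the grabber */
    pthread_mutex_unlock(&c->mutex);
}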


@@ -89,6 +89,8 @@ typedef struct {
 size_t length;
 } bufs;
+#define N_FRAMES 2
 typedef struct
 {
 int fd;
@@ -106,11 +108,16 @@ typedef struct
 VJFrame *info;
 void *scaler;
 int planes[4];
+int out_planes[4];
 int rw;
 int composite;
 int is_jpeg;
 int sizeimage;
-VJFrame *buffer_filled;
+VJFrame *frames[N_FRAMES];
+VJFrame *host_frame;
+int frames_done[N_FRAMES];
+int frameidx;
+int frame_ready;
 uint8_t *tmpbuf;
 int is_streaming;
 int pause_read;
@@ -119,10 +126,9 @@ typedef struct
 AVCodec *codec;
 AVCodecContext *c;
 AVFrame *picture;
-uint8_t *dst_ptr[3];
 void *video_info;
 int processed_buffer;
-int allinthread;
 int grey;
 int threaded;
 } v4l2info;
@@ -141,6 +147,14 @@ static void unlock_(v4l2_thread_info *i) {
 }
 }
+static void wait_(v4l2_thread_info *i) {
+pthread_cond_wait( &(i->cond), &(i->mutex));
+}
+static void signal_(v4l2_thread_info *i) {
+pthread_cond_signal( &(i->cond) );
+}
 static int vioctl( int fd, int request, void *arg )
 {
 int ret;
@@ -229,7 +243,7 @@ static int v4l2_pixelformat2ffmpeg( int pf )
 default:
 break;
 }
-return -1;
+return PIX_FMT_BGR24;
 }
 static int v4l2_ffmpeg2v4l2( int pf)
 {
@@ -247,10 +261,11 @@ static int v4l2_ffmpeg2v4l2( int pf)
 return V4L2_PIX_FMT_YUV420;
 case PIX_FMT_YUV422P:
 return V4L2_PIX_FMT_YUV422P;
+case PIX_FMT_YUVJ422P:
+return V4L2_PIX_FMT_YUV422P;
 case PIX_FMT_YUVJ444P:
 case PIX_FMT_YUV444P:
-// return V4L2_PIX_FMT_YUV32;
+return V4L2_PIX_FMT_YUV32;
 default:
@@ -382,7 +397,7 @@ static void v4l2_enum_frame_sizes( v4l2info *v )
 }
 }
-static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei, int *pp )
+static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei, int *pp, int orig_palette )
 {
 struct v4l2_format format;
@@ -394,7 +409,7 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 return -1;
 }
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: current configuration is in %s (%dx%d)",
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Current configuration is in %s (%dx%d)",
 (char*) &v->format.fmt.pix.pixelformat,
 v->format.fmt.pix.width,
 v->format.fmt.pix.height );
@@ -405,8 +420,9 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 format.fmt.pix.height= hei;
 format.fmt.pix.field = V4L2_FIELD_ANY;
-int ffmpeg_pixelformat = get_ffmpeg_pixfmt(pixelformat);
-int v4l2_pixel_format = v4l2_ffmpeg2v4l2( ffmpeg_pixelformat );
+int v4l2_pixel_format = v4l2_ffmpeg2v4l2( pixelformat );
+if( orig_palette >= 0 )
+v4l2_pixel_format = orig_palette;
 //@ or take from environment
 if( *pp == 0 ) {
@@ -415,7 +431,7 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 int gc = atoi(greycap);
 if( gc == 1 ) {
 v4l2_pixel_format = V4L2_PIX_FMT_GREY;
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: setting grey scale (env)");
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Setting grey scale (env)");
 v->grey=1;
 }
 }
@@ -426,24 +442,11 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 format.fmt.pix.pixelformat = v4l2_pixel_format;
-if( vioctl( v->fd, VIDIOC_TRY_FMT, &format ) != -1 ) {
-veejay_msg(VEEJAY_MSG_WARNING, "v4l2: format %s not supported by capture card... ",
+if( vioctl( v->fd, VIDIOC_TRY_FMT, &format ) == 0 ) {
+veejay_msg(VEEJAY_MSG_INFO, "v4l2: Format %s supported by capture card (?)", //@ some drivers dont and still get here
 (char*) &v4l2_pixel_format);
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: testing palette %4.4s (%dx%d)",
-(char*)&format.fmt.pix.pixelformat,
-format.fmt.pix.width,
-format.fmt.pix.height );
-if( format.fmt.pix.width != wid || format.fmt.pix.height != hei ) {
-veejay_msg(VEEJAY_MSG_WARNING,"v4l2: adjusting resolution from %dx%d to %dx%d",
-wid,hei,
-format.fmt.pix.width,
-format.fmt.pix.height );
-}
 if( format.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG )
 {
 struct v4l2_jpegcompression jpegcomp;
@@ -452,9 +455,11 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 ioctl(v->fd, VIDIOC_S_JPEGCOMP, &jpegcomp);
 v->is_jpeg = 1;
 v->tmpbuf = (uint8_t*) vj_malloc(sizeof(uint8_t) * wid * hei * 3 );
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: bpl=%d, colorspace %08x", format.fmt.pix.bytesperline,format.fmt.pix.colorspace);
+v4l2_pixel_format = V4L2_PIX_FMT_YUV420;
 }
 else if( format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
-{
+{ //@ untested!
 struct v4l2_jpegcompression jpegcomp;
 ioctl(v->fd, VIDIOC_G_JPEGCOMP, &jpegcomp);
 jpegcomp.jpeg_markers |= V4L2_JPEG_MARKER_DQT; // DQT
@@ -463,12 +468,11 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 v->codec = avcodec_find_decoder( CODEC_ID_MJPEG );
 if(v->codec == NULL) {
-veejay_msg(0, "Codec not found.");
+veejay_msg(0, "v4l2: (untested) Codec not found.");
 return -1;
 }
 v->c = avcodec_alloc_context();
-//v->c->codec_id = CODEC_ID_MJPEG;
 v->c->width = format.fmt.pix.width;
 v->c->height = format.fmt.pix.height;
 v->picture = avcodec_alloc_frame();
@@ -481,7 +485,7 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 if( avcodec_open( v->c, v->codec ) < 0 )
 {
-veejay_msg(0, "Error opening codec");
+veejay_msg(0, "v4l2: (untested) Error opening codec");
 free(v->picture->data[0]);
 free(v->picture->data[1]);
 free(v->picture->data[2]);
@@ -494,15 +498,10 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 } else if( format.fmt.pix.pixelformat != v4l2_pixel_format ) {
 int pf = v4l2_pixelformat2ffmpeg( format.fmt.pix.pixelformat );
 if( pf == -1) {
-veejay_msg(VEEJAY_MSG_ERROR, "No support for palette %4.4s",
+veejay_msg(VEEJAY_MSG_ERROR, "v4l2: No support for palette %4.4s",
 (char*) &format.fmt.pix.pixelformat);
 return -1;
 }
-veejay_msg(VEEJAY_MSG_WARNING,"v4l2: adjusting palette from %d to %d",
-ffmpeg_pixelformat ,
-v4l2_pixelformat2ffmpeg(format.fmt.pix.pixelformat)
-);
 }
 if( vioctl( v->fd, VIDIOC_S_FMT, &format ) == -1 ) {
@@ -515,7 +514,6 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 veejay_msg(0,"v4l2: negotation of data fails with %s", strerror(errno));
 return -1;
 }
 }
 if( -1 == vioctl( v->fd, VIDIOC_G_FMT, &format) ) {
@@ -523,7 +521,7 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 return -1;
 }
-veejay_msg(VEEJAY_MSG_INFO,"v4l2: using palette %4.4s (%dx%d)",
+veejay_msg(VEEJAY_MSG_INFO,"v4l2: Device captures in %4.4s (%dx%d)",
 (char*) &format.fmt.pix.pixelformat,
 format.fmt.pix.width,
 format.fmt.pix.height
@@ -538,7 +536,7 @@ static int v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei,
 v->format.fmt.pix.height = format.fmt.pix.height;
 v->info = yuv_yuv_template( NULL,NULL,NULL,format.fmt.pix.width, format.fmt.pix.height,
-v4l2_pixelformat2ffmpeg( format.fmt.pix.pixelformat ) );
+v4l2_pixelformat2ffmpeg( v4l2_pixel_format ) );
 yuv_plane_sizes( v->info, &(v->planes[0]),&(v->planes[1]),&(v->planes[2]),&(v->planes[3]) );
@@ -567,12 +565,12 @@ VJFrame *v4l2_get_dst( void *vv, uint8_t *Y, uint8_t *U, uint8_t *V ) {
 v4l2info *v = (v4l2info*) vv;
 if(v->threaded)
 lock_(v->video_info);
-v->buffer_filled->data[0] = Y;
-v->buffer_filled->data[1] = U;
-v->buffer_filled->data[2] = V;
+v->host_frame->data[0] = Y;
+v->host_frame->data[1] = U;
+v->host_frame->data[2] = V;
 if(v->threaded)
 unlock_(v->video_info);
-return v->buffer_filled;
+return v->host_frame;
 }
 static int v4l2_channel_choose( v4l2info *v, const int pref_channel )
@@ -585,7 +583,7 @@ static int v4l2_channel_choose( v4l2info *v, const int pref_channel )
 for ( i = 0; i < (pref_channel+1); i ++ ) {
 if( -1 == vioctl( v->fd, VIDIOC_S_INPUT, &i )) {
 #ifdef STRICT_CHECKING
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: input channel %d does not exist", i);
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Input channel %d does not exist", i);
 #endif
 if( errno == EINVAL )
 continue;
@@ -608,7 +606,7 @@ static int v4l2_verify_file( const char *file )
 {
 struct stat st;
 if( -1 == stat( file, &st )) {
-veejay_msg(0, "v4l2: cannot identify '%s':%d, %s",file,errno,strerror(errno));
+veejay_msg(0, "v4l2: Cannot identify '%s':%d, %s",file,errno,strerror(errno));
 return 0;
 }
 if( !S_ISCHR(st.st_mode)) {
@@ -619,7 +617,7 @@ static int v4l2_verify_file( const char *file )
 int fd = open( file, O_RDWR | O_NONBLOCK );
 if( -1 == fd ) {
-veejay_msg(0, "v4l2: cannot open '%s': %d, %s", file, errno, strerror(errno));
+veejay_msg(0, "v4l2: Cannot open '%s': %d, %s", file, errno, strerror(errno));
 return 0;
 }
@@ -641,7 +639,7 @@ int v4l2_poll( void *d , int nfds, int timeout )
 if( err == -1 ) {
 if( errno == EAGAIN || errno == EINTR ) {
 #ifdef STRICT_CHECKING
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: capture device busy, try again.");
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Capture device busy, try again.");
 #endif
 return 0;
 }
@@ -659,28 +657,21 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 return NULL;
 }
-char *flt = getenv( "VEEJAY_V4L2_ALL_IN_THREAD" );
-int flti = 1; //@ on by default
-if( flt ) {
-flti = atoi(flt);
-}
 int fd = open( file , O_RDWR );
+int i;
 veejay_msg(VEEJAY_MSG_INFO, "v4l2: Video4Linux2 device opened: %s", file );
 v4l2info *v = (v4l2info*) vj_calloc(sizeof(v4l2info));
 v->fd = fd;
-v->allinthread = flti;
+int dst_fmt = host_fmt;
 if( v->grey == 1 ) {
-v->buffer_filled = yuv_yuv_template( NULL,NULL,NULL, wid,hei, PIX_FMT_GRAY8 );
-} else {
-v->buffer_filled = yuv_yuv_template( NULL,NULL,NULL,wid,hei,host_fmt );
+dst_fmt = PIX_FMT_GRAY8;
 }
-veejay_msg(VEEJAY_MSG_INFO, "v4l2: output in %s", av_pix_fmt_descriptors[ v->buffer_filled->format ] );
+veejay_msg(VEEJAY_MSG_INFO, "v4l2: Host running in %s", av_pix_fmt_descriptors[ dst_fmt ] );
 if( -1 == vioctl( fd, VIDIOC_QUERYCAP, &(v->capability) ) ) {
 veejay_msg(0, "v4l2: VIDIOC_QUERYCAP failed with %s", strerror(errno));
@@ -730,7 +721,7 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 break;
 }
 } else {
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: preferring mmap() capture, override with VEEJAY_V4L2_CAPTURE_METHOD=0");
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Preferring mmap() capture, override with VEEJAY_V4L2_CAPTURE_METHOD=0");
 can_read = 0;
 cap_read = 1;
 }
@@ -777,7 +768,7 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 }
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: video channel %d '%s'",
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Selected video channel %d '%s'",
 chan, v->input.name );
 v4l2_enum_video_standards( v, norm );
@@ -785,8 +776,8 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 int cur_fmt = 0;
-if( v4l2_try_pix_format( v, host_fmt, wid, hei, &cur_fmt ) < 0 ) {
-if( v4l2_try_pix_format(v, v4l2_pixelformat2ffmpeg( cur_fmt ), wid,hei,&cur_fmt ) < 0 ) {
+if( v4l2_try_pix_format( v, host_fmt, wid, hei, &cur_fmt,-1 ) < 0 ) {
+if( v4l2_try_pix_format(v, host_fmt, wid,hei,&cur_fmt, cur_fmt ) < 0 ) {
 free(v);
 close(fd);
 return NULL;
@@ -794,7 +785,7 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 }
 if( v4l2_set_framerate( v, fps ) == -1 ) {
-veejay_msg(0, "v4l2: failed to set frame rate to %2.2f", fps );
+veejay_msg(VEEJAY_MSG_WARNING, "v4l2: Failed to set frame rate to %2.2f", fps );
 }
 if( v->rw == 0 ) {
@@ -863,7 +854,7 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 if( !v4l2_start_video_capture( v ) ) {
 if(cap_read) {
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: fallback read/write");
+veejay_msg(VEEJAY_MSG_WARNING, "v4l2: Fallback read/write");
 v->rw = 1;
 v4l2_free_buffers(v);
@@ -871,7 +862,7 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 close(v->fd);
 v->fd = open( file , O_RDWR );
 if(v->fd <= 0 ) {
-veejay_msg(0,"v4l2: cannot re-open device:%d,%s",errno,strerror(errno));
+veejay_msg(0,"v4l2: Cannot re-open device:%d,%s",errno,strerror(errno));
 free(v->buffers);
 free(v);
 return NULL;
@@ -910,14 +901,26 @@ v4l2_rw_fallback:
 v->sizeimage = v->format.fmt.pix.sizeimage;
 v->buffers = (bufs*) calloc( 1, sizeof(*v->buffers));
 veejay_msg(VEEJAY_MSG_DEBUG,"v4l2: read/write buffer size is %d bytes", v->format.fmt.pix.sizeimage );
-veejay_msg(VEEJAY_MSG_DEBUG,"v4l2: requested format %s, %d x %d",
-&(v->format.fmt.pix.pixelformat), v->format.fmt.pix.width,v->format.fmt.pix.height );
+if(v->is_jpeg) {
+veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: requested format %s -> %s, %d x %d",
+&(v->format.fmt.pix.pixelformat), av_pix_fmt_descriptors[ v->info->format ],
+v->format.fmt.pix.width, v->format.fmt.pix.height );
+}
 v->buffers[0].length = v->sizeimage;
 v->buffers[0].start = malloc( v->sizeimage * 2 );
 }
+for( i = 0; i < N_FRAMES; i ++ ) {
+v->frames[i] = yuv_yuv_template(NULL,NULL,NULL, wid,hei,dst_fmt);
+v->frames_done[i] = 0;
+}
+v->host_frame = yuv_yuv_template( NULL,NULL,NULL,wid,hei,host_fmt );
+v->frame_ready = 0;
+v->frameidx = 0;
 return v;
 }
@@ -992,21 +995,22 @@ static int v4l2_pull_frame_intern( v4l2info *v )
 break;
 }
-if( v->allinthread )
-{
 if( v->scaler == NULL )
 {
 sws_template templ;
 memset(&templ,0,sizeof(sws_template));
 templ.flags = yuv_which_scaler();
-v->scaler = yuv_init_swscaler( v->info,v->buffer_filled, &templ, yuv_sws_get_cpu_flags() );
+v->scaler = yuv_init_swscaler( v->info,v->frames[ 0 ], &templ, yuv_sws_get_cpu_flags() );
 }
-lock_(v->video_info);
-yuv_convert_and_scale( v->scaler, v->info, v->buffer_filled );
-unlock_(v->video_info);
-}
+yuv_convert_and_scale( v->scaler, v->info, v->frames[ v->frameidx ] );
+lock_(v->video_info);
+v->frames_done[v->frameidx] = 1;
+v->frame_ready = v->frameidx;
+v->frameidx = (v->frameidx + 1) % N_FRAMES;
+unlock_(v->video_info);
+signal_(v->video_info);
 if(!v->rw) {
 if( -1 == vioctl( v->fd, VIDIOC_QBUF, &(v->buffer))) {
@@ -1130,10 +1134,19 @@ void v4l2_close( void *d )
 if( v->scaler )
 yuv_free_swscaler( v->scaler );
-if( v->buffer_filled) {
-if( v->allinthread && !v->picture )
-free(v->buffer_filled->data[0]);
-free(v->buffer_filled);
+if( v->frames) {
+int c;
+if( !v->picture )
+{
+for ( i = 0; i < N_FRAMES; i ++ ) {
+for( c = 0; c < 4; c ++ ) {
+if( v->out_planes[c] )
+free(v->frames[i]->data[c]);
+}
+free(v->frames[i]);
+}
+}
+free(v->frames);
 }
 if(v->picture) {
@@ -1154,6 +1167,9 @@ void v4l2_close( void *d )
 v->codec = NULL;
 }
+if( v->host_frame )
+free(v->host_frame );
 if( v->buffers )
 free(v->buffers);
@@ -1692,22 +1708,26 @@ static void *v4l2_grabber_thread( void *v )
 return NULL;
 }
-if( v4l2->allinthread ) {
-v4l2->buffer_filled->data[0] = (uint8_t*) vj_malloc(sizeof(uint8_t) * (v4l2->buffer_filled->len * 3));
-v4l2->buffer_filled->data[1] = v4l2->buffer_filled->data[0] + v4l2->buffer_filled->len;
-v4l2->buffer_filled->data[2] = v4l2->buffer_filled->data[1] + v4l2->buffer_filled->len;
-veejay_msg(VEEJAY_MSG_INFO, "v4l2: allocated %d bytes for output buffer", (v4l2->buffer_filled->len*3));
-veejay_msg(VEEJAY_MSG_INFO, "v4l2: output buffer is %d x %d", v4l2->buffer_filled->width,v4l2->buffer_filled->height);
-}
-veejay_msg(VEEJAY_MSG_INFO, "v4l2: image processing (scale/convert) in (%s)",
-(v4l2->allinthread ? "thread" : "host" ));
+int j,c;
+int planes[4];
+yuv_plane_sizes( v4l2->frames[0], &(planes[0]),&(planes[1]),&(planes[2]),&(planes[3]) );
+for( j = 0; j < N_FRAMES; j ++ ) {
+for( c = 0; c < 4; c ++ ) {
+if( planes[c] > 0 ) {
+v4l2->frames[j]->data[c] = (uint8_t*) vj_malloc(sizeof(uint8_t) * planes[c] );
+veejay_msg(VEEJAY_MSG_DEBUG, "%d: allocated buffer[%d] = %d bytes", j,c,planes[c]);
+}
+}
+v4l2->frames_done[j] = 0;
+}
+for( c = 0; c < 4; c ++ )
+v4l2->out_planes[c] = planes[c];
 veejay_msg(VEEJAY_MSG_INFO, "v4l2: capture format: %d x %d (%s)",
 v4l2->info->width,v4l2->info->height, av_pix_fmt_descriptors[ v4l2->info->format ].name );
-veejay_msg(VEEJAY_MSG_INFO, "v4l2: output format: %d x %d (%s)",
-v4l2->buffer_filled->width,v4l2->buffer_filled->height, av_pix_fmt_descriptors[v4l2->buffer_filled->format]);
 i->grabbing = 1;
 i->retries = max_retries;
@@ -1770,12 +1790,12 @@ static void *v4l2_grabber_thread( void *v )
 int v4l2_thread_start( v4l2_thread_info *i )
 {
-pthread_attr_init( &(i->attr) );
-pthread_attr_setdetachstate( &(i->attr), PTHREAD_CREATE_DETACHED );
+// pthread_attr_init( &(i->attr) );
+// pthread_attr_setdetachstate( &(i->attr), PTHREAD_CREATE_DETACHED );
 int err = pthread_create( &(i->thread), NULL, v4l2_grabber_thread, i );
-pthread_attr_destroy( &(i->attr) );
+// pthread_attr_destroy( &(i->attr) );
 if( err == 0 ) {
 return 1;
@@ -1812,39 +1832,32 @@ int v4l2_thread_pull( v4l2_thread_info *i , VJFrame *dst )
 {
 v4l2info *v = (v4l2info*) i->v4l2;
 int status = 0;
-lock_(i);
-if(!v->allinthread && v->scaler == NULL ) {
-sws_template templ;
-memset(&templ,0,sizeof(sws_template));
-templ.flags = yuv_which_scaler();
-v->scaler = yuv_init_swscaler( v->info,dst, &templ, yuv_sws_get_cpu_flags() );
-}
-//@A
-if( v->info->data[0] == NULL )
-{
+lock_(i);
+//@ block until a buffer is captured
+int n;
+while( v->frames_done[v->frame_ready] < 1 ) {
+veejay_msg(VEEJAY_MSG_DEBUG, "waiting for frame %d to become ready",
+v->frame_ready );
+wait_(i);
+}
 unlock_(i);
-#ifdef STRICT_CHECKING
-veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: capture device not ready yet.");
-#endif
-return 1;
-}
-if(!v->allinthread) {
-yuv_convert_and_scale( v->scaler, v->info, dst );
-} else {
-veejay_memcpy( dst->data[0], v->buffer_filled->data[0], v->planes[0]);
+//@ copy buffer
+veejay_memcpy( dst->data[0], v->frames[ v->frame_ready ]->data[0], v->out_planes[0]);
 if(!v->grey) {
-veejay_memcpy( dst->data[1], v->buffer_filled->data[1], v->planes[1]);
-veejay_memcpy( dst->data[2], v->buffer_filled->data[2], v->planes[2]);
+veejay_memcpy( dst->data[1], v->frames[v->frame_ready]->data[1], v->out_planes[1]);
+veejay_memcpy( dst->data[2], v->frames[v->frame_ready]->data[2], v->out_planes[2]);
 } else {
 veejay_memset( dst->data[1], 127, dst->uv_len );
 veejay_memset( dst->data[2], 127, dst->uv_len );
 }
-}
-status = i->grabbing;
-unlock_(i);
+//@ "free" buffer
+lock_(i);
+v->frames_done[v->frameidx] = 0;
+status = i->grabbing;
+unlock_(i);
 return status;
 }
@@ -1869,6 +1882,7 @@ void *v4l2_thread_new( char *file, int channel, int host_fmt, int wid, int hei,
 pthread_mutexattr_settype(&type, PTHREAD_MUTEX_NORMAL);
 pthread_mutex_init(&(i->mutex), &type);
+pthread_cond_init( &(i->cond) , NULL );
 if( v4l2_thread_start( i ) == 0 ) {
 free(i->file);
@@ -1892,9 +1906,10 @@ void *v4l2_thread_new( char *file, int channel, int host_fmt, int wid, int hei,
 retries--;
 }
-if( i->stop )
+if( i->stop ) {
 pthread_mutex_destroy(&(i->mutex));
+pthread_cond_destroy(&(i->cond));
+}
 return i->v4l2;
 }
 #endif
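The pixelformat negotiation change above retries v4l2_try_pix_format with the palette the driver reported back (orig_palette) when the host palette is rejected, and reads the format back after setting it. Below is a minimal sketch of that try-then-read-back V4L2 negotiation; negotiate_format and its parameters are illustrative only, not part of the veejay code, and the software-conversion step is left to the caller.

/* Sketch: ask the driver for the wanted fourcc, accept whatever it
 * substitutes, and report the palette that was actually negotiated. */
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <string.h>
#include <stdio.h>

static int negotiate_format(int fd, unsigned int wanted_fourcc, int w, int h,
                            struct v4l2_format *out)
{
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = w;
    fmt.fmt.pix.height      = h;
    fmt.fmt.pix.field       = V4L2_FIELD_ANY;
    fmt.fmt.pix.pixelformat = wanted_fourcc;

    /* TRY_FMT may rewrite pixelformat/size to what the driver can really do;
     * some drivers only adjust on S_FMT, so always read back with G_FMT. */
    if (ioctl(fd, VIDIOC_TRY_FMT, &fmt) == -1)
        return -1;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1)
        return -1;
    if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1)
        return -1;

    if (fmt.fmt.pix.pixelformat != wanted_fourcc)
        fprintf(stderr, "driver negotiated %4.4s instead of %4.4s\n",
                (char *)&fmt.fmt.pix.pixelformat, (char *)&wanted_fourcc);

    *out = fmt;   /* caller sets up a software converter for this palette */
    return 0;
}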


@@ -37,6 +37,7 @@ typedef struct {
 pthread_mutex_t mutex;
 pthread_t thread;
 pthread_attr_t attr;
+pthread_cond_t cond;
 } v4l2_thread_info;
 void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wid, int hei, float fps, char norm );


@@ -76,7 +76,7 @@ static int video_driver_ = -1; // V4lUtils
 static void *unicap_data_= NULL;
 #endif
 //forward decl
-static int no_v4l2_threads_ = 1;
+static int no_v4l2_threads_ = 0;
 int _vj_tag_new_net(vj_tag *tag, int stream_nr, int w, int h,int f, char *host, int port, int p, int ty );
 int _vj_tag_new_yuv4mpeg(vj_tag * tag, int stream_nr, editlist * el);