diff --git a/veejay-current/veejay-server/libstream/v4l2utils.c b/veejay-current/veejay-server/libstream/v4l2utils.c
index 472271dc..31e81447 100644
--- a/veejay-current/veejay-server/libstream/v4l2utils.c
+++ b/veejay-current/veejay-server/libstream/v4l2utils.c
@@ -132,6 +132,7 @@ typedef struct
 	int grey;
 	int threaded;
 	uint32_t supported_pixel_formats[64];
+	int is_vloopback;
 } v4l2info;
 
 static struct {
@@ -263,7 +264,7 @@ static int v4l2_stop_video_capture( v4l2info *v )
 	return 1;
 }
 
-static int v4l2_pixelformat2ffmpeg( int pf )
+int v4l2_pixelformat2ffmpeg( int pf )
 {
 	switch(pf) {
 		case V4L2_PIX_FMT_RGB24:
@@ -301,16 +302,20 @@ static int v4l2_ffmpeg2v4l2( int pf)
 		case PIX_FMT_BGR24:
 			return V4L2_PIX_FMT_BGR24;
 		case PIX_FMT_BGR32:
-			return V4L2_PIX_FMT_BGR24;
+			return V4L2_PIX_FMT_BGR32;
 		case PIX_FMT_RGB32:
-			return V4L2_PIX_FMT_RGB24;
+			return V4L2_PIX_FMT_RGB32;
 		case PIX_FMT_YUV420P:
 		case PIX_FMT_YUVJ420P:
			return V4L2_PIX_FMT_YUV420;
+		case PIX_FMT_YUYV422:
+			return V4L2_PIX_FMT_YUYV;
 		case PIX_FMT_YUV422P:
 			return V4L2_PIX_FMT_YUV422P;
 		case PIX_FMT_YUVJ422P:
 			return V4L2_PIX_FMT_YUV422P;
+		case PIX_FMT_UYVY422:
+			return V4L2_PIX_FMT_UYVY;
 		case PIX_FMT_YUVJ444P:
 		case PIX_FMT_YUV444P:
 			return V4L2_PIX_FMT_YUV32;
@@ -606,6 +611,7 @@ static int v4l2_negotiate_pixel_format( v4l2info *v, int host_fmt, int wid, int
 	supported = v4l2_tryout_pixel_format( v, V4L2_PIX_FMT_YUYV, wid, hei );
 	if( supported ) {
 		*candidate = V4L2_PIX_FMT_YUYV;
+		return 1;
 	}
@@ -894,6 +900,11 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wi
 		v->capability.driver );
 	veejay_msg(VEEJAY_MSG_INFO, "v4l2: Capture card: %s",
 		v->capability.card );
 
+	if( strncasecmp( v->capability.card, "Dummy" , 5 ) == 0 ) {
+		v->is_vloopback = 1;
+		veejay_msg(VEEJAY_MSG_WARNING, "v4l2: This is a dummy device.");
+	}
+
 	veejay_msg(VEEJAY_MSG_INFO, "v4l2: Capture method: %s",
 		(can_read ? "read/write interface" : "mmap"));
diff --git a/veejay-current/veejay-server/libstream/vj-vloopback.c b/veejay-current/veejay-server/libstream/vj-vloopback.c
index 7438dd23..19f5cfb4 100644
--- a/veejay-current/veejay-server/libstream/vj-vloopback.c
+++ b/veejay-current/veejay-server/libstream/vj-vloopback.c
@@ -24,12 +24,6 @@
  */
 
-/*
-	vloopback pusher (using pipes)
-	If someone wants to implement mmap, add SIGIO to the signal catcher
-	and use mutexes for asynchronously handling IO. I am too lazy.
- */
-
 /* Changes:
  * Import patch by Xendarboh xendarboh@gmail.com to write to v4l2vloopback device
@@ -52,6 +46,8 @@
 #include 
 #include 
 #include 
+#include 
+#include 
 #ifdef HAVE_V4L
 #include 
 #endif
@@ -61,6 +57,7 @@
 #include 
 #include 
 #include 
+#include 
 #define VLOOPBACK_MMAP 0 // commented out
 #define VLOOPBACK_PIPE 1
 #define VLOOPBACK_N_BUFS 2
@@ -81,8 +78,12 @@ typedef struct
 	int vshift;
 	int hshift;
 	int iov;
+	void *scaler;
+	VJFrame *src1;
+	VJFrame *dst1;
 } vj_vloopback_t;
 
+extern int v4l2_pixelformat2ffmpeg( int pf );
 
 /* Open the vloopback device */
@@ -117,6 +118,7 @@ void *vj_vloopback_open(const char *device_name, int norm, int mode,
 #endif
 			v->hshift = 1;
 			v->vshift = 1;
+			veejay_msg( VEEJAY_MSG_DEBUG, "Using V4L2_PIX_FMT_YUV420");
 			break;
 		case FMT_422:
 		case FMT_422F:
@@ -127,6 +129,7 @@ void *vj_vloopback_open(const char *device_name, int norm, int mode,
 			v->palette = V4L2_PIX_FMT_YUV422P;
 #endif
 			v->vshift = 1;
+			veejay_msg(VEEJAY_MSG_DEBUG, "Using V4L2_PIX_FMT_YUV422P");
 			break;
 		default:
 #ifdef HAVE_V4L
@@ -156,6 +159,7 @@ void *vj_vloopback_open(const char *device_name, int norm, int mode,
 	return (void*) ret;
 }
 
+#define ROUND_UP8(num) (((num)+7)&~7)
 
 int vj_vloopback_get_mode( void *vloop )
 {
@@ -170,8 +174,8 @@ int vj_vloopback_start_pipe( void *vloop )
 	if(!v)
 		return 0;
 
-	int len = v->width * v->height ;
-	int uv_len = (v->width >> 1 ) * (v->height >> v->vshift);
+	int len = v->width * v->height;
+	int uv_len = (v->width >> v->hshift ) * (v->height >> v->vshift);
 
 	v->size = len + (2 * uv_len);
@@ -243,7 +247,7 @@ int vj_vloopback_start_pipe( void *vloop )
 	format.fmt.pix.width = v->width;
 	format.fmt.pix.height= v->height;
 	format.fmt.pix.pixelformat = v->palette;
-	format.fmt.pix.sizeimage = v->size;
+//	format.fmt.pix.sizeimage = v->size;
 	format.fmt.pix.field = V4L2_FIELD_NONE;
 	format.fmt.pix.bytesperline = v->width;
 	format.fmt.pix.colorspace = (v->jfif == 1 ? V4L2_COLORSPACE_JPEG : V4L2_COLORSPACE_SMPTE170M );
@@ -254,24 +258,48 @@ int vj_vloopback_start_pipe( void *vloop )
 			v->width,v->height,v->palette,v->jfif, strerror(errno) );
 		return 0;
 	}
-
+	if( format.fmt.pix.pixelformat != v->palette ) {
+		res = ioctl( v->fd, VIDIOC_G_FMT, &format );
+
+		int cap_palette = v4l2_pixelformat2ffmpeg( format.fmt.pix.pixelformat );
+		int src_palette = v4l2_pixelformat2ffmpeg( v->palette );
+
+		veejay_msg(VEEJAY_MSG_WARNING,
+			"Capture device cannot handle native format, using converter for %dx%d in %d",format.fmt.pix.width, format.fmt.pix.height, format.fmt.pix.pixelformat);
+		sws_template tmpl;
+		tmpl.flags = 1;
+		v->dst1 = yuv_yuv_template( NULL,NULL,NULL, format.fmt.pix.width,
+				format.fmt.pix.height, cap_palette );
+		v->src1 = yuv_yuv_template( NULL, NULL, NULL, v->width, v->height,
+				src_palette );
+
+		v->scaler = yuv_init_swscaler( v->src1,v->dst1,&tmpl,yuv_sws_get_cpu_flags() );
+
+		v->size = format.fmt.pix.sizeimage;
+	}
+
+	veejay_msg(VEEJAY_MSG_DEBUG,
+		"Configured vloopback device: %d x %d in palette %x, buffer is %d bytes.",
+		format.fmt.pix.width,format.fmt.pix.height,
+		format.fmt.pix.pixelformat,
+		format.fmt.pix.sizeimage );
 #endif
 
-	veejay_msg(VEEJAY_MSG_DEBUG,
-		"vloopback pipe (Y plane %d bytes, UV plane %d bytes) H=%d, V=%d, framesize=%d, palette=%d",
-		len,uv_len,v->vshift,v->hshift,v->size,v->palette );
-
-	v->out_buf = (uint8_t*) vj_malloc(sizeof(uint8_t) * v->size );
-
-	memset(v->out_buf, 0 , v->size );
+	long sze = ROUND_UP8( v->size );
+
+	v->out_buf = (uint8_t*) vj_malloc(sizeof(uint8_t) * sze );
 
 	if(!v->out_buf) {
-		veejay_msg(VEEJAY_MSG_ERROR, "Cant allocate sufficient memory for vloopback");
+		veejay_msg(VEEJAY_MSG_ERROR, "Cannot allocate sufficient memory for vloopback");
 		return 0;
 	}
+
+	veejay_memset(v->out_buf, 0 , sze );
+
+
 	return 1;
 }
@@ -299,16 +327,41 @@ int vj_vloopback_fill_buffer( void *vloop, uint8_t **frame )
 	if(!v)
 		return 0;
 
 	int len = v->width * v->height ;
-	int uv_len = (v->width >> v->hshift ) * (v->height >> 1);
+	int uv_len = (v->width >> v->hshift ) * (v->height >> v->vshift);
 
 	// copy data to linear buffer */
-	veejay_memcpy( v->out_buf, frame[0], len );
+	if( v->scaler ) {
+		uint8_t *p[3] = { v->out_buf, NULL, NULL };
+		v->src1->data[0] = frame[0];
+		v->src1->data[1] = frame[1];
+		v->src1->data[2] = frame[2];
+		switch( v->dst1->format ) {
+			case PIX_FMT_YUVJ444P:
+			case PIX_FMT_YUV444P:
+				p[1] = v->out_buf + len; p[2] = v->out_buf + (2*len); break;
+			case PIX_FMT_YUVJ422P:
+			case PIX_FMT_YUV422P:
+				p[1] = v->out_buf + len; p[2] = v->out_buf + len + uv_len; break;
+			case PIX_FMT_YUV420P:
+				p[1] = v->out_buf + len; p[2] = v->out_buf + len + (len/4); break;
+			default:
+				p[0] = v->out_buf; break;
+		}
+		v->dst1->data[0] = p[0];
+		v->dst1->data[1] = p[1];
+		v->dst1->data[2] = p[2];
 
-	veejay_memcpy( v->out_buf + len,
-			frame[1], uv_len );
-	veejay_memcpy( v->out_buf + len + uv_len,
-			frame[2], uv_len );
+		yuv_convert_and_scale( v->scaler, v->src1,v->dst1 );
+	}
+	else {
+		veejay_memcpy( v->out_buf, frame[0], len );
+
+		veejay_memcpy( v->out_buf + len,
+				frame[1], uv_len );
+		veejay_memcpy( v->out_buf + len + uv_len,
+				frame[2], uv_len );
+	}
 
 	return 1;
 }
@@ -525,6 +578,10 @@ void vj_vloopback_close( void *vloop )
 	vj_vloopback_t *v = (vj_vloopback_t*) vloop;
 	if(v)
 	{
+
+		if( v->scaler )
+			yuv_free_swscaler( v->scaler );
+
 		if(v->fd)
 			close( v->fd );
 		if(v->out_buf)
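
The converter fallback above hinges on one v4l2 detail: after VIDIOC_S_FMT the driver may silently substitute a different pixelformat, and only a follow-up VIDIOC_G_FMT reveals what was actually accepted. The standalone sketch below illustrates that negotiation pattern only; it is not part of the patch, the V4L2_BUF_TYPE_VIDEO_OUTPUT buffer type and the minimal error handling are assumptions, and the device path is supplied by the caller.

/* Sketch of the S_FMT / G_FMT negotiation used in vj_vloopback_start_pipe(). */
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int negotiate_output_format( const char *device, int w, int h, unsigned int wanted )
{
	int fd = open( device, O_RDWR );
	if( fd < 0 )
		return -1;

	struct v4l2_format fmt;
	memset( &fmt, 0, sizeof(fmt) );
	fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;     /* assumed buffer type for a loopback writer */
	fmt.fmt.pix.width = w;
	fmt.fmt.pix.height = h;
	fmt.fmt.pix.pixelformat = wanted;          /* e.g. V4L2_PIX_FMT_YUV422P */
	fmt.fmt.pix.field = V4L2_FIELD_NONE;

	if( ioctl( fd, VIDIOC_S_FMT, &fmt ) < 0 ) {
		close( fd );
		return -1;
	}

	/* the driver may have picked another palette; read back what it accepted */
	if( ioctl( fd, VIDIOC_G_FMT, &fmt ) < 0 ) {
		close( fd );
		return -1;
	}

	if( fmt.fmt.pix.pixelformat != wanted )
		printf( "driver substituted pixelformat %08x, software converter needed\n",
			fmt.fmt.pix.pixelformat );

	/* sizeimage now reflects the negotiated layout; the patch uses it to size out_buf */
	printf( "negotiated %dx%d, %u bytes per frame\n",
		fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.sizeimage );

	close( fd );
	return 0;
}
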
diff --git a/veejay-current/veejay-server/libyuv/yuvconv.c b/veejay-current/veejay-server/libyuv/yuvconv.c
index e10dee70..490c297d 100644
--- a/veejay-current/veejay-server/libyuv/yuvconv.c
+++ b/veejay-current/veejay-server/libyuv/yuvconv.c
@@ -80,6 +80,7 @@ static struct {
 { PIX_FMT_GRAY8, "PIX_FMT_GRAY8"},
 { PIX_FMT_RGB32_1, "PIX_FMT_RGB32_1"},
 { PIX_FMT_YUYV422, "PIX_FMT_YUYV422"},
+{ PIX_FMT_UYVY422, "PIX_FMT_UYVY422"},
 { 0 , NULL}
 };
@@ -386,8 +387,8 @@ VJFrame *yuv_yuv_template( uint8_t *Y, uint8_t *U, uint8_t *V, int w, int h, int
 			break;
 		case PIX_FMT_YUYV422:
 		case PIX_FMT_UYVY422:
-			f->uv_width = w>>1;
-			f->uv_height = f->height;
+			f->uv_width = 0;
+			f->uv_height = 0;
 			f->stride[0] = w * 2;
 			f->stride[1] = f->stride[2] = 0;
 			break;
@@ -397,6 +398,12 @@ VJFrame *yuv_yuv_template( uint8_t *Y, uint8_t *U, uint8_t *V, int w, int h, int
 			f->uv_width = 0; f->uv_height=0;
 			f->data[1] = NULL;f->data[2] = NULL;
 			break;
+		case PIX_FMT_BGR32:
+		case PIX_FMT_RGB32:
+			f->stride[0] = w * 4;
+			f->uv_width = 0; f->uv_height = 0;
+			f->data[1] = NULL; f->data[2] = NULL;
+			break;
 		default:
 #ifdef STRICT_CHECKING
 			yuv_pixstr( __FUNCTION__, "fmt", fmt );
@@ -415,7 +422,7 @@ VJFrame *yuv_rgb_template( uint8_t *rgb_buffer, int w, int h, int fmt )
 {
 #ifdef STRICT_CHECKING
 	assert( fmt == PIX_FMT_RGB24 || fmt == PIX_FMT_BGR24 ||
-		fmt == PIX_FMT_RGBA || fmt == PIX_FMT_RGB32_1 || fmt == PIX_FMT_RGB32 || fmt == PIX_FMT_ARGB);
+		fmt == PIX_FMT_RGBA || fmt == PIX_FMT_RGB32_1 || fmt == PIX_FMT_RGB32 || fmt == PIX_FMT_ARGB || fmt == PIX_FMT_BGR32);
 	assert( w > 0 );
 	assert( h > 0 );
 #endif
@@ -1177,7 +1184,7 @@ void yuv_convert_and_scale_from_rgb(void *sws , VJFrame *src, VJFrame *dst)
 {
 	vj_sws *s = (vj_sws*) sws;
 	int n = 3;
-	if( src->format == PIX_FMT_RGBA || src->format == PIX_FMT_BGRA || src->format == PIX_FMT_ARGB )
+	if( src->format == PIX_FMT_RGBA || src->format == PIX_FMT_BGRA || src->format == PIX_FMT_ARGB || src->format == PIX_FMT_BGR32 || src->format == PIX_FMT_RGB32 )
 		n = 4;
 	int src_stride[3] = { src->width*n,0,0};
 	int dst_stride[3] = { dst->width,dst->uv_width,dst->uv_width };
@@ -1191,7 +1198,7 @@ void yuv_convert_and_scale_rgb(void *sws , VJFrame *src, VJFrame *dst)
 	vj_sws *s = (vj_sws*) sws;
 	int n = 3;
 	if( dst->format == PIX_FMT_RGBA || dst->format == PIX_FMT_BGRA || dst->format == PIX_FMT_ARGB ||
-		dst->format == PIX_FMT_RGB32 )
+		dst->format == PIX_FMT_RGB32 || dst->format == PIX_FMT_BGR32 )
 		n = 4;
 
 	int src_stride[3] = { src->width,src->uv_width,src->uv_width };
@@ -1207,9 +1214,10 @@ void yuv_convert_and_scale(void *sws , VJFrame *src, VJFrame *dst)
 	int src_stride[3];
 	int dst_stride[3];
 
+/*
 	int n = 0;
 	if( src->format == PIX_FMT_RGBA || src->format == PIX_FMT_BGRA || src->format == PIX_FMT_ARGB ||
-		src->format == PIX_FMT_RGB32 ) {
+		src->format == PIX_FMT_RGB32 || src->format == PIX_FMT_BGR32 ) {
 		n = 4;
 	}
 	if( src->format == PIX_FMT_RGB24 || src->format == PIX_FMT_BGR24 ) {
@@ -1223,16 +1231,14 @@ void yuv_convert_and_scale(void *sws , VJFrame *src, VJFrame *dst)
 		src_stride[0] = src->width;
 		src_stride[1] = src->uv_width;
 		src_stride[2] = src->uv_width;
-	}
-
-	//@ dst is never rgb
-
+	}*/
+/*
 	dst_stride[0] = dst->width;
 	dst_stride[1] = dst->uv_width;
 	dst_stride[2] = dst->uv_width;
-
-	sws_scale( s->sws, src->data, src_stride, 0, src->height,
-		dst->data, dst_stride );
+*/
+	sws_scale( s->sws, src->data, src->stride, 0, src->height,
		dst->data, dst->stride );
 }
 
 void yuv_convert_and_scale_grey(void *sws , VJFrame *src, VJFrame *dst)
 {
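
With yuv_convert_and_scale() now driven by the strides stored in the VJFrame templates, a caller only has to fill in the plane pointers. The sketch below mirrors the call shapes visible in the hunks above and in vj-vloopback.c; it is illustrative only: the "libyuv/yuvconv.h" header path, the packed-YUYV destination and the omitted template cleanup are assumptions, not part of the patch.

/* Sketch: convert one planar 4:2:2 frame into a packed buffer via the template API. */
#include <stdint.h>
#include <libyuv/yuvconv.h>   /* assumed header for yuv_yuv_template() and friends */

static void convert_to_packed( uint8_t *y, uint8_t *u, uint8_t *v,
                               uint8_t *packed_out, int w, int h )
{
	sws_template tmpl;
	tmpl.flags = 1;

	/* yuv_yuv_template() fills data[], format and stride[] for each palette */
	VJFrame *src = yuv_yuv_template( y, u, v, w, h, PIX_FMT_YUV422P );
	VJFrame *dst = yuv_yuv_template( packed_out, NULL, NULL, w, h, PIX_FMT_YUYV422 );

	void *scaler = yuv_init_swscaler( src, dst, &tmpl, yuv_sws_get_cpu_flags() );

	/* sws_scale() inside uses the src->stride / dst->stride set up by the templates */
	yuv_convert_and_scale( scaler, src, dst );

	yuv_free_swscaler( scaler );
	/* freeing the VJFrame templates is left out; ownership rules are not shown in the diff */
}
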
diff --git a/veejay-current/veejay-server/veejay/liblavplayvj.c b/veejay-current/veejay-server/veejay/liblavplayvj.c
index cd1b93a8..45526eef 100644
--- a/veejay-current/veejay-server/veejay/liblavplayvj.c
+++ b/veejay-current/veejay-server/veejay/liblavplayvj.c
@@ -1108,14 +1108,12 @@ static int veejay_screen_update(veejay_t * info )
 		}
 	}
 
-#ifdef HAVE_V4L
 	if( info->vloopback )
 	{
 		vj_vloopback_fill_buffer( info->vloopback , frame );
 		if( vj_vloopback_get_mode( info->vloopback ))
 			vj_vloopback_write_pipe( info->vloopback );
 	}
-#endif
 
 	//@ FIXME: Both pixbuf and jpeg method is broken for screenshot
 #ifdef HAVE_JPEG
@@ -1212,6 +1210,8 @@ static int veejay_screen_update(veejay_t * info )
 			}
 			break;
+		case 5:
+			break;
 		default:
 			veejay_change_state(info,LAVPLAY_STATE_STOP);
 			return 0;
@@ -2322,6 +2322,28 @@ int veejay_init(veejay_t * info, int x, int y,char *arg, int def_tags, int gen_t
 				return -1;
 			}
 			break;
+		case 5:
+			veejay_msg(VEEJAY_MSG_INFO, "Entering vloopback streaming mode. ");
+			info->vloopback = vj_vloopback_open( info->y4m_file,
+					el->video_norm == 'p' ? 1: 0, 1,
+					info->video_output_width,
+					info->video_output_height,
+					info->pixel_format );
+			if( info->vloopback == NULL ) {
+				veejay_msg(0, "Cannot open %s as vloopback.",
+						info->y4m_file);
+				return -1;
+			}
+			if( vj_vloopback_start_pipe( info->vloopback ) <= 0 )
+			{
+				veejay_msg(0, "Unable to setup vloopback");
+				vj_vloopback_close( info->vloopback );
+				return -1;
+
+			}
+
+			break;
+
 		default:
 			veejay_msg(VEEJAY_MSG_ERROR,
 				"Invalid playback mode. Use -O [012345]");
@@ -2828,6 +2850,11 @@ static void *veejay_playback_thread(void *data)
 		vj_yuv4mpeg_free(info->y4m );
 		info->y4m = NULL;
 	}
+	if( info->vloopback ) {
+		vj_vloopback_close( info->vloopback );
+		info->vloopback = NULL;
+
+	}
 /*
 #ifdef HAVE_GL
 #ifndef X_DISPLAY_MISSING
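
For readers wiring this up elsewhere, the call order used by the new -O 5 path is: open the device, start the pipe, then per rendered frame fill the buffer and write it, and close on shutdown. The sketch below only restates that order with the call shapes visible in liblavplayvj.c; the device path, norm, dimensions, pixel-format value and the "libstream/vj-vloopback.h" header are placeholders and assumptions, not part of the patch.

/* Sketch of the vloopback lifecycle as driven by veejay_init() / veejay_screen_update(). */
#include <stdint.h>
#include <libstream/vj-vloopback.h>   /* assumed header for the vj_vloopback_* prototypes */

static void push_one_frame_example( uint8_t *frame[3] )
{
	void *loop = vj_vloopback_open( "/dev/video1",  /* placeholder device node         */
	                                1,              /* norm: 1 = PAL, 0 = NTSC         */
	                                1,              /* mode: 1 = pipe (write)          */
	                                720, 576,       /* output width / height           */
	                                1 );            /* host pixel format (placeholder) */
	if( loop == NULL )
		return;

	if( vj_vloopback_start_pipe( loop ) <= 0 ) {
		vj_vloopback_close( loop );
		return;
	}

	/* per rendered frame, as in veejay_screen_update() */
	vj_vloopback_fill_buffer( loop, frame );
	if( vj_vloopback_get_mode( loop ) )
		vj_vloopback_write_pipe( loop );

	vj_vloopback_close( loop );
}
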
diff --git a/veejay-current/veejay-server/veejay/veejay.c b/veejay-current/veejay-server/veejay/veejay.c
index 6d18ed45..a386a982 100644
--- a/veejay-current/veejay-server/veejay/veejay.c
+++ b/veejay-current/veejay-server/veejay/veejay.c
@@ -362,7 +362,7 @@ static int set_option(const char *name, char *value)
 	}
 #endif
 */
-		if( info->video_out < 0 || info->video_out > 4 ) {
+		if( info->video_out < 0 || info->video_out > 5 ) {
 			fprintf(stderr, "Select a valid output display driver\n");
 			exit(-1);
 		}
diff --git a/veejay-current/veejay-server/veejay/vj-event.c b/veejay-current/veejay-server/veejay/vj-event.c
index 1ffac15a..bec57f72 100644
--- a/veejay-current/veejay-server/veejay/vj-event.c
+++ b/veejay-current/veejay-server/veejay/vj-event.c
@@ -9596,6 +9596,7 @@ void vj_event_vloopback_stop( void *ptr, const char format[], va_list ap )
 {
 	veejay_t *v = (veejay_t*) ptr;
 	vj_vloopback_close( v->vloopback );
+	v->vloopback = NULL;
 }
 
 /*
 * Function that returns the options for a special sample (markers, looptype, speed ...) or