diff --git a/veejay-current/veejay-server/NEWS b/veejay-current/veejay-server/NEWS
index 57e1e823..139597f9 100644
--- a/veejay-current/veejay-server/NEWS
+++ b/veejay-current/veejay-server/NEWS
@@ -1,4 +1,2 @@
-Feb. 28,2011
-    Added support for Video4Linux 2 (beta)
-    Re-run configure with --without-v4l --with-v4l2 to enable v4l2 support.
-    This will become the default once the v4l2 driver is threaded.
+
+
diff --git a/veejay-current/veejay-server/configure.ac b/veejay-current/veejay-server/configure.ac
index b829bfa5..8809fe26 100644
--- a/veejay-current/veejay-server/configure.ac
+++ b/veejay-current/veejay-server/configure.ac
@@ -85,10 +85,10 @@ AC_ARG_WITH(samplerate,
 		[],
 		[with_samplerate=yes])
 AC_ARG_WITH(v4l2,
 		AC_HELP_STRING([--without-v4l2], [Do not use video4linux2]),
-		[with_v4l2=yes], [with_v4l2=no])
+		[with_v4l2=no], [with_v4l2=yes])
 AC_ARG_WITH(v4l,
 		AC_HELP_STRING([--without-v4l], [Do not use video4linux1]),
-		[], [with_v4l=yes])
+		[with_v4l=yes], [with_v4l=no])
 AC_ARG_WITH(xml2,
 		AC_HELP_STRING([--without-xml2], [Do not use the XML library for Gnome]),
 		[], [with_xml2=yes])
@@ -1085,7 +1085,7 @@ AC_MSG_NOTICE([ - QuickTime support : ${have_libquicktime} ])
 AC_MSG_NOTICE([ - Jack Audio Connection Kit : ${have_jack}])
 AC_MSG_NOTICE([ - Unicap Imaging : ${have_unicap} ])
 AC_MSG_NOTICE([ - Liblo OSC client : ${have_liblo}])
-AC_MSG_NOTICE([ - V4L2 : ${have_v4l2}])
+AC_MSG_NOTICE([ - V4L2 (default) : ${have_v4l2}])
 AC_MSG_NOTICE([ - V4L1 (default) : ${have_v4l}])
 #AC_MSG_NOTICE([ - libsamplerate : ${have_samplerate}])
diff --git a/veejay-current/veejay-server/libstream/v4l2utils.c b/veejay-current/veejay-server/libstream/v4l2utils.c
index 879c56b7..bde56fe0 100644
--- a/veejay-current/veejay-server/libstream/v4l2utils.c
+++ b/veejay-current/veejay-server/libstream/v4l2utils.c
@@ -60,6 +60,7 @@
 #include 
 #include 
 #include 
+#include 
 #include 
 #include 
 #include 
@@ -67,6 +68,10 @@
 #include 
 #include 
 #include 
+#include 
+#include 
+#include 
+//#include 
 
 typedef struct {
 	void *start;
@@ -91,10 +96,21 @@ typedef struct
 	VJFrame	*info;
 	void	*scaler;
 	int	planes[4];
-	
+	int	rw;
 	int	composite;
-	
+	int	is_jpeg;
+	int	sizeimage;
 	VJFrame	*dst;
+
+	uint8_t	*tmpbuf;
+
+	AVCodec *codec;
+	AVCodecContext *c;
+	AVFrame *picture;
+//	pthread_mutex_t mutex;
+//	pthread_t thread;
+//	pthread_attr_t attr;
+
 } v4l2info;
 
 static int	vioctl( int fd, int request, void *arg )
@@ -128,6 +144,10 @@ static int	v4l2_pixelformat2ffmpeg( int pf )
 			return PIX_FMT_YUV420P;
 		case V4L2_PIX_FMT_YUV32:
 			return PIX_FMT_YUV444P;
+		case V4L2_PIX_FMT_MJPEG:
+			return PIX_FMT_YUVJ422P; //@ FIXME
+		case V4L2_PIX_FMT_JPEG:
+			return PIX_FMT_YUVJ420P; //@ FIXME
 		default:
 			break;
 	}
@@ -161,29 +181,6 @@ static int	v4l2_ffmpeg2v4l2( int pf)
 	return V4L2_PIX_FMT_BGR24;
 }
 
-static int	v4l2_is_fixed_format( int fmt ) {
-	switch(fmt) {
-		case V4L2_PIX_FMT_MJPEG:
-		case V4L2_PIX_FMT_JPEG:
-			return 0;
-	}
-	return 1;
-}
-
-static int	v4l2_compressed_format(v4l2info *v) //@untested
-{
-	/*memset( v->compr , 0, sizeof(v->compr ));
-	if( -1 == vioctl( v->fd, VIDIOC_G_JPEGCOMP, &(v->compr)) ) {
-		return -1;
-	}
-
-	v->compr.quality = 0;
-	if( -1 == vioctl( v->fd, VIDIOC_S_JPEGCOMP, &(v->compr)) ) {
-		return -1;
-	}*/
-	return 1;
-}
-
 static int	v4l2_set_framerate( v4l2info *v , float fps ) //@untested
 {
 	struct v4l2_streamparm sfps;
@@ -198,7 +195,6 @@ static int	v4l2_set_framerate( v4l2info *v , float fps ) //@untested
 	return 1;
 }
 
-
 static int	v4l2_enum_video_standards( v4l2info *v, char norm )
 {
 	struct v4l2_input input;
@@ -328,7 +324,7 @@ static int	v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei )
 			(char*)&format.fmt.pix.pixelformat,
 			format.fmt.pix.width,
 			format.fmt.pix.height );
-	
+
 	if( format.fmt.pix.width != wid || format.fmt.pix.height != hei ) {
 		veejay_msg(VEEJAY_MSG_WARNING,"v4l2: adjusting resolution from %dx%d to %dx%d",
 				wid,hei,
@@ -336,7 +332,55 @@ static int	v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei )
 				format.fmt.pix.height );
 	}
 
-	if( format.fmt.pix.pixelformat != v4l2_pixel_format ) {
+
+	if( format.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG )
+	{
+		struct v4l2_jpegcompression jpegcomp;
+		ioctl(v->fd, VIDIOC_G_JPEGCOMP, &jpegcomp);
+		jpegcomp.jpeg_markers |= V4L2_JPEG_MARKER_DQT; // DQT
+		ioctl(v->fd, VIDIOC_S_JPEGCOMP, &jpegcomp);
+		v->is_jpeg = 1;
+		v->tmpbuf = (uint8_t*) vj_malloc(sizeof(uint8_t) * wid * hei * 3 );
+	}
+	else if( format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
+	{
+		struct v4l2_jpegcompression jpegcomp;
+		ioctl(v->fd, VIDIOC_G_JPEGCOMP, &jpegcomp);
+		jpegcomp.jpeg_markers |= V4L2_JPEG_MARKER_DQT; // DQT
+		ioctl(v->fd, VIDIOC_S_JPEGCOMP, &jpegcomp);
+		v->is_jpeg = 2;
+
+		v->codec = avcodec_find_decoder( CODEC_ID_MJPEG );
+		if(v->codec == NULL) {
+			veejay_msg(0, "Codec not found.");
+			return -1;
+		}
+
+		v->c = avcodec_alloc_context();
+		//v->c->codec_id = CODEC_ID_MJPEG;
+		v->c->width = format.fmt.pix.width;
+		v->c->height = format.fmt.pix.height;
+		v->picture = avcodec_alloc_frame();
+		v->picture->data[0] = vj_malloc(wid * hei + wid);
+		v->picture->data[1] = vj_malloc(wid * hei + wid);
+		v->picture->data[2] = vj_malloc(wid * hei + wid);
+		v->tmpbuf = (uint8_t*) vj_malloc(sizeof(uint8_t) * wid * hei * 3 );
+		if( v->codec->capabilities & CODEC_CAP_TRUNCATED)
+			v->c->flags |= CODEC_FLAG_TRUNCATED;
+
+		if( avcodec_open( v->c, v->codec ) < 0 )
+		{
+			veejay_msg(0, "Error opening codec");
+			free(v->picture->data[0]);
+			free(v->picture->data[1]);
+			free(v->picture->data[2]);
+			free(v->picture);
+			av_free(v->c);
+			free(v->tmpbuf);
+			return -1;
+		}
+
+	} else if( format.fmt.pix.pixelformat != v4l2_pixel_format ) {
		int pf = v4l2_pixelformat2ffmpeg( format.fmt.pix.pixelformat );
		if( pf == -1) {
			veejay_msg(VEEJAY_MSG_ERROR, "No support for palette %4.4s",
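The MJPEG branch above drives libavcodec through its pre-AVPacket interface (avcodec_alloc_context(), avcodec_open(), avcodec_decode_video()). For reference, here is a minimal, self-contained sketch of that same call sequence in isolation, assuming the same FFmpeg generation the patch targets; the buffer, its size and the helper name are hypothetical:

    /* Sketch: decode one MJPEG buffer with the legacy libavcodec API
     * (same generation as the hunk above). Error paths trimmed; the
     * caller allocates 'out' with avcodec_alloc_frame(). */
    #include <libavcodec/avcodec.h>

    static int mjpeg_decode_once( uint8_t *buf, int buf_size, AVFrame *out )
    {
        avcodec_register_all();                 /* normally done once at startup */

        AVCodec *codec = avcodec_find_decoder( CODEC_ID_MJPEG );
        if( codec == NULL )
            return -1;

        AVCodecContext *c = avcodec_alloc_context();   /* old-style allocator */
        if( avcodec_open( c, codec ) < 0 ) {
            av_free(c);
            return -1;
        }

        int got_picture = 0;
        /* pre-AVPacket entry point: (ctx, frame, &got_picture, data, size) */
        int used = avcodec_decode_video( c, out, &got_picture, buf, buf_size );

        avcodec_close(c);
        av_free(c);
        return ( used < 0 || !got_picture ) ? -1 : 0;
    }

In the patch the codec context is opened once in v4l2_try_pix_format() and kept for the life of the stream, which is also what keeps the decoder-owned picture buffers valid between frames; a one-shot helper like this sketch would have to copy the planes out before closing.
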
@@ -369,6 +413,7 @@ static int	v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei )
 			format.fmt.pix.height );
 
+
 	v->info = yuv_yuv_template( NULL,NULL,NULL,format.fmt.pix.width,
 			format.fmt.pix.height, v4l2_pixelformat2ffmpeg( format.fmt.pix.pixelformat ) );
 
 	yuv_plane_sizes( v->info, &(v->planes[0]),&(v->planes[1]),&(v->planes[2]),&(v->planes[3]) );
@@ -376,7 +421,7 @@ static int	v4l2_try_pix_format( v4l2info *v, int pixelformat, int wid, int hei )
 	return 1;
 }
 
-static void	v4l2_set_output_dimensions( v4l2info *v, void *src )
+static void	v4l2_set_output_pointers( v4l2info *v, void *src )
 {
 	uint8_t *map = (uint8_t*) src;
 	if( v->planes[0] > 0 ) {
@@ -433,18 +478,38 @@ static int	v4l2_channel_choose( v4l2info *v, const int pref_channel )
 	return other;
 }
 
-void	*v4l2open ( const char *file, const int input_channel, int host_fmt, int wid, int hei, float fps, char norm )
+static int	v4l2_verify_file( const char *file )
 {
+	struct stat st;
+	if( -1 == stat( file, &st )) {
+		veejay_msg(0, "v4l2: cannot identify '%s':%d, %s",file,errno,strerror(errno));
+		return 0;
+	}
+	if( !S_ISCHR(st.st_mode)) {
+		veejay_msg(0, "v4l2: '%s' is not a device", file);
+		return 0;
+	}
 	int fd = open( file, O_RDWR | O_NONBLOCK );
-	if( fd < 0 ) {
-		veejay_msg(0, "v4l2: unable to open capture device %s",file);
-		return NULL;
-	} else {
-		close(fd);
-		fd = open( file , O_RDWR );
+
+	if( -1 == fd ) {
+		veejay_msg(0, "v4l2: cannot open '%s': %d, %s", file, errno, strerror(errno));
+		return 0;
 	}
+	close(fd);
+
+	return 1;
+}
+
+void	*v4l2open ( const char *file, const int input_channel, int host_fmt, int wid, int hei, float fps, char norm )
+{
+	if(!v4l2_verify_file( file ) ) {
+		return NULL;
+	}
+
+	int fd = open( file , O_RDWR );
+
 	veejay_msg(VEEJAY_MSG_INFO, "v4l2: Video4Linux2 device opened: %s", file );
 
 	v4l2info *v = (v4l2info*) vj_calloc(sizeof(v4l2info));
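The rename from v4l2_set_output_dimensions() to v4l2_set_output_pointers() matches what the function actually does: it aims the VJFrame plane pointers at offsets inside one contiguous capture buffer, using the plane sizes computed by yuv_plane_sizes(). A minimal sketch of that layout for planar YUV 4:2:0, with a hypothetical output struct:

    /* Sketch: derive plane pointers from one packed capture buffer.
     * Planar YUV 4:2:0 is assumed here; veejay takes the real plane
     * sizes from yuv_plane_sizes(), this only shows the idea. */
    #include <stdint.h>
    #include <stddef.h>

    struct planes {
        uint8_t *data[3];
    };

    static void map_planes_420( uint8_t *buf, int w, int h, struct planes *p )
    {
        size_t y_size  = (size_t)w * h;        /* full-resolution luma */
        size_t uv_size = y_size / 4;           /* quarter-size chroma  */

        p->data[0] = buf;                      /* Y starts at the buffer  */
        p->data[1] = buf + y_size;             /* U follows the Y plane   */
        p->data[2] = buf + y_size + uv_size;   /* V follows the U plane   */
    }

No pixels are copied at this stage; the later yuv_convert_and_scale() call reads straight out of the mmap()ed or read() buffer through these pointers.
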
@@ -465,18 +530,55 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wid, int hei, float fps, char norm )
 		return NULL;
 	}
 
+	int can_stream = 1;
+	int can_read = 1;
+
+
 	if( (v->capability.capabilities & V4L2_CAP_STREAMING ) == 0 ) {
-		veejay_msg(0, "v4l2: %s does not support streaming capture", v->capability.card );
+		veejay_msg(VEEJAY_MSG_ERROR, "v4l2: %s does not support streaming capture", v->capability.card );
+		can_stream = 0;
+	}
+
+	if( (v->capability.capabilities & V4L2_CAP_READWRITE ) == 0 ) {
+		veejay_msg(VEEJAY_MSG_ERROR, "v4l2: %s does not support read/write interface.", v->capability.card);
+		can_read = 0;
+	}
+
+	if( can_stream == 0 && can_read == 0 ) {
+		veejay_msg(VEEJAY_MSG_ERROR, "v4l2: giving up on %s", v->capability.card);
 		close(fd);
 		free(v);
 		return NULL;
 	}
 
+	if( can_read && can_stream ) {
+		char *vio = getenv("VEEJAY_V4L2_CAPTURE_METHOD");
+		if(vio) {
+			int method = atoi(vio);
+			switch(method) {
+				case 0:
+					can_stream = 0;
+					break;
+				case 1:
+					can_read = 0;
+					break;
+			}
+		} else {
+			veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: preferring mmap() capture, override with VEEJAY_V4L2_CAPTURE_METHOD=0");
+			can_read = 0;
+		}
+	}
+
+	if( can_read && can_stream == 0)
+		v->rw = 1;
 
 	veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Capture driver: %s", v->capability.driver );
 	veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Capture card: %s", v->capability.card );
+	veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: Capture method: %s",
+			(can_read ? "read/write interface" : "mmap"));
+
 	//@ which video input ?
 	int chan = v4l2_channel_choose( v, input_channel );
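The capability probe above prefers mmap() streaming whenever the driver offers both interfaces and only consults VEEJAY_V4L2_CAPTURE_METHOD as an override (0 selects the read()/write() interface, 1 selects mmap()). The same decision, reduced to a small helper purely for illustration; the helper name is hypothetical and can_read/can_stream stand for the V4L2_CAP_* capability bits:

    /* Sketch: returns 1 when read()/write() capture should be used,
     * 0 for mmap() streaming. Mirrors the selection logic above. */
    #include <stdlib.h>

    static int use_readwrite_capture( int can_read, int can_stream )
    {
        if( can_read && !can_stream )
            return 1;                    /* only read() is available       */
        if( !can_read )
            return 0;                    /* only streaming is available    */

        const char *vio = getenv( "VEEJAY_V4L2_CAPTURE_METHOD" );
        if( vio && atoi(vio) == 0 )
            return 1;                    /* user explicitly forces read()  */
        return 0;                        /* default: prefer mmap()         */
    }

mmap() stays the default; the environment variable is only consulted when a device offers both interfaces.
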
@@ -536,93 +638,118 @@ void *v4l2open ( const char *file, const int input_channel, int host_fmt, int wid, int hei, float fps, char norm )
 			veejay_msg(0, "v4l2: failed to set frame rate to %2.2f", fps );
 	}
 
-	v->reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-	v->reqbuf.memory= V4L2_MEMORY_MMAP;
-	v->reqbuf.count = 32;
+	if( v->rw == 0 ) {
+		v->reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		v->reqbuf.memory= V4L2_MEMORY_MMAP;
+		v->reqbuf.count = 32;
 
-	if( -1 == vioctl( fd, VIDIOC_REQBUFS, &(v->reqbuf)) ) {
-		if( errno == EINVAL ) {
-			veejay_msg(0,"v4l2: No support for mmap streaming ?!");
-		} else {
-			veejay_msg(0,"v4l2: VIDIOC_REQBUFS failed with %s", strerror(errno));
-		}
-		close(fd);
-		free(v);
-		return NULL;
-	}
-
-	veejay_msg(VEEJAY_MSG_INFO, "v4l2: Card supports %d buffers", v->reqbuf.count );
-	v->buffers = (bufs*) calloc( v->reqbuf.count, sizeof(*v->buffers));
-
-	int i;
-	for( i = 0; i < v->reqbuf.count; i ++ ) {
-		memset( &(v->buffer), 0, sizeof(v->buffer));
-		v->buffer.type = v->reqbuf.type;
-		v->buffer.memory= V4L2_MEMORY_MMAP;
-		v->buffer.index = i;
-
-		if( -1 == vioctl( fd, VIDIOC_QUERYBUF, &(v->buffer)) ) {
-			veejay_msg(0, "v4l2: VIDIOC_QUERYBUF failed with %s",strerror(errno));
-			free(v->buffers);
-			free(v);
+		if( -1 == vioctl( fd, VIDIOC_REQBUFS, &(v->reqbuf)) ) {
+			if( errno == EINVAL ) {
+				veejay_msg(0,"v4l2: No support for mmap streaming ?!");
+			} else {
+				veejay_msg(0,"v4l2: VIDIOC_REQBUFS failed with %s", strerror(errno));
+			}
 			close(fd);
+			free(v);
 			return NULL;
 		}
 
-		v->buffers[i].length = v->buffer.length;
-		v->buffers[i].start = mmap( NULL,
+		veejay_msg(VEEJAY_MSG_INFO, "v4l2: Card supports %d buffers", v->reqbuf.count );
+		v->buffers = (bufs*) calloc( v->reqbuf.count, sizeof(*v->buffers));
+
+		int i;
+		for( i = 0; i < v->reqbuf.count; i ++ ) {
+			memset( &(v->buffer), 0, sizeof(v->buffer));
+			v->buffer.type = v->reqbuf.type;
+			v->buffer.memory= V4L2_MEMORY_MMAP;
+			v->buffer.index = i;
+
+			if( -1 == vioctl( fd, VIDIOC_QUERYBUF, &(v->buffer)) ) {
+				veejay_msg(0, "v4l2: VIDIOC_QUERYBUF failed with %s",strerror(errno));
+				free(v->buffers);
+				free(v);
+				close(fd);
+				return NULL;
+			}
+
+			v->buffers[i].length = v->buffer.length;
+			v->buffers[i].start = mmap( NULL,
 					v->buffer.length,
 					PROT_READ | PROT_WRITE,
 					MAP_SHARED,
 					fd,
 					v->buffer.m.offset );
-		if( MAP_FAILED == v->buffers[i].start ) {
+			if( MAP_FAILED == v->buffers[i].start ) {
 	//		int k;
 	//		for( k = 0; k < i; k ++ )
 	//			munmap( v->buffer[k].start, v->buffer[k].length );
-			free(v->buffers);
-			free(v);
-			close(fd);
-			return NULL;
+				free(v->buffers);
+				free(v);
+				close(fd);
+				return NULL;
+			}
+
 		}
-	}
 
+		for( i = 0; i < v->reqbuf.count ; i ++ ) {
+			veejay_memset( &(v->buffer),0,sizeof(v->buffer));
+			v->buffer.type = v->reqbuf.type;
+			v->buffer.memory=V4L2_MEMORY_MMAP;
+			v->buffer.index = i;
-	for( i = 0; i < v->reqbuf.count ; i ++ ) {
-		veejay_memset( &(v->buffer),0,sizeof(v->buffer));
-		v->buffer.type = v->reqbuf.type;
-		v->buffer.memory=V4L2_MEMORY_MMAP;
-		v->buffer.index = i;
-
-	if( -1 == vioctl( fd, VIDIOC_QBUF, &(v->buffer)) ) {
-		veejay_msg(0, "v4l2: first VIDIOC_QBUF failed with %s", strerror(errno));
+			if( -1 == vioctl( fd, VIDIOC_QBUF, &(v->buffer)) ) {
+				veejay_msg(0, "v4l2: first VIDIOC_QBUF failed with %s", strerror(errno));
 	//		int k;
 	//		for( k = 0; k < v->reqbuf.count; k ++ )
 	//			munmap( v->buffer[k].start, v->buffer[k].length );
-			free(v->buffers);
+				free(v->buffers);
+				free(v);
+				close(fd);
+				return NULL;
+			}
+
+		}
+
+		if( -1 == vioctl( fd, VIDIOC_STREAMON, &(v->buftype)) ) {
+
+
+			veejay_msg(0, "v4l2: VIDIOC_STREAMON failed with %s", strerror(errno));
+			free(v->buffers);
+			free(v);
+			close(fd);
+			return NULL;
+		}
+	} else {
+		v->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		v->format.fmt.pix.width = wid;
+		v->format.fmt.pix.height = hei;
+		//format.fmt.pix.pixelformat;
+		//format.fmt.pix.field
+		//
+		if( -1 == vioctl( v->fd, VIDIOC_S_FMT, &(v->format) ) ) {
+			veejay_msg(0, "V4l2: VIDIOC_S_FMT failed with %s", strerror(errno));
+			close(v->fd);
 			free(v);
-			close(fd);
 			return NULL;
 		}
-	}
+		int min = v->format.fmt.pix.width * 2;
+		if( v->format.fmt.pix.bytesperline < min )
+			v->format.fmt.pix.bytesperline = min;
+		min = v->format.fmt.pix.bytesperline * v->format.fmt.pix.height;
+		if( v->format.fmt.pix.sizeimage < min )
+			v->format.fmt.pix.sizeimage = min;
 
-	if( -1 == vioctl( fd, VIDIOC_STREAMON, &(v->buftype)) ) {
-		veejay_msg(0, "v4l2: VIDIOC_STREAMON failed with %s", strerror(errno));
-	//	int k;
-	//	for( k = 0; k < v->reqbuf.count; k ++ )
-	//		munmap( v->buffer[k].start );
-
-		free(v->buffers);
-		free(v);
-		close(fd);
-		return NULL;
-	}
+		v->sizeimage = v->format.fmt.pix.sizeimage;
+		v->buffers = (bufs*) calloc( 1, sizeof(*v->buffers));
+		veejay_msg(VEEJAY_MSG_DEBUG,"v4l2: read/write buffer size is %d bytes", v->format.fmt.pix.sizeimage );
+		v->buffers[0].length = v->sizeimage;
+		v->buffers[0].start = malloc( v->sizeimage * 2 );
+	}
 
-	v->fd = fd;
 	return v;
 }
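In the read()/write() branch above only type, width and height are filled in before VIDIOC_S_FMT (the pixelformat and field assignments are still commented out), so the driver keeps whatever pixel format is currently configured. For comparison, a fully populated request would look roughly like the following sketch; the pixel format and field order chosen here are example values, not something the patch requires:

    /* Sketch: a fully populated VIDIOC_S_FMT request for the read() path. */
    #include <linux/videodev2.h>
    #include <string.h>
    #include <sys/ioctl.h>

    static int set_capture_format( int fd, int wid, int hei )
    {
        struct v4l2_format fmt;
        memset( &fmt, 0, sizeof(fmt) );

        fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width       = wid;
        fmt.fmt.pix.height      = hei;
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;      /* example only */
        fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;  /* example only */

        if( ioctl( fd, VIDIOC_S_FMT, &fmt ) == -1 )
            return -1;

        /* the driver writes the values it actually accepted back into fmt */
        return 0;
    }

VIDIOC_S_FMT returns the driver-adjusted values in the struct, which is why the hunk above clamps bytesperline and sizeimage afterwards: it guards against drivers that report values that are too small for the requested resolution.
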
@@ -635,7 +762,12 @@ static double	calc_tc( struct v4l2_timecode *tc, float fps )
 	return (double) tc->frames / fps;
 }
 
-int	v4l2_pull_frame(void *vv,VJFrame *dst) {
+int	v4l2_pull_frame(void *vv,VJFrame *dst)
+{
+	void *src = NULL;
+	int length = 0;
+	int n = 0;
+
 	v4l2info *v = (v4l2info*) vv;
 	if( v->scaler == NULL ) {
 		sws_template templ;
@@ -643,18 +775,67 @@ int v4l2_pull_frame(void *vv,VJFrame *dst) {
 		templ.flags = yuv_which_scaler();
 		v->scaler = yuv_init_swscaler( v->info,dst, &templ, yuv_sws_get_cpu_flags() );
 	}
-	if( -1 == vioctl( v->fd, VIDIOC_DQBUF, &(v->buffer))) {
-		veejay_msg(0, "v4l2: VIDIOC_DQBUF: %s", strerror(errno));
-		return 0;
+
+	if( v->rw == 0 ) {
+
+		if( -1 == vioctl( v->fd, VIDIOC_DQBUF, &(v->buffer))) {
+			veejay_msg(0, "v4l2: VIDIOC_DQBUF: %s", strerror(errno));
+			return 0;
+		}
+
+		src = v->buffers[ v->buffer.index ].start;
+		length = v->buffers[v->buffer.index].length;
+	}
+	else {
+		length = v->buffers[0].length;
+		src = v->buffers[0].start;
+
+		n = read( v->fd, src, length);
+		if( -1 == n ) {
+			switch(errno) {
+				case EAGAIN:
+					return 1;
+				default:
+					veejay_msg(0,"v4l2: error while reading from capture device: %s", strerror(errno));
+					return 0;
+			}
+		}
 	}
 
-	void *src = v->buffers[ v->buffer.index ].start;
-	v4l2_set_output_dimensions( v, src );
-	yuv_convert_and_scale( v->scaler, v->info, dst );
+	int got_picture = 0;
+
+	switch(v->is_jpeg) {
+		case 1:
+			v4l2_set_output_pointers(v,v->tmpbuf);
+			length = decode_jpeg_raw( src, n, 0,0, v->info->width,v->info->height,v->info->data[0],v->info->data[1],v->info->data[2] );
+			if( length == 0 ) { //@ success
+				length = 1;
+			}
+			break;
+		case 2:
+			length = avcodec_decode_video( v->c, v->picture, &got_picture, v->tmpbuf,src );
+			if( length == -1 ) {
+				veejay_msg(0,"v4l2: error while decoding frame");
+				return 0;
+			}
+			v->info->data[0] = v->picture->data[0];
+			v->info->data[1] = v->picture->data[1];
+			v->info->data[2] = v->picture->data[2];
+
+			break;
+		default:
+			v4l2_set_output_pointers(v,src);
+			break;
+	}
+
+	if( length > 0 )
+		yuv_convert_and_scale( v->scaler, v->info, dst );
 
-	if( -1 == vioctl( v->fd, VIDIOC_QBUF, &(v->buffer))) {
-		veejay_msg(0, "v4l2: VIDIOC_QBUF failed with %s", strerror(errno));
+	if(!v->rw) {
+		if( -1 == vioctl( v->fd, VIDIOC_QBUF, &(v->buffer))) {
+			veejay_msg(0, "v4l2: VIDIOC_QBUF failed with %s", strerror(errno));
 		}
+	}
 
 	return 1;
 }
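In v4l2_pull_frame() an EAGAIN from read() is answered by returning 1, so the previous frame is shown again. That is fine for a blocking descriptor; if the capture fd were opened non-blocking, a small poll() wrapper could wait for data instead of re-serving stale frames. This is a sketch of that alternative, not something the patch does; the timeout is arbitrary:

    /* Sketch: wait for the capture fd before read()ing. */
    #include <poll.h>
    #include <unistd.h>
    #include <errno.h>
    #include <stddef.h>
    #include <stdint.h>

    static ssize_t read_frame_blocking( int fd, uint8_t *dst, size_t len )
    {
        struct pollfd pfd = { .fd = fd, .events = POLLIN };

        for( ;; ) {
            int r = poll( &pfd, 1, 40 /* ms, roughly one PAL frame */ );
            if( r == -1 && errno == EINTR )
                continue;                 /* interrupted, retry          */
            if( r <= 0 )
                return -1;                /* error or timeout            */

            ssize_t n = read( fd, dst, len );
            if( n == -1 && errno == EAGAIN )
                continue;                 /* spurious wakeup, poll again */
            return n;
        }
    }

A real implementation would derive the timeout from the negotiated frame rate rather than hard-coding it.
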
@@ -664,12 +845,14 @@ void	v4l2_close( void *d )
 	v4l2info *v = (v4l2info*) d;
 	int i;
 
-	if( -1 == vioctl( v->fd, VIDIOC_STREAMOFF, &(v->buftype)) ) {
-		veejay_msg(0, "v4l2: VIDIOC_STREAMOFF failed with %s", strerror(errno));
-	}
+	if( v->rw == 0 ) {
+		if( -1 == vioctl( v->fd, VIDIOC_STREAMOFF, &(v->buftype)) ) {
+			veejay_msg(0, "v4l2: VIDIOC_STREAMOFF failed with %s", strerror(errno));
+		}
 
-	for( i = 0; i < v->reqbuf.count; i ++ ) {
-		munmap( v->buffers[i].start, v->buffers[i].length );
+		for( i = 0; i < v->reqbuf.count; i ++ ) {
+			munmap( v->buffers[i].start, v->buffers[i].length );
+		}
 	}
 
 	close(v->fd);
@@ -678,6 +861,26 @@ void	v4l2_close( void *d )
 	if(v->dst)
 		free(v->dst);
 
+	if(v->picture) {
+		free(v->picture->data[0]);
+		free(v->picture->data[1]);
+		free(v->picture->data[2]);
+		free(v->picture);
+	}
+	if(v->c) {
+		av_free(v->c);
+	}
+	if(v->tmpbuf) {
+		free(v->tmpbuf);
+	}
+
+	if(v->codec) {
+		avcodec_close(v->codec);
+		v->codec = NULL;
+	}
+
+	if( v->buffers )
+		free(v->buffers);
 }
 
@@ -1095,19 +1298,68 @@ int	v4l2_num_devices()
 	return 4;
 }
 
-char	**v4l2_get_device_list()
+static char	**v4l2_dummy_list()
 {
+
 	const char *list[] = {
-		"/dev/v4l/video0",
-		"/dev/v4l/video1",
-		"/dev/v4l/video2",
-		"/dev/v4l/video3",
+		"/dev/video0",
+		"/dev/video1",
+		"/dev/video2",
+		"/dev/video3",
+		"/dev/video4",
+		"/dev/video5",
+		"/dev/video6",
+		"/dev/video7",
 		NULL
 	};
 	char **dup = (char**) malloc(sizeof(char*)*5);
 	int i;
 	for( i = 0; list[i] != NULL ; i ++ )
 		dup[i] = strdup( list[i]);
+	veejay_msg(VEEJAY_MSG_DEBUG, "Using dummy video device list");
 	return dup;
 }
+
+char	**v4l2_get_device_list()
+{
+
+	DIR *dir;
+	struct dirent *dirp;
+	const char prefix[] = "/sys/class/video4linux/";
+	const char v4lprefix[] = "/dev/";
+	if( (dir = opendir( prefix )) == NULL ) {
+		veejay_msg(VEEJAY_MSG_WARNING,"Failed to open '%s':%d, %s", prefix, errno,strerror(errno));
+		return v4l2_dummy_list();
+	}
+
+	char *list[255];
+	int n_devices = 0;
+
+	memset(list,0,sizeof(list));
+
+	while((dirp = readdir(dir)) != NULL) {
+		if(strncmp( dirp->d_name, "video", 5 ) != 0)
+			continue;
+		list[n_devices] = strdup( dirp->d_name );
+		n_devices ++;
+	}
+	closedir(dir);
+
+	if( n_devices == 0 ) {
+		veejay_msg(VEEJAY_MSG_WARNING,"No devices found!");
+		return v4l2_dummy_list();
+	}
+
+	int i;
+	char **files = (char**) malloc(sizeof(char*) * (n_devices + 1));
+	memset( files, 0, sizeof(char) * (n_devices+1));
+
+	for( i = 0;i < n_devices; i ++ ) {
+		files[i] = (char*) malloc(sizeof(char) * (strlen(list[i]) + 5));
+		sprintf(files[i],"%s%s",v4lprefix, list[i]);
+		veejay_msg(VEEJAY_MSG_DEBUG, "Found %s", files[i]);
+	}
+	return files;
+
+}
 #endif
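The new v4l2_get_device_list() walks /sys/class/video4linux/ and maps every videoN entry back to its /dev node, falling back to the static v4l2_dummy_list() when sysfs is unavailable. A compact variant of the same scan is sketched below, with the allocation sizes spelled out and the result NULL-terminated for the caller; the function name and the fixed 255-entry cap are just illustrative:

    /* Sketch: enumerate /dev/videoN nodes via sysfs. */
    #include <dirent.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    static char **scan_video_devices( void )
    {
        DIR *dir = opendir( "/sys/class/video4linux/" );
        if( dir == NULL )
            return NULL;                         /* caller falls back to a static list */

        char **files = calloc( 256, sizeof(char *) );   /* up to 255 entries + NULL */
        int n = 0;

        struct dirent *d;
        while( files && n < 255 && (d = readdir(dir)) != NULL ) {
            if( strncmp( d->d_name, "video", 5 ) != 0 )
                continue;
            size_t len = strlen("/dev/") + strlen(d->d_name) + 1;  /* prefix + name + NUL */
            files[n] = malloc( len );
            if( files[n] == NULL )
                break;
            snprintf( files[n], len, "/dev/%s", d->d_name );
            n++;
        }
        closedir( dir );
        return files;    /* NULL-terminated; caller frees entries and the array */
    }

As in the patch, entries come back in readdir() order, so a caller that wants /dev/video0 first still has to sort the list itself.
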
diff --git a/veejay-current/veejay-server/libstream/vj-tag.c b/veejay-current/veejay-server/libstream/vj-tag.c
index 15b22a47..8e0f7c33 100644
--- a/veejay-current/veejay-server/libstream/vj-tag.c
+++ b/veejay-current/veejay-server/libstream/vj-tag.c
@@ -398,11 +398,9 @@ int _vj_tag_new_unicap( vj_tag * tag, int stream_nr, int width, int height, int
 	{
 		return 0;
 	}
-#ifdef HAVE_V4L2
-	snprintf(refname,sizeof(refname), "/dev/v4l/video%d",device_num );
-#else
+
 	snprintf(refname,sizeof(refname), "/dev/video%d",device_num ); // freq->device_num
-#endif
+
 	switch(norm) {
 		case 'P':
 		case 'p':
diff --git a/veejay-current/veejay-server/veejay/jpegutils.c b/veejay-current/veejay-server/veejay/jpegutils.c
index 280ecfec..ff9aec98 100644
--- a/veejay-current/veejay-server/veejay/jpegutils.c
+++ b/veejay-current/veejay-server/veejay/jpegutils.c
@@ -429,7 +429,45 @@ static void guarantee_huff_tables(j_decompress_ptr dinfo)
 #endif /* ...'std' Huffman table generation */
 
+jpeghdr_t *decode_jpeg_raw_hdr(unsigned char *jpeg_data, int len)
+{
+   int numfields, hsf[3], vsf[3], field, yl, yc, x, y =
+       0, i, xsl, xsc, xs, xd, hdown;
+   JSAMPROW row0[16] = { buf0[0], buf0[1], buf0[2], buf0[3],
+       buf0[4], buf0[5], buf0[6], buf0[7],
+       buf0[8], buf0[9], buf0[10], buf0[11],
+       buf0[12], buf0[13], buf0[14], buf0[15]
+   };
+   JSAMPROW row1[8] = { buf1[0], buf1[1], buf1[2], buf1[3],
+       buf1[4], buf1[5], buf1[6], buf1[7]
+   };
+   JSAMPROW row2[16] = { buf2[0], buf2[1], buf2[2], buf2[3],
+       buf2[4], buf2[5], buf2[6], buf2[7]
+   };
+   JSAMPROW row1_444[16], row2_444[16];
+   JSAMPARRAY scanarray[3] = { row0, row1, row2 };
+
+   struct jpeg_decompress_struct dinfo;
+   jpeg_create_decompress(&dinfo);
+
+   jpeg_buffer_src(&dinfo, jpeg_data, len);
+
+   jpeg_read_header(&dinfo, TRUE);
+
+   jpeghdr_t *j = (jpeghdr_t*) malloc(sizeof(jpeghdr_t));
+   j->jpeg_color_space= dinfo.jpeg_color_space;
+   j->width = dinfo.image_width;
+   j->height= dinfo.image_height;
+   j->num_components = dinfo.num_components;
+   j->ccir601 = dinfo.CCIR601_sampling;
+   j->version[0] = dinfo.JFIF_major_version;
+   j->version[1] = dinfo.JFIF_minor_version;
+
+   jpeg_destroy_decompress(&dinfo);
+
+   return j;
+}
 /*
  * jpeg_data: Buffer with jpeg data to decode
  * len:       Length of buffer
@@ -501,7 +539,6 @@ int decode_jpeg_raw(unsigned char *jpeg_data, int len,
       vsf[i] = dinfo.comp_info[i].v_samp_factor;
    }
 
-   mjpeg_error( "Sampling factors, hsf=(%d, %d, %d) vsf=(%d, %d, %d) !", hsf[0], hsf[1], hsf[2], vsf[0], vsf[1], vsf[2]);
    if ((hsf[0] != 2 && hsf[0] != 1) || hsf[1] != 1 || hsf[2] != 1 ||
       (vsf[0] != 1 && vsf[0] != 2) || vsf[1] != 1 || vsf[2] != 1) {
      mjpeg_error
diff --git a/veejay-current/veejay-server/veejay/jpegutils.h b/veejay-current/veejay-server/veejay/jpegutils.h
index 030d22ab..b36029cd 100644
--- a/veejay-current/veejay-server/veejay/jpegutils.h
+++ b/veejay-current/veejay-server/veejay/jpegutils.h
@@ -53,6 +53,17 @@ int encode_jpeg_raw(unsigned char *jpeg_data, int len, int quality,int dct,
                     int itype, int ctype, int width, int height,
                     unsigned char *raw0, unsigned char *raw1,
                     unsigned char *raw2);
+
+typedef struct {
+	int jpeg_color_space;
+	int width;
+	int height;
+	int num_components;
+	int ccir601;
+	int version[2];
+} jpeghdr_t;
+
+jpeghdr_t *decode_jpeg_raw_hdr(unsigned char *jpeg_data, int len);
 /*
 void jpeg_skip_ff (j_decompress_ptr cinfo);
 */
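decode_jpeg_raw_hdr() gives callers a cheap way to inspect a JPEG frame's geometry and colour space without running the full decode. Nothing in this patch calls it yet; a typical use would look like the sketch below, where the probe function and its caller are hypothetical and the buffer would be one frame pulled from the capture device:

    /* Sketch: probe a captured JPEG buffer before deciding how to decode it. */
    #include <stdio.h>
    #include <stdlib.h>
    #include "jpegutils.h"

    static int probe_jpeg_frame( unsigned char *jpeg_data, int len )
    {
        jpeghdr_t *hdr = decode_jpeg_raw_hdr( jpeg_data, len );
        if( hdr == NULL )
            return -1;

        printf( "jpeg frame: %dx%d, %d components, colorspace %d, JFIF %d.%d\n",
                hdr->width, hdr->height, hdr->num_components,
                hdr->jpeg_color_space, hdr->version[0], hdr->version[1] );

        free( hdr );   /* the header struct is malloc()ed by the callee */
        return 0;
    }

The returned jpeghdr_t is owned by the caller; the capture path itself still calls decode_jpeg_raw() directly, so this header probe appears intended for sizing output planes from the stream before decoding begins.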