From 758fca3a67ebb87e879b07a7c09b34f04ae780e6 Mon Sep 17 00:00:00 2001 From: Niels Elburg Date: Mon, 19 Sep 2005 17:48:07 +0000 Subject: [PATCH] Cleanup debug statements,fixed veejay commandline parameter '-Y' , fixed pixel format detection, Added more decoders , Fixed recording duration in both sample and stream recorders, Fixed YV12 YV16 formats (auto conversion), Fixed -d and -Y , Slight improvements to RGB <-> YCBCR conversion git-svn-id: svn://code.dyne.org/veejay/trunk@396 eb8d1916-c9e9-0310-b8de-cf0c9472ead5 --- veejay-current/gveejay/callback.c | 2 +- veejay-current/libel/lav_io.c | 15 +- veejay-current/libel/vj-avcodec.c | 113 +++++--- veejay-current/libel/vj-avcodec.h | 3 +- veejay-current/libel/vj-el.c | 322 ++++++++++++++------- veejay-current/libel/vj-el.h | 2 +- veejay-current/libsamplerec/samplerecord.c | 5 +- veejay-current/libstream/vj-tag.c | 7 +- veejay-current/libvje/effects/common.h | 48 ++- veejay-current/veejay/liblavplayvj.c | 38 +-- veejay-current/veejay/vj-perform.c | 22 +- veejay-current/veejay/vj-sdl.c | 8 +- 12 files changed, 390 insertions(+), 195 deletions(-) diff --git a/veejay-current/gveejay/callback.c b/veejay-current/gveejay/callback.c index 4d3507ba..55f6a1be 100644 --- a/veejay-current/gveejay/callback.c +++ b/veejay-current/gveejay/callback.c @@ -866,7 +866,7 @@ void on_stream_recordstart_clicked(GtkWidget *widget, gpointer user_data) "%d %d", nframes, autoplay ); - +fprintf(stderr, "sendvims %d:%d %d;", VIMS_STREAM_REC_START,nframes,autoplay); gchar *time1 = format_time( nframes ); vj_msg(VEEJAY_MSG_INFO, "Record duration: %s", time1); g_free(time1); diff --git a/veejay-current/libel/lav_io.c b/veejay-current/libel/lav_io.c index 55ff95b7..2fe0f447 100644 --- a/veejay-current/libel/lav_io.c +++ b/veejay-current/libel/lav_io.c @@ -786,7 +786,8 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->has_audio = (AVI_audio_bits(lav_fd->avi_fd)>0 && AVI_audio_format(lav_fd->avi_fd)==WAVE_FORMAT_PCM); video_comp = AVI_video_compressor(lav_fd->avi_fd); - veejay_msg(VEEJAY_MSG_DEBUG, "Video compressor [%s]",video_comp); + if(video_comp == NULL || strlen(video_comp) <= 0) + { if(lav_fd) free(lav_fd); return 0;} } else if( AVI_errno==AVI_ERR_NO_AVI || AVI_errno == AVI_ERR_READ ) { @@ -820,13 +821,14 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) #ifdef USE_GDK_PIXBUF } #endif - if(ret == 0) + if(ret == 0 || video_comp == NULL) { free(lav_fd); internal_error = ERROR_FORMAT; /* Format not recognized */ veejay_msg(VEEJAY_MSG_ERROR, "Unable to load file '%s'", filename); return 0; } + } lav_fd->bps = (lav_audio_channels(lav_fd)*lav_audio_bits(lav_fd)+7)/8; @@ -838,7 +840,6 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->MJPG_chroma = (output_yuv == 1 ? CHROMA420: CHROMA422 ); lav_fd->format = 'x'; lav_fd->interlacing = LAV_NOT_INTERLACED; - veejay_msg(VEEJAY_MSG_DEBUG, "Playing image"); return lav_fd; } @@ -846,7 +847,7 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->MJPG_chroma = CHROMA420; lav_fd->format = 'D'; lav_fd->interlacing = LAV_NOT_INTERLACED; - veejay_msg(VEEJAY_MSG_DEBUG, "Playing MS MPEG4v3 DivX Video. (Every frame must be an intra frame)" ); + veejay_msg(VEEJAY_MSG_WARNING, "Playing MS MPEG4v3 DivX Video. 
(Every frame should be an intra frame)" ); return lav_fd; } @@ -855,7 +856,7 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->MJPG_chroma = CHROMA420; lav_fd->format = 'M'; lav_fd->interlacing = LAV_NOT_INTERLACED; - veejay_msg(VEEJAY_MSG_DEBUG, "Playing MPEG4 Video (Experimental)"); + veejay_msg(VEEJAY_MSG_WARNING, "Playing MPEG4 Video (Every frame should be an intra frame)"); return lav_fd; } @@ -865,7 +866,6 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->MJPG_chroma = CHROMA420; lav_fd->format = 'Y'; lav_fd->interlacing = LAV_NOT_INTERLACED; - veejay_msg(VEEJAY_MSG_DEBUG, "Playing YUV 4:2:0 uncompressed video"); return lav_fd; } if (strncasecmp(video_comp,"yv16",4)==0) @@ -873,14 +873,12 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->MJPG_chroma = CHROMA422; lav_fd->format = 'P'; lav_fd->interlacing = LAV_NOT_INTERLACED; - veejay_msg(VEEJAY_MSG_DEBUG, "Playing YUV 4:2:2 uncompressed video"); return lav_fd; } if (strncasecmp(video_comp,"dvsd",4)==0 || strncasecmp(video_comp,"dv",2)==0) { lav_fd->MJPG_chroma = CHROMA422; lav_fd->interlacing = LAV_INTER_BOTTOM_FIRST; - veejay_msg(VEEJAY_MSG_DEBUG, "Playing Digital Video"); return lav_fd; } @@ -977,6 +975,7 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) } else if ( jpg_height == lav_video_height(lav_fd)/2 ) { + /* Video is interlaced */ if(lav_fd->format == 'a') { diff --git a/veejay-current/libel/vj-avcodec.c b/veejay-current/libel/vj-avcodec.c index f0c02b60..41882dd6 100644 --- a/veejay-current/libel/vj-avcodec.c +++ b/veejay-current/libel/vj-avcodec.c @@ -58,7 +58,7 @@ static vj_encoder *vj_avcodec_new_encoder( int id, editlist *el, int pixel_forma if(!e) return NULL; memset(e, 0, sizeof(vj_encoder)); - if(id != -1) + if(id != 998 && id != 999 ) { #ifdef __FALLBACK_LIBDV if(id != CODEC_ID_DVVIDEO) @@ -76,52 +76,50 @@ static vj_encoder *vj_avcodec_new_encoder( int id, editlist *el, int pixel_forma #endif } - e->context = avcodec_alloc_context(); - e->context->width = el->video_width; - e->context->height = el->video_height; - e->context->frame_rate = el->video_fps; - e->context->frame_rate_base = 1; - e->context->qcompress = 0.0; - e->context->qblur = 0.0; - e->context->flags = CODEC_FLAG_QSCALE; - e->context->gop_size = 0; - e->context->sub_id = 0; - e->context->me_method = 0; // motion estimation algorithm - e->context->workaround_bugs = FF_BUG_AUTODETECT; - e->context->prediction_method = 0; - e->context->dct_algo = FF_DCT_AUTO; //global_quality? 
- - if ( pixel_format == FMT_422) - { - e->context->pix_fmt = PIX_FMT_YUV422P; - e->len = el->video_width * el->video_height; - e->uv_len = e->len / 2; - e->width = el->video_width; - e->height = el->video_height; - } - if( pixel_format == FMT_420) - { - e->len = el->video_width * el->video_height; - e->uv_len = e->len / 4; - if(out_pixel_format == FMT_422) - e->sub_sample =1; - e->width = el->video_width; - e->height = el->video_height; - e->context->pix_fmt = PIX_FMT_YUV420P; - } - - if( id != -1) + if( id != 998 && id != 999 ) { #ifdef __FALLBACK_LIBDV if(id != CODEC_ID_DVVIDEO ) + { #endif + e->context = avcodec_alloc_context(); + e->context->width = el->video_width; + e->context->height = el->video_height; + e->context->frame_rate = el->video_fps; + e->context->frame_rate_base = 1; + e->context->qcompress = 0.0; + e->context->qblur = 0.0; + e->context->flags = CODEC_FLAG_QSCALE; + e->context->gop_size = 0; + e->context->sub_id = 0; + e->context->me_method = 0; // motion estimation algorithm + e->context->workaround_bugs = FF_BUG_AUTODETECT; + e->context->prediction_method = 0; + e->context->dct_algo = FF_DCT_AUTO; //global_quality? + e->context->pix_fmt = (pixel_format == FMT_420 ? PIX_FMT_YUV420P : PIX_FMT_YUV422P ); if ( avcodec_open( e->context, e->codec ) < 0 ) { if(e) free(e); return NULL; } - } + +#ifdef __FALLBACK_LIBDV + } +#endif + } + + e->len = el->video_width * el->video_height; + if(pixel_format == PIX_FMT_YUV422P) + e->uv_len = e->len / 2; + else + e->uv_len = e->len / 4; + e->width = el->video_width; + e->height = el->video_height; + + e->out_fmt = pixel_format; + e->encoder_id = id; + /* if( el->has_audio ) { @@ -179,10 +177,10 @@ int vj_avcodec_init(editlist *el, int pixel_format) _encoders[ENCODER_MPEG4] = vj_avcodec_new_encoder( CODEC_ID_MPEG4, el, fmt); if(!_encoders[ENCODER_MPEG4]) return 0; - _encoders[ENCODER_YUV420] = vj_avcodec_new_encoder( -1, el, FMT_420); + _encoders[ENCODER_YUV420] = vj_avcodec_new_encoder( 999, el, fmt); if(!_encoders[ENCODER_YUV420]) return 0; - _encoders[ENCODER_YUV422] = vj_avcodec_new_encoder( -1, el, FMT_422); + _encoders[ENCODER_YUV422] = vj_avcodec_new_encoder( 998, el, fmt); if(!_encoders[ENCODER_YUV422]) return 0; @@ -301,7 +299,38 @@ int yuv420p_to_yuv422p( uint8_t *sY,uint8_t *sCb, uint8_t *sCr, uint8_t *dst[3], static int vj_avcodec_copy_frame( vj_encoder *av, uint8_t *src[3], uint8_t *dst ) { - if(av->sub_sample) + if(!av) + { + veejay_msg(VEEJAY_MSG_ERROR, "No encoder !!"); + return 0; + } + if( (av->encoder_id == 999 && av->out_fmt == PIX_FMT_YUV420P) || (av->encoder_id == 998 && av->out_fmt == PIX_FMT_YUV422P)) + { + /* copy */ + veejay_memcpy( dst, src[0], av->len ); + veejay_memcpy( dst+(av->len), src[1], av->uv_len ); + veejay_memcpy( dst+(av->len+av->uv_len) , src[2], av->uv_len); + return ( av->len + av->uv_len + av->uv_len ); + } + /* copy by converting */ + if( av->encoder_id == 999 && av->out_fmt == PIX_FMT_YUV422P) + { + yuv422p_to_yuv420p( src, dst, av->width, av->height); + return ( av->len + (av->len/4) + (av->len/4)); + } + + if( av->encoder_id == 998 && av->out_fmt == PIX_FMT_YUV420P) + { + uint8_t *d[3]; + d[0] = dst; + d[1] = dst + av->len; + d[2] = dst + av->len + (av->len / 2); + yuv420p_to_yuv422p2( src[0],src[1],src[2], d, av->width,av->height ); + return ( av->len + av->len ); + } + + +/* if(av->sub_sample) { return(yuv422p_to_yuv420p(src,dst, av->width, av->height )); } @@ -312,7 +341,10 @@ static int vj_avcodec_copy_frame( vj_encoder *av, uint8_t *src[3], uint8_t *dst veejay_memcpy( 
dst+(av->len+av->uv_len) , src[2], av->uv_len); } return (av->len + av->uv_len + av->uv_len); +*/ + + return 0; } @@ -323,6 +355,7 @@ int vj_avcodec_encode_frame( int format, uint8_t *src[3], uint8_t *buf, int buf vj_encoder *av = _encoders[format]; int res=0; memset( &pict, 0, sizeof(pict)); + if(format == ENCODER_YUV420) // no compression, just copy return vj_avcodec_copy_frame( _encoders[ENCODER_YUV420],src, buf ); if(format == ENCODER_YUV422) // no compression, just copy diff --git a/veejay-current/libel/vj-avcodec.h b/veejay-current/libel/vj-avcodec.h index 49cd548b..9eec1097 100644 --- a/veejay-current/libel/vj-avcodec.h +++ b/veejay-current/libel/vj-avcodec.h @@ -35,11 +35,12 @@ typedef struct AVCodec *audiocodec; AVFrame *frame; AVCodecContext *context; - int fmt; + int out_fmt; int uv_len; int len; int sub_sample; int super_sample; + int encoder_id; int width; int height; } vj_encoder; diff --git a/veejay-current/libel/vj-el.c b/veejay-current/libel/vj-el.c index afd5805f..e3d429d5 100644 --- a/veejay-current/libel/vj-el.c +++ b/veejay-current/libel/vj-el.c @@ -46,7 +46,7 @@ #ifdef SUPPORT_READ_DV2 #include "rawdv.h" #endif -#define MAX_CODECS 10 +#define MAX_CODECS 12 #define CODEC_ID_YUV420 999 #define CODEC_ID_YUV422 998 @@ -63,25 +63,70 @@ static struct { "4:4:4" }, }; + static struct { const char *name; int id; - int fmt; } _supported_codecs[] = { - { "mjpeg" , CODEC_ID_MJPEG, 0 }, - { "mjpeg" , CODEC_ID_MJPEG, 1 }, - { "dv" , CODEC_ID_DVVIDEO,0 }, - { "dv" , CODEC_ID_DVVIDEO,1 }, - { "msmpeg4",CODEC_ID_MPEG4, 0 }, - { "msmpeg4",CODEC_ID_MPEG4, 1 }, - { "divx" ,CODEC_ID_MSMPEG4V3,0 }, - { "divx" , CODEC_ID_MSMPEG4V3, 1 }, - { "i420", CODEC_ID_YUV420,0 }, - { "i422", CODEC_ID_YUV422,1 }, + { "mjpeg" , CODEC_ID_MJPEG }, + { "mjpegb", CODEC_ID_MJPEGB }, +#if LIBAVCODEC_BUILD > 4680 + { "sp5x", CODEC_ID_SP5X }, /* sunplus motion jpeg video */ +#endif +#if LIBAVCODEC_BUILD >= 4685 + { "theora", CODEC_ID_THEORA }, +#endif + { "huffyuv", CODEC_ID_HUFFYUV }, + { "cyuv", CODEC_ID_CYUV }, + { "dv" , CODEC_ID_DVVIDEO }, + { "msmpeg4",CODEC_ID_MPEG4 }, + { "divx" ,CODEC_ID_MSMPEG4V3 }, + { "i420", CODEC_ID_YUV420 }, + { "i422", CODEC_ID_YUV422 }, { NULL , 0 }, -}; +}; + +static struct +{ + const char *name; + int id; +} _supported_fourcc[] = +{ + { "mjpg", CODEC_ID_MJPEG }, + { "mjpb", CODEC_ID_MJPEGB }, + { "jpeg", CODEC_ID_MJPEG }, + { "mjpa", CODEC_ID_MJPEG }, + { "jfif", CODEC_ID_MJPEG }, + { "jpgl", CODEC_ID_MJPEG }, + { "dvsd", CODEC_ID_DVVIDEO}, + { "dv", CODEC_ID_DVVIDEO}, + { "dvhd", CODEC_ID_DVVIDEO}, + { "dvp", CODEC_ID_DVVIDEO}, + { "mp4v", CODEC_ID_MPEG4 }, + { "xvid", CODEC_ID_MPEG4 }, + { "divx", CODEC_ID_MPEG4 }, + { "dxsd", CODEC_ID_MPEG4 }, + { "mp4s", CODEC_ID_MPEG4 }, + { "m4s2", CODEC_ID_MPEG4 }, + { "div3", CODEC_ID_MSMPEG4V3 }, + { "mp43", CODEC_ID_MSMPEG4V3 }, + { "mp42", CODEC_ID_MSMPEG4V2 }, + { "mpg4", CODEC_ID_MSMPEG4V1 }, + { "yuv", CODEC_ID_YUV420 }, + { "iyuv", CODEC_ID_YUV420 }, + { "i420", CODEC_ID_YUV420 }, + { "yv16", CODEC_ID_YUV422 }, + { "pict", 0xffff }, /* invalid fourcc */ + { "hfyu", CODEC_ID_HUFFYUV}, + { "cyuv", CODEC_ID_CYUV }, +#if LIBAVCODEC_BUILD > 4680 + { "spsx", CODEC_ID_SP5X }, +#endif + { NULL, 0 } +}; + static int mmap_size = 0; typedef struct @@ -97,19 +142,24 @@ typedef struct static vj_decoder *el_codecs[MAX_CODECS]; -static _el_get_codec(int id, int pixel_format ) +static _el_get_codec(int id ) { int i; for( i = 0; _supported_codecs[i].name != NULL ; i ++ ) { - if( _supported_codecs[i].id == id && _supported_codecs[i].fmt == 
pixel_format ) - { + if( _supported_codecs[i].id == id ) return i; - } } return -1; } - +static int _el_get_codec_id( const char *fourcc ) +{ + int i; + for( i = 0; _supported_fourcc[i].name != NULL ; i ++ ) + if( strncasecmp( fourcc, _supported_fourcc[i].name, strlen(_supported_fourcc[i].name) ) == 0 ) + return _supported_fourcc[i].id; + return -1; +} vj_decoder *_el_new_decoder( int id , int width, int height, float fps, int pixel_format) { vj_decoder *d = (vj_decoder*) vj_malloc(sizeof(vj_decoder)); @@ -279,23 +329,8 @@ int open_video_file(char *filename, editlist * el, int preserve_pathname, int de return -1; } - if(el->pixel_format == -1 ) - el->pixel_format = pix_fmt; - el->yuv_taste[n] = pix_fmt; -/* else - { - // check on sanity - if( pix_fmt > el->pixel_format) - { - veejay_msg(VEEJAY_MSG_ERROR, "Cannot handle mixed editlists"); - el->num_video_files--; - if( el->lav_fd[n] ) lav_close( el->lav_fd[n] ); - if( realname ) free(realname ); - return -1; - } - }*/ if(lav_video_frames(el->lav_fd[n]) < 2) { @@ -481,34 +516,11 @@ int open_video_file(char *filename, editlist * el, int preserve_pathname, int de } // initialze a decoder if needed - if( strncasecmp("mjpg", compr_type, 4) == 0) - decoder_id = CODEC_ID_MJPEG; - if( strncasecmp("jpeg", compr_type, 4) == 0) - decoder_id = CODEC_ID_MJPEG; - if( strncasecmp("mjpa", compr_type, 4) == 0) - decoder_id = CODEC_ID_MJPEG; - if( strncasecmp("dvsd", compr_type, 4) == 0) - decoder_id = CODEC_ID_DVVIDEO; - if( strncasecmp("dv", compr_type, 2) == 0) - decoder_id = CODEC_ID_DVVIDEO; - if( strncasecmp("mp4v",compr_type,4) == 0) - decoder_id = CODEC_ID_MPEG4; - if( strncasecmp("div3", compr_type,4) == 0) - decoder_id = CODEC_ID_MSMPEG4V3; - if( strncasecmp("yuv", compr_type, 3) == 0) - decoder_id = CODEC_ID_YUV420; - if( strncasecmp("iyuv", compr_type,4) == 0) - decoder_id = CODEC_ID_YUV420; - if( strncasecmp("i420", compr_type,4) == 0) - decoder_id = CODEC_ID_YUV420; - if( strncasecmp("yv16", compr_type,4) == 0) - decoder_id = CODEC_ID_YUV422; - if( strncasecmp("PICT", compr_type,4) == 0) - decoder_id = 0xffff; + decoder_id = _el_get_codec_id( compr_type ); if(decoder_id > 0 && decoder_id != 0xffff) { - int c_i = _el_get_codec(decoder_id, pix_fmt); + int c_i = _el_get_codec(decoder_id); if(c_i == -1) { veejay_msg(VEEJAY_MSG_ERROR, "Unsupported codec %s",compr_type); @@ -619,10 +631,10 @@ int vj_el_get_video_frame(editlist *el, long nframe, uint8_t *dst[3]) int c_i = 0; vj_decoder *d = NULL; - int pix_fmt = el->pixel_format; - + int out_pix_fmt = el->pixel_format; + int in_pix_fmt = out_pix_fmt; if( el->has_video == 0 || el->is_empty ) - { vj_el_dummy_frame( dst, el, pix_fmt ); return 2; } + { vj_el_dummy_frame( dst, el, out_pix_fmt ); return 2; } if (nframe < 0) nframe = 0; @@ -632,49 +644,150 @@ int vj_el_get_video_frame(editlist *el, long nframe, uint8_t *dst[3]) n = el->frame_list[nframe]; - int yy = el->yuv_taste[N_EL_FILE(n)]; + in_pix_fmt = el->yuv_taste[N_EL_FILE(n)]; res = lav_set_video_position(el->lav_fd[N_EL_FILE(n)], N_EL_FRAME(n)); decoder_id = lav_video_compressor_type( el->lav_fd[N_EL_FILE(n)] ); - if(decoder_id != 0xffff) - { - c_i = _el_get_codec( decoder_id, yy ); - if(c_i >= 0 && c_i < MAX_CODECS) - d = el_codecs[c_i]; - if(!d) - { - veejay_msg(VEEJAY_MSG_DEBUG, "Cannot find codec for id %d (%d)", decoder_id, - c_i); - return -1; - } - } - if (res < 0) { veejay_msg(VEEJAY_MSG_ERROR,"Error setting video position: %s", lav_strerror()); } - if(lav_filetype( el->lav_fd[N_EL_FILE(n)] ) != 'x') - res = 
lav_read_frame(el->lav_fd[N_EL_FILE(n)], d->tmp_buffer); + int len = el->video_width * el->video_height; + int uv_len = 0; + int have_picture = 0; + int inter = lav_video_interlacing(el->lav_fd[N_EL_FILE(n)]); + int buf_len = 0; + +/* switch(decoder_id) + { + case 0xffff: + {uint8_t *ptr = lav_get_frame_ptr( el->lav_fd[N_EL_FILE(n)] ); + if(!ptr) return 0; + uv_len = len / ( out_pix_fmt == FMT_420 ? 4: 2 ); + veejay_memcpy( dst[0], ptr, len); + veejay_memcpy( dst[1], ptr+len,uv_len); + veejay_memcpy( dst[2], ptr+(len+uv_len), uv_len); + return 1;} + break; + case CODEC_ID_YUV420: + if( out_pix_fmt == FMT_420 ) + { + uv_len = len / 4; + veejay_memcpy( dst[0], d->tmp_buffer, len); + veejay_memcpy( dst[1], d->tmp_buffer+len,uv_len); + veejay_memcpy( dst[2], d->tmp_buffer+(len+uv_len), uv_len); + } + else + { + uv_len = len / 4; + return (yuv420p_to_yuv422p( d->tmp_buffer, + d->tmp_buffer+len, + d->tmp_buffer+len+uv_len, + dst, + el->video_width, el->video_height)); + + } + return 1; + break; + case CODEC_ID_YUV422: + if(out_pix_fmt == FMT_422 ) + { + uv_len = len / 2; + veejay_memcpy( dst[0], d->tmp_buffer, len); + veejay_memcpy( dst[1], d->tmp_buffer+len,uv_len); + veejay_memcpy( dst[2], d->tmp_buffer+len+uv_len, uv_len); + } + else + { + uint8_t *src[3]; + uv_len = len / 2; + src[0] = d->tmp_buffer; + src[1] = d->tmp_buffer + len; + src[2] = d->tmp_buffer + len + uv_len; + yuv422p_to_yuv420p2( src, dst, el->video_width,el->video_height ); + } + return 1; + break; + default: + + buf_len = avcodec_decode_video( d->context, + d->frame, + &have_picture, + d->tmp_buffer, + res); + if(buf_len>0) + { + AVPicture pict,pict2; + int res = 0; + int src_fmt = ( in_pix_fmt == FMT_420 ? PIX_FMT_YUV420P : PIX_FMT_YUV422P ); + int dst_fmt = ( out_pix_fmt == FMT_420 ? PIX_FMT_YUV420P : PIX_FMT_YUV422P ); + pict.data[0] = dst[0]; + pict.data[1] = dst[1]; + pict.data[2] = dst[2]; + + pict.linesize[0] = el->video_width; + pict.linesize[1] = el->video_width >> 1; + pict.linesize[2] = el->video_width >> 1; + + if( el->auto_deinter && inter != LAV_NOT_INTERLACED) + { + pict2.data[0] = d->deinterlace_buffer[0]; + pict2.data[1] = d->deinterlace_buffer[1]; + pict2.data[2] = d->deinterlace_buffer[2]; + pict2.linesize[1] = el->video_width >> 1; + pict2.linesize[2] = el->video_width >> 1; + pict2.linesize[0] = el->video_width; + + res = avpicture_deinterlace( &pict2, (const AVPicture*) d->frame, src_fmt, + el->video_width, el->video_height); + + img_convert( &pict, dst_fmt, (const AVPicture*) &pict2, src_fmt, + el->video_width,el->video_height); + } + else + { + img_convert( &pict, dst_fmt, (const AVPicture*) d->frame, src_fmt, + el->video_width, el->video_height ); + } + return 1; + } + break; + } + return 0; +} +*/ if( decoder_id == 0xffff ) { uint8_t *p = lav_get_frame_ptr( el->lav_fd[N_EL_FILE(n)] ); if(!p) return -1; int len = el->video_width * el->video_height; - int uv_len = (el->video_width >> 1) * (el->video_height >> (pix_fmt == FMT_420 ? 1:0)); + int uv_len = (el->video_width >> 1) * (el->video_height >> (out_pix_fmt == FMT_420 ? 
1:0)); veejay_memcpy( dst[0], p, len ); veejay_memcpy( dst[1], p + len, uv_len ); veejay_memcpy( dst[2], p + len + uv_len, uv_len ); return 1; } + c_i = _el_get_codec( decoder_id ); + if(c_i >= 0 && c_i < MAX_CODECS) + d = el_codecs[c_i]; + else + { + veejay_msg(VEEJAY_MSG_DEBUG, "Choking on decoder ID %d (%d)", decoder_id,c_i ); + return -1; + } + if(lav_filetype( el->lav_fd[N_EL_FILE(n)] ) != 'x') + res = lav_read_frame(el->lav_fd[N_EL_FILE(n)], d->tmp_buffer); + + if( decoder_id == CODEC_ID_YUV420 ) - { /* yuv420 raw */ + { int len = el->video_width * el->video_height; int uv_len = len / 4; - if(pix_fmt == FMT_420) + if(out_pix_fmt == FMT_420) { veejay_memcpy( dst[0], d->tmp_buffer, len); veejay_memcpy( dst[1], d->tmp_buffer+len,uv_len); @@ -712,23 +825,21 @@ int vj_el_get_video_frame(editlist *el, long nframe, uint8_t *dst[3]) AVPicture pict,pict2; // int dst_pix_fmt = (pix_fmt == FMT_422 ? PIX_FMT_YUV422P : PIX_FMT_YUV420P); // int dst_pix_fmt = el->pixel_format; - int src_fmt = ( el->yuv_taste[ N_EL_FILE(n)] == 0 ? PIX_FMT_YUV420P: PIX_FMT_YUV422P ); - int dst_fmt = ( el->pixel_format == FMT_422 ? PIX_FMT_YUV422P: PIX_FMT_YUV420P) ; + int src_fmt = ( in_pix_fmt == FMT_420 ? PIX_FMT_YUV420P: PIX_FMT_YUV422P ); + int dst_fmt = ( out_pix_fmt== FMT_420 ? PIX_FMT_YUV420P: PIX_FMT_YUV422P) ; -//veejay_msg(VEEJAY_MSG_DEBUG, "Destination %d, Source %d", dst_fmt, src_fmt ); int res = 0; pict.data[0] = dst[0]; pict.data[1] = dst[1]; pict.data[2] = dst[2]; pict.linesize[0] = el->video_width; - pict.linesize[1] = el->video_width >> 1; - pict.linesize[2] = el->video_width >> 1; -/* -int avpicture_deinterlace(AVPicture *dst, const AVPicture *src, - int pix_fmt, int width, int height); -*/ + pict.linesize[1] = el->video_width / 2; + pict.linesize[2] = el->video_width / 2; + memset(dst[0], 255, len ); + memset(dst[1], 255, uv_len ); + memset(dst[2], 255,uv_len); if( el->auto_deinter && inter != LAV_NOT_INTERLACED) { pict2.data[0] = d->deinterlace_buffer[0]; @@ -743,20 +854,20 @@ int avpicture_deinterlace(AVPicture *dst, const AVPicture *src, img_convert( &pict, dst_fmt, (const AVPicture*) &pict2, src_fmt, el->video_width,el->video_height); - } else { - img_convert( &pict, dst_fmt, (const AVPicture*) d->frame, src_fmt, + img_convert( &pict, dst_fmt, (const AVPicture*) d->frame, d->context->pix_fmt, el->video_width, el->video_height ); } + return 1; } veejay_msg(VEEJAY_MSG_WARNING, "Error decoding frame %ld - %d ", nframe,len); return 0; } } - + return 1; } @@ -881,7 +992,7 @@ int vj_el_get_audio_frame_at(editlist *el, uint32_t nframe, uint8_t *dst, int nu } -editlist *vj_el_dummy(int flags, int deinterlace, int chroma, char norm, int width, int height, float fps) +editlist *vj_el_dummy(int flags, int deinterlace, int chroma, char norm, int width, int height, float fps, int fmt) { editlist *el = vj_malloc(sizeof(editlist)); if(!el) return NULL; @@ -902,7 +1013,10 @@ editlist *vj_el_dummy(int flags, int deinterlace, int chroma, char norm, int wid el->video_inter = LAV_NOT_INTERLACED; /* output pixel format */ - el->pixel_format = (chroma == CHROMA420 ? FMT_420 : FMT_422); + if( fmt == -1 ) + el->pixel_format = FMT_422; + else + el->pixel_format = (fmt == 0 ? 
FMT_420: FMT_422 ); el->auto_deinter = deinterlace; el->max_frame_size = width*height*3; el->last_afile = -1; @@ -948,7 +1062,7 @@ editlist *vj_el_init_with_args(char **filename, int num_files, int flags, int de vj_picture_init(); #endif memset( el, 0, sizeof(editlist) ); - el->pixel_format = out_fmt; + el->has_video = 1; //assume we get it el->MJPG_chroma = CHROMA420; /* Check if a norm parameter is present */ @@ -1146,8 +1260,24 @@ editlist *vj_el_init_with_args(char **filename, int num_files, int flags, int de el->max_frame_size) el->max_frame_size = lav_frame_size(el->lav_fd[N_EL_FILE(n)], N_EL_FRAME(n)); + + } + /* Pick a pixel format */ + if(out_fmt == -1) + { + int lowest = FMT_420; + for( i = 0 ; i < el->num_video_files; i ++ ) + { + if( lav_video_MJPG_chroma( el->lav_fd[ i ] ) == CHROMA422 ) + lowest = FMT_422; + } + out_fmt = lowest; + } + + el->pixel_format = out_fmt; + /* Help for audio positioning */ el->last_afile = -1; @@ -1546,8 +1676,6 @@ int vj_el_framelist_clone( editlist *src, editlist *dst) dst->frame_list = (uint64_t*) vj_malloc(sizeof(uint64_t) * src->video_frames ); if(!dst->frame_list) return 0; - veejay_msg(VEEJAY_MSG_DEBUG, "Frame list is %2.2f Mb", - (float)(sizeof(uint64_t) * src->video_frames) / 1048576.0 ); veejay_memcpy( dst->frame_list, diff --git a/veejay-current/libel/vj-el.h b/veejay-current/libel/vj-el.h index 74ac7533..3eb5b089 100644 --- a/veejay-current/libel/vj-el.h +++ b/veejay-current/libel/vj-el.h @@ -106,7 +106,7 @@ void vj_el_ref(editlist *el, int num); void vj_el_unref(editlist *el, int num); -editlist *vj_el_dummy(int flags, int deinterlace, int chroma, char norm, int width, int height, float fps); +editlist *vj_el_dummy(int flags, int deinterlace, int chroma, char norm, int width, int height, float fps, int fmt); int vj_el_get_file_entry( editlist *el,long *start_pos, long *end_pos, long entry ); diff --git a/veejay-current/libsamplerec/samplerecord.c b/veejay-current/libsamplerec/samplerecord.c index a202c5e8..21ec96a4 100644 --- a/veejay-current/libsamplerec/samplerecord.c +++ b/veejay-current/libsamplerec/samplerecord.c @@ -224,6 +224,8 @@ int sample_continue_record( int s1 ) { veejay_msg(VEEJAY_MSG_WARNING, "Ready recording %d frames", si->encoder_succes_frames); + si->encoder_total_frames = 0; + sample_update(si,s1); return 1; } @@ -241,7 +243,7 @@ int sample_continue_record( int s1 ) si->encoder_total_frames, si->encoder_duration); - + si->encoder_total_frames = 0; sample_update(si,s1); return 2; } @@ -276,7 +278,6 @@ int sample_record_frame(int s1, uint8_t *buffer[3], uint8_t *abuff, int audio_si return 1; } - si->rec_total_bytes += buf_len; /* if(audio_size > 0) diff --git a/veejay-current/libstream/vj-tag.c b/veejay-current/libstream/vj-tag.c index 22ffedc4..bd5e8f76 100644 --- a/veejay-current/libstream/vj-tag.c +++ b/veejay-current/libstream/vj-tag.c @@ -1057,7 +1057,7 @@ static int vj_tag_start_encoder(vj_tag *tag, int format, long nframes) else if(format==ENCODER_YUV420) { - tag->encoder_max_size= ( _tag_info->edit_list->video_width * _tag_info->edit_list->video_height *2); + tag->encoder_max_size = (_tag_info->edit_list->video_width * _tag_info->edit_list->video_height * 2 ); } else { @@ -1135,7 +1135,8 @@ int vj_tag_continue_record( int t1 ) if(si->encoder_num_frames >= si->encoder_duration) { veejay_msg(VEEJAY_MSG_INFO, "Ready recording %ld frames", si->encoder_succes_frames); - + si->encoder_total_frames = 0; + vj_tag_update(si, t1 ); return 1; } @@ -1155,7 +1156,7 @@ int vj_tag_continue_record( int t1 ) 
si->encoder_total_frames, si->encoder_duration); - + si->encoder_total_frames = 0; vj_tag_update(si,t1); return 2; } diff --git a/veejay-current/libvje/effects/common.h b/veejay-current/libvje/effects/common.h index e64af25d..f005ba55 100644 --- a/veejay-current/libvje/effects/common.h +++ b/veejay-current/libvje/effects/common.h @@ -2,6 +2,7 @@ * Linux VeeJay * * Copyright(C)2002 Niels Elburg + * Copyright (C) 2001 Matthew J. Marjanovic * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License @@ -98,15 +99,48 @@ v = (int) (( 255.0 * Ev ) + 128);\ } +/* + http://www.w3.org/Graphics/JPEG/jfif.txt + YCbCr (256 levels) can be computed directly from 8-bit RGB as follows: + IEC 601 + */ -#define GIMP_rgb2yuv(r,g,b,y,u,v) \ +/* MJPEGtools lavtools/colorspace.c by matthew */ +#define YCBCR_to_IEC601 ( y, u, v ) \ {\ - float Ey = (0.299 * (float)r) + (0.587 * (float)g) + (0.114 * (float)b);\ - float Eu = (-0.169 * (float)r) - (0.331 * (float)g) + (0.500 * (float)b) + 128.0;\ - float Ev = (0.500 * (float)r) - (0.419 * (float)g) - (0.081 * (float)b) + 128.0;\ - y = (int) Ey;\ - u = (int) Eu;\ - v = (int) Ev;\ + y = y * 219.0 / 256.0 + 16 ;\ + u = (u - 128 ) * 224.0 / 256.0 + 128;\ + v = (v - 128 ) * 224.0 / 256.0 + 128;\ + } + +#define IEC601_to_YCBCR( y, u, v ) \ + {\ + y = ( y - 16 ) / 219.0 * 256.0;\ + u = ( u - 128 ) / 224.0 * 256.0 + 128;\ + v = ( v - 128 ) / 224.0 * 256.0 + 128;\ + } + +static inline int myround(float n) +{ + if (n >= 0) + return (int)(n + 0.5); + else + return (int)(n - 0.5); +} +/* End colorspace.c */ + + +#define GIMP_rgb2yuv(r,g,b,y,u,v)\ + {\ + float Ey = (0.299 * (float)r) + (0.587 * (float)g) + (0.114 * (float) b);\ + float Eu = (-0.168736 * (float)r) - (0.331264 * (float)g) + (0.500 * (float)b) + 128.0;\ + float Ev = (0.500 * (float)r) - (0.418688 * (float)g) - (0.081312 * (float)b)+ 128.0;\ + y = myround(Ey);\ + u = myround(Eu);\ + v = myround(Ev);\ + if( y > 0xff ) y = 0xff ; else if ( y < 0 ) y = 0;\ + if( u > 0xff ) u = 0xff ; else if ( u < 0 ) u = 0;\ + if( v > 0xff ) v = 0xff ; else if ( v < 0 ) v = 0;\ } enum diff --git a/veejay-current/veejay/liblavplayvj.c b/veejay-current/veejay/liblavplayvj.c index a59fc965..d642bfff 100644 --- a/veejay-current/veejay/liblavplayvj.c +++ b/veejay-current/veejay/liblavplayvj.c @@ -228,6 +228,7 @@ void veejay_set_sampling(veejay_t *info, subsample_mode_t m) video_playback_setup *settings = (video_playback_setup*) info->settings; if(m == SSM_420_JPEG_TR ) { +veejay_msg(VEEJAY_MSG_ERROR,"Pixel format is %d", info->pixel_format); if(info->pixel_format == FMT_420) settings->sample_mode = SSM_420_JPEG_TR; else @@ -681,8 +682,6 @@ void veejay_set_sample(veejay_t * info, int sampleid) veejay_reset_el_buffer(info); - veejay_msg(VEEJAY_MSG_DEBUG, "Sample EDL = %p, PM EDL = %p", info->edit_list, info->current_edit_list ); - sample_get_short_info( sampleid , &start,&end,&looptype,&speed); veejay_msg(VEEJAY_MSG_INFO, "Playing sample %d (frames %d - %d) at speed %d", @@ -721,6 +720,7 @@ int veejay_create_tag(veejay_t * info, int type, char *filename, } int id = vj_tag_new(type, filename, index, info->current_edit_list, info->pixel_format, channel); + char descr[200]; bzero(descr,200); vj_tag_get_by_type(type,descr); @@ -2969,18 +2969,10 @@ static int veejay_open_video_files(veejay_t *info, char **files, int num_files, vj_el_frame_cache(info->seek_cache ); vj_avformat_init(); - if(force_pix_fmt != -1) - { - info->pixel_format = (force_pix_fmt == 1 ? 
FMT_422 : FMT_420); - veejay_msg(VEEJAY_MSG_WARNING, "Pixel format forced to YCbCr %s", - (info->pixel_format == FMT_422 ? "4:2:2" : "4:2:0")); - } - if(info->auto_deinterlace) { veejay_msg(VEEJAY_MSG_DEBUG, "Auto deinterlacing (for playback on monitor / beamer with vga input"); - veejay_msg(VEEJAY_MSG_DEBUG, "Note that this will effect your recorded video samples"); } if(num_files<=0 || files == NULL) @@ -2989,6 +2981,18 @@ static int veejay_open_video_files(veejay_t *info, char **files, int num_files, info->dummy->active = 1; } + if(force_pix_fmt >= 0) + { + info->pixel_format = (force_pix_fmt == 1 ? FMT_422 : FMT_420); + veejay_msg(VEEJAY_MSG_WARNING, "Pixel format forced to YCbCr %s", + (info->pixel_format == FMT_422 ? "4:2:2" : "4:2:0")); + + } + else + { + info->pixel_format = -1; + } + //TODO: pass yuv sampling to dummy if( info->dummy->active ) { if( !info->dummy->norm ) @@ -3005,11 +3009,8 @@ static int veejay_open_video_files(veejay_t *info, char **files, int num_files, info->dummy->chroma = CHROMA422; info->current_edit_list = vj_el_dummy( 0, info->auto_deinterlace, info->dummy->chroma, - info->dummy->norm, info->dummy->width, info->dummy->height, info->dummy->fps ); - - veejay_msg(VEEJAY_MSG_DEBUG, "Dummy: %d x %d, %s %s ", - info->dummy->width,info->dummy->height, (info->dummy->norm == 'p' ? "PAL": "NTSC" ), - ( force_pix_fmt == 0 ? "4:2:0" : "4:2:2" )); + info->dummy->norm, info->dummy->width, info->dummy->height, info->dummy->fps, + force_pix_fmt ); if( info->dummy->arate ) { @@ -3033,9 +3034,12 @@ static int veejay_open_video_files(veejay_t *info, char **files, int num_files, { return 0; } + if(info->pixel_format == -1) + info->pixel_format = info->edit_list->pixel_format; + veejay_msg(VEEJAY_MSG_DEBUG, "Initialized with pixel format %d", info->pixel_format ); - vj_avcodec_init(info->current_edit_list , info->current_edit_list->pixel_format); + vj_avcodec_init(info->current_edit_list , info->pixel_format); if(info->pixel_format == FMT_422 ) { if(!vj_el_init_422_frame( info->current_edit_list, info->effect_frame1)) return 0; @@ -3076,8 +3080,6 @@ int veejay_open_files(veejay_t * info, char **files, int num_files, float ofps, /* override options */ if(ofps<=0.0) ofps = settings->output_fps; - if(force_pix_fmt<0) - force_pix_fmt = info->pixel_format; settings->output_fps = ofps; diff --git a/veejay-current/veejay/vj-perform.c b/veejay-current/veejay/vj-perform.c index 1605561f..6450ee86 100644 --- a/veejay-current/veejay/vj-perform.c +++ b/veejay-current/veejay/vj-perform.c @@ -2149,13 +2149,7 @@ void vj_perform_plain_fill_buffer(veejay_t * info, int entry) ret = vj_el_get_video_frame(info->current_edit_list,settings->current_frame_num,frame); - if (ret <= 0 ) - { - veejay_memset(frame[0], 16, helper_frame->len); - veejay_memset(frame[1], 128,helper_frame->uv_len); - veejay_memset(frame[2], 128,helper_frame->uv_len); - } - if(ret == 2) + if(ret <= 0) { veejay_msg(VEEJAY_MSG_WARNING, "There is no plain video to play!"); veejay_change_state(info, LAVPLAY_STATE_STOP); @@ -2169,7 +2163,7 @@ int vj_perform_render_sample_frame(veejay_t *info, uint8_t *frame[3]) int audio_len = 0; //uint8_t buf[16384]; long nframe = info->settings->current_frame_num; - uint8_t *_audio_buffer; + uint8_t *_audio_buffer = NULL; if(last_rendered_frame == nframe) return 0; // skip frame last_rendered_frame = info->settings->current_frame_num; @@ -2178,11 +2172,10 @@ int vj_perform_render_sample_frame(veejay_t *info, uint8_t *frame[3]) { _audio_buffer = x_audio_buffer; audio_len = 
(info->edit_list->audio_rate / info->edit_list->video_fps); - return(int)sample_record_frame( info->uc->sample_id,frame, - _audio_buffer,audio_len); - } - return 1; + return(int)sample_record_frame( info->uc->sample_id,frame, + _audio_buffer,audio_len); + } int vj_perform_render_tag_frame(veejay_t *info, uint8_t *frame[3]) @@ -2346,12 +2339,9 @@ void vj_perform_record_sample_frame(veejay_t *info, int entry) { if( res == 1) { - int len = sample_get_total_frames(info->uc->sample_id); sample_stop_encoder(info->uc->sample_id); - n = vj_perform_record_commit_single( info, entry ); + vj_perform_record_commit_single( info, entry ); vj_perform_record_stop(info); - if(n) veejay_msg(VEEJAY_MSG_DEBUG, "Added new sample %d of %d frames",n,len); - } if( res == -1) diff --git a/veejay-current/veejay/vj-sdl.c b/veejay-current/veejay/vj-sdl.c index 23f32f39..0e9ad3c8 100644 --- a/veejay-current/veejay/vj-sdl.c +++ b/veejay-current/veejay/vj-sdl.c @@ -44,7 +44,9 @@ vj_sdl *vj_sdl_allocate(int width, int height, int fmt) vjsdl->mouse_motion = 0; vjsdl->use_keyboard = 1; vjsdl->pix_format = SDL_YUY2_OVERLAY; // have best quality by default - vjsdl->pix_fmt = fmt; + // use yuv420 test + //vjsdl->pix_format = SDL_YV12_OVERLAY; + vjsdl->pix_fmt = fmt; vjsdl->width = width; vjsdl->height = height; vjsdl->frame_size = width * height; @@ -269,6 +271,10 @@ int vj_sdl_update_yuv_overlay(vj_sdl * vjsdl, uint8_t ** yuv420) else yuv422_to_yuyv( yuv420, vjsdl->yuv_overlay->pixels[0], vjsdl->width,vjsdl->height); + // test 420 +// veejay_memcpy( vjsdl->yuv_overlay->pixels[0], yuv420[0], vjsdl->width * vjsdl->height ); +// veejay_memcpy( vjsdl->yuv_overlay->pixels[1], yuv420[2], (vjsdl->width*vjsdl->height)/4); +// veejay_memcpy( vjsdl->yuv_overlay->pixels[2], yuv420[1], (vjsdl->width*vjsdl->height)/4); if (!vj_sdl_unlock(vjsdl)) return 0;
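
For reference, the planar 4:2:0 <-> 4:2:2 handling that vj_avcodec_copy_frame() and vj_el_get_video_frame() now rely on comes down to resampling the chroma planes: 4:2:0 stores (w/2)x(h/2) Cb/Cr planes (uv_len = len/4), 4:2:2 stores (w/2)xh planes (uv_len = len/2), and the Y plane is copied unchanged. The following is a minimal sketch of that per-plane resampling, assuming even dimensions and simple row duplication/averaging; it is not the filtering actually done by yuv420p_to_yuv422p()/yuv422p_to_yuv420p():

#include <stdint.h>
#include <string.h>

/* Sketch only: operates on one chroma plane (Cb or Cr); the caller
 * applies it to both planes and memcpy()s the Y plane unchanged. */

/* 4:2:0 -> 4:2:2: chroma plane grows from (w/2)x(h/2) to (w/2)xh,
 * here by repeating every chroma row (nearest neighbour). */
static void chroma_420_to_422(const uint8_t *src, uint8_t *dst, int w, int h)
{
    int cw = w / 2;
    for (int y = 0; y < h; y++)
        memcpy(dst + y * cw, src + (y / 2) * cw, cw);
}

/* 4:2:2 -> 4:2:0: chroma plane shrinks from (w/2)xh to (w/2)x(h/2),
 * here by averaging each pair of chroma rows. */
static void chroma_422_to_420(const uint8_t *src, uint8_t *dst, int w, int h)
{
    int cw = w / 2;
    for (int y = 0; y < h / 2; y++) {
        const uint8_t *a = src + (2 * y) * cw;
        const uint8_t *b = a + cw;
        for (int x = 0; x < cw; x++)
            dst[y * cw + x] = (uint8_t)((a[x] + b[x] + 1) / 2);
    }
}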
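
The fourcc handling in vj-el.c replaces the chain of strncasecmp() calls with the _supported_fourcc[] table and _el_get_codec_id(): a case-insensitive prefix match against a NULL-terminated table. A sketch of that lookup, using hypothetical MY_CODEC_* ids in place of the CODEC_ID_* values and only a subset of the fourccs from the patch:

#include <string.h>
#include <strings.h>   /* strncasecmp */

enum { MY_CODEC_UNKNOWN = -1, MY_CODEC_MJPEG = 1, MY_CODEC_DV = 2, MY_CODEC_MPEG4 = 3 };

static const struct { const char *fourcc; int id; } fourcc_map[] = {
    { "mjpg", MY_CODEC_MJPEG },
    { "jpeg", MY_CODEC_MJPEG },
    { "dvsd", MY_CODEC_DV    },   /* longer fourcc listed before its prefix "dv" */
    { "dv",   MY_CODEC_DV    },
    { "mp4v", MY_CODEC_MPEG4 },
    { NULL,   0 }
};

/* Case-insensitive prefix match, like _el_get_codec_id() in the patch. */
static int fourcc_to_codec(const char *fourcc)
{
    for (int i = 0; fourcc_map[i].fourcc != NULL; i++)
        if (strncasecmp(fourcc, fourcc_map[i].fourcc,
                        strlen(fourcc_map[i].fourcc)) == 0)
            return fourcc_map[i].id;
    return MY_CODEC_UNKNOWN;
}

Because the comparison is a prefix match, longer fourccs must precede their prefixes in the table ("dvsd" before "dv"), which is the ordering the patch's _supported_fourcc[] table keeps.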
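
The colour-space changes in libvje/effects/common.h tighten GIMP_rgb2yuv() to the JFIF coefficients with rounding and clamping, and add YCBCR_to_IEC601()/IEC601_to_YCBCR() to move between full-range (0..255) and 601 studio-range samples (Y in 16..235, Cb/Cr in 16..240). A sketch of the same arithmetic as plain functions, assuming 8-bit samples and mirroring the patch's 219/256 and 224/256 scale factors:

#include <stdint.h>

/* Round to nearest and clamp to 0..255. */
static uint8_t clamp_u8(float v)
{
    if (v < 0.0f)   return 0;
    if (v > 255.0f) return 255;
    return (uint8_t)(v + 0.5f);
}

/* JFIF-style RGB -> full-range YCbCr, as in GIMP_rgb2yuv(). */
static void rgb_to_ycbcr_full(uint8_t r, uint8_t g, uint8_t b,
                              uint8_t *y, uint8_t *cb, uint8_t *cr)
{
    *y  = clamp_u8( 0.299f    * r + 0.587f    * g + 0.114f    * b);
    *cb = clamp_u8(-0.168736f * r - 0.331264f * g + 0.5f      * b + 128.0f);
    *cr = clamp_u8( 0.5f      * r - 0.418688f * g - 0.081312f * b + 128.0f);
}

/* Full range -> 601 studio range, as in YCBCR_to_IEC601(). */
static void ycbcr_full_to_601(uint8_t *y, uint8_t *cb, uint8_t *cr)
{
    *y  = (uint8_t)(*y * 219.0f / 256.0f + 16.0f + 0.5f);
    *cb = (uint8_t)((*cb - 128) * 224.0f / 256.0f + 128.0f + 0.5f);
    *cr = (uint8_t)((*cr - 128) * 224.0f / 256.0f + 128.0f + 0.5f);
}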