From 3cadb7226cc79561a24f96f1a73d47fd3696405c Mon Sep 17 00:00:00 2001 From: Niels Elburg Date: Mon, 20 Jun 2005 20:05:26 +0000 Subject: [PATCH] Added GdkPixbuf image loader (jpeg,png,...) , changed VIMS 330 (screenshot), new VIMS 247 (open image), images can be added to the editlist with add video file, bugfix in Gveejay open file dialog, some other smaller fixes git-svn-id: svn://code.dyne.org/veejay/trunk@351 eb8d1916-c9e9-0310-b8de-cf0c9472ead5 --- veejay-current/configure.ac | 15 ++- veejay-current/gveejay/callback.c | 30 +++-- veejay-current/gveejay/vj-api.c | 16 +-- veejay-current/libel/Makefile.am | 6 +- veejay-current/libel/lav_io.c | 194 ++++++++++++++++++++++----- veejay-current/libel/lav_io.h | 8 +- veejay-current/libel/vj-el.c | 55 +++++--- veejay-current/libstream/vj-tag.c | 106 ++++++++++++++- veejay-current/libstream/vj-tag.h | 13 +- veejay-current/libvjmsg/vj-common.c | 29 ++++ veejay-current/libvjmsg/vj-common.h | 1 + veejay-current/share/gveejay.glade | 188 +++++++++++++++++++------- veejay-current/tools/sayVIMS.c | 53 ++++---- veejay-current/veejay/Makefile.am | 2 +- veejay-current/veejay/liblavplayvj.c | 31 ++++- veejay-current/veejay/vims.h | 5 +- veejay-current/veejay/vj-event.c | 95 ++++++++++--- veejay-current/veejay/vj-event.h | 3 + veejay-current/veejay/vj-lib.h | 1 + veejay-current/veejay/vj-perform.c | 14 +- 20 files changed, 687 insertions(+), 178 deletions(-) diff --git a/veejay-current/configure.ac b/veejay-current/configure.ac index 4d7b302f..dfe8aed1 100644 --- a/veejay-current/configure.ac +++ b/veejay-current/configure.ac @@ -54,6 +54,9 @@ AC_ARG_WITH(extra-cflags, AC_ARG_WITH(libdv, AC_HELP_STRING([--without-libdv], [Do not use libdv.]), [], [with_libdv=yes]) +AC_ARG_WITH(pixbuf + AC_HELP_STRING([--without-pixbuf], [Do not use pixbuf]), + [], [with_pixbuf=yes]) AC_ARG_WITH(directfb, AC_HELP_STRING([--without-directfb], [Do not use DirectFB.]), [], [with_directfb=yes]) @@ -262,7 +265,16 @@ if test x$with_jpeg != xno ; then AC_DEFINE(HAVE_JPEG,,[Define is JPEG libraries are available]) fi fi - +have_pixbuf=false +if test x$with_pixbuf != xno ; then + PKG_CHECK_MODULES(PIXBUF, [gtk+-2.0 >= 2.4 gdk-pixbuf-2.0], + [ + AC_SUBST( PIXBUF_CFLAGS ) + AC_SUBST( PIXBUF_LIBS ) + have_pixbuf=true + AC_DEFINE(USE_GDK_PIXBUF, 1, [use gdk image load / save])], + [have_pixbuf=false]) +fi dnl ********************************************************************* dnl Check for libdv dnl (creates LIBDV_CFLAGS, LIBDV_LIBS; defines HAVE_LIBDV) @@ -882,6 +894,7 @@ AC_MSG_NOTICE([ - DirectFB support : ${have_directfb}]) AC_MSG_NOTICE([ - AVI MJPEG playback/recording : true (always)]) AC_MSG_NOTICE([ - libDV (digital video) support : ${have_libdv} ]) AC_MSG_NOTICE([ - JPEG support : ${have_jpeg} ]) +AC_MSG_NOTICE([ - GDK Pixbuf support : ${have_pixbuf}]) AC_MSG_NOTICE([ - jack : ${have_jack}]) AC_MSG_NOTICE([ - xml c library for gnome : ${have_xml2}]) AC_MSG_NOTICE([ - freetype support : ${have_freetype}]) diff --git a/veejay-current/gveejay/callback.c b/veejay-current/gveejay/callback.c index da74b3de..603c21c8 100644 --- a/veejay-current/gveejay/callback.c +++ b/veejay-current/gveejay/callback.c @@ -118,9 +118,15 @@ void on_button_251_clicked( GtkWidget *widget, gpointer user_data) void on_button_054_clicked(GtkWidget *widget, gpointer user_data) { - single_vims( VIMS_SCREENSHOT ); - vj_msg(VEEJAY_MSG_INFO, "Requested veejay to take screenshot of frame %d", - info->status_tokens[FRAME_NUM] + 1 ); + gchar *ext = get_text( "screenshotformat" ); + if(ext) + { + gchar 
filename[100]; + sprintf(filename, "frame-%d.%s", info->status_tokens[FRAME_NUM] + 1 , ext); + multi_vims( VIMS_SCREENSHOT,"%d %d %s",0,0,filename ); + vj_msg(VEEJAY_MSG_INFO, "Requested veejay to take screenshot of frame %d", + info->status_tokens[FRAME_NUM] + 1 ); + } } void on_button_200_clicked(GtkWidget *widget, gpointer user_data) { @@ -324,7 +330,7 @@ void on_button_el_copy_clicked(GtkWidget *w, gpointer *user_data) } } -void on_button_el_newsample_clicked(GtkWidget *w, gpointer *user) +void on_button_el_newclip_clicked(GtkWidget *w, gpointer *user) { if(verify_selection()) { @@ -1352,7 +1358,7 @@ void on_button_historymove_clicked(GtkWidget *widget, gpointer user_data) info->uc.reload_hint[HINT_HISTORY] = 1; } -void on_button_samplecopy_clicked(GtkWidget *widget, gpointer user_data) +void on_button_clipcopy_clicked(GtkWidget *widget, gpointer user_data) { if(info->uc.selected_sample_id != 0) { @@ -1410,7 +1416,7 @@ void on_inputstream_button_clicked(GtkWidget *widget, gpointer user_data) void on_inputstream_filebrowse_clicked(GtkWidget *w, gpointer user_data) { - gchar *filename = dialog_open_file( "Open video file" ); + gchar *filename = dialog_open_file( "Open new input stream" ); if(filename) { put_text( "inputstream_filename", filename ); @@ -1421,6 +1427,10 @@ void on_inputstream_filebrowse_clicked(GtkWidget *w, gpointer user_data) void on_inputstream_file_button_clicked(GtkWidget *w, gpointer user_data) { gint use_y4m = is_button_toggled( "inputstream_filey4m" ); + gint use_ffmpeg = is_button_toggled( "inputstream_fileffmpeg"); + gint use_pic = is_button_toggled( "inputstream_filepixbuf"); + + gchar *file = get_text( "inputstream_filename" ); gint br = 0; gint bw = 0; @@ -1432,11 +1442,13 @@ void on_inputstream_file_button_clicked(GtkWidget *w, gpointer user_data) } if(use_y4m) multi_vims( VIMS_STREAM_NEW_Y4M, "%s", filename ); - else + if(use_ffmpeg) multi_vims( VIMS_STREAM_NEW_AVFORMAT, "%s", filename ); - +#ifdef USE_GDK_PIXBUF + if(use_pic) + multi_vims( VIMS_STREAM_NEW_PICTURE, "%s", filename); +#endif if(filename) g_free( filename ); - if(file) g_free(file); info->uc.reload_hint[HINT_SLIST] = 1; } diff --git a/veejay-current/gveejay/vj-api.c b/veejay-current/gveejay/vj-api.c index 8e0a6f5f..0d4cc4f7 100644 --- a/veejay-current/gveejay/vj-api.c +++ b/veejay-current/gveejay/vj-api.c @@ -82,7 +82,6 @@ enum STREAM_GREEN = 8, STREAM_YELLOW = 7, STREAM_BLUE = 6, - STREAM_BLACK = 5, STREAM_WHITE = 4, STREAM_VIDEO4LINUX = 2, STREAM_DV1394 = 17, @@ -90,6 +89,7 @@ enum STREAM_MCAST = 14, STREAM_YUV4MPEG = 1, STREAM_AVFORMAT = 12, + STREAM_PICTURE = 5, }; enum @@ -992,17 +992,16 @@ gchar *dialog_open_file(const char *title) GTK_STOCK_CANCEL, GTK_RESPONSE_CANCEL, GTK_STOCK_OPEN, GTK_RESPONSE_ACCEPT, NULL); - + gchar *file = NULL; + if( gtk_dialog_run( GTK_DIALOG(dialog)) == GTK_RESPONSE_ACCEPT) { - gchar *file = gtk_file_chooser_get_filename( + file = gtk_file_chooser_get_filename( GTK_FILE_CHOOSER(dialog) ); - gtk_widget_destroy(dialog); - return file; } - gtk_widget_destroy(dialog); - return NULL; + gtk_widget_destroy(GTK_WIDGET(dialog)); + return file; } @@ -3239,6 +3238,7 @@ static void load_samplelist_info(const char *name) case STREAM_YUV4MPEG :sprintf(source,"(Streaming from Yuv4Mpeg file)");break; case STREAM_AVFORMAT :sprintf(source,"(Streaming from libavformat");break; case STREAM_DV1394 :sprintf(source,"(Streaming from DV1394 Camera");break; + case STREAM_PICTURE :sprintf(source,"(Streaming from Image");break; default: sprintf(source,"(Streaming from unknown)"); } @@ 
-4078,7 +4078,6 @@ static void reload_editlist_contents() gint len = 0; single_vims( VIMS_EDITLIST_LIST ); gchar *eltext = recv_vims(6,&len); // msg len - gint offset = 0; gint num_files=0; @@ -4142,6 +4141,7 @@ static void reload_editlist_contents() if(nl < 0 || nl >= num_files) { + printf("exceed max files\n"); return; } int file_len = _el_get_nframes( nl ); diff --git a/veejay-current/libel/Makefile.am b/veejay-current/libel/Makefile.am index 72e3a1ae..82a184e9 100644 --- a/veejay-current/libel/Makefile.am +++ b/veejay-current/libel/Makefile.am @@ -6,7 +6,9 @@ INCLUDES = -I$(top_srcdir) -I$(includedir) -I$(top_srcdir)/vjmem \ -I$(top_srcdir)/vjmsg \ -I$(top_srcdir)/ffmpeg/ffmpeg/libavcodec \ -I$(top_srcdir)/ffmpeg/ffmpeg/libavformat \ - -I$(top_srcdir)/utils + -I$(top_srcdir)/utils \ + ${PIXBUF_CFLAGS} + VJEL_LIB_FILE = libel.la noinst_LTLIBRARIES = $(VJEL_LIB_FILE) -libel_la_SOURCES = vj-mmap.c avilib.c lav_io.c vj-dv.c rawdv.c vj-avcodec.c vj-avformat.c vj-el.c +libel_la_SOURCES = vj-mmap.c avilib.c lav_io.c vj-dv.c rawdv.c pixbuf.c vj-avcodec.c vj-avformat.c vj-el.c diff --git a/veejay-current/libel/lav_io.c b/veejay-current/libel/lav_io.c index 929833bb..936cdb83 100644 --- a/veejay-current/libel/lav_io.c +++ b/veejay-current/libel/lav_io.c @@ -31,7 +31,9 @@ #include //#include #include - +#ifdef USE_GDK_PIXBUF +#include +#endif extern int AVI_errno; static int _lav_io_default_chroma = CHROMAUNKNOWN; static char video_format=' '; @@ -52,6 +54,22 @@ static unsigned long jpeg_padded_len = 0; static unsigned long jpeg_app0_offset = 0; static unsigned long jpeg_app1_offset = 0; +#ifdef USE_GDK_PIXBUF +static int output_scale_width = 0; +static int output_scale_height = 0; +static float output_fps = 25.0; +static int output_yuv = 0; // 422 + +void lav_set_project(int w, int h, float f, int fmt) +{ + output_scale_width = w; + output_scale_height = h; + output_fps = f; + output_yuv = fmt; +} + +#endif + #define M_SOF0 0xC0 #define M_SOF1 0xC1 #define M_DHT 0xC4 @@ -244,6 +262,7 @@ int lav_query_polarity(char format) case 'j': return LAV_INTER_TOP_FIRST; case 'q': return LAV_INTER_TOP_FIRST; case 'm': return LAV_INTER_TOP_FIRST; + case 'x': return LAV_NOT_INTERLACED; // picture is always not interlaced default: return LAV_INTER_TOP_FIRST; } } @@ -321,6 +340,7 @@ lav_file_t *lav_open_output_file(char *filename, char format, if(asize) AVI_set_audio(lav_fd->avi_fd,achans,arate,asize,WAVE_FORMAT_PCM); return lav_fd; } + if(lav_fd) free(lav_fd); return NULL; } @@ -328,16 +348,27 @@ int lav_close(lav_file_t *lav_file) { int ret = 0; video_format = lav_file->format; internal_error = 0; /* for error messages */ + switch(video_format) + { #ifdef SUPPORT_READ_DV2 - if(video_format == 'b') - ret = rawdv_close(lav_file->dv_fd ); - else + case 'b': + ret = rawdv_close(lav_file->dv_fd); + break; #endif - ret = AVI_close( lav_file->avi_fd ); +#ifdef USE_GDK_PIXBUF + case 'x': + vj_picture_cleanup( lav_file->picture ); + ret = 1; + break; +#endif + default: + ret = AVI_close(lav_file->avi_fd); + break; + } - if(lav_file) free(lav_file); + if(lav_file) free(lav_file); - return 1; + return ret; } int lav_write_frame(lav_file_t *lav_file, uint8_t *buff, long size, long count) @@ -352,7 +383,10 @@ int lav_write_frame(lav_file_t *lav_file, uint8_t *buff, long size, long count) return -1; #endif /* For interlaced video insert the apropriate APPn markers */ - +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return -1; +#endif if(lav_file->interlacing!=LAV_NOT_INTERLACED && (lav_file->format == 'a' || 
lav_file->format=='A')) { jpgdata = buff; @@ -403,6 +437,10 @@ int lav_write_audio(lav_file_t *lav_file, uint8_t *buff, long samps) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return 0; +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 0; #endif return AVI_write_audio( lav_file->avi_fd, buff, samps*lav_file->bps); } @@ -415,6 +453,10 @@ long lav_video_frames(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_video_frames(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 2; #endif return AVI_video_frames(lav_file->avi_fd); } @@ -425,6 +467,10 @@ int lav_video_width(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format=='b') return rawdv_width(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format=='x') + return (output_scale_width == 0 ? vj_picture_get_width( lav_file->picture ) : output_scale_width); #endif return AVI_video_width(lav_file->avi_fd); } @@ -435,6 +481,10 @@ int lav_video_height(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_height( lav_file->dv_fd ); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return (output_scale_height == 0 ? vj_picture_get_height( lav_file->picture ) : output_scale_height); #endif return AVI_video_height(lav_file->avi_fd); } @@ -445,6 +495,10 @@ double lav_frame_rate(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_fps(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return output_fps; #endif return AVI_frame_rate(lav_file->avi_fd); } @@ -454,6 +508,10 @@ int lav_video_interlacing(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_interlacing(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return LAV_NOT_INTERLACED; #endif return lav_file->interlacing; } @@ -479,6 +537,10 @@ int lav_video_compressor_type(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(lav_file->format == 'b') return rawdv_compressor( lav_file->dv_fd ); +#endif +#ifdef USE_GDK_PIXBUF + if(lav_file->format == 'x') + return 0xffff; #endif return AVI_video_compressor_type( lav_file->avi_fd ); } @@ -492,6 +554,13 @@ const char *lav_video_compressor(lav_file_t *lav_file) const char *tmp = (const char*) strdup("dvsd"); return tmp; } +#endif +#ifdef USE_GDK_PIXBUF + if( video_format == 'x') + { + const char *tmp = (const char*) strdup("PICT"); + return tmp; + } #endif return AVI_video_compressor(lav_file->avi_fd); } @@ -503,6 +572,10 @@ int lav_audio_channels(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_audio_channels(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 0; #endif return AVI_audio_channels(lav_file->avi_fd); } @@ -514,6 +587,10 @@ int lav_audio_bits(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_audio_bits(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x' ) + return 0; #endif return (AVI_audio_bits(lav_file->avi_fd)); } @@ -525,6 +602,10 @@ long lav_audio_rate(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format=='b') return rawdv_audio_rate(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 0; #endif return (AVI_audio_rate(lav_file->avi_fd)); } @@ -536,6 +617,10 @@ long lav_audio_clips(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format=='b') return rawdv_audio_bps(lav_file->dv_fd); +#endif +#ifdef USE_GDK_PIXBUF + 
if(video_format == 'x') + return 0; #endif return (AVI_audio_bytes(lav_file->avi_fd)/lav_file->bps); } @@ -546,6 +631,10 @@ long lav_frame_size(lav_file_t *lav_file, long frame) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_frame_size( lav_file->dv_fd ); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 1; #endif return (AVI_frame_size(lav_file->avi_fd,frame)); } @@ -556,6 +645,10 @@ int lav_seek_start(lav_file_t *lav_file) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_set_position( lav_file->dv_fd, 0 ); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 1; #endif return (AVI_seek_start(lav_file->avi_fd)); } @@ -566,6 +659,10 @@ int lav_set_video_position(lav_file_t *lav_file, long frame) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_set_position( lav_file->dv_fd, frame ); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 1; #endif return (AVI_set_video_position(lav_file->avi_fd,frame)); } @@ -573,28 +670,28 @@ int lav_set_video_position(lav_file_t *lav_file, long frame) int lav_read_frame(lav_file_t *lav_file, uint8_t *vidbuf) { video_format = lav_file->format; internal_error = 0; /* for error messages */ -#ifdef HAVE_MLT - if(lav_file->format == 't') - { - mlt_frame frame; - mlt_service_get_frame( mlt_producer( lav_file->producer ), &frame, 0); - mlt_properties_set( - mlt_frame_properties( frame ), "rescale.interp", "full"); - mlt_frame_get_image( frame, &_tmp_buffer, lav_file->iformat, lav_file->out_width, - lav_file->out_height , 0); - // convert to planar (mlt gives 422 packed) - // convert tmp_buffer to vidbuf , set flag as YUV 4:2:2 planar - } -#endif #ifdef SUPPORT_READ_DV2 if(lav_file->format == 'b') { return rawdv_read_frame( lav_file->dv_fd, vidbuf ); } +#endif +#ifdef USE_GDK_PIXBUF + if(lav_file->format == 'x') + return -1; #endif return (AVI_read_frame(lav_file->avi_fd,vidbuf)); } +#ifdef USE_GDK_PIXBUF +uint8_t *lav_get_frame_ptr( lav_file_t *lav_file ) +{ + if(lav_file->format == 'x') + return vj_picture_get( lav_file->picture ); + return NULL; +} +#endif + int lav_is_DV(lav_file_t *lav_file) { #ifdef SUPPORT_READ_DV2 @@ -611,6 +708,10 @@ int lav_set_audio_position(lav_file_t *lav_file, long clip) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return 0; +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 0; #endif return (AVI_set_audio_position(lav_file->avi_fd,clip*lav_file->bps)); } @@ -625,6 +726,10 @@ long lav_read_audio(lav_file_t *lav_file, uint8_t *audbuf, long samps) #ifdef SUPPORT_READ_DV2 if(video_format == 'b') return rawdv_read_audio_frame( lav_file->dv_fd, audbuf ); +#endif +#ifdef USE_GDK_PIXBUF + if(video_format == 'x') + return 0; #endif video_format = lav_file->format; internal_error = 0; /* for error messages */ return (AVI_read_audio(lav_file->avi_fd,audbuf,samps*lav_file->bps)/lav_file->bps); @@ -653,6 +758,9 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->avi_fd = 0; #ifdef SUPPORT_READ_DV2 lav_fd->dv_fd = 0; +#endif +#ifdef USE_GDK_PIXBUF + lav_fd->picture = NULL; #endif lav_fd->format = 0; lav_fd->interlacing = LAV_INTER_UNKNOWN; @@ -682,6 +790,21 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) } else if( AVI_errno==AVI_ERR_NO_AVI ) { + int ret = 0; +#ifdef USE_GDK_PIXBUF + lav_fd->picture = vj_picture_open( (const char*) filename, + output_scale_width, output_scale_height, output_yuv ); + if(lav_fd->picture) + { + lav_fd->format = 'x'; + lav_fd->has_audio = 0; + video_comp = strdup( "PICT" ); + ret = 
1; + } + else + { +#endif + #ifdef SUPPORT_READ_DV2 lav_fd->dv_fd = rawdv_open_input_file(filename,mmap_size); if(lav_fd->dv_fd) @@ -690,23 +813,34 @@ lav_file_t *lav_open_input_file(char *filename, int mmap_size) lav_fd->has_audio = 0; //(rawdv_audio_bits(lav_fd->dv_fd) > 0 ? 1:0); video_comp = rawdv_video_compressor( lav_fd->dv_fd ); + ret = 1; } - else - { -#endif - free(lav_fd); - internal_error = ERROR_FORMAT; /* Format not recognized */ - veejay_msg(VEEJAY_MSG_ERROR, "Unable to identify file"); - return 0; -#ifdef SUPPORT_READ_DV2 - } #endif +#ifdef USE_GDK_PIXBUF + } +#endif + if(ret == 0) + { + free(lav_fd); + internal_error = ERROR_FORMAT; /* Format not recognized */ + veejay_msg(VEEJAY_MSG_ERROR, "Unable to identify file '%s'", filename); + return 0; } + } lav_fd->bps = (lav_audio_channels(lav_fd)*lav_audio_bits(lav_fd)+7)/8; if(lav_fd->bps==0) lav_fd->bps=1; /* make it save since we will divide by that value */ + if(strncasecmp(video_comp, "PICT",4) == 0 ) + { + lav_fd->MJPG_chroma = (output_yuv == 1 ? CHROMA420: CHROMA422 ); + lav_fd->format = 'x'; + lav_fd->interlacing = LAV_NOT_INTERLACED; + veejay_msg(VEEJAY_MSG_DEBUG, "Playing image"); + return lav_fd; + } + if(strncasecmp(video_comp, "div3",4)==0) { lav_fd->MJPG_chroma = CHROMA420; lav_fd->format = 'D'; diff --git a/veejay-current/libel/lav_io.h b/veejay-current/libel/lav_io.h index 8ac204e9..3b2ad839 100644 --- a/veejay-current/libel/lav_io.h +++ b/veejay-current/libel/lav_io.h @@ -66,6 +66,9 @@ typedef struct #endif int jpeg_fd; char *jpeg_filename; +#ifdef USE_GDK_PIXBUF + void *picture; +#endif int format; int interlacing; int sar_w; /* "clip aspect ratio" width */ @@ -113,5 +116,8 @@ int lav_get_field_size(uint8_t * jpegdata, long jpeglen); const char *lav_strerror(void); int lav_fileno( lav_file_t *lav_file ); void lav_set_default_chroma(int c); - +#ifdef USE_GDK_PIXBUF +uint8_t *lav_get_frame_ptr( lav_file_t *lav_file ); +void lav_set_project( int w, int h, float fps, int shift ); +#endif #endif diff --git a/veejay-current/libel/vj-el.c b/veejay-current/libel/vj-el.c index 8fc2e4ac..27c8feac 100644 --- a/veejay-current/libel/vj-el.c +++ b/veejay-current/libel/vj-el.c @@ -360,6 +360,11 @@ int open_video_file(char *filename, editlist * el, int preserve_pathname, int de el->video_width = lav_video_width(el->lav_fd[n]); el->video_inter = lav_video_interlacing(el->lav_fd[n]); el->video_fps = lav_frame_rate(el->lav_fd[n]); +#ifdef USE_GDK_PIXBUF + lav_set_project( + el->video_width, el->video_height, el->video_fps , + el->pixel_format == FMT_420 ? 
1 :0); +#endif lav_video_clipaspect(el->lav_fd[n], &el->video_sar_width, &el->video_sar_height); @@ -484,8 +489,10 @@ int open_video_file(char *filename, editlist * el, int preserve_pathname, int de decoder_id = CODEC_ID_YUV420; if( strncasecmp("yv16", compr_type,4) == 0) decoder_id = CODEC_ID_YUV422; + if( strncasecmp("PICT", compr_type,4) == 0) + decoder_id = 0xffff; - if(decoder_id > 0) + if(decoder_id > 0 && decoder_id != 0xffff) { int c_i = _el_get_codec(decoder_id); if(c_i == -1) @@ -512,7 +519,7 @@ int open_video_file(char *filename, editlist * el, int preserve_pathname, int de if(decoder_id == 0) { - veejay_msg(VEEJAY_MSG_ERROR, "Dont know how to handle %s (fmt %d)", compr_type, pix_fmt); + veejay_msg(VEEJAY_MSG_ERROR, "Dont know how to handle %s (fmt %d) %x", compr_type, pix_fmt,decoder_id); if(realname) free(realname); if( el->video_file_list[n]) free( el->video_file_list[n] ); if( el->lav_fd[n] ) lav_close( el->lav_fd[n]); @@ -546,6 +553,7 @@ int vj_el_get_file_fourcc(editlist *el, int num, char *fourcc) if(compr == NULL) return 0; snprintf(fourcc,4,"%s", compr ); + fourcc[5] = '\0'; return 1; } @@ -570,24 +578,37 @@ int vj_el_get_video_frame(editlist *el, long nframe, uint8_t *dst[3], int pix_fm res = lav_set_video_position(el->lav_fd[N_EL_FILE(n)], N_EL_FRAME(n)); decoder_id = lav_video_compressor_type( el->lav_fd[N_EL_FILE(n)] ); - c_i = _el_get_codec( decoder_id ); - if(c_i >= 0 && c_i < MAX_CODECS) + if(decoder_id != 0xffff) { - d = el_codecs[c_i]; + c_i = _el_get_codec( decoder_id ); + if(c_i >= 0 && c_i < MAX_CODECS) + d = el_codecs[c_i]; + if(!d) + { + veejay_msg(VEEJAY_MSG_DEBUG, "Cannot find codec for id %d (%d)", decoder_id, + c_i); + return -1; + } } - if(!d) - { - veejay_msg(VEEJAY_MSG_DEBUG, "Cannot find codec for id %d (%d)", decoder_id, - c_i); - return -1; - } - if (res < 0) { veejay_msg(VEEJAY_MSG_ERROR,"Error setting video position: %s", lav_strerror()); } - res = lav_read_frame(el->lav_fd[N_EL_FILE(n)], d->tmp_buffer); + if(lav_filetype( el->lav_fd[N_EL_FILE(n)] ) != 'x') + res = lav_read_frame(el->lav_fd[N_EL_FILE(n)], d->tmp_buffer); + + if( decoder_id == 0xffff ) + { + uint8_t *p = lav_get_frame_ptr( el->lav_fd[N_EL_FILE(n)] ); + if(!p) return -1; + int len = el->video_width * el->video_height; + int uv_len = (el->video_width >> 1) * (el->video_height >> (pix_fmt == FMT_420 ? 
1:0)); + veejay_memcpy( dst[0], p, len ); + veejay_memcpy( dst[1], p + len, uv_len ); + veejay_memcpy( dst[2], p + len + uv_len, uv_len ); + return 1; + } if( decoder_id == CODEC_ID_YUV420 ) { /* yuv420 raw */ @@ -851,7 +872,9 @@ editlist *vj_el_init_with_args(char **filename, int num_files, int flags, int de uint64_t n =0; if(!el) return NULL; - +#ifdef USE_GDK_PIXBUF + vj_picture_init(); +#endif memset( el, 0, sizeof(editlist) ); el->pixel_format = -1; el->has_video = 1; //assume we get it @@ -1243,8 +1266,9 @@ char *vj_el_write_line_ascii( editlist *el, int *bytes_written ) if (index[j] >= 0 && el->video_file_list[j] != NULL) { char filename[400]; - char fourcc[5]; + char fourcc[6]; bzero(filename,400); + bzero(fourcc,6); sprintf(fourcc, "%s", "????"); vj_el_get_file_fourcc( el, j, fourcc ); sprintf(filename ,"%03d%s%04d%010ld%02d%s", @@ -1257,6 +1281,7 @@ char *vj_el_write_line_ascii( editlist *el, int *bytes_written ) ); sprintf(fourcc, "%04d", strlen( filename )); strncat( result, fourcc, strlen(fourcc )); + veejay_msg(VEEJAY_MSG_DEBUG, "%s:%s", fourcc, filename ); strncat ( result, filename, strlen(filename)); } } diff --git a/veejay-current/libstream/vj-tag.c b/veejay-current/libstream/vj-tag.c index 17dd2233..7ef7b908 100644 --- a/veejay-current/libstream/vj-tag.c +++ b/veejay-current/libstream/vj-tag.c @@ -40,6 +40,10 @@ #define VIDEO_PALETTE_YUV422P 13 #endif +#ifdef USE_GDK_PIXBUF +#include +#endif + #ifdef SUPPORT_READ_DV2 #include #endif @@ -308,7 +312,6 @@ int _vj_tag_new_v4l(vj_tag * tag, int stream_nr, int width, int height, return 1; } #endif - int _vj_tag_new_avformat( vj_tag *tag, int stream_nr, editlist *el) { int stop = 0; @@ -340,6 +343,30 @@ int _vj_tag_new_avformat( vj_tag *tag, int stream_nr, editlist *el) return 1; } +#ifdef USE_GDK_PIXBUF +int _vj_tag_new_picture( vj_tag *tag, int stream_nr, editlist *el) +{ + int stop = 0; + if(stream_nr < 0 || stream_nr > VJ_TAG_MAX_STREAM_IN) return 0; + vj_picture *p = NULL; + + if( vj_picture_probe( tag->source_name ) == 0 ) + return 0; + + p = (vj_picture*) vj_malloc(sizeof(vj_picture)); + if(!p) + return 0; + memset(p, 0, sizeof(vj_picture)); + + vj_tag_input->picture[stream_nr] = p; + + veejay_msg(VEEJAY_MSG_INFO, "Opened [%s] , %d x %d @ %2.2f fps ", + tag->source_name, + el->video_width, el->video_height, el->video_fps ); + + return 1; +} +#endif int _vj_tag_new_yuv4mpeg(vj_tag * tag, int stream_nr, editlist * el) { @@ -520,7 +547,7 @@ int vj_tag_new(int type, char *filename, int stream_nr, editlist * el, switch (type) { #ifdef HAVE_V4L case VJ_TAG_TYPE_V4L: - sprintf(tag->source_name, "/dev/%s", filename); + sprintf(tag->source_name, "/dev/%s/%d", filename,channel); if (_vj_tag_new_v4l (tag, stream_nr, w, h, el->video_norm, palette,0,channel ) != 1) return -1; @@ -534,7 +561,7 @@ int vj_tag_new(int type, char *filename, int stream_nr, editlist * el, break; case VJ_TAG_TYPE_DV1394: #ifdef SUPPORT_READ_DV2 - sprintf(tag->source_name, "/dev/dv1394"); + sprintf(tag->source_name, "/dev/dv1394/%d", channel); if( _vj_tag_new_dv1394( tag, stream_nr,channel,1,el ) == 0 ) { veejay_msg(VEEJAY_MSG_ERROR, "error opening dv1394"); @@ -552,6 +579,13 @@ int vj_tag_new(int type, char *filename, int stream_nr, editlist * el, return -1; tag->active = 1; break; +#ifdef USE_GDK_PIXBUF + case VJ_TAG_TYPE_PICTURE: + sprintf(tag->source_name, "%s", filename); + if( _vj_tag_new_picture(tag, stream_nr, el) != 1 ) + return -1; + break; +#endif case VJ_TAG_TYPE_YUV4MPEG: sprintf(tag->source_name, "%s", filename); if (_vj_tag_new_yuv4mpeg(tag, 
stream_nr, el) != 1) @@ -575,7 +609,7 @@ int vj_tag_new(int type, char *filename, int stream_nr, editlist * el, case VJ_TAG_TYPE_BLUE: */ case VJ_TAG_TYPE_COLOR: - sprintf(tag->source_name, "solid-[%d,%d,%d]", + sprintf(tag->source_name, "[%d,%d,%d]", tag->color_r,tag->color_g,tag->color_b ); tag->active = 1; break; @@ -679,6 +713,18 @@ int vj_tag_del(int id) veejay_msg(VEEJAY_MSG_INFO, "Closing avformat stream %s", tag->source_name); vj_avformat_close_input( vj_tag_input->avformat[tag->index]); break; +#ifdef USE_GDK_PIXBUF + case VJ_TAG_TYPE_PICTURE: + veejay_msg(VEEJAY_MSG_INFO, "Closing picture stream %s", tag->source_name); + vj_picture *pic = vj_tag_input->picture[tag->index]; + if(pic) + { + vj_picture_cleanup( pic->pic ); + free( pic ); + } + vj_tag_input->picture[tag->index] = NULL; + break; +#endif case VJ_TAG_TYPE_SHM: veejay_msg(VEEJAY_MSG_INFO, "huh ?"); break; @@ -1493,6 +1539,15 @@ int vj_tag_disable(int t1) { vj_client_close( vj_tag_input->net[tag->index] ); veejay_msg(VEEJAY_MSG_DEBUG, "Disconnected from %s", tag->source_name); } + if(tag->source_type == VJ_TAG_TYPE_PICTURE ) + { + vj_picture *pic = vj_tag_input->picture[tag->index]; + if(pic) + { + vj_picture_cleanup( pic->pic ); + } + vj_tag_input->picture[tag->index] = pic; + } tag->active = 0; if(!vj_tag_update(tag,t1)) return -1; return 1; @@ -1502,6 +1557,8 @@ int vj_tag_enable(int t1) { vj_tag *tag = vj_tag_get(t1); if(!tag) return -1; + veejay_msg(VEEJAY_MSG_INFO, "Enable stream %d", t1 ); + if(tag->active ) { veejay_msg(VEEJAY_MSG_INFO, "Already active"); @@ -1535,7 +1592,19 @@ int vj_tag_enable(int t1) { veejay_msg(VEEJAY_MSG_DEBUG, "Streaming from %s", tag->source_name ); } + if( tag->source_type == VJ_TAG_TYPE_PICTURE ) + { + vj_picture *p = vj_tag_input->picture[ tag->index ]; + p->pic = vj_picture_open( tag->source_name, + vj_tag_input->width, vj_tag_input->height, + vj_tag_input->pix_fmt == FMT_420 ? 
1:0); + if(!p->pic) + return -1; + + vj_tag_input->picture[tag->index] = p; + veejay_msg(VEEJAY_MSG_DEBUG, "Streaming from picture '%s'", tag->source_name ); + } tag->active = 1; if(!vj_tag_update(tag,t1)) return -1; @@ -1591,6 +1660,7 @@ int vj_tag_set_active(int t1, int active) break; case VJ_TAG_TYPE_MCAST: case VJ_TAG_TYPE_NET: + case VJ_TAG_TYPE_PICTURE: if(active == 1 ) vj_tag_enable( t1 ); else @@ -1751,7 +1821,7 @@ void vj_tag_get_source_name(int t1, char *dst) if (tag) { sprintf(dst, tag->source_name); } else { - sprintf(dst, "error in tag %d", t1); + vj_tag_get_description( tag->source_type, dst ); } } @@ -1782,6 +1852,11 @@ void vj_tag_get_by_type(int type, char *description ) case VJ_TAG_TYPE_AVFORMAT: sprintf(description, "%s", "AVFormat"); break; +#ifdef USE_GDK_PIXBUF + case VJ_TAG_TYPE_PICTURE: + sprintf(description, "%s", "GdkPixbuf"); + break; +#endif #ifdef HAVE_V4L case VJ_TAG_TYPE_V4L: sprintf(description, "%s", "Video4Linux"); @@ -1954,7 +2029,8 @@ int vj_tag_get_audio_frame(int t1, uint8_t *dst_buffer) #endif if(tag->source_type == VJ_TAG_TYPE_AVFORMAT) return (vj_avformat_get_audio( vj_tag_input->avformat[tag->index], dst_buffer, -1 )); - return 0; + + return 0; } @@ -1969,6 +2045,9 @@ int vj_tag_get_frame(int t1, uint8_t *buffer[3], uint8_t * abuffer) int uv_len = (vj_tag_input->width * vj_tag_input->height); int len = (width * height); char buf[10]; +#ifdef USE_GDK_PIXBUF + vj_picture *p = NULL; +#endif vj_client *v; if(!tag) return -1; @@ -2000,6 +2079,21 @@ int vj_tag_get_frame(int t1, uint8_t *buffer[3], uint8_t * abuffer) return -1; break; +#endif +#ifdef USE_GDK_PIXBUF + case VJ_TAG_TYPE_PICTURE: + p = vj_tag_input->picture[tag->index]; + if(!p) + { + veejay_msg(VEEJAY_MSG_ERROR, "Picture never opened"); + vj_tag_disable(t1); + return -1; + } + address = vj_picture_get( p->pic ); + veejay_memcpy(buffer[0],address, len); + veejay_memcpy(buffer[1],address + len, uv_len); + veejay_memcpy(buffer[2],address + len + uv_len, uv_len); + break; #endif case VJ_TAG_TYPE_AVFORMAT: if(!vj_avformat_get_video_frame( vj_tag_input->avformat[tag->index], buffer, -1, diff --git a/veejay-current/libstream/vj-tag.h b/veejay-current/libstream/vj-tag.h index bfb986de..2de74273 100644 --- a/veejay-current/libstream/vj-tag.h +++ b/veejay-current/libstream/vj-tag.h @@ -19,7 +19,7 @@ #ifndef VJ_TAG_H #define VJ_TAG_H - +#define VJ_TAG_TYPE_PICTURE 5 #define VJ_TAG_TYPE_COLOR 4 #define VJ_TAG_TYPE_VLOOPBACK 3 #define VJ_TAG_TYPE_V4L 2 @@ -29,7 +29,7 @@ #define VJ_TAG_TYPE_NET 13 #define VJ_TAG_TYPE_MCAST 14 #define VJ_TAG_MAX_V4L 16 -#define VJ_TAG_MAX_STREAM_IN 16 +#define VJ_TAG_MAX_STREAM_IN 255 #define VJ_TAG_TYPE_DV1394 17 #define VJ_TAG_TYPE_AVFORMAT 12 #define TAG_MAX_DESCR_LEN 150 @@ -42,6 +42,12 @@ #include #include #include +#ifdef USE_GDK_PIXBUF +typedef struct +{ + void *pic; +} vj_picture; +#endif typedef struct { #ifdef HAVE_V4L @@ -51,6 +57,9 @@ typedef struct { vj_avformat *avformat[VJ_TAG_MAX_STREAM_IN]; vj_client *net[VJ_TAG_MAX_STREAM_IN]; vj_dv1394 *dv1394[VJ_TAG_MAX_STREAM_IN]; +#ifdef USE_GDK_PIXBUF + vj_picture *picture[VJ_TAG_MAX_STREAM_IN]; +#endif int width; int height; int depth; diff --git a/veejay-current/libvjmsg/vj-common.c b/veejay-current/libvjmsg/vj-common.c index 871bcc4d..235f53ca 100644 --- a/veejay-current/libvjmsg/vj-common.c +++ b/veejay-current/libvjmsg/vj-common.c @@ -230,6 +230,35 @@ void veejay_reap_messages(void) } +int veejay_get_file_ext( char *file, char *dst, int dlen) +{ + int len = strlen(file)-1; + int i = 0; + char tmp[dlen]; + 
bzero(tmp,dlen); + while(len) + { + if(file[len] == '.') + { + if(i==0) return 0; + int j; + int k = 0; + for(j = i-1; j >= 0;j--) + { + dst[k] = tmp[j]; + k ++; + } + return 1; + } + tmp[i] = file[len]; + i++; + if( i >= dlen) + return 0; + len --; + } + return 0; +} + void veejay_strrep(char *s, char delim, char tok) { unsigned int i; diff --git a/veejay-current/libvjmsg/vj-common.h b/veejay-current/libvjmsg/vj-common.h index 108a7a43..317d818a 100644 --- a/veejay-current/libvjmsg/vj-common.h +++ b/veejay-current/libvjmsg/vj-common.h @@ -41,5 +41,6 @@ extern int veejay_keep_messages(void); extern void veejay_reap_messages(void); extern char *veejay_pop_messages(int *num_lines, int *total_len); extern void get_cache_line_size(void); +extern int veejay_get_file_ext( char *file, char *dst, int dlen); extern void veejay_chomp_str( char *str, int *dlen ); #endif diff --git a/veejay-current/share/gveejay.glade b/veejay-current/share/gveejay.glade index e00c40d2..227de578 100644 --- a/veejay-current/share/gveejay.glade +++ b/veejay-current/share/gveejay.glade @@ -792,7 +792,28 @@ 0 False - True + False + + + + + + 30 + True + wbmp,wmf,jpeg,ani,bmp,gif,ico,pcx,png,pnm,ras,tga,xmb,tiff,xpm,svg + True + True + True + 4 + jpeg + False + * + False + + + 0 + False + False @@ -8127,7 +8148,7 @@ YV16 True True - False + True 0 @@ -8163,51 +8184,6 @@ YV16 - - - True - True - use YUV4MPEG - True - GTK_RELIEF_NORMAL - True - False - False - True - - - 0 - 1 - 0 - 1 - fill - - - - - - - True - True - use FFmpeg - True - GTK_RELIEF_NORMAL - True - False - False - True - inputstream_filey4m - - - 0 - 1 - 1 - 2 - fill - - - - True @@ -8305,6 +8281,124 @@ YV16 + + + + True + 0 + 0.5 + GTK_SHADOW_ETCHED_IN + + + + True + 0.5 + 0.5 + 1 + 1 + 0 + 0 + 12 + 0 + + + + True + False + 0 + + + + True + True + use YUV4MPEG + True + GTK_RELIEF_NORMAL + True + False + False + True + + + 0 + False + False + + + + + + True + True + use FFmpeg + True + GTK_RELIEF_NORMAL + True + False + False + True + inputstream_filey4m + + + 0 + False + False + + + + + + True + True + use picture + True + GTK_RELIEF_NORMAL + True + False + False + True + inputstream_filey4m + + + 0 + False + False + + + + + + + + + + True + Types + False + True + GTK_JUSTIFY_LEFT + False + False + 0.5 + 0.5 + 0 + 0 + + + label_item + + + + + 0 + 1 + 0 + 2 + 3 + fill + fill + + diff --git a/veejay-current/tools/sayVIMS.c b/veejay-current/tools/sayVIMS.c index ac751a4a..ee3c5a19 100644 --- a/veejay-current/tools/sayVIMS.c +++ b/veejay-current/tools/sayVIMS.c @@ -47,6 +47,7 @@ static void vj_flush(int frames) { char status[100]; int bytes = 100; bzero(status,100); + while(frames>0) { if( vj_client_poll(sayvims, V_STATUS )) { @@ -254,8 +255,11 @@ int main(int argc, char *argv[]) vj_flush(1); } if ( interactive ) + { + vj_client_close( sayvims ); + vj_client_free(sayvims ); return 0; - + } if(single_msg || (optind == 1 && err == 0 && argc > 1 )) { char **msg = argv + optind; @@ -283,37 +287,36 @@ int main(int argc, char *argv[]) } i++; } - - vj_client_close(sayvims); - vj_client_free(sayvims); - return 0; } else { - /* read from stdin*/ - int not_done = 1; - infile = fdopen( fd_in, "r" ); - if(!infile) - { - return 0; - } - while( fgets(buf, 4096, infile) ) - { - if( buf[0] == '+' ) + /* read from stdin*/ + int not_done = 1; + infile = fdopen( fd_in, "r" ); + if(!infile) { - int wait_ = 1; - - if(!sscanf( buf+1, "%d", &wait_ ) ) + return 0; + } + while( fgets(buf, 4096, infile) ) + { + if( buf[0] == '+' ) { - return 0; + int wait_ = 1; + + if(!sscanf( buf+1, 
"%d", &wait_ ) ) + { + return 0; + } + vj_flush( wait_ ); + } + else + { + vj_client_send( sayvims, V_CMD, buf ); } - vj_flush( wait_ ); - } - else - { - vj_client_send( sayvims, V_CMD, buf ); } } - } + veejay_msg(VEEJAY_MSG_INFO, "closing ..."); + vj_client_close(sayvims); + vj_client_free(sayvims); return 0; } diff --git a/veejay-current/veejay/Makefile.am b/veejay-current/veejay/Makefile.am index e45a3b18..234d1413 100644 --- a/veejay-current/veejay/Makefile.am +++ b/veejay-current/veejay/Makefile.am @@ -55,7 +55,7 @@ libveejay_la_LDFLAGS = $(VEEJAY_ALL_LIB_OPTS) \ -L$(top_builddir)/libsamplerec -lsamplerec \ ${SDL_LIBS} ${DIRECTFB_LIBS} ${PTHREAD_LIBS} \ ${XML_LIBS} ${JPEG_LIBS} ${JACK_LIBS} \ - ${LIBDV_LIBS} ${LIBM_LIBS}\ + ${LIBDV_LIBS} ${LIBM_LIBS} ${PIXBUF_LIBS}\ -DDYNAMIC -O3 -Wall -rdynamic # ********************************************************************* diff --git a/veejay-current/veejay/liblavplayvj.c b/veejay-current/veejay/liblavplayvj.c index cfd42c1f..948306b0 100644 --- a/veejay-current/veejay/liblavplayvj.c +++ b/veejay-current/veejay/liblavplayvj.c @@ -674,7 +674,8 @@ void veejay_change_playback_mode( veejay_t *info, int new_pm, int sample_id ) if( info->uc->playback_mode == VJ_PLAYBACK_MODE_TAG ) { int cur_id = info->uc->sample_id; - if( vj_tag_get_type( cur_id ) == VJ_TAG_TYPE_NET && cur_id != sample_id ) + int type = vj_tag_get_type( cur_id ); + if( (type == VJ_TAG_TYPE_NET||type==VJ_TAG_TYPE_PICTURE) && cur_id != sample_id ) { vj_tag_disable(cur_id); } @@ -946,14 +947,34 @@ static int veejay_screen_update(veejay_t * info ) vj_perform_get_primary_frame(info,frame,0); -#ifdef HAVE_JPEG +#ifdef HAVE_JPEG || USE_GDK_PIXBUF /* dirty hack to save a frame to jpeg */ if (info->uc->hackme == 1) { - vj_perform_screenshot2(info, frame); info->uc->hackme = 0; - free(info->uc->filename); - } + +#ifdef USE_GDK_PIXBUF + if(vj_picture_save( info->settings->export_image, frame, + info->video_output_width, info->video_output_height, + (info->pixel_format == FMT_420 ? 
1 : 0 ) ) ) + { + veejay_msg(VEEJAY_MSG_INFO, + "Saved frame %ld to image", info->settings->current_frame_num ); + } + else + { + veejay_msg(VEEJAY_MSG_ERROR, + "Error writing frame %ld to image", + info->settings->current_frame_num ); + } +#else +#ifdef HAVE_JPEG + vj_perform_screenshot2(info, frame); + if(info->uc->filename) free(info->uc->filename); +#endif +#endif + + } #endif /* hack to write YCbCr data to stream*/ diff --git a/veejay-current/veejay/vims.h b/veejay-current/veejay/vims.h index a14a5b0b..84b7df3e 100644 --- a/veejay-current/veejay/vims.h +++ b/veejay-current/veejay/vims.h @@ -145,9 +145,12 @@ enum { #endif VIMS_STREAM_NEW_COLOR = 242, VIMS_STREAM_NEW_Y4M = 243, - VIMS_STREAM_NEW_AVFORMAT = 244, + VIMS_STREAM_NEW_AVFORMAT = 244, VIMS_STREAM_NEW_UNICAST = 245, VIMS_STREAM_NEW_MCAST = 246, +#ifdef USE_GDK_PIXBUF + VIMS_STREAM_NEW_PICTURE = 247, +#endif VIMS_STREAM_OFFLINE_REC_START = 228, VIMS_STREAM_OFFLINE_REC_STOP = 229, VIMS_STREAM_REC_START = 230, diff --git a/veejay-current/veejay/vj-event.c b/veejay-current/veejay/vj-event.c index ab84a3de..83d44413 100644 --- a/veejay-current/veejay/vj-event.c +++ b/veejay-current/veejay/vj-event.c @@ -45,6 +45,10 @@ #endif #include +#ifdef USE_GDK_PIXBUF +#include +#endif + /* Highest possible SDL Key identifier */ #define MAX_SDL_KEY (3 * SDLK_LAST) + 1 #define MSG_MIN_LEN 4 /* stripped ';' */ @@ -401,6 +405,10 @@ static struct { vj_event_tag_new_mcast, 2, "%d %s", {0,0}, VIMS_LONG_PARAMS | VIMS_REQUIRE_ALL_PARAMS }, { VIMS_STREAM_NEW_AVFORMAT, "Stream: open file as stream with FFmpeg", vj_event_tag_new_avformat, 1, "%s", {0,0}, VIMS_LONG_PARAMS | VIMS_REQUIRE_ALL_PARAMS }, +#ifdef USE_GDK_PIXBUF + { VIMS_STREAM_NEW_PICTURE, "Stream: open image from file", + vj_event_tag_new_picture, 1, "%s", {0,0}, VIMS_LONG_PARAMS | VIMS_REQUIRE_ALL_PARAMS }, +#endif { VIMS_STREAM_OFFLINE_REC_START, "Stream: start record from an invisible stream", vj_event_tag_rec_offline_start, 3, "%d %d %d", {0,0}, VIMS_REQUIRE_ALL_PARAMS }, { VIMS_STREAM_OFFLINE_REC_STOP, "Stream: stop record from an invisible stream", @@ -525,9 +533,14 @@ static struct { { VIMS_BUNDLE_ATTACH_KEY, "Attach/Detach a Key to VIMS event", vj_event_attach_detach_key, 4, "%d %d %d %s", {0,0}, VIMS_ALLOW_ANY }, #endif +#ifdef USE_GDK_PIXBUF + { VIMS_SCREENSHOT, "Various: Save image to file", + vj_event_screenshot, 3, "%d %d %s", {0,0}, VIMS_LONG_PARAMS | VIMS_REQUIRE_ALL_PARAMS }, +#else #ifdef HAVE_JPEG - { VIMS_SCREENSHOT, "Various: Save frame to jpeg", - vj_event_screenshot, 1, "%s", {0,0}, VIMS_LONG_PARAMS | VIMS_ALLOW_ANY }, + { VIMS_SCREENSHOT, "Various: Save file to jpeg", + vj_event_screenshot, 3, "%d %d %s", {0,0}, VIMS_LONG_PARAMS | VIMS_REQUIRE_ALL_PARAMS }, +#endif #endif { VIMS_CHAIN_TOGGLE_ALL, "Toggle Effect Chain on all samples or streams", vj_event_all_samples_chain_toggle,1, "%d", {0,0} , VIMS_REQUIRE_ALL_PARAMS }, @@ -614,7 +627,6 @@ vj_server_send(v->vjs[3], v->uc->current_link,str,strlen(str));\ #define p_invalid_mode() { veejay_msg(VEEJAY_MSG_DEBUG, "Invalid playback mode for this action"); } #define v_chi(v) ( (v < 0 || v >= SAMPLE_MAX_EFFECTS ) ) -/* P_A: Parse Arguments. 
This macro is used in many functions */ #define P_A(a,b,c,d)\ {\ int __z = 0;\ @@ -5972,6 +5984,21 @@ void vj_event_tag_toggle(void *ptr, const char format[], va_list ap) } } +#ifdef USE_GDK_PIXBUF +void vj_event_tag_new_picture(void *ptr, const char format[], va_list ap) +{ + veejay_t *v = (veejay_t*) ptr; + char str[255]; + int *args = NULL; + P_A(args,str,format,ap); + + if( veejay_create_tag(v, VJ_TAG_TYPE_PICTURE, str, v->nstreams,0,0) == -1) + { + veejay_msg(VEEJAY_MSG_INFO, "Unable to create new Picture stream"); + } +} +#endif + void vj_event_tag_new_avformat(void *ptr, const char format[], va_list ap) { veejay_t *v = (veejay_t*) ptr; @@ -5979,7 +6006,7 @@ void vj_event_tag_new_avformat(void *ptr, const char format[], va_list ap) int *args = NULL; P_A(args,str,format,ap); - if( veejay_create_tag(v, VJ_TAG_TYPE_AVFORMAT, str, v->nstreams,0,0) != 0) + if( veejay_create_tag(v, VJ_TAG_TYPE_AVFORMAT, str, v->nstreams,0,0) == -1) { veejay_msg(VEEJAY_MSG_INFO, "Unable to create new FFmpeg stream"); } @@ -5994,7 +6021,7 @@ void vj_event_tag_new_dv1394(void *ptr, const char format[], va_list ap) if(args[0] == -1) args[0] = 63; veejay_msg(VEEJAY_MSG_DEBUG, "Try channel %d", args[0]); - if( veejay_create_tag(v, VJ_TAG_TYPE_DV1394, "/dev/dv1394", v->nstreams,0, args[0]) != 0) + if( veejay_create_tag(v, VJ_TAG_TYPE_DV1394, "/dev/dv1394", v->nstreams,0, args[0]) == -1) { veejay_msg(VEEJAY_MSG_INFO, "Unable to create new DV1394 stream"); } @@ -6012,7 +6039,7 @@ void vj_event_tag_new_v4l(void *ptr, const char format[], va_list ap) sprintf(filename, "video%d", args[0]); - if( veejay_create_tag(v, VJ_TAG_TYPE_V4L, filename, v->nstreams,0,args[1]) != 0) + if( veejay_create_tag(v, VJ_TAG_TYPE_V4L, filename, v->nstreams,0,args[1]) == -1) { veejay_msg(VEEJAY_MSG_INFO, "Unable to create new Video4Linux stream "); } @@ -6076,7 +6103,7 @@ void vj_event_tag_new_y4m(void *ptr, const char format[], va_list ap) char str[255]; int *args = NULL; P_A(args,str,format,ap); - if( veejay_create_tag(v, VJ_TAG_TYPE_YUV4MPEG, str, v->nstreams,0,0) != 0) + if( veejay_create_tag(v, VJ_TAG_TYPE_YUV4MPEG, str, v->nstreams,0,0) == -1) { veejay_msg(VEEJAY_MSG_INFO, "Unable to create new Yuv4mpeg stream"); } @@ -7102,7 +7129,8 @@ void vj_event_send_tag_list ( void *ptr, const char format[], va_list ap ) char cmd[300]; bzero(source_name,200);bzero(cmd,255); bzero(line,300); - vj_tag_get_description( i, source_name ); + //vj_tag_get_description( i, source_name ); + vj_tag_get_source_name( i, source_name ); sprintf(line,"%05d%02d%03d%03d%03d%03d%03d%s", i, vj_tag_get_type(i), @@ -7783,23 +7811,49 @@ void vj_event_set_stream_color(void *ptr, const char format[], va_list ap) } } +#ifdef USE_GDK_PIXBUF +void vj_event_screenshot(void *ptr, const char format[], va_list ap) +{ + int args[4]; + char filename[1024]; + bzero(filename,1024); + P_A(args, filename, format, ap ); + veejay_t *v = (veejay_t*) ptr; + + char type[5]; + bzero(type,5); + + + veejay_get_file_ext( filename, type, sizeof(type)); + + if(args[0] == 0 ) + args[0] = v->video_output_width; + if(args[1] == 0 ) + args[1] = v->video_output_height; + + v->settings->export_image = + vj_picture_prepare_save( filename , type, args[0], args[1] ); + if(v->settings->export_image) + v->uc->hackme = 1; +} +#else #ifdef HAVE_JPEG void vj_event_screenshot(void *ptr, const char format[], va_list ap) { - int *args = NULL; - char s[1024]; - bzero(s,1024); + int args[4]; + char filename[1024]; + bzero(filename,1024); + P_A(args, filename, format, ap ); + veejay_t *v = (veejay_t*) ptr; - 
P_A(args,s,format,ap); - - veejay_t *v = (veejay_t*)ptr; - v->uc->hackme = 1; - if( strncasecmp(s, "(NULL)", 6) == 0 ) - v->uc->filename = NULL; - else - v->uc->filename = strdup( s ); - + v->uc->hackme = 1; + v->uc->filename = strdup( filename ); } +#endif +#endif + + + void vj_event_quick_bundle( void *ptr, const char format[], va_list ap) { @@ -7873,4 +7927,3 @@ void vj_event_vloopback_stop( void *ptr, const char format[], va_list ap ) #endif -#endif diff --git a/veejay-current/veejay/vj-event.h b/veejay-current/veejay/vj-event.h index dca3fdbf..8dc626dd 100644 --- a/veejay-current/veejay/vj-event.h +++ b/veejay-current/veejay/vj-event.h @@ -148,6 +148,9 @@ void vj_event_sample_set_dup ( void *ptr, const char format[], va_list ap ); void vj_event_tag_del ( void *ptr, const char format[], va_list ap ); void vj_event_tag_new_raw ( void *ptr, const char format[], va_list ap ); void vj_event_tag_new_avformat ( void *ptr, const char format[], va_list ap ); +#ifdef USE_GDK_PIXBUF +void vj_event_tag_new_picture ( void *ptr, const char format[], va_list ap ); +#endif void vj_event_tag_new_v4l ( void *ptr, const char format[], va_list ap ); #ifdef SUPPORT_READ_DV2 void vj_event_tag_new_dv1394 ( void *ptr, const char format[], va_list ap ); diff --git a/veejay-current/veejay/vj-lib.h b/veejay-current/veejay/vj-lib.h index dc447562..42126ccc 100644 --- a/veejay-current/veejay/vj-lib.h +++ b/veejay-current/veejay/vj-lib.h @@ -179,6 +179,7 @@ typedef struct { int crop; VJRectangle viewport; vj_rand_player randplayer; + void *export_image; } video_playback_setup; diff --git a/veejay-current/veejay/vj-perform.c b/veejay-current/veejay/vj-perform.c index e07fb008..8a29b552 100644 --- a/veejay-current/veejay/vj-perform.c +++ b/veejay-current/veejay/vj-perform.c @@ -557,6 +557,10 @@ int vj_perform_init(veejay_t * info) effect_sampler = subsample_init( w ); +#ifdef USE_GDK_PIXBUF + vj_picture_init(); +#endif + return 1; } @@ -1613,10 +1617,11 @@ int vj_perform_apply_secundary_tag(veejay_t * info, int sample_id, case VJ_TAG_TYPE_AVFORMAT: case VJ_TAG_TYPE_NET: case VJ_TAG_TYPE_MCAST: - case VJ_TAG_TYPE_COLOR: + case VJ_TAG_TYPE_PICTURE: + case VJ_TAG_TYPE_COLOR: centry = vj_perform_tag_is_cached(chain_entry, entry, sample_id); if (centry == -1) { /* not cached */ - if( (type == VJ_TAG_TYPE_NET||type==VJ_TAG_TYPE_MCAST) && vj_tag_get_active(sample_id)==0) + if( (type == VJ_TAG_TYPE_NET||type==VJ_TAG_TYPE_MCAST||type==VJ_TAG_TYPE_PICTURE) && vj_tag_get_active(sample_id)==0) vj_tag_set_active(sample_id, 1); if (vj_tag_get_active(sample_id) == 1 ) @@ -1759,9 +1764,10 @@ int vj_perform_apply_secundary(veejay_t * info, int sample_id, int type, case VJ_TAG_TYPE_NET: case VJ_TAG_TYPE_MCAST: case VJ_TAG_TYPE_COLOR: + case VJ_TAG_TYPE_PICTURE: centry = vj_perform_tag_is_cached(chain_entry, entry, sample_id); // is it cached? 
if (centry == -1) { // no it is not - if( (type == VJ_TAG_TYPE_NET||type==VJ_TAG_TYPE_MCAST) && vj_tag_get_active(sample_id)==0) + if( (type == VJ_TAG_TYPE_NET||type==VJ_TAG_TYPE_MCAST||type==VJ_TAG_TYPE_PICTURE) && vj_tag_get_active(sample_id)==0) vj_tag_set_active(sample_id, 1 ); if (vj_tag_get_active(sample_id) == 1) { // if it is active (playing) @@ -2513,7 +2519,7 @@ int vj_perform_tag_fill_buffer(veejay_t * info, int entry) type = vj_tag_get_type( info->uc->sample_id ); active = vj_tag_get_active(info->uc->sample_id ); - if( (type == VJ_TAG_TYPE_NET || type == VJ_TAG_TYPE_MCAST ) && active == 0) + if( (type == VJ_TAG_TYPE_NET || type == VJ_TAG_TYPE_MCAST || type == VJ_TAG_TYPE_PICTURE ) && active == 0) { vj_tag_enable( info->uc->sample_id ); }
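-- 
The new libel/pixbuf.c added to libel_la_SOURCES above is not part of this patch, so the vj_picture_* calls used by lav_io.c, vj-el.c and vj-tag.c (vj_picture_open, vj_picture_get, vj_picture_get_width/height, vj_picture_cleanup, vj_picture_init) appear here only as callers. The sketch below is a rough, hypothetical outline of the load path, under two assumptions grounded in the diff: GdkPixbuf does the decoding and scaling, and the pointer handed back by vj_picture_get()/lav_get_frame_ptr() is one contiguous planar YCbCr buffer (Y plane, then Cb, then Cr), which is exactly how the 'x' reader in vj_el_get_video_frame() copies it into dst[0..2]. The function name load_image_as_yuv and the conversion details are illustrative, not the actual pixbuf.c implementation.

/*
 * Hypothetical sketch (not the shipped libel/pixbuf.c): load an image with
 * GdkPixbuf, scale it to the project resolution and convert it to a single
 * contiguous planar YCbCr buffer, matching what the 'x' file type expects.
 * vj_picture_init() presumably does one-time setup such as g_type_init().
 * Assumes even w/h; chroma is taken from the nearest sample, no averaging.
 */
#include <stdint.h>
#include <stdlib.h>
#include <gdk-pixbuf/gdk-pixbuf.h>

static uint8_t clamp8(int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); }

/* Returns Y plane (w*h), then Cb, then Cr; 4:2:0 when yuv420 != 0,
 * 4:2:2 otherwise.  NULL on failure; caller frees. */
uint8_t *load_image_as_yuv(const char *filename, int w, int h, int yuv420)
{
    GError *err = NULL;
    GdkPixbuf *img = gdk_pixbuf_new_from_file(filename, &err);
    if (!img) {
        if (err) g_error_free(err);
        return NULL;
    }
    GdkPixbuf *scaled = gdk_pixbuf_scale_simple(img, w, h, GDK_INTERP_BILINEAR);
    g_object_unref(img);
    if (!scaled)
        return NULL;

    const guchar *rgb = gdk_pixbuf_get_pixels(scaled);
    int stride = gdk_pixbuf_get_rowstride(scaled);
    int chans  = gdk_pixbuf_get_n_channels(scaled);   /* 3 (RGB) or 4 (RGBA) */

    int uv_h   = yuv420 ? h / 2 : h;                  /* chroma plane height */
    int uv_len = (w / 2) * uv_h;
    uint8_t *out = malloc(w * h + 2 * uv_len);
    if (!out) {
        g_object_unref(scaled);
        return NULL;
    }
    uint8_t *Y = out, *Cb = out + w * h, *Cr = Cb + uv_len;

    for (int y = 0; y < h; y++) {
        const guchar *row = rgb + y * stride;
        for (int x = 0; x < w; x++) {
            int r = row[x * chans], g = row[x * chans + 1], b = row[x * chans + 2];
            /* integer BT.601 RGB -> YCbCr */
            Y[y * w + x] = clamp8((66 * r + 129 * g + 25 * b + 128) / 256 + 16);
            if ((x & 1) == 0 && (!yuv420 || (y & 1) == 0)) {
                int cy = yuv420 ? y / 2 : y;
                Cb[cy * (w / 2) + x / 2] =
                    clamp8((-38 * r - 74 * g + 112 * b + 128) / 256 + 128);
                Cr[cy * (w / 2) + x / 2] =
                    clamp8((112 * r - 94 * g - 18 * b + 128) / 256 + 128);
            }
        }
    }
    g_object_unref(scaled);
    return out;   /* what lav_get_frame_ptr() would hand out for format 'x' */
}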
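The changed VIMS 330 now takes a width, a height and a filename instead of a bare jpeg request; the gveejay callback above builds it via multi_vims(VIMS_SCREENSHOT, "%d %d %s", 0, 0, filename), presumably serialized on the wire as "330:0 0 frame-1.png;", and the new VIMS 247 ("247:/path/to/image.png;") opens an image as a stream. On the veejay side the output format is picked from the file extension via the new veejay_get_file_ext(), and the actual writing goes through vj_picture_prepare_save()/vj_picture_save() in the same not-included pixbuf.c. The sketch below shows, under the same assumptions as above, how the save side could convert a planar YCbCr frame back to RGB and hand it to gdk_pixbuf_save(), with the extension string ("png", "jpeg", ...) doubling as the GdkPixbuf writer type; names and coefficients are illustrative only.

/*
 * Hypothetical sketch of the save side of VIMS 330 (not the shipped code):
 * planar YCbCr -> RGB -> gdk_pixbuf_save().  frame[0..2] = Y, Cb, Cr;
 * yuv420 selects 4:2:0 vs 4:2:2 chroma layout.  Assumes even w/h.
 */
#include <stdint.h>
#include <gdk-pixbuf/gdk-pixbuf.h>

static int clampc(int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); }

int save_yuv_frame(uint8_t *frame[3], int w, int h, int yuv420,
                   const char *filename, const char *type)
{
    GdkPixbuf *pix = gdk_pixbuf_new(GDK_COLORSPACE_RGB, FALSE, 8, w, h);
    if (!pix)
        return 0;
    guchar *rgb = gdk_pixbuf_get_pixels(pix);
    int stride  = gdk_pixbuf_get_rowstride(pix);

    for (int y = 0; y < h; y++) {
        int cy = yuv420 ? y / 2 : y;
        for (int x = 0; x < w; x++) {
            int Y  = frame[0][y * w + x] - 16;
            int Cb = frame[1][cy * (w / 2) + x / 2] - 128;
            int Cr = frame[2][cy * (w / 2) + x / 2] - 128;
            guchar *p = rgb + y * stride + x * 3;
            /* integer BT.601 YCbCr -> RGB */
            p[0] = clampc((298 * Y + 409 * Cr + 128) / 256);            /* R */
            p[1] = clampc((298 * Y - 100 * Cb - 208 * Cr + 128) / 256); /* G */
            p[2] = clampc((298 * Y + 516 * Cb + 128) / 256);            /* B */
        }
    }

    GError *err = NULL;
    /* "type" is the GdkPixbuf writer name, e.g. "png" or "jpeg"; the
     * varargs are NULL-terminated key/value options (none used here). */
    gboolean ok = gdk_pixbuf_save(pix, filename, type, &err, NULL);
    if (err) g_error_free(err);
    g_object_unref(pix);
    return ok ? 1 : 0;
}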