Implement ticket #46: fixed streaming to STDOUT (junk removed), removed obsolete command-line option -X, added -C %d:%d:%d:%d for cropping before scaling, error messages are printed to STDERR, updated man page

git-svn-id: svn://code.dyne.org/veejay/trunk@338 eb8d1916-c9e9-0310-b8de-cf0c9472ead5
Niels Elburg
2005-06-06 21:24:35 +00:00
parent 3a3fed661a
commit 241a486215
12 changed files with 437 additions and 183 deletions

View File

@@ -67,10 +67,6 @@
#define TRACE(...) do{}while(0)
#endif
#define ERR(...) fprintf(OUTFILE, "ERR: %s:", __FUNCTION__),\
fprintf(OUTFILE, __VA_ARGS__), \
fflush(OUTFILE);
#define min(a,b) (((a) < (b)) ? (a) : (b))
#define max(a,b) (((a) < (b)) ? (b) : (a))
@@ -283,7 +279,7 @@ static void sample_move_d16_d16(short *dst, short *src,
if(!nSrcChannels && !nDstChannels)
{
ERR("nSrcChannels of %d, nDstChannels of %d, can't have zero channels\n", nSrcChannels, nDstChannels);
//ERR("nSrcChannels of %d, nDstChannels of %d, can't have zero channels\n", nSrcChannels, nDstChannels);
return; /* in the off chance we have zero channels somewhere */
}
@@ -360,8 +356,8 @@ static int JACK_callback (nframes_t nframes, void *arg)
TRACE("nframes %ld, sizeof(sample_t) == %d\n", (long)nframes, sizeof(sample_t));
#endif
if(!this->client)
ERR("client is closed, this is weird...\n");
// if(!this->client)
// ERR("client is closed, this is weird...\n");
if(nframes != this->chunk_size)
this->chunk_size = nframes;
@@ -414,7 +410,7 @@ static int JACK_callback (nframes_t nframes, void *arg)
TRACE("deviceID(%d), setting played_bytes to %d\n", this->deviceID, this->played_bytes);
} else
{
ERR("unknown type for this->setType\n");
//ERR("unknown type for this->setType\n");
}
#endif
this->pMessages = msg->pNext; /* take this message off of the queue */
@@ -455,9 +451,9 @@ static int JACK_callback (nframes_t nframes, void *arg)
/* so frame * 2 bytes(16 bits) * X output channels */
if(this->buffer_size < (jackFramesAvailable * sizeof(short) * this->num_output_channels))
{
ERR("our buffer must have changed size\n");
ERR("allocated %ld bytes, need %ld bytes\n", this->buffer_size,
jackFramesAvailable * sizeof(short) * this->num_output_channels);
//ERR("our buffer must have changed size\n");
//ERR("allocated %ld bytes, need %ld bytes\n", this->buffer_size,
// jackFramesAvailable * sizeof(short) * this->num_output_channels);
return 0;
}
@@ -663,7 +659,7 @@ static int JACK_bufsize (nframes_t nframes, void *arg)
/* if we don't have a buffer then error out */
if(!this->sound_buffer)
{
ERR("error allocating sound_buffer memory\n");
//ERR("error allocating sound_buffer memory\n");
return 0;
}
}
@@ -707,7 +703,7 @@ void JACK_shutdown(void* arg)
/* lets see if we can't reestablish the connection */
if(JACK_OpenDevice(this) != ERR_SUCCESS)
{
ERR("unable to reconnect with jack\n");
//ERR("unable to reconnect with jack\n");
}
}
@@ -719,7 +715,7 @@ void JACK_shutdown(void* arg)
*/
static void JACK_Error(const char *desc)
{
ERR("%s\n", desc);
//ERR("%s\n", desc);
}
@@ -736,7 +732,7 @@ static bool JACK_SendMessage(jack_driver_t* this, enum cmd_enum command, long da
newMessage = (message_t*)malloc(sizeof(message_t));
if(!newMessage)
{
ERR("error allocating new message\n");
//ERR("error allocating new message\n");
return FALSE;
}
@@ -799,7 +795,7 @@ static int JACK_OpenDevice(jack_driver_t* this)
/* try once more */
if ((this->client = jack_client_new(client_name)) == 0)
{
ERR("jack server not running?\n");
//ERR("jack server not running?\n");
return ERR_OPENING_JACK;
}
}
@@ -854,7 +850,7 @@ static int JACK_OpenDevice(jack_driver_t* this)
TRACE("calling jack_activate()\n");
if(jack_activate(this->client))
{
ERR( "cannot activate client\n");
//ERR( "cannot activate client\n");
return ERR_OPENING_JACK;
}
@@ -890,7 +886,7 @@ static int JACK_OpenDevice(jack_driver_t* this)
TRACE("jack_connect() to port %d('%p')\n", i, this->output_port[i]);
if(jack_connect(this->client, jack_port_name(this->output_port[i]), ports[i]))
{
ERR("cannot connect to output port %d('%s')\n", i, ports[i]);
//ERR("cannot connect to output port %d('%s')\n", i, ports[i]);
failed = 1;
}
}
@@ -906,7 +902,7 @@ static int JACK_OpenDevice(jack_driver_t* this)
if(!ports)
{
ERR("jack_get_ports() failed to find ports with jack port flags of 0x%lX'\n", this->jack_port_flags);
//ERR("jack_get_ports() failed to find ports with jack port flags of 0x%lX'\n", this->jack_port_flags);
return ERR_PORT_NOT_FOUND;
}
@@ -914,7 +910,7 @@ static int JACK_OpenDevice(jack_driver_t* this)
TRACE("jack_connect() to port %d('%p')\n", i, this->output_port[i]);
if(jack_connect(this->client, jack_port_name(this->output_port[i]), ports[0]))
{
ERR("cannot connect to output port %d('%s')\n", 0, ports[0]);
//ERR("cannot connect to output port %d('%s')\n", 0, ports[0]);
failed = 1;
}
free(ports); /* free the returned array of ports */
@@ -1061,7 +1057,7 @@ int JACK_OpenEx(int* deviceID, unsigned int bits_per_channel, unsigned long *rat
if(output_channels > MAX_OUTPUT_PORTS)
{
ERR("output_channels == %d, MAX_OUTPUT_PORTS == %d\n", output_channels, MAX_OUTPUT_PORTS);
//ERR("output_channels == %d, MAX_OUTPUT_PORTS == %d\n", output_channels, MAX_OUTPUT_PORTS);
releaseDriver(this);
return ERR_TOO_MANY_OUTPUT_CHANNELS;
}
@@ -1071,8 +1067,8 @@ int JACK_OpenEx(int* deviceID, unsigned int bits_per_channel, unsigned long *rat
/* check that we have the correct number of port names */
if((jack_port_name_count > 1) && (jack_port_name_count != output_channels))
{
ERR("specified individual port names but not enough, gave %d names, need %d\n",
jack_port_name_count, output_channels);
//ERR("specified individual port names but not enough, gave %d names, need %d\n",
//jack_port_name_count, output_channels);
releaseDriver(this);
return ERR_PORT_NAME_OUTPUT_CHANNEL_MISMATCH;
} else
@@ -1115,7 +1111,7 @@ int JACK_OpenEx(int* deviceID, unsigned int bits_per_channel, unsigned long *rat
/* make sure bytes_per_frame is valid and non-zero */
if(!this->bytes_per_output_frame)
{
ERR("bytes_per_output_frame is zero\n");
//ERR("bytes_per_output_frame is zero\n");
releaseDriver(this);
return ERR_BYTES_PER_OUTPUT_FRAME_INVALID;
}
@@ -1123,7 +1119,7 @@ int JACK_OpenEx(int* deviceID, unsigned int bits_per_channel, unsigned long *rat
/* make sure bytes_per_frame is valid and non-zero */
if(!this->bytes_per_input_frame)
{
ERR("bytes_per_output_frame is zero\n");
//ERR("bytes_per_output_frame is zero\n");
releaseDriver(this);
return ERR_BYTES_PER_INPUT_FRAME_INVALID;
}
@@ -1219,7 +1215,7 @@ long JACK_Write(int deviceID, char *data, unsigned long bytes)
newWaveHeader = (wave_header_t*)malloc(sizeof(wave_header_t)); /* create a wave header for this data */
if(!newWaveHeader)
{
ERR("error allocating memory for newWaveHeader\n");
//ERR("error allocating memory for newWaveHeader\n");
}
newWaveHeader->pData = (char*)malloc(sizeof(char) * bytes); /* allocate memory for the data */
@@ -1454,9 +1450,6 @@ static long JACK_GetBytesStoredFromThis(jack_driver_t *this)
return_val = (this->client_bytes - this->played_client_bytes);
if(return_val < 0)
ERR("client_bytes == %ld < played_client_bytes == %ld\n", this->client_bytes, this->played_client_bytes);
TRACE("this->deviceID(%d), return_val = %ld\n", this->deviceID, return_val);
return return_val;
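
The ERR() calls silenced in this file wrote through OUTFILE, which can alias stdout, and any such diagnostic corrupts a yuv4mpeg stream that is being piped out. A minimal sketch of an alternative that keeps the messages but pins them to stderr (illustrative only; the commit simply comments the calls out):

    #include <stdio.h>

    /* Illustrative variant of the ERR macro: always target stderr so
     * diagnostics can never interleave with video data on stdout. */
    #define ERR(...) do { \
            fprintf(stderr, "ERR: %s: ", __FUNCTION__); \
            fprintf(stderr, __VA_ARGS__); \
            fflush(stderr); \
        } while (0)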

View File

@@ -54,6 +54,13 @@ typedef struct VJFrame_t
int height;
} VJFrame;
typedef struct VJRectangle_t
{
int top;
int bottom;
int left;
int right;
} VJRectangle;
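
The new VJRectangle stores crop margins in pixels, measured inward from each edge of the frame. A minimal sketch of the dimension arithmetic the rest of this commit relies on (the helper name is illustrative):

    /* Cropped size = source size minus the four margins; returns 0 when
     * the margins leave no pixels.  Mirrors yuv_allocate_crop_image(). */
    static int crop_dimensions(const VJRectangle *r, int src_w, int src_h,
                               int *dst_w, int *dst_h)
    {
        *dst_w = src_w - r->left - r->right;
        *dst_h = src_h - r->top  - r->bottom;
        return (*dst_w > 0 && *dst_h > 0);
    }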
typedef struct VJFrameInfo_t
{

View File

@@ -90,18 +90,25 @@ void veejay_msg(int type, const char format[], ...)
char sline[260];
va_list args;
int line = 0;
if(_no_msg) return;
if(type == 4 && _debug_level==0 ) return; // bye
FILE *out = (_no_msg ? stderr: stdout );
if( type != VEEJAY_MSG_ERROR && _no_msg )
return;
if( !_debug_level && type == VEEJAY_MSG_DEBUG )
return ; // bye
// parse arguments
va_start(args, format);
bzero(buf,256);
vsnprintf(buf, sizeof(buf) - 1, format, args);
if(!_message_his_status)
{
memset( &_message_history , 0 , sizeof(vj_msg_hist));
_message_his_status = 1;
}
if(!_message_his_status)
{
memset( &_message_history , 0 , sizeof(vj_msg_hist));
_message_his_status = 1;
}
if(_color_level)
{
@@ -123,9 +130,9 @@ void veejay_msg(int type, const char format[], ...)
break;
}
if(!line)
printf("%s %s %s\n", prefix, buf, TXT_END);
fprintf(out,"%s %s %s\n", prefix, buf, TXT_END);
else
printf("%s%s%s", TXT_GRE, buf, TXT_END );
fprintf(out,"%s%s%s", TXT_GRE, buf, TXT_END );
if( _message_history.w_index < MAX_LINES )
{
@@ -156,9 +163,9 @@ void veejay_msg(int type, const char format[], ...)
break;
}
if(!line)
printf("%s %s\n", prefix, buf);
fprintf(out,"%s %s\n", prefix, buf);
else
printf("%s", buf );
fprintf(out,"%s", buf );
if( _message_history.w_index < MAX_LINES )
{
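
The change above stops veejay_msg() from writing unconditionally to stdout: a silenced instance (used when streaming yuv4mpeg over stdout) drops everything except errors, and whatever passes goes to the stream chosen per call. A condensed, self-contained sketch of that routing (names are illustrative; the real function also handles colour codes and message history):

    #include <stdio.h>
    #include <stdarg.h>

    /* Sketch: route messages away from stdout while it carries video data. */
    static void msg_sketch(int is_error, int is_debug, int no_msg,
                           int debug_level, const char *fmt, ...)
    {
        FILE *out = no_msg ? stderr : stdout;
        va_list ap;

        if (!is_error && no_msg)
            return;                  /* silenced: only errors pass */
        if (is_debug && !debug_level)
            return;                  /* debug output requires -v   */

        va_start(ap, fmt);
        vfprintf(out, fmt, ap);
        va_end(ap);
        fputc('\n', out);
    }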

View File

@@ -24,9 +24,11 @@
#include <config.h>
#ifdef HAVE_MMX
#ifdef HAVE_ASM_MMX
#include "mmx.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <assert.h>
@@ -64,16 +66,37 @@ const char *ssm_description[SSM_COUNT] = {
void ss_420_to_422(uint8_t *buffer, int width, int height);
void ss_422_to_420(uint8_t *buffer, int width, int height);
static uint8_t *sample_buffer;
static int go = 0;
void subsample_init(int len)
typedef struct
{
sample_buffer = (uint8_t*) vj_malloc(sizeof(uint8_t) * len );
go = 1;
uint8_t *buf;
} yuv_sampler_t;
static uint8_t *sample_buffer = NULL;
static int go = 0;
void *subsample_init(int len)
{
void *ret = NULL;
yuv_sampler_t *s = (yuv_sampler_t*) vj_malloc(sizeof(yuv_sampler_t) );
if(!s)
return ret;
s->buf = (uint8_t*) vj_malloc(sizeof(uint8_t) * len );
if(!s->buf)
return ret;
return (void*) s;
}
void subsample_free(void *data)
{
yuv_sampler_t *sampler = (yuv_sampler_t*) data;
if(sampler)
{
if(sampler->buf) free(sampler->buf);
free(sampler);
}
sampler = NULL;
}
/*************************************************************************
* Chroma Subsampling
@@ -194,19 +217,16 @@ static void ss_444_to_420jpeg(uint8_t *buffer, int width, int height)
#define BLANK_CRB in0[1]
#define BLANK_CRB_2 (in0[1] << 1)
static void tr_420jpeg_to_444(uint8_t *buffer, int width, int height)
static void tr_420jpeg_to_444(void *data, uint8_t *buffer, int width, int height)
{
uint8_t *inm, *in0, *inp, *out0, *out1;
uint8_t cmm, cm0, cmp, c0m, c00, c0p, cpm, cp0, cpp;
int x, y;
static uint8_t *saveme = NULL;
static int saveme_size = 0;
if (width > saveme_size) {
free(saveme);
saveme_size = width;
saveme = vj_malloc(saveme_size * sizeof(saveme[0]));
assert(saveme != NULL);
}
yuv_sampler_t *sampler = (yuv_sampler_t*) data;
uint8_t *saveme = sampler->buf;
veejay_memcpy(saveme, buffer, width);
in0 = buffer + ( width * height /4) - 2;
@@ -347,7 +367,7 @@ static void tr_420jpeg_to_444(uint8_t *buffer, int width, int height)
static void ss_420jpeg_to_444(uint8_t *buffer, int width, int height)
{
#ifndef HAVE_MMX
#ifndef HAVE_ASM_MMX
uint8_t *in, *out0, *out1;
int x, y;
in = buffer + (width * height / 4) - 1;
@@ -404,17 +424,16 @@ void ss_422_to_420(uint8_t *buffer, int width, int height )
//todo 2x1 down sampling (box)
}
#ifndef HAVE_MMX
static void ss_444_to_422(uint8_t *buffer, int width, int height)
#ifndef HAVE_ASM_MMX
static void ss_444_to_422(void *data, uint8_t *buffer, int width, int height)
{
const int dst_stride = width/2;
int x,y;
if(go==0) subsample_init(width);
yuv_sampler_t *sampler = (yuv_sampler_t*) data;
for(y = 0; y < height; y ++)
{
uint8_t *src = sample_buffer;
uint8_t *src = sampler->buf;
uint8_t *dst = buffer + (y*dst_stride);
veejay_memcpy( src, buffer + (y*width), width );
for(x=0; x < dst_stride; x++)
@@ -510,18 +529,18 @@ static inline void mmx_average_2_U8 (uint8_t * dest, const uint8_t * src1,
psubb_r2r (mm3, mm4); /* subtract subresults */
movq_r2m (mm4, *dest); /* store result in dest */
}
static void ss_444_to_422(uint8_t *buffer, int width, int height)
static void ss_444_to_422(void *data,uint8_t *buffer, int width, int height)
{
const int dst_stride = width/2;
const int len = width * height;
const int mmx_stride = dst_stride / 8;
int x,y;
if(go==0) subsample_init(width);
yuv_sampler_t *sampler = (yuv_sampler_t*) data;
for(y = 0; y < height; y ++)
{
uint8_t *src = sample_buffer;
uint8_t *src = sampler->buf;
uint8_t *dst = buffer + (y*dst_stride);
veejay_memcpy( src, buffer + (y*width), width );
for(x=0; x < mmx_stride; x++)
@@ -541,7 +560,7 @@ static void tr_422_to_444(uint8_t *buffer, int width, int height)
const int stride = width/2;
const int len = stride * height;
#ifdef HAVE_MMX
#ifdef HAVE_ASM_MMX
const int mmx_stride = stride / 8;
#endif
int x,y;
@@ -550,7 +569,7 @@ static void tr_422_to_444(uint8_t *buffer, int width, int height)
{
uint8_t *dst = buffer + (y * width);
uint8_t *src = buffer + (y * stride);
#ifdef HAVE_MMX
#ifdef HAVE_ASM_MMX
for( x = 0; x < mmx_stride; x ++ )
{
movq_m2r( *src,mm0 );
@@ -626,15 +645,16 @@ static void ss_444_to_420mpeg2(uint8_t *buffer, int width, int height)
void chroma_subsample(subsample_mode_t mode, uint8_t *ycbcr[],
void chroma_subsample(subsample_mode_t mode, void *data, uint8_t *ycbcr[],
int width, int height)
{
switch (mode) {
case SSM_420_JPEG_BOX:
case SSM_420_JPEG_TR:
ss_444_to_420jpeg(ycbcr[1], width, height);
ss_444_to_420jpeg(ycbcr[2], width, height);
#ifdef HAVE_MMX
#ifdef HAVE_ASM_MMX
emms();
#endif
break;
@@ -643,9 +663,9 @@ void chroma_subsample(subsample_mode_t mode, uint8_t *ycbcr[],
ss_444_to_420mpeg2(ycbcr[2], width, height);
break;
case SSM_422_444:
ss_444_to_422(ycbcr[1],width,height);
ss_444_to_422(ycbcr[2],width,height);
#ifdef HAVE_MMX
ss_444_to_422(data,ycbcr[1],width,height);
ss_444_to_422(data,ycbcr[2],width,height);
#ifdef HAVE_ASM_MMX
emms();
#endif
break;
@@ -659,25 +679,26 @@ void chroma_subsample(subsample_mode_t mode, uint8_t *ycbcr[],
}
void chroma_supersample(subsample_mode_t mode, uint8_t *ycbcr[],
void chroma_supersample(subsample_mode_t mode,void *data, uint8_t *ycbcr[],
int width, int height)
{
switch (mode) {
case SSM_420_JPEG_BOX:
ss_420jpeg_to_444(ycbcr[1], width, height);
ss_420jpeg_to_444(ycbcr[2], width, height);
#ifdef HAVE_MMX
#ifdef HAVE_ASM_MMX
emms();
#endif
break;
case SSM_420_JPEG_TR:
tr_420jpeg_to_444(ycbcr[1], width, height);
tr_420jpeg_to_444(ycbcr[2], width, height);
tr_420jpeg_to_444(data,ycbcr[1], width, height);
tr_420jpeg_to_444(data,ycbcr[2], width, height);
break;
case SSM_422_444:
tr_422_to_444(ycbcr[2],width,height);
tr_422_to_444(ycbcr[1],width,height);
#ifdef HAVE_MMX
#ifdef HAVE_ASM_MMX
emms();
#endif
break;
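
subsample_init() now hands back an opaque sampler (a heap buffer sized for one full-resolution line) instead of filling a file-static buffer, so the effect chain and the crop path can each own their own scratch space. A usage sketch against the new API (header name and sampling mode are assumptions; the prototypes appear in the header diffed further down):

    #include <stdint.h>
    #include "subsample.h"       /* prototypes as diffed below; path assumed */

    /* One sampler per consumer, passed into every subsample/supersample call. */
    static void process_frame(uint8_t *planes[3], int width, int height)
    {
        void *sampler = subsample_init(width);    /* one scan line of scratch */
        if (!sampler)
            return;

        chroma_supersample(SSM_420_JPEG_TR, sampler, planes, width, height);
        /* ... run effects on 4:4:4 data ... */
        chroma_subsample(SSM_420_JPEG_TR, sampler, planes, width, height);

        subsample_free(sampler);
    }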

View File

@@ -487,6 +487,63 @@ void* yuv_init_swscaler(VJFrame *src, VJFrame *dst, sws_template *tmpl, int cpu_
}
void yuv_crop(VJFrame *src, VJFrame *dst, VJRectangle *rect )
{
int x;
int y;
uint8_t *sy = src->data[0];
uint8_t *su = src->data[1];
uint8_t *sv = src->data[2];
uint8_t *dstY = dst->data[0];
uint8_t *dstU = dst->data[1];
uint8_t *dstV = dst->data[2];
int i = 0;
for( i = 0 ; i < 3 ; i ++ )
{
int j = 0;
uint8_t *srcPlane = src->data[i];
uint8_t *dstPlane = dst->data[i];
for( y = rect->top ; y < ( src->height - rect->bottom ); y ++ )
{
for ( x = rect->left ; x < ( src->width - rect->right ); x ++ )
{
dstPlane[j] = srcPlane[ y * src->width + x ];
j++;
}
}
}
}
VJFrame *yuv_allocate_crop_image( VJFrame *src, VJRectangle *rect )
{
int w = src->width - rect->left - rect->right;
int h = src->height - rect->top - rect->bottom;
if( w <= 0 )
return NULL;
if( h <= 0 )
return NULL;
VJFrame *new = (VJFrame*) vj_malloc(sizeof(VJFrame));
if(!new)
return NULL;
new->width = w;
new->height = h;
new->uv_len = (w >> src->shift_h) * (h >> src->shift_v );
new->len = w * h;
new->uv_width = (w >> src->shift_h );
new->uv_height = (h >> src->shift_v );
new->shift_v = src->shift_v;
new->shift_h = src->shift_h;
return new;
}
void yuv_free_swscaler(void *sws)
{
if(sws)
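
yuv_allocate_crop_image() only builds the frame header for the cropped size; the plane buffers and the actual copy are left to the caller, and yuv_crop() assumes all three planes are at full (4:4:4) resolution. A minimal allocation sketch (error handling trimmed, vj_malloc() convention assumed from the surrounding code):

    /* Sketch: prepare and fill a cropped copy of a supersampled source. */
    static VJFrame *make_cropped(VJFrame *src, VJRectangle *rect)
    {
        VJFrame *dst = yuv_allocate_crop_image(src, rect);
        int i;
        if (!dst)
            return NULL;                    /* margins leave no pixels */
        for (i = 0; i < 3; i++)             /* full-size planes: the crop */
            dst->data[i] = (uint8_t*) vj_malloc(dst->len);  /* runs on 4:4:4 */
        yuv_crop(src, dst, rect);
        return dst;
    }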

View File

@@ -39,12 +39,14 @@ typedef enum subsample_mode {
extern const char *ssm_id[SSM_COUNT];
extern const char *ssm_description[SSM_COUNT];
void subsample_init(int buf_len);
void chroma_subsample(subsample_mode_t mode, uint8_t * ycbcr[],
void *subsample_init(int buf_len);
void subsample_free(void *sampler);
void chroma_subsample(subsample_mode_t mode, void *sampler, uint8_t * ycbcr[],
int width, int height);
void chroma_supersample(subsample_mode_t mode, uint8_t * ycbcr[],
void chroma_supersample(subsample_mode_t mode, void *sampler, uint8_t * ycbcr[],
int width, int height);
// yuv 4:2:2 packed to yuv 4:2:0 planar
@@ -86,4 +88,8 @@ int yuv_sws_get_cpu_flags(void);
void yuv_free_swscaler(void *sws);
void yuv_crop(VJFrame *src, VJFrame *dst, VJRectangle *rect );
VJFrame *yuv_allocate_crop_image( VJFrame *src, VJRectangle *rect );
#endif

View File

@@ -79,9 +79,6 @@ timer to use ( none, normal, rtc )
.B \-f/--fps <num>
Override framerate of video
.TP
.B \-X/--no-default-tags
Do not create solid color tags at startup
.TP
.B \-x/--geometryx <num>
Geometry x offset for SDL video window
.TP
@@ -141,12 +138,16 @@ Use smaller values for better performance (mapping several hundreds of
megabytes can become a problem)
.TP
.TP
.B \-w/--zoomwidth=<0-4096>
.B \-w/--zoomwidth <0-4096>
For use with \-z/--zoom, specify output width
.TP
.B \-h/--zoomheight=<0-4096>
.B \-h/--zoomheight <0-4096>
For use with \-z/--zoom, specify output height
.TP
.B \-C/--zoomcrop top:bottom:left:right
For use with \-z/--zoom, crops the input image before scaling.
Set in pixels.
.TP
.B \--lgb=<0-100>
For use with \-z/--zoom, use Gaussian blur filter (luma)
.TP
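
As an illustrative example of the new -C entry: with a 720x576 source, -C 16:16:8:8 trims 16 pixels from the top and bottom and 8 from each side, so the scaler receives a 704x544 image, which -w/-h then zoom to the requested output size.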

View File

@@ -525,7 +525,7 @@ int veejay_init_editlist(veejay_t * info)
veejay_msg(VEEJAY_MSG_INFO, "Started Audio Task");
// stats.audio = 1;
} else {
veejay_msg(VEEJAY_MSG_ERROR, "Could not start Audio Task");
veejay_msg(VEEJAY_MSG_WARNING, "Could not start Audio Task");
}
}
if( !el->has_audio )
@@ -890,6 +890,9 @@ static int veejay_screen_update(veejay_t * info )
uint8_t *c_frame[3];
int i = 0;
vj_perform_unlock_primary_frame();
// get the frame to output, in 420 or 422
if (info->uc->take_bg==1)
{
@@ -905,15 +908,29 @@ static int veejay_screen_update(veejay_t * info )
memset(&src,0,sizeof(VJFrame));
memset(&dst,0,sizeof(VJFrame));
vj_get_yuv_template( &src, info->edit_list->video_width,
info->edit_list->video_height,
info->pixel_format );
if(info->settings->crop)
{
int w = 0;
int h = 0;
vj_perform_get_crop_dimensions( info, &w, &h );
vj_get_yuv_template( &src, w,h, info->pixel_format );
}
else
{
vj_get_yuv_template( &src, info->edit_list->video_width,
info->edit_list->video_height,
info->pixel_format );
}
vj_get_yuv_template( &dst, info->video_output_width,
info->video_output_height,
info->pixel_format );
info->video_output_height,
info->pixel_format );
if(info->settings->crop)
vj_perform_get_cropped_frame(info, &(src.data), 1);
else
vj_perform_get_primary_frame(info, src.data, 0 );
vj_perform_get_primary_frame(info, src.data, 0 );
vj_perform_get_output_frame(info, dst.data );
yuv_convert_and_scale( info->video_out_scaler, src.data, dst.data );
@@ -1501,26 +1518,13 @@ static int veejay_mjpeg_sync_buf(veejay_t * info, struct mjpeg_sync *bs)
int veejay_init(veejay_t * info, int x, int y,char *arg, int def_tags)
{
// struct mjpeg_params bp;
editlist *el = info->edit_list;
video_playback_setup *settings = info->settings;
vj_event_init();
#ifdef HAVE_XML2
if(info->load_action_file)
editlist *el = info->edit_list;
video_playback_setup *settings = info->settings;
if(info->video_out<0)
{
veejay_msg(VEEJAY_MSG_INFO, "Loading configuaration file %s", info->action_file);
veejay_load_action_file(info, info->action_file );
}
#endif
if(info->video_out<0)
{
veejay_msg(VEEJAY_MSG_ERROR, "No video output driver selected (see man veejay)");
return -1;
}
veejay_msg(VEEJAY_MSG_ERROR, "No video output driver selected (see man veejay)");
return -1;
}
// override geometry set in config file
if( info->uc->geox != 0 && info->uc->geoy != 0 )
{
@@ -1528,22 +1532,34 @@ int veejay_init(veejay_t * info, int x, int y,char *arg, int def_tags)
y = info->uc->geoy;
}
switch (info->uc->use_timer)
vj_event_init();
#ifdef HAVE_XML2
if(info->load_action_file)
{
case 0:
veejay_msg(VEEJAY_MSG_INFO, "Loading configuaration file %s", info->action_file);
veejay_load_action_file(info, info->action_file );
}
#endif
switch (info->uc->use_timer)
{
case 0:
veejay_msg(VEEJAY_MSG_WARNING, "Not timing audio/video");
break;
case 1:
veejay_msg(VEEJAY_MSG_DEBUG,
"RTC /dev/rtc hardware timer is broken!");
info->uc->use_timer = 2;
return -1;
break;
case 2:
case 2:
veejay_msg(VEEJAY_MSG_DEBUG, "Using nanosleep timer");
break;
}
}
if (veejay_init_editlist(info) != 0)
if (veejay_init_editlist(info) != 0)
{
veejay_msg(VEEJAY_MSG_ERROR,
"Cannot initialize the EditList");
@@ -1554,7 +1570,42 @@ int veejay_init(veejay_t * info, int x, int y,char *arg, int def_tags)
{
veejay_msg(VEEJAY_MSG_ERROR, "Unable to initialize Performer");
return -1;
}
}
if( info->settings->crop && info->settings->zoom)
{
VJFrame src;
memset( &src,0,sizeof(VJFrame));
int w = 0; int h = 0;
vj_get_yuv_template( &src,
info->edit_list->video_width,
info->edit_list->video_height,
info->pixel_format );
int res = vj_perform_init_cropped_output_frame(
info,
&src,
&w,
&h
);
if( res == 0 )
{
veejay_msg(VEEJAY_MSG_ERROR ,"Invalid crop parameters: %d:%d:%d:%d (%dx%d)",
info->settings->viewport.top,
info->settings->viewport.bottom,
info->settings->viewport.left,
info->settings->viewport.right,w,h);
return -1;
}
veejay_msg(VEEJAY_MSG_INFO, "Crop video %dx%d to %dx%d (top %d, bottom %d, left %d, right %d",
info->edit_list->video_width, info->edit_list->video_height,
w,h,
info->settings->viewport.top,
info->settings->viewport.bottom,
info->settings->viewport.left,
info->settings->viewport.right );
}
if( info->settings->zoom )
{
@@ -1562,20 +1613,33 @@ int veejay_init(veejay_t * info, int x, int y,char *arg, int def_tags)
VJFrame dst;
memset( &src, 0, sizeof(VJFrame));
memset( &dst, 0, sizeof(VJFrame));
// info->video_output_width = info->dummy->width;
// info->video_output_height = info->dummy->height;
vj_get_yuv_template( &src,
if(info->settings->crop)
{
int w = 0;
int h = 0;
vj_perform_get_crop_dimensions( info, &w, &h );
vj_get_yuv_template( &src, w,h, info->pixel_format );
}
else
{
vj_get_yuv_template( &src,
info->edit_list->video_width,
info->edit_list->video_height,
info->pixel_format );
}
vj_get_yuv_template( &dst,
info->video_output_width,
info->video_output_height,
info->pixel_format );
vj_perform_get_primary_frame(info, &(src.data) ,0 );
vj_perform_init_output_frame(info, &(dst.data),
if(info->settings->crop)
vj_perform_get_cropped_frame(info, &src.data, 0);
else
vj_perform_get_primary_frame(info, &src.data ,0 );
vj_perform_init_output_frame(info, &(dst.data),
info->video_output_width, info->video_output_height );
info->settings->sws_templ.flags = info->settings->zoom;
@@ -2248,6 +2312,7 @@ veejay_t *veejay_malloc()
return NULL;
memset( info->settings, 0, sizeof(video_playback_setup));
memset( &(info->settings->action_scheduler), 0, sizeof(vj_schedule_t));
memset( &(info->settings->viewport ), 0, sizeof(VJRectangle));
info->status_what = (char*) vj_malloc(sizeof(char) * MESSAGE_SIZE );
info->status_msg = (char*) vj_malloc(sizeof(char) * MESSAGE_SIZE+5);
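
The new crop branch in veejay_init() has to run before the scaler setup: the viewport is validated against the edit-list size, the cropped frame is allocated, and from then on the software scaler's source template uses the cropped dimensions rather than the file dimensions. A condensed sketch of that ordering (struct layout and return conventions taken from the diff; the wrapper name is illustrative):

    #include <string.h>

    /* Sketch: validate/allocate the crop first, then feed the cropped size
     * (not the edit-list size) to the scaler source template. */
    static int init_crop_before_zoom(veejay_t *info)
    {
        VJFrame src;
        int w = 0, h = 0;

        memset(&src, 0, sizeof(VJFrame));
        vj_get_yuv_template(&src, info->edit_list->video_width,
                            info->edit_list->video_height, info->pixel_format);

        if (vj_perform_init_cropped_output_frame(info, &src, &w, &h) == 0)
            return -1;            /* top:bottom:left:right left no pixels */

        vj_get_yuv_template(&src, w, h, info->pixel_format);  /* scaler input */
        return 0;
    }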

View File

@@ -40,7 +40,6 @@
static int run_server = 1;
static veejay_t *info;
static int default_use_tags=1;
static float override_fps = 0.0;
static int default_geometry_x = -1;
static int default_geometry_y = -1;
@@ -139,36 +138,34 @@ static void Usage(char *progname)
#else
fprintf(stderr, " -O/--output\t\t\tSDL(0), (3) yuv4mpeg (4) SHM (broken) (5) no visual\n");
#endif
fprintf(stderr,
fprintf(stderr,
" -o/--outstream <filename>\twhere to write the yuv4mpeg stream (use with -O3)\n");
fprintf(stderr,
fprintf(stderr,
" -c/--synchronization [01]\tSync correction off/on (default on)\n");
fprintf(stderr, " -f/--fps num\t\t\tOverride default framerate (default read from file)\n");
fprintf(stderr,
fprintf(stderr, " -f/--fps num\t\t\tOverride default framerate (default read from file)\n");
fprintf(stderr,
" -P/--preserve-pathnames\tDo not 'canonicalise' pathnames in editlists\n");
fprintf(stderr,
fprintf(stderr,
" -a/--audio [01]\t\tEnable (1) or disable (0) audio (default 1)\n");
fprintf(stderr,
fprintf(stderr,
" -s/--size NxN\t\t\twidth X height for SDL video window\n");
fprintf(stderr,
" -X/--no-default-tags\t\tDo not create solid color tags at startup\n");
fprintf(stderr,
fprintf(stderr,
" -l/--action-file <filename>\tLoad an Configuartion/Action File (none at default)\n");
fprintf(stderr,
fprintf(stderr,
" -u/--dump-events \t\tDump event information to screen\n");
fprintf(stderr,
fprintf(stderr,
" -I/--deinterlace\t\tDeinterlace video if it is interlaced\n");
fprintf(stderr," -x/--geometryx <num> \t\tTop left x offset for SDL video window\n");
fprintf(stderr," -y/--geometryy <num> \t\tTop left y offset for SDL video window\n");
fprintf(stderr," -F/--features \t\tList of compiled features\n");
fprintf(stderr," -v/--verbose \t\tEnable debugging output (default off)\n");
fprintf(stderr," -b/--bezerk \t\tBezerk (default off) \n");
fprintf(stderr," -L/--auto-loop \t\tStart with default sample\n");
fprintf(stderr," -x/--geometryx <num> \t\tTop left x offset for SDL video window\n");
fprintf(stderr," -y/--geometryy <num> \t\tTop left y offset for SDL video window\n");
fprintf(stderr," -F/--features \t\tList of compiled features\n");
fprintf(stderr," -v/--verbose \t\tEnable debugging output (default off)\n");
fprintf(stderr," -b/--bezerk \t\tBezerk (default off) \n");
fprintf(stderr," -L/--auto-loop \t\tStart with default sample\n");
fprintf(stderr," -n/--no-color \t\tDont use colored text\n");
fprintf(stderr," -n/--no-color \t\tDont use colored text\n");
fprintf(stderr," -r/--force \t\tForce loading of videofiles\n");
fprintf(stderr," -m/--sample-mode [01]\t\tSampling mode 1 = best quality (default), 0 = best performance\n");
fprintf(stderr," -Y/--ycbcr [01]\t\t0 = YUV 4:2:0 Planar, 1 = YUV 4:2:2 Planar\n");
fprintf(stderr," -m/--sample-mode [01]\t\tSampling mode 1 = best quality (default), 0 = best performance\n");
fprintf(stderr," -Y/--ycbcr [01]\t\t0 = YUV 4:2:0 Planar, 1 = YUV 4:2:2 Planar\n");
fprintf(stderr," -d/--dummy \t\tDummy playback\n");
fprintf(stderr," -W/--width <num>\t\tdummy width\n");
@@ -181,8 +178,6 @@ static void Usage(char *progname)
fprintf(stderr," --map-from-file <num>\tmap N frames to memory\n");
fprintf(stderr," -z/--zoom [1-11]\n");
fprintf(stderr," -w/--zoomwidth \n");
fprintf(stderr," -h/--zoomheight \n");
fprintf(stderr,"\t\t\t\tsoftware scaler type (also use -W, -H ). \n");
fprintf(stderr,"\t\t\t\tAvailable types are:\n");
fprintf(stderr,"\t\t\t\t1\tFast bilinear (default)\n");
@@ -196,7 +191,6 @@ static void Usage(char *progname)
fprintf(stderr,"\t\t\t\t9\tsincR\n");
fprintf(stderr,"\t\t\t\t10\tLanczos\n");
fprintf(stderr,"\t\t\t\t11\tNatural bicubic spline\n");
#ifdef HAVE_GETOPT_LONG
fprintf(stderr,"\n\t\t\t\tsoftware scaler options:\n");
fprintf(stderr,"\t\t\t\t--lgb=<0-100>\tGaussian blur filter (luma)\n");
fprintf(stderr,"\t\t\t\t--cgb=<0-100>\tGuassian blur filter (chroma)\n");
@@ -204,7 +198,9 @@ static void Usage(char *progname)
fprintf(stderr,"\t\t\t\t--cs=<0-100>\tSharpen filter (chroma)\n");
fprintf(stderr,"\t\t\t\t--chs=<h>\tChroma horizontal shifting\n");
fprintf(stderr,"\t\t\t\t--cvs=<v>\tChroma vertical shifting\n");
#endif
fprintf(stderr,"\t\t\t\t-w/--zoomwidth \n");
fprintf(stderr,"\t\t\t\t-h/--zoomheight \n");
fprintf(stderr,"\t\t\t\t-C/--zoomcrop [top:bottom:left:right] (crop source before scaling)\n");
fprintf(stderr," -q/--quit \t\t\tQuit at end of file\n");
fprintf(stderr,"\n\n");
exit(1);
@@ -349,9 +345,6 @@ static int set_option(const char *name, char *value)
{
auto_loop = 1;
}
else if (strcmp(name,"no-default-tags")==0||strcmp(name, "X")==0) {
default_use_tags=0;
}
else if (strcmp(name, "zoom") == 0 || strcmp(name, "z" ) == 0)
{
info->settings->zoom = atoi(optarg);
@@ -401,6 +394,18 @@ static int set_option(const char *name, char *value)
OUT_OF_RANGE_ERR(info->settings->sws_templ.chromaVShift );
info->settings->sws_templ.use_filter = 1;
}
else if (strcmp(name, "C") == 0 || strcmp(name, "zoomcrop") == 0 )
{
if (sscanf(value, "%d:%d:%d:%d", &(info->settings->viewport.top),
&(info->settings->viewport.bottom),
&(info->settings->viewport.left),
&(info->settings->viewport.right)) < 4)
{
fprintf(stderr, "Crop requires top:bottom:left:right\n");
exit(1);
}
info->settings->crop = 1;
}
else if (strcmp(name, "quit") == 0 || strcmp(name, "q") == 0 )
{
@@ -431,7 +436,6 @@ static void check_command_line_options(int argc, char *argv[])
{"size", 1, 0, 0}, /* -S/--size */
{"graphics-driver", 1, 0, 0},
{"timer", 1, 0, 0}, /* timer */
{"no-default-tags",0,0,0},
{"dump-events",0,0,0},
{"bezerk",0,0,0},
{"outstream", 1, 0, 0},
@@ -461,6 +465,7 @@ static void check_command_line_options(int argc, char *argv[])
{"multicast-osc",1,0,0},
{"multicast-vims",1,0,0},
{"map-from-file",1,0,0},
{"zoomcrop",1,0,0},
{"lgb",1,0,0},
{"cgb",1,0,0},
{"ls",1,0,0},
@@ -481,12 +486,12 @@ static void check_command_line_options(int argc, char *argv[])
#ifdef HAVE_GETOPT_LONG
while ((n =
getopt_long(argc, argv,
"o:G:O:a:H:V:s:c:t:l:C:p:m:x:y:nLFPXY:ugrvdibIjf:N:H:W:R:M:V:z:qw:h:",
"o:G:O:a:H:V:s:c:t:l:p:m:x:y:nLFPY:ugrvdibIjf:N:H:W:R:M:V:z:qw:h:C:",
long_options, &option_index)) != EOF)
#else
while ((n =
getopt(argc, argv,
"o:G:s:O:a:c:t:l:t:C:x:y:m:p:nLFPXY:Y:vudgibrIjf:N:H:W:R:M:V:z:qw:h:")) != EOF)
"o:G:s:O:a:c:t:l:t:x:y:m:p:nLFPY:vudgibrIjf:N:H:W:R:M:V:z:qw:h:C:")) != EOF)
#endif
{
switch (n) {
@@ -512,16 +517,22 @@ static void check_command_line_options(int argc, char *argv[])
Usage(argv[0]);
veejay_set_debug_level(info->verbose);
mjpeg_default_handler_verbosity( (info->verbose ? 2:0) );
if(info->video_out == 3)
{
veejay_silent();
mjpeg_default_handler_verbosity( 0 );
}
else
{
mjpeg_default_handler_verbosity( (info->verbose ? 1:0) );
}
if(!info->dump)
if(veejay_open_files(info, argv + optind, argc - optind,override_fps, force_video_file, override_pix_fmt)<=0)
{
veejay_msg(VEEJAY_MSG_ERROR, "Cannot start veejay");
exit(1);
}
}
static void print_license()
@@ -564,8 +575,6 @@ int main(int argc, char **argv)
/*EditList *editlist = info->editlist; */
fflush(stdout);
vj_mem_init();
info = veejay_malloc();
@@ -586,10 +595,7 @@ int main(int argc, char **argv)
print_license();
if(info->video_out == 3)
{
veejay_silent();
}
if(info->dump)
{
@@ -629,7 +635,7 @@ int main(int argc, char **argv)
default_geometry_x,
default_geometry_y,
NULL,
default_use_tags)<0)
1)<0)
{
veejay_msg(VEEJAY_MSG_ERROR, "Initializing veejay");
return 0;
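
The -C parser added above accepts exactly four colon-separated pixel margins. A self-contained sketch of the same parse (the VJRectangle fields come from the header diffed earlier; the function name is illustrative):

    #include <stdio.h>

    /* Sketch of the -C/--zoomcrop argument parser: top:bottom:left:right. */
    static int parse_zoomcrop(const char *value, VJRectangle *vp)
    {
        if (sscanf(value, "%d:%d:%d:%d",
                   &vp->top, &vp->bottom, &vp->left, &vp->right) != 4) {
            fprintf(stderr, "Crop requires top:bottom:left:right\n");
            return 0;
        }
        return 1;
    }

An illustrative invocation would be something like: veejay -z 1 -C 16:16:8:8 -w 352 -h 288 movie.avi, i.e. crop first, then zoom to the requested size.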

View File

@@ -158,6 +158,8 @@ typedef struct {
sws_template sws_templ;
vj_schedule_t action_scheduler;
float output_fps;
int crop;
VJRectangle viewport;
} video_playback_setup;

View File

@@ -67,10 +67,15 @@ struct ycbcr_frame {
// audio buffer is 16 bit signed integer
static void *effect_sampler = NULL;
static void *crop_sampler = NULL;
static struct ycbcr_frame **video_output_tmp_buffer;
static struct ycbcr_frame **video_output_buffer; /* scaled video output */
static int video_output_buffer_convert = 0;
static struct ycbcr_frame **frame_buffer; /* chain */
static struct ycbcr_frame **primary_buffer; /* normal */
static VJFrame *crop_frame = NULL;
static int cached_tag_frames[2][CLIP_MAX_EFFECTS]; /* cache a frame into the buffer only once */
static int cached_clip_frames[2][CLIP_MAX_EFFECTS];
static int frame_info[64][CLIP_MAX_EFFECTS]; /* array holding frame lengths */
@@ -494,7 +499,7 @@ int vj_perform_init(veejay_t * info)
primary_buffer[0]->Cr = (uint8_t*) vj_malloc(sizeof(uint8_t) * frame_len );
if(!primary_buffer[0]->Cr) return 0;
veejay_memset(primary_buffer[0]->Cr,128, frame_len);
video_output_buffer_convert = 0;
video_output_buffer =
(struct ycbcr_frame**) vj_malloc(sizeof(struct ycbcr_frame**) * 2 );
if(!video_output_buffer)
@@ -547,6 +552,8 @@ int vj_perform_init(veejay_t * info)
vj_perform_record_buffer_init();
effect_sampler = subsample_init( w );
return 1;
}
@@ -677,6 +684,17 @@ void vj_perform_free(veejay_t * info)
if(primary_buffer) free(primary_buffer);
if(socket_buffer) free(socket_buffer);
if(crop_frame)
{
if(crop_frame->data[0]) free(crop_frame->data[0]);
if(crop_frame->data[1]) free(crop_frame->data[1]);
if(crop_frame->data[2]) free(crop_frame->data[2]);
}
if(crop_sampler)
subsample_free(crop_sampler);
if(effect_sampler)
subsample_free(effect_sampler);
for(c=0; c < 3; c ++)
{
if(temp_buffer[c]) free(temp_buffer[c]);
@@ -813,13 +831,73 @@ void vj_perform_get_primary_frame(veejay_t * info, uint8_t ** frame,
frame[2] = primary_buffer[0]->Cr;
}
void vj_perform_get_output_frame( veejay_t *info, uint8_t **frame )
{
frame[0] = video_output_buffer[0]->Y;
frame[1] = video_output_buffer[0]->Cb;
frame[2] = video_output_buffer[0]->Cr;
}
void vj_perform_get_crop_dimensions(veejay_t *info, int *w, int *h)
{
*w = info->edit_list->video_width - info->settings->viewport.left - info->settings->viewport.right;
*h = info->edit_list->video_height - info->settings->viewport.top - info->settings->viewport.bottom;
}
int vj_perform_get_cropped_frame( veejay_t *info, uint8_t **frame, int crop )
{
if(crop)
{
VJFrame src;
memset( &src, 0, sizeof(VJFrame));
vj_get_yuv_template( &src,
info->edit_list->video_width,
info->edit_list->video_height,
info->pixel_format );
src.data[0] = primary_buffer[0]->Y;
src.data[1] = primary_buffer[0]->Cb;
src.data[2] = primary_buffer[0]->Cr;
// yuv crop needs supersampled data
chroma_supersample( info->settings->sample_mode,effect_sampler, src.data, src.width,src.height );
yuv_crop( &src, crop_frame, &(info->settings->viewport));
chroma_subsample( info->settings->sample_mode,crop_sampler, crop_frame->data, crop_frame->width, crop_frame->height );
}
frame[0] = crop_frame->data[0];
frame[1] = crop_frame->data[1];
frame[2] = crop_frame->data[2];
return 1;
}
int vj_perform_init_cropped_output_frame(veejay_t *info, VJFrame *src, int *dw, int *dh )
{
video_playback_setup *settings = info->settings;
if( crop_frame )
free(crop_frame);
crop_frame = yuv_allocate_crop_image( src, &(settings->viewport) );
if(!crop_frame)
return 0;
*dw = crop_frame->width;
*dh = crop_frame->height;
crop_sampler = subsample_init( *dw );
/* enough space to supersample*/
int i;
for( i = 0; i < 3; i ++ )
{
crop_frame->data[i] = (uint8_t*) vj_malloc(sizeof(uint8_t) * crop_frame->len );
if(!crop_frame->data[i])
return 0;
}
return 1;
}
void vj_perform_init_output_frame( veejay_t *info, uint8_t **frame,
@@ -905,25 +983,19 @@ int vj_perform_send_primary_frame_s(veejay_t *info, int mcast)
void vj_perform_get_output_frame_420p( veejay_t *info, uint8_t **frame, int w, int h )
{
static long _last_frame = -1;
if(info->pixel_format == FMT_422)
{
frame[0] = video_output_buffer[1]->Y;
frame[1] = video_output_buffer[1]->Cb;
frame[2] = video_output_buffer[1]->Cr;
if(_last_frame != info->settings->current_frame_num)
{
uint8_t *src_frame[3];
src_frame[0] = video_output_buffer[0]->Y;
src_frame[1] = video_output_buffer[0]->Cb;
src_frame[2] = video_output_buffer[0]->Cr;
uint8_t *src_frame[3];
src_frame[0] = video_output_buffer[0]->Y;
src_frame[1] = video_output_buffer[0]->Cb;
src_frame[2] = video_output_buffer[0]->Cr;
yuv422p_to_yuv420p2(
src_frame, frame,w, h );
_last_frame = info->settings->current_frame_num;
}
yuv422p_to_yuv420p2(
src_frame, frame,w, h );
}
else
{
@@ -947,16 +1019,20 @@ int vj_perform_is_ready(veejay_t *info)
return 1;
}
void vj_perform_unlock_primary_frame( void )
{
video_output_buffer_convert = 0;
// call this every cycle
}
void vj_perform_get_primary_frame_420p(veejay_t *info, uint8_t **frame )
{
static long _last_frame = -1;
editlist *el = info->edit_list;
if(info->pixel_format==FMT_422)
{
if(_last_frame != info->settings->current_frame_num)
if( video_output_buffer_convert == 0 )
{
uint8_t *pframe[3];
_last_frame = info->settings->current_frame_num;
pframe[0] = primary_buffer[0]->Y;
pframe[1] = primary_buffer[0]->Cb;
pframe[2] = primary_buffer[0]->Cr;
@@ -967,7 +1043,7 @@ void vj_perform_get_primary_frame_420p(veejay_t *info, uint8_t **frame )
// ss_422_to_420( primary_buffer[0]->Cr,
// el->video_width/2,
// el->video_height );
_last_frame = info->settings->current_frame_num;\
video_output_buffer_convert = 1;
}
frame[0] = temp_buffer[0];
frame[1] = temp_buffer[1];
@@ -1827,6 +1903,7 @@ static int vj_perform_tag_render_chain_entry(veejay_t *info, int chain_entry, co
if(sub_mode)
chroma_supersample(
settings->sample_mode,
effect_sampler,
frames[1]->data,
frameinfo->width,
frameinfo->height );
@@ -1836,6 +1913,7 @@ static int vj_perform_tag_render_chain_entry(veejay_t *info, int chain_entry, co
{
chroma_supersample(
settings->sample_mode,
effect_sampler,
frames[0]->data,
frameinfo->width,
frameinfo->height );
@@ -1916,6 +1994,7 @@ static int vj_perform_render_chain_entry(veejay_t *info, int chain_entry, const
if(sub_mode)
chroma_supersample(
settings->sample_mode,
effect_sampler,
frames[1]->data,
frameinfo->width,
frameinfo->height );
@@ -1925,6 +2004,7 @@ static int vj_perform_render_chain_entry(veejay_t *info, int chain_entry, const
{
chroma_supersample(
settings->sample_mode,
effect_sampler,
frames[0]->data,
frameinfo->width,
frameinfo->height );
@@ -1978,6 +2058,7 @@ int vj_perform_clip_complete_buffers(veejay_t * info, int entry, const int skip_
{
// next is downsampled or 420
chroma_subsample( settings->sample_mode,
effect_sampler,
frames[0]->data,frameinfo->width,
frameinfo->height );
subsample = 0;
@@ -2039,6 +2120,7 @@ int vj_perform_tag_complete_buffers(veejay_t * info, int entry, const int skip_i
{
// no more entries, or entry needs subsampling anyway
chroma_subsample( settings->sample_mode,
effect_sampler,
frames[0]->data,frameinfo->width,
frameinfo->height );
subsample = 0;
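
Two samplers exist on purpose: effect_sampler is sized for the full edit-list width and crop_sampler for the smaller cropped width, because a sampler's scratch buffer must hold one line of whatever frame it touches. The per-frame crop path added in this file boils down to three steps (condensed sketch; globals as in the diff above):

    /* Sketch of vj_perform_get_cropped_frame()'s crop pass:
     * supersample to 4:4:4, crop with the viewport, subsample again. */
    static void crop_pass(veejay_t *info, VJFrame *src)
    {
        chroma_supersample(info->settings->sample_mode, effect_sampler,
                           src->data, src->width, src->height);
        yuv_crop(src, crop_frame, &(info->settings->viewport));
        chroma_subsample(info->settings->sample_mode, crop_sampler,
                         crop_frame->data, crop_frame->width, crop_frame->height);
    }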

View File

@@ -7,6 +7,8 @@
void vj_perform_update_plugin_frame(VJFrame *frame);
void vj_perform_unlock_primary_frame( void );
VJFrame *vj_perform_init_plugin_frame(veejay_t *info);
VJFrameInfo *vj_perform_init_plugin_frame_info(veejay_t *info);
@@ -142,4 +144,9 @@ void vj_perform_record_clip_frame(veejay_t *info, int entry);
void vj_perform_record_tag_frame(veejay_t *info, int entry);
void vj_perform_get_output_frame_420p( veejay_t *info, uint8_t **frame, int w, int h );
int vj_perform_get_cropped_frame( veejay_t *info, uint8_t **frame, int crop );
int vj_perform_init_cropped_output_frame(veejay_t *info, VJFrame *src, int *dw, int *dh );
void vj_perform_get_crop_dimensions(veejay_t *info, int *w, int *h);
#endif