Mirror of https://github.com/game-stop/veejay.git
#178: refactored part of libvjnet and switched to MJPEG encoding/decoding instead of LZO. For unicast streams, the header is sent before the payload. For multicast streams, the header is included with each chunk and is defined in libvjnet/packet.h.
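In the new unicast protocol (see vj_server_send_frame_now and vj_client_read_frame_hdr in the diff below), the sender prefixes each MJPEG-encoded frame with a 10-byte ASCII header of the form "F%08dD" carrying the payload length; the receiver parses that header, reads exactly that many bytes, and hands them to avhelper_decode_video. The following is a minimal sketch of that framing only; the helper names are hypothetical and not part of veejay.

#include <stdio.h>

/* Build the 10-byte ASCII frame header "F%08dD" for a payload of 'len' bytes.
 * 'out' must hold at least 16 bytes, matching the hdr_buf used by the server. */
static void make_frame_header(char out[16], int len)
{
    snprintf(out, 16, "F%08dD", len);      /* e.g. len=51200 -> "F00051200D" */
}

/* Parse a received header; returns the payload length, or -1 if malformed. */
static int parse_frame_header(const char *hdr)
{
    int data_len = 0;
    if (sscanf(hdr, "F%08dD", &data_len) != 1)
        return -1;
    return data_len;
}

int main(void)
{
    char hdr[16];
    make_frame_header(hdr, 51200);
    printf("header '%s' -> payload length %d\n", hdr, parse_frame_header(hdr));
    return 0;
}

For multicast there is no separate size header: the per-chunk header defined in libvjnet/packet.h gains a data_size field (and seq_num/length shrink to uint16_t), so the receiver can compute the frame length from the chunk count and the size of the data in the final chunk (see mcast_recv_frame).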
@@ -55,8 +55,6 @@ How to activate:
3. echo "246:5000 224.0.0.50;" |sayVIMS
4. enable mcast sender on veejay: echo "022:1;" |sayVIMS -p 5000
4. press F2 to activate newest created stream
@@ -159,13 +159,18 @@ static int avhelper_build_table()
int avhelper_get_codec_by_key( int key )
{
#ifdef ARCH_X86_64
int64_t k = (int64_t) key;
#else
int k = key;
#endif
if( fourccTable == NULL ) {
/* lets initialize the hash of fourcc/codec_id pairs now */
if(avhelper_build_table() != 0)
return -1;
}
hnode_t *node = hash_lookup( fourccTable,(const void*) key);
hnode_t *node = hash_lookup( fourccTable,(const void*) k);
fourcc_node *fourcc = hnode_get(node);
if(fourcc) {
return fourcc->codec_id;
@@ -223,6 +228,37 @@ static void avhelper_close_input_file( AVFormatContext *s ) {
#endif
}

void *avhelper_get_mjpeg_decoder(VJFrame *output) {
el_decoder_t *x = (el_decoder_t*) vj_calloc( sizeof( el_decoder_t ));
if(!x) {
return NULL;
}

x->codec = avcodec_find_decoder( CODEC_ID_MJPEG );
if(x->codec == NULL) {
veejay_msg(0,"Unable to find MJPEG decoder");
return NULL;
}

#if LIBAVCODEC_BUILD > 5400
x->codec_ctx = avcodec_alloc_context3(x->codec);
if ( avcodec_open2( x->codec_ctx, x->codec, NULL ) < 0 )
#else
x->codec_ctx = avcodec_alloc_context();
if ( avcodec_open( x->codec_ctx, x->codec ) < 0 )
#endif
{
free(x);
return NULL;
}

x->frame = avhelper_alloc_frame();

x->output = yuv_yuv_template( NULL,NULL,NULL, output->width, output->height, alpha_fmt_to_yuv(output->format) );

return (void*) x;
}

void *avhelper_get_decoder( const char *filename, int dst_pixfmt, int dst_width, int dst_height ) {
char errbuf[512];
el_decoder_t *x = (el_decoder_t*) vj_calloc( sizeof( el_decoder_t ));
@@ -366,24 +402,6 @@ further:
x->frame = avhelper_alloc_frame();
x->input = yuv_yuv_template( NULL,NULL,NULL, x->codec_ctx->width,x->codec_ctx->height, x->pixfmt );

sws_template sws_tem;
veejay_memset(&sws_tem, 0,sizeof(sws_template));
sws_tem.flags = yuv_which_scaler();
x->scaler = yuv_init_swscaler( x->input,x->output, &sws_tem, yuv_sws_get_cpu_flags());

if( x->scaler == NULL ) {
veejay_msg(VEEJAY_MSG_ERROR,"FFmpeg: Failed to get scaler context for %dx%d in %d to %dx%d in %d",
x->codec_ctx->width,x->codec_ctx->height, x->pixfmt,
wid,hei,dst_pixfmt);
av_free(f);
avcodec_close( x->codec_ctx );
avhelper_close_input_file( x->avformat_ctx );
free(x->output);
free(x->input);
free(x);
return NULL;
}

return (void*) x;
}
@@ -404,8 +422,12 @@ void avhelper_close_decoder( void *ptr )
{
el_decoder_t *e = (el_decoder_t*) ptr;
avcodec_close( e->codec_ctx );
if(e->avformat_ctx) {
avhelper_close_input_file( e->avformat_ctx );
}
if(e->scaler) {
yuv_free_swscaler( e->scaler );
}
if(e->input)
free(e->input);
if(e->output)
@@ -415,17 +437,72 @@ void avhelper_close_decoder( void *ptr )
free(e);
}

int avhelper_decode_video( void *ptr, uint8_t *data, int len, uint8_t *dst[3] )
VJFrame *avhelper_get_input_frame( void *ptr )
{
el_decoder_t *e = (el_decoder_t*) ptr;
return e->input;
}

VJFrame *avhelper_get_output_frame( void *ptr)
{
el_decoder_t *e = (el_decoder_t*) ptr;
return e->output;
}

int avhelper_decode_video( void *ptr, uint8_t *data, int len )
{
int got_picture = 0;
el_decoder_t * e = (el_decoder_t*) ptr;

int result = avcodec_decode_video( e->codec_ctx, e->frame, &got_picture, data, len );

avhelper_frame_unref(e->frame);

if(!got_picture || result <= 0) {
avhelper_frame_unref( e->frame );
return 0;
}
return 1;
}

VJFrame *avhelper_get_decoded_video(void *ptr) {
el_decoder_t * e = (el_decoder_t*) ptr;

if(e->input == NULL) {
e->input = yuv_yuv_template( NULL,NULL,NULL, e->codec_ctx->width,e->codec_ctx->height, e->codec_ctx->pix_fmt );
}

e->input->data[0] = e->frame->data[0];
e->input->data[1] = e->frame->data[1];
e->input->data[2] = e->frame->data[2];
e->input->data[3] = e->frame->data[3];

return e->input;
}

void avhelper_rescale_video(void *ptr, uint8_t *dst[3])
{
el_decoder_t * e = (el_decoder_t*) ptr;

if(e->input == NULL) {
e->input = yuv_yuv_template( NULL,NULL,NULL, e->codec_ctx->width,e->codec_ctx->height, e->codec_ctx->pix_fmt );
}

if(e->scaler == NULL ) {
sws_template sws_tem;
veejay_memset(&sws_tem, 0,sizeof(sws_template));
sws_tem.flags = yuv_which_scaler();
e->scaler = yuv_init_swscaler( e->input,e->output, &sws_tem, yuv_sws_get_cpu_flags());
if(e->scaler == NULL) {
free(e->input);
veejay_msg(VEEJAY_MSG_DEBUG, "Unable to initialize scaler context for [%d,%d, @%d %p,%p,%p ] -> [%d,%d, @%d, %p,%p,%p ]",
e->input->width,e->input->height,e->input->format,e->input->data[0],e->input->data[1],e->input->data[2],
e->output->width,e->output->height,e->output->format,e->output->data[0],e->output->data[1],e->output->data[2]);
return;
}
}

e->input->data[0] = e->frame->data[0];
e->input->data[1] = e->frame->data[1];
@@ -438,7 +515,4 @@ int avhelper_decode_video( void *ptr, uint8_t *data, int len, uint8_t *dst[3] )

yuv_convert_any3( e->scaler, e->input, e->frame->linesize, e->output, e->input->format, e->pixfmt );

avhelper_frame_unref( e->frame );

return 1;
}
@@ -38,12 +38,22 @@ void *avhelper_get_codec( void *ptr );

void avhelper_close_decoder( void *ptr );

int avhelper_decode_video( void *ptr, uint8_t *data, int len, uint8_t *dst[3] );
int avhelper_decode_video( void *ptr, uint8_t *data, int len);

void avhelper_rescale_video(void *ptr, uint8_t *dst[4]);

void *avhelper_get_decoder( const char *filename, int dst_pixfmt, int dst_width, int dst_height );

VJFrame *avhelper_get_decoded_video(void *ptr);

void avhelper_free_context(AVCodecContext **avctx);

void avhelper_frame_unref(AVFrame *ptr);

void *avhelper_get_mjpeg_decoder(VJFrame *output_info);

VJFrame *avhelper_get_input_frame( void *ptr );

VJFrame *avhelper_get_output_frame( void *ptr);

#endif
@@ -100,7 +100,7 @@ void set_fourcc(lav_file_t *lav_file, char *fourcc)
fourcc_lc[i] = tolower(fourcc[i]);
}
fourcc_lc[4] = 0;
char *ptr = &fourcc_lc;
char *ptr = fourcc_lc;

/* hash the string */
int hash = 5381;
@@ -568,10 +568,14 @@ static int vj_avcodec_encode_video( AVCodecContext *ctx, uint8_t *buf, int len,

int res = avcodec_encode_video2( ctx, &pkt, frame, &got_packet_ptr);

if( res == 00 ) {
if( res == 0 ) {
return pkt.size;
}

if( res == -1) {
veejay_msg(0, "Unable to encode frame");
}

return 0;
}
else if( avcodec_encode_video ) {
@@ -626,7 +630,8 @@ int vj_avcodec_encode_frame(void *encoder, long nframe,int format, uint8_t *src
pict.linesize[0] = stride;
pict.linesize[1] = stride2;
pict.linesize[2] = stride2;

pict.width = av->width;
pict.height = av->height;
return vj_avcodec_encode_video( av->context, buf, buf_len, &pict );
}
@@ -892,8 +892,10 @@ int vj_el_get_video_frame(editlist *el, long nframe, uint8_t *dst[4])
break;
default:
return avhelper_decode_video( el->ctx[ N_EL_FILE(n) ], data, res, dst );
if( avhelper_decode_video( el->ctx[ N_EL_FILE(n) ], data, res ) ) {
avhelper_rescale_video( el->ctx[N_EL_FILE(n) ], dst );
return 1;
}
break;
}
@@ -22,6 +22,7 @@
|
||||
#include <config.h>
|
||||
#include <stdint.h>
|
||||
#include <pthread.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libstream/vj-tag.h>
|
||||
#include <libvjnet/vj-client.h>
|
||||
#include <veejay/vims.h>
|
||||
@@ -31,10 +32,10 @@
|
||||
#include <libvjmsg/vj-msg.h>
|
||||
#include <veejay/vims.h>
|
||||
#include <libstream/vj-net.h>
|
||||
#include <liblzo/lzo.h>
|
||||
#include <time.h>
|
||||
#include <libyuv/yuvconv.h>
|
||||
#include <libel/avcommon.h>
|
||||
#include <libel/avhelper.h>
|
||||
#include <libvje/effects/common.h>
|
||||
|
||||
typedef struct
|
||||
@@ -43,20 +44,10 @@ typedef struct
|
||||
pthread_t thread;
|
||||
int state;
|
||||
int have_frame;
|
||||
int error;
|
||||
int repeat;
|
||||
int w;
|
||||
int h;
|
||||
int f;
|
||||
int in_fmt;
|
||||
int in_w;
|
||||
int in_h;
|
||||
int af;
|
||||
uint8_t *buf;
|
||||
vj_client *v;
|
||||
VJFrame *info;
|
||||
VJFrame *dest; // there is no full range YUV + alpha in PIX_FMT family
|
||||
void *scaler;
|
||||
VJFrame *a;
|
||||
VJFrame *b;
|
||||
size_t bufsize;
|
||||
} threaded_t;
|
||||
|
||||
#define STATE_INACTIVE 0
|
||||
@@ -96,18 +87,13 @@ static void *reader_thread(void *data)
|
||||
int retrieve = 0;
|
||||
int success = 0;
|
||||
|
||||
vj_client *v = vj_client_alloc( t->w, t->h, t->af );
|
||||
if( v == NULL )
|
||||
return NULL;
|
||||
|
||||
v->lzo = lzo_new();
|
||||
if( v->lzo == NULL ) {
|
||||
vj_client_free(v);
|
||||
t->v = vj_client_alloc_stream(t->info);
|
||||
if(t->v == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
snprintf(buf,sizeof(buf), "%03d:%d;", VIMS_GET_FRAME, my_screen_id);
|
||||
success = vj_client_connect_dat( v, tag->source_name,tag->video_channel );
|
||||
success = vj_client_connect_dat( t->v, tag->source_name,tag->video_channel );
|
||||
|
||||
if( success > 0 ) {
|
||||
veejay_msg(VEEJAY_MSG_INFO, "Connecton established with %s:%d",tag->source_name, tag->video_channel + 5);
|
||||
@@ -128,7 +114,7 @@ static void *reader_thread(void *data)
|
||||
int ret = 0;
|
||||
|
||||
if( retrieve == 0 && t->have_frame == 0 ) {
|
||||
ret = vj_client_send( v, V_CMD,(unsigned char*) buf );
|
||||
ret = vj_client_send( t->v, V_CMD,(unsigned char*) buf );
|
||||
if( ret <= 0 ) {
|
||||
error = 1;
|
||||
}
|
||||
@@ -138,9 +124,9 @@ static void *reader_thread(void *data)
|
||||
}
|
||||
|
||||
if(!error && retrieve == 1 ) {
|
||||
res = vj_client_poll(v, V_CMD );
|
||||
res = vj_client_poll(t->v, V_CMD );
|
||||
if( res ) {
|
||||
if(vj_client_link_can_read( v, V_CMD ) ) {
|
||||
if(vj_client_link_can_read( t->v, V_CMD ) ) {
|
||||
retrieve = 2;
|
||||
}
|
||||
}
|
||||
@@ -153,50 +139,23 @@ static void *reader_thread(void *data)
|
||||
}
|
||||
|
||||
if(!error && retrieve == 2) {
|
||||
int strides[3] = { 0,0,0};
|
||||
int compr_len = 0;
|
||||
|
||||
if( vj_client_read_frame_header( v, &(t->in_w), &(t->in_h), &(t->in_fmt), &compr_len, &strides[0],&strides[1],&strides[2]) == 0 ) {
|
||||
int frame_len = vj_client_read_frame_hdr( t->v );
|
||||
if( frame_len <= 0 ) {
|
||||
error = 1;
|
||||
}
|
||||
|
||||
if(!error) {
|
||||
int need_rlock = 0;
|
||||
if( compr_len <= 0 )
|
||||
need_rlock = 1;
|
||||
|
||||
if( need_rlock ) {
|
||||
if( error == 0 ) {
|
||||
lock(t);
|
||||
}
|
||||
|
||||
if( t->bufsize < (t->in_w * t->in_h * 3) || t->buf == NULL ) {
|
||||
t->bufsize = t->in_w * t->in_h * 3;
|
||||
t->buf = (uint8_t*) realloc( t->buf, RUP8(t->bufsize));
|
||||
}
|
||||
|
||||
ret = vj_client_read_frame_data( v, compr_len, strides[0], strides[1], strides[2], t->buf );
|
||||
if( ret == 2 ) {
|
||||
if(!need_rlock) {
|
||||
lock(t);
|
||||
vj_client_decompress_frame_data( v, t->buf, t->in_fmt, t->in_w, t->in_h, compr_len, strides[0],strides[1],strides[2] );
|
||||
ret = vj_client_read_frame_data( t->v, frame_len );
|
||||
unlock(t);
|
||||
}
|
||||
}
|
||||
|
||||
if( need_rlock ) {
|
||||
unlock(t);
|
||||
}
|
||||
}
|
||||
|
||||
if(ret && t->buf) {
|
||||
if(ret) {
|
||||
t->have_frame = 1;
|
||||
t->in_fmt = v->in_fmt;
|
||||
t->in_w = v->in_width;
|
||||
t->in_h = v->in_height;
|
||||
retrieve = 0;
|
||||
}
|
||||
|
||||
if( ret <= 0 || t->buf == NULL ) {
|
||||
if( ret <= 0 ) {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG,"Error reading video frame from %s:%d",tag->source_name,tag->video_channel );
|
||||
error = 1;
|
||||
}
|
||||
@@ -205,12 +164,12 @@ NETTHREADRETRY:
|
||||
|
||||
if( error )
|
||||
{
|
||||
vj_client_close(v);
|
||||
vj_client_close(t->v);
|
||||
|
||||
veejay_msg(VEEJAY_MSG_INFO, " ZZzzzzz ... waiting for Link %s:%d to become ready", tag->source_name, tag->video_channel );
|
||||
net_delay( 0, 5 );
|
||||
|
||||
success = vj_client_connect_dat( v, tag->source_name,tag->video_channel );
|
||||
success = vj_client_connect_dat( t->v, tag->source_name,tag->video_channel );
|
||||
|
||||
if( t->state == 0 )
|
||||
{
|
||||
@@ -240,13 +199,10 @@ NETTHREADRETRY:
|
||||
|
||||
NETTHREADEXIT:
|
||||
|
||||
if(t->buf)
|
||||
free(t->buf);
|
||||
t->buf = NULL;
|
||||
if(v) {
|
||||
vj_client_close(v);
|
||||
vj_client_free(v);
|
||||
v = NULL;
|
||||
if(t->v) {
|
||||
vj_client_close(t->v);
|
||||
vj_client_free(t->v);
|
||||
t->v = NULL;
|
||||
}
|
||||
|
||||
veejay_msg(VEEJAY_MSG_INFO, "Network thread with %s: %d has exited",tag->source_name,tag->video_channel+5);
|
||||
@@ -259,22 +215,14 @@ static void *mcast_reader_thread(void *data)
|
||||
{
|
||||
vj_tag *tag = (vj_tag*) data;
|
||||
threaded_t *t = tag->priv;
|
||||
char buf[16];
|
||||
int retrieve = 0;
|
||||
int success = 0;
|
||||
|
||||
vj_client *v = vj_client_alloc( t->w, t->h, t->af );
|
||||
if( v == NULL )
|
||||
return NULL;
|
||||
|
||||
v->lzo = lzo_new();
|
||||
if( v->lzo == NULL ) {
|
||||
vj_client_free(v);
|
||||
t->v = vj_client_alloc_stream(t->info);
|
||||
if(t->v == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
success = vj_client_connect( v, NULL, tag->source_name, tag->video_channel );
|
||||
snprintf(buf,sizeof(buf), "%03d:%d;", VIMS_VIDEO_MCAST_START, 0 );
|
||||
success = vj_client_connect( t->v, NULL, tag->source_name, tag->video_channel );
|
||||
|
||||
if( success > 0 ) {
|
||||
veejay_msg(VEEJAY_MSG_INFO, "Multicast connecton established with %s:%d",tag->source_name, tag->video_channel + 5);
|
||||
@@ -292,58 +240,27 @@ static void *mcast_reader_thread(void *data)
|
||||
const int padded = 256;
|
||||
int max_len = padded + RUP8( 1920 * 1080 * 3 );
|
||||
|
||||
t->buf = (uint8_t*) vj_malloc( sizeof(uint8_t) * max_len );
|
||||
|
||||
for( ;; ) {
|
||||
int error = 0;
|
||||
int res = 0;
|
||||
int ret = 0;
|
||||
int strides[3] = { 0,0,0};
|
||||
int compr_len = 0;
|
||||
|
||||
t->bufsize = padded + RUP8(t->in_w * t->in_h * 3);
|
||||
if( t->bufsize <= padded )
|
||||
t->bufsize = max_len;
|
||||
else
|
||||
max_len = t->bufsize;
|
||||
|
||||
if( t->bufsize != max_len ) {
|
||||
free(t->buf);
|
||||
t->buf = (uint8_t*) vj_malloc( sizeof(uint8_t) * t->bufsize );
|
||||
}
|
||||
|
||||
if( vj_client_read_mcast_data( v, &compr_len, &strides[0], &strides[1], &strides[2], &(t->in_w), &(t->in_h), &(t->in_fmt), NULL, max_len ) == 0 ) {
|
||||
if( vj_client_read_mcast_data( t->v, max_len ) < 0 ) {
|
||||
error = 1;
|
||||
}
|
||||
|
||||
if( compr_len > 0 ) {
|
||||
lock(t);
|
||||
vj_client_decompress_frame_data( v, t->buf, t->in_fmt, t->in_w, t->in_h, compr_len, strides[0],strides[1],strides[2] );
|
||||
unlock(t);
|
||||
}
|
||||
else {
|
||||
lock(t);
|
||||
veejay_memcpy( t->buf, v->space + 44, strides[0] + strides[1] + strides[2] );
|
||||
unlock(t);
|
||||
}
|
||||
|
||||
if(error == 0) {
|
||||
t->have_frame = 1;
|
||||
t->in_fmt = v->in_fmt;
|
||||
t->in_w = v->in_width;
|
||||
t->in_h = v->in_height;
|
||||
}
|
||||
|
||||
NETTHREADRETRY:
|
||||
|
||||
if( error )
|
||||
{
|
||||
vj_client_close(v);
|
||||
vj_client_close(t->v);
|
||||
|
||||
veejay_msg(VEEJAY_MSG_INFO, " ZZzzzzz ... waiting for multicast server %s:%d to become ready", tag->source_name, tag->video_channel );
|
||||
net_delay( 0, 5 );
|
||||
|
||||
success = vj_client_connect( v,NULL,tag->source_name,tag->video_channel );
|
||||
success = vj_client_connect( t->v,NULL,tag->source_name,tag->video_channel );
|
||||
|
||||
if( t->state == 0 )
|
||||
{
|
||||
@@ -370,13 +287,10 @@ NETTHREADRETRY:
|
||||
|
||||
NETTHREADEXIT:
|
||||
|
||||
if(t->buf)
|
||||
free(t->buf);
|
||||
t->buf = NULL;
|
||||
if(v) {
|
||||
vj_client_close(v);
|
||||
vj_client_free(v);
|
||||
v = NULL;
|
||||
if(t->v) {
|
||||
vj_client_close(t->v);
|
||||
vj_client_free(t->v);
|
||||
t->v = NULL;
|
||||
}
|
||||
|
||||
veejay_msg(VEEJAY_MSG_INFO, "Multicast receiver %s: %d has stopped",tag->source_name,tag->video_channel+5);
|
||||
@@ -384,30 +298,13 @@ NETTHREADEXIT:
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static void net_thread_free(vj_tag *tag)
|
||||
{
|
||||
threaded_t *t = (threaded_t*) tag->priv;
|
||||
|
||||
if( t->scaler )
|
||||
yuv_free_swscaler( t->scaler );
|
||||
|
||||
if( t->a )
|
||||
free(t->a);
|
||||
if( t->b )
|
||||
free(t->b);
|
||||
|
||||
t->a = NULL;
|
||||
t->b = NULL;
|
||||
t->scaler = NULL;
|
||||
}
|
||||
|
||||
void *net_threader(VJFrame *frame)
|
||||
{
|
||||
threaded_t *t = (threaded_t*) vj_calloc(sizeof(threaded_t));
|
||||
return (void*) t;
|
||||
}
|
||||
|
||||
int net_thread_get_frame( vj_tag *tag, uint8_t *buffer[3] )
|
||||
int net_thread_get_frame( vj_tag *tag, VJFrame *dst )
|
||||
{
|
||||
threaded_t *t = (threaded_t*) tag->priv;
|
||||
|
||||
@@ -416,71 +313,44 @@ int net_thread_get_frame( vj_tag *tag, uint8_t *buffer[3] )
|
||||
/* frame ready ? */
|
||||
lock(t);
|
||||
state = t->state;
|
||||
if( state == 0 || t->bufsize == 0 || t->buf == NULL || t->have_frame == 0 ) {
|
||||
if( state == 0 || t->have_frame == 0 ) {
|
||||
unlock(t);
|
||||
return 1; // not active or no frame
|
||||
} // just continue when t->have_frame == 0
|
||||
|
||||
//@ color space convert frame
|
||||
int b_len = t->in_w * t->in_h;
|
||||
int buvlen = b_len;
|
||||
|
||||
//FIXME alpha channel not yet supported in unicast/mcast streaming, work arround. refactor in libvevosample
|
||||
// bad image source points (alpha channel pointer is never set)
|
||||
if( t->in_fmt == PIX_FMT_YUVA420P ) {
|
||||
t->in_fmt = PIX_FMT_YUVJ420P;
|
||||
} else if (t->in_fmt == PIX_FMT_YUVA422P ) {
|
||||
t->in_fmt = PIX_FMT_YUVJ422P;
|
||||
} else if (t->in_fmt == PIX_FMT_YUVA444P ) {
|
||||
t->in_fmt = PIX_FMT_YUVJ444P;
|
||||
}
|
||||
|
||||
switch( t->in_fmt ) {
|
||||
case PIX_FMT_YUV420P:
|
||||
case PIX_FMT_YUVJ420P:
|
||||
buvlen = b_len / 4;
|
||||
break;
|
||||
case PIX_FMT_YUV444P:
|
||||
case PIX_FMT_YUVJ444P:
|
||||
buvlen = b_len;
|
||||
break;
|
||||
default:
|
||||
buvlen = b_len / 2;
|
||||
break;
|
||||
}
|
||||
VJFrame *src = avhelper_get_decoded_video(t->v->decoder);
|
||||
|
||||
if( t->a == NULL )
|
||||
t->a = yuv_yuv_template( t->buf, t->buf + b_len, t->buf+ b_len+ buvlen,t->in_w,t->in_h, t->in_fmt);
|
||||
|
||||
if( t->b == NULL )
|
||||
t->b = yuv_yuv_template( buffer[0],buffer[1], buffer[2],t->w,t->h,t->f);
|
||||
|
||||
if( t->scaler == NULL ) {
|
||||
if(t->scaler == NULL) {
|
||||
sws_template sws_templ;
|
||||
memset( &sws_templ, 0, sizeof(sws_template));
|
||||
sws_templ.flags = yuv_which_scaler();
|
||||
t->scaler = yuv_init_swscaler( t->a,t->b, &sws_templ, yuv_sws_get_cpu_flags() );
|
||||
t->scaler = yuv_init_swscaler( src,t->dest, &sws_templ, yuv_sws_get_cpu_flags() );
|
||||
}
|
||||
|
||||
yuv_convert_and_scale( t->scaler, t->a,t->b );
|
||||
yuv_convert_and_scale( t->scaler, src,dst );
|
||||
|
||||
t->have_frame = 0; // frame is consumed
|
||||
|
||||
t->have_frame = 0;
|
||||
unlock(t);
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int net_thread_start(vj_tag *tag, int wid, int height, int pixelformat)
|
||||
int net_thread_start(vj_tag *tag, VJFrame *info)
|
||||
{
|
||||
threaded_t *t = (threaded_t*)tag->priv;
|
||||
int p_err = 0;
|
||||
|
||||
t->dest = yuv_yuv_template(NULL,NULL,NULL, info->width,info->height, alpha_fmt_to_yuv(info->format));
|
||||
if(t->dest == NULL) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
pthread_mutex_init( &(t->mutex), NULL );
|
||||
t->w = wid;
|
||||
t->h = height;
|
||||
t->af = pixelformat;
|
||||
t->f = get_ffmpeg_pixfmt(pixelformat);
|
||||
|
||||
t->have_frame = 0;
|
||||
t->info = info;
|
||||
|
||||
|
||||
if( tag->source_type == VJ_TAG_TYPE_MCAST ) {
|
||||
p_err = pthread_create( &(t->thread), NULL, &mcast_reader_thread, (void*) tag );
|
||||
@@ -512,8 +382,9 @@ void net_thread_stop(vj_tag *tag)
|
||||
unlock(t);
|
||||
|
||||
pthread_mutex_destroy( &(t->mutex));
|
||||
|
||||
net_thread_free(tag);
|
||||
if(t->dest) {
|
||||
free(t->dest);
|
||||
}
|
||||
|
||||
veejay_msg(VEEJAY_MSG_INFO, "Disconnected from Veejay host %s:%d", tag->source_name,tag->video_channel);
|
||||
}
|
||||
@@ -545,5 +416,3 @@ int net_already_opened(const char *filename, int n, int channel)
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -22,9 +22,9 @@
|
||||
|
||||
int net_already_opened(const char *filname, int n, int chan);
|
||||
void net_thread_stop(vj_tag *tag);
|
||||
int net_thread_start(vj_tag *tag, int w, int h, int f);
|
||||
int net_thread_start(vj_tag *tag, VJFrame *info);
|
||||
void net_thread_remote(void *priv, void *p );
|
||||
int net_thread_get_frame( vj_tag *tag, uint8_t *buffer[3]);
|
||||
int net_thread_get_frame( vj_tag *tag, VJFrame *dst);
|
||||
void net_thread_exit(vj_tag *tag);
|
||||
void *net_threader(VJFrame *frame);
|
||||
void net_set_screen_id(int id);
|
||||
|
||||
@@ -2502,7 +2502,7 @@ int vj_tag_enable(int t1) {
|
||||
}
|
||||
if(tag->source_type == VJ_TAG_TYPE_NET || tag->source_type == VJ_TAG_TYPE_MCAST )
|
||||
{
|
||||
if(!net_thread_start(tag, vj_tag_input->width , vj_tag_input->height, vj_tag_input->pix_fmt))
|
||||
if(!net_thread_start(tag, _tag_info->effect_frame1))
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_ERROR,
|
||||
"Unable to start thread");
|
||||
@@ -3633,7 +3633,7 @@ int vj_tag_get_frame(int t1, VJFrame *dst, uint8_t * abuffer)
|
||||
#endif
|
||||
case VJ_TAG_TYPE_MCAST:
|
||||
case VJ_TAG_TYPE_NET:
|
||||
if(!net_thread_get_frame( tag,buffer ))
|
||||
if(!net_thread_get_frame( tag,dst ))
|
||||
return 0;
|
||||
return 1;
|
||||
break;
|
||||
|
||||
@@ -192,8 +192,9 @@ int mcast_recv_packet_frame( mcast_receiver *v )
|
||||
if(res == - 1)
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Error receiving multicast packet:%s", strerror(errno));
|
||||
|
||||
return 0;
|
||||
return res;
|
||||
}
|
||||
|
||||
if( res != PACKET_PAYLOAD_SIZE ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Multicast receive error, expected %d bytes got %d bytes",
|
||||
PACKET_PAYLOAD_SIZE, res );
|
||||
@@ -312,7 +313,8 @@ uint8_t *mcast_recv_frame( mcast_receiver *v, int *len, int *hdrlen, uint8_t *re
|
||||
//@ return newest full frame
|
||||
if( full_frame ) {
|
||||
packet_buffer_t *pb = q->slot[d_slot];
|
||||
*len = pb->len;
|
||||
// we can calculate the size of the data directly
|
||||
*len = (pb->hdr.length * CHUNK_SIZE) + pb->hdr.data_size;
|
||||
*hdrlen = 0;
|
||||
return q->buf;
|
||||
}
|
||||
|
||||
@@ -144,7 +144,7 @@ static long stamp_make( mcast_sender *v )
|
||||
}
|
||||
|
||||
int mcast_send_frame( mcast_sender *v, const VJFrame *frame,
|
||||
uint8_t *buf, int total_len, long ms,int port_num, int mode)
|
||||
uint8_t *buf, int total_len, int port_num, int mode)
|
||||
{
|
||||
int i;
|
||||
int res = 0;
|
||||
@@ -153,7 +153,6 @@ int mcast_send_frame( mcast_sender *v, const VJFrame *frame,
|
||||
|
||||
packet_header_t header = packet_construct_header( 1 );
|
||||
|
||||
header.timeout = (uint32_t) (ms * 1000);
|
||||
header.usec = frame_num;
|
||||
|
||||
veejay_memset( chunk, 0,sizeof(chunk));
|
||||
@@ -161,7 +160,7 @@ int mcast_send_frame( mcast_sender *v, const VJFrame *frame,
|
||||
//@ If we can send in a single packet:
|
||||
if( total_len <= CHUNK_SIZE )
|
||||
{
|
||||
header.seq_num = 0; header.length = 1;
|
||||
header.seq_num = 0; header.length = 1; header.data_size = total_len;
|
||||
packet_put_padded_data( &header,chunk, buf, total_len);
|
||||
res = mcast_send( v, chunk, PACKET_PAYLOAD_SIZE, port_num );
|
||||
if(res <= 0 )
|
||||
@@ -178,6 +177,8 @@ int mcast_send_frame( mcast_sender *v, const VJFrame *frame,
|
||||
{
|
||||
const uint8_t *data = buf + (i * CHUNK_SIZE);
|
||||
header.seq_num = i;
|
||||
header.data_size = CHUNK_SIZE;
|
||||
|
||||
packet_put_data( &header, chunk, data );
|
||||
|
||||
res = mcast_send( v, chunk, PACKET_PAYLOAD_SIZE, port_num );
|
||||
@@ -191,9 +192,8 @@ int mcast_send_frame( mcast_sender *v, const VJFrame *frame,
|
||||
{
|
||||
i = header.length - 1;
|
||||
header.seq_num = i;
|
||||
int bytes_done = packet_put_padded_data( &header, chunk, buf + (i * CHUNK_SIZE), bytes_left );
|
||||
|
||||
veejay_memset( chunk + bytes_done, 0, (PACKET_PAYLOAD_SIZE-bytes_done));
|
||||
header.data_size = packet_put_padded_data( &header, chunk, buf + (i * CHUNK_SIZE), bytes_left );
|
||||
veejay_memset( chunk + header.data_size, 0, (PACKET_PAYLOAD_SIZE-header.data_size));
|
||||
res = mcast_send( v, chunk, PACKET_PAYLOAD_SIZE, port_num );
|
||||
if( res <= 0 )
|
||||
{
|
||||
|
||||
@@ -31,7 +31,7 @@ typedef struct
|
||||
mcast_sender *mcast_new_sender( const char *group_name );
|
||||
void mcast_set_interface( mcast_sender *s, const char *interface );
|
||||
int mcast_send( mcast_sender *s, const void *buf, int len, int port_num );
|
||||
int mcast_send_frame( mcast_sender *s, const VJFrame *frame , uint8_t *buf, int total_len,long ms, int port_num ,int mode);
|
||||
int mcast_send_frame( mcast_sender *s, const VJFrame *frame , uint8_t *buf, int total_len, int port_num ,int mode);
|
||||
void mcast_close_sender(mcast_sender *s );
|
||||
int mcast_sender_set_peer( mcast_sender *v, const char *hostname );
|
||||
#endif
|
||||
|
||||
@@ -26,8 +26,8 @@
|
||||
|
||||
void packet_dump_header( packet_header_t *h)
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_DEBUG, "Flag: %x, Sequence Num %d/%d, Timestamp %ld Timeout : %d",
|
||||
h->seq_num,h->length, h->usec,h->timeout );
|
||||
veejay_msg(VEEJAY_MSG_DEBUG, "Flag: %x, Sequence Num %d/%d, Timestamp %ld DattaSize : %d",
|
||||
h->seq_num,h->length, h->usec,h->data_size );
|
||||
}
|
||||
|
||||
packet_header_t packet_construct_header(uint8_t flag)
|
||||
@@ -37,22 +37,11 @@ packet_header_t packet_construct_header(uint8_t flag)
|
||||
packet_header_t header;
|
||||
header.seq_num = 0;
|
||||
header.usec = tv.tv_usec;
|
||||
header.timeout = 0;
|
||||
header.length = 0;
|
||||
header.data_size = 0;
|
||||
return header;
|
||||
}
|
||||
|
||||
packet_header_t packet_get_header(const void *data)
|
||||
{
|
||||
packet_header_t h,tmp;
|
||||
veejay_memcpy( &tmp, data, PACKET_HEADER_LENGTH );
|
||||
h.seq_num = tmp.seq_num;
|
||||
h.length = tmp.length;
|
||||
h.usec = tmp.usec;
|
||||
h.timeout = tmp.timeout;
|
||||
return h;
|
||||
}
|
||||
|
||||
packet_header_t *packet_get_hdr(const void *data)
|
||||
{
|
||||
return (packet_header_t*) data;
|
||||
|
||||
@@ -36,10 +36,10 @@ Ignoring machine byte order. Fix it yourself

typedef struct
{
uint32_t seq_num;
long usec;
uint32_t timeout;
uint32_t length;
uint16_t seq_num; /* sequence number */
long usec; /* time stamp */
uint16_t length; /* total number */
uint16_t data_size; /* number of data bytes, can be smaller than CHUNK_SIZE */
} packet_header_t;

#define MCAST_PACKET_SIZE 1500
@@ -50,8 +50,6 @@ typedef struct

packet_header_t packet_construct_header(uint8_t flag);

packet_header_t packet_get_header(const void *data);

packet_header_t *packet_get_hdr(const void *data);

int packet_get_data( packet_header_t *h, const void *data, uint8_t *plane);
@@ -30,9 +30,12 @@
|
||||
#include <arpa/inet.h>
|
||||
#include <netdb.h>
|
||||
#include <fcntl.h>
|
||||
#include <libavutil/avutil.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libvje/vje.h>
|
||||
#include <libvjnet/vj-client.h>
|
||||
#include <veejay/vims.h>
|
||||
#include <libel/avhelper.h>
|
||||
#include <libvjmsg/vj-msg.h>
|
||||
#include <libvjmem/vjmem.h>
|
||||
#include <libvjnet/cmd.h>
|
||||
@@ -40,7 +43,6 @@
|
||||
#include <libvjnet/mcastsender.h>
|
||||
#include <libavutil/pixfmt.h>
|
||||
#include <pthread.h>
|
||||
#include <liblzo/lzo.h>
|
||||
#include <libel/avcommon.h>
|
||||
#define VJC_OK 0
|
||||
#define VJC_NO_MEM 1
|
||||
@@ -50,19 +52,31 @@
|
||||
|
||||
extern int get_ffmpeg_pixfmt( int p);
|
||||
|
||||
vj_client *vj_client_alloc( int w, int h, int f )
|
||||
vj_client *vj_client_alloc( )
|
||||
{
|
||||
vj_client *v = (vj_client*) vj_calloc(sizeof(vj_client));
|
||||
if(!v)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
v->orig_width = w;
|
||||
v->orig_height = h;
|
||||
v->cur_width = w;
|
||||
v->cur_height = h;
|
||||
v->cur_fmt = get_ffmpeg_pixfmt(f);
|
||||
v->orig_fmt = v->cur_fmt;
|
||||
v->decoder = NULL;
|
||||
return v;
|
||||
}
|
||||
|
||||
vj_client *vj_client_alloc_stream(VJFrame *output)
|
||||
{
|
||||
vj_client *v = (vj_client*) vj_calloc(sizeof(vj_client));
|
||||
if(!v)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
|
||||
v->decoder = avhelper_get_mjpeg_decoder(output);
|
||||
if(v->decoder == NULL) {
|
||||
veejay_msg(0,"Failed to initialize MJPEG decoder");
|
||||
free(v);
|
||||
return NULL;
|
||||
}
|
||||
return v;
|
||||
}
|
||||
|
||||
@@ -77,11 +91,13 @@ void vj_client_free(vj_client *v)
|
||||
sock_t_free( v->fd[1] );
|
||||
}
|
||||
|
||||
if( v->decoder ) {
|
||||
avhelper_close_decoder( v->decoder );
|
||||
}
|
||||
|
||||
v->fd[0] = NULL;
|
||||
v->fd[1] = NULL;
|
||||
|
||||
if(v->lzo)
|
||||
lzo_free(v->lzo);
|
||||
free(v);
|
||||
v = NULL;
|
||||
}
|
||||
@@ -214,151 +230,76 @@ int vj_client_poll( vj_client *v, int sock_type )
|
||||
return sock_t_poll( v->fd[sock_type ]);
|
||||
}
|
||||
|
||||
static long vj_client_decompress( vj_client *t,uint8_t *in, uint8_t *out, int data_len, int Y, int UV , int header_len,
|
||||
uint32_t s1, uint32_t s2, uint32_t s3)
|
||||
{
|
||||
uint8_t *dst[3] = {
|
||||
out,
|
||||
out + Y,
|
||||
out + Y + UV };
|
||||
void vj_client_rescale_video( vj_client *v, uint8_t *data[4] ) {
|
||||
|
||||
if(v->decoder == NULL) {
|
||||
veejay_msg(0, "No decoder initialized");
|
||||
return;
|
||||
}
|
||||
|
||||
long total = lzo_decompress( t->lzo, in, data_len, dst, UV,s1,s2,s3 );
|
||||
if( total != (Y+UV+UV) )
|
||||
veejay_msg(0, "Error decompressing: expected %d bytes got %d", (Y+UV+UV),total);
|
||||
|
||||
return total;
|
||||
avhelper_rescale_video( v->decoder, data );
|
||||
}
|
||||
|
||||
/* packet negotation.
|
||||
* read a small portion (44 bytes for veejay, its veejay's full header size)
|
||||
* and try to identify which software is sending frames
|
||||
*
|
||||
*/
|
||||
#define FRAMEINFO_LENGTH 44
|
||||
static int vj_client_packet_negotiate( vj_client *v, int *tokens )
|
||||
{
|
||||
uint8_t line[64];
|
||||
veejay_memset( line,0, sizeof(line));
|
||||
|
||||
int plen = sock_t_recv( v->fd[0], line, FRAMEINFO_LENGTH );
|
||||
|
||||
if( plen == 0 ) {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG, "Remote closed connection");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if( plen < 0 )
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Error while reading header: %s", strerror(errno));
|
||||
return 0;
|
||||
}
|
||||
|
||||
if( plen != FRAMEINFO_LENGTH ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Error reading frame header, only got %d bytes", plen );
|
||||
return 0;
|
||||
}
|
||||
|
||||
int n = sscanf( (char*) line, "%04d%04d%04d%08d%08d%08d%08d",
|
||||
&tokens[0],
|
||||
&tokens[1],
|
||||
&tokens[2],
|
||||
|
||||
&tokens[3],
|
||||
&tokens[4],
|
||||
&tokens[5],
|
||||
&tokens[6]);
|
||||
|
||||
if( n != 7 ) {
|
||||
veejay_msg(0, "Unable to parse header data: '%s'", line );
|
||||
return 0;
|
||||
}
|
||||
|
||||
if( tokens[3] > 0 ) {
|
||||
if( v->lzo == NULL ) {
|
||||
v->lzo = lzo_new();
|
||||
}
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int vj_client_read_frame_header( vj_client *v, int *w, int *h, int *fmt, int *compr_len, int *stride1,int *stride2, int *stride3 )
|
||||
{
|
||||
int tokens[16];
|
||||
veejay_memset( tokens,0,sizeof(tokens));
|
||||
|
||||
int result = vj_client_packet_negotiate( v, tokens );
|
||||
if( result == 0 ) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if( tokens[0] <= 0 || tokens[1] <= 0 ) {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG, "Frame packet does not contain any data" );
|
||||
return 0;
|
||||
}
|
||||
|
||||
*w = tokens[0];
|
||||
*h = tokens[1];
|
||||
*fmt=tokens[2];
|
||||
*compr_len=tokens[3];
|
||||
*stride1=tokens[4];
|
||||
*stride2=tokens[5];
|
||||
*stride3=tokens[6];
|
||||
v->in_width = *w;
|
||||
v->in_height = *h;
|
||||
v->in_fmt = *fmt;
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int vj_client_read_mcast_data( vj_client *v, int *compr_len, int *stride1, int *stride2, int *stride3, int *w, int *h, int *fmt, uint8_t *dst, int buflen )
|
||||
int vj_client_read_mcast_data( vj_client *v, int max_len )
|
||||
{
|
||||
if( v->space == NULL ) {
|
||||
v->space = vj_calloc( sizeof(uint8_t) * RUP8( buflen ));
|
||||
v->space = vj_calloc( sizeof(uint8_t) * RUP8( max_len ));
|
||||
if(v->space == NULL)
|
||||
return 0;
|
||||
}
|
||||
|
||||
if( v->decoder == NULL ) {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG, "MJPEG decoder is not initialized");
|
||||
return 0;
|
||||
}
|
||||
|
||||
int space_len = 0;
|
||||
int hdr_len = 0;
|
||||
uint8_t *space = mcast_recv_frame( v->r, &space_len, &hdr_len, v->space );
|
||||
if( space == NULL ) {
|
||||
free(v->space);
|
||||
|
||||
if( space_len <= 0 ) {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG, "Nothing received from network");
|
||||
return 0;
|
||||
}
|
||||
|
||||
int n_tokens = sscanf( (char*) v->space, "%04d%04d%04d%08d%08d%08d%08d",w,h,fmt,compr_len,stride1,stride2,stride3 );
|
||||
if( n_tokens != 7 ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Frame header error in mcast frame, only parsed %d tokens", n_tokens );
|
||||
free(v->space);
|
||||
return 0;
|
||||
}
|
||||
|
||||
v->in_width = *w;
|
||||
v->in_height = *h;
|
||||
v->in_fmt = *fmt;
|
||||
|
||||
return 1;
|
||||
return avhelper_decode_video( v->decoder, space, space_len );
|
||||
}
|
||||
|
||||
int vj_client_read_frame_data( vj_client *v, int compr_len, int stride1,int stride2, int stride3, uint8_t *dst )
|
||||
int vj_client_read_frame_hdr( vj_client *v )
|
||||
{
|
||||
int datalen = (compr_len > 0 ? compr_len : stride1+stride2+stride3);
|
||||
if( (compr_len > 0) && ( v->space == NULL || v->space_len < compr_len) ) {
|
||||
if( v->space ) {
|
||||
free(v->space);
|
||||
v->space = NULL;
|
||||
}
|
||||
v->space_len = RUP8( compr_len );
|
||||
v->space = vj_calloc(sizeof(uint8_t) * v->space_len );
|
||||
if(!v->space) {
|
||||
veejay_msg(0,"Could not allocate memory for network stream");
|
||||
return 0;
|
||||
char header[16];
|
||||
memset(header,0,sizeof(header));
|
||||
int n = sock_t_recv( v->fd[0], header, 10 );
|
||||
if( n <= 0 ) {
|
||||
if( n == -1 ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Error '%s' while reading socket", strerror(errno));
|
||||
} else {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG,"Remote closed connection");
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
int data_len = 0;
|
||||
if( n != 10 ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Bad header");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(sscanf(header, "F%08dD", &data_len ) != 1 ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR,"Expected header information");
|
||||
return -1;
|
||||
}
|
||||
|
||||
return data_len;
|
||||
}
|
||||
|
||||
int vj_client_read_frame_data( vj_client *v, int datalen)
|
||||
{
|
||||
if(v->space_len < datalen || v->space == NULL) {
|
||||
v->space_len = RUP8(datalen);
|
||||
v->space = (uint8_t*) realloc( v->space, v->space_len );
|
||||
}
|
||||
|
||||
if( compr_len > 0 ) {
|
||||
int n = sock_t_recv( v->fd[0],v->space,datalen );
|
||||
|
||||
if( n <= 0 ) {
|
||||
@@ -367,186 +308,12 @@ int vj_client_read_frame_data( vj_client *v, int compr_len, int stride1,int stri
|
||||
} else {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG,"Remote closed connection");
|
||||
}
|
||||
return 0;
|
||||
return n;
|
||||
}
|
||||
|
||||
if( n != compr_len && n > 0 )
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Broken video packet , got %d out of %d bytes",n, compr_len );
|
||||
return 0;
|
||||
}
|
||||
return 2;
|
||||
}
|
||||
else {
|
||||
int n = sock_t_recv( v->fd[0], dst, datalen );
|
||||
|
||||
if( n != (stride1 + stride2 + stride3) )
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
return avhelper_decode_video( v->decoder, v->space, n );
|
||||
}
|
||||
|
||||
void vj_client_decompress_frame_data( vj_client *v, uint8_t *dst, int fmt, int w, int h, int compr_len, int stride1, int stride2, int stride3 )
|
||||
{
|
||||
int y_len = w * h;
|
||||
int uv_len = 0;
|
||||
switch(fmt) //@ veejay is sending compressed YUV data, calculate UV size
|
||||
{
|
||||
case PIX_FMT_YUV422P:
|
||||
case PIX_FMT_YUVJ422P:
|
||||
case PIX_FMT_YUVA422P:
|
||||
uv_len = y_len / 2;
|
||||
break;
|
||||
case PIX_FMT_YUV420P:
|
||||
case PIX_FMT_YUVJ420P:
|
||||
case PIX_FMT_YUVA420P:
|
||||
uv_len = y_len / 4;
|
||||
break;
|
||||
default:
|
||||
uv_len = y_len;
|
||||
break;
|
||||
}
|
||||
|
||||
uint8_t *addr = v->space;
|
||||
|
||||
if( v->mcast )
|
||||
addr = v->space + FRAMEINFO_LENGTH;
|
||||
|
||||
vj_client_decompress( v, addr, dst, compr_len, y_len, uv_len ,0, stride1,stride2,stride3);
|
||||
}
|
||||
|
||||
uint8_t *vj_client_read_i( vj_client *v, uint8_t *dst, ssize_t *dstlen, int *ret )
|
||||
{
|
||||
uint32_t p[4] = {0, 0,0,0 };
|
||||
uint32_t strides[4] = { 0,0,0,0 };
|
||||
|
||||
int tokens[16];
|
||||
|
||||
int y_len = 0;
|
||||
int uv_len = 0;
|
||||
|
||||
memset( tokens,0,sizeof(tokens));
|
||||
|
||||
int result = vj_client_packet_negotiate( v, tokens );
|
||||
if( result == 0 ) {
|
||||
*ret = -1;
|
||||
return dst;
|
||||
}
|
||||
p[0] = tokens[0]; //w
|
||||
p[1] = tokens[1]; //h
|
||||
p[2] = tokens[2]; //fmt
|
||||
p[3] = tokens[3]; //compr len
|
||||
strides[0] = tokens[4]; // 0
|
||||
strides[1] = tokens[5]; // 1
|
||||
strides[2] = tokens[6]; // 2
|
||||
v->in_width = p[0];
|
||||
v->in_height = p[1];
|
||||
v->in_fmt = p[2];
|
||||
if( p[0] <= 0 || p[1] <= 0 ) {
|
||||
veejay_msg(0, "Invalid values in network frame header");
|
||||
*ret = -1;
|
||||
return dst;
|
||||
}
|
||||
|
||||
if( v->space == NULL || v->space_len < p[3] ) {
|
||||
if( v->space ) {
|
||||
free(v->space);
|
||||
v->space = NULL;
|
||||
}
|
||||
v->space_len = RUP8( p[3] );
|
||||
v->space = vj_calloc(sizeof(uint8_t) * v->space_len );
|
||||
if(!v->space) {
|
||||
veejay_msg(0,"Could not allocate memory for network stream");
|
||||
*ret = -1;
|
||||
return dst;
|
||||
}
|
||||
}
|
||||
|
||||
uv_len = 0;
|
||||
y_len = p[0] * p[1];
|
||||
|
||||
if( p[3] > 0 ) {
|
||||
switch(v->in_fmt ) //@ veejay is sending compressed YUV data, calculate UV size
|
||||
{
|
||||
case PIX_FMT_YUV422P:
|
||||
case PIX_FMT_YUVJ422P:
|
||||
uv_len = y_len / 2;
|
||||
break;
|
||||
case PIX_FMT_YUV420P:
|
||||
case PIX_FMT_YUVJ420P:
|
||||
uv_len = y_len / 4;break;
|
||||
default:
|
||||
uv_len = y_len;
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Unknown pixel format: %02x", v->in_fmt);
|
||||
*ret = -1;
|
||||
return dst;
|
||||
break;
|
||||
}
|
||||
int n = sock_t_recv( v->fd[0],v->space,p[3] );
|
||||
if( n <= 0 ) {
|
||||
if( n == -1 ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Error '%s' while reading socket", strerror(errno));
|
||||
} else {
|
||||
veejay_msg(VEEJAY_MSG_DEBUG,"Remote closed connection");
|
||||
}
|
||||
*ret = -1;
|
||||
return dst;
|
||||
}
|
||||
|
||||
if( n != p[3] && n > 0 )
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Broken video packet , got %d out of %d bytes",n, p[3] );
|
||||
*ret = -1;
|
||||
return dst;
|
||||
}
|
||||
|
||||
|
||||
if( *dstlen < (y_len*3) || dst == NULL ) {
|
||||
dst = realloc( dst, RUP8( y_len * 3) );
|
||||
*dstlen = y_len * 3;
|
||||
}
|
||||
|
||||
//@ decompress YUV buffer
|
||||
vj_client_decompress( v, v->space, dst, p[3], y_len, uv_len , 0, strides[0],strides[1],strides[2]);
|
||||
|
||||
if( v->in_fmt == v->cur_fmt && v->cur_width == p[0] && v->cur_height == p[1] ) {
|
||||
*ret = 1;
|
||||
return dst;
|
||||
}
|
||||
|
||||
|
||||
*ret = 2;
|
||||
return dst; //@ caller will scale frame in dst
|
||||
} else {
|
||||
|
||||
if( *dstlen < (strides[0] + strides[1] + strides[2]) || dst == NULL ) {
|
||||
dst = realloc( dst, RUP8( strides[0]+strides[1]+strides[2]) );
|
||||
*dstlen = strides[0] + strides[1] + strides[2];
|
||||
}
|
||||
|
||||
int n = sock_t_recv( v->fd[0], dst, strides[0] + strides[1] + strides[2] );
|
||||
if( n != (strides[0] + strides[1] + strides[2] ) )
|
||||
{
|
||||
*ret = -1;
|
||||
return dst;
|
||||
}
|
||||
|
||||
if( v->in_fmt == v->cur_fmt && v->cur_width == p[0] && v->cur_height == p[1] ) {
|
||||
*ret = 1;
|
||||
return dst;
|
||||
}
|
||||
|
||||
*ret = 2;
|
||||
return dst;
|
||||
}
|
||||
return dst;
|
||||
}
|
||||
|
||||
|
||||
int vj_client_read_no_wait(vj_client *v, int sock_type, uint8_t *dst, int bytes )
|
||||
{
|
||||
if( v->mcast )
|
||||
@@ -616,6 +383,7 @@ void vj_client_close( vj_client *v )
|
||||
v->fd[1] = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
int vj_client_test(char *host, int port)
|
||||
|
||||
@@ -21,29 +21,17 @@
|
||||
|
||||
typedef struct
|
||||
{
|
||||
int planes[3];
|
||||
int cur_width;
|
||||
int cur_height;
|
||||
int cur_fmt;
|
||||
int in_width;
|
||||
int in_height;
|
||||
int in_fmt;
|
||||
|
||||
int orig_width;
|
||||
int orig_height;
|
||||
int orig_fmt;
|
||||
|
||||
uint8_t *space;
|
||||
ssize_t space_len;
|
||||
int mcast;
|
||||
void *lzo;
|
||||
unsigned char *blob;
|
||||
|
||||
void *r;
|
||||
void *s;
|
||||
|
||||
void *fd[2];
|
||||
int ports[2];
|
||||
|
||||
void *decoder;
|
||||
} vj_client;
|
||||
|
||||
int vj_client_link_can_write(vj_client *v, int s);
|
||||
@@ -56,8 +44,6 @@ void vj_client_flush( vj_client *v, int delay );
|
||||
|
||||
int vj_client_poll( vj_client *v, int sock_type );
|
||||
|
||||
uint8_t *vj_client_read_i(vj_client *v, uint8_t *dst, ssize_t *len, int *ret );
|
||||
|
||||
int vj_client_read( vj_client *v, int sock_type, uint8_t *dst, int bytes );
|
||||
|
||||
int vj_client_read_no_wait( vj_client *v, int sock_type, uint8_t *dst, int bytes );
|
||||
@@ -68,7 +54,9 @@ int vj_client_send( vj_client *v, int sock_type,unsigned char *buf);
|
||||
|
||||
int vj_client_send_buf( vj_client *v, int sock_type,unsigned char *buf, int len);
|
||||
|
||||
vj_client *vj_client_alloc(int w , int h, int f);
|
||||
vj_client *vj_client_alloc();
|
||||
|
||||
vj_client *vj_client_alloc_stream(VJFrame *info);
|
||||
|
||||
void vj_client_free(vj_client *v);
|
||||
|
||||
@@ -78,14 +66,13 @@ int vj_client_window_sizes( int socket_fd, int *r, int *s );
|
||||
|
||||
int vj_client_connect_dat(vj_client *v, char *host, int port_id );
|
||||
|
||||
int vj_client_read_mcast_data( vj_client *v, int *compr_len, int *stride1, int *stride2, int *stride3, int *w, int *h, int *fmt, uint8_t *dst, int buflen );
|
||||
int vj_client_read_mcast_data( vj_client *v, int buflen );
|
||||
|
||||
int vj_client_read_frame_data( vj_client *v, int datalen);
|
||||
|
||||
void vj_client_decompress_frame_data( vj_client *v, uint8_t *dst, int fmt, int w, int h, int compr_len, int stride1, int stride2, int stride3 );
|
||||
void vj_client_rescale_video( vj_client *v, uint8_t *data[4] );
|
||||
|
||||
int vj_client_read_frame_data( vj_client *v, int compr_len, int stride1,int stride2, int stride3, uint8_t *dst );
|
||||
|
||||
int vj_client_read_frame_header( vj_client *v, int *w, int *h, int *fmt, int *compr_len, int *stride1,int *stride2, int *stride3 );
|
||||
int vj_client_read_frame_hdr( vj_client *v );
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
@@ -30,9 +30,14 @@
|
||||
#include <netinet/tcp.h>
|
||||
#include <netdb.h>
|
||||
#include <errno.h>
|
||||
|
||||
#include <libavutil/avutil.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
|
||||
#include <libvjmsg/vj-msg.h>
|
||||
#include <veejay/vims.h>
|
||||
#include <libvje/vje.h>
|
||||
#include <libel/avhelper.h>
|
||||
#include <libvjmem/vjmem.h>
|
||||
#include <libvjnet/mcastreceiver.h>
|
||||
#include <libvjnet/mcastsender.h>
|
||||
@@ -524,16 +529,23 @@ int vj_server_link_can_read( vj_server *vje, int link_id)
return 1;
return 0;
}

static int vj_server_send_frame_now( vj_server *vje, int link_id, uint8_t *buf, int len )
{
unsigned int total = 0;

vj_link **Link = (vj_link**) vje->link;

/* write size of data to header */
char hdr_buf[16];
snprintf(hdr_buf, sizeof(hdr_buf), "F%08dD", len );
if( sock_t_send_fd( Link[link_id]->handle, vje->send_size, hdr_buf, 10 ) <= 0 ) {
veejay_msg(0, "Unable to send header to %s: %s", (char*)(inet_ntoa(vje->remote.sin_addr)),strerror(errno));
return 0;
}

total = sock_t_send_fd( Link[link_id]->handle, vje->send_size, buf, len);
if( vje->logfd ) {
fprintf(vje->logfd, "sent frame %d of %d bytes to handle %d (link %d) %s\n", total,len, Link[link_id]->handle,link_id,(char*)(inet_ntoa(vje->remote.sin_addr)) );
// printbuf( vje->logfd, buf, len );
}

if( total <= 0 )
@@ -546,15 +558,15 @@ static int vj_server_send_frame_now( vj_server *vje, int link_id, uint8_t *buf,
|
||||
return total;
|
||||
}
|
||||
|
||||
int vj_server_send_frame( vj_server *vje, int link_id, uint8_t *buf, int len,
|
||||
VJFrame *frame, long ms )
|
||||
int vj_server_send_frame( vj_server *vje, int link_id, uint8_t *buf, int len, VJFrame *frame )
|
||||
{
|
||||
if(!vje->use_mcast )
|
||||
{
|
||||
if( vj_server_link_can_write( vje, link_id ))
|
||||
{
|
||||
return vj_server_send_frame_now( vje, link_id, buf, len );
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Link %d's socket not ready for immediate send: %s", link_id, strerror(errno));
|
||||
}
|
||||
return 0;
|
||||
@@ -563,7 +575,7 @@ int vj_server_send_frame( vj_server *vje, int link_id, uint8_t *buf, int len,
|
||||
{
|
||||
vj_proto **proto = (vj_proto**) vje->protocol;
|
||||
if( vje->server_type == V_CMD )
|
||||
return mcast_send_frame( proto[0]->s, frame, buf,len,ms, vje->ports[0],vje->mcast_gray );
|
||||
return mcast_send_frame( proto[0]->s, frame, buf,len,vje->ports[0],vje->mcast_gray );
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -60,8 +60,7 @@ int vj_server_poll(vj_server * vje);
|
||||
|
||||
int vj_server_send(vj_server *vje, int link_id, uint8_t *buf, int len);
|
||||
|
||||
int vj_server_send_frame(vj_server *vje, int link_id, uint8_t *buf, int total_len, VJFrame *frame, long ms
|
||||
);
|
||||
int vj_server_send_frame(vj_server *vje, int link_id, uint8_t *buf, int total_len, VJFrame *frame);
|
||||
|
||||
int vj_server_init(void);
|
||||
|
||||
|
||||
@@ -63,6 +63,7 @@ typedef struct
|
||||
} vj_sws;
|
||||
|
||||
static int sws_context_flags_ = 0;
|
||||
static int full_range_pixel_value_ = 0;
|
||||
static int ffmpeg_aclib[AV_PIX_FMT_NB];
|
||||
|
||||
#define put(a,b) ffmpeg_aclib[a] = b
|
||||
@@ -92,12 +93,19 @@ static struct {
|
||||
{ PIX_FMT_BGRA, "PIX_FMT_BGRA"},
|
||||
{ PIX_FMT_ARGB, "PIX_FMT_ARGB"},
|
||||
{ PIX_FMT_ABGR, "PIX_FMT_ABGR"},
|
||||
{ PIX_FMT_YUVA422P, "PIX_FMT_YUVA422P"},
|
||||
{ PIX_FMT_YUVA444P, "PIX_FMT_YUVA444P"},
|
||||
{ 0 , NULL}
|
||||
|
||||
};
|
||||
|
||||
|
||||
const char *yuv_get_pixfmt_description(int fmt)
|
||||
{
|
||||
int i;
|
||||
for( i = 0; pixstr[i].s != NULL ; i ++ )
|
||||
if( fmt == pixstr[i].i )
|
||||
return pixstr[i].s;
|
||||
return "NONE";
|
||||
}
|
||||
|
||||
static void yuv_pixstr( const char *s, const char *s2, int fmt ) {
|
||||
const char *str = NULL;
|
||||
int i;
|
||||
@@ -467,6 +475,25 @@ void yuv_plane_sizes( VJFrame *src, int *p1, int *p2, int *p3, int *p4 )
|
||||
}
|
||||
}
|
||||
|
||||
void yuv_set_pixel_range(int full_range)
|
||||
{
|
||||
full_range_pixel_value_ = full_range;
|
||||
}
|
||||
|
||||
int alpha_fmt_to_yuv(int fmt)
|
||||
{
|
||||
switch(fmt) {
|
||||
case PIX_FMT_YUVA422P:
|
||||
return (full_range_pixel_value_ ? PIX_FMT_YUVJ422P: PIX_FMT_YUV422P ); break;
|
||||
case PIX_FMT_YUVA420P:
|
||||
return (full_range_pixel_value_ ? PIX_FMT_YUVJ420P: PIX_FMT_YUV420P ); break;
|
||||
case PIX_FMT_YUVA444P:
|
||||
return (full_range_pixel_value_ ? PIX_FMT_YUVJ444P: PIX_FMT_YUV444P ); break;
|
||||
|
||||
}
|
||||
return fmt;
|
||||
}
|
||||
|
||||
int yuv_to_alpha_fmt(int fmt)
|
||||
{
|
||||
switch(fmt) {
|
||||
|
||||
@@ -29,9 +29,10 @@ int vj_to_pixfmt(int fmt);
|
||||
int pixfmt_to_vj(int pixfmt);
|
||||
int pixfmt_is_full_range(int pixfmt);
|
||||
int vj_is_full_range(int fmt);
|
||||
|
||||
const char *yuv_get_pixfmt_description(int fmt);
|
||||
int yuv_to_alpha_fmt(int fmt);
|
||||
|
||||
int alpha_fmt_to_yuv(int fmt);
|
||||
void yuv_set_pixel_range(int full_range);
|
||||
// yuv 4:2:2 packed to yuv 4:2:0 planar
|
||||
void vj_yuy2toyv12( uint8_t *y, uint8_t *u, uint8_t *v, uint8_t *in, int w, int h);
|
||||
// yuv 4:2:2 packet to yuv 4:2:2 planar
|
||||
|
||||
@@ -69,7 +69,6 @@
|
||||
#ifndef X_DISPLAY_MISSING
|
||||
#include <veejay/x11misc.h>
|
||||
#endif
|
||||
#include <libvjnet/vj-client.h>
|
||||
#ifdef HAVE_SYS_SOUNDCARD_H
|
||||
#include <sys/soundcard.h>
|
||||
#endif
|
||||
@@ -85,6 +84,7 @@
|
||||
#include <libel/vj-avcodec.h>
|
||||
#include <libel/pixbuf.h>
|
||||
#include <libel/avcommon.h>
|
||||
#include <libvjnet/vj-client.h>
|
||||
#ifdef HAVE_JACK
|
||||
#include <veejay/vj-jack.h>
|
||||
#endif
|
||||
@@ -887,7 +887,6 @@ static int veejay_screen_update(veejay_t * info )
|
||||
if( info->settings->unicast_frame_sender )
|
||||
{
|
||||
vj_perform_send_primary_frame_s2(info, 0, info->uc->current_link );
|
||||
vj_perform_done_s2(info);
|
||||
}
|
||||
|
||||
if( info->settings->mcast_frame_sender && info->settings->use_vims_mcast )
|
||||
@@ -1782,6 +1781,7 @@ int veejay_init(veejay_t * info, int x, int y,char *arg, int def_tags, int gen_t
|
||||
info->video_output_height);
|
||||
|
||||
int full_range = veejay_set_yuv_range( info );
|
||||
yuv_set_pixel_range(full_range);
|
||||
|
||||
info->settings->sample_mode = SSM_422_444;
|
||||
|
||||
@@ -3740,6 +3740,8 @@ int veejay_open_files(veejay_t * info, char **files, int num_files, float ofps,
|
||||
info->effect_frame2 = yuv_yuv_template( NULL,NULL,NULL, info->dummy->width, info->dummy->height, yuv_to_alpha_fmt(vj_to_pixfmt(info->pixel_format)) );
|
||||
info->effect_frame2->fps = info->settings->output_fps;
|
||||
|
||||
veejay_msg(VEEJAY_MSG_DEBUG,"Performer is working in %s (%d)", yuv_get_pixfmt_description(info->effect_frame1->format), info->effect_frame1->format);
|
||||
|
||||
if(num_files == 0)
|
||||
{
|
||||
if(!info->dummy->active)
|
||||
|
||||
@@ -9163,8 +9163,9 @@ void vj_event_mcast_start ( void *ptr, const char format[],
|
||||
char s[255];
|
||||
P_A( args,sizeof(args), s ,sizeof(s), format, ap);
|
||||
|
||||
if(!v->settings->use_vims_mcast)
|
||||
if(!v->settings->use_vims_mcast) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "start veejay in multicast mode (see -T commandline option)");
|
||||
}
|
||||
else
|
||||
{
|
||||
v->settings->mcast_frame_sender = 1;
|
||||
|
||||
@@ -2638,8 +2638,8 @@ void vj_init_vevo_events(void)
|
||||
vj_event_mcast_start,
|
||||
1,
|
||||
VIMS_ALLOW_ANY,
|
||||
"0=Color,1=Grayscale (default)",
|
||||
1,
|
||||
"(unused)",
|
||||
0,
|
||||
NULL );
|
||||
index_map_[VIMS_VIDEO_MCAST_STOP] = _new_event(
|
||||
NULL,
|
||||
|
||||
@@ -102,7 +102,6 @@ static long performer_frame_size_ = 0;
|
||||
extern uint8_t pixel_Y_lo_;
|
||||
|
||||
static varcache_t pvar_;
|
||||
static void *lzo_;
|
||||
static VJFrame *crop_frame = NULL;
|
||||
static VJFrame *rgba_frame[2] = { NULL };
|
||||
static VJFrame *yuva_frame[2] = { NULL };
|
||||
@@ -113,6 +112,7 @@ static ycbcr_frame **primary_buffer = NULL; /* normal */
|
||||
static ycbcr_frame *preview_buffer = NULL;
|
||||
static int preview_max_w;
|
||||
static int preview_max_h;
|
||||
static void *encoder_ = NULL;
|
||||
|
||||
#define CACHE_TOP 0
|
||||
#define CACHE 1
|
||||
@@ -138,7 +138,6 @@ static void *rgba2yuv_scaler = NULL;
|
||||
static void *yuv2rgba_scaler = NULL;
|
||||
static uint8_t *pribuf_area = NULL;
|
||||
static size_t pribuf_len = 0;
|
||||
static uint8_t *socket_buffer = NULL;
|
||||
static uint8_t *fx_chain_buffer = NULL;
|
||||
static size_t fx_chain_buflen = 0;
|
||||
static ycbcr_frame *record_buffer = NULL; // needed for recording invisible streams
|
||||
@@ -1033,9 +1032,6 @@ void vj_perform_free(veejay_t * info)
|
||||
|
||||
sample_record_free();
|
||||
|
||||
if( socket_buffer )
|
||||
free( socket_buffer );
|
||||
|
||||
if(info->edit_list->has_audio)
|
||||
vj_perform_close_audio();
|
||||
|
||||
@@ -1103,14 +1099,13 @@ void vj_perform_free(veejay_t * info)
|
||||
free(pribuf_area);
|
||||
}
|
||||
|
||||
if(lzo_)
|
||||
lzo_free(lzo_);
|
||||
|
||||
yuv_free_swscaler( rgba2yuv_scaler );
|
||||
yuv_free_swscaler( yuv2rgba_scaler );
|
||||
|
||||
free(rgba_frame[0]);
|
||||
free(rgba_frame[1]);
|
||||
|
||||
vj_avcodec_stop(encoder_,0);
|
||||
}
|
||||
|
||||
int vj_perform_preview_max_width() {
|
||||
@@ -1220,125 +1215,26 @@ static void long2str(uint8_t *dst, uint32_t n)
|
||||
dst[3] = (n>>24)&0xff;
|
||||
}
|
||||
|
||||
static uint32_t vj_perform_compress_frame( veejay_t *info, uint8_t *dst, uint32_t *p1_len, uint32_t *p2_len, uint32_t *p3_len, VJFrame *frame)
|
||||
{
|
||||
const int len = frame->len;
|
||||
const int uv_len = frame->uv_len;
|
||||
uint8_t *dstI = dst + 16;
|
||||
|
||||
if(lzo_ == NULL ) {
|
||||
lzo_ = lzo_new();
|
||||
if( lzo_ == NULL ) {
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Unable to initialize lzo encoder :(");
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
int i = lzo_compress( lzo_ , frame->data[0], dstI, p1_len, len );
|
||||
if( i == 0 )
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Unable to compress Y plane");
|
||||
return 0;
|
||||
}
|
||||
uint32_t size1 = ( *p1_len );
|
||||
dstI = dst + 16 + (sizeof(uint8_t) * size1 );
|
||||
|
||||
i = lzo_compress( lzo_, frame->data[1], dstI, p2_len, uv_len );
|
||||
if( i == 0 )
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Unable to compress U plane");
|
||||
return 0;
|
||||
}
|
||||
|
||||
uint32_t size2 = ( *p2_len );
|
||||
dstI = dst + 16 + size1 + size2;
|
||||
|
||||
i = lzo_compress( lzo_, frame->data[2], dstI, p3_len, uv_len );
|
||||
if( i == 0 )
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_ERROR, "Unable to compress V plane");
|
||||
return 0;
|
||||
}
|
||||
|
||||
uint32_t size3 = ( *p3_len );
|
||||
|
||||
long2str( dst,size1);
|
||||
long2str( dst+4, size2 );
|
||||
long2str( dst+8, size3 );
|
||||
long2str( dst+12,info->settings->mcast_mode );
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
void vj_perform_done_s2( veejay_t *info ) {
|
||||
|
||||
info->settings->unicast_frame_sender = 0;
|
||||
}
|
||||
|
||||
static long stream_pts_ = 0;
|
||||
static int vj_perform_compress_primary_frame_s2(veejay_t *info,VJFrame *frame )
|
||||
{
|
||||
char info_line[64];
|
||||
int data_len = 44;
|
||||
int sp_w = frame->width;
|
||||
int sp_h = frame->height;
|
||||
int sp_uvlen = frame->uv_len;
|
||||
int sp_len = frame->len;
|
||||
int sp_format = frame->format;
|
||||
uint32_t planes[4] = { 0 };
|
||||
|
||||
if(socket_buffer == NULL ) {
|
||||
socket_buffer = vj_malloc(RUP8(data_len + (frame->len * 4 )));
|
||||
if(socket_buffer == NULL) {
|
||||
if( encoder_ == NULL ) {
|
||||
encoder_ = vj_avcodec_start(info->effect_frame1, ENCODER_MJPEG, NULL);
|
||||
if(encoder_ == NULL) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
veejay_memset(info_line, 0, sizeof(info_line) );
|
||||
uint8_t *sbuf = socket_buffer + (sizeof(uint8_t) * data_len );
|
||||
|
||||
int compr_ok = vj_perform_compress_frame(info,sbuf, &planes[0], &planes[1], &planes[2], frame);
|
||||
int total = planes[0] + planes[1] + planes[2] + 16;
|
||||
if( compr_ok == 0 ) {
|
||||
planes[0] = sp_len;
|
||||
planes[1] = sp_uvlen;
|
||||
planes[2] = planes[1];
|
||||
total = 0;
|
||||
}
|
||||
|
||||
/* peer to peer connection */
|
||||
sprintf(info_line,
|
||||
"%04d%04d%04d%08d%08d%08d%08d",
|
||||
sp_w,
|
||||
sp_h,
|
||||
sp_format,
|
||||
total,
|
||||
planes[0],
|
||||
planes[1],
|
||||
planes[2] );
|
||||
|
||||
veejay_memcpy( socket_buffer, info_line, data_len );
|
||||
|
||||
if( compr_ok == 0 )
|
||||
{
|
||||
if(!info->splitter) {
|
||||
veejay_memcpy( socket_buffer + data_len , frame->data[0], sp_len);
|
||||
veejay_memcpy( socket_buffer + data_len + sp_len,frame->data[1], sp_uvlen );
|
||||
veejay_memcpy( socket_buffer + data_len + sp_len + sp_uvlen,frame->data[2],sp_uvlen );
|
||||
}
|
||||
else
|
||||
{
|
||||
veejay_memcpy( socket_buffer + data_len, frame->data[0], sp_len + sp_uvlen + sp_uvlen);
|
||||
}
|
||||
data_len += 16 + sp_len + sp_uvlen + sp_uvlen; // 16 is compression data header
|
||||
|
||||
}
|
||||
else {
|
||||
data_len += total;
|
||||
}
|
||||
|
||||
return data_len;
|
||||
return vj_avcodec_encode_frame(encoder_,
|
||||
stream_pts_ ++,
|
||||
ENCODER_MJPEG,
|
||||
frame->data,
|
||||
vj_avcodec_get_buf(encoder_),
|
||||
8 * 16 * 65535,
|
||||
frame->format);
|
||||
}
|
||||
|
||||
int vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int to_mcast_link_id)
|
||||
void vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int to_mcast_link_id)
|
||||
{
|
||||
int i;
|
||||
|
||||
@@ -1355,8 +1251,10 @@ int vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int to_mcast_lin
|
||||
|
||||
VJFrame *frame = vj_split_get_screen( info->splitter, screen_id );
|
||||
int data_len = vj_perform_compress_primary_frame_s2( info, frame );
|
||||
if(data_len <= 0)
|
||||
continue;
|
||||
|
||||
if( vj_server_send_frame( info->vjs[3], link_id, socket_buffer,data_len, frame, info->real_fps ) <= 0 ) {
|
||||
if( vj_server_send_frame( info->vjs[3], link_id, vj_avcodec_get_buf(encoder_),data_len, frame ) <= 0 ) {
|
||||
_vj_server_del_client( info->vjs[3], link_id );
|
||||
}
|
||||
|
||||
@@ -1364,6 +1262,8 @@ int vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int to_mcast_lin
|
||||
info->splitted_screens[i] = -1;
|
||||
}
|
||||
}
|
||||
|
||||
info->settings->unicast_frame_sender = 0;
|
||||
}
|
||||
else {
|
||||
VJFrame fxframe;
|
||||
@@ -1371,6 +1271,9 @@ int vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int to_mcast_lin
|
||||
vj_copy_frame_holder(info->effect_frame1, primary_buffer[info->out_buf], &fxframe);
|
||||
|
||||
int data_len = vj_perform_compress_primary_frame_s2( info,&fxframe );
|
||||
if( data_len <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
int id = (mcast ? 2: 3);
|
||||
|
||||
@@ -1378,24 +1281,24 @@ int vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int to_mcast_lin
|
||||
{
|
||||
for( i = 0; i < VJ_MAX_CONNECTIONS; i++ ) {
|
||||
if( info->rlinks[i] != -1 ) {
|
||||
if(vj_server_send_frame( info->vjs[id], info->rlinks[i], socket_buffer, data_len, &fxframe, info->real_fps )<=0)
|
||||
if(vj_server_send_frame( info->vjs[id], info->rlinks[i], vj_avcodec_get_buf(encoder_), data_len, &fxframe )<=0)
|
||||
{
|
||||
_vj_server_del_client( info->vjs[id], info->rlinks[i] );
|
||||
}
|
||||
info->rlinks[i] = -1;
|
||||
}
|
||||
}
|
||||
|
||||
info->settings->unicast_frame_sender = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
if(vj_server_send_frame( info->vjs[id], to_mcast_link_id, socket_buffer, data_len, &fxframe, info->real_fps )<=0)
|
||||
if(vj_server_send_frame( info->vjs[id], to_mcast_link_id, vj_avcodec_get_buf(encoder_), data_len, &fxframe )<=0)
|
||||
{
|
||||
veejay_msg(VEEJAY_MSG_DEBUG, "Error sending multicast frame");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
void vj_perform_get_output_frame_420p( veejay_t *info, uint8_t **frame, int w, int h )
|
||||
|
||||
@@ -19,8 +19,6 @@ int vj_perform_init_audio(veejay_t * info);
|
||||
|
||||
void vj_perform_free(veejay_t *info);
|
||||
|
||||
void vj_perform_done_s2( veejay_t *info );
|
||||
|
||||
int vj_perform_audio_start(veejay_t * info);
|
||||
|
||||
void vj_perform_audio_status(struct timeval tmpstmp, unsigned int nb_out,
|
||||
@@ -56,7 +54,7 @@ void vj_perform_randomize(veejay_t *info);
|
||||
|
||||
void vj_perform_free_plugin_frame(VJFrameInfo *f );
|
||||
|
||||
int vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int dst_link);
|
||||
void vj_perform_send_primary_frame_s2(veejay_t *info, int mcast, int dst_link);
|
||||
void vj_perform_get_backstore( uint8_t **frame );
|
||||
int vj_perform_get_sampling();
|
||||
|
||||
|
||||