v4l2 capture: convert rgba to yuva
@@ -781,7 +781,6 @@ static int v4l2_configure_format( v4l2info *v, int host_fmt, int wid, int hei )
     return 0;
 }
 
-
 static void v4l2_set_output_pointers( v4l2info *v, void *src )
 {
     uint8_t *map = (uint8_t*) src;
@@ -800,13 +799,14 @@ static void v4l2_set_output_pointers( v4l2info *v, void *src )
     }
 }
 
-VJFrame *v4l2_get_dst( void *vv, uint8_t *Y, uint8_t *U, uint8_t *V ) {
+VJFrame *v4l2_get_dst( void *vv, uint8_t *Y, uint8_t *U, uint8_t *V, uint8_t *A ) {
     v4l2info *v = (v4l2info*) vv;
     if(v->threaded)
         lock_(v->video_info);
     v->host_frame->data[0] = Y;
     v->host_frame->data[1] = U;
     v->host_frame->data[2] = V;
+    v->host_frame->data[3] = A;
     if(v->threaded)
         unlock_(v->video_info);
     return v->host_frame;
@@ -1150,6 +1150,27 @@ v4l2_rw_fallback:
         v->buffers[0].length = v->sizeimage;
         v->buffers[0].start = vj_malloc( RUP8( v->sizeimage * 2 ) );
 
+    }
+
+    //FIXME this is here since libstream and libsample should be refactored (for now)
+    if( v->format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB32 ||
+        v->format.fmt.pix.pixelformat == V4L2_PIX_FMT_BGR32 ) {
+        // this allows us to convert to yuva regardless
+        veejay_msg(VEEJAY_MSG_DEBUG, "v4l2: source in RGB? format, converting to YUVA");
+        switch( dst_fmt ) {
+            case PIX_FMT_YUVJ422P:
+            case PIX_FMT_YUV422P:
+                dst_fmt = PIX_FMT_YUVA422P;
+                break;
+            case PIX_FMT_YUVJ420P:
+            case PIX_FMT_YUV420P:
+                dst_fmt = PIX_FMT_YUVA420P;
+                break;
+            case PIX_FMT_YUVJ444P:
+            case PIX_FMT_YUV444P:
+                dst_fmt = PIX_FMT_YUVA444P;
+                break;
+        }
     }
 
     for( i = 0; i < N_FRAMES; i ++ ) {
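
The switch above only promotes the negotiated destination format from a YUV planar format to its YUVA counterpart; the pixel conversion itself is done elsewhere in the capture path (this diff only shows the format bookkeeping, such as the yuv_yuv_template() call in the next hunk). As a rough, self-contained illustration of what an RGB-with-alpha to YUVA conversion involves, here is a minimal libswscale sketch. It is not veejay code: the function name rgba_to_yuva420p(), the choice of AV_PIX_FMT_RGBA as the packed source layout, and the pre-allocated destination planes are all assumptions made for the example.

/* Illustrative only -- plain libswscale, not veejay's yuv_* wrappers.
 * Converts one packed RGBA frame into pre-allocated planar YUVA 4:2:0 buffers.
 * The exact packed layout delivered for V4L2_PIX_FMT_RGB32/BGR32 depends on
 * the driver, so AV_PIX_FMT_RGBA is an assumption here. */
#include <stdint.h>
#include <stdlib.h>
#include <libswscale/swscale.h>

static int rgba_to_yuva420p( const uint8_t *rgba, int w, int h,
                             uint8_t *dst[4], int dst_stride[4] )
{
    struct SwsContext *ctx = sws_getContext( w, h, AV_PIX_FMT_RGBA,
                                             w, h, AV_PIX_FMT_YUVA420P,
                                             SWS_FAST_BILINEAR, NULL, NULL, NULL );
    if( !ctx )
        return 0;

    const uint8_t *src[4] = { rgba, NULL, NULL, NULL };
    int src_stride[4]     = { 4 * w, 0, 0, 0 };

    /* dst[0..3] receive the Y, U, V and A planes; the alpha channel of the
     * packed input ends up in dst[3] instead of being dropped. */
    sws_scale( ctx, src, src_stride, 0, h, dst, dst_stride );
    sws_freeContext( ctx );
    return 1;
}

Pointing the destination at AV_PIX_FMT_YUVA422P or AV_PIX_FMT_YUVA444P instead covers the other two branches of the switch above.
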
@@ -1157,7 +1178,7 @@ v4l2_rw_fallback:
         v->frames_done[i] = 0;
     }
 
-    v->host_frame = yuv_yuv_template( NULL,NULL,NULL,wid,hei,host_fmt );
+    v->host_frame = yuv_yuv_template( NULL,NULL,NULL,wid,hei,dst_fmt );
     v->frame_ready = 0;
     v->frameidx = 0;
 
@@ -1181,7 +1202,7 @@ v4l2_rw_fallback:
 }
 
 static int v4l2_pull_frame_intern( v4l2info *v )
-{ //@ fixme more functions no pasta
+{
     void *src = NULL;
     int length = 0;
     int n = 0;
@@ -2163,6 +2184,11 @@ int v4l2_thread_pull( v4l2_thread_info *i , VJFrame *dst )
         veejay_memset( dst->data[1], 127, dst->uv_len );
         veejay_memset( dst->data[2], 127, dst->uv_len );
     }
+
+    if( v->frames[v->frame_ready]->stride[3] > 0 && dst->stride[3] > 0 ) {
+        veejay_memcpy( dst->data[3], v->frames[v->frame_ready]->data[3], v->out_planes[3]);
+    }
+
     //@ "free" buffer
     lock_(i);
     v->frames_done[v->frameidx] = 0;

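Note that v4l2_thread_pull() only copies the alpha plane when both the captured frame and the destination expose a fourth stride; otherwise dst->data[3] is left untouched. A caller that needs a defined, fully opaque alpha plane in that case could pre-fill it before pulling. This is a hypothetical caller-side snippet, not part of the commit, and it assumes the alpha plane is luma-sized (dst->len):

/* Hypothetical: give the alpha plane a defined value before the pull, in case
 * the capture source provides no alpha data of its own. */
if( dst->stride[3] > 0 && dst->data[3] != NULL )
    veejay_memset( dst->data[3], 0xff, dst->len ); /* assumes alpha == luma plane size */
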
@@ -95,7 +95,7 @@ void v4l2_set_whiteness(void *d, int32_t value);
 int32_t v4l2_get_whiteness(void *d);
 void v4l2_set_vflip(void *d, int32_t value);
 int32_t v4l2_get_vflip(void *d);
-VJFrame *v4l2_get_dst( void *v,uint8_t *Y, uint8_t *U, uint8_t *V );
+VJFrame *v4l2_get_dst( void *v,uint8_t *Y, uint8_t *U, uint8_t *V, uint8_t *A );
 void v4l2_set_control( void *d, uint32_t type, int32_t value );
 int v4l2_poll( void *d , int nfds, int timeout );
 int v4l2_thread_start( v4l2_thread_info *info );

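The prototype change means every caller of v4l2_get_dst() now has to supply a fourth plane pointer, as the vj_tag_get_frame() changes below do with buffer[3]. Judging from the implementation earlier in this diff (data[3] = A, with no NULL check), passing NULL simply leaves host_frame->data[3] NULL, which is effectively what the old three-plane call left there. A hypothetical caller without an alpha buffer might therefore read:

/* Hypothetical migration of an existing three-plane caller; 'v4l2' and
 * 'planes' are placeholder names, not identifiers from this commit. */
VJFrame *dst = v4l2_get_dst( v4l2, planes[0], planes[1], planes[2],
                             NULL /* no alpha buffer available */ );
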
@@ -904,10 +904,9 @@ int vj_tag_new(int type, char *filename, int stream_nr, editlist * el, int pix_f
     if(type == VJ_TAG_TYPE_MCAST || type == VJ_TAG_TYPE_NET)
         tag->priv = net_threader(_tag_info->effect_frame1);
 
-#ifdef HAVE_V4L2
     palette = get_ffmpeg_pixfmt( pix_fmt );
-#endif
+
     switch (type) {
         case VJ_TAG_TYPE_V4L:
             snprintf(tag->source_name,SOURCE_NAME_LEN, "%s", filename );
 
@@ -3405,17 +3404,12 @@ int vj_tag_get_frame(int t1, VJFrame *dst, uint8_t * abuffer)
     if( tag->capture_type == 1 ) {
 #ifdef HAVE_V4L2
         if( no_v4l2_threads_ ) {
-            res = v4l2_pull_frame( vj_tag_input->unicap[tag->index],v4l2_get_dst(vj_tag_input->unicap[tag->index],buffer[0],buffer[1],buffer[2]) );
+            res = v4l2_pull_frame( vj_tag_input->unicap[tag->index],v4l2_get_dst(vj_tag_input->unicap[tag->index],buffer[0],buffer[1],buffer[2],buffer[3]) );
         } else {
             res = v4l2_thread_pull( v4l2_thread_info_get( vj_tag_input->unicap[tag->index]),
-                    v4l2_get_dst( vj_tag_input->unicap[tag->index], buffer[0],buffer[1],buffer[2]));
+                    v4l2_get_dst( vj_tag_input->unicap[tag->index], buffer[0],buffer[1],buffer[2],buffer[3]));
         }
 #endif
-        if( res <= 0 ) {
-            veejay_memset( buffer[0], 0, len );
-            veejay_memset( buffer[1], 128, uv_len );
-            veejay_memset( buffer[2], 128, uv_len );
-        }
     }
     switch( tag->noise_suppression ) {
         case V4L_BLACKFRAME:

@@ -901,7 +901,7 @@ static int veejay_screen_update(veejay_t * info )
 {
     uint8_t *frame[4];
 #ifdef HAVE_DIRECTFB
-    uint8_t *c_frame[3];
+    uint8_t *c_frame[4];
 #endif
     int i = 0;
     int skip_update = 0;

@@ -1131,57 +1131,6 @@ int vj_perform_get_cropped_frame( veejay_t *info, uint8_t **frame, int crop )
     return 1;
 }
 
-int vj_perform_init_cropped_output_frame(veejay_t *info, VJFrame *src, int *dw, int *dh )
-{
-    video_playback_setup *settings = info->settings;
-    if( crop_frame )
-        free(crop_frame);
-    crop_frame = yuv_allocate_crop_image( src, &(settings->viewport) );
-    if(!crop_frame)
-        return 0;
-
-    *dw = crop_frame->width;
-    *dh = crop_frame->height;
-
-    /* enough space to supersample*/
-    int i;
-    for( i = 0; i < 3; i ++ )
-    {
-        crop_frame->data[i] = (uint8_t*) vj_malloc(sizeof(uint8_t) * RUP8(crop_frame->len) );
-        if(!crop_frame->data[i])
-            return 0;
-    }
-    return 1;
-}
-void vj_perform_init_output_frame( veejay_t *info, uint8_t **frame,
-                int dst_w, int dst_h )
-{
-    int i;
-    for(i = 0; i < 2; i ++ )
-    {
-        if( video_output_buffer[i]->Y != NULL )
-            free(video_output_buffer[i]->Y );
-        if( video_output_buffer[i]->Cb != NULL )
-            free(video_output_buffer[i]->Cb );
-        if( video_output_buffer[i]->Cr != NULL )
-            free(video_output_buffer[i]->Cr );
-
-        video_output_buffer[i]->Y = (uint8_t*)
-            vj_malloc(sizeof(uint8_t) * RUP8( dst_w * dst_h) );
-        veejay_memset( video_output_buffer[i]->Y, pixel_Y_lo_, dst_w * dst_h );
-        video_output_buffer[i]->Cb = (uint8_t*)
-            vj_malloc(sizeof(uint8_t) * RUP8( dst_w * dst_h) );
-        veejay_memset( video_output_buffer[i]->Cb, 128, dst_w * dst_h );
-        video_output_buffer[i]->Cr = (uint8_t*)
-            vj_malloc(sizeof(uint8_t) * RUP8(dst_w * dst_h) );
-        veejay_memset( video_output_buffer[i]->Cr, 128, dst_w * dst_h );
-
-    }
-    frame[0] = video_output_buffer[0]->Y;
-    frame[1] = video_output_buffer[0]->Cb;
-    frame[2] = video_output_buffer[0]->Cr;
-}
-
 static void long2str(uint8_t *dst, uint32_t n)
 {
     dst[0] = (n )&0xff;
@@ -3460,6 +3409,7 @@ static void vj_perform_finish_render( veejay_t *info, video_playback_setup *sett
 #endif
     }
 
+    //FIXME: refactor this
     if( settings->composite ) {
         VJFrame out;
         veejay_memcpy( &out, info->effect_frame1, sizeof(VJFrame));

@@ -8,9 +8,6 @@ uint8_t *vj_perform_get_preview_buffer();
 int vj_perform_preview_max_width();
 int vj_perform_preview_max_height();
 
-void vj_perform_init_output_frame( veejay_t *info, uint8_t **frame,
-                int dst_w, int dst_h );
-
 void vj_perform_update_plugin_frame(VJFrame *frame);
 
 VJFrame *vj_perform_init_plugin_frame(veejay_t *info);
@@ -53,7 +50,6 @@ void vj_perform_record_tag_frame(veejay_t *info );
 void vj_perform_get_output_frame_420p( veejay_t *info, uint8_t **frame, int w, int h );
 
 int vj_perform_get_cropped_frame( veejay_t *info, uint8_t **frame, int crop );
-int vj_perform_init_cropped_output_frame(veejay_t *info, VJFrame *src, int *dw, int *dh );
 void vj_perform_get_crop_dimensions(veejay_t *info, int *w, int *h);
 int vj_perform_rand_update(veejay_t *info);
 void vj_perform_randomize(veejay_t *info);