X-Git-Url: http://git.osdn.jp/view?a=blobdiff_plain;f=libhb%2Fencx264.c;h=eb8d8426d22ff284a342b5a2c3e079812f63f380;hb=c5c381e3579de78ead6a777da6c8b934aeaba19e;hp=f40903d9dab2d695a594d5b5e81680d2ca35aa91;hpb=ac08dc081bdf198b61fdf996951eef1fca4ec44d;p=handbrake-jp%2Fhandbrake-jp-git.git

diff --git a/libhb/encx264.c b/libhb/encx264.c
index f40903d9..eb8d8426 100644
--- a/libhb/encx264.c
+++ b/libhb/encx264.c
@@ -108,9 +108,6 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
                 param.i_level_idc );
     }
 
-    /* Slightly faster with minimal quality lost */
-    param.analyse.i_subpel_refine = 4;
-
     /*
         This section passes the string x264opts to libx264 for parsing into
         parameter names and values.
@@ -198,8 +195,21 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
 
     /* set up the VUI color model & gamma to match what the COLR atom
      * set in muxmp4.c says. See libhb/muxmp4.c for notes. */
-
-    if ( job->title->height >= 720 )
+    if( job->color_matrix == 1 )
+    {
+        // ITU BT.601 DVD or SD TV content
+        param.vui.i_colorprim = 6;
+        param.vui.i_transfer = 1;
+        param.vui.i_colmatrix = 6;
+    }
+    else if( job->color_matrix == 2 )
+    {
+        // ITU BT.709 HD content
+        param.vui.i_colorprim = 1;
+        param.vui.i_transfer = 1;
+        param.vui.i_colmatrix = 1;
+    }
+    else if ( job->title->width >= 1280 || job->title->height >= 720 )
     {
         // we guess that 720p or above is ITU BT.709 HD content
         param.vui.i_colorprim = 1;
@@ -214,10 +224,10 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
         param.vui.i_colmatrix = 6;
     }
 
-    if( job->pixel_ratio )
+    if( job->anamorphic.mode )
     {
-        param.vui.i_sar_width = job->pixel_aspect_width;
-        param.vui.i_sar_height = job->pixel_aspect_height;
+        param.vui.i_sar_width = job->anamorphic.par_width;
+        param.vui.i_sar_height = job->anamorphic.par_height;
 
         hb_log( "encx264: encoding with stored aspect %d/%d",
                 param.vui.i_sar_width, param.vui.i_sar_height );
@@ -286,7 +296,7 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
         }
     }
 
-    hb_log( "encx264: opening libx264 (pass %d)", job->pass );
+    hb_deep_log( 2, "encx264: opening libx264 (pass %d)", job->pass );
     pv->x264 = x264_encoder_open( &param );
 
     x264_encoder_headers( pv->x264, &nal, &nal_count );
@@ -302,6 +312,7 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
 
     x264_picture_alloc( &pv->pic_in, X264_CSP_I420,
                         job->width, job->height );
+    pv->pic_in.img.i_stride[2] = pv->pic_in.img.i_stride[1] = ( ( job->width + 1 ) >> 1 );
     pv->x264_allocated_pic = pv->pic_in.img.plane[0];
 
     if (job->areBframes)
@@ -381,7 +392,7 @@ static hb_buffer_t *nal_encode( hb_work_object_t *w, x264_picture_t *pic_out,
     hb_job_t *job = pv->job;
 
     /* Should be way too large */
-    buf = hb_buffer_init( 3 * job->width * job->height / 2 );
+    buf = hb_video_buffer_init( job->width, job->height );
     buf->size = 0;
     buf->frametype = 0;
 
@@ -495,17 +506,18 @@ static hb_buffer_t *x264_encode( hb_work_object_t *w, hb_buffer_t *in )
     /* Point x264 at our current buffers Y(UV) data. */
     pv->pic_in.img.plane[0] = in->data;
 
+    int uvsize = ( (job->width + 1) >> 1 ) * ( (job->height + 1) >> 1 );
     if( job->grayscale )
     {
         /* XXX x264 has currently no option for grayscale encoding */
-        memset( pv->pic_in.img.plane[1], 0x80, job->width * job->height / 4 );
-        memset( pv->pic_in.img.plane[2], 0x80, job->width * job->height / 4 );
+        memset( pv->pic_in.img.plane[1], 0x80, uvsize );
+        memset( pv->pic_in.img.plane[2], 0x80, uvsize );
     }
     else
     {
         /* Point x264 at our buffers (Y)UV data */
         pv->pic_in.img.plane[1] = in->data + job->width * job->height;
-        pv->pic_in.img.plane[2] = in->data + 5 * job->width * job->height / 4;
+        pv->pic_in.img.plane[2] = pv->pic_in.img.plane[1] + uvsize;
     }
     if( in->new_chap && job->chapter_markers )
    {
@@ -566,6 +578,8 @@ int encx264Work( hb_work_object_t * w, hb_buffer_t ** buf_in,
     hb_work_private_t *pv = w->private_data;
     hb_buffer_t *in = *buf_in;
 
+    *buf_out = NULL;
+
     if( in->size <= 0 )
     {
         // EOF on input. Flush any frames still in the decoder then
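
A small standalone sketch follows (an editorial illustration, not HandBrake code and not part of the patch above) of the chroma-plane arithmetic that the uvsize and i_stride changes implement for I420 frames whose luma dimensions may be odd; the frame size used is an arbitrary example.

#include <stdio.h>

int main( void )
{
    /* Arbitrary odd example dimensions */
    int width  = 853;
    int height = 481;

    /* 4:2:0 chroma planes are subsampled 2x2, so each chroma dimension is
     * the luma dimension rounded up: the ( x + 1 ) >> 1 used in the patch. */
    int chroma_stride = ( width  + 1 ) >> 1;
    int chroma_rows   = ( height + 1 ) >> 1;

    /* Size of one chroma plane -- the uvsize value in x264_encode() */
    int uvsize = chroma_stride * chroma_rows;

    /* Plane offsets in a packed I420 buffer laid out as Y, then U, then V,
     * matching how the patch sets plane[1] and plane[2] */
    int u_offset = width * height;
    int v_offset = u_offset + uvsize;

    printf( "chroma stride %d, uvsize %d, U at %d, V at %d\n",
            chroma_stride, uvsize, u_offset, v_offset );
    return 0;
}

For even dimensions this reduces to the old width * height / 4 size and 5 * width * height / 4 offset; the rounded-up form is what keeps the memset sizes and plane pointers consistent when either dimension is odd.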