OSDN Git Service

Maintains separate filter settings for each job. This prevents the MacGui from using...
[handbrake-jp/handbrake-jp-git.git] / libhb / render.c
index 1700470..3a0a025 100644 (file)
@@ -7,16 +7,24 @@
 #include "hb.h"
 
 #include "ffmpeg/avcodec.h"
+#include "ffmpeg/swscale.h"
 
 struct hb_work_private_s
 {
     hb_job_t * job;
 
-    ImgReSampleContext * context;
-    AVPicture            pic_raw;
-    AVPicture            pic_deint;
-    AVPicture            pic_render;
-    hb_buffer_t        * buf_deint;
+    struct SwsContext  * context;
+    AVPicture            pic_tmp_in;
+    AVPicture            pic_tmp_crop;
+    AVPicture            pic_tmp_out;        
+    hb_buffer_t        * buf_scale;
+    hb_fifo_t          * subtitle_queue;
+    hb_fifo_t          * delay_queue;
+    int                  frames_to_extend;
+    int                  dropped_frames;
+    int                  extended_frames;
+    uint64_t             last_start[4];
+    uint64_t             last_stop[4];
 };
 
 int  renderInit( hb_work_object_t *, hb_job_t * );
@@ -32,32 +40,95 @@ hb_work_object_t hb_render =
     renderClose
 };
 
+/*
+ * getU() & getV()
+ *
+ * Utility functions that find where the U and V are in the YUV sub-picture
+ *
+ * The Y data is at the top, followed by U and V, but the U and V
+ * are half the width of the Y, i.e. each chroma element covers 2x2
+ * of the Y's.
+ */
+static uint8_t *getU(uint8_t *data, int width, int height, int x, int y)
+{
+    return(&data[(((y/2) * (width/2)) + (x/2)) + (width*height)]);
+}
+
+static uint8_t *getV(uint8_t *data, int width, int height, int x, int y)
+{
+    return(&data[(((y/2) * (width/2)) + (x/2)) + (width*height) + 
+                 (width*height)/4]);
+}
+
 static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
                       hb_buffer_t ** _sub )
 {
     hb_buffer_t * sub = *_sub;
     hb_title_t * title = job->title;
-    int i, j, offset_top, offset_left;
-    uint8_t * lum, * alpha, * out;
+    int i, j, offset_top, offset_left, margin_top, margin_percent;
+    uint8_t * lum, * alpha, * out, * sub_chromaU, * sub_chromaV;
+
+    /*
+     * Percent of height of picture that form a margin that subtitles
+     * should not be displayed within.
+     */
+    margin_percent = 2;
 
     if( !sub )
     {
         return;
     }
+    
+    /* 
+     * If necessary, move the subtitle so it is not in a cropped zone.
+     * When it won't fit, we center it so we lose an equal amount on both ends.
+     * Otherwise we try to leave a 20px or 2% margin around it. 
+     */
+    margin_top = ( ( title->height - job->crop[0] - job->crop[1] ) * 
+                   margin_percent ) / 100;
 
-    /* If necessary, move the subtitle so it is not in a cropped zone.
-       When it won't fit, we center it so we loose as much on both ends.
-       Otherwise we try to leave a 20px margin around it. */
+    if( margin_top > 20 )
+    {
+        /*
+         * A maximum margin of 20px regardless of height of the picture.
+         */
+        margin_top = 20;
+    }
 
-    if( sub->height > title->height - job->crop[0] - job->crop[1] - 40 )
+    if( sub->height > title->height - job->crop[0] - job->crop[1] - 
+        ( margin_top * 2 ) )
+    {
+        /*
+         * The subtitle won't fit in the cropped zone, so center
+         * it vertically so we fit in as much as we can.
+         */
         offset_top = job->crop[0] + ( title->height - job->crop[0] -
-                job->crop[1] - sub->height ) / 2;
-    else if( sub->y < job->crop[0] + 20 )
-        offset_top = job->crop[0] + 20;
-    else if( sub->y > title->height - job->crop[1] - 20 - sub->height )
-        offset_top = title->height - job->crop[1] - 20 - sub->height;
+                                      job->crop[1] - sub->height ) / 2;
+    }
+    else if( sub->y < job->crop[0] + margin_top )
+    {
+        /*
+         * The subtitle fits in the cropped zone, but is currently positioned
+         * within our top margin, so move it outside of our margin.
+         */
+        offset_top = job->crop[0] + margin_top;
+    }
+    else if( sub->y > title->height - job->crop[1] - margin_top - sub->height )
+    {
+        /*
+         * The subtitle fits in the cropped zone, and is not within the top
+         * margin but is within the bottom margin, so move it to be above
+         * the margin.
+         */
+        offset_top = title->height - job->crop[1] - margin_top - sub->height;
+    }
     else
+    {
+        /*
+         * The subtitle is fine where it is.
+         */
         offset_top = sub->y;
+    }
 
     if( sub->width > title->width - job->crop[2] - job->crop[3] - 40 )
         offset_left = job->crop[2] + ( title->width - job->crop[2] -
@@ -71,6 +142,9 @@ static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
 
     lum   = sub->data;
     alpha = lum + sub->width * sub->height;
+    sub_chromaU = alpha + sub->width * sub->height;
+    sub_chromaV = sub_chromaU + sub->width * sub->height;
+
     out   = buf->data + offset_top * title->width + offset_left;
 
     for( i = 0; i < sub->height; i++ )
@@ -81,14 +155,40 @@ static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
             {
                 if( offset_left + j >= 0 && offset_left + j < title->width )
                 {
+                    uint8_t *chromaU, *chromaV;
+
+                    /*
+                     * Merge the luminance and alpha with the picture
+                     */
                     out[j] = ( (uint16_t) out[j] * ( 16 - (uint16_t) alpha[j] ) +
-                               (uint16_t) lum[j] * (uint16_t) alpha[j] ) >> 4;
+                               (uint16_t) lum[j] * (uint16_t) alpha[j] ) >> 4;   
+                    /*
+                     * Set the chroma (colour) based on whether there is
+                     * any alpha at all. Don't try to blend with the picture.
+                     */
+                    chromaU = getU(buf->data, title->width, title->height,
+                                   offset_left+j, offset_top+i);
+                    
+                    chromaV = getV(buf->data, title->width, title->height,
+                                   offset_left+j, offset_top+i);
+                    
+                    if( alpha[j] > 0 )
+                    {
+                        /*
+                         * Add the chroma from the sub-picture, as this is 
+                         * not a transparent element.
+                         */
+                        *chromaU = sub_chromaU[j];
+                        *chromaV = sub_chromaV[j];
+                    } 
                 }
             }
         }
 
         lum   += sub->width;
         alpha += sub->width;
+        sub_chromaU += sub->width;
+        sub_chromaV += sub->width;
         out   += title->width;
     }
 
@@ -101,91 +201,306 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
     hb_work_private_t * pv = w->private_data;
     hb_job_t   * job   = pv->job;
     hb_title_t * title = job->title;
-    hb_buffer_t * in = *buf_in, * buf;
-
-    avpicture_fill( &pv->pic_raw, in->data, PIX_FMT_YUV420P,
-                    title->width, title->height );
+    hb_buffer_t * in = *buf_in, * buf_tmp_in = *buf_in;
+    hb_buffer_t * ivtc_buffer = NULL;
+    
+    if(!in->data)
+    {
+        /* If the input buffer is end of stream, send out an empty one
+         * to the next stage as well. Note that this will result in us
+         * losing the current contents of the delay queue.
+         */
+       *buf_out = hb_buffer_init(0);
+       return HB_WORK_OK;
+    }
 
-    buf        = hb_buffer_init( 3 * job->width * job->height / 2 );
-    buf->start = in->start;
-    buf->stop  = in->stop;
+    /*
+     * During the indepth_scan, ditch the buffers here before applying filters or attempting to
+     * use the subtitles.
+     */
+    if( job->indepth_scan )
+    {      
+        *buf_out = NULL;
+        return HB_WORK_OK;
+    }
+    
+    /* Push subtitles onto queue just in case we need to delay a frame */
+    if( in->sub )
+    {
+        hb_fifo_push( pv->subtitle_queue, in->sub );
+    }
+    else
+    {
+        hb_fifo_push( pv->subtitle_queue,  hb_buffer_init(0) );
+    }
+    
+    /* Setup render buffer */
+    hb_buffer_t * buf_render = hb_buffer_init( 3 * job->width * job->height / 2 );  
+    
+    /* Apply filters */
+    if( job->filters )
+    {
+        int filter_count = hb_list_count( job->filters );
+        int i;
+        
+        for( i = 0; i < filter_count; i++ )
+        {
+            hb_filter_object_t * filter = hb_list_item( job->filters, i );
+            
+            if( !filter )
+            {
+                continue;
+            }            
+            
+            hb_buffer_t * buf_tmp_out = NULL;
+            
+            int result = filter->work( buf_tmp_in,
+                                       &buf_tmp_out, 
+                                       PIX_FMT_YUV420P, 
+                                       title->width, 
+                                       title->height, 
+                                       filter->private_data );
+            
+            /* 
+             * FILTER_OK:      set temp buffer to filter buffer, continue 
+             * FILTER_DELAY:   set temp buffer to NULL, abort 
+             * FILTER_DROP:    set temp buffer to NULL, pop subtitle, abort 
+             * FILTER_FAILED:  leave temp buffer alone, continue 
+             */
+            if( result == FILTER_OK )
+            {
+                buf_tmp_in = buf_tmp_out;
+            }
+            else if( result == FILTER_DELAY )
+            {
+                buf_tmp_in = NULL;
+                break;
+            }            
+            else if( result == FILTER_DROP )
+            {
+                if( job->vfr )
+                {
+                    pv->frames_to_extend += 4;
+                    pv->dropped_frames++;
+                    hb_fifo_get( pv->subtitle_queue );
+                    buf_tmp_in = NULL;
+                }
+                else
+                {
+                    buf_tmp_in = buf_tmp_out;
+                }
+                break;
+            }
+        }
+    }   
 
-    if( job->deinterlace && pv->context )
+    if( buf_tmp_in )
     {
-        avpicture_fill( &pv->pic_render, buf->data, PIX_FMT_YUV420P,
-                        job->width, job->height );
-        avpicture_deinterlace( &pv->pic_deint, &pv->pic_raw,
-                               PIX_FMT_YUV420P, title->width,
-                               title->height );
-        ApplySub( job, pv->buf_deint, &in->sub );
-        img_resample( pv->context, &pv->pic_render, &pv->pic_deint );
+        /* Cache frame start and stop times, so we can renumber
+           time stamps if dropping frames for VFR.              */ 
+        int i;
+        for( i = 3; i >= 1; i-- )
+        {
+            pv->last_start[i] = pv->last_start[i-1];
+            pv->last_stop[i] = pv->last_stop[i-1];
+        }
+        pv->last_start[0] = in->start;
+        pv->last_stop[0] = in->stop;
     }
-    else if( job->deinterlace )
+    
+    /* Apply subtitles */
+    if( buf_tmp_in )
     {
-        avpicture_fill( &pv->pic_deint, buf->data, PIX_FMT_YUV420P,
-                        job->width, job->height );
-        avpicture_deinterlace( &pv->pic_deint, &pv->pic_raw,
-                               PIX_FMT_YUV420P, title->width,
-                               title->height );
-        ApplySub( job, buf, &in->sub );
+        hb_buffer_t * subtitles = hb_fifo_get( pv->subtitle_queue );        
+        if( subtitles )
+        {
+            ApplySub( job, buf_tmp_in, &subtitles );
+        }
     }
-    else if( pv->context )
+    
+    /* Apply crop/scale if specified */
+    if( buf_tmp_in && pv->context )
     {
-        ApplySub( job, in, &in->sub );
-        avpicture_fill( &pv->pic_render, buf->data, PIX_FMT_YUV420P,
+        avpicture_fill( &pv->pic_tmp_in, buf_tmp_in->data, 
+                        PIX_FMT_YUV420P,
+                        title->width, title->height );
+        
+        avpicture_fill( &pv->pic_tmp_out, buf_render->data, 
+                        PIX_FMT_YUV420P,
                         job->width, job->height );
-        img_resample( pv->context, &pv->pic_render, &pv->pic_raw );
+
+        // Crop; this alters the data pointer to point to the correct place for the cropped frame
+        av_picture_crop( &pv->pic_tmp_crop, &pv->pic_tmp_in, PIX_FMT_YUV420P,
+                         job->crop[0], job->crop[2] );
+
+        // Scale pic_crop into pic_render according to the context set up in renderInit
+        sws_scale(pv->context,
+                  pv->pic_tmp_crop.data, pv->pic_tmp_crop.linesize,
+                  0, title->height - (job->crop[0] + job->crop[1]),
+                  pv->pic_tmp_out.data,  pv->pic_tmp_out.linesize);
+        
+        hb_buffer_copy_settings( buf_render, buf_tmp_in );
+        
+        buf_tmp_in = buf_render;
+    }  
+
+    /* Set output to render buffer */
+    (*buf_out) = buf_render;
+
+    if( buf_tmp_in == NULL )
+    {
+        /* Teardown and cleanup buffers if we are emitting NULL */
+        if( buf_in && *buf_in )
+        {
+            hb_buffer_close( buf_in );
+            *buf_in = NULL;
+        }        
+        if( buf_out && *buf_out )
+        {
+            hb_buffer_close( buf_out );        
+            *buf_out = NULL;
+        }
     }
-    else
+    else if( buf_tmp_in != buf_render )
+    {    
+        /* Copy temporary results and settings into render buffer */
+        memcpy( buf_render->data, buf_tmp_in->data, buf_render->size );
+        hb_buffer_copy_settings( buf_render, buf_tmp_in );
+    }
+    
+    if (*buf_out)
     {
-        hb_buffer_close( &buf );
-        ApplySub( job, in, &in->sub );
-        buf      = in;
-        *buf_in  = NULL;
+        hb_fifo_push( pv->delay_queue, *buf_out );
+        *buf_out = NULL;        
     }
 
-    (*buf_out) = buf;
+    /*
+     * Keep the last three frames in our queue; this ensures that we always
+     * have the last two in there should we need to rewrite their durations.
+     */
+    if( hb_fifo_size( pv->delay_queue ) >= 3 )
+    {
+        *buf_out = hb_fifo_get( pv->delay_queue );
+    } 
+
+    if( *buf_out )
+    {
+        if( pv->frames_to_extend )
+        {
+            /*
+             * A frame's been dropped by VFR detelecine.
+             * Gotta make up the lost time. This will also
+             * slow down the video to 23.976fps.
+             * The dropped frame ran for 3003 ticks, so
+             * divvy it up amongst the 4 frames left behind.
+             * This is what the delay_queue is for;
+             * telecined sequences start 2 frames before
+             * the dropped frame, so to slow down the right
+             * ones you need a 2 frame delay between
+             * reading input and writing output.
+             */
+            ivtc_buffer = *buf_out;
+            
+            /* The 4th cached frame will be the one to use. */
+            ivtc_buffer->start = pv->last_start[3];
+            ivtc_buffer->stop = pv->last_stop[3];
+
+            if (pv->frames_to_extend % 4)
+                ivtc_buffer->stop += 751;
+            else
+                ivtc_buffer->stop += 750;
+            
+            /* Set the 3rd cached frame to start when this one stops,
+               and to stop 3003 ticks later -- a normal 29.97fps
+               length frame. If it needs to be extended as well to
+               make up lost time, it'll be handled on the next
+               loop through the renderer.                            */
+            int temp_duration = pv->last_stop[2] - pv->last_start[2];
+            pv->last_start[2] = ivtc_buffer->stop;
+            pv->last_stop[2] = ivtc_buffer->stop + temp_duration;
+            
+            pv->frames_to_extend--;
+            pv->extended_frames++;
+        }
+
+    }
 
     return HB_WORK_OK;
 }
 
 void renderClose( hb_work_object_t * w )
 {
-    hb_work_private_t * pv = w->private_data;
+    hb_work_private_t * pv = w->private_data;   
+        
+    hb_log("render: dropped frames: %i (%i ticks)", pv->dropped_frames, (pv->dropped_frames * 3003) );
+    hb_log("render: extended frames: %i (%i ticks)", pv->extended_frames, ( ( pv->extended_frames / 4 ) * 3003 ) );
+    hb_log("render: Lost time: %i frames (%i ticks)", (pv->dropped_frames * 4) - (pv->extended_frames), (pv->dropped_frames * 3003) - ( ( pv->extended_frames / 4 ) * 3003 ) );
+
+    /* Cleanup subtitle queue */
+    if( pv->subtitle_queue )
+    {
+        hb_fifo_close( &pv->subtitle_queue );
+    }
     
-    img_resample_close( pv->context );
+    if( pv->delay_queue )
+    {
+        hb_fifo_close( &pv->delay_queue );
+    }
+   
+    /* Cleanup render work structure */
     free( pv );
-    w->private_data = NULL;
+    w->private_data = NULL;    
 }
 
 int renderInit( hb_work_object_t * w, hb_job_t * job )
-{
-    hb_title_t * title;
-    
+{   
+    /* Allocate new private work object */
     hb_work_private_t * pv = calloc( 1, sizeof( hb_work_private_t ) );
+    pv->job = job;
     w->private_data = pv;
 
-    title = job->title;
-
-    pv->job = job;
+    /* Get title and title size */
+    hb_title_t * title = job->title;
 
+    /* If crop or scale is specified, setup rescale context */
     if( job->crop[0] || job->crop[1] || job->crop[2] || job->crop[3] ||
         job->width != title->width || job->height != title->height )
     {
-        pv->context = img_resample_full_init(
-            job->width, job->height, title->width, title->height,
-            job->crop[0], job->crop[1], job->crop[2], job->crop[3],
-            0, 0, 0, 0 );
-    }
-
-    if( job->deinterlace )
+        pv->context = sws_getContext(title->width  - (job->crop[2] + job->crop[3]),
+                                     title->height - (job->crop[0] + job->crop[1]),
+                                     PIX_FMT_YUV420P,
+                                     job->width, job->height, PIX_FMT_YUV420P,
+                                     (uint16_t)(SWS_LANCZOS|SWS_ACCURATE_RND), NULL, NULL, NULL);
+    }   
+    
+    /* Setup FIFO queue for subtitle cache */
+    pv->subtitle_queue = hb_fifo_init( 8 );    
+    pv->delay_queue = hb_fifo_init( 8 );
+    pv->frames_to_extend = 0;
+    pv->dropped_frames = 0;
+    pv->extended_frames = 0;
+    pv->last_start[0] = 0;
+    pv->last_stop[0] = 0;
+    
+    /* Setup filters */
+    /* TODO: Move to work.c? */
+    if( job->filters )
     {
-        /* Allocate a constant buffer used for deinterlacing */
-        pv->buf_deint = hb_buffer_init( 3 * title->width *
-                                       title->height / 2 );
-        avpicture_fill( &pv->pic_deint, pv->buf_deint->data,
-                        PIX_FMT_YUV420P, title->width, title->height );
-    }
+        int filter_count = hb_list_count( job->filters );
+        int i;
+        
+        for( i = 0; i < filter_count; i++ )
+        {
+            hb_filter_object_t * filter = hb_list_item( job->filters, i );
 
+            if( !filter ) continue;
+            
+            filter->private_data = filter->init( PIX_FMT_YUV420P,
+                                                 title->width,
+                                                 title->height,
+                                                 filter->settings );
+        }
+    }
+    
     return 0;
 }