OSDN Git Service

BuildSystem:
[handbrake-jp/handbrake-jp-git.git] / libhb / render.c
index 3a0a025..55295a4 100644 (file)
@@ -1,13 +1,11 @@
 /* $Id: render.c,v 1.17 2005/04/14 17:37:54 titer Exp $
 
    This file is part of the HandBrake source code.
-   Homepage: <http://handbrake.m0k.org/>.
+   Homepage: <http://handbrake.fr/>.
    It may be used under the terms of the GNU General Public License. */
 
 #include "hb.h"
-
-#include "ffmpeg/avcodec.h"
-#include "ffmpeg/swscale.h"
+#include "hbffmpeg.h"
 
 struct hb_work_private_s
 {
@@ -16,15 +14,19 @@ struct hb_work_private_s
     struct SwsContext  * context;
     AVPicture            pic_tmp_in;
     AVPicture            pic_tmp_crop;
-    AVPicture            pic_tmp_out;        
+    AVPicture            pic_tmp_out;
     hb_buffer_t        * buf_scale;
     hb_fifo_t          * subtitle_queue;
     hb_fifo_t          * delay_queue;
-    int                  frames_to_extend;
     int                  dropped_frames;
     int                  extended_frames;
     uint64_t             last_start[4];
     uint64_t             last_stop[4];
+    uint64_t             lost_time[4];
+    uint64_t             total_lost_time;
+    uint64_t             total_gained_time;
+    int64_t              chapter_time;
+    int                  chapter_val;
 };
 
 int  renderInit( hb_work_object_t *, hb_job_t * );
@@ -32,7 +34,7 @@ int  renderWork( hb_work_object_t *, hb_buffer_t **, hb_buffer_t ** );
 void renderClose( hb_work_object_t * );
 
 hb_work_object_t hb_render =
-{   
+{
     WORK_RENDER,
     "Renderer",
     renderInit,
@@ -51,13 +53,13 @@ hb_work_object_t hb_render =
  */
 static uint8_t *getU(uint8_t *data, int width, int height, int x, int y)
 {
-    return(&data[(((y/2) * (width/2)) + (x/2)) + (width*height)]);
+    return(&data[(y>>1) * ((width+1)>>1) + (x>>1) + width*height]);
 }
 
 static uint8_t *getV(uint8_t *data, int width, int height, int x, int y)
 {
-    return(&data[(((y/2) * (width/2)) + (x/2)) + (width*height) + 
-                 (width*height)/4]);
+    int w2 = (width+1) >> 1, h2 = (height+1) >> 1;
+    return(&data[(y>>1) * w2 + (x>>1) + width*height + w2*h2]);
 }
 
 static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
@@ -78,13 +80,13 @@ static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
     {
         return;
     }
-    
-    /* 
+
+    /*
      * If necessary, move the subtitle so it is not in a cropped zone.
      * When it won't fit, we center it so we lose as much on both ends.
-     * Otherwise we try to leave a 20px or 2% margin around it. 
+     * Otherwise we try to leave a 20px or 2% margin around it.
      */
-    margin_top = ( ( title->height - job->crop[0] - job->crop[1] ) * 
+    margin_top = ( ( title->height - job->crop[0] - job->crop[1] ) *
                    margin_percent ) / 100;
 
     if( margin_top > 20 )
@@ -95,7 +97,7 @@ static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
         margin_top = 20;
     }
 
-    if( sub->height > title->height - job->crop[0] - job->crop[1] - 
+    if( sub->height > title->height - job->crop[0] - job->crop[1] -
         ( margin_top * 2 ) )
     {
         /*
@@ -161,26 +163,26 @@ static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
                      * Merge the luminance and alpha with the picture
                      */
                     out[j] = ( (uint16_t) out[j] * ( 16 - (uint16_t) alpha[j] ) +
-                               (uint16_t) lum[j] * (uint16_t) alpha[j] ) >> 4;   
+                               (uint16_t) lum[j] * (uint16_t) alpha[j] ) >> 4;
                     /*
                      * Set the chroma (colour) based on whether there is
                      * any alpha at all. Don't try to blend with the picture.
                      */
                     chromaU = getU(buf->data, title->width, title->height,
                                    offset_left+j, offset_top+i);
-                    
+
                     chromaV = getV(buf->data, title->width, title->height,
                                    offset_left+j, offset_top+i);
-                    
+
                     if( alpha[j] > 0 )
                     {
                         /*
-                         * Add the chroma from the sub-picture, as this is 
+                         * Add the chroma from the sub-picture, as this is
                          * not a transparent element.
                          */
                         *chromaU = sub_chromaU[j];
                         *chromaV = sub_chromaV[j];
-                    } 
+                    }
                 }
             }
         }
@@ -203,15 +205,16 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
     hb_title_t * title = job->title;
     hb_buffer_t * in = *buf_in, * buf_tmp_in = *buf_in;
     hb_buffer_t * ivtc_buffer = NULL;
-    
-    if(!in->data)
+
+    if( in->size <= 0 )
     {
         /* If the input buffer is end of stream, send out an empty one
          * to the next stage as well. Note that this will result in us
          * losing the current contents of the delay queue.
          */
-       *buf_out = hb_buffer_init(0);
-       return HB_WORK_OK;
+        *buf_out = in;
+        *buf_in = NULL;
+        return HB_WORK_DONE;
     }
 
     /*
@@ -219,11 +222,11 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
      * use the subtitles.
      */
     if( job->indepth_scan )
-    {      
+    {
         *buf_out = NULL;
         return HB_WORK_OK;
     }
-    
+
     /* Push subtitles onto queue just in case we need to delay a frame */
     if( in->sub )
     {
@@ -233,39 +236,47 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
     {
         hb_fifo_push( pv->subtitle_queue,  hb_buffer_init(0) );
     }
-    
+
+    /* If there's a chapter mark remember it in case we delay or drop its frame */
+    if( in->new_chap && job->vfr )
+    {
+        pv->chapter_time = in->start;
+        pv->chapter_val = in->new_chap;
+        in->new_chap = 0;
+    }
+
     /* Setup render buffer */
-    hb_buffer_t * buf_render = hb_buffer_init( 3 * job->width * job->height / 2 );  
-    
+    hb_buffer_t * buf_render = hb_video_buffer_init( job->width, job->height );
+
     /* Apply filters */
     if( job->filters )
     {
         int filter_count = hb_list_count( job->filters );
         int i;
-        
+
         for( i = 0; i < filter_count; i++ )
         {
             hb_filter_object_t * filter = hb_list_item( job->filters, i );
-            
+
             if( !filter )
             {
                 continue;
-            }            
-            
+            }
+
             hb_buffer_t * buf_tmp_out = NULL;
-            
+
             int result = filter->work( buf_tmp_in,
-                                       &buf_tmp_out, 
-                                       PIX_FMT_YUV420P, 
-                                       title->width, 
-                                       title->height, 
+                                       &buf_tmp_out,
+                                       PIX_FMT_YUV420P,
+                                       title->width,
+                                       title->height,
                                        filter->private_data );
-            
-            /* 
-             * FILTER_OK:      set temp buffer to filter buffer, continue 
-             * FILTER_DELAY:   set temp buffer to NULL, abort 
-             * FILTER_DROP:    set temp buffer to NULL, pop subtitle, abort 
-             * FILTER_FAILED:  leave temp buffer alone, continue 
+
+            /*
+             * FILTER_OK:      set temp buffer to filter buffer, continue
+             * FILTER_DELAY:   set temp buffer to NULL, abort
+             * FILTER_DROP:    set temp buffer to NULL, pop subtitle, abort
+             * FILTER_FAILED:  leave temp buffer alone, continue
              */
             if( result == FILTER_OK )
             {
@@ -275,57 +286,75 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
             {
                 buf_tmp_in = NULL;
                 break;
-            }            
+            }
             else if( result == FILTER_DROP )
             {
                 if( job->vfr )
                 {
-                    pv->frames_to_extend += 4;
+                    /* We need to compensate for the time lost by dropping this frame.
+                       Spread its duration out in quarters, because usually dropped frames
+                       maintain a 1-out-of-5 pattern and this spreads it out amongst the remaining ones.
+                       Store these in the lost_time array, which has 4 slots in it.
+                       Because not every frame duration divides evenly by 4, and we can't lose the
+                       remainder, we have to go through an awkward process to preserve it in the 4th array index. */
+                    uint64_t temp_duration = buf_tmp_out->stop - buf_tmp_out->start;
+                    pv->lost_time[0] += (temp_duration / 4);
+                    pv->lost_time[1] += (temp_duration / 4);
+                    pv->lost_time[2] += (temp_duration / 4);
+                    pv->lost_time[3] += ( temp_duration - (temp_duration / 4) - (temp_duration / 4) - (temp_duration / 4) );
+
+                    pv->total_lost_time += temp_duration;
                     pv->dropped_frames++;
-                    hb_fifo_get( pv->subtitle_queue );
+
+                    /* Pop the frame's subtitle and dispose of it. */
+                    hb_buffer_t * subtitles = hb_fifo_get( pv->subtitle_queue );
+                    hb_buffer_close( &subtitles );
                     buf_tmp_in = NULL;
+                    break;
                 }
                 else
                 {
                     buf_tmp_in = buf_tmp_out;
                 }
-                break;
             }
         }
-    }   
+    }
 
     if( buf_tmp_in )
     {
         /* Cache frame start and stop times, so we can renumber
-           time stamps if dropping frames for VFR.              */ 
+           time stamps if dropping frames for VFR.              */
         int i;
         for( i = 3; i >= 1; i-- )
         {
             pv->last_start[i] = pv->last_start[i-1];
             pv->last_stop[i] = pv->last_stop[i-1];
         }
-        pv->last_start[0] = in->start;
-        pv->last_stop[0] = in->stop;
+
+        /* In order to make sure we have continuous time stamps, store
+           the current frame's duration as starting when the last one stopped. */
+        pv->last_start[0] = pv->last_stop[1];
+        pv->last_stop[0] = pv->last_start[0] + (in->stop - in->start);
     }
-    
+
     /* Apply subtitles */
     if( buf_tmp_in )
     {
-        hb_buffer_t * subtitles = hb_fifo_get( pv->subtitle_queue );        
+        hb_buffer_t * subtitles = hb_fifo_get( pv->subtitle_queue );
         if( subtitles )
         {
             ApplySub( job, buf_tmp_in, &subtitles );
         }
     }
-    
+
     /* Apply crop/scale if specified */
     if( buf_tmp_in && pv->context )
     {
-        avpicture_fill( &pv->pic_tmp_in, buf_tmp_in->data, 
+        avpicture_fill( &pv->pic_tmp_in, buf_tmp_in->data,
                         PIX_FMT_YUV420P,
                         title->width, title->height );
-        
-        avpicture_fill( &pv->pic_tmp_out, buf_render->data, 
+
+        avpicture_fill( &pv->pic_tmp_out, buf_render->data,
                         PIX_FMT_YUV420P,
                         job->width, job->height );
 
@@ -338,11 +367,11 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
                   pv->pic_tmp_crop.data, pv->pic_tmp_crop.linesize,
                   0, title->height - (job->crop[0] + job->crop[1]),
                   pv->pic_tmp_out.data,  pv->pic_tmp_out.linesize);
-        
+
         hb_buffer_copy_settings( buf_render, buf_tmp_in );
-        
+
         buf_tmp_in = buf_render;
-    }  
+    }
 
     /* Set output to render buffer */
     (*buf_out) = buf_render;
@@ -354,44 +383,52 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
         {
             hb_buffer_close( buf_in );
             *buf_in = NULL;
-        }        
+        }
         if( buf_out && *buf_out )
         {
-            hb_buffer_close( buf_out );        
+            hb_buffer_close( buf_out );
             *buf_out = NULL;
         }
     }
     else if( buf_tmp_in != buf_render )
-    {    
+    {
         /* Copy temporary results and settings into render buffer */
         memcpy( buf_render->data, buf_tmp_in->data, buf_render->size );
         hb_buffer_copy_settings( buf_render, buf_tmp_in );
     }
-    
-    if (*buf_out)
+
+    if (*buf_out && job->vfr)
     {
         hb_fifo_push( pv->delay_queue, *buf_out );
-        *buf_out = NULL;        
+        *buf_out = NULL;
     }
 
     /*
      * Keep the last three frames in our queue, this ensures that we have the last
      * two always in there should we need to rewrite the durations on them.
      */
-    if( hb_fifo_size( pv->delay_queue ) >= 3 )
+
+    if( job->vfr )
     {
-        *buf_out = hb_fifo_get( pv->delay_queue );
-    } 
+        if( hb_fifo_size( pv->delay_queue ) >= 4 )
+        {
+            *buf_out = hb_fifo_get( pv->delay_queue );
+        }
+    }
 
-    if( *buf_out )
+    if( *buf_out && job->vfr)
     {
-        if( pv->frames_to_extend )
+        /* The current frame exists. That means it hasn't been dropped by a filter.
+           Make it accessible as ivtc_buffer so we can edit its duration if needed. */
+        ivtc_buffer = *buf_out;
+
+        if( pv->lost_time[3] > 0 )
         {
             /*
-             * A frame's been dropped by VFR detelecine.
+             * A frame's been dropped earlier by VFR detelecine.
              * Gotta make up the lost time. This will also
-             * slow down the video to 23.976fps.
-             * The dropped frame ran for 3003 ticks, so
+             * slow down the video.
+             * The dropped frame's time has to be accounted for, so
              * divvy it up amongst the 4 frames left behind.
              * This is what the delay_queue is for;
              * telecined sequences start 2 frames before
@@ -399,30 +436,63 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
              * ones you need a 2 frame delay between
              * reading input and writing output.
              */
-            ivtc_buffer = *buf_out;
-            
-            /* The 4th cached frame will be the to use. */
-            ivtc_buffer->start = pv->last_start[3];
-            ivtc_buffer->stop = pv->last_stop[3];
-
-            if (pv->frames_to_extend % 4)
-                ivtc_buffer->stop += 751;
-            else
-                ivtc_buffer->stop += 750;
-            
-            /* Set the 3rd cached frame to start when this one stops,
-               and to stop 3003 ticks later -- a normal 29.97fps
-               length frame. If it needs to be extended as well to
-               make up lost time, it'll be handled on the next
-               loop through the renderer.                            */
-            int temp_duration = pv->last_stop[2] - pv->last_start[2];
-            pv->last_start[2] = ivtc_buffer->stop;
-            pv->last_stop[2] = ivtc_buffer->stop + temp_duration;
-            
-            pv->frames_to_extend--;
+
+            /* We want to extend the outputted frame's duration by the value
+              stored in the 4th slot of the lost_time array. Because we need
+              to adjust all the values in the array so they're contiguous,
+              extend the duration inside the array first, before applying
+              it to the current frame buffer. */
+            pv->last_stop[3] += pv->lost_time[3];
+
+            /* Log how much time has been added back in to the video. */
+            pv->total_gained_time += pv->lost_time[3];
+
+            /* We've pulled the 4th value from the lost_time array
+               and added it to the last_stop array's 4th slot. Now, rotate the
+                lost_time array so the 4th slot now holds the 3rd's value, and
+               so on down the line, and set the 0 index to a value of 0. */
+            int i;
+            for( i=2; i >=  0; i--)
+            {
+                pv->lost_time[i+1] = pv->lost_time[i];
+            }
+            pv->lost_time[0] = 0;
+
+            /* Log how many frames have had their durations extended. */
             pv->extended_frames++;
         }
 
+        /* We can't use the given time stamps. Previous frames
+           might already have been extended, throwing off the
+           raw values fed to render.c. Instead, their
+           stop and start times are stored in arrays.
+           The 4th cached frame will be the one to use.
+           If it needed its duration extended to make up
+           lost time, it will have happened above. */
+        ivtc_buffer->start = pv->last_start[3];
+        ivtc_buffer->stop = pv->last_stop[3];
+
+        /* Set the 3rd cached frame to start when this one stops,
+           and so on down the line. If any of them need to be
+           extended as well to make up lost time, it'll be handled
+           on the next loop through the renderer.  */
+        int i;
+        for (i = 2; i >= 0; i--)
+        {
+            int temp_duration = pv->last_stop[i] - pv->last_start[i];
+            pv->last_start[i] = pv->last_stop[i+1];
+            pv->last_stop[i] = pv->last_start[i] + temp_duration;
+        }
+
+        /* If we have a pending chapter mark and this frame is at
+           or after the time of the mark, mark this frame & clear
+           our pending mark. */
+        if( pv->chapter_time && pv->chapter_time <= ivtc_buffer->start )
+        {
+            ivtc_buffer->new_chap = pv->chapter_val;
+            pv->chapter_time = 0;
+        }
+
     }
 
     return HB_WORK_OK;
@@ -430,34 +500,38 @@ int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
 
 void renderClose( hb_work_object_t * w )
 {
-    hb_work_private_t * pv = w->private_data;   
-        
-    hb_log("render: dropped frames: %i (%i ticks)", pv->dropped_frames, (pv->dropped_frames * 3003) );
-    hb_log("render: extended frames: %i (%i ticks)", pv->extended_frames, ( ( pv->extended_frames / 4 ) * 3003 ) );
-    hb_log("render: Lost time: %i frames (%i ticks)", (pv->dropped_frames * 4) - (pv->extended_frames), (pv->dropped_frames * 3003) - ( ( pv->extended_frames / 4 ) * 3003 ) );
+    hb_work_private_t * pv = w->private_data;
+
+    hb_log("render: lost time: %lld (%i frames)", pv->total_lost_time, pv->dropped_frames);
+    hb_log("render: gained time: %lld (%i frames) (%lld not accounted for)", pv->total_gained_time, pv->extended_frames, pv->total_lost_time - pv->total_gained_time);
+    if (pv->dropped_frames)
+        hb_log("render: average dropped frame duration: %lld", (pv->total_lost_time / pv->dropped_frames) );
 
     /* Cleanup subtitle queue */
     if( pv->subtitle_queue )
     {
         hb_fifo_close( &pv->subtitle_queue );
     }
-    
+
     if( pv->delay_queue )
     {
         hb_fifo_close( &pv->delay_queue );
     }
-   
+
     /* Cleanup render work structure */
     free( pv );
-    w->private_data = NULL;    
+    w->private_data = NULL;
 }
 
 int renderInit( hb_work_object_t * w, hb_job_t * job )
-{   
+{
     /* Allocate new private work object */
     hb_work_private_t * pv = calloc( 1, sizeof( hb_work_private_t ) );
     pv->job = job;
     w->private_data = pv;
+    uint32_t    swsflags;
+
+    swsflags = SWS_LANCZOS | SWS_ACCURATE_RND;
 
     /* Get title and title size */
     hb_title_t * title = job->title;
@@ -470,37 +544,48 @@ int renderInit( hb_work_object_t * w, hb_job_t * job )
                                      title->height - (job->crop[0] + job->crop[1]),
                                      PIX_FMT_YUV420P,
                                      job->width, job->height, PIX_FMT_YUV420P,
-                                     (uint16_t)(SWS_LANCZOS|SWS_ACCURATE_RND), NULL, NULL, NULL);
-    }   
-    
+                                     swsflags, NULL, NULL, NULL);
+    }
+
     /* Setup FIFO queue for subtitle cache */
-    pv->subtitle_queue = hb_fifo_init( 8 );    
+    pv->subtitle_queue = hb_fifo_init( 8 );
     pv->delay_queue = hb_fifo_init( 8 );
-    pv->frames_to_extend = 0;
+
+    /* VFR IVTC needs a bunch of time-keeping variables to track
+      how many frames are dropped, how many are extended, what the
+      last 4 start and stop times were (so they can be modified),
+      how much time has been lost and gained overall, how much time
+      the latest 4 frames should be extended by, and where chapter
+      markers are (so they can be saved if their frames are dropped). */
     pv->dropped_frames = 0;
     pv->extended_frames = 0;
     pv->last_start[0] = 0;
     pv->last_stop[0] = 0;
-    
+    pv->total_lost_time = 0;
+    pv->total_gained_time = 0;
+    pv->lost_time[0] = 0; pv->lost_time[1] = 0; pv->lost_time[2] = 0; pv->lost_time[3] = 0;
+    pv->chapter_time = 0;
+    pv->chapter_val  = 0;
+
     /* Setup filters */
     /* TODO: Move to work.c? */
     if( job->filters )
     {
         int filter_count = hb_list_count( job->filters );
         int i;
-        
+
         for( i = 0; i < filter_count; i++ )
         {
             hb_filter_object_t * filter = hb_list_item( job->filters, i );
 
             if( !filter ) continue;
-            
+
             filter->private_data = filter->init( PIX_FMT_YUV420P,
                                                  title->width,
                                                  title->height,
                                                  filter->settings );
         }
     }
-    
+
     return 0;
 }