1 /* $Id: render.c,v 1.17 2005/04/14 17:37:54 titer Exp $
3 This file is part of the HandBrake source code.
4 Homepage: <http://handbrake.fr/>.
5 It may be used under the terms of the GNU General Public License. */
/* Private state for the render work object: the crop/scale context,
   scratch pictures, subtitle/frame FIFOs, and the VFR time-keeping
   fields used by renderWork to absorb time lost to dropped frames. */
10 struct hb_work_private_s
/* libswscale context; non-NULL only when the job crops or resizes
   (set up conditionally in renderInit) */
14 struct SwsContext * context;
/* scratch AVPictures used by the crop/scale path in renderWork */
16 AVPicture pic_tmp_crop;
17 AVPicture pic_tmp_out;
/* NOTE(review): buf_scale is not referenced anywhere in this chunk */
18 hb_buffer_t * buf_scale;
/* subtitles queued in lockstep with frames so they survive frame delays */
19 hb_fifo_t * subtitle_queue;
/* frames held back so their timestamps can be rewritten for VFR */
20 hb_fifo_t * delay_queue;
/* start/stop times of the 4 most recent frames (index 0 = newest;
   renderWork shifts these toward index 3 each frame) */
23 uint64_t last_start[4];
24 uint64_t last_stop[4];
/* per-slot time to add back after a dropped frame; renderWork spreads
   each dropped frame's duration across these 4 slots */
25 uint64_t lost_time[4];
/* running totals: time lost to dropped frames / time regained by
   extending later frames (logged in renderClose) */
26 uint64_t total_lost_time;
27 uint64_t total_gained_time;
/* Work-object entry points for the render stage. */
32 int renderInit( hb_work_object_t *, hb_job_t * );
33 int renderWork( hb_work_object_t *, hb_buffer_t **, hb_buffer_t ** );
34 void renderClose( hb_work_object_t * );
/* Work object describing the render stage (init/work/close above). */
36 hb_work_object_t hb_render =
48 * Utility function that finds where the U is in the YUV sub-picture
50 * The Y data is at the top, followed by U and V, but the U and V
51 * are half the width of the Y, i.e. each chroma element covers 2x2
static uint8_t *getU(uint8_t *data, int width, int height, int x, int y)
{
    /* Step over the width*height luma plane, then index into the U
       plane: rows are ceil(width/2) bytes wide and both coordinates
       are halved (4:2:0 chroma subsampling). */
    int chroma_stride = (width + 1) >> 1;

    return data + width * height + (y >> 1) * chroma_stride + (x >> 1);
}
static uint8_t *getV(uint8_t *data, int width, int height, int x, int y)
{
    /* The V plane follows the width*height luma plane and the
       ceil(w/2) x ceil(h/2) U plane; coordinates are halved as in
       getU. */
    int cw = (width + 1) >> 1;
    int ch = (height + 1) >> 1;

    return data + width * height + cw * ch + (y >> 1) * cw + (x >> 1);
}
/*
 * Burn a decoded subtitle bitmap (*_sub) into the video frame `buf`.
 * The subtitle is first repositioned so it stays out of the cropped
 * area with a small margin; its luma is then alpha-blended into the
 * frame (4-bit alpha), its chroma written into the frame's U/V
 * planes, and the subtitle buffer released with hb_buffer_close().
 */
65 static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
68 hb_buffer_t * sub = *_sub;
69 hb_title_t * title = job->title;
70 int i, j, offset_top, offset_left, margin_top, margin_percent;
71 uint8_t * lum, * alpha, * out, * sub_chromaU, * sub_chromaV;
74 * Percent of height of picture that form a margin that subtitles
75 * should not be displayed within.
85 * If necessary, move the subtitle so it is not in a cropped zone.
86 * When it won't fit, we center it so we lose as much on both ends.
87 * Otherwise we try to leave a 20px or 2% margin around it.
89 margin_top = ( ( title->height - job->crop[0] - job->crop[1] ) *
90 margin_percent ) / 100;
95 * A maximum margin of 20px regardless of height of the picture.
100 if( sub->height > title->height - job->crop[0] - job->crop[1] -
104 * The subtitle won't fit in the cropped zone, so center
105 * it vertically so we fit in as much as we can.
107 offset_top = job->crop[0] + ( title->height - job->crop[0] -
108 job->crop[1] - sub->height ) / 2;
110 else if( sub->y < job->crop[0] + margin_top )
113 * The subtitle fits in the cropped zone, but is currently positioned
114 * within our top margin, so move it outside of our margin.
116 offset_top = job->crop[0] + margin_top;
118 else if( sub->y > title->height - job->crop[1] - margin_top - sub->height )
121 * The subtitle fits in the cropped zone, and is not within the top
122 * margin but is within the bottom margin, so move it to be above
125 offset_top = title->height - job->crop[1] - margin_top - sub->height;
130 * The subtitle is fine where it is.
/* Horizontal placement mirrors the vertical logic, with a fixed
   20px / 40px margin instead of a percentage of the picture. */
135 if( sub->width > title->width - job->crop[2] - job->crop[3] - 40 )
136 offset_left = job->crop[2] + ( title->width - job->crop[2] -
137 job->crop[3] - sub->width ) / 2;
138 else if( sub->x < job->crop[2] + 20 )
139 offset_left = job->crop[2] + 20;
140 else if( sub->x > title->width - job->crop[3] - 20 - sub->width )
141 offset_left = title->width - job->crop[3] - 20 - sub->width;
143 offset_left = sub->x;
/* The sub-picture is four packed width*height planes: luma, alpha,
   U, V.  NOTE(review): lum's initialization is not visible in this
   chunk -- presumably lum = sub->data; confirm against the full file. */
146 alpha = lum + sub->width * sub->height;
147 sub_chromaU = alpha + sub->width * sub->height;
148 sub_chromaV = sub_chromaU + sub->width * sub->height;
/* Destination luma pointer at the subtitle's top-left corner. */
150 out = buf->data + offset_top * title->width + offset_left;
152 for( i = 0; i < sub->height; i++ )
/* Clip each row/column to the frame so a partially off-screen
   subtitle never writes out of bounds. */
154 if( offset_top + i >= 0 && offset_top + i < title->height )
156 for( j = 0; j < sub->width; j++ )
158 if( offset_left + j >= 0 && offset_left + j < title->width )
160 uint8_t *chromaU, *chromaV;
163 * Merge the luminance and alpha with the picture
/* 4-bit alpha blend: alpha 16 = fully subtitle, 0 = fully video. */
165 out[j] = ( (uint16_t) out[j] * ( 16 - (uint16_t) alpha[j] ) +
166 (uint16_t) lum[j] * (uint16_t) alpha[j] ) >> 4;
168 * Set the chroma (colour) based on whether there is
169 * any alpha at all. Don't try to blend with the picture.
171 chromaU = getU(buf->data, title->width, title->height,
172 offset_left+j, offset_top+i);
174 chromaV = getV(buf->data, title->width, title->height,
175 offset_left+j, offset_top+i);
180 * Add the chroma from the sub-picture, as this is
181 * not a transparent element.
183 *chromaU = sub_chromaU[j];
184 *chromaV = sub_chromaV[j];
/* Advance the source chroma pointers one sub-picture row. */
192 sub_chromaU += sub->width;
193 sub_chromaV += sub->width;
/* Done with this subtitle; release its buffer. */
197 hb_buffer_close( _sub );
/*
 * Per-frame work function for the render stage.  Drains the delay
 * queue at end of stream, queues the frame's subtitle and any chapter
 * mark, runs the frame through the job's filter chain (handling the
 * OK / DELAY / DROP / FAILED results), applies subtitles, crops and
 * scales into the render buffer, and -- for VFR jobs -- delays output
 * through delay_queue while rewriting frame start/stop times to make
 * up time lost to dropped frames.
 */
200 int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
201 hb_buffer_t ** buf_out )
203 hb_work_private_t * pv = w->private_data;
204 hb_job_t * job = pv->job;
205 hb_title_t * title = job->title;
206 hb_buffer_t * in = *buf_in, * buf_tmp_in = *buf_in;
207 hb_buffer_t * ivtc_buffer = NULL;
211 hb_buffer_t *head = NULL, *tail = NULL, *next;
214 /* If the input buffer is end of stream, send out an empty one
215 * to the next stage as well. To avoid losing the contents of
216 * the delay queue connect the buffers in the delay queue in
217 * the correct order, and add the end of stream buffer to the
/* NOTE: assignment inside the condition is intentional -- loop
   until the delay queue is drained. */
220 while( next = hb_fifo_get( pv->delay_queue ) )
223 /* We can't use the given time stamps. Previous frames
224 might already have been extended, throwing off the
225 raw values fed to render.c. Instead, their
226 stop and start times are stored in arrays.
227 The 4th cached frame will be the to use.
228 If it needed its duration extended to make up
229 lost time, it will have happened above. */
/* NOTE(review): `counter`'s declaration/initialization is not
   visible in this chunk; it indexes the cached timestamp arrays. */
230 next->start = pv->last_start[counter];
231 next->stop = pv->last_stop[counter--];
253 * During the indepth_scan ditch the buffers here before applying filters or attempting to
256 if( job->indepth_scan )
262 /* Push subtitles onto queue just in case we need to delay a frame */
265 hb_fifo_push( pv->subtitle_queue, in->sub );
/* No subtitle on this frame: push an empty placeholder so the
   subtitle queue stays in lockstep with the frames. */
269 hb_fifo_push( pv->subtitle_queue, hb_buffer_init(0) );
272 /* If there's a chapter mark remember it in case we delay or drop its frame */
273 if( in->new_chap && job->vfr )
275 pv->chapter_time = in->start;
276 pv->chapter_val = in->new_chap;
280 /* Setup render buffer */
281 hb_buffer_t * buf_render = hb_video_buffer_init( job->width, job->height );
/* Run the frame through every filter attached to the job. */
286 int filter_count = hb_list_count( job->filters );
289 for( i = 0; i < filter_count; i++ )
291 hb_filter_object_t * filter = hb_list_item( job->filters, i );
298 hb_buffer_t * buf_tmp_out = NULL;
300 int result = filter->work( buf_tmp_in,
305 filter->private_data );
308 * FILTER_OK: set temp buffer to filter buffer, continue
309 * FILTER_DELAY: set temp buffer to NULL, abort
310 * FILTER_DROP: set temp buffer to NULL, pop subtitle, abort
311 * FILTER_FAILED: leave temp buffer alone, continue
313 if( result == FILTER_OK )
315 buf_tmp_in = buf_tmp_out;
317 else if( result == FILTER_DELAY )
322 else if( result == FILTER_DROP )
326 /* We need to compensate for the time lost by dropping this frame.
327 Spread its duration out in quarters, because usually dropped frames
328 maintain a 1-out-of-5 pattern and this spreads it out amongst the remaining ones.
329 Store these in the lost_time array, which has 4 slots in it.
330 Because not every frame duration divides evenly by 4, and we can't lose the
331 remainder, we have to go through an awkward process to preserve it in the 4th array index. */
332 uint64_t temp_duration = buf_tmp_out->stop - buf_tmp_out->start;
333 pv->lost_time[0] += (temp_duration / 4);
334 pv->lost_time[1] += (temp_duration / 4);
335 pv->lost_time[2] += (temp_duration / 4);
/* Slot 3 gets the quarter plus whatever integer-division remainder
   the first three slots dropped. */
336 pv->lost_time[3] += ( temp_duration - (temp_duration / 4) - (temp_duration / 4) - (temp_duration / 4) );
338 pv->total_lost_time += temp_duration;
339 pv->dropped_frames++;
341 /* Pop the frame's subtitle and dispose of it. */
342 hb_buffer_t * subtitles = hb_fifo_get( pv->subtitle_queue );
343 hb_buffer_close( &subtitles );
349 buf_tmp_in = buf_tmp_out;
357 /* Cache frame start and stop times, so we can renumber
358 time stamps if dropping frames for VFR. */
360 for( i = 3; i >= 1; i-- )
362 pv->last_start[i] = pv->last_start[i-1];
363 pv->last_stop[i] = pv->last_stop[i-1];
366 /* In order to make sure we have continuous time stamps, store
367 the current frame's duration as starting when the last one stopped. */
368 pv->last_start[0] = pv->last_stop[1];
369 pv->last_stop[0] = pv->last_start[0] + (in->stop - in->start);
372 /* Apply subtitles */
375 hb_buffer_t * subtitles = hb_fifo_get( pv->subtitle_queue );
378 ApplySub( job, buf_tmp_in, &subtitles );
382 /* Apply crop/scale if specified */
383 if( buf_tmp_in && pv->context )
385 avpicture_fill( &pv->pic_tmp_in, buf_tmp_in->data,
387 title->width, title->height );
389 avpicture_fill( &pv->pic_tmp_out, buf_render->data,
391 job->width, job->height );
393 // Crop; this alters the pointer to the data to point to the correct place for cropped frame
394 av_picture_crop( &pv->pic_tmp_crop, &pv->pic_tmp_in, PIX_FMT_YUV420P,
395 job->crop[0], job->crop[2] );
397 // Scale pic_crop into pic_render according to the context set up in renderInit
398 sws_scale(pv->context,
399 pv->pic_tmp_crop.data, pv->pic_tmp_crop.linesize,
400 0, title->height - (job->crop[0] + job->crop[1]),
401 pv->pic_tmp_out.data, pv->pic_tmp_out.linesize);
403 hb_buffer_copy_settings( buf_render, buf_tmp_in );
405 buf_tmp_in = buf_render;
408 /* Set output to render buffer */
409 (*buf_out) = buf_render;
411 if( buf_tmp_in == NULL )
413 /* Teardown and cleanup buffers if we are emitting NULL */
414 if( buf_in && *buf_in )
416 hb_buffer_close( buf_in );
419 if( buf_out && *buf_out )
421 hb_buffer_close( buf_out );
425 else if( buf_tmp_in != buf_render )
427 /* Copy temporary results and settings into render buffer */
428 memcpy( buf_render->data, buf_tmp_in->data, buf_render->size );
429 hb_buffer_copy_settings( buf_render, buf_tmp_in );
/* For VFR, route the output frame through the delay queue so its
   duration can still be adjusted on later calls. */
432 if (*buf_out && job->vfr)
434 hb_fifo_push( pv->delay_queue, *buf_out );
439 * Keep the last three frames in our queue, this ensures that we have the last
440 * two always in there should we need to rewrite the durations on them.
445 if( hb_fifo_size( pv->delay_queue ) >= 4 )
447 *buf_out = hb_fifo_get( pv->delay_queue );
451 if( *buf_out && job->vfr)
453 /* The current frame exists. That means it hasn't been dropped by a filter.
454 Make it accessible as ivtc_buffer so we can edit its duration if needed. */
455 ivtc_buffer = *buf_out;
457 if( pv->lost_time[3] > 0 )
460 * A frame's been dropped earlier by VFR detelecine.
461 * Gotta make up the lost time. This will also
462 * slow down the video.
463 * The dropped frame's has to be accounted for, so
464 * divvy it up amongst the 4 frames left behind.
465 * This is what the delay_queue is for;
466 * telecined sequences start 2 frames before
467 * the dropped frame, so to slow down the right
468 * ones you need a 2 frame delay between
469 * reading input and writing output.
472 /* We want to extend the outputted frame's duration by the value
473 stored in the 4th slot of the lost_time array. Because we need
474 to adjust all the values in the array so they're contiguous,
475 extend the duration inside the array first, before applying
476 it to the current frame buffer. */
477 pv->last_stop[3] += pv->lost_time[3];
479 /* Log how much time has been added back in to the video. */
480 pv->total_gained_time += pv->lost_time[3];
482 /* We've pulled the 4th value from the lost_time array
483 and added it to the last_stop array's 4th slot. Now, rotate the
484 lost_time array so the 4th slot now holds the 3rd's value, and
485 so on down the line, and set the 0 index to a value of 0. */
487 for( i=2; i >= 0; i--)
489 pv->lost_time[i+1] = pv->lost_time[i];
491 pv->lost_time[0] = 0;
493 /* Log how many frames have had their durations extended. */
494 pv->extended_frames++;
497 /* We can't use the given time stamps. Previous frames
498 might already have been extended, throwing off the
499 raw values fed to render.c. Instead, their
500 stop and start times are stored in arrays.
501 The 4th cached frame will be the to use.
502 If it needed its duration extended to make up
503 lost time, it will have happened above. */
504 ivtc_buffer->start = pv->last_start[3];
505 ivtc_buffer->stop = pv->last_stop[3];
507 /* Set the 3rd cached frame to start when this one stops,
508 and so on down the line. If any of them need to be
509 extended as well to make up lost time, it'll be handled
510 on the next loop through the renderer. */
512 for (i = 2; i >= 0; i--)
514 int temp_duration = pv->last_stop[i] - pv->last_start[i];
515 pv->last_start[i] = pv->last_stop[i+1];
516 pv->last_stop[i] = pv->last_start[i] + temp_duration;
519 /* If we have a pending chapter mark and this frame is at
520 or after the time of the mark, mark this frame & clear
522 if( pv->chapter_time && pv->chapter_time <= ivtc_buffer->start )
524 ivtc_buffer->new_chap = pv->chapter_val;
525 pv->chapter_time = 0;
/*
 * Tear down the render stage: log the VFR drop/extend statistics and
 * release the subtitle and delay FIFOs.
 */
533 void renderClose( hb_work_object_t * w )
535 hb_work_private_t * pv = w->private_data;
/* NOTE(review): the totals are uint64_t but printed with %lld, and
   %lld/%i are mixed -- PRIu64 from <inttypes.h> would be more
   portable; confirm against hb_log's expectations before changing. */
537 hb_log("render: lost time: %lld (%i frames)", pv->total_lost_time, pv->dropped_frames);
538 hb_log("render: gained time: %lld (%i frames) (%lld not accounted for)", pv->total_gained_time, pv->extended_frames, pv->total_lost_time - pv->total_gained_time);
539 if (pv->dropped_frames)
540 hb_log("render: average dropped frame duration: %lld", (pv->total_lost_time / pv->dropped_frames) );
542 /* Cleanup subtitle queue */
543 if( pv->subtitle_queue )
545 hb_fifo_close( &pv->subtitle_queue );
548 if( pv->delay_queue )
550 hb_fifo_close( &pv->delay_queue );
553 /* Cleanup render work structure */
555 w->private_data = NULL;
/*
 * Initialize the render stage for a job: allocate zeroed private
 * state, create a libswscale context when the job crops or resizes,
 * create the subtitle/delay FIFOs, reset the VFR time-keeping
 * fields, and initialize each filter in the job's filter chain.
 */
558 int renderInit( hb_work_object_t * w, hb_job_t * job )
560 /* Allocate new private work object */
561 hb_work_private_t * pv = calloc( 1, sizeof( hb_work_private_t ) );
563 w->private_data = pv;
/* Scaler quality flags used for the crop/scale context below. */
566 swsflags = SWS_LANCZOS | SWS_ACCURATE_RND;
568 /* Get title and title size */
569 hb_title_t * title = job->title;
571 /* If crop or scale is specified, setup rescale context */
572 if( job->crop[0] || job->crop[1] || job->crop[2] || job->crop[3] ||
573 job->width != title->width || job->height != title->height )
575 pv->context = sws_getContext(title->width - (job->crop[2] + job->crop[3]),
576 title->height - (job->crop[0] + job->crop[1]),
578 job->width, job->height, PIX_FMT_YUV420P,
579 swsflags, NULL, NULL, NULL);
582 /* Setup FIFO queue for subtitle cache */
583 pv->subtitle_queue = hb_fifo_init( 8 );
584 pv->delay_queue = hb_fifo_init( 8 );
586 /* VFR IVTC needs a bunch of time-keeping variables to track
587 how many frames are dropped, how many are extended, what the
588 last 4 start and stop times were (so they can be modified),
589 how much time has been lost and gained overall, how much time
590 the latest 4 frames should be extended by, and where chapter
591 markers are (so they can be saved if their frames are dropped.) */
/* NOTE(review): pv came from calloc, so these explicit zeroings are
   redundant but harmless documentation of the fields in play. */
592 pv->dropped_frames = 0;
593 pv->extended_frames = 0;
594 pv->last_start[0] = 0;
595 pv->last_stop[0] = 0;
596 pv->total_lost_time = 0;
597 pv->total_gained_time = 0;
598 pv->lost_time[0] = 0; pv->lost_time[1] = 0; pv->lost_time[2] = 0; pv->lost_time[3] = 0;
599 pv->chapter_time = 0;
603 /* TODO: Move to work.c? */
/* Give each attached filter a chance to initialize itself. */
606 int filter_count = hb_list_count( job->filters );
609 for( i = 0; i < filter_count; i++ )
611 hb_filter_object_t * filter = hb_list_item( job->filters, i );
613 if( !filter ) continue;
615 filter->private_data = filter->init( PIX_FMT_YUV420P,