1 /* $Id: render.c,v 1.17 2005/04/14 17:37:54 titer Exp $
3 This file is part of the HandBrake source code.
4 Homepage: <http://handbrake.m0k.org/>.
5 It may be used under the terms of the GNU General Public License. */
9 #include "ffmpeg/avcodec.h"
10 #include "ffmpeg/swscale.h"
/* Private state for the render work object.
 * NOTE(review): additional fields referenced later in this file
 * (pic_tmp_in, job, frames_to_extend, dropped_frames, extended_frames)
 * are not visible in this chunk. */
struct hb_work_private_s
    struct SwsContext * context;   /* libswscale crop+scale context; NULL when output geometry == source (see renderInit) */
    AVPicture pic_tmp_crop;        /* cropped view into the input frame, filled by av_picture_crop() */
    AVPicture pic_tmp_out;         /* wraps the output render buffer for sws_scale() */
    hb_buffer_t * buf_scale;
    hb_fifo_t * subtitle_queue;    /* subtitles queued in lockstep with frames so they survive a 1-frame delay */
    hb_fifo_t * delay_queue;       /* short output delay so frame durations can be rewritten (VFR detelecine) */
28 int renderInit( hb_work_object_t *, hb_job_t * );
29 int renderWork( hb_work_object_t *, hb_buffer_t **, hb_buffer_t ** );
30 void renderClose( hb_work_object_t * );
32 hb_work_object_t hb_render =
44 * Utility function that finds where the U is in the YUV sub-picture
46 * The Y data is at the top, followed by U and V, but the U and V
47 * are half the width of the Y, i.e. each chroma element covers 2x2
/* Return a pointer to the U (chroma) sample covering pixel (x, y) in a
 * planar YUV 4:2:0 frame. The U plane starts right after the
 * width*height luma plane; chroma is subsampled 2x2, so each chroma row
 * is width/2 bytes and each sample covers a 2x2 luma block. */
static uint8_t *getU(uint8_t *data, int width, int height, int x, int y)
{
    int plane_start = width * height;         /* end of the Y plane */
    int chroma_row  = (y / 2) * (width / 2);  /* half-resolution row offset */

    return &data[plane_start + chroma_row + (x / 2)];
}
/* Return a pointer to the V (chroma) sample covering pixel (x, y) in a
 * planar YUV 4:2:0 frame. Identical addressing to getU(), except the V
 * plane additionally skips over the U plane, which holds
 * (width*height)/4 bytes.
 * NOTE(review): the original expression here was truncated (dangling
 * '+'); the (width*height)/4 term restores the standard planar 4:2:0
 * layout Y|U|V implied by the rest of this file. */
static uint8_t *getV(uint8_t *data, int width, int height, int x, int y)
{
    return(&data[(((y/2) * (width/2)) + (x/2)) + (width*height) +
                 ((width*height)/4)]);
}
/*
 * ApplySub: burn a decoded subtitle bitmap (*_sub) into a raw planar
 * YUV 4:2:0 frame (buf), repositioning the subtitle so it stays inside
 * the non-cropped area, then release the subtitle buffer.
 *
 * The subtitle buffer holds consecutive sub->width * sub->height byte
 * planes: luma, 4-bit alpha, then U and V (see the pointer setup below).
 * NOTE(review): several lines of this function (the rest of the
 * parameter list, braces, lum's initialization, the per-row luma/alpha
 * advances) are not visible in this chunk — comments describe only the
 * visible code.
 */
static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
    hb_buffer_t * sub = *_sub;
    hb_title_t * title = job->title;
    int i, j, offset_top, offset_left;
    uint8_t * lum, * alpha, * out, * sub_chromaU, * sub_chromaV;

    /* If necessary, move the subtitle so it is not in a cropped zone.
       When it won't fit, we center it so we lose as much on both ends.
       Otherwise we try to leave a 20px margin around it. */

    /* Vertical placement: center when taller than the cropped area
       minus a 2*20px margin, else clamp inside the top/bottom crop. */
    if( sub->height > title->height - job->crop[0] - job->crop[1] - 40 )
        offset_top = job->crop[0] + ( title->height - job->crop[0] -
                     job->crop[1] - sub->height ) / 2;
    else if( sub->y < job->crop[0] + 20 )
        offset_top = job->crop[0] + 20;
    else if( sub->y > title->height - job->crop[1] - 20 - sub->height )
        offset_top = title->height - job->crop[1] - 20 - sub->height;

    /* Horizontal placement: same policy as vertical. */
    if( sub->width > title->width - job->crop[2] - job->crop[3] - 40 )
        offset_left = job->crop[2] + ( title->width - job->crop[2] -
                      job->crop[3] - sub->width ) / 2;
    else if( sub->x < job->crop[2] + 20 )
        offset_left = job->crop[2] + 20;
    else if( sub->x > title->width - job->crop[3] - 20 - sub->width )
        offset_left = title->width - job->crop[3] - 20 - sub->width;

    /* Locate the alpha, U and V planes inside the subtitle buffer:
       each plane is sub->width * sub->height bytes, packed back to back. */
    alpha = lum + sub->width * sub->height;
    sub_chromaU = alpha + sub->width * sub->height;
    sub_chromaV = sub_chromaU + sub->width * sub->height;

    /* Destination: subtitle's top-left corner in the frame's Y plane */
    out = buf->data + offset_top * title->width + offset_left;

    for( i = 0; i < sub->height; i++ )
        /* Clip rows/columns that would land outside the frame */
        if( offset_top + i >= 0 && offset_top + i < title->height )
            for( j = 0; j < sub->width; j++ )
                if( offset_left + j >= 0 && offset_left + j < title->width )
                    uint8_t *chromaU, *chromaV;

                    /*
                     * Merge the luminance and alpha with the picture.
                     * alpha is 0..16: 16 means fully opaque subtitle,
                     * hence the >> 4 renormalization.
                     */
                    out[j] = ( (uint16_t) out[j] * ( 16 - (uint16_t) alpha[j] ) +
                               (uint16_t) lum[j] * (uint16_t) alpha[j] ) >> 4;
                    /*
                     * Set the chroma (colour) based on whether there is
                     * any alpha at all. Don't try to blend with the picture.
                     */
                    chromaU = getU(buf->data, title->width, title->height,
                                   offset_left+j, offset_top+i);
                    chromaV = getV(buf->data, title->width, title->height,
                                   offset_left+j, offset_top+i);

                    /*
                     * Add the chroma from the sub-picture, as this is
                     * not a transparent element.
                     */
                    *chromaU = sub_chromaU[j];
                    *chromaV = sub_chromaV[j];

        /* Advance to the next subtitle chroma row (luma/alpha/out row
           advances are among the lines not visible in this chunk) */
        sub_chromaU += sub->width;
        sub_chromaV += sub->width;

    /* Subtitle has been rendered — release it */
    hb_buffer_close( _sub );
/*
 * renderWork: per-frame render stage. Runs the input frame through the
 * job's filter chain, burns in any queued subtitle, crops/scales into
 * the output geometry, then routes the result through a small delay
 * queue so frame durations can be rewritten when VFR detelecine drops
 * a frame.
 *
 * NOTE(review): many lines of this function (braces, else clauses,
 * several statements and the return paths) are missing from this chunk;
 * comments describe only what is visible.
 */
int renderWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
                hb_buffer_t ** buf_out )
    hb_work_private_t * pv = w->private_data;
    hb_job_t * job = pv->job;
    hb_title_t * title = job->title;
    hb_buffer_t * in = *buf_in, * buf_tmp_in = *buf_in;
    hb_buffer_t * ivtc_buffer = NULL;

    /* If the input buffer is end of stream, send out an empty one
     * to the next stage as well. Note that this will result in us
     * losing the current contents of the delay queue.
     */
    *buf_out = hb_buffer_init(0);

    /*
     * During the indepth_scan ditch the buffers here before applying filters or attempting to
     */
    if( job->indepth_scan )

    /* Push subtitles onto queue just in case we need to delay a frame */
    hb_fifo_push( pv->subtitle_queue, in->sub );

    /* No subtitle on this frame: push an empty placeholder so the
       subtitle queue stays in lockstep with the frame flow. */
    hb_fifo_push( pv->subtitle_queue, hb_buffer_init(0) );

    /* Setup render buffer: one planar YUV 4:2:0 frame at output size */
    hb_buffer_t * buf_render = hb_buffer_init( 3 * job->width * job->height / 2 );

    /* Run every filter attached to the job, in order */
    int filter_count = hb_list_count( job->filters );

    for( i = 0; i < filter_count; i++ )
        hb_filter_object_t * filter = hb_list_item( job->filters, i );

        hb_buffer_t * buf_tmp_out = NULL;

        /* NOTE(review): additional filter->work arguments are not
           visible in this chunk */
        int result = filter->work( buf_tmp_in,
                                   filter->private_data );

        /*
         * FILTER_OK: set temp buffer to filter buffer, continue
         * FILTER_DELAY: set temp buffer to NULL, abort
         * FILTER_DROP: set temp buffer to NULL, pop subtitle, abort
         * FILTER_FAILED: leave temp buffer alone, continue
         */
        if( result == FILTER_OK )
            buf_tmp_in = buf_tmp_out;
        else if( result == FILTER_DELAY )
        else if( result == FILTER_DROP )
            /* Frame dropped: discard its queued subtitle as well */
            hb_fifo_get( pv->subtitle_queue );

            /* The dropped frame's 3003-tick duration will be spread
               over the next 4 emitted frames (see below). */
            pv->frames_to_extend += 4;
            pv->dropped_frames++;

    /* Apply subtitles */
    hb_buffer_t * subtitles = hb_fifo_get( pv->subtitle_queue );

    ApplySub( job, buf_tmp_in, &subtitles );

    /* Apply crop/scale if specified */
    if( buf_tmp_in && pv->context )
        avpicture_fill( &pv->pic_tmp_in, buf_tmp_in->data,
                        title->width, title->height );

        avpicture_fill( &pv->pic_tmp_out, buf_render->data,
                        job->width, job->height );

        // Crop; this alters the pointer to the data to point to the correct place for cropped frame
        av_picture_crop( &pv->pic_tmp_crop, &pv->pic_tmp_in, PIX_FMT_YUV420P,
                         job->crop[0], job->crop[2] );

        // Scale pic_crop into pic_render according to the context set up in renderInit
        sws_scale(pv->context,
                  pv->pic_tmp_crop.data, pv->pic_tmp_crop.linesize,
                  0, title->height - (job->crop[0] + job->crop[1]),
                  pv->pic_tmp_out.data, pv->pic_tmp_out.linesize);

        /* Carry timing/settings from the source frame to the scaled one */
        hb_buffer_copy_settings( buf_render, buf_tmp_in );

        buf_tmp_in = buf_render;

    /* Set output to render buffer */
    (*buf_out) = buf_render;

    if( buf_tmp_in == NULL )
        /* Teardown and cleanup buffers if we are emitting NULL */
        if( buf_in && *buf_in )
            hb_buffer_close( buf_in );
        if( buf_out && *buf_out )
            hb_buffer_close( buf_out );
    else if( buf_tmp_in != buf_render )
        /* Copy temporary results and settings into render buffer */
        memcpy( buf_render->data, buf_tmp_in->data, buf_render->size );
        hb_buffer_copy_settings( buf_render, buf_tmp_in );

    /* Delay the output frame so durations can be rewritten after the fact */
    hb_fifo_push( pv->delay_queue, *buf_out );

    /*
     * Keep the last three frames in our queue, this ensures that we have the last
     * two always in there should we need to rewrite the durations on them.
     */
    if( hb_fifo_size( pv->delay_queue ) >= 3 )
        *buf_out = hb_fifo_get( pv->delay_queue );

    if( pv->frames_to_extend )
        /*
         * A frame's been dropped by VFR detelecine.
         * Gotta make up the lost time. This will also
         * slow down the video to 23.976fps.
         * The dropped frame ran for 3003 ticks, so
         * divvy it up amongst the 4 frames left behind.
         * This is what the delay_queue is for;
         * telecined sequences start 2 frames before
         * the dropped frame, so to slow down the right
         * ones you need a 2 frame delay between
         * reading input and writing output.
         */
        ivtc_buffer = *buf_out;

        /* 751 + 751 + 751 + 750 = 3003 ticks redistributed.
           NOTE(review): an `else` line between these two += statements
           appears to be missing from this chunk — confirm upstream. */
        if (pv->frames_to_extend % 4)
            ivtc_buffer->stop += 751;
            ivtc_buffer->stop += 750;

        pv->frames_to_extend--;
        pv->extended_frames++;
/*
 * renderClose: tear down the render stage. Logs the VFR drop/extend
 * statistics, closes the subtitle and delay queues, closes every filter
 * attached to the job and releases the filter list, then clears the
 * work object's private data pointer.
 * NOTE(review): some lines (braces, a loop index declaration, and
 * presumably a free(pv)) are not visible in this chunk.
 */
void renderClose( hb_work_object_t * w )
    hb_work_private_t * pv = w->private_data;

    /* Summarize the duration bookkeeping: one frame == 3003 ticks,
       each drop is repaid by extending 4 later frames. */
    hb_log("render: dropped frames: %i (%i ticks)", pv->dropped_frames, (pv->dropped_frames * 3003) );
    hb_log("render: extended frames: %i (%i ticks)", pv->extended_frames, ( ( pv->extended_frames / 4 ) * 3003 ) );
    hb_log("render: Lost time: %i frames (%i ticks)", (pv->dropped_frames * 4) - (pv->extended_frames), (pv->dropped_frames * 3003) - ( ( pv->extended_frames / 4 ) * 3003 ) );

    /* Cleanup subtitle queue */
    if( pv->subtitle_queue )
        hb_fifo_close( &pv->subtitle_queue );

    /* Cleanup the output delay queue */
    if( pv->delay_queue )
        hb_fifo_close( &pv->delay_queue );

    /* Cleanup filters */
    /* TODO: Move to work.c? */
    if( pv->job->filters )
        int filter_count = hb_list_count( pv->job->filters );

        for( i = 0; i < filter_count; i++ )
            hb_filter_object_t * filter = hb_list_item( pv->job->filters, i );
            if( !filter ) continue;
            filter->close( filter->private_data );

        hb_list_close( &pv->job->filters );

    /* Cleanup render work structure */
    w->private_data = NULL;
/*
 * renderInit: allocate and initialize the render stage's private data.
 * Creates a libswscale context when any crop or resize is required,
 * sets up the subtitle and delay FIFOs, zeroes the VFR counters, and
 * initializes each filter attached to the job.
 * NOTE(review): this chunk ends mid-way through the filter->init call;
 * the remaining arguments and the return statement are not visible.
 */
int renderInit( hb_work_object_t * w, hb_job_t * job )
    /* Allocate new private work object */
    hb_work_private_t * pv = calloc( 1, sizeof( hb_work_private_t ) );
    w->private_data = pv;

    /* Get title and title size */
    hb_title_t * title = job->title;

    /* If crop or scale is specified, setup rescale context */
    if( job->crop[0] || job->crop[1] || job->crop[2] || job->crop[3] ||
        job->width != title->width || job->height != title->height )
        /* Source geometry is the title size minus the crop margins;
           crop[0..3] = top, bottom, left, right. pv->context stays NULL
           (calloc) when no rescale is needed, which renderWork checks. */
        pv->context = sws_getContext(title->width - (job->crop[2] + job->crop[3]),
                                     title->height - (job->crop[0] + job->crop[1]),
                                     job->width, job->height, PIX_FMT_YUV420P,
                                     (uint16_t)(SWS_LANCZOS|SWS_ACCURATE_RND), NULL, NULL, NULL);

    /* Setup FIFO queue for subtitle cache */
    pv->subtitle_queue = hb_fifo_init( 8 );
    pv->delay_queue = hb_fifo_init( 8 );
    pv->frames_to_extend = 0;
    pv->dropped_frames = 0;
    pv->extended_frames = 0;

    /* Initialize all filters attached to this job */
    /* TODO: Move to work.c? */
    int filter_count = hb_list_count( job->filters );

    for( i = 0; i < filter_count; i++ )
        hb_filter_object_t * filter = hb_list_item( job->filters, i );
        if( !filter ) continue;
        filter->private_data = filter->init( PIX_FMT_YUV420P,