comparison mpegvideo.c @ 903:22ee74da2cd3 libavcodec

cleanup
adding AVVideoFrame
moving quality, pict_type, key_frame, qscale_table, ... to AVVideoFrame
removing obsolete variables in AVCodecContext
skipping of MBs in B frames correctly
initializing AVCodecContext
picture buffer cleanup
author michaelni
date Wed, 04 Dec 2002 10:04:03 +0000
parents 9d9a415e3dd9
children 8ae1e4c24e91
902:6acc8394960d 903:22ee74da2cd3
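
As context for the comparison below: the commit moves per-frame metadata (quality, pict_type, key_frame, qscale_table, ...) from AVCodecContext onto the new AVVideoFrame, and MPV_common_init() now exposes the codec's current picture to callers as avctx->coded_picture (new line 339 below). The following is a minimal sketch of reading that metadata from the caller's side; it assumes an AVCodecContext built against this revision, and report_coded_picture() is a hypothetical helper, not part of the changeset.

    #include <stdio.h>
    #include "avcodec.h"

    /* Sketch only: dump the per-frame fields this changeset moves onto
     * AVVideoFrame.  Field names are taken from the diff; everything else
     * (the helper itself, the printf formatting) is illustrative. */
    static void report_coded_picture(AVCodecContext *avctx)
    {
        AVVideoFrame *pic = avctx->coded_picture;  /* set in MPV_common_init() */

        if (!pic)
            return;

        /* previously avctx->pict_type, avctx->key_frame and avctx->quality */
        printf("pict_type=%d key_frame=%d quality=%d\n",
               pic->pict_type, pic->key_frame, (int)pic->quality);

        /* the per-macroblock quantizer table now travels with the frame */
        if (pic->qscale_table)
            printf("qscale of first MB: %d\n", pic->qscale_table[0]);
    }
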
44 44
45 void (*draw_edges)(UINT8 *buf, int wrap, int width, int height, int w)= draw_edges_c; 45 void (*draw_edges)(UINT8 *buf, int wrap, int width, int height, int w)= draw_edges_c;
46 static void emulated_edge_mc(MpegEncContext *s, UINT8 *src, int linesize, int block_w, int block_h, 46 static void emulated_edge_mc(MpegEncContext *s, UINT8 *src, int linesize, int block_w, int block_h,
47 int src_x, int src_y, int w, int h); 47 int src_x, int src_y, int w, int h);
48 48
49 #define EDGE_WIDTH 16
50 49
51 /* enable all paranoid tests for rounding, overflows, etc... */ 50 /* enable all paranoid tests for rounding, overflows, etc... */
52 //#define PARANOID 51 //#define PARANOID
53 52
54 //#define DEBUG 53 //#define DEBUG
266 ff_init_scantable(s, &s->intra_v_scantable, ff_alternate_vertical_scan); 265 ff_init_scantable(s, &s->intra_v_scantable, ff_alternate_vertical_scan);
267 266
268 return 0; 267 return 0;
269 } 268 }
270 269
270 /**
271 * allocates various arrays for a Picture structure, except the pixels themselves.
272 * The pixels are allocated/set in the get_buffer()
273 */
274 static int alloc_picture(MpegEncContext *s, Picture *pic){
275 if (s->encoding) {
276 CHECKED_ALLOCZ(pic->mb_var , s->mb_num * sizeof(INT16))
277 CHECKED_ALLOCZ(pic->mc_mb_var, s->mb_num * sizeof(INT16))
278 CHECKED_ALLOCZ(pic->mb_mean , s->mb_num * sizeof(INT8))
279 }
280
281 CHECKED_ALLOCZ(pic->mbskip_table , s->mb_num * sizeof(UINT8)+1) //the +1 is for the slice end check
282 CHECKED_ALLOCZ(pic->qscale_table , s->mb_num * sizeof(UINT8))
283 pic->qstride= s->mb_width;
284
285 return 0;
286 fail: //for the CHECKED_ALLOCZ macro
287 return -1;
288 }
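
The CHECKED_ALLOCZ() calls above explain the trailing return 0; / fail: return -1; pair: the macro is a zero-filled allocation that jumps to the caller's local fail: label when the allocation fails. A rough sketch of the pattern (the exact macro lives elsewhere in mpegvideo.c and may differ in detail):

    #define CHECKED_ALLOCZ(p, size)              \
    {                                            \
        p = av_mallocz(size);                    \
        if (p == NULL) {                         \
            perror("malloc");                    \
            goto fail;  /* caller provides the fail: label */ \
        }                                        \
    }

Note that alloc_picture() only fills in the per-macroblock side tables (mb_var, mc_mb_var and mb_mean for encoding, plus mbskip_table and qscale_table); the pixel planes themselves come from get_buffer(), which is why free_picture() below frees base[] and opaque only when the default allocator was used.
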
289
290 static void free_picture(MpegEncContext *s, Picture *pic){
291 int i;
292
293 av_freep(&pic->mb_var);
294 av_freep(&pic->mc_mb_var);
295 av_freep(&pic->mb_mean);
296 av_freep(&pic->mbskip_table);
297 av_freep(&pic->qscale_table);
298
299 if(s->avctx->get_buffer == avcodec_default_get_buffer){
300 for(i=0; i<4; i++){
301 av_freep(&pic->base[i]);
302 pic->data[i]= NULL;
303 }
304 av_freep(&pic->opaque);
305 }
306 }
307
271 /* init common structure for both encoder and decoder */ 308 /* init common structure for both encoder and decoder */
272 int MPV_common_init(MpegEncContext *s) 309 int MPV_common_init(MpegEncContext *s)
273 { 310 {
274 UINT8 *pict;
275 int y_size, c_size, yc_size, i; 311 int y_size, c_size, yc_size, i;
276 312
277 dsputil_init(&s->dsp, s->avctx->dsp_mask); 313 dsputil_init(&s->dsp, s->avctx->dsp_mask);
278 DCT_common_init(s); 314 DCT_common_init(s);
279 315
280 s->flags= s->avctx->flags; 316 s->flags= s->avctx->flags;
281 317
282 s->mb_width = (s->width + 15) / 16; 318 s->mb_width = (s->width + 15) / 16;
283 s->mb_height = (s->height + 15) / 16; 319 s->mb_height = (s->height + 15) / 16;
284 320
285 /* set default edge pos, will be overridden in decode_header if needed */ 321 /* set default edge pos, will be overridden in decode_header if needed */
286 s->h_edge_pos= s->mb_width*16; 322 s->h_edge_pos= s->mb_width*16;
287 s->v_edge_pos= s->mb_height*16; 323 s->v_edge_pos= s->mb_height*16;
296 s->avctx->fourcc= toupper( s->avctx->fourcc &0xFF) 332 s->avctx->fourcc= toupper( s->avctx->fourcc &0xFF)
297 + (toupper((s->avctx->fourcc>>8 )&0xFF)<<8 ) 333 + (toupper((s->avctx->fourcc>>8 )&0xFF)<<8 )
298 + (toupper((s->avctx->fourcc>>16)&0xFF)<<16) 334 + (toupper((s->avctx->fourcc>>16)&0xFF)<<16)
299 + (toupper((s->avctx->fourcc>>24)&0xFF)<<24); 335 + (toupper((s->avctx->fourcc>>24)&0xFF)<<24);
300 336
301 if(!(s->flags&CODEC_FLAG_DR1)){
302 s->linesize = s->mb_width * 16 + 2 * EDGE_WIDTH;
303 s->uvlinesize = s->mb_width * 8 + EDGE_WIDTH;
304
305 for(i=0;i<3;i++) {
306 int w, h, shift, pict_start;
307 unsigned size;
308
309 w = s->linesize;
310 h = s->mb_height * 16 + 2 * EDGE_WIDTH;
311 shift = (i == 0) ? 0 : 1;
312 size = (s->linesize>>shift) * (h >> shift);
313 pict_start = (s->linesize>>shift) * (EDGE_WIDTH >> shift) + (EDGE_WIDTH >> shift);
314
315 CHECKED_ALLOCZ(pict, size)
316 s->last_picture_base[i] = pict;
317 s->last_picture[i] = pict + pict_start;
318 if(i>0) memset(s->last_picture_base[i], 128, size);
319
320 CHECKED_ALLOCZ(pict, size)
321 s->next_picture_base[i] = pict;
322 s->next_picture[i] = pict + pict_start;
323 if(i>0) memset(s->next_picture_base[i], 128, size);
324
325 if (s->has_b_frames || s->codec_id==CODEC_ID_MPEG4) {
326 /* Note the MPEG4 stuff is here cuz of buggy encoders which dont set the low_delay flag but
327 do low-delay encoding, so we cant allways distinguish b-frame containing streams from low_delay streams */
328 CHECKED_ALLOCZ(pict, size)
329 s->aux_picture_base[i] = pict;
330 s->aux_picture[i] = pict + pict_start;
331 if(i>0) memset(s->aux_picture_base[i], 128, size);
332 }
333 }
334 s->ip_buffer_count= 2;
335 }
336
337 CHECKED_ALLOCZ(s->edge_emu_buffer, (s->width+64)*2*17*2); //(width + edge + align)*interlaced*MBsize*tolerance 337 CHECKED_ALLOCZ(s->edge_emu_buffer, (s->width+64)*2*17*2); //(width + edge + align)*interlaced*MBsize*tolerance
338 338
339 s->avctx->coded_picture= (AVVideoFrame*)&s->current_picture;
340
339 if (s->encoding) { 341 if (s->encoding) {
340 int j;
341 int mv_table_size= (s->mb_width+2)*(s->mb_height+2); 342 int mv_table_size= (s->mb_width+2)*(s->mb_height+2);
342
343 CHECKED_ALLOCZ(s->mb_var , s->mb_num * sizeof(INT16))
344 CHECKED_ALLOCZ(s->mc_mb_var, s->mb_num * sizeof(INT16))
345 CHECKED_ALLOCZ(s->mb_mean , s->mb_num * sizeof(INT8))
346 343
347 /* Allocate MV tables */ 344 /* Allocate MV tables */
348 CHECKED_ALLOCZ(s->p_mv_table , mv_table_size * 2 * sizeof(INT16)) 345 CHECKED_ALLOCZ(s->p_mv_table , mv_table_size * 2 * sizeof(INT16))
349 CHECKED_ALLOCZ(s->b_forw_mv_table , mv_table_size * 2 * sizeof(INT16)) 346 CHECKED_ALLOCZ(s->b_forw_mv_table , mv_table_size * 2 * sizeof(INT16))
350 CHECKED_ALLOCZ(s->b_back_mv_table , mv_table_size * 2 * sizeof(INT16)) 347 CHECKED_ALLOCZ(s->b_back_mv_table , mv_table_size * 2 * sizeof(INT16))
352 CHECKED_ALLOCZ(s->b_bidir_back_mv_table , mv_table_size * 2 * sizeof(INT16)) 349 CHECKED_ALLOCZ(s->b_bidir_back_mv_table , mv_table_size * 2 * sizeof(INT16))
353 CHECKED_ALLOCZ(s->b_direct_forw_mv_table, mv_table_size * 2 * sizeof(INT16)) 350 CHECKED_ALLOCZ(s->b_direct_forw_mv_table, mv_table_size * 2 * sizeof(INT16))
354 CHECKED_ALLOCZ(s->b_direct_back_mv_table, mv_table_size * 2 * sizeof(INT16)) 351 CHECKED_ALLOCZ(s->b_direct_back_mv_table, mv_table_size * 2 * sizeof(INT16))
355 CHECKED_ALLOCZ(s->b_direct_mv_table , mv_table_size * 2 * sizeof(INT16)) 352 CHECKED_ALLOCZ(s->b_direct_mv_table , mv_table_size * 2 * sizeof(INT16))
356 353
357 CHECKED_ALLOCZ(s->me_scratchpad, s->linesize*16*3*sizeof(uint8_t)) 354 //FIXME should be linesize instead of s->width*2 but that isnt known before get_buffer()
355 CHECKED_ALLOCZ(s->me_scratchpad, s->width*2*16*3*sizeof(uint8_t))
358 356
359 CHECKED_ALLOCZ(s->me_map , ME_MAP_SIZE*sizeof(uint32_t)) 357 CHECKED_ALLOCZ(s->me_map , ME_MAP_SIZE*sizeof(uint32_t))
360 CHECKED_ALLOCZ(s->me_score_map, ME_MAP_SIZE*sizeof(uint16_t)) 358 CHECKED_ALLOCZ(s->me_score_map, ME_MAP_SIZE*sizeof(uint16_t))
361
362 if(s->max_b_frames){
363 for(j=0; j<REORDER_BUFFER_SIZE; j++){
364 int i;
365 for(i=0;i<3;i++) {
366 int w, h, shift, size;
367
368 w = s->linesize;
369 h = s->mb_height * 16;
370 shift = (i == 0) ? 0 : 1;
371 size = (w >> shift) * (h >> shift);
372
373 CHECKED_ALLOCZ(pict, size);
374 s->picture_buffer[j][i] = pict;
375 }
376 }
377 }
378 359
379 if(s->codec_id==CODEC_ID_MPEG4){ 360 if(s->codec_id==CODEC_ID_MPEG4){
380 CHECKED_ALLOCZ(s->tex_pb_buffer, PB_BUFFER_SIZE); 361 CHECKED_ALLOCZ(s->tex_pb_buffer, PB_BUFFER_SIZE);
381 CHECKED_ALLOCZ( s->pb2_buffer, PB_BUFFER_SIZE); 362 CHECKED_ALLOCZ( s->pb2_buffer, PB_BUFFER_SIZE);
382 } 363 }
432 s->dc_val[2] = s->dc_val[1] + c_size; 413 s->dc_val[2] = s->dc_val[1] + c_size;
433 for(i=0;i<yc_size;i++) 414 for(i=0;i<yc_size;i++)
434 s->dc_val[0][i] = 1024; 415 s->dc_val[0][i] = 1024;
435 } 416 }
436 417
437 CHECKED_ALLOCZ(s->next_qscale_table , s->mb_num * sizeof(UINT8))
438 CHECKED_ALLOCZ(s->last_qscale_table , s->mb_num * sizeof(UINT8))
439 CHECKED_ALLOCZ(s->aux_qscale_table , s->mb_num * sizeof(UINT8))
440 s->qscale_table= s->next_qscale_table;
441 s->avctx->qstride= s->mb_width;
442
443 /* which mb is an intra block */ 418 /* which mb is an intra block */
444 CHECKED_ALLOCZ(s->mbintra_table, s->mb_num); 419 CHECKED_ALLOCZ(s->mbintra_table, s->mb_num);
445 memset(s->mbintra_table, 1, s->mb_num); 420 memset(s->mbintra_table, 1, s->mb_num);
446 421
447 /* default structure is frame */ 422 /* default structure is frame */
468 /* init common structure for both encoder and decoder */ 443 /* init common structure for both encoder and decoder */
469 void MPV_common_end(MpegEncContext *s) 444 void MPV_common_end(MpegEncContext *s)
470 { 445 {
471 int i; 446 int i;
472 447
448 for(i=0; i<MAX_PICTURE_COUNT; i++){
449 if(s->picture[i].data[0]){
450 s->avctx->release_buffer(s->avctx, (AVVideoFrame*)&s->picture[i]);
451 }
452 }
453
473 av_freep(&s->mb_type); 454 av_freep(&s->mb_type);
474 av_freep(&s->mb_var);
475 av_freep(&s->mc_mb_var);
476 av_freep(&s->mb_mean);
477 av_freep(&s->p_mv_table); 455 av_freep(&s->p_mv_table);
478 av_freep(&s->b_forw_mv_table); 456 av_freep(&s->b_forw_mv_table);
479 av_freep(&s->b_back_mv_table); 457 av_freep(&s->b_back_mv_table);
480 av_freep(&s->b_bidir_forw_mv_table); 458 av_freep(&s->b_bidir_forw_mv_table);
481 av_freep(&s->b_bidir_back_mv_table); 459 av_freep(&s->b_bidir_back_mv_table);
487 av_freep(&s->ac_val[0]); 465 av_freep(&s->ac_val[0]);
488 av_freep(&s->coded_block); 466 av_freep(&s->coded_block);
489 av_freep(&s->mbintra_table); 467 av_freep(&s->mbintra_table);
490 av_freep(&s->cbp_table); 468 av_freep(&s->cbp_table);
491 av_freep(&s->pred_dir_table); 469 av_freep(&s->pred_dir_table);
492 av_freep(&s->next_qscale_table);
493 av_freep(&s->last_qscale_table);
494 av_freep(&s->aux_qscale_table);
495 av_freep(&s->me_scratchpad); 470 av_freep(&s->me_scratchpad);
496 av_freep(&s->me_map); 471 av_freep(&s->me_map);
497 av_freep(&s->me_score_map); 472 av_freep(&s->me_score_map);
498 473
499 av_freep(&s->mbskip_table); 474 av_freep(&s->mbskip_table);
505 av_freep(&s->field_mv_table); 480 av_freep(&s->field_mv_table);
506 av_freep(&s->field_select_table); 481 av_freep(&s->field_select_table);
507 av_freep(&s->avctx->stats_out); 482 av_freep(&s->avctx->stats_out);
508 av_freep(&s->ac_stats); 483 av_freep(&s->ac_stats);
509 av_freep(&s->error_status_table); 484 av_freep(&s->error_status_table);
510 485
511 for(i=0;i<3;i++) { 486 for(i=0; i<MAX_PICTURE_COUNT; i++){
512 int j; 487 free_picture(s, &s->picture[i]);
513 if(!(s->flags&CODEC_FLAG_DR1)){
514 av_freep(&s->last_picture_base[i]);
515 av_freep(&s->next_picture_base[i]);
516 av_freep(&s->aux_picture_base[i]);
517 }
518 s->last_picture_base[i]=
519 s->next_picture_base[i]=
520 s->aux_picture_base [i] = NULL;
521 s->last_picture[i]=
522 s->next_picture[i]=
523 s->aux_picture [i] = NULL;
524
525 for(j=0; j<REORDER_BUFFER_SIZE; j++){
526 av_freep(&s->picture_buffer[j][i]);
527 }
528 } 488 }
529 s->context_initialized = 0; 489 s->context_initialized = 0;
530 } 490 }
531 491
532 /* init video encoder */ 492 /* init video encoder */
811 } 771 }
812 772
813 /* generic function for encode/decode called before a frame is coded/decoded */ 773 /* generic function for encode/decode called before a frame is coded/decoded */
814 int MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx) 774 int MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
815 { 775 {
816 int i; 776 int i, r;
817 UINT8 *tmp; 777 AVVideoFrame *pic;
818 778
819 s->mb_skiped = 0; 779 s->mb_skiped = 0;
820 avctx->mbskip_table= s->mbskip_table; 780
781 /* mark&release old frames */
782 if (s->pict_type != B_TYPE && s->last_picture.data[0]) {
783 Picture *pic= NULL;
784 for(i=0; i<MAX_PICTURE_COUNT; i++){
785 if(s->picture[i].data[0] == s->last_picture.data[0]){
786 // s->picture[i].reference=0;
787 avctx->release_buffer(avctx, (AVVideoFrame*)&s->picture[i]);
788 break;
789 }
790 }
791 assert(i<MAX_PICTURE_COUNT);
792
793 /* release forgotten pictures */
794 /* if(mpeg124/h263) */
795 if(!s->encoding){
796 for(i=0; i<MAX_PICTURE_COUNT; i++){
797 if(s->picture[i].data[0] && s->picture[i].data[0] != s->next_picture.data[0] && s->picture[i].reference){
798 fprintf(stderr, "releasing zombie picture\n");
799 avctx->release_buffer(avctx, (AVVideoFrame*)&s->picture[i]);
800 }
801 }
802 }
803 }
804
805 if(!s->encoding){
806 /* find unused Picture */
807 for(i=0; i<MAX_PICTURE_COUNT; i++){
808 if(s->picture[i].data[0]==NULL) break;
809 }
810 assert(i<MAX_PICTURE_COUNT);
811
812 pic= (AVVideoFrame*)&s->picture[i];
813 pic->reference= s->pict_type != B_TYPE;
814 pic->coded_picture_number= s->current_picture.coded_picture_number+1;
815
816 r= avctx->get_buffer(avctx, pic);
817
818 if(r<0 || (s->linesize && (s->linesize != pic->linesize[0] || s->uvlinesize != pic->linesize[1]))){
819 fprintf(stderr, "get_buffer() failed (stride changed), bye bye\n");
820 return -1;
821 }
822
823 s->linesize = pic->linesize[0];
824 s->uvlinesize= pic->linesize[1];
825
826 if(pic->qscale_table==NULL)
827 alloc_picture(s, (Picture*)pic);
828
829 s->current_picture= s->picture[i];
830 }
821 831
822 s->hurry_up= s->avctx->hurry_up; 832 s->hurry_up= s->avctx->hurry_up;
823 s->error_resilience= avctx->error_resilience; 833 s->error_resilience= avctx->error_resilience;
824 834
825 if(avctx->flags&CODEC_FLAG_DR1){ 835 if (s->pict_type != B_TYPE) {
826 if(avctx->get_buffer_callback(avctx, s->width, s->height, s->pict_type) < 0){ 836 s->last_picture= s->next_picture;
827 fprintf(stderr, "get_buffer() failed\n"); 837 s->next_picture= s->current_picture;
828 return -1; 838 }
829 } 839
830
831 s->linesize = avctx->dr_stride;
832 s->uvlinesize= avctx->dr_uvstride;
833 s->ip_buffer_count= avctx->dr_ip_buffer_count;
834 }
835 avctx->dr_ip_buffer_count= s->ip_buffer_count;
836
837 if (s->pict_type == B_TYPE) {
838 for(i=0;i<3;i++) {
839 if(avctx->flags&CODEC_FLAG_DR1)
840 s->aux_picture[i]= avctx->dr_buffer[i];
841
842 //FIXME the following should never be needed, the decoder should drop b frames if no reference is available
843 if(s->next_picture[i]==NULL)
844 s->next_picture[i]= s->aux_picture[i];
845 if(s->last_picture[i]==NULL)
846 s->last_picture[i]= s->next_picture[i];
847
848 s->current_picture[i] = s->aux_picture[i];
849 }
850 s->avctx->display_qscale_table=
851 s->avctx->current_qscale_table=
852 s->qscale_table= s->aux_qscale_table;
853 } else {
854 for(i=0;i<3;i++) {
855 /* swap next and last */
856 if(avctx->flags&CODEC_FLAG_DR1)
857 tmp= avctx->dr_buffer[i];
858 else
859 tmp = s->last_picture[i];
860
861 s->last_picture[i] = s->next_picture[i];
862 s->next_picture[i] = tmp;
863 s->current_picture[i] = tmp;
864
865 if(s->last_picture[i]==NULL)
866 s->last_picture[i]= s->next_picture[i];
867
868 s->last_dr_opaque= s->next_dr_opaque;
869 s->next_dr_opaque= avctx->dr_opaque_frame;
870
871 if(s->has_b_frames && s->last_dr_opaque && s->codec_id!=CODEC_ID_SVQ1)
872 avctx->dr_opaque_frame= s->last_dr_opaque;
873 else
874 avctx->dr_opaque_frame= s->next_dr_opaque;
875 }
876 s->avctx->current_qscale_table= s->qscale_table = s->last_qscale_table;
877 s->avctx->display_qscale_table= s->last_qscale_table = s->next_qscale_table;
878 s->next_qscale_table= s->qscale_table;
879 }
880 /* set dequantizer, we cant do it during init as it might change for mpeg4 840 /* set dequantizer, we cant do it during init as it might change for mpeg4
881 and we cant do it in the header decode as init isnt called for mpeg4 there yet */ 841 and we cant do it in the header decode as init isnt called for mpeg4 there yet */
882 if(s->out_format == FMT_H263){ 842 if(s->out_format == FMT_H263){
883 if(s->mpeg_quant) 843 if(s->mpeg_quant)
884 s->dct_unquantize = s->dct_unquantize_mpeg2; 844 s->dct_unquantize = s->dct_unquantize_mpeg2;
891 } 851 }
892 852
893 /* generic function for encode/decode called after a frame has been coded/decoded */ 853 /* generic function for encode/decode called after a frame has been coded/decoded */
894 void MPV_frame_end(MpegEncContext *s) 854 void MPV_frame_end(MpegEncContext *s)
895 { 855 {
896 s->avctx->key_frame = (s->pict_type == I_TYPE); 856 int i;
897 s->avctx->pict_type = s->pict_type;
898 857
899 /* draw edge for correct motion prediction if outside */ 858 /* draw edge for correct motion prediction if outside */
900 if (s->pict_type != B_TYPE && !s->intra_only && !(s->flags&CODEC_FLAG_EMU_EDGE)) { 859 if(s->codec_id!=CODEC_ID_SVQ1){
901 draw_edges(s->current_picture[0], s->linesize , s->h_edge_pos , s->v_edge_pos , EDGE_WIDTH ); 860 if (s->pict_type != B_TYPE && !s->intra_only && !(s->flags&CODEC_FLAG_EMU_EDGE)) {
902 draw_edges(s->current_picture[1], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2); 861 draw_edges(s->current_picture.data[0], s->linesize , s->h_edge_pos , s->v_edge_pos , EDGE_WIDTH );
903 draw_edges(s->current_picture[2], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2); 862 draw_edges(s->current_picture.data[1], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2);
863 draw_edges(s->current_picture.data[2], s->uvlinesize, s->h_edge_pos>>1, s->v_edge_pos>>1, EDGE_WIDTH/2);
864 }
904 } 865 }
905 emms_c(); 866 emms_c();
906 867
907 s->last_pict_type = s->pict_type; 868 s->last_pict_type = s->pict_type;
908 if(s->pict_type!=B_TYPE){ 869 if(s->pict_type!=B_TYPE){
909 s->last_non_b_pict_type= s->pict_type; 870 s->last_non_b_pict_type= s->pict_type;
910 s->num_available_buffers++; 871 s->num_available_buffers++;
911 if(s->num_available_buffers>2) s->num_available_buffers= 2; 872 if(s->num_available_buffers>2) s->num_available_buffers= 2;
912 } 873 }
913 } 874
914 875 s->current_picture.quality= s->qscale; //FIXME get average of qscale_table
915 /* reorder input for encoding */ 876 s->current_picture.pict_type= s->pict_type;
916 void reorder_input(MpegEncContext *s, AVPicture *pict) 877 s->current_picture.key_frame= s->pict_type == I_TYPE;
917 { 878
918 int i, j, index; 879 /* copy back current_picture variables */
880 for(i=0; i<MAX_PICTURE_COUNT; i++){
881 if(s->picture[i].data[0] == s->current_picture.data[0]){
882 s->picture[i]= s->current_picture;
883 break;
884 }
885 }
886 assert(i<MAX_PICTURE_COUNT);
887
888 /* release non reference frames */
889 for(i=0; i<MAX_PICTURE_COUNT; i++){
890 if(s->picture[i].data[0] && !s->picture[i].reference)
891 s->avctx->release_buffer(s->avctx, (AVVideoFrame*)&s->picture[i]);
892 }
893 }
894
895 static int load_input_picture(MpegEncContext *s, AVVideoFrame *pic_arg){
896 AVVideoFrame *pic;
897 int i,r;
898 const int encoding_delay= s->max_b_frames;
899
900 /* find unused Picture */
901 for(i=0; i<MAX_PICTURE_COUNT; i++){
902 if(s->picture[i].data[0]==NULL) break;
903 }
904 assert(i<MAX_PICTURE_COUNT);
905
906 pic= (AVVideoFrame*)&s->picture[i];
907 pic->reference= 1;
908
909 // assert(avctx->get_buffer == default_get_buffer || avctx->get_buffer==NULL);
910 r= s->avctx->get_buffer(s->avctx, pic);
911
912 if(r<0 || (s->linesize && (s->linesize != pic->linesize[0] || s->uvlinesize != pic->linesize[1]))){
913 fprintf(stderr, "get_buffer() failed (stride changed), bye bye\n");
914 return -1;
915 }
916
917 assert(s->linesize==0 || s->linesize ==pic->linesize[0]);
918 assert(s->uvlinesize==0 || s->uvlinesize==pic->linesize[1]);
919 assert(pic->linesize[1] == pic->linesize[2]);
920 s->linesize = pic->linesize[0];
921 s->uvlinesize= pic->linesize[1];
922
923 if(pic->qscale_table==NULL)
924 alloc_picture(s, (Picture*)pic);
925
926 // assert(s->input_picture[0]==NULL || s->input_picture[0]->data[0]==NULL);
927
928 if(s->input_picture[encoding_delay])
929 pic->display_picture_number= s->input_picture[encoding_delay]->display_picture_number + 1;
930 //printf("dpn2:%d\n", pic->display_picture_number);
931
932 /* shift buffer entries */
933 for(i=1; i<MAX_PICTURE_COUNT /*s->encoding_delay+1*/; i++)
934 s->input_picture[i-1]= s->input_picture[i];
935
936 s->input_picture[encoding_delay]= (Picture*)pic;
937 pic->pict_type= pic_arg->pict_type;
938 pic->quality= pic_arg->quality;
939
940 if( pic->data[0] == pic_arg->data[0]
941 && pic->data[1] == pic_arg->data[1]
942 && pic->data[2] == pic_arg->data[2]){
943 // empty
944 }else{
945 int h_chroma_shift, v_chroma_shift;
946
947 avcodec_get_chroma_sub_sample(s->avctx->pix_fmt, &h_chroma_shift, &v_chroma_shift);
948
949 for(i=0; i<3; i++){
950 int src_stride= pic_arg->linesize[i];
951 int dst_stride= i ? s->uvlinesize : s->linesize;
952 int h_shift= i ? h_chroma_shift : 0;
953 int v_shift= i ? v_chroma_shift : 0;
954 int w= s->width >>h_shift;
955 int h= s->height>>v_shift;
956 uint8_t *src= pic_arg->data[i];
957 uint8_t *dst= pic->data[i] + 16;
919 958
920 if(s->max_b_frames > FF_MAX_B_FRAMES) s->max_b_frames= FF_MAX_B_FRAMES; 959 if(src_stride==dst_stride)
921 960 memcpy(dst, src, src_stride*h);
922 // delay= s->max_b_frames+1; (or 0 if no b frames cuz decoder diff) 961 else{
923 962 while(h--){
924 for(j=0; j<REORDER_BUFFER_SIZE-1; j++){ 963 memcpy(dst, src, w);
925 s->coded_order[j]= s->coded_order[j+1]; 964 dst += dst_stride;
926 } 965 src += src_stride;
927 s->coded_order[j].picture[0]= s->coded_order[j].picture[1]= s->coded_order[j].picture[2]= NULL; //catch uninitalized buffers 966 }
928 s->coded_order[j].pict_type=0; 967 }
929 968 }
930 switch(s->input_pict_type){ 969 }
931 default: 970
932 case I_TYPE: 971 return 0;
933 case S_TYPE: 972 }
934 case P_TYPE: 973
935 index= s->max_b_frames - s->b_frames_since_non_b; 974 static void select_input_picture(MpegEncContext *s){
936 s->b_frames_since_non_b=0; 975 int i;
937 break; 976 const int encoding_delay= s->max_b_frames;
938 case B_TYPE: 977 int coded_pic_num=0;
939 index= s->max_b_frames + 1; 978
940 s->b_frames_since_non_b++; 979 if(s->reordered_input_picture[0])
941 break; 980 coded_pic_num= s->reordered_input_picture[0]->coded_picture_number + 1;
942 } 981 //printf("cpn:%d\n", coded_pic_num);
943 //printf("index:%d type:%d strides: %d %d\n", index, s->input_pict_type, pict->linesize[0], s->linesize); 982 for(i=1; i<MAX_PICTURE_COUNT; i++)
944 if( (index==0 || (s->flags&CODEC_FLAG_INPUT_PRESERVED)) 983 s->reordered_input_picture[i-1]= s->reordered_input_picture[i];
945 && pict->linesize[0] == s->linesize 984 s->reordered_input_picture[MAX_PICTURE_COUNT-1]= NULL;
946 && pict->linesize[1] == s->uvlinesize 985
947 && pict->linesize[2] == s->uvlinesize){ 986 /* set next picture types & ordering */
948 //printf("ptr\n"); 987 if(s->reordered_input_picture[0]==NULL && s->input_picture[0]){
949 for(i=0; i<3; i++){ 988 if(/*s->picture_in_gop_number >= s->gop_size ||*/ s->next_picture.data[0]==NULL || s->intra_only){
950 s->coded_order[index].picture[i]= pict->data[i]; 989 s->reordered_input_picture[0]= s->input_picture[0];
951 } 990 s->reordered_input_picture[0]->pict_type= I_TYPE;
991 s->reordered_input_picture[0]->coded_picture_number= coded_pic_num;
992 }else{
993 s->reordered_input_picture[0]= s->input_picture[s->max_b_frames];
994 if(s->picture_in_gop_number + s->max_b_frames >= s->gop_size)
995 s->reordered_input_picture[0]->pict_type= I_TYPE;
996 else
997 s->reordered_input_picture[0]->pict_type= P_TYPE;
998 s->reordered_input_picture[0]->coded_picture_number= coded_pic_num;
999 for(i=0; i<s->max_b_frames; i++){
1000 coded_pic_num++;
1001 s->reordered_input_picture[i+1]= s->input_picture[i];
1002 s->reordered_input_picture[i+1]->pict_type= B_TYPE;
1003 s->reordered_input_picture[i+1]->coded_picture_number= coded_pic_num;
1004 }
1005 }
1006 }
1007
1008 if(s->reordered_input_picture[0]){
1009 if(s->reordered_input_picture[0]->pict_type==B_TYPE){
1010 s->reordered_input_picture[0]->reference=0;
1011 }
1012 s->current_picture= *s->reordered_input_picture[0];
1013 s->new_picture= s->current_picture;
1014 s->new_picture.data[0]+=16;
1015 s->new_picture.data[1]+=16;
1016 s->new_picture.data[2]+=16;
1017
1018 s->picture_number= s->new_picture.display_picture_number;
1019 //printf("dpn:%d\n", s->picture_number);
952 }else{ 1020 }else{
953 //printf("copy\n"); 1021 memset(&s->new_picture, 0, sizeof(Picture));
954 for(i=0; i<3; i++){
955 uint8_t *src = pict->data[i];
956 uint8_t *dest;
957 int src_wrap = pict->linesize[i];
958 int dest_wrap = s->linesize;
959 int w = s->width;
960 int h = s->height;
961
962 if(index==0) dest= s->last_picture[i]+16; //is current_picture indeed but the switch hapens after reordering
963 else dest= s->picture_buffer[s->picture_buffer_index][i];
964
965 if (i >= 1) {
966 dest_wrap >>= 1;
967 w >>= 1;
968 h >>= 1;
969 }
970
971 s->coded_order[index].picture[i]= dest;
972 for(j=0;j<h;j++) {
973 memcpy(dest, src, w);
974 dest += dest_wrap;
975 src += src_wrap;
976 }
977 }
978 if(index!=0){
979 s->picture_buffer_index++;
980 if(s->picture_buffer_index >= REORDER_BUFFER_SIZE) s->picture_buffer_index=0;
981 }
982 }
983 s->coded_order[index].pict_type = s->input_pict_type;
984 s->coded_order[index].qscale = s->input_qscale;
985 s->coded_order[index].force_type= s->force_input_type;
986 s->coded_order[index].picture_in_gop_number= s->input_picture_in_gop_number;
987 s->coded_order[index].picture_number= s->input_picture_number;
988
989 for(i=0; i<3; i++){
990 s->new_picture[i]= s->coded_order[0].picture[i];
991 } 1022 }
992 } 1023 }
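
To make the new reordering concrete, a small worked trace (assuming max_b_frames = 2 and ignoring GOP boundaries and start-up details): load_input_picture() keeps arriving frames in input_picture[] with the newest at index max_b_frames, and select_input_picture() then emits the newest buffered frame as the next P (or I) reference followed by the older buffered frames as B pictures, so display order and coded order relate roughly as:

    display order: 0    1    2    3    4    5    6   ...
    coded order:   0(I) 3(P) 1(B) 2(B) 6(P) 4(B) 5(B) ...

B pictures get reference = 0 in select_input_picture(), so their buffers are returned by the "release non reference frames" loop at the end of MPV_frame_end().
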
993 1024
994 int MPV_encode_picture(AVCodecContext *avctx, 1025 int MPV_encode_picture(AVCodecContext *avctx,
995 unsigned char *buf, int buf_size, void *data) 1026 unsigned char *buf, int buf_size, void *data)
996 { 1027 {
997 MpegEncContext *s = avctx->priv_data; 1028 MpegEncContext *s = avctx->priv_data;
998 AVPicture *pict = data; 1029 AVVideoFrame *pic_arg = data;
999
1000 s->input_qscale = avctx->quality;
1001 1030
1002 init_put_bits(&s->pb, buf, buf_size, NULL, NULL); 1031 init_put_bits(&s->pb, buf, buf_size, NULL, NULL);
1003 1032
1004 if(avctx->force_type){ 1033 s->picture_in_gop_number++;
1005 s->input_pict_type= 1034
1006 s->force_input_type= avctx->force_type; 1035 load_input_picture(s, pic_arg);
1007 }else if(s->flags&CODEC_FLAG_PASS2){ 1036
1008 s->input_pict_type= 1037 select_input_picture(s);
1009 s->force_input_type= s->rc_context.entry[s->input_picture_number].new_pict_type;
1010 }else{
1011 s->force_input_type=0;
1012 if (!s->intra_only) {
1013 /* first picture of GOP is intra */
1014 if (s->input_picture_in_gop_number % s->gop_size==0){
1015 s->input_pict_type = I_TYPE;
1016 }else if(s->max_b_frames==0){
1017 s->input_pict_type = P_TYPE;
1018 }else{
1019 if(s->b_frames_since_non_b < s->max_b_frames) //FIXME more IQ
1020 s->input_pict_type = B_TYPE;
1021 else
1022 s->input_pict_type = P_TYPE;
1023 }
1024 } else {
1025 s->input_pict_type = I_TYPE;
1026 }
1027 }
1028
1029 if(s->input_pict_type==I_TYPE)
1030 s->input_picture_in_gop_number=0;
1031
1032 reorder_input(s, pict);
1033 1038
1034 /* output? */ 1039 /* output? */
1035 if(s->coded_order[0].picture[0]){ 1040 if(s->new_picture.data[0]){
1036 1041
1037 s->pict_type= s->coded_order[0].pict_type; 1042 s->pict_type= s->new_picture.pict_type;
1038 if (s->fixed_qscale) /* the ratecontrol needs the last qscale so we dont touch it for CBR */ 1043 if (s->fixed_qscale){ /* the ratecontrol needs the last qscale so we dont touch it for CBR */
1039 s->qscale= s->coded_order[0].qscale; 1044 s->qscale= (int)(s->new_picture.quality+0.5);
1040 s->force_type= s->coded_order[0].force_type; 1045 assert(s->qscale);
1041 s->picture_in_gop_number= s->coded_order[0].picture_in_gop_number; 1046 }
1042 s->picture_number= s->coded_order[0].picture_number; 1047 //emms_c();
1043 1048 //printf("qs:%f %f %d\n", s->new_picture.quality, s->current_picture.quality, s->qscale);
1044 MPV_frame_start(s, avctx); 1049 MPV_frame_start(s, avctx);
1045 1050
1046 encode_picture(s, s->picture_number); 1051 encode_picture(s, s->picture_number);
1047 1052
1048 avctx->real_pict_num = s->picture_number; 1053 avctx->real_pict_num = s->picture_number;
1057 1062
1058 MPV_frame_end(s); 1063 MPV_frame_end(s);
1059 1064
1060 if (s->out_format == FMT_MJPEG) 1065 if (s->out_format == FMT_MJPEG)
1061 mjpeg_picture_trailer(s); 1066 mjpeg_picture_trailer(s);
1062
1063 if(!s->fixed_qscale)
1064 avctx->quality = s->qscale;
1065 1067
1066 if(s->flags&CODEC_FLAG_PASS1) 1068 if(s->flags&CODEC_FLAG_PASS1)
1067 ff_write_pass1_stats(s); 1069 ff_write_pass1_stats(s);
1068
1069 } 1070 }
1070 1071
1071 s->input_picture_number++; 1072 s->input_picture_number++;
1072 s->input_picture_in_gop_number++;
1073 1073
1074 flush_put_bits(&s->pb); 1074 flush_put_bits(&s->pb);
1075 s->frame_bits = (pbBufPtr(&s->pb) - s->pb.buf) * 8; 1075 s->frame_bits = (pbBufPtr(&s->pb) - s->pb.buf) * 8;
1076 1076
1077 s->total_bits += s->frame_bits; 1077 s->total_bits += s->frame_bits;
1086 get_psnr(pict->data, s->current_picture, 1086 get_psnr(pict->data, s->current_picture,
1087 pict->linesize, s->linesize, avctx); 1087 pict->linesize, s->linesize, avctx);
1088 fprintf(f, "%7d, %7d, %2.4f\n", pbBufPtr(&s->pb) - s->pb.buf, s->qscale, avctx->psnr_y); 1088 fprintf(f, "%7d, %7d, %2.4f\n", pbBufPtr(&s->pb) - s->pb.buf, s->qscale, avctx->psnr_y);
1089 } 1089 }
1090 #endif 1090 #endif
1091 1091 #if 0
1092 if (avctx->get_psnr) { 1092 if (avctx->get_psnr) {
1093 /* At this point pict->data should have the original frame */ 1093 /* At this point pict->data should have the original frame */
1094 /* and s->current_picture should have the coded/decoded frame */ 1094 /* and s->current_picture should have the coded/decoded frame */
1095 get_psnr(pict->data, s->current_picture, 1095 get_psnr(pict->data, s->current_picture.data,
1096 pict->linesize, s->linesize, avctx); 1096 pict->linesize, s->linesize, avctx);
1097 // printf("%f\n", avctx->psnr_y); 1097 // printf("%f\n", avctx->psnr_y);
1098 } 1098 }
1099 #endif
1100
1099 return pbBufPtr(&s->pb) - s->pb.buf; 1101 return pbBufPtr(&s->pb) - s->pb.buf;
1100 } 1102 }
1101 1103
1102 static inline void gmc1_motion(MpegEncContext *s, 1104 static inline void gmc1_motion(MpegEncContext *s,
1103 UINT8 *dest_y, UINT8 *dest_cb, UINT8 *dest_cr, 1105 UINT8 *dest_y, UINT8 *dest_cb, UINT8 *dest_cr,
1755 const int mb_xy = s->mb_y * s->mb_width + s->mb_x; 1757 const int mb_xy = s->mb_y * s->mb_width + s->mb_x;
1756 1758
1757 mb_x = s->mb_x; 1759 mb_x = s->mb_x;
1758 mb_y = s->mb_y; 1760 mb_y = s->mb_y;
1759 1761
1760 s->qscale_table[mb_xy]= s->qscale; 1762 s->current_picture.qscale_table[mb_xy]= s->qscale;
1761 1763
1762 /* update DC predictors for P macroblocks */ 1764 /* update DC predictors for P macroblocks */
1763 if (!s->mb_intra) { 1765 if (!s->mb_intra) {
1764 if (s->h263_pred || s->h263_aic) { 1766 if (s->h263_pred || s->h263_aic) {
1765 if(s->mbintra_table[mb_xy]) 1767 if(s->mbintra_table[mb_xy])
1821 UINT8 *dest_y, *dest_cb, *dest_cr; 1823 UINT8 *dest_y, *dest_cb, *dest_cr;
1822 int dct_linesize, dct_offset; 1824 int dct_linesize, dct_offset;
1823 op_pixels_func (*op_pix)[4]; 1825 op_pixels_func (*op_pix)[4];
1824 qpel_mc_func (*op_qpix)[16]; 1826 qpel_mc_func (*op_qpix)[16];
1825 1827
1826 /* avoid copy if macroblock skipped in last frame too 1828 /* avoid copy if macroblock skipped in last frame too */
1827 dont touch it for B-frames as they need the skip info from the next p-frame */
1828 if (s->pict_type != B_TYPE) { 1829 if (s->pict_type != B_TYPE) {
1830 s->current_picture.mbskip_table[mb_xy]= s->mb_skiped;
1831 }
1832
1833 /* skip only during decoding as we might trash the buffers during encoding a bit */
1834 if(!s->encoding){
1829 UINT8 *mbskip_ptr = &s->mbskip_table[mb_xy]; 1835 UINT8 *mbskip_ptr = &s->mbskip_table[mb_xy];
1836 const int age= s->current_picture.age;
1837
1838 assert(age);
1839
1830 if (s->mb_skiped) { 1840 if (s->mb_skiped) {
1831 s->mb_skiped = 0; 1841 s->mb_skiped= 0;
1832 1842 assert(s->pict_type!=I_TYPE);
1843
1833 (*mbskip_ptr) ++; /* indicate that this time we skipped it */ 1844 (*mbskip_ptr) ++; /* indicate that this time we skipped it */
1834 if(*mbskip_ptr >99) *mbskip_ptr= 99; 1845 if(*mbskip_ptr >99) *mbskip_ptr= 99;
1835 1846
1836 /* if previous was skipped too, then nothing to do ! 1847 /* if previous was skipped too, then nothing to do ! */
1837 skip only during decoding as we might trash the buffers during encoding a bit */ 1848 if (*mbskip_ptr >= age){
1838 if (*mbskip_ptr >= s->ip_buffer_count && !s->encoding) 1849 //if(s->pict_type!=B_TYPE && s->mb_x==0) printf("\n");
1839 return; 1850 //if(s->pict_type!=B_TYPE) printf("%d%d ", *mbskip_ptr, age);
1851 if(s->pict_type!=B_TYPE) return;
1852 if(s->avctx->draw_horiz_band==NULL && *mbskip_ptr > age) return;
1853 /* we dont draw complete frames here so we cant skip */
1854 }
1840 } else { 1855 } else {
1841 *mbskip_ptr = 0; /* not skipped */ 1856 *mbskip_ptr = 0; /* not skipped */
1842 } 1857 }
1843 } 1858 }else
1859 s->mb_skiped= 0;
1844 1860
1845 if(s->pict_type==B_TYPE && s->avctx->draw_horiz_band){ 1861 if(s->pict_type==B_TYPE && s->avctx->draw_horiz_band){
1846 dest_y = s->current_picture [0] + mb_x * 16; 1862 dest_y = s->current_picture.data[0] + mb_x * 16;
1847 dest_cb = s->current_picture[1] + mb_x * 8; 1863 dest_cb = s->current_picture.data[1] + mb_x * 8;
1848 dest_cr = s->current_picture[2] + mb_x * 8; 1864 dest_cr = s->current_picture.data[2] + mb_x * 8;
1849 }else{ 1865 }else{
1850 dest_y = s->current_picture [0] + (mb_y * 16* s->linesize ) + mb_x * 16; 1866 dest_y = s->current_picture.data[0] + (mb_y * 16* s->linesize ) + mb_x * 16;
1851 dest_cb = s->current_picture[1] + (mb_y * 8 * s->uvlinesize) + mb_x * 8; 1867 dest_cb = s->current_picture.data[1] + (mb_y * 8 * s->uvlinesize) + mb_x * 8;
1852 dest_cr = s->current_picture[2] + (mb_y * 8 * s->uvlinesize) + mb_x * 8; 1868 dest_cr = s->current_picture.data[2] + (mb_y * 8 * s->uvlinesize) + mb_x * 8;
1853 } 1869 }
1854 1870
1855 if (s->interlaced_dct) { 1871 if (s->interlaced_dct) {
1856 dct_linesize = s->linesize * 2; 1872 dct_linesize = s->linesize * 2;
1857 dct_offset = s->linesize; 1873 dct_offset = s->linesize;
1871 op_pix = s->dsp.put_no_rnd_pixels_tab; 1887 op_pix = s->dsp.put_no_rnd_pixels_tab;
1872 op_qpix= s->dsp.put_no_rnd_qpel_pixels_tab; 1888 op_qpix= s->dsp.put_no_rnd_qpel_pixels_tab;
1873 } 1889 }
1874 1890
1875 if (s->mv_dir & MV_DIR_FORWARD) { 1891 if (s->mv_dir & MV_DIR_FORWARD) {
1876 MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture, op_pix, op_qpix); 1892 MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.data, op_pix, op_qpix);
1877 op_pix = s->dsp.avg_pixels_tab; 1893 op_pix = s->dsp.avg_pixels_tab;
1878 op_qpix= s->dsp.avg_qpel_pixels_tab; 1894 op_qpix= s->dsp.avg_qpel_pixels_tab;
1879 } 1895 }
1880 if (s->mv_dir & MV_DIR_BACKWARD) { 1896 if (s->mv_dir & MV_DIR_BACKWARD) {
1881 MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture, op_pix, op_qpix); 1897 MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.data, op_pix, op_qpix);
1882 } 1898 }
1883 } 1899 }
1884 1900
1885 /* skip dequant / idct if we are really late ;) */ 1901 /* skip dequant / idct if we are really late ;) */
1886 if(s->hurry_up>1) return; 1902 if(s->hurry_up>1) return;
2152 offset = 0; 2168 offset = 0;
2153 else 2169 else
2154 offset = y * s->linesize; 2170 offset = y * s->linesize;
2155 2171
2156 if(s->pict_type==B_TYPE || (!s->has_b_frames)){ 2172 if(s->pict_type==B_TYPE || (!s->has_b_frames)){
2157 src_ptr[0] = s->current_picture[0] + offset; 2173 src_ptr[0] = s->current_picture.data[0] + offset;
2158 src_ptr[1] = s->current_picture[1] + (offset >> 2); 2174 src_ptr[1] = s->current_picture.data[1] + (offset >> 2);
2159 src_ptr[2] = s->current_picture[2] + (offset >> 2); 2175 src_ptr[2] = s->current_picture.data[2] + (offset >> 2);
2160 } else { 2176 } else {
2161 src_ptr[0] = s->last_picture[0] + offset; 2177 src_ptr[0] = s->last_picture.data[0] + offset;
2162 src_ptr[1] = s->last_picture[1] + (offset >> 2); 2178 src_ptr[1] = s->last_picture.data[1] + (offset >> 2);
2163 src_ptr[2] = s->last_picture[2] + (offset >> 2); 2179 src_ptr[2] = s->last_picture.data[2] + (offset >> 2);
2164 } 2180 }
2165 emms_c(); 2181 emms_c();
2166 2182
2167 s->avctx->draw_horiz_band(s->avctx, src_ptr, s->linesize, 2183 s->avctx->draw_horiz_band(s->avctx, src_ptr, s->linesize,
2168 y, s->width, h); 2184 y, s->width, h);
2178 int dct_offset = s->linesize*8; //default for progressive frames 2194 int dct_offset = s->linesize*8; //default for progressive frames
2179 2195
2180 for(i=0; i<6; i++) skip_dct[i]=0; 2196 for(i=0; i<6; i++) skip_dct[i]=0;
2181 2197
2182 if(s->adaptive_quant){ 2198 if(s->adaptive_quant){
2183 s->dquant= s->qscale_table[mb_x + mb_y*s->mb_width] - s->qscale; 2199 s->dquant= s->current_picture.qscale_table[mb_x + mb_y*s->mb_width] - s->qscale;
2184 2200
2185 if(s->out_format==FMT_H263){ 2201 if(s->out_format==FMT_H263){
2186 if (s->dquant> 2) s->dquant= 2; 2202 if (s->dquant> 2) s->dquant= 2;
2187 else if(s->dquant<-2) s->dquant=-2; 2203 else if(s->dquant<-2) s->dquant=-2;
2188 } 2204 }
2204 UINT8 *ptr; 2220 UINT8 *ptr;
2205 int wrap_y; 2221 int wrap_y;
2206 int emu=0; 2222 int emu=0;
2207 2223
2208 wrap_y = s->linesize; 2224 wrap_y = s->linesize;
2209 ptr = s->new_picture[0] + (mb_y * 16 * wrap_y) + mb_x * 16; 2225 ptr = s->new_picture.data[0] + (mb_y * 16 * wrap_y) + mb_x * 16;
2210 2226
2211 if(mb_x*16+16 > s->width || mb_y*16+16 > s->height){ 2227 if(mb_x*16+16 > s->width || mb_y*16+16 > s->height){
2212 emulated_edge_mc(s, ptr, wrap_y, 16, 16, mb_x*16, mb_y*16, s->width, s->height); 2228 emulated_edge_mc(s, ptr, wrap_y, 16, 16, mb_x*16, mb_y*16, s->width, s->height);
2213 ptr= s->edge_emu_buffer; 2229 ptr= s->edge_emu_buffer;
2214 emu=1; 2230 emu=1;
2237 if(s->flags&CODEC_FLAG_GRAY){ 2253 if(s->flags&CODEC_FLAG_GRAY){
2238 skip_dct[4]= 1; 2254 skip_dct[4]= 1;
2239 skip_dct[5]= 1; 2255 skip_dct[5]= 1;
2240 }else{ 2256 }else{
2241 int wrap_c = s->uvlinesize; 2257 int wrap_c = s->uvlinesize;
2242 ptr = s->new_picture[1] + (mb_y * 8 * wrap_c) + mb_x * 8; 2258 ptr = s->new_picture.data[1] + (mb_y * 8 * wrap_c) + mb_x * 8;
2243 if(emu){ 2259 if(emu){
2244 emulated_edge_mc(s, ptr, wrap_c, 8, 8, mb_x*8, mb_y*8, s->width>>1, s->height>>1); 2260 emulated_edge_mc(s, ptr, wrap_c, 8, 8, mb_x*8, mb_y*8, s->width>>1, s->height>>1);
2245 ptr= s->edge_emu_buffer; 2261 ptr= s->edge_emu_buffer;
2246 } 2262 }
2247 s->dsp.get_pixels(s->block[4], ptr, wrap_c); 2263 s->dsp.get_pixels(s->block[4], ptr, wrap_c);
2248 2264
2249 ptr = s->new_picture[2] + (mb_y * 8 * wrap_c) + mb_x * 8; 2265 ptr = s->new_picture.data[2] + (mb_y * 8 * wrap_c) + mb_x * 8;
2250 if(emu){ 2266 if(emu){
2251 emulated_edge_mc(s, ptr, wrap_c, 8, 8, mb_x*8, mb_y*8, s->width>>1, s->height>>1); 2267 emulated_edge_mc(s, ptr, wrap_c, 8, 8, mb_x*8, mb_y*8, s->width>>1, s->height>>1);
2252 ptr= s->edge_emu_buffer; 2268 ptr= s->edge_emu_buffer;
2253 } 2269 }
2254 s->dsp.get_pixels(s->block[5], ptr, wrap_c); 2270 s->dsp.get_pixels(s->block[5], ptr, wrap_c);
2259 UINT8 *dest_y, *dest_cb, *dest_cr; 2275 UINT8 *dest_y, *dest_cb, *dest_cr;
2260 UINT8 *ptr_y, *ptr_cb, *ptr_cr; 2276 UINT8 *ptr_y, *ptr_cb, *ptr_cr;
2261 int wrap_y, wrap_c; 2277 int wrap_y, wrap_c;
2262 int emu=0; 2278 int emu=0;
2263 2279
2264 dest_y = s->current_picture[0] + (mb_y * 16 * s->linesize ) + mb_x * 16; 2280 dest_y = s->current_picture.data[0] + (mb_y * 16 * s->linesize ) + mb_x * 16;
2265 dest_cb = s->current_picture[1] + (mb_y * 8 * (s->uvlinesize)) + mb_x * 8; 2281 dest_cb = s->current_picture.data[1] + (mb_y * 8 * (s->uvlinesize)) + mb_x * 8;
2266 dest_cr = s->current_picture[2] + (mb_y * 8 * (s->uvlinesize)) + mb_x * 8; 2282 dest_cr = s->current_picture.data[2] + (mb_y * 8 * (s->uvlinesize)) + mb_x * 8;
2267 wrap_y = s->linesize; 2283 wrap_y = s->linesize;
2268 wrap_c = s->uvlinesize; 2284 wrap_c = s->uvlinesize;
2269 ptr_y = s->new_picture[0] + (mb_y * 16 * wrap_y) + mb_x * 16; 2285 ptr_y = s->new_picture.data[0] + (mb_y * 16 * wrap_y) + mb_x * 16;
2270 ptr_cb = s->new_picture[1] + (mb_y * 8 * wrap_c) + mb_x * 8; 2286 ptr_cb = s->new_picture.data[1] + (mb_y * 8 * wrap_c) + mb_x * 8;
2271 ptr_cr = s->new_picture[2] + (mb_y * 8 * wrap_c) + mb_x * 8; 2287 ptr_cr = s->new_picture.data[2] + (mb_y * 8 * wrap_c) + mb_x * 8;
2272 2288
2273 if ((!s->no_rounding) || s->pict_type==B_TYPE){ 2289 if ((!s->no_rounding) || s->pict_type==B_TYPE){
2274 op_pix = s->dsp.put_pixels_tab; 2290 op_pix = s->dsp.put_pixels_tab;
2275 op_qpix= s->dsp.put_qpel_pixels_tab; 2291 op_qpix= s->dsp.put_qpel_pixels_tab;
2276 }else{ 2292 }else{
2277 op_pix = s->dsp.put_no_rnd_pixels_tab; 2293 op_pix = s->dsp.put_no_rnd_pixels_tab;
2278 op_qpix= s->dsp.put_no_rnd_qpel_pixels_tab; 2294 op_qpix= s->dsp.put_no_rnd_qpel_pixels_tab;
2279 } 2295 }
2280 2296
2281 if (s->mv_dir & MV_DIR_FORWARD) { 2297 if (s->mv_dir & MV_DIR_FORWARD) {
2282 MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture, op_pix, op_qpix); 2298 MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.data, op_pix, op_qpix);
2283 op_pix = s->dsp.avg_pixels_tab; 2299 op_pix = s->dsp.avg_pixels_tab;
2284 op_qpix= s->dsp.avg_qpel_pixels_tab; 2300 op_qpix= s->dsp.avg_qpel_pixels_tab;
2285 } 2301 }
2286 if (s->mv_dir & MV_DIR_BACKWARD) { 2302 if (s->mv_dir & MV_DIR_BACKWARD) {
2287 MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture, op_pix, op_qpix); 2303 MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.data, op_pix, op_qpix);
2288 } 2304 }
2289 2305
2290 if(mb_x*16+16 > s->width || mb_y*16+16 > s->height){ 2306 if(mb_x*16+16 > s->width || mb_y*16+16 > s->height){
2291 emulated_edge_mc(s, ptr_y, wrap_y, 16, 16, mb_x*16, mb_y*16, s->width, s->height); 2307 emulated_edge_mc(s, ptr_y, wrap_y, 16, 16, mb_x*16, mb_y*16, s->width, s->height);
2292 ptr_y= s->edge_emu_buffer; 2308 ptr_y= s->edge_emu_buffer;
2328 emulated_edge_mc(s, ptr_cr, wrap_c, 8, 8, mb_x*8, mb_y*8, s->width>>1, s->height>>1); 2344 emulated_edge_mc(s, ptr_cr, wrap_c, 8, 8, mb_x*8, mb_y*8, s->width>>1, s->height>>1);
2329 ptr_cr= s->edge_emu_buffer; 2345 ptr_cr= s->edge_emu_buffer;
2330 } 2346 }
2331 s->dsp.diff_pixels(s->block[5], ptr_cr, dest_cr, wrap_c); 2347 s->dsp.diff_pixels(s->block[5], ptr_cr, dest_cr, wrap_c);
2332 } 2348 }
2333
2334 /* pre quantization */ 2349 /* pre quantization */
2335 if(s->mc_mb_var[s->mb_width*mb_y+ mb_x]<2*s->qscale*s->qscale){ 2350 if(s->current_picture.mc_mb_var[s->mb_width*mb_y+ mb_x]<2*s->qscale*s->qscale){
2336 //FIXME optimize 2351 //FIXME optimize
2337 if(s->dsp.pix_abs8x8(ptr_y , dest_y , wrap_y) < 20*s->qscale) skip_dct[0]= 1; 2352 if(s->dsp.pix_abs8x8(ptr_y , dest_y , wrap_y) < 20*s->qscale) skip_dct[0]= 1;
2338 if(s->dsp.pix_abs8x8(ptr_y + 8, dest_y + 8, wrap_y) < 20*s->qscale) skip_dct[1]= 1; 2353 if(s->dsp.pix_abs8x8(ptr_y + 8, dest_y + 8, wrap_y) < 20*s->qscale) skip_dct[1]= 1;
2339 if(s->dsp.pix_abs8x8(ptr_y +dct_offset , dest_y +dct_offset , wrap_y) < 20*s->qscale) skip_dct[2]= 1; 2354 if(s->dsp.pix_abs8x8(ptr_y +dct_offset , dest_y +dct_offset , wrap_y) < 20*s->qscale) skip_dct[2]= 1;
2340 if(s->dsp.pix_abs8x8(ptr_y +dct_offset+ 8, dest_y +dct_offset+ 8, wrap_y) < 20*s->qscale) skip_dct[3]= 1; 2355 if(s->dsp.pix_abs8x8(ptr_y +dct_offset+ 8, dest_y +dct_offset+ 8, wrap_y) < 20*s->qscale) skip_dct[3]= 1;
2555 s->block_wrap[3]= s->mb_width*2 + 2; 2570 s->block_wrap[3]= s->mb_width*2 + 2;
2556 s->block_wrap[4]= 2571 s->block_wrap[4]=
2557 s->block_wrap[5]= s->mb_width + 2; 2572 s->block_wrap[5]= s->mb_width + 2;
2558 2573
2559 /* Reset the average MB variance */ 2574 /* Reset the average MB variance */
2560 s->mb_var_sum = 0; 2575 s->current_picture.mb_var_sum = 0;
2561 s->mc_mb_var_sum = 0; 2576 s->current_picture.mc_mb_var_sum = 0;
2562 2577
2563 /* we need to initialize some time vars before we can encode b-frames */ 2578 /* we need to initialize some time vars before we can encode b-frames */
2564 if (s->h263_pred && !s->h263_msmpeg4) 2579 if (s->h263_pred && !s->h263_msmpeg4)
2565 ff_set_mpeg4_time(s, s->picture_number); 2580 ff_set_mpeg4_time(s, s->picture_number);
2566 2581
2602 /* finding spatial complexity for I-frame rate control */ 2617 /* finding spatial complexity for I-frame rate control */
2603 for(mb_y=0; mb_y < s->mb_height; mb_y++) { 2618 for(mb_y=0; mb_y < s->mb_height; mb_y++) {
2604 for(mb_x=0; mb_x < s->mb_width; mb_x++) { 2619 for(mb_x=0; mb_x < s->mb_width; mb_x++) {
2605 int xx = mb_x * 16; 2620 int xx = mb_x * 16;
2606 int yy = mb_y * 16; 2621 int yy = mb_y * 16;
2607 uint8_t *pix = s->new_picture[0] + (yy * s->linesize) + xx; 2622 uint8_t *pix = s->new_picture.data[0] + (yy * s->linesize) + xx;
2608 int varc; 2623 int varc;
2609 int sum = s->dsp.pix_sum(pix, s->linesize); 2624 int sum = s->dsp.pix_sum(pix, s->linesize);
2610 2625
2611 varc = (s->dsp.pix_norm1(pix, s->linesize) - (((unsigned)(sum*sum))>>8) + 500 + 128)>>8; 2626 varc = (s->dsp.pix_norm1(pix, s->linesize) - (((unsigned)(sum*sum))>>8) + 500 + 128)>>8;
2612 2627
2613 s->mb_var [s->mb_width * mb_y + mb_x] = varc; 2628 s->current_picture.mb_var [s->mb_width * mb_y + mb_x] = varc;
2614 s->mb_mean[s->mb_width * mb_y + mb_x] = (sum+128)>>8; 2629 s->current_picture.mb_mean[s->mb_width * mb_y + mb_x] = (sum+128)>>8;
2615 s->mb_var_sum += varc; 2630 s->current_picture.mb_var_sum += varc;
2616 } 2631 }
2617 } 2632 }
2618 } 2633 }
2619 } 2634 }
2620 emms_c(); 2635 emms_c();
2621 2636
2622 if(s->scene_change_score > 0 && s->pict_type == P_TYPE){ 2637 if(s->scene_change_score > 0 && s->pict_type == P_TYPE){
2623 s->pict_type= I_TYPE; 2638 s->pict_type= I_TYPE;
2624 memset(s->mb_type , MB_TYPE_INTRA, sizeof(UINT8)*s->mb_width*s->mb_height); 2639 memset(s->mb_type , MB_TYPE_INTRA, sizeof(UINT8)*s->mb_width*s->mb_height);
2625 if(s->max_b_frames==0){ 2640 //printf("Scene change detected, encoding as I Frame %d %d\n", s->current_picture.mb_var_sum, s->current_picture.mc_mb_var_sum);
2626 s->input_pict_type= I_TYPE; 2641 }
2627 s->input_picture_in_gop_number=0; 2642
2628 }
2629 //printf("Scene change detected, encoding as I Frame %d %d\n", s->mb_var_sum, s->mc_mb_var_sum);
2630 }
2631
2632 if(s->pict_type==P_TYPE || s->pict_type==S_TYPE) 2643 if(s->pict_type==P_TYPE || s->pict_type==S_TYPE)
2633 s->f_code= ff_get_best_fcode(s, s->p_mv_table, MB_TYPE_INTER); 2644 s->f_code= ff_get_best_fcode(s, s->p_mv_table, MB_TYPE_INTER);
2634 ff_fix_long_p_mvs(s); 2645 ff_fix_long_p_mvs(s);
2635 if(s->pict_type==B_TYPE){ 2646 if(s->pict_type==B_TYPE){
2636 s->f_code= ff_get_best_fcode(s, s->b_forw_mv_table, MB_TYPE_FORWARD); 2647 s->f_code= ff_get_best_fcode(s, s->b_forw_mv_table, MB_TYPE_FORWARD);
2641 ff_fix_long_b_mvs(s, s->b_bidir_forw_mv_table, s->f_code, MB_TYPE_BIDIR); 2652 ff_fix_long_b_mvs(s, s->b_bidir_forw_mv_table, s->f_code, MB_TYPE_BIDIR);
2642 ff_fix_long_b_mvs(s, s->b_bidir_back_mv_table, s->b_code, MB_TYPE_BIDIR); 2653 ff_fix_long_b_mvs(s, s->b_bidir_back_mv_table, s->b_code, MB_TYPE_BIDIR);
2643 } 2654 }
2644 2655
2645 if (s->fixed_qscale) 2656 if (s->fixed_qscale)
2646 s->frame_qscale = s->avctx->quality; 2657 s->frame_qscale = s->current_picture.quality;
2647 else 2658 else
2648 s->frame_qscale = ff_rate_estimate_qscale(s); 2659 s->frame_qscale = ff_rate_estimate_qscale(s);
2649 2660
2650 if(s->adaptive_quant){ 2661 if(s->adaptive_quant){
2651 switch(s->codec_id){ 2662 switch(s->codec_id){
2656 case CODEC_ID_H263P: 2667 case CODEC_ID_H263P:
2657 ff_clean_h263_qscales(s); 2668 ff_clean_h263_qscales(s);
2658 break; 2669 break;
2659 } 2670 }
2660 2671
2661 s->qscale= s->qscale_table[0]; 2672 s->qscale= s->current_picture.qscale_table[0];
2662 }else 2673 }else
2663 s->qscale= (int)(s->frame_qscale + 0.5); 2674 s->qscale= (int)(s->frame_qscale + 0.5);
2664 2675
2665 if (s->out_format == FMT_MJPEG) { 2676 if (s->out_format == FMT_MJPEG) {
2666 /* for mjpeg, we do include qscale in the matrix */ 2677 /* for mjpeg, we do include qscale in the matrix */
2671 s->intra_matrix[j] = CLAMP_TO_8BIT((ff_mpeg1_default_intra_matrix[i] * s->qscale) >> 3); 2682 s->intra_matrix[j] = CLAMP_TO_8BIT((ff_mpeg1_default_intra_matrix[i] * s->qscale) >> 3);
2672 } 2683 }
2673 convert_matrix(s, s->q_intra_matrix, s->q_intra_matrix16, 2684 convert_matrix(s, s->q_intra_matrix, s->q_intra_matrix16,
2674 s->q_intra_matrix16_bias, s->intra_matrix, s->intra_quant_bias, 8, 8); 2685 s->q_intra_matrix16_bias, s->intra_matrix, s->intra_quant_bias, 8, 8);
2675 } 2686 }
2687
2688 //FIXME var duplication
2689 s->current_picture.key_frame= s->pict_type == I_TYPE;
2690 s->current_picture.pict_type= s->pict_type;
2691
2692 if(s->current_picture.key_frame)
2693 s->picture_in_gop_number=0;
2676 2694
2677 s->last_bits= get_bit_count(&s->pb); 2695 s->last_bits= get_bit_count(&s->pb);
2678 switch(s->out_format) { 2696 switch(s->out_format) {
2679 case FMT_MJPEG: 2697 case FMT_MJPEG:
2680 mjpeg_picture_header(s); 2698 mjpeg_picture_header(s);