/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
12 #include "vpx_ports/config.h"
14 #include "reconintra.h"
15 #include "vpx_mem/vpx_mem.h"
/* For skip_recon_mb(), add vp8_build_intra_predictors_mby_s(MACROBLOCKD *x) and
 * vp8_build_intra_predictors_mbuv_s(MACROBLOCKD *x).
 */
20 void vp8_recon_intra_mbuv(const vp8_recon_rtcd_vtable_t
*rtcd
, MACROBLOCKD
*x
)
24 for (i
= 16; i
< 24; i
+= 2)
26 BLOCKD
*b
= &x
->block
[i
];
27 RECON_INVOKE(rtcd
, recon2
)(b
->predictor
, b
->diff
, *(b
->base_dst
) + b
->dst
, b
->dst_stride
);
31 void vp8_build_intra_predictors_mby(MACROBLOCKD
*x
)
34 unsigned char *yabove_row
= x
->dst
.y_buffer
- x
->dst
.y_stride
;
35 unsigned char yleft_col
[16];
36 unsigned char ytop_left
= yabove_row
[-1];
37 unsigned char *ypred_ptr
= x
->predictor
;
40 for (i
= 0; i
< 16; i
++)
42 yleft_col
[i
] = x
->dst
.y_buffer
[i
* x
->dst
.y_stride
-1];
46 switch (x
->mode_info_context
->mbmi
.mode
)
56 if (x
->up_available
|| x
->left_available
)
60 for (i
= 0; i
< 16; i
++)
62 average
+= yabove_row
[i
];
66 if (x
->left_available
)
69 for (i
= 0; i
< 16; i
++)
71 average
+= yleft_col
[i
];
78 shift
= 3 + x
->up_available
+ x
->left_available
;
79 expected_dc
= (average
+ (1 << (shift
- 1))) >> shift
;
86 vpx_memset(ypred_ptr
, expected_dc
, 256);
92 for (r
= 0; r
< 16; r
++)
95 ((int *)ypred_ptr
)[0] = ((int *)yabove_row
)[0];
96 ((int *)ypred_ptr
)[1] = ((int *)yabove_row
)[1];
97 ((int *)ypred_ptr
)[2] = ((int *)yabove_row
)[2];
98 ((int *)ypred_ptr
)[3] = ((int *)yabove_row
)[3];
106 for (r
= 0; r
< 16; r
++)
109 vpx_memset(ypred_ptr
, yleft_col
[r
], 16);
118 for (r
= 0; r
< 16; r
++)
120 for (c
= 0; c
< 16; c
++)
122 int pred
= yleft_col
[r
] + yabove_row
[ c
] - ytop_left
;
149 void vp8_build_intra_predictors_mby_s(MACROBLOCKD
*x
)
152 unsigned char *yabove_row
= x
->dst
.y_buffer
- x
->dst
.y_stride
;
153 unsigned char yleft_col
[16];
154 unsigned char ytop_left
= yabove_row
[-1];
155 unsigned char *ypred_ptr
= x
->predictor
;
158 int y_stride
= x
->dst
.y_stride
;
159 ypred_ptr
= x
->dst
.y_buffer
; /*x->predictor;*/
161 for (i
= 0; i
< 16; i
++)
163 yleft_col
[i
] = x
->dst
.y_buffer
[i
* x
->dst
.y_stride
-1];
167 switch (x
->mode_info_context
->mbmi
.mode
)
177 if (x
->up_available
|| x
->left_available
)
181 for (i
= 0; i
< 16; i
++)
183 average
+= yabove_row
[i
];
187 if (x
->left_available
)
190 for (i
= 0; i
< 16; i
++)
192 average
+= yleft_col
[i
];
199 shift
= 3 + x
->up_available
+ x
->left_available
;
200 expected_dc
= (average
+ (1 << (shift
- 1))) >> shift
;
207 /*vpx_memset(ypred_ptr, expected_dc, 256);*/
208 for (r
= 0; r
< 16; r
++)
210 vpx_memset(ypred_ptr
, expected_dc
, 16);
211 ypred_ptr
+= y_stride
; /*16;*/
218 for (r
= 0; r
< 16; r
++)
221 ((int *)ypred_ptr
)[0] = ((int *)yabove_row
)[0];
222 ((int *)ypred_ptr
)[1] = ((int *)yabove_row
)[1];
223 ((int *)ypred_ptr
)[2] = ((int *)yabove_row
)[2];
224 ((int *)ypred_ptr
)[3] = ((int *)yabove_row
)[3];
225 ypred_ptr
+= y_stride
; /*16;*/
232 for (r
= 0; r
< 16; r
++)
235 vpx_memset(ypred_ptr
, yleft_col
[r
], 16);
236 ypred_ptr
+= y_stride
; /*16;*/
244 for (r
= 0; r
< 16; r
++)
246 for (c
= 0; c
< 16; c
++)
248 int pred
= yleft_col
[r
] + yabove_row
[ c
] - ytop_left
;
259 ypred_ptr
+= y_stride
; /*16;*/
275 void vp8_build_intra_predictors_mbuv(MACROBLOCKD
*x
)
277 unsigned char *uabove_row
= x
->dst
.u_buffer
- x
->dst
.uv_stride
;
278 unsigned char uleft_col
[16];
279 unsigned char utop_left
= uabove_row
[-1];
280 unsigned char *vabove_row
= x
->dst
.v_buffer
- x
->dst
.uv_stride
;
281 unsigned char vleft_col
[20];
282 unsigned char vtop_left
= vabove_row
[-1];
283 unsigned char *upred_ptr
= &x
->predictor
[256];
284 unsigned char *vpred_ptr
= &x
->predictor
[320];
287 for (i
= 0; i
< 8; i
++)
289 uleft_col
[i
] = x
->dst
.u_buffer
[i
* x
->dst
.uv_stride
-1];
290 vleft_col
[i
] = x
->dst
.v_buffer
[i
* x
->dst
.uv_stride
-1];
293 switch (x
->mode_info_context
->mbmi
.uv_mode
)
306 for (i
= 0; i
< 8; i
++)
308 Uaverage
+= uabove_row
[i
];
309 Vaverage
+= vabove_row
[i
];
313 if (x
->left_available
)
315 for (i
= 0; i
< 8; i
++)
317 Uaverage
+= uleft_col
[i
];
318 Vaverage
+= vleft_col
[i
];
322 if (!x
->up_available
&& !x
->left_available
)
329 shift
= 2 + x
->up_available
+ x
->left_available
;
330 expected_udc
= (Uaverage
+ (1 << (shift
- 1))) >> shift
;
331 expected_vdc
= (Vaverage
+ (1 << (shift
- 1))) >> shift
;
335 vpx_memset(upred_ptr
, expected_udc
, 64);
336 vpx_memset(vpred_ptr
, expected_vdc
, 64);
345 for (i
= 0; i
< 8; i
++)
347 vpx_memcpy(upred_ptr
, uabove_row
, 8);
348 vpx_memcpy(vpred_ptr
, vabove_row
, 8);
359 for (i
= 0; i
< 8; i
++)
361 vpx_memset(upred_ptr
, uleft_col
[i
], 8);
362 vpx_memset(vpred_ptr
, vleft_col
[i
], 8);
373 for (i
= 0; i
< 8; i
++)
375 for (j
= 0; j
< 8; j
++)
377 int predu
= uleft_col
[i
] + uabove_row
[j
] - utop_left
;
378 int predv
= vleft_col
[i
] + vabove_row
[j
] - vtop_left
;
392 upred_ptr
[j
] = predu
;
393 vpred_ptr
[j
] = predv
;
413 void vp8_build_intra_predictors_mbuv_s(MACROBLOCKD
*x
)
415 unsigned char *uabove_row
= x
->dst
.u_buffer
- x
->dst
.uv_stride
;
416 unsigned char uleft_col
[16];
417 unsigned char utop_left
= uabove_row
[-1];
418 unsigned char *vabove_row
= x
->dst
.v_buffer
- x
->dst
.uv_stride
;
419 unsigned char vleft_col
[20];
420 unsigned char vtop_left
= vabove_row
[-1];
421 unsigned char *upred_ptr
= x
->dst
.u_buffer
; /*&x->predictor[256];*/
422 unsigned char *vpred_ptr
= x
->dst
.v_buffer
; /*&x->predictor[320];*/
423 int uv_stride
= x
->dst
.uv_stride
;
427 for (i
= 0; i
< 8; i
++)
429 uleft_col
[i
] = x
->dst
.u_buffer
[i
* x
->dst
.uv_stride
-1];
430 vleft_col
[i
] = x
->dst
.v_buffer
[i
* x
->dst
.uv_stride
-1];
433 switch (x
->mode_info_context
->mbmi
.uv_mode
)
446 for (i
= 0; i
< 8; i
++)
448 Uaverage
+= uabove_row
[i
];
449 Vaverage
+= vabove_row
[i
];
453 if (x
->left_available
)
455 for (i
= 0; i
< 8; i
++)
457 Uaverage
+= uleft_col
[i
];
458 Vaverage
+= vleft_col
[i
];
462 if (!x
->up_available
&& !x
->left_available
)
469 shift
= 2 + x
->up_available
+ x
->left_available
;
470 expected_udc
= (Uaverage
+ (1 << (shift
- 1))) >> shift
;
471 expected_vdc
= (Vaverage
+ (1 << (shift
- 1))) >> shift
;
475 /*vpx_memset(upred_ptr,expected_udc,64);*/
476 /*vpx_memset(vpred_ptr,expected_vdc,64);*/
477 for (i
= 0; i
< 8; i
++)
479 vpx_memset(upred_ptr
, expected_udc
, 8);
480 vpx_memset(vpred_ptr
, expected_vdc
, 8);
481 upred_ptr
+= uv_stride
; /*8;*/
482 vpred_ptr
+= uv_stride
; /*8;*/
490 for (i
= 0; i
< 8; i
++)
492 vpx_memcpy(upred_ptr
, uabove_row
, 8);
493 vpx_memcpy(vpred_ptr
, vabove_row
, 8);
494 upred_ptr
+= uv_stride
; /*8;*/
495 vpred_ptr
+= uv_stride
; /*8;*/
504 for (i
= 0; i
< 8; i
++)
506 vpx_memset(upred_ptr
, uleft_col
[i
], 8);
507 vpx_memset(vpred_ptr
, vleft_col
[i
], 8);
508 upred_ptr
+= uv_stride
; /*8;*/
509 vpred_ptr
+= uv_stride
; /*8;*/
518 for (i
= 0; i
< 8; i
++)
520 for (j
= 0; j
< 8; j
++)
522 int predu
= uleft_col
[i
] + uabove_row
[j
] - utop_left
;
523 int predv
= vleft_col
[i
] + vabove_row
[j
] - vtop_left
;
537 upred_ptr
[j
] = predu
;
538 vpred_ptr
[j
] = predv
;
541 upred_ptr
+= uv_stride
; /*8;*/
542 vpred_ptr
+= uv_stride
; /*8;*/