/*
 * Provenance: extracted from libvpx.git — vp8/encoder/variance.h
 * blob bf17ea8b6884382cda7daaaedf8be09cff9e6a8c
 * (commit subject: "Define RDCOST only once")
 */
/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
12 #ifndef VARIANCE_H
13 #define VARIANCE_H
/* Signature-generating macros.  Each prototype_*(sym) expands to the full
 * declarator of one variance/SAD kernel, so the same macro can declare a C
 * reference implementation, an optimized assembly version, or a
 * function-pointer typedef (see vp8_*_fn_t below).
 * Note: the extracted source had lost the lines holding the opening and
 * closing parentheses of each parameter list; they are restored here. */

/* Sum of absolute differences over one block; may early-out past max_sad. */
#define prototype_sad(sym)\
    unsigned int (sym)\
    (\
     const unsigned char *src_ptr, \
     int source_stride, \
     const unsigned char *ref_ptr, \
     int ref_stride, \
     int max_sad\
    )

/* SAD at 3 adjacent reference positions; results into sad_array[3]. */
#define prototype_sad_multi_same_address(sym)\
    void (sym)\
    (\
     const unsigned char *src_ptr, \
     int source_stride, \
     const unsigned char *ref_ptr, \
     int ref_stride, \
     unsigned int *sad_array\
    )

/* SAD at 8 adjacent positions; 16-bit results into sad_array[8]. */
#define prototype_sad_multi_same_address_1(sym)\
    void (sym)\
    (\
     const unsigned char *src_ptr, \
     int source_stride, \
     const unsigned char *ref_ptr, \
     int ref_stride, \
     unsigned short *sad_array\
    )

/* SAD against 4 independent reference pointers; results into sad_array[4]. */
#define prototype_sad_multi_dif_address(sym)\
    void (sym)\
    (\
     const unsigned char *src_ptr, \
     int source_stride, \
     unsigned char *ref_ptr[4], \
     int ref_stride, \
     unsigned int *sad_array\
    )

/* Variance of one block; also returns the sum of squared errors via *sse. */
#define prototype_variance(sym) \
    unsigned int (sym) \
    (\
     const unsigned char *src_ptr, \
     int source_stride, \
     const unsigned char *ref_ptr, \
     int ref_stride, \
     unsigned int *sse\
    )

/* As prototype_variance, additionally returning the signed sum via *sum. */
#define prototype_variance2(sym) \
    unsigned int (sym) \
    (\
     const unsigned char *src_ptr, \
     int source_stride, \
     const unsigned char *ref_ptr, \
     int ref_stride, \
     unsigned int *sse,\
     int *sum\
    )

/* Sub-pixel variance: xoffset/yoffset select the interpolation phase. */
#define prototype_subpixvariance(sym) \
    unsigned int (sym) \
    ( \
      const unsigned char *src_ptr, \
      int source_stride, \
      int xoffset, \
      int yoffset, \
      const unsigned char *ref_ptr, \
      int Refstride, \
      unsigned int *sse \
    )

/* SSIM partial sums over a source (s) and reference (r) region. */
#define prototype_ssimpf(sym) \
    void (sym) \
      ( \
        unsigned char *s, \
        int sp, \
        unsigned char *r, \
        int rp, \
        unsigned long *sum_s, \
        unsigned long *sum_r, \
        unsigned long *sum_sq_s, \
        unsigned long *sum_sq_r, \
        unsigned long *sum_sxr \
      )

/* Sum of squares of a (short) macroblock difference buffer. */
#define prototype_getmbss(sym) unsigned int (sym)(const short *)
/* Pull in platform-specific overrides of the vp8_variance_* defaults.
 * These headers redefine individual vp8_variance_* macros before the
 * #ifndef default mappings below are evaluated. */
#if ARCH_X86 || ARCH_X86_64
#include "x86/variance_x86.h"
#endif

#if ARCH_ARM
#include "arm/variance_arm.h"
#endif
/* Default (C reference) bindings for the SAD kernels.  A platform header
 * included above may have already #defined any of these to an optimized
 * version; the #ifndef keeps the C fallback only when no override exists. */
#ifndef vp8_variance_sad4x4
#define vp8_variance_sad4x4 vp8_sad4x4_c
#endif
extern prototype_sad(vp8_variance_sad4x4);

#ifndef vp8_variance_sad8x8
#define vp8_variance_sad8x8 vp8_sad8x8_c
#endif
extern prototype_sad(vp8_variance_sad8x8);

#ifndef vp8_variance_sad8x16
#define vp8_variance_sad8x16 vp8_sad8x16_c
#endif
extern prototype_sad(vp8_variance_sad8x16);

#ifndef vp8_variance_sad16x8
#define vp8_variance_sad16x8 vp8_sad16x8_c
#endif
extern prototype_sad(vp8_variance_sad16x8);

#ifndef vp8_variance_sad16x16
#define vp8_variance_sad16x16 vp8_sad16x16_c
#endif
extern prototype_sad(vp8_variance_sad16x16);

//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/* 3-position multi-SAD kernels (same reference base address). */
#ifndef vp8_variance_sad16x16x3
#define vp8_variance_sad16x16x3 vp8_sad16x16x3_c
#endif
extern prototype_sad_multi_same_address(vp8_variance_sad16x16x3);

#ifndef vp8_variance_sad16x8x3
#define vp8_variance_sad16x8x3 vp8_sad16x8x3_c
#endif
extern prototype_sad_multi_same_address(vp8_variance_sad16x8x3);

#ifndef vp8_variance_sad8x8x3
#define vp8_variance_sad8x8x3 vp8_sad8x8x3_c
#endif
extern prototype_sad_multi_same_address(vp8_variance_sad8x8x3);

#ifndef vp8_variance_sad8x16x3
#define vp8_variance_sad8x16x3 vp8_sad8x16x3_c
#endif
extern prototype_sad_multi_same_address(vp8_variance_sad8x16x3);

#ifndef vp8_variance_sad4x4x3
#define vp8_variance_sad4x4x3 vp8_sad4x4x3_c
#endif
extern prototype_sad_multi_same_address(vp8_variance_sad4x4x3);

/* 8-position multi-SAD kernels (16-bit results). */
#ifndef vp8_variance_sad16x16x8
#define vp8_variance_sad16x16x8 vp8_sad16x16x8_c
#endif
extern prototype_sad_multi_same_address_1(vp8_variance_sad16x16x8);

#ifndef vp8_variance_sad16x8x8
#define vp8_variance_sad16x8x8 vp8_sad16x8x8_c
#endif
extern prototype_sad_multi_same_address_1(vp8_variance_sad16x8x8);

#ifndef vp8_variance_sad8x8x8
#define vp8_variance_sad8x8x8 vp8_sad8x8x8_c
#endif
extern prototype_sad_multi_same_address_1(vp8_variance_sad8x8x8);

#ifndef vp8_variance_sad8x16x8
#define vp8_variance_sad8x16x8 vp8_sad8x16x8_c
#endif
extern prototype_sad_multi_same_address_1(vp8_variance_sad8x16x8);

#ifndef vp8_variance_sad4x4x8
#define vp8_variance_sad4x4x8 vp8_sad4x4x8_c
#endif
extern prototype_sad_multi_same_address_1(vp8_variance_sad4x4x8);

//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/* 4-reference multi-SAD kernels (independent reference pointers). */
#ifndef vp8_variance_sad16x16x4d
#define vp8_variance_sad16x16x4d vp8_sad16x16x4d_c
#endif
extern prototype_sad_multi_dif_address(vp8_variance_sad16x16x4d);

#ifndef vp8_variance_sad16x8x4d
#define vp8_variance_sad16x8x4d vp8_sad16x8x4d_c
#endif
extern prototype_sad_multi_dif_address(vp8_variance_sad16x8x4d);

#ifndef vp8_variance_sad8x8x4d
#define vp8_variance_sad8x8x4d vp8_sad8x8x4d_c
#endif
extern prototype_sad_multi_dif_address(vp8_variance_sad8x8x4d);

#ifndef vp8_variance_sad8x16x4d
#define vp8_variance_sad8x16x4d vp8_sad8x16x4d_c
#endif
extern prototype_sad_multi_dif_address(vp8_variance_sad8x16x4d);

#ifndef vp8_variance_sad4x4x4d
#define vp8_variance_sad4x4x4d vp8_sad4x4x4d_c
#endif
extern prototype_sad_multi_dif_address(vp8_variance_sad4x4x4d);

//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/* Default (C reference) bindings for the full-pixel variance kernels. */
#ifndef vp8_variance_var4x4
#define vp8_variance_var4x4 vp8_variance4x4_c
#endif
extern prototype_variance(vp8_variance_var4x4);

#ifndef vp8_variance_var8x8
#define vp8_variance_var8x8 vp8_variance8x8_c
#endif
extern prototype_variance(vp8_variance_var8x8);

#ifndef vp8_variance_var8x16
#define vp8_variance_var8x16 vp8_variance8x16_c
#endif
extern prototype_variance(vp8_variance_var8x16);

#ifndef vp8_variance_var16x8
#define vp8_variance_var16x8 vp8_variance16x8_c
#endif
extern prototype_variance(vp8_variance_var16x8);

#ifndef vp8_variance_var16x16
#define vp8_variance_var16x16 vp8_variance16x16_c
#endif
extern prototype_variance(vp8_variance_var16x16);

//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/* Default (C reference) bindings for the sub-pixel variance kernels. */
#ifndef vp8_variance_subpixvar4x4
#define vp8_variance_subpixvar4x4 vp8_sub_pixel_variance4x4_c
#endif
extern prototype_subpixvariance(vp8_variance_subpixvar4x4);

#ifndef vp8_variance_subpixvar8x8
#define vp8_variance_subpixvar8x8 vp8_sub_pixel_variance8x8_c
#endif
extern prototype_subpixvariance(vp8_variance_subpixvar8x8);

#ifndef vp8_variance_subpixvar8x16
#define vp8_variance_subpixvar8x16 vp8_sub_pixel_variance8x16_c
#endif
extern prototype_subpixvariance(vp8_variance_subpixvar8x16);

#ifndef vp8_variance_subpixvar16x8
#define vp8_variance_subpixvar16x8 vp8_sub_pixel_variance16x8_c
#endif
extern prototype_subpixvariance(vp8_variance_subpixvar16x8);

#ifndef vp8_variance_subpixvar16x16
#define vp8_variance_subpixvar16x16 vp8_sub_pixel_variance16x16_c
#endif
extern prototype_subpixvariance(vp8_variance_subpixvar16x16);

/* Specialized half-pixel variants (fixed 1/2-pel offsets: h, v, h+v). */
#ifndef vp8_variance_halfpixvar16x16_h
#define vp8_variance_halfpixvar16x16_h vp8_variance_halfpixvar16x16_h_c
#endif
extern prototype_variance(vp8_variance_halfpixvar16x16_h);

#ifndef vp8_variance_halfpixvar16x16_v
#define vp8_variance_halfpixvar16x16_v vp8_variance_halfpixvar16x16_v_c
#endif
extern prototype_variance(vp8_variance_halfpixvar16x16_v);

#ifndef vp8_variance_halfpixvar16x16_hv
#define vp8_variance_halfpixvar16x16_hv vp8_variance_halfpixvar16x16_hv_c
#endif
extern prototype_variance(vp8_variance_halfpixvar16x16_hv);

#ifndef vp8_variance_subpixmse16x16
#define vp8_variance_subpixmse16x16 vp8_sub_pixel_mse16x16_c
#endif
extern prototype_subpixvariance(vp8_variance_subpixmse16x16);

//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/* Default bindings for the remaining helper kernels (MSE, prediction
 * error, per-block variance with sum, and SSIM partial sums). */
#ifndef vp8_variance_getmbss
#define vp8_variance_getmbss vp8_get_mb_ss_c
#endif
extern prototype_getmbss(vp8_variance_getmbss);

#ifndef vp8_variance_mse16x16
#define vp8_variance_mse16x16 vp8_mse16x16_c
#endif
extern prototype_variance(vp8_variance_mse16x16);

#ifndef vp8_variance_get16x16prederror
#define vp8_variance_get16x16prederror vp8_get16x16pred_error_c
#endif
extern prototype_sad(vp8_variance_get16x16prederror);

#ifndef vp8_variance_get8x8var
#define vp8_variance_get8x8var vp8_get8x8var_c
#endif
extern prototype_variance2(vp8_variance_get8x8var);

#ifndef vp8_variance_get16x16var
#define vp8_variance_get16x16var vp8_get16x16var_c
#endif
extern prototype_variance2(vp8_variance_get16x16var);

#ifndef vp8_variance_get4x4sse_cs
#define vp8_variance_get4x4sse_cs vp8_get4x4sse_cs_c
#endif
extern prototype_sad(vp8_variance_get4x4sse_cs);

#ifndef vp8_ssimpf
#define vp8_ssimpf ssim_parms_c
#endif
/* NOTE(review): the original lines lacked the trailing semicolons on these
 * two extern declarations, which makes them merge into the following
 * declaration after preprocessing; added here. */
extern prototype_ssimpf(vp8_ssimpf);

#ifndef vp8_ssimpf_8x8
#define vp8_ssimpf_8x8 ssim_parms_8x8_c
#endif
extern prototype_ssimpf(vp8_ssimpf_8x8);
332 typedef prototype_sad(*vp8_sad_fn_t);
333 typedef prototype_sad_multi_same_address(*vp8_sad_multi_fn_t);
334 typedef prototype_sad_multi_same_address_1(*vp8_sad_multi1_fn_t);
335 typedef prototype_sad_multi_dif_address(*vp8_sad_multi_d_fn_t);
336 typedef prototype_variance(*vp8_variance_fn_t);
337 typedef prototype_variance2(*vp8_variance2_fn_t);
338 typedef prototype_subpixvariance(*vp8_subpixvariance_fn_t);
339 typedef prototype_getmbss(*vp8_getmbss_fn_t);
341 typedef prototype_ssimpf(*vp8_ssimpf_fn_t)
344 typedef struct
346 vp8_sad_fn_t sad4x4;
347 vp8_sad_fn_t sad8x8;
348 vp8_sad_fn_t sad8x16;
349 vp8_sad_fn_t sad16x8;
350 vp8_sad_fn_t sad16x16;
352 vp8_variance_fn_t var4x4;
353 vp8_variance_fn_t var8x8;
354 vp8_variance_fn_t var8x16;
355 vp8_variance_fn_t var16x8;
356 vp8_variance_fn_t var16x16;
358 vp8_subpixvariance_fn_t subpixvar4x4;
359 vp8_subpixvariance_fn_t subpixvar8x8;
360 vp8_subpixvariance_fn_t subpixvar8x16;
361 vp8_subpixvariance_fn_t subpixvar16x8;
362 vp8_subpixvariance_fn_t subpixvar16x16;
363 vp8_variance_fn_t halfpixvar16x16_h;
364 vp8_variance_fn_t halfpixvar16x16_v;
365 vp8_variance_fn_t halfpixvar16x16_hv;
366 vp8_subpixvariance_fn_t subpixmse16x16;
368 vp8_getmbss_fn_t getmbss;
369 vp8_variance_fn_t mse16x16;
371 vp8_sad_fn_t get16x16prederror;
372 vp8_variance2_fn_t get8x8var;
373 vp8_variance2_fn_t get16x16var;
374 vp8_sad_fn_t get4x4sse_cs;
376 vp8_sad_multi_fn_t sad16x16x3;
377 vp8_sad_multi_fn_t sad16x8x3;
378 vp8_sad_multi_fn_t sad8x16x3;
379 vp8_sad_multi_fn_t sad8x8x3;
380 vp8_sad_multi_fn_t sad4x4x3;
382 vp8_sad_multi1_fn_t sad16x16x8;
383 vp8_sad_multi1_fn_t sad16x8x8;
384 vp8_sad_multi1_fn_t sad8x16x8;
385 vp8_sad_multi1_fn_t sad8x8x8;
386 vp8_sad_multi1_fn_t sad4x4x8;
388 vp8_sad_multi_d_fn_t sad16x16x4d;
389 vp8_sad_multi_d_fn_t sad16x8x4d;
390 vp8_sad_multi_d_fn_t sad8x16x4d;
391 vp8_sad_multi_d_fn_t sad8x8x4d;
392 vp8_sad_multi_d_fn_t sad4x4x4d;
394 #if CONFIG_PSNR
395 vp8_ssimpf_fn_t ssimpf_8x8;
396 vp8_ssimpf_fn_t ssimpf;
397 #endif
399 } vp8_variance_rtcd_vtable_t;
401 typedef struct
403 vp8_sad_fn_t sdf;
404 vp8_variance_fn_t vf;
405 vp8_subpixvariance_fn_t svf;
406 vp8_variance_fn_t svf_halfpix_h;
407 vp8_variance_fn_t svf_halfpix_v;
408 vp8_variance_fn_t svf_halfpix_hv;
409 vp8_sad_multi_fn_t sdx3f;
410 vp8_sad_multi1_fn_t sdx8f;
411 vp8_sad_multi_d_fn_t sdx4df;
413 } vp8_variance_fn_ptr_t;
415 #if CONFIG_RUNTIME_CPU_DETECT
416 #define VARIANCE_INVOKE(ctx,fn) (ctx)->fn
417 #else
418 #define VARIANCE_INVOKE(ctx,fn) vp8_variance_##fn
419 #endif
421 #endif