/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#ifndef VPX_PORTS_X86_H
#define VPX_PORTS_X86_H
#include <stdlib.h>
#include "config.h"

typedef enum
{
    VPX_CPU_UNKNOWN = -1,
    VPX_CPU_AMD,
    VPX_CPU_AMD_OLD,
    VPX_CPU_CENTAUR,
    VPX_CPU_CYRIX,
    VPX_CPU_INTEL,
    VPX_CPU_NEXGEN,
    VPX_CPU_NSC,
    VPX_CPU_RISE,
    VPX_CPU_SIS,
    VPX_CPU_TRANSMETA,
    VPX_CPU_TRANSMETA_OLD,
    VPX_CPU_UMC,
    VPX_CPU_VIA,

    VPX_CPU_LAST
}  vpx_cpu_t;
#if defined(__GNUC__) && __GNUC__
#if ARCH_X86_64
#define cpuid(func,ax,bx,cx,dx)\
    __asm__ __volatile__ (\
                          "cpuid              \n\t" \
                          : "=a" (ax), "=b" (bx), "=c" (cx), "=d" (dx) \
                          : "a" (func));
#else
#define cpuid(func,ax,bx,cx,dx)\
    __asm__ __volatile__ (\
                          "mov %%ebx, %%edi   \n\t" \
                          "cpuid              \n\t" \
                          "xchg %%edi, %%ebx  \n\t" \
                          : "=a" (ax), "=D" (bx), "=c" (cx), "=d" (dx) \
                          : "a" (func));
#endif
#else
#if ARCH_X86_64
void __cpuid(int CPUInfo[4], int info_type);
#pragma intrinsic(__cpuid)
#define cpuid(func,a,b,c,d) do{\
        int regs[4];\
        __cpuid(regs,func); a=regs[0];  b=regs[1];  c=regs[2];  d=regs[3];\
    } while(0)
#else
#define cpuid(func,a,b,c,d)\
    __asm mov eax, func\
    __asm cpuid\
    __asm mov a, eax\
    __asm mov b, ebx\
    __asm mov c, ecx\
    __asm mov d, edx
#endif
#endif
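/*
 * Illustrative note (not part of the original header): every cpuid()
 * argument after the leaf number must be a modifiable lvalue. For example,
 * leaf 0 returns the highest supported basic leaf in eax and the vendor
 * string across ebx/edx/ecx:
 *
 *     unsigned int max_leaf, vb, vc, vd;
 *     cpuid(0, max_leaf, vb, vc, vd);
 */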
#define HAS_MMX    0x01
#define HAS_SSE    0x02
#define HAS_SSE2   0x04
#define HAS_SSE3   0x08
#define HAS_SSSE3  0x10
#define HAS_SSE4_1 0x20
#ifndef BIT
#define BIT(n) (1<<n)
#endif
static int
x86_simd_caps(void)
{
    unsigned int flags = 0;
    unsigned int mask = ~0;
    unsigned int reg_eax, reg_ebx, reg_ecx, reg_edx;
    char *env;
    /* See if the CPU capabilities are being overridden by the environment */
    env = getenv("VPX_SIMD_CAPS");

    if (env && *env)
        return (int)strtol(env, NULL, 0);
    env = getenv("VPX_SIMD_CAPS_MASK");

    if (env && *env)
        mask = strtol(env, NULL, 0);
    /* Ensure that the CPUID instruction supports extended features */
    cpuid(0, reg_eax, reg_ebx, reg_ecx, reg_edx);

    if (reg_eax < 1)
        return 0;
    /* Get the standard feature flags */
    cpuid(1, reg_eax, reg_ebx, reg_ecx, reg_edx);
    if (reg_edx & BIT(23)) flags |= HAS_MMX;

    if (reg_edx & BIT(25)) flags |= HAS_SSE; /* aka xmm */

    if (reg_edx & BIT(26)) flags |= HAS_SSE2; /* aka wmt */

    if (reg_ecx & BIT(0))  flags |= HAS_SSE3;

    if (reg_ecx & BIT(9))  flags |= HAS_SSSE3;

    if (reg_ecx & BIT(19)) flags |= HAS_SSE4_1;

    return flags & mask;
}
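/*
 * Illustrative sketch (not part of the original header): callers typically
 * query the capability bits once and pick a code path from the HAS_* flags.
 * rtcd_init_sse2() and rtcd_init_c() are hypothetical names here.
 *
 *     int caps = x86_simd_caps();
 *
 *     if (caps & HAS_SSE2)
 *         rtcd_init_sse2();
 *     else
 *         rtcd_init_c();
 */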
vpx_cpu_t vpx_x86_vendor(void);
#if ARCH_X86_64 && defined(_MSC_VER)
unsigned __int64 __rdtsc(void);
#pragma intrinsic(__rdtsc)
#endif
static unsigned int
x86_readtsc(void)
{
#if defined(__GNUC__) && __GNUC__
    unsigned int tsc;
    __asm__ __volatile__("rdtsc\n\t" : "=a"(tsc) :);
    return tsc;
#elif ARCH_X86_64
    return (unsigned int)__rdtsc();
#else
    __asm rdtsc;
#endif
}
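/*
 * Illustrative sketch (not part of the original header): the 32-bit
 * time-stamp reading can be differenced to get a rough cycle count for a
 * short region. do_work() is a hypothetical function.
 *
 *     unsigned int start = x86_readtsc();
 *     do_work();
 *     unsigned int cycles = x86_readtsc() - start;
 */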
#if defined(__GNUC__) && __GNUC__
#define x86_pause_hint()\
    __asm__ __volatile__ ("pause \n\t")
#else
#if ARCH_X86_64
/* No pause intrinsic for windows x64 */
#define x86_pause_hint()
#else
#define x86_pause_hint()\
    __asm pause
#endif
#endif
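/*
 * Illustrative sketch (not part of the original header): the pause hint
 * belongs in the body of a spin-wait loop, where it reduces power use and
 * pipeline flushes. `ready` is a hypothetical flag set by another thread.
 *
 *     while (!ready)
 *         x86_pause_hint();
 */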
#if defined(__GNUC__) && __GNUC__
static void
x87_set_control_word(unsigned short mode)
{
    __asm__ __volatile__("fldcw %0" : : "m"(*&mode));
}
static unsigned short
x87_get_control_word(void)
{
    unsigned short mode;
    __asm__ __volatile__("fstcw %0\n\t" : "=m"(*&mode) :);
    return mode;
}
#elif ARCH_X86_64
/* No fldcw intrinsics on Windows x64, punt to external asm */
extern void           vpx_winx64_fldcw(unsigned short mode);
extern unsigned short vpx_winx64_fstcw(void);
#define x87_set_control_word vpx_winx64_fldcw
#define x87_get_control_word vpx_winx64_fstcw
#else
static void
x87_set_control_word(unsigned short mode)
{
    __asm { fldcw mode }
}
static unsigned short
x87_get_control_word(void)
{
    unsigned short mode;
    __asm { fstcw mode }
    return mode;
}
#endif
static unsigned short
x87_set_double_precision(void)
{
    unsigned short mode = x87_get_control_word();
    x87_set_control_word((mode&~0x300) | 0x200);
    return mode;
}
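/*
 * Illustrative sketch (not part of the original header): the previous
 * control word is returned so the caller can restore it after the section
 * that needs 53-bit (double) precision. run_transform() is hypothetical.
 *
 *     unsigned short saved = x87_set_double_precision();
 *     run_transform();
 *     x87_set_control_word(saved);
 */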
extern void vpx_reset_mmx_state(void);
#endif