host-utils.h
/*
 * Utility compute operations used by translated code.
 *
 * Copyright (c) 2007 Thiemo Seufer
 * Copyright (c) 2007 Jocelyn Mayer
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "osdep.h"

#if defined(__x86_64__)
#define __HAVE_FAST_MULU64__
static always_inline void mulu64 (uint64_t *plow, uint64_t *phigh,
                                  uint64_t a, uint64_t b)
{
    __asm__ ("mul %0\n\t"
             : "=d" (*phigh), "=a" (*plow)
             : "a" (a), "0" (b));
}
#define __HAVE_FAST_MULS64__
static always_inline void muls64 (uint64_t *plow, uint64_t *phigh,
                                  int64_t a, int64_t b)
{
    __asm__ ("imul %0\n\t"
             : "=d" (*phigh), "=a" (*plow)
             : "a" (a), "0" (b));
}
#else
void muls64(uint64_t *plow, uint64_t *phigh, int64_t a, int64_t b);
void mulu64(uint64_t *plow, uint64_t *phigh, uint64_t a, uint64_t b);
#endif
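
/*
 * mulu64()/muls64() compute the full 128-bit product of two 64-bit
 * operands and return it split into halves: the low 64 bits through
 * *plow and the high 64 bits through *phigh.
 *
 * Illustrative usage (hypothetical caller, not part of this header):
 *
 *     uint64_t lo, hi;
 *     mulu64(&lo, &hi, 0xFFFFFFFFFFFFFFFFULL, 2);
 *     // hi == 1, lo == 0xFFFFFFFFFFFFFFFEULL
 */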

/* Binary search for leading zeros.  */

static always_inline int clz32(uint32_t val)
{
#if QEMU_GNUC_PREREQ(3, 4)
    if (val)
        return __builtin_clz(val);
    else
        return 32;
#else
    int cnt = 0;

    if (!(val & 0xFFFF0000U)) {
        cnt += 16;
        val <<= 16;
    }
    if (!(val & 0xFF000000U)) {
        cnt += 8;
        val <<= 8;
    }
    if (!(val & 0xF0000000U)) {
        cnt += 4;
        val <<= 4;
    }
    if (!(val & 0xC0000000U)) {
        cnt += 2;
        val <<= 2;
    }
    if (!(val & 0x80000000U)) {
        cnt++;
        val <<= 1;
    }
    if (!(val & 0x80000000U)) {
        cnt++;
    }
    return cnt;
#endif
}
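
/*
 * Examples: clz32(0x00010000U) == 15 (bit 16 is the highest set bit);
 * clz32(0) returns 32 in both branches above.
 */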

static always_inline int clo32(uint32_t val)
{
    return clz32(~val);
}

static always_inline int clz64(uint64_t val)
{
#if QEMU_GNUC_PREREQ(3, 4)
    if (val)
        return __builtin_clzll(val);
    else
        return 64;
#else
    int cnt = 0;

    if (!(val >> 32)) {
        cnt += 32;
    } else {
        val >>= 32;
    }

    return cnt + clz32(val);
#endif
}

static always_inline int clo64(uint64_t val)
{
    return clz64(~val);
}
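
/*
 * clo32()/clo64() count leading ones by complementing the argument:
 * e.g. clo32(0xFFFF0000U) == 16 and clo64(~0ULL) == 64.
 */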

static always_inline int ctz32(uint32_t val)
{
#if QEMU_GNUC_PREREQ(3, 4)
    if (val)
        return __builtin_ctz(val);
    else
        return 32;
#else
    int cnt;

    cnt = 0;
    if (!(val & 0x0000FFFFUL)) {
        cnt += 16;
        val >>= 16;
    }
    if (!(val & 0x000000FFUL)) {
        cnt += 8;
        val >>= 8;
    }
    if (!(val & 0x0000000FUL)) {
        cnt += 4;
        val >>= 4;
    }
    if (!(val & 0x00000003UL)) {
        cnt += 2;
        val >>= 2;
    }
    if (!(val & 0x00000001UL)) {
        cnt++;
        val >>= 1;
    }
    if (!(val & 0x00000001UL)) {
        cnt++;
    }
    return cnt;
#endif
}

static always_inline int cto32(uint32_t val)
{
    return ctz32(~val);
}
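
/*
 * Examples: ctz32(0x80000000U) == 31, ctz32(0) == 32, and
 * cto32(0x000000FFU) == 8 (eight trailing one bits).
 */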

static always_inline int ctz64(uint64_t val)
{
#if QEMU_GNUC_PREREQ(3, 4)
    if (val)
        return __builtin_ctzll(val);
    else
        return 64;
#else
    int cnt;

    cnt = 0;
    if (!((uint32_t)val)) {
        cnt += 32;
        val >>= 32;
    }

    return cnt + ctz32(val);
#endif
}

static always_inline int cto64(uint64_t val)
{
    return ctz64(~val);
}
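
/*
 * Examples: ctz64(1ULL << 40) == 40, ctz64(0) == 64, and
 * cto64(0xFFULL) == 8.
 */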

static always_inline int ctpop8(uint8_t val)
{
    val = (val & 0x55) + ((val >> 1) & 0x55);
    val = (val & 0x33) + ((val >> 2) & 0x33);
    val = (val & 0x0f) + ((val >> 4) & 0x0f);

    return val;
}
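
/*
 * The ctpop* helpers count set bits with the classic parallel ("SWAR")
 * reduction: each step adds adjacent bit fields of doubling width
 * (1-bit, 2-bit, 4-bit, ...) until the whole word holds the population
 * count.  Example: ctpop8(0xAA) == 4.
 */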

static always_inline int ctpop16(uint16_t val)
{
    val = (val & 0x5555) + ((val >> 1) & 0x5555);
    val = (val & 0x3333) + ((val >> 2) & 0x3333);
    val = (val & 0x0f0f) + ((val >> 4) & 0x0f0f);
    val = (val & 0x00ff) + ((val >> 8) & 0x00ff);

    return val;
}

static always_inline int ctpop32(uint32_t val)
{
#if QEMU_GNUC_PREREQ(3, 4)
    return __builtin_popcount(val);
#else
    val = (val & 0x55555555) + ((val >> 1) & 0x55555555);
    val = (val & 0x33333333) + ((val >> 2) & 0x33333333);
    val = (val & 0x0f0f0f0f) + ((val >> 4) & 0x0f0f0f0f);
    val = (val & 0x00ff00ff) + ((val >> 8) & 0x00ff00ff);
    val = (val & 0x0000ffff) + ((val >> 16) & 0x0000ffff);

    return val;
#endif
}

static always_inline int ctpop64(uint64_t val)
{
#if QEMU_GNUC_PREREQ(3, 4)
    return __builtin_popcountll(val);
#else
    val = (val & 0x5555555555555555ULL) + ((val >> 1) & 0x5555555555555555ULL);
    val = (val & 0x3333333333333333ULL) + ((val >> 2) & 0x3333333333333333ULL);
    val = (val & 0x0f0f0f0f0f0f0f0fULL) + ((val >> 4) & 0x0f0f0f0f0f0f0f0fULL);
    val = (val & 0x00ff00ff00ff00ffULL) + ((val >> 8) & 0x00ff00ff00ff00ffULL);
    val = (val & 0x0000ffff0000ffffULL) + ((val >> 16) & 0x0000ffff0000ffffULL);
    val = (val & 0x00000000ffffffffULL) + ((val >> 32) & 0x00000000ffffffffULL);

    return val;
#endif
}
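
/*
 * Examples: ctpop32(0xF0F0F0F0U) == 16 and ctpop64(~0ULL) == 64; the
 * builtin branch and the parallel-reduction fallback agree on these values.
 */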