#include <linux/linkage.h>
#include <asm/export.h>

#include <asm/asm.h>

/*
 * unsigned int __sw_hweight32(unsigned int w)
 * %rdi: w
 */
ENTRY(__sw_hweight32)

#ifdef CONFIG_X86_64
	movl %edi, %eax				# w arrives in %edi on 64-bit
#endif
	__ASM_SIZE(push,) %__ASM_REG(dx)
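	/*
	 * Parallel ("SWAR") bit count: each step folds the bit counts of
	 * adjacent fields into wider fields.  First, replace every 2-bit
	 * pair of w by the number of bits set in it, using
	 * popcount(pair) = pair - (pair >> 1).
	 */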
	movl %eax, %edx				# w -> t
	shrl %edx				# t >>= 1
	andl $0x55555555, %edx			# t &= 0x55555555
	subl %edx, %eax				# w -= t
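
	/* Sum adjacent 2-bit counts into 4-bit fields. */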
	movl %eax, %edx				# w -> t
	shrl $2, %eax				# w_tmp >>= 2
	andl $0x33333333, %edx			# t     &= 0x33333333
	andl $0x33333333, %eax			# w_tmp &= 0x33333333
	addl %edx, %eax				# w = w_tmp + t
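
	/*
	 * Fold into per-byte counts, then multiply by 0x01010101 so the
	 * top byte accumulates the sum of all four bytes; shift it down.
	 */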
	movl %eax, %edx				# w -> t
	shrl $4, %edx				# t >>= 4
	addl %edx, %eax				# w_tmp += t
	andl $0x0f0f0f0f, %eax			# w_tmp &= 0x0f0f0f0f
	imull $0x01010101, %eax, %eax		# w_tmp *= 0x01010101
	shrl $24, %eax				# w = w_tmp >> 24
	__ASM_SIZE(pop,) %__ASM_REG(dx)
	ret
ENDPROC(__sw_hweight32)
EXPORT_SYMBOL(__sw_hweight32)
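
/*
 * For reference, a rough C equivalent of the 32-bit routine above
 * (the function name here is illustrative only):
 *
 *	unsigned int hweight32_ref(unsigned int w)
 *	{
 *		w -= (w >> 1) & 0x55555555;
 *		w  = (w & 0x33333333) + ((w >> 2) & 0x33333333);
 *		w  = (w + (w >> 4)) & 0x0f0f0f0f;
 *		return (w * 0x01010101) >> 24;
 *	}
 */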

ENTRY(__sw_hweight64)
#ifdef CONFIG_X86_64
	pushq   %rdi
	pushq   %rdx

	movq    %rdi, %rdx                      # w -> t
	movabsq $0x5555555555555555, %rax
	shrq    %rdx                            # t >>= 1
	andq    %rdx, %rax                      # t &= 0x5555555555555555
	movabsq $0x3333333333333333, %rdx
	subq    %rax, %rdi                      # w -= t
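
	/* Sum adjacent 2-bit counts into 4-bit fields, 64 bits at a time. */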
	movq    %rdi, %rax                      # w -> t
	shrq    $2, %rdi                        # w_tmp >>= 2
	andq    %rdx, %rax                      # t     &= 0x3333333333333333
	andq    %rdi, %rdx                      # w_tmp &= 0x3333333333333333
	addq    %rdx, %rax                      # w = w_tmp + t
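
	/*
	 * Fold into per-byte counts, multiply so the top byte accumulates
	 * the total, then shift it down into bits 7:0.
	 */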
	movq    %rax, %rdx                      # w -> t
	shrq    $4, %rdx                        # t >>= 4
	addq    %rdx, %rax                      # w_tmp += t
	movabsq $0x0f0f0f0f0f0f0f0f, %rdx
	andq    %rdx, %rax                      # w_tmp &= 0x0f0f0f0f0f0f0f0f
	movabsq $0x0101010101010101, %rdx
	imulq   %rdx, %rax                      # w_tmp *= 0x0101010101010101
	shrq    $56, %rax                       # w = w_tmp >> 56

	popq    %rdx
	popq    %rdi
	ret
#else /* CONFIG_X86_32 */
	/* We're getting a u64 arg in (%eax,%edx): unsigned long hweight64(__u64 w) */
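	/*
	 * hweight64(w) = hweight32(lower 32 bits) + hweight32(upper 32 bits),
	 * so run the 32-bit routine on each half and add the results.
	 */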
	pushl   %ecx

	call    __sw_hweight32
	movl    %eax, %ecx                      # stash away result
	movl    %edx, %eax                      # second part of input
	call    __sw_hweight32
	addl    %ecx, %eax                      # result

	popl    %ecx
	ret
#endif
ENDPROC(__sw_hweight64)
EXPORT_SYMBOL(__sw_hweight64)