C [scrape residue — unrelated commit message from the git web viewer:
C  "Correct PPTP server firewall rules chain."]
C Source path: tomato/davidwu.git / release/src/router/nettle/arm/v6/sha1-compress.asm
C Blob: 69c30e42db542fb6258431c1341570ea65450b5c
C nettle, low-level cryptographics library
C
C Copyright (C) 2013 Niels Möller
C
C The nettle library is free software; you can redistribute it and/or modify
C it under the terms of the GNU Lesser General Public License as published by
C the Free Software Foundation; either version 2.1 of the License, or (at your
C option) any later version.
C
C The nettle library is distributed in the hope that it will be useful, but
C WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
C or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
C License for more details.
C
C You should have received a copy of the GNU Lesser General Public License
C along with the nettle library; see the file COPYING.LIB. If not, write to
C the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
C MA 02111-1301, USA.
20 .file "sha1-compress.asm"
22 define(<STATE>, <r0>)
23 define(<INPUT>, <r1>)
24 define(<SA>, <r2>)
25 define(<SB>, <r3>)
26 define(<SC>, <r4>)
27 define(<SD>, <r5>)
28 define(<SE>, <r6>)
29 define(<T0>, <r7>)
30 define(<SHIFT>, <r8>)
31 define(<WPREV>, <r10>)
32 define(<W>, <r12>)
33 define(<K>, <lr>)
C FIXME: Could avoid a mov with even and odd variants.
C LOAD(i): Fetch the next input word.  sel merges bytes from the
C previous aligned word (WPREV) and the new one, according to the
C APSR.GE flags set up in the prologue; ror by SHIFT then realigns,
C and rev byte-swaps to big-endian.  Result is stored at sp + 4*i
C and left in W for the following round macro.
C NOTE(review): the scrape dropped the closing <>)> of this m4
C define (original line 43); restored here.
define(<LOAD>, <
	ldr	T0, [INPUT], #+4
	sel	W, WPREV, T0
	ror	W, W, SHIFT
	mov	WPREV, T0
	rev	W, W
	str	W, [SP,#eval(4*$1)]
>)
C EXPN(i): SHA-1 message schedule expansion on the 16-word
C circular buffer at sp:
C   W[i] = ROTL1(W[i] ^ W[i+2] ^ W[i+8] ^ W[i+13])   (indices mod 16)
C ROTL by 1 is written as ror #31.  Result is stored back and left
C in W for the following round macro.
C NOTE(review): the scrape dropped the closing <>)> of this m4
C define (original line 54); restored here.
define(<EXPN>, <
	ldr	W, [sp, #+eval(4*$1)]
	ldr	T0, [sp, #+eval(4*(($1 + 2) % 16))]
	eor	W, W, T0
	ldr	T0, [sp, #+eval(4*(($1 + 8) % 16))]
	eor	W, W, T0
	ldr	T0, [sp, #+eval(4*(($1 + 13) % 16))]
	eor	W, W, T0
	ror	W, W, #31
	str	W, [sp, #+eval(4*$1)]
>)
C F1(B,C,D) = D^(B&(C^D))
C ROUND1(A,B,C,D,E):
C   E += ROTL5(A) + F1(B,C,D) + K + W;  B = ROTL30(B)
C (ROTL5 is ror #27, ROTL30 is ror #2.)  Adds are interleaved with
C the F1 computation in T0.
C NOTE(review): the scrape dropped the closing <>)> of this m4
C define (original line 67); restored here.
define(<ROUND1>, <
	eor	T0, $3, $4
	add	$5, $5, K
	and	T0, T0, $2
	add	$5, $5, $1, ror #27
	eor	T0, T0, $4
	add	$5, $5, W
	ror	$2, $2, #2
	add	$5, $5, T0
>)
C F2(B,C,D) = B^C^D
C ROUND2(A,B,C,D,E):
C   E += ROTL5(A) + F2(B,C,D) + K + W;  B = ROTL30(B)
C Also used for the fourth round group (F4 = F2, with K = .LK4).
C NOTE(review): the scrape dropped the closing <>)> of this m4
C define (original line 77); restored here.
define(<ROUND2>, <
	eor	T0, $2, $4
	add	$5, $5, K
	eor	T0, T0, $3
	add	$5, $5, $1, ror #27
	add	$5, $5, W
	ror	$2, $2, #2
	add	$5, $5, T0
>)
C F3(B,C,D) = (B&C) | (D & (B|C)) = (B & (C ^ D)) + (C & D)
C ROUND3(A,B,C,D,E):
C   E += ROTL5(A) + F3(B,C,D) + K + W;  B = ROTL30(B)
C F3 is computed as two terms added separately, per the identity
C above.
C NOTE(review): the scrape dropped the closing <>)> of this m4
C define (original line 89); restored here.
define(<ROUND3>, <
	eor	T0, $3, $4
	add	$5, $5, K
	and	T0, T0, $2
	add	$5, $5, $1, ror #27
	add	$5, $5, T0
	add	$5, $5, W
	and	T0, $3, $4
	ror	$2, $2, #2
	add	$5, $5, T0
>)
C void _nettle_sha1_compress(uint32_t *state, const uint8_t *input)

	.text
	.align 2
C Round constants for the first three 20-round groups.
C (.LK4 is defined after the function body, below.)
.LK1:
	.int 0x5A827999
.LK2:
	.int 0x6ED9EBA1
.LK3:
	.int 0x8F1BBCDC
C _nettle_sha1_compress(state, input): one 64-byte SHA-1 block.
C Clobbers r0-r8, r10, r12, lr; preserves callee-saved regs via
C push/pop.  Uses 64 bytes of stack for the 16-word message
C schedule.  Handles arbitrarily aligned input via sel/GE-flags.
PROLOGUE(_nettle_sha1_compress)
	push	{r4,r5,r6,r7,r8,r10,lr}
	sub	sp, sp, #64		C 16-word message schedule buffer

	C Sets SHIFT to 8*low bits of input pointer. Sets up GE flags
	C as follows, corresponding to bytes to be used from WPREV
	C   SHIFT	0	8	16	24
	C   CPSR.GE	0000	1110	1100	1000
	ands	SHIFT, INPUT, #3
	and	INPUT, INPUT, $-4	C Round INPUT down to word alignment
	ldr	WPREV, [INPUT]
	addne	INPUT, INPUT, #4	C Unaligned input
	lsl	SHIFT, SHIFT, #3
	mov	T0, #0
	movne	T0, #-1
	lsl	W, T0, SHIFT
	uadd8	T0, T0, W		C Sets APSR.GE bits

	ldr	K, .LK1
	ldm	STATE, {SA,SB,SC,SD,SE}

	C Rounds 0-19: F1, K1.  First 16 rounds consume input words.
	LOAD( 0) ROUND1(SA, SB, SC, SD, SE)
	LOAD( 1) ROUND1(SE, SA, SB, SC, SD)
	LOAD( 2) ROUND1(SD, SE, SA, SB, SC)
	LOAD( 3) ROUND1(SC, SD, SE, SA, SB)
	LOAD( 4) ROUND1(SB, SC, SD, SE, SA)

	LOAD( 5) ROUND1(SA, SB, SC, SD, SE)
	LOAD( 6) ROUND1(SE, SA, SB, SC, SD)
	LOAD( 7) ROUND1(SD, SE, SA, SB, SC)
	LOAD( 8) ROUND1(SC, SD, SE, SA, SB)
	LOAD( 9) ROUND1(SB, SC, SD, SE, SA)

	LOAD(10) ROUND1(SA, SB, SC, SD, SE)
	LOAD(11) ROUND1(SE, SA, SB, SC, SD)
	LOAD(12) ROUND1(SD, SE, SA, SB, SC)
	LOAD(13) ROUND1(SC, SD, SE, SA, SB)
	LOAD(14) ROUND1(SB, SC, SD, SE, SA)

	LOAD(15) ROUND1(SA, SB, SC, SD, SE)
	EXPN( 0) ROUND1(SE, SA, SB, SC, SD)
	EXPN( 1) ROUND1(SD, SE, SA, SB, SC)
	EXPN( 2) ROUND1(SC, SD, SE, SA, SB)
	EXPN( 3) ROUND1(SB, SC, SD, SE, SA)

	C Rounds 20-39: F2, K2.
	ldr	K, .LK2
	EXPN( 4) ROUND2(SA, SB, SC, SD, SE)
	EXPN( 5) ROUND2(SE, SA, SB, SC, SD)
	EXPN( 6) ROUND2(SD, SE, SA, SB, SC)
	EXPN( 7) ROUND2(SC, SD, SE, SA, SB)
	EXPN( 8) ROUND2(SB, SC, SD, SE, SA)

	EXPN( 9) ROUND2(SA, SB, SC, SD, SE)
	EXPN(10) ROUND2(SE, SA, SB, SC, SD)
	EXPN(11) ROUND2(SD, SE, SA, SB, SC)
	EXPN(12) ROUND2(SC, SD, SE, SA, SB)
	EXPN(13) ROUND2(SB, SC, SD, SE, SA)

	EXPN(14) ROUND2(SA, SB, SC, SD, SE)
	EXPN(15) ROUND2(SE, SA, SB, SC, SD)
	EXPN( 0) ROUND2(SD, SE, SA, SB, SC)
	EXPN( 1) ROUND2(SC, SD, SE, SA, SB)
	EXPN( 2) ROUND2(SB, SC, SD, SE, SA)

	EXPN( 3) ROUND2(SA, SB, SC, SD, SE)
	EXPN( 4) ROUND2(SE, SA, SB, SC, SD)
	EXPN( 5) ROUND2(SD, SE, SA, SB, SC)
	EXPN( 6) ROUND2(SC, SD, SE, SA, SB)
	EXPN( 7) ROUND2(SB, SC, SD, SE, SA)

	C Rounds 40-59: F3, K3.
	ldr	K, .LK3
	EXPN( 8) ROUND3(SA, SB, SC, SD, SE)
	EXPN( 9) ROUND3(SE, SA, SB, SC, SD)
	EXPN(10) ROUND3(SD, SE, SA, SB, SC)
	EXPN(11) ROUND3(SC, SD, SE, SA, SB)
	EXPN(12) ROUND3(SB, SC, SD, SE, SA)

	EXPN(13) ROUND3(SA, SB, SC, SD, SE)
	EXPN(14) ROUND3(SE, SA, SB, SC, SD)
	EXPN(15) ROUND3(SD, SE, SA, SB, SC)
	EXPN( 0) ROUND3(SC, SD, SE, SA, SB)
	EXPN( 1) ROUND3(SB, SC, SD, SE, SA)

	EXPN( 2) ROUND3(SA, SB, SC, SD, SE)
	EXPN( 3) ROUND3(SE, SA, SB, SC, SD)
	EXPN( 4) ROUND3(SD, SE, SA, SB, SC)
	EXPN( 5) ROUND3(SC, SD, SE, SA, SB)
	EXPN( 6) ROUND3(SB, SC, SD, SE, SA)

	EXPN( 7) ROUND3(SA, SB, SC, SD, SE)
	EXPN( 8) ROUND3(SE, SA, SB, SC, SD)
	EXPN( 9) ROUND3(SD, SE, SA, SB, SC)
	EXPN(10) ROUND3(SC, SD, SE, SA, SB)
	EXPN(11) ROUND3(SB, SC, SD, SE, SA)

	C Rounds 60-79: F4 = F2, K4 (so ROUND2 is reused).
	ldr	K, .LK4
	EXPN(12) ROUND2(SA, SB, SC, SD, SE)
	EXPN(13) ROUND2(SE, SA, SB, SC, SD)
	EXPN(14) ROUND2(SD, SE, SA, SB, SC)
	EXPN(15) ROUND2(SC, SD, SE, SA, SB)
	EXPN( 0) ROUND2(SB, SC, SD, SE, SA)

	EXPN( 1) ROUND2(SA, SB, SC, SD, SE)
	EXPN( 2) ROUND2(SE, SA, SB, SC, SD)
	EXPN( 3) ROUND2(SD, SE, SA, SB, SC)
	EXPN( 4) ROUND2(SC, SD, SE, SA, SB)
	EXPN( 5) ROUND2(SB, SC, SD, SE, SA)

	EXPN( 6) ROUND2(SA, SB, SC, SD, SE)
	EXPN( 7) ROUND2(SE, SA, SB, SC, SD)
	EXPN( 8) ROUND2(SD, SE, SA, SB, SC)
	EXPN( 9) ROUND2(SC, SD, SE, SA, SB)
	EXPN(10) ROUND2(SB, SC, SD, SE, SA)

	EXPN(11) ROUND2(SA, SB, SC, SD, SE)
	EXPN(12) ROUND2(SE, SA, SB, SC, SD)
	EXPN(13) ROUND2(SD, SE, SA, SB, SC)
	EXPN(14) ROUND2(SC, SD, SE, SA, SB)
	EXPN(15) ROUND2(SB, SC, SD, SE, SA)

	C Add the initial state back in (Davies-Meyer feed-forward).
	C Use registers we no longer need.
	ldm	STATE, {INPUT,T0,SHIFT,W,K}
	add	SA, SA, INPUT
	add	SB, SB, T0
	add	SC, SC, SHIFT
	add	SD, SD, W
	add	SE, SE, K
	add	sp, sp, #64
	stm	STATE, {SA,SB,SC,SD,SE}
	pop	{r4,r5,r6,r7,r8,r10,pc}
EPILOGUE(_nettle_sha1_compress)
C Fourth round constant, placed after the function body
C (loaded pc-relative by the ldr K, .LK4 above).
.LK4:
	.int 0xCA62C1D6