// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * INET		An implementation of the TCP/IP protocol suite for the LINUX
 *		operating system.  INET is implemented using the BSD Socket
 *		interface as the means of communication with the user level.
 *
 *		IP/TCP/UDP checksumming routines
 *
 * Authors:	Jorge Cwik, <jorge@laser.satlink.net>
 *		Arnt Gulbrandsen, <agulbra@nvg.unit.no>
 *		Tom May, <ftom@netcom.com>
 *		Andreas Schwab, <schwab@issan.informatik.uni-dortmund.de>
 *		Lots of code moved from tcp.c and ip.c; see those files
 *		for more names.
 *
 * 03/02/96	Jes Sorensen, Andreas Schwab, Roman Hodek:
 *		Fixed some nasty bugs, causing some horrible crashes.
 *		A: At some points, the sum (%0) was used as
 *		   length-counter instead of the length counter
 *		   (%1). Thanks to Roman Hodek for pointing this out.
 *		B: GCC seems to mess up if one uses too many
 *		   data-registers to hold input values and one tries to
 *		   specify d0 and d1 as scratch registers. Letting gcc
 *		   choose these registers itself solves the problem.
 *
 * 1998/8/31	Andreas Schwab:
 *		Zero out rest of buffer on exception in
 *		csum_partial_copy_from_user.
 */

#include <linux/module.h>
#include <net/checksum.h>

/*
 * computes a partial checksum, e.g. for TCP/UDP fragments
 */
__wsum csum_partial(const void *buff, int len, __wsum sum)
{
	unsigned long tmp1, tmp2;
	/*
	 * Experiments with ethernet and slip connections show that buff
	 * is aligned on either a 2-byte or 4-byte boundary.
	 */
	__asm__("movel %2,%3\n\t"
		"btst #1,%3\n\t"	/* Check alignment */
		"jeq 2f\n\t"
		"subql #2,%1\n\t"	/* buff%4==2: treat first word */
		"jgt 1f\n\t"
		"addql #2,%1\n\t"	/* len was == 2, treat only rest */
		"jra 4f\n"
	     "1:\t"
		"addw %2@+,%0\n\t"	/* add first word to sum */
		"clrl %3\n\t"
		"addxl %3,%0\n"		/* add X bit */
	     "2:\t"
		/* unrolled loop for the main part: do 8 longs at once */
		"movel %1,%3\n\t"	/* save len in tmp1 */
		"lsrl #5,%1\n\t"	/* len/32 */
		"jeq 2f\n\t"		/* not enough... */
		"subql #1,%1\n"
	     "1:\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"dbra %1,1b\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n\t"	/* add X bit */
		"clrw %1\n\t"
		"subql #1,%1\n\t"
		"jcc 1b\n"
	     "2:\t"
		"movel %3,%1\n\t"	/* restore len from tmp1 */
		"andw #0x1c,%3\n\t"	/* number of rest longs */
		"jeq 4f\n\t"
		"lsrw #2,%3\n\t"
		"subqw #1,%3\n"
	     "3:\t"
		/* loop for rest longs */
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"dbra %3,3b\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "4:\t"
		/* now check for rest bytes that do not fit into longs */
		"andw #3,%1\n\t"
		"jeq 7f\n\t"
		"clrl %4\n\t"		/* clear tmp2 for rest bytes */
		"subqw #2,%1\n\t"
		"jlt 5f\n\t"
		"movew %2@+,%4\n\t"	/* have rest >= 2: get word */
		"swap %4\n\t"		/* into bits 16..31 */
		"tstw %1\n\t"		/* another byte? */
		"jeq 6f\n"
	     "5:\t"
		"moveb %2@,%4\n\t"	/* have odd rest: get byte */
		"lslw #8,%4\n\t"	/* into bits 8..15; 16..31 untouched */
	     "6:\t"
		"addl %4,%0\n\t"	/* now add rest long to sum */
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "7:\t"
		: "=d" (sum), "=d" (len), "=a" (buff),
		  "=&d" (tmp1), "=&d" (tmp2)
		: "0" (sum), "1" (len), "2" (buff)
	    );
	return sum;
}
EXPORT_SYMBOL(csum_partial);
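
/*
 * For reference only: a minimal portable C sketch of the ones'-complement
 * arithmetic the assembler above performs, assuming a big-endian CPU as on
 * m68k.  It folds to the same 16-bit checksum, though the intermediate
 * 32-bit value can differ, since the asm sums whole longs at a time.  The
 * name csum_partial_model is made up for illustration; this block is not
 * built (note the #if 0) and is not the generic lib/checksum.c code.
 */
#if 0
static __wsum csum_partial_model(const void *buff, int len, __wsum sum)
{
	const unsigned char *p = buff;
	u64 acc = (__force u32)sum;

	while (len > 1) {		/* add 16-bit big-endian words */
		acc += (p[0] << 8) | p[1];
		p += 2;
		len -= 2;
	}
	if (len > 0)			/* odd trailing byte: high half of a word */
		acc += p[0] << 8;
	while (acc >> 32)		/* fold carries back into 32 bits */
		acc = (acc & 0xffffffffULL) + (acc >> 32);
	return (__force __wsum)acc;
}
#endif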
/*
 * copy from user space while checksumming, with exception handling.
 */

__wsum
csum_and_copy_from_user(const void __user *src, void *dst, int len)
{
	/*
	 * GCC doesn't like more than 10 operands for the asm
	 * statements so we have to use tmp2 for the error
	 * case.
	 */
	unsigned long tmp1, tmp2;
	__wsum sum = ~0U;

	__asm__("movel %2,%4\n\t"
		"btst #1,%4\n\t"	/* Check alignment */
		"jeq 2f\n\t"
		"subql #2,%1\n\t"	/* buff%4==2: treat first word */
		"jgt 1f\n\t"
		"addql #2,%1\n\t"	/* len was == 2, treat only rest */
		"jra 4f\n"
	     "1:\n"
	     "10:\t"
		"movesw %2@+,%4\n\t"	/* add first word to sum */
		"addw %4,%0\n\t"
		"movew %4,%3@+\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "2:\t"
		/* unrolled loop for the main part: do 8 longs at once */
		"movel %1,%4\n\t"	/* save len in tmp1 */
		"lsrl #5,%1\n\t"	/* len/32 */
		"jeq 2f\n\t"		/* not enough... */
		"subql #1,%1\n"
	     "1:\n"
	     "11:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n"
	     "12:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n"
	     "13:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n"
	     "14:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n"
	     "15:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n"
	     "16:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n"
	     "17:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n"
	     "18:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %1,1b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n\t"	/* add X bit */
		"clrw %1\n\t"
		"subql #1,%1\n\t"
		"jcc 1b\n"
	     "2:\t"
		"movel %4,%1\n\t"	/* restore len from tmp1 */
		"andw #0x1c,%4\n\t"	/* number of rest longs */
		"jeq 4f\n\t"
		"lsrw #2,%4\n\t"
		"subqw #1,%4\n"
	     "3:\n"
		/* loop for rest longs */
	     "19:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %4,3b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n"		/* add X bit */
	     "4:\t"
		/* now check for rest bytes that do not fit into longs */
		"andw #3,%1\n\t"
		"jeq 7f\n\t"
		"clrl %5\n\t"		/* clear tmp2 for rest bytes */
		"subqw #2,%1\n\t"
		"jlt 5f\n\t"
	     "20:\t"
		"movesw %2@+,%5\n\t"	/* have rest >= 2: get word */
		"movew %5,%3@+\n\t"
		"swap %5\n\t"		/* into bits 16..31 */
		"tstw %1\n\t"		/* another byte? */
		"jeq 6f\n"
	     "5:\n"
	     "21:\t"
		"movesb %2@,%5\n\t"	/* have odd rest: get byte */
		"moveb %5,%3@\n\t"
		"lslw #8,%5\n\t"	/* into bits 8..15; 16..31 untouched */
	     "6:\t"
		"addl %5,%0\n\t"	/* now add rest long to sum */
		"clrl %5\n\t"
		"addxl %5,%0\n\t"	/* add X bit */
	     "7:\n"
		".section .fixup,\"ax\"\n"
		".even\n"
		/* If any exception occurs, return 0 */
	     "90:\t"
		"clrl %0\n"
		"jra 7b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		".long 10b,90b\n"
		".long 11b,90b\n"
		".long 12b,90b\n"
		".long 13b,90b\n"
		".long 14b,90b\n"
		".long 15b,90b\n"
		".long 16b,90b\n"
		".long 17b,90b\n"
		".long 18b,90b\n"
		".long 19b,90b\n"
		".long 20b,90b\n"
		".long 21b,90b\n"
		".previous\n"
		: "=d" (sum), "=d" (len), "=a" (src),
		  "=a" (dst), "=&d" (tmp1), "=d" (tmp2)
		: "0" (sum), "1" (len), "2" (src), "3" (dst)
	    );
	return sum;
}
EXPORT_SYMBOL(csum_and_copy_from_user);
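
/*
 * Usage sketch (illustrative only, not built): a hypothetical caller copies
 * user data and accumulates the checksum in one pass.  A return value of 0
 * signals a fault, which works because the ~0U seed above makes a successful
 * sum always nonzero.  Assumes <linux/errno.h> for EFAULT; the helper name
 * is invented for this example.
 */
#if 0
static int copy_and_csum_example(void __user *usrc, void *kdst, int len,
				 __wsum *csump)
{
	__wsum sum = csum_and_copy_from_user(usrc, kdst, len);

	if (!sum)		/* faulted while reading user memory */
		return -EFAULT;
	*csump = sum;
	return 0;
}
#endif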
/*
 * copy from kernel space while checksumming, otherwise like csum_partial
 */

__wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len)
{
	unsigned long tmp1, tmp2;
	__wsum sum = 0;
	__asm__("movel %2,%4\n\t"
		"btst #1,%4\n\t"	/* Check alignment */
		"jeq 2f\n\t"
		"subql #2,%1\n\t"	/* buff%4==2: treat first word */
		"jgt 1f\n\t"
		"addql #2,%1\n\t"	/* len was == 2, treat only rest */
		"jra 4f\n"
	     "1:\t"
		"movew %2@+,%4\n\t"	/* add first word to sum */
		"addw %4,%0\n\t"
		"movew %4,%3@+\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "2:\t"
		/* unrolled loop for the main part: do 8 longs at once */
		"movel %1,%4\n\t"	/* save len in tmp1 */
		"lsrl #5,%1\n\t"	/* len/32 */
		"jeq 2f\n\t"		/* not enough... */
		"subql #1,%1\n"
	     "1:\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %1,1b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n\t"	/* add X bit */
		"clrw %1\n\t"
		"subql #1,%1\n\t"
		"jcc 1b\n"
	     "2:\t"
		"movel %4,%1\n\t"	/* restore len from tmp1 */
		"andw #0x1c,%4\n\t"	/* number of rest longs */
		"jeq 4f\n\t"
		"lsrw #2,%4\n\t"
		"subqw #1,%4\n"
	     "3:\t"
		/* loop for rest longs */
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %4,3b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n"		/* add X bit */
	     "4:\t"
		/* now check for rest bytes that do not fit into longs */
		"andw #3,%1\n\t"
		"jeq 7f\n\t"
		"clrl %5\n\t"		/* clear tmp2 for rest bytes */
		"subqw #2,%1\n\t"
		"jlt 5f\n\t"
		"movew %2@+,%5\n\t"	/* have rest >= 2: get word */
		"movew %5,%3@+\n\t"
		"swap %5\n\t"		/* into bits 16..31 */
		"tstw %1\n\t"		/* another byte? */
		"jeq 6f\n"
	     "5:\t"
		"moveb %2@,%5\n\t"	/* have odd rest: get byte */
		"moveb %5,%3@\n\t"
		"lslw #8,%5\n"		/* into bits 8..15; 16..31 untouched */
	     "6:\t"
		"addl %5,%0\n\t"	/* now add rest long to sum */
		"clrl %5\n\t"
		"addxl %5,%0\n"		/* add X bit */
	     "7:\t"
		: "=d" (sum), "=d" (len), "=a" (src),
		  "=a" (dst), "=&d" (tmp1), "=&d" (tmp2)
		: "0" (sum), "1" (len), "2" (src), "3" (dst)
	    );
	return sum;
}
EXPORT_SYMBOL(csum_partial_copy_nocheck);
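
/*
 * Equivalence sketch (illustrative only, not built): the copying variant
 * computes the same value as copying first and checksumming afterwards,
 * just in a single pass over the data.  Assumes <linux/string.h> for
 * memcpy; the _model name is invented for this example.
 */
#if 0
static __wsum csum_partial_copy_nocheck_model(const void *src, void *dst,
					      int len)
{
	memcpy(dst, src, len);
	return csum_partial(dst, len, 0);
}
#endif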