/* mc68020 __mpn_lshift -- Shift left a low-level natural-number integer.

Copyright (C) 1996, 1998, 2012 Free Software Foundation, Inc.

This file is part of the GNU MP Library.

The GNU MP Library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version.

The GNU MP Library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details.

You should have received a copy of the GNU Lesser General Public License
along with the GNU MP Library; see the file COPYING.LIB.  If not, write to
the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston,
MA 02111-1307, USA. */
#include "sysdep.h"
#include "asm-syntax.h"

/* Register names for the C arguments.  d4 and d6 are among the registers
   saved in the prologue; the remaining data and address registers are used
   as scratch.  */
#define res_ptr a1
#define s_ptr a0
#define s_size d6
#define cnt d4
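/* For reference, the operation performed by this routine corresponds to the
   rough C sketch below.  It is illustrative only and is neither assembled
   nor compiled; the name ref_lshift and the use of a 32-bit unsigned long
   as the limb type are assumptions of the sketch, not part of the original
   code.  The routine shifts the s_size-limb operand at s_ptr left by cnt
   bits (1 <= cnt < 32), stores the low s_size limbs of the result at
   res_ptr and returns the bits shifted out of the most significant limb:

	typedef unsigned long mp_limb_t;

	mp_limb_t
	ref_lshift (mp_limb_t *res_ptr, const mp_limb_t *s_ptr,
		    long s_size, unsigned int cnt)
	{
	  unsigned int tnc = 32 - cnt;
	  mp_limb_t high = s_ptr[s_size - 1];
	  mp_limb_t retval = high >> tnc;
	  long i;

	  for (i = s_size - 1; i > 0; i--)
	    {
	      mp_limb_t low = s_ptr[i - 1];
	      res_ptr[i] = (high << cnt) | (low >> tnc);
	      high = low;
	    }
	  res_ptr[0] = high << cnt;
	  return retval;
	}
*/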
	TEXT
ENTRY(__mpn_lshift)

/* Save used registers on the stack.  */
	moveml	R(d2)-R(d6)/R(a2),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (6*4)
	cfi_rel_offset (R(d2), 0)
	cfi_rel_offset (R(d3), 4)
	cfi_rel_offset (R(d4), 8)
	cfi_rel_offset (R(d5), 12)
	cfi_rel_offset (R(d6), 16)
	cfi_rel_offset (R(a2), 20)
/* Copy the arguments to registers.  */
	movel	MEM_DISP(sp,28),R(res_ptr)
	movel	MEM_DISP(sp,32),R(s_ptr)
	movel	MEM_DISP(sp,36),R(s_size)
	movel	MEM_DISP(sp,40),R(cnt)
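/* The code at L(Lspecial) below walks the operands upward from the least
   significant limb and shifts by exactly one bit, so it may be used only
   when cnt is 1 and the destination does not overlap the source from above
   (res_ptr <= s_ptr, or the operands are disjoint); the checks that follow
   dispatch accordingly.  */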
	cmpl	#1,R(cnt)
	bne	L(Lnormal)		/* only a 1-bit shift can use the special code */

	cmpl	R(s_ptr),R(res_ptr)
	bls	L(Lspecial)		/* jump if s_ptr >= res_ptr */
#if (defined (__mc68020__) || defined (__NeXT__) || defined(mc68020))
	lea	MEM_INDX1(s_ptr,s_size,l,4),R(a2)
#else /* not mc68020 */
	movel	R(s_size),R(d0)
	asll	#2,R(d0)		/* d0 = size of the operand in bytes */
	lea	MEM_INDX(s_ptr,d0,l),R(a2)
#endif
	cmpl	R(res_ptr),R(a2)
	bls	L(Lspecial)		/* jump if res_ptr >= s_ptr + s_size */
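/* General case: the limbs are processed from the most significant end
   downward, which is the safe direction when the destination overlaps the
   source from above.  d5 holds the complementary shift count 32 - cnt, used
   to extract the bits that move between adjacent limbs.  */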
L(Lnormal:)
	moveql	#32,R(d5)
	subl	R(cnt),R(d5)		/* d5 = 32 - cnt */

#if (defined (__mc68020__) || defined (__NeXT__) || defined(mc68020))
	lea	MEM_INDX1(s_ptr,s_size,l,4),R(s_ptr)
	lea	MEM_INDX1(res_ptr,s_size,l,4),R(res_ptr)
#else /* not mc68020 */
	movel	R(s_size),R(d0)
	asll	#2,R(d0)		/* scale the limb count to bytes */
	addl	R(d0),R(s_ptr)
	addl	R(d0),R(res_ptr)
#endif
	movel	MEM_PREDEC(s_ptr),R(d2)	/* fetch the most significant limb */
	movel	R(d2),R(d0)
	lsrl	R(d5),R(d0)		/* compute carry limb */
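/* Main loop, unrolled twice: limbs are read downward through s_ptr and the
   shifted results stored downward through res_ptr, both with predecrement
   addressing, alternating between d1 and d2 as the working registers.  */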
L(Loop:)
	movel	MEM_PREDEC(s_ptr),R(d2)
	movel	R(d1),MEM_PREDEC(res_ptr)
	movel	MEM_PREDEC(s_ptr),R(d1)
	movel	R(d2),MEM_PREDEC(res_ptr)
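/* Loop control: dbf decrements and tests only the low 16 bits of s_size,
   so when it falls through, one is borrowed from the upper 16 bits and the
   loop is resumed until the full 32-bit count is exhausted.  */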
	dbf	R(s_size),L(Loop)
	subl	#0x10000,R(s_size)
	bcc	L(Loop)

	movel	R(d1),MEM_PREDEC(res_ptr) /* store least significant limb */
/* Restore used registers from stack frame.  */
	moveml	MEM_POSTINC(sp),R(d2)-R(d6)/R(a2)
	cfi_adjust_cfa_offset (-6*4)
	rts				/* the carry limb is returned in d0 */
/* We loop from least significant end of the arrays, which is only
   permissible if the source and destination don't overlap, since the
   function is documented to work for overlapping source and destination.  */
	cfi_adjust_cfa_offset (6*4)
L(Lspecial:)
	clrl	R(d0)			/* initialize carry */
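/* Shift by one bit, working upward from the least significant limb:
   addxl d2,d2 doubles the limb and adds the X (extend) bit carried out of
   the previous, lower limb, which propagates the shift across the operand.  */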
L(LLoop:)
	movel	MEM_POSTINC(s_ptr),R(d2)
	addxl	R(d2),R(d2)
	movel	R(d2),MEM_POSTINC(res_ptr)
	movel	MEM_POSTINC(s_ptr),R(d2)
	addxl	R(d2),R(d2)
	movel	R(d2),MEM_POSTINC(res_ptr)
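/* Loop control as above, with one extra twist: the X bit must survive the
   flag-clobbering subtraction, so it is parked in the low bit of d0 and the
   lsrl below moves it back into X before the loop is resumed.  */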
	dbf	R(s_size),L(LLoop)
	addxl	R(d0),R(d0)		/* save cy in lsb */
	subl	#0x10000,R(s_size)
	bcs	L(LLend)
	lsrl	#1,R(d0)		/* restore cy */
	bra	L(LLoop)
L(LLend:)
/* Restore used registers from stack frame.  */
	moveml	MEM_POSTINC(sp),R(d2)-R(d6)/R(a2)
	cfi_adjust_cfa_offset (-6*4)
	rts				/* the shifted-out bit is returned in d0 */
END(__mpn_lshift)