1 /***********************************************************************
3 * This software is part of the ast package *
4 * Copyright (c) 1985-2010 AT&T Intellectual Property *
5 * and is licensed under the *
6 * Common Public License, Version 1.0 *
7 * by AT&T Intellectual Property *
9 * A copy of the License is available at *
10 * http://www.opensource.org/licenses/cpl1.0.txt *
11 * (with md5 checksum 059e8cd6165cb4c31e351f2b69388fd9) *
13 * Information and Software Systems Research *
17 * Glenn Fowler <gsf@research.att.com> *
18 * David Korn <dgk@research.att.com> *
19 * Phong Vo <kpv@research.att.com> *
21 ***********************************************************************/
22 #if defined(_UWIN) && defined(_BLD_ast)
30 /* Allocation with freeing and reallocing of last allocated block only.
32 ** Written by Kiem-Phong Vo, kpv@research.att.com, 01/16/94.
/* lastalloc: allocate a block of at least `size` bytes from region `vm`.
** Per the file banner, this method supports freeing/resizing of the LAST
** allocated block only (the Vmlast discipline).
** NOTE(review): this extract is missing many interleaved source lines
** (declarations of `local`, `s`, `orgsize`; lock handling; the return
** statement and several closing braces) — comments are hedged accordingly.
*/
36 static Void_t
* lastalloc(Vmalloc_t
* vm
, size_t size
)
/* pre-ANSI (K&R) fallback signature — original file splits on __STD_C */
38 static Void_t
* lastalloc(vm
, size
)
43 reg Block_t
*tp
, *next
;
44 reg Seg_t
*seg
, *last
;
/* region-private bookkeeping data for this Vmalloc_t handle */
46 reg Vmdata_t
* vd
= vm
->data
;
/* untrusted region: presumably the region lock is taken here (SETLOCK/
** ISLOCK lines appear to be missing from this extract — TODO confirm) */
51 if(!(local
= vd
->mode
&VM_TRUST
))
54 { CLRINUSE(vd
, inuse
);
/* round the request up to the allocation alignment */
61 size
= size
< ALIGN
? ALIGN
: ROUND(size
,ALIGN
);
/* scan the segment list for a free block big enough; `last` trails `seg`
** so an unusable segment can be unlinked below */
63 { for(last
= NIL(Seg_t
*), seg
= vd
->seg
; seg
; last
= seg
, seg
= seg
->next
)
64 { if(!(tp
= seg
->free
) || (SIZE(tp
)+sizeof(Head_t
)) < size
)
67 { last
->next
= seg
->next
;
74 /* there is no usable free space in region, try extending */
75 if((tp
= (*_Vmextend
)(vm
,size
,NIL(Vmsearch_f
))) )
/* extension failed: clear VM_AGAIN so the retry path is not re-entered */
79 else if(vd
->mode
&VM_AGAIN
)
80 vd
->mode
&= ~VM_AGAIN
;
/* split the block found: keep `size` bytes, the remainder becomes the
** segment's free block (intervening lines missing — TODO confirm) */
85 if((s
= SIZE(tp
)) >= size
)
86 { next
= (Block_t
*)((Vmuchar_t
*)tp
+size
);
87 SIZE(next
) = s
- size
;
91 else seg
->free
= NIL(Block_t
*);
/* record tp as the "last" block — the only block lastfree/lastresize accept */
93 vd
->free
= seg
->last
= tp
;
/* optional tracing for untrusted, traced regions */
95 if(!local
&& (vd
->mode
&VM_TRACE
) && _Vmtrace
)
96 (*_Vmtrace
)(vm
, NIL(Vmuchar_t
*), (Vmuchar_t
*)tp
, orgsize
, 0);
/* notify the discipline's event handler of the allocation */
100 ANNOUNCE(local
, vm
, VM_ALLOC
, (Void_t
*)tp
, vm
->disc
);
/* lastfree: free `data`, which must be the most recently allocated block
** (i.e. equal to vd->free) — any other address is a VM_BADADDR error.
** NOTE(review): this extract is missing interleaved lines (declarations of
** `seg`, `fp`, `s`; return statements; closing braces) — comments hedged.
*/
106 static int lastfree(Vmalloc_t
* vm
, reg Void_t
* data
)
/* pre-ANSI (K&R) fallback signature */
108 static int lastfree(vm
, data
)
116 reg Vmdata_t
* vd
= vm
->data
;
117 reg
int local
, inuse
;
/* untrusted region: check caller locality and the region lock */
123 if(!(local
= vd
->mode
&VM_TRUST
) )
124 { GETLOCAL(vd
, local
);
125 if(ISLOCK(vd
, local
))
126 { CLRINUSE(vd
, inuse
);
/* only the last-allocated block may be freed; otherwise raise the
** discipline's exception handler with VM_BADADDR */
131 if(data
!= (Void_t
*)vd
->free
)
132 { if(!local
&& vm
->disc
->exceptf
)
133 (void)(*vm
->disc
->exceptf
)(vm
,VM_BADADDR
,data
,vm
->disc
);
/* tracing: compute the freed size `s` from the segment layout */
140 if(!local
&& (vd
->mode
&VM_TRACE
) && _Vmtrace
)
142 s
= (Vmuchar_t
*)(seg
->free
) - (Vmuchar_t
*)data
;
143 else s
= (Vmuchar_t
*)BLOCK(seg
->baddr
) - (Vmuchar_t
*)data
;
144 (*_Vmtrace
)(vm
, (Vmuchar_t
*)data
, NIL(Vmuchar_t
*), s
, 0);
/* no "last" block any more */
147 vd
->free
= NIL(Block_t
*);
/* turn the freed space into the segment's free block; size excludes the
** block header (presumably `fp` points at `data` — lines missing) */
150 SIZE(fp
) = ((Vmuchar_t
*)BLOCK(seg
->baddr
) - (Vmuchar_t
*)data
) - sizeof(Head_t
);
152 seg
->last
= NIL(Block_t
*);
/* notify the discipline's event handler of the free */
155 ANNOUNCE(local
, vm
, VM_FREE
, data
, vm
->disc
);
/* lastresize: resize `data` (which should be the last allocated block) to
** `size` bytes, honoring the VM_RSMOVE/VM_RSCOPY/VM_RSZERO flags in `type`.
** A NULL `data` degenerates to lastalloc; a zero `size` presumably
** degenerates to lastfree (the guarding lines are missing from this
** extract).
** NOTE(review): many interleaved lines are missing here (declarations of
** `seg`, `tp`, `oldsize`, `s`, `ds`, `addr`; goto targets; closing braces)
** — comments are hedged accordingly.
*/
162 static Void_t
* lastresize(Vmalloc_t
* vm
, reg Void_t
* data
, size_t size
, int type
)
/* pre-ANSI (K&R) fallback signature */
164 static Void_t
* lastresize(vm
, data
, size
, type
)
175 reg Vmdata_t
* vd
= vm
->data
;
176 reg
int local
, inuse
;
178 Void_t
* orgdata
= NIL(Void_t
*);
/* NULL data: plain allocation */
184 data
= lastalloc(vm
,size
);
/* zero size: plain free (presumably guarded by size==0 — line missing) */
188 { (void)lastfree(vm
,data
);
/* untrusted region: locality, lock and sanity checks */
193 if(!(local
= vd
->mode
&VM_TRUST
))
194 { GETLOCAL(vd
, local
);
195 if(ISLOCK(vd
, local
))
196 { CLRINUSE(vd
, inuse
);
204 if(data
== (Void_t
*)vd
->free
)
207 { /* see if it was one of ours */
208 for(seg
= vd
->seg
; seg
; seg
= seg
->next
)
209 if(data
>= seg
->addr
&& data
< (Void_t
*)seg
->baddr
)
/* reject misaligned addresses or addresses past the last block */
211 if(!seg
|| (VLONG(data
)%ALIGN
) != 0 ||
212 (seg
->last
&& (Vmuchar_t
*)data
> (Vmuchar_t
*)seg
->last
) )
219 /* set 's' to be the current available space */
220 if(data
!= seg
->last
)
221 { if(seg
->last
&& (Vmuchar_t
*)data
< (Vmuchar_t
*)seg
->last
)
222 oldsize
= (Vmuchar_t
*)seg
->last
- (Vmuchar_t
*)data
;
223 else oldsize
= (Vmuchar_t
*)BLOCK(seg
->baddr
) - (Vmuchar_t
*)data
;
227 { s
= (Vmuchar_t
*)BLOCK(seg
->baddr
) - (Vmuchar_t
*)data
;
228 if(!(tp
= seg
->free
) )
231 { oldsize
= (Vmuchar_t
*)tp
- (Vmuchar_t
*)data
;
232 seg
->free
= NIL(Block_t
*);
/* round the new size to alignment, then grow the segment in place if the
** available space `s` is insufficient */
236 size
= size
< ALIGN
? ALIGN
: ROUND(size
,ALIGN
);
237 if(s
< 0 || (ssize_t
)size
> s
)
238 { if(s
>= 0) /* amount to extend */
239 { ds
= size
-s
; ds
= ROUND(ds
,vd
->incr
);
/* ask the discipline's memory function to extend this segment */
240 addr
= (*vm
->disc
->memoryf
)(vm
, seg
->addr
, seg
->extent
,
241 seg
->extent
+ds
, vm
->disc
);
242 if(addr
== seg
->addr
)
247 SIZE(BLOCK(seg
->baddr
)) = BUSY
;
/* in-place growth failed: moving/copying is only allowed if the caller
** passed VM_RSMOVE or VM_RSCOPY */
253 if(!(type
&(VM_RSMOVE
|VM_RSCOPY
)) )
257 if(!(addr
= KPVALLOC(vm
,size
,lastalloc
)) )
/* VM_RSCOPY: preserve min(oldsize, size) bytes of the old contents */
263 { ds
= oldsize
< size
? oldsize
: size
;
264 memcpy(addr
, data
, ds
);
/* return the old space to its segment as a free block */
267 if(s
>= 0 && seg
!= vd
->seg
)
268 { tp
= (Block_t
*)data
;
270 SIZE(tp
) = s
- sizeof(Head_t
);
274 /* new block and size */
277 s
= (Vmuchar_t
*)BLOCK(seg
->baddr
) -
279 seg
->free
= NIL(Block_t
*);
/* leave any surplus beyond `size` as the segment's free block */
286 { if(s
>= (ssize_t
)(size
+sizeof(Head_t
)) )
287 { tp
= (Block_t
*)((Vmuchar_t
*)data
+ size
);
289 SIZE(tp
) = (s
- size
) - sizeof(Head_t
);
/* the resized block becomes the new "last" block */
293 vd
->free
= seg
->last
= (Block_t
*)data
;
295 if(!local
&& (vd
->mode
&VM_TRACE
) && _Vmtrace
)
296 (*_Vmtrace
)(vm
,(Vmuchar_t
*)orgdata
,(Vmuchar_t
*)data
,orgsize
,0);
300 ANNOUNCE(local
, vm
, VM_RESIZE
, data
, vm
->disc
);
/* VM_RSZERO: zero-fill the newly grown tail */
302 done
: if(data
&& (type
&VM_RSZERO
) && size
> oldsize
)
303 memset((Void_t
*)((Vmuchar_t
*)data
+ oldsize
), 0, size
-oldsize
);
/* lastaddr: if `addr` lies inside the last allocated block, return its
** byte offset from the block's start; otherwise report an error
** (presumably by returning -1 — the return line is missing from this
** extract).
*/
311 static long lastaddr(Vmalloc_t
* vm
, Void_t
* addr
)
/* pre-ANSI (K&R) fallback signature */
313 static long lastaddr(vm
, addr
)
318 reg Vmdata_t
* vd
= vm
->data
;
/* untrusted region already locked by another caller: fail */
320 if(!(vd
->mode
&VM_TRUST
) && ISLOCK(vd
,0))
/* addr must fall within [vd->free, segment end) */
322 if(!vd
->free
|| addr
< (Void_t
*)vd
->free
|| addr
>= (Void_t
*)vd
->seg
->baddr
)
324 else return (Vmuchar_t
*)addr
- (Vmuchar_t
*)vd
->free
;
/* lastsize: return the usable size of the last allocated block, given its
** exact starting address `addr`; any other address is an error (presumably
** returning -1 — the return line is missing from this extract).
*/
328 static long lastsize(Vmalloc_t
* vm
, Void_t
* addr
)
/* pre-ANSI (K&R) fallback signature */
330 static long lastsize(vm
, addr
)
335 reg Vmdata_t
* vd
= vm
->data
;
/* untrusted region already locked by another caller: fail */
337 if(!(vd
->mode
&VM_TRUST
) && ISLOCK(vd
,0))
/* only the exact address of the last block is valid */
339 if(!vd
->free
|| addr
!= (Void_t
*)vd
->free
)
/* size is the distance to the segment's free block, or to the segment
** end minus the block header when there is no free block */
341 else if(vd
->seg
->free
)
342 return (Vmuchar_t
*)vd
->seg
->free
- (Vmuchar_t
*)addr
;
343 else return (Vmuchar_t
*)vd
->seg
->baddr
- (Vmuchar_t
*)addr
- sizeof(Head_t
);
/* lastcompact: return unused free space in each segment back to the
** underlying memory discipline via _Vmtruncate.
** NOTE(review): interleaved lines are missing from this extract
** (declarations of `fp`, `s`, `inuse`; loop body braces; the return
** statement) — comments are hedged accordingly.
*/
347 static int lastcompact(Vmalloc_t
* vm
)
/* pre-ANSI (K&R) fallback signature */
349 static int lastcompact(vm
)
354 reg Seg_t
*seg
, *next
;
356 reg Vmdata_t
* vd
= vm
->data
;
/* untrusted region: presumably take the lock here (lines missing) */
360 if(!(vd
->mode
&VM_TRUST
))
362 { CLRINUSE(vd
, inuse
);
/* walk all segments; `next` is captured up front because _Vmtruncate may
** unlink the current segment */
368 for(seg
= vd
->seg
; seg
; seg
= next
)
/* segments without a free block have nothing to give back */
371 if(!(fp
= seg
->free
))
374 seg
->free
= NIL(Block_t
*);
/* if the free block spans the whole segment, the segment itself can be
** released; otherwise only shrink by the free size plus header */
375 if(seg
->size
== (s
= SIZE(fp
)&~BITS
))
377 else s
+= sizeof(Head_t
);
379 if((*_Vmtruncate
)(vm
,seg
,s
,1) == s
)
/* trace the compaction event */
383 if((vd
->mode
&VM_TRACE
) && _Vmtrace
)
384 (*_Vmtrace
)(vm
,(Vmuchar_t
*)0,(Vmuchar_t
*)0,0,0);
/* lastalign: allocate `size` bytes aligned to `align`. Over-allocates via
** lastalloc, advances the returned pointer to an aligned address, and
** returns the unused tail to the segment as a free block.
** NOTE(review): interleaved lines are missing from this extract
** (declarations of `data`, `seg`, `next`; computation of the padded size
** `s`; closing braces) — comments are hedged accordingly.
*/
392 static Void_t
* lastalign(Vmalloc_t
* vm
, size_t size
, size_t align
)
/* pre-ANSI (K&R) fallback signature */
394 static Void_t
* lastalign(vm
, size
, align
)
403 reg
int local
, inuse
;
404 reg
size_t s
, orgsize
= 0, orgalign
= 0;
405 reg Vmdata_t
* vd
= vm
->data
;
/* degenerate requests are rejected */
407 if(size
<= 0 || align
<= 0)
/* untrusted region: locality and lock checks */
411 if(!(local
= vd
->mode
&VM_TRUST
) )
412 { GETLOCAL(vd
,local
);
413 if(ISLOCK(vd
,local
) )
414 { CLRINUSE(vd
, inuse
);
/* normalize size and alignment to the region's granularity */
422 size
= size
<= TINYSIZE
? TINYSIZE
: ROUND(size
,ALIGN
);
423 align
= MULTIPLE(align
,ALIGN
);
/* over-allocate (padded size `s` computed on a missing line) */
426 if(!(data
= (Vmuchar_t
*)KPVALLOC(vm
,s
,lastalloc
)) )
429 /* find the segment containing this block */
430 for(seg
= vd
->seg
; seg
; seg
= seg
->next
)
431 if(seg
->last
== (Block_t
*)data
)
435 /* get a suitably aligned address */
436 if((s
= (size_t)(VLONG(data
)%align
)) != 0)
437 data
+= align
-s
; /**/ASSERT((VLONG(data
)%align
) == 0);
439 /* free the unused tail */
440 next
= (Block_t
*)(data
+size
);
441 if((s
= (seg
->baddr
- (Vmuchar_t
*)next
)) >= sizeof(Block_t
))
443 SIZE(next
) = s
- sizeof(Head_t
);
/* the aligned block becomes the new "last" block */
447 vd
->free
= seg
->last
= (Block_t
*)data
;
449 if(!local
&& !(vd
->mode
&VM_TRUST
) && _Vmtrace
&& (vd
->mode
&VM_TRACE
) )
450 (*_Vmtrace
)(vm
,NIL(Vmuchar_t
*),data
,orgsize
,orgalign
);
454 ANNOUNCE(local
, vm
, VM_ALLOC
, (Void_t
*)data
, vm
->disc
);
457 return (Void_t
*)data
;
460 /* Public method for free-1 allocation */
/* Method table binding the last-alloc functions above into a Vmethod_t;
** exported as Vmlast. NOTE(review): the initializer members (lastalloc,
** lastresize, lastfree, ...) are on lines missing from this extract. */
461 static Vmethod_t _Vmlast
=
473 __DEFINE__(Vmethod_t
*,Vmlast
,&_Vmlast
);