2 * CDE - Common Desktop Environment
4 * Copyright (c) 1993-2012, The Open Group. All rights reserved.
6 * These libraries and programs are free software; you can
7 * redistribute them and/or modify them under the terms of the GNU
8 * Lesser General Public License as published by the Free Software
9 * Foundation; either version 2 of the License, or (at your option)
12 * These libraries and programs are distributed in the hope that
13 * they will be useful, but WITHOUT ANY WARRANTY; without even the
14 * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
15 * PURPOSE. See the GNU Lesser General Public License for more
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with these libraries and programs; if not, write
20 * to the Free Software Foundation, Inc., 51 Franklin Street, Fifth
21 * Floor, Boston, MA 02110-1301 USA
23 /* $XConsortium: malloc.c /main/3 1995/11/01 17:58:31 rswiston $ */
24 /***************************************************************
26 * AT&T - PROPRIETARY *
28 * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF *
29 * AT&T BELL LABORATORIES *
30 * AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN *
31 * ACCORDANCE WITH APPLICABLE AGREEMENTS *
33 * Copyright (c) 1994 AT&T Bell Laboratories *
34 * Unpublished & Not for Publication *
35 * All Rights Reserved *
37 * The copyright notice above does not evidence any *
38 * actual or intended publication of such source code *
40 * This software was created by the *
41 * Software Engineering Research Department *
42 * AT&T Bell Laboratories *
44 * For further information contact *
45 * advsoft@research.att.com *
46 * Randy Hackbarth 908-582-5245 *
47 * Dave Belanger 908-582-7427 *
49 ***************************************************************/
51 /* : : generated by proto : : */
55 #if !defined(__PROTO__)
56 #if defined(__STDC__) || defined(__cplusplus) || defined(_proto) || defined(c_plusplus)
57 #if defined(__cplusplus)
58 #define __MANGLE__ "C"
63 #define __PROTO__(x) x
65 #define __PARAM__(n,o) n
66 #if !defined(__STDC__) && !defined(__cplusplus)
67 #if !defined(c_plusplus)
78 #define __PROTO__(x) ()
79 #define __OTORP__(x) x
80 #define __PARAM__(n,o) o
88 #if defined(__cplusplus) || defined(c_plusplus)
89 #define __VARARG__ ...
93 #if defined(__STDARG__)
94 #define __VA_START__(p,a) va_start(p,a)
96 #define __VA_START__(p,a) va_start(p)
103 #if _std_malloc || _INSTRUMENT_ || cray
115 /**********************************************************************
116 Memory management: malloc(), realloc(), free(), M_brksize.
117 M_brksize: if > 0 is the maximum amount that the bottom free
118 block can grow to. If not SEGMENT, GETCORE() will
119 be used to compact bottom space.
121 The following #-parameters may be redefined:
122 SEGMENT: if defined, memory requests are assumed to be
123 non-contiguous across calls of GETCORE's. SEGMENT defines
124 the size of each GETCORE request.
125 CORESIZE: min number of bytes to use with GETCORE.
126 On a SEGMENT machine, this should be defined
127 as the size of a segment. Default is 4096.
128 GETCORE: a function to get more core memory. If not SEGMENT,
129 GETCORE(0) is assumed to return the next available
130 address. Default is 'sbrk'.
131 ERRCORE: the error code as returned by GETCORE.
132 Default is ((char*)(-1)).
133 MTRACE: if #define-d, code is included to trace and certify that
134 malloc-ed blocks are not corrupted. Available functions
136 int (*Mt_corrupt)(addr,size,stamp)
137 The function called when a corrupted block is detected.
138 addr: address of the corrupted block
139 size: size of the block.
140 stamp: a user/program-defined stamp (below).
141 If Mt_corrupt is NULL, a default function is used.
142 Mt_certify: if not 0, indicates that the arena should
143 be automatically certified on each call to malloc,
144 realloc, or free. Default is 0.
145 Mt_trace: if >= 0, is the file descriptor to write out
146 a trace of all calls to malloc, free, realloc.
148 mt_stamp(VOID *data, VOID *stamp)
149 Set a stamp for a malloc-ed block. This stamp is used
150 in (*corrupt)() calls. For example, a stamp may be
151 an indicator of the place where malloc was called.
153 Check all blocks to see if they are ok. If a block is
154 corrupted, (*corrupt)() is called as above.
156 Print arena statistics.
158 With minor variations, the basic allocation strategy is best-fit.
159 Lists of free blocks of the same size are kept in a splay tree.
160 For results on splay trees, see:
161 Self-Adjusting Binary Trees,
162 DD Sleator & RE Tarjan, JACM 1985.
164 The header of a block contains the size of the data part in bytes.
165 Since the size of a block is 0%4, the low two bits of the header
166 are free and used as follows:
167 BIT0: 1 for busy (block is in use), 0 for free.
168 BIT1: if the block is busy, this bit is 1 if the
169 preceding block in contiguous memory is free.
170 Otherwise, it is always 0.
172 Kiem-Phong Vo, AT&T Bell Laboratories
173 **********************************************************************/
175 int M_brksize; /* the max size for Bottom */
177 /* debugging macros */
/* NOTE(review): VOID is a byte type used for arena address arithmetic
** throughout this file; it is NOT the keyword `void`. */
179 #define VOID unsigned char
/* ASSERT(p): abort the process when invariant p fails (debug builds). */
180 #define ASSERT(p) ((void) ((p) ? 0 : (abort(),0)))
/* COUNT/DISCOUNT: increment/decrement one of the statistics counters below. */
181 #define COUNT(n) ((void) ((n) += 1))
182 #define DISCOUNT(n) ((void) ((n) -= 1))
/* statistics: calls to malloc/realloc/free, and the number of free-tree
** blocks (N_block) and small-list nodes (N_list) currently tracked */
183 static int N_malloc, N_realloc, N_free, N_block, N_list;
191 /* system call to get more core */
193 #define CORESIZE SEGMENT
195 #define CORESIZE (4096)
198 #define ERRCORE ((VOID*)(-1))
200 #define size_t unsigned int
203 extern __MANGLE__ __V_* GETCORE __PROTO__((ssize_t));
204 extern __MANGLE__ __V_* malloc __PROTO__((size_t));
205 extern __MANGLE__ __V_* realloc __PROTO__((__V_*, size_t));
206 extern __MANGLE__ void free __PROTO__((__V_*));
207 extern __MANGLE__ void nomalloc __PROTO__((void));
209 /* function to copy data from one area to another */
212 #define memcpy(to,fr,n) bcopy(fr,to,n)
213 extern __MANGLE__ void bcopy __PROTO__((__V_*, __V_*, int));
216 /* for conveniences */
217 #define uchar unsigned char
/* NIL(p): the null pointer cast to pointer type p, e.g. NIL(TREE*). */
219 #define NIL(p) ((p)(NULL))
/* MINSIZE: smallest usable data size able to hold the free-tree
** bookkeeping fields (a TREE node minus the size WORD that doubles
** as the block header). */
220 #define MINSIZE ((int)(sizeof(TREE)-sizeof(WORD)))
/* ROUND(x,y): round x up to the next multiple of y (assumes y > 0). */
221 #define ROUND(x,y) ((((x)+((y)-1))/(y))*(y))
223 /* the following structures are used to compute a suitable alignment for all types */
236 #define ALIGN ((int)(sizeof(_sa_) - sizeof(_ua_)))
237 #define WORDSIZE ROUND((int)ALIGN,4)
240 { /* the proto-word */
241 size_t w_i; /* an int */
242 struct _t_ *w_p; /* a pointer */
243 VOID w_a[ALIGN]; /* to force alignment */
247 { /* structure of a node in the free tree */
248 WORD t_s; /* size of this element */
249 WORD t_n; /* next in link list */
250 WORD t_l; /* left child */
251 WORD t_r; /* right child */
252 WORD t_d; /* stub to reserve space for self-pointer */
255 /* usable # of bytes in the block */
256 #define SIZE(b) (((b)->t_s).w_i)
258 /* free tree pointers */
259 #define LEFT(b) (((b)->t_l).w_p)
260 #define RIGHT(b) (((b)->t_r).w_p)
262 /* links in linked lists */
263 #define LINK(b) (((b)->t_n).w_p)
/* NOTE(review): BACK reuses the t_l slot also used by LEFT — a block is
** either in the free tree or in a linked list, never both, so the
** overlap is safe. */
264 #define BACK(b) (((b)->t_l).w_p)
266 /* set/test indicator to see if a block is in the tree or in a list */
/* an all-ones RIGHT pointer marks "this block lives in a list, not the tree" */
267 #define SETNOTREE(b) (RIGHT(b) = (TREE*)(~0))
268 #define ISNOTREE(b) (RIGHT(b) == (TREE*)(~0))
270 /* functions to get information on a block */
/* DATA/BLOCK convert between a block header pointer and the user data
** pointer (the header is one WORDSIZE before the data). */
271 #define DATA(b) (((VOID*) (b)) + WORDSIZE)
272 #define BLOCK(d) ((TREE*) ((d) - WORDSIZE))
/* SELFP: address of the self-pointer stored at the end of block b's data. */
273 #define SELFP(b) ((TREE**) (((VOID*) (b)) + SIZE(b)))
/* LAST/NEXT: the physically preceding / following block in contiguous memory. */
274 #define LAST(b) (*((TREE**) (((VOID*) (b)) - WORDSIZE)))
275 #define NEXT(b) ((TREE*) (((VOID*) (b)) + SIZE(b) + WORDSIZE))
/* BOTTOM: true when b is the last block before Baddr, the arena's top. */
276 #define BOTTOM(b) ((DATA(b)+SIZE(b)+WORDSIZE) == Baddr)
278 /* functions to set and test the lowest two bits of a word */
/* Per the header comment above: block sizes are multiples of 4, so the
** low two bits of the size word are free for flags —
** BIT0 = 1 when the block is busy; BIT1 = 1 (on a busy block) when the
** physically preceding block is free. */
279 #define BIT0 (01) /* ....01 */
280 #define BIT1 (02) /* ...010 */
281 #define BITS01 (03) /* ...011 */
282 #define ISBIT0(w) ((w) & BIT0)
283 #define ISBIT1(w) ((w) & BIT1)
284 #define SETBIT0(w) ((w) |= BIT0)
285 #define SETBIT1(w) ((w) |= BIT1)
286 #define CLRBIT0(w) ((w) &= ~BIT0)
287 #define CLRBIT1(w) ((w) &= ~BIT1)
288 #define ISBITS01(w) ((w) & BITS01)
289 #define SETBITS01(w) ((w) |= BITS01)
290 #define CLRBITS01(w) ((w) &= ~BITS01)
/* CPYBITS01: copy the two flag bits from f into w (OR, does not clear first). */
291 #define CPYBITS01(w,f) ((w) |= (f)&BITS01)
293 static TREE *Root, /* root of the free tree */
294 *Bottom; /* the last free chunk in the arena */
296 static VOID *Baddr, /* current high address of the arena */
297 *Lfree, /* last free block with data intact */
298 *Nofree; /* this block is not to be freed */
300 /* lists of small blocks */
/* one singly-linked free list per small size class below MINSIZE */
302 static TREE *List[(MINSIZE-WORDSIZE)/WORDSIZE];
304 /* circular queue of delayed free blocks */
/* NOTE(review): the & masking below requires QSIZE to be a power of two —
** QSIZE is defined on a line not visible here; confirm. */
306 #define QMASK (QSIZE-1)
/* Qhead < 0 encodes "queue empty"; Qhead == Qtail (with Qhead >= 0) is "full". */
308 static int Qhead = -1, Qtail = 0;
309 #define CYCLE(n) (n = (n+1)&QMASK)
310 #define REWIND(n) (n = (n+QMASK)&QMASK)
311 #define QVOID() (Qhead < 0)
312 #define QFULL() (Qtail == Qhead)
/* QENDADD/QENDDEL fix up the empty marker after an insert/remove. */
313 #define QENDADD() (Qhead < 0 ? (Qhead = 0) : 0)
314 #define QENDDEL() (Qtail == Qhead ? (Qhead = -1, Qtail = 0) : 0)
/* ENQUEUE adds at the tail; DEQUEUE removes from the head (FIFO);
** DESTACK removes the most recently enqueued element (LIFO). */
315 #define ENQUEUE(x) ((Qfree[Qtail] = (x)), CYCLE(Qtail), QENDADD())
316 #define DEQUEUE(x) (((x) = Qfree[Qhead]), CYCLE(Qhead), QENDDEL())
317 #define DESTACK(x) (REWIND(Qtail), ((x) = Qfree[Qtail]), QENDDEL())
320 ** Coalesce adjacent blocks and free left-over memory.
322 #define UNLINK(u) { if(LINK(u)) \
323 BACK(LINK(u)) = BACK(u); \
324 LINK(BACK(u)) = LINK(u); /**/ DISCOUNT(N_block); \
326 #define FMERGE(t,n) { n = NEXT(t); \
327 if(!ISBIT0(SIZE(n))) \
333 else Bottom = NIL(TREE*); \
334 SIZE(t) += SIZE(n)+WORDSIZE; \
335 CLRBIT1(SIZE(NEXT(t))); \
336 /**/ ASSERT(ISBIT0(SIZE(NEXT(t)))); \
339 #define BMERGE(t,n) { n = LAST(t); \
343 SIZE(n) += SIZE(t)+WORDSIZE; \
346 #define FREE(f) { if(BOTTOM(f)) \
349 { SETBIT0(SIZE(f)); /**/ ASSERT(!Lfree); \
354 ** Trace malloc/free patterns.
/* MTRACE block layout: the user's data is followed by pad bytes filled
** with MAGIC (written by mt_setinfo, checked by mt_certify), then two
** reserved words at the very end of the block: the true user-requested
** size (USIZE) and a caller-supplied stamp (USTAMP). MTSPACE is the
** per-block overhead added to every request for this bookkeeping. */
361 #define MTSPACE (3*WORDSIZE)
/* MTSIZE: block size with the two flag bits stripped. */
362 #define MTSIZE(b) (SIZE(b)&(~BITS01))
363 #define MTNEXT(b) ((TREE*)(DATA(b) + MTSIZE(b)))
364 #define USIZE(b) (*((size_t*)(DATA(b)+MTSIZE(b)-(2*WORDSIZE))))
365 #define USTAMP(b) (*((VOID**)(DATA(b)+MTSIZE(b)-WORDSIZE)))
/* a ~0 sentinel in the stamp slot marks an already-freed block,
** letting free/realloc diagnose double frees (see mt_didfree) */
366 #define MTSETFREE(b) (USTAMP(b) = (VOID*)(~0))
367 #define MTISFREE(b) (!ISBITS01(SIZE(b)) || USTAMP(b) == (VOID*)(~0))
/* SMAGIC..EMAGIC delimit the MAGIC-filled pad region after the user data. */
368 #define SMAGIC(b) ((uchar*)(DATA(b)+USIZE(b)))
369 #define EMAGIC(b) ((uchar*)(DATA(b)+MTSIZE(b)-(2*WORDSIZE)))
370 #define MAGIC 0253 /* 10101011 pattern */
372 static VOID *Laddr; /* low address of the arena */
373 int (*Mt_corrupt) __PROTO__((VOID*,size_t,VOID*)); /* function to process corrupted blocks */
374 int Mt_certify; /* automatically certify the arena */
375 int Mt_trace = -1; /* print trace of mallocs and frees */
377 /* Convert an int to a string */
378 static itoa __PARAM__((register size_t i, register char* buf), (i, buf)) __OTORP__(register size_t i; register char* buf;)
386 buf[k++] = '0' + i%10;
390 for(i = 0; i < k/2; ++i)
393 buf[i] = buf[(k-i)-1];
398 /* Convert an int to a hex string */
399 static xtoa __PARAM__((register size_t i, register char* buf), (i, buf)) __OTORP__(register size_t i; register char* buf;)
404 static char dig[] = "0123456789ABCDEF";
408 buf += 2 * sizeof(size_t);
410 for (k = 0; k < 2 * sizeof(size_t); k++, i >>= 4)
411 *--buf = dig[i & 0xF];
414 /* internal function for warning on corruption */
415 static int mt_corrupt __PARAM__((register VOID* addr, register size_t usize, register VOID* stamp), (addr, usize, stamp)) __OTORP__(register VOID* addr; register size_t usize; register VOID* stamp;)
420 mesg = "corrupt:addr=";
421 write(2,mesg,strlen(mesg));
422 xtoa((size_t)addr,buf);
423 write(2,buf,strlen(buf));
426 write(2,mesg,strlen(mesg));
428 write(2,buf,strlen(buf));
431 write(2,mesg,strlen(mesg));
432 xtoa((size_t)stamp,buf);
433 write(2,buf,strlen(buf));
438 /* Print trace information */
439 static mt_trace __PARAM__((register VOID* addr, register int type), (addr, type)) __OTORP__(register VOID* addr; register int type;)
444 mesg = type == MTMALLOC ? "malloc" : type == MTFREE ? "free" : "realloc";
445 write(Mt_trace,mesg,strlen(mesg));
448 write(Mt_trace,mesg,strlen(mesg));
449 xtoa((size_t)addr,buf);
450 write(Mt_trace,buf,strlen(buf));
453 write(Mt_trace,mesg,strlen(mesg));
454 itoa(USIZE(BLOCK(addr)),buf);
455 write(Mt_trace,buf,strlen(buf));
457 write(Mt_trace,"\n",1);
460 /* Print a warning */
461 static mt_didfree __PARAM__((register VOID* addr, register int type), (addr, type)) __OTORP__(register VOID* addr; register int type;)
466 mesg = type == MTFREE ? "free:addr=" : "realloc:addr=";
467 write(2,mesg,strlen(mesg));
468 xtoa((size_t)addr,buf);
469 write(2,buf,strlen(buf));
470 mesg = ":already freed\n";
471 write(2,mesg,strlen(mesg));
474 /* Set trace info for a block */
475 static mt_setinfo __PARAM__((register TREE* bp, register size_t usize, int type), (bp, usize, type)) __OTORP__(register TREE* bp; register size_t usize; int type;)
478 register uchar *magic, *emagic;
481 USTAMP(bp) = NIL(VOID*);
482 for(magic = SMAGIC(bp), emagic = EMAGIC(bp); magic < emagic; ++magic)
485 if(Mt_trace >= 0 && type != MTNONE)
486 mt_trace(DATA(bp),type);
490 mt_stamp __PARAM__((register VOID* addr, register VOID* stamp), (addr, stamp)) __OTORP__(register VOID* addr; register VOID* stamp;)
493 USTAMP(BLOCK(addr)) = stamp;
496 /* Certify that no data block has been corrupted */
497 mt_certify __PARAM__((void), ())
500 register TREE *bp, *endb;
501 register uchar *magic, *endm;
504 Mt_corrupt = mt_corrupt;
506 for(bp = (TREE*)Laddr, endb = (TREE*) Baddr; bp < endb; bp = MTNEXT(bp))
508 if(MTISFREE(bp) || MTSIZE(bp) == 0)
510 if(USIZE(bp) >= MTSIZE(bp))
511 (*Mt_corrupt)(DATA(bp),USIZE(bp),USTAMP(bp));
512 else for(magic = SMAGIC(bp), endm = EMAGIC(bp); magic < endm; ++magic)
515 (*Mt_corrupt)(DATA(bp),USIZE(bp),USTAMP(bp));
521 /* Print block statistics */
522 mt_stat __PARAM__((int fd), (fd)) __OTORP__(int fd;)
525 register TREE *bp, *endb;
526 register size_t nbusy, sfree, sbusy, mbusy;
529 nbusy = sfree = sbusy = mbusy = 0;
530 for(bp = (TREE*)Laddr, endb = (TREE*) Baddr; bp < endb; bp = MTNEXT(bp))
532 if(MTISFREE(bp) || MTSIZE(bp) == 0)
543 write(fd,mesg,strlen(mesg));
545 write(fd,buf,strlen(buf));
547 mesg=", busy_blocks=";
548 write(fd,mesg,strlen(mesg));
550 write(fd,buf,strlen(buf));
552 mesg=", user_space=";
553 write(fd,mesg,strlen(mesg));
555 write(fd,buf,strlen(buf));
557 mesg=", malloc_space=";
558 write(fd,mesg,strlen(mesg));
560 write(fd,buf,strlen(buf));
567 ** Get more core. Gaps in memory are noted as busy blocks.
569 static TREE *morecore __PARAM__((register size_t size), (size)) __OTORP__(register size_t size;)
572 register TREE *tp, *bp;
575 /* space for queue of delayed free blocks */
577 size += QSIZE*sizeof(Qfree[0]);
579 /* try to extend the Bottom block if possible */
585 size = ROUND(size,CORESIZE);
586 if((addr = (VOID*)GETCORE(size)) == ERRCORE)
593 { /* contiguous memory, merge with previous bottom */
597 size += SIZE(bp) + 2*WORDSIZE;
601 addr = Baddr-WORDSIZE;
608 if((((size_t)addr)%ALIGN) != 0)
609 { /* make sure alignment is correct */
610 register size_t n = ALIGN - ((size_t)addr)%ALIGN;
611 if((VOID*)GETCORE(n) == ERRCORE)
620 { /* space for the free queue */
621 Qfree = (VOID**) addr;
622 addr += QSIZE*sizeof(Qfree[0]);
623 size -= QSIZE*sizeof(Qfree[0]);
631 /* new bottom address */
634 /* new bottom block */
636 SIZE(tp) = size - 2*WORDSIZE; /**/ASSERT((SIZE(tp)%WORDSIZE) == 0);
638 /* reserved the last word to head any noncontiguous memory */
639 SETBIT0(SIZE(NEXT(tp)));
642 { /* non-contiguous memory, free old bottom block */
643 /**/ ASSERT(!Lfree && QVOID());
652 ** Tree rotation functions
/* RROTATE/LROTATE: single right/left rotation about root r; t is a
** scratch variable and both t and r end up pointing at the new root. */
654 #define RROTATE(t,r) (t = LEFT(r), LEFT(r) = RIGHT(t), RIGHT(t) = r, r = t)
655 #define LROTATE(t,r) (t = RIGHT(r), RIGHT(r) = LEFT(t), LEFT(t) = r, r = t)
/* RLINK/LLINK: used by the top-down splay in t_search() — attach node x
** to the growing right (resp. left) side tree, where r is that tree's
** root-so-far and s the most recently attached node. */
656 #define RLINK(r,s,x) (r ? (s = LEFT(s) = x) : (r = s = x))
657 #define LLINK(r,s,x) (r ? (s = RIGHT(s) = x) : (r = s = x))
/* RTWICE/LTWICE: presumably the second rotation of the zig-zig case of
** the top-down splay — TODO(review): confirm against the full t_search body. */
658 #define RTWICE(t,r) (LEFT(LEFT(r)) = RIGHT(t), RIGHT(t) = r, r = t)
659 #define LTWICE(t,r) (RIGHT(RIGHT(r)) = LEFT(t), LEFT(t) = r, r = t)
662 ** Look up a suitable element in the tree. If found, delete it from
663 ** the tree and return its address.
664 ** This uses the top-down splay strategy.
666 static TREE *t_search __PARAM__((register int size), (size)) __OTORP__(register int size;)
670 register TREE *t, *del, *left, *right, *lroot, *rroot;
672 /* find the right one to delete */
674 lroot = rroot = NIL(TREE*);
676 { /**/ ASSERT(!ISBITS01(size) && !ISBITS01(SIZE(del)));
677 if((cmp = size - SIZE(del)) == 0)
681 if((t = LEFT(del)) == NIL(TREE*))
683 RLINK(rroot,right,del);
686 else if((cmp = size - SIZE(t)) <= 0)
687 { /* left,left case */
691 RLINK(rroot,right,del);
695 { /* left, right case */
696 RLINK(rroot,right,del);
703 if((t = RIGHT(del)) == NIL(TREE*))
705 LLINK(lroot,left,del);
708 else if((cmp = size - SIZE(t)) >= 0)
709 { /* right, right case */
717 { /* right, left case */
718 LLINK(lroot,left,del);
719 RLINK(rroot,right,t);
729 RIGHT(left) = LEFT(del);
734 LEFT(right) = RIGHT(del);
741 RIGHT(left) = NIL(TREE*);
743 { /* get least one > size */
744 LEFT(right) = NIL(TREE*);
746 { /* left zig-zig case */
747 if((t = LEFT(LEFT(rroot))) != NIL(TREE*))
749 else RROTATE(t,rroot);
761 if((t = LINK(del)) != NIL(TREE*))
762 { /* start of a non-singleton list */
764 RIGHT(t) = RIGHT(del);
765 Root = t; /**/ ASSERT(!ISNOTREE(t));
768 { /**/ DISCOUNT(N_list);
769 if((right = RIGHT(del)) != NIL(TREE*))
770 { /* make least elt of right tree the root */
772 { /* left zig-zig case */
773 if((t = LEFT(LEFT(right))) != NIL(TREE*))
775 else RROTATE(t,right);
777 LEFT(right) = LEFT(del);
780 else Root = LEFT(del);
781 } /**/ DISCOUNT(N_block);
789 __V_* malloc __PARAM__((register size_t size), (size)) __OTORP__(register size_t size;)
792 register TREE *tp, *np, *fp;
795 /* save true size and make size large enough to hold our data */
796 register size_t mtsize = size;
797 size = size <= (MINSIZE-MTSPACE) ? MINSIZE : size + MTSPACE;
801 /**/ COUNT(N_malloc);
803 size = size == 0 ? WORDSIZE : ROUND(size,WORDSIZE);
806 { /* see if the last free block can be used */
812 { /* exact match, use it as is */
817 mt_setinfo(fp,mtsize,MTMALLOC);
821 else if(n >= MINSIZE && size >= MINSIZE && !ISBIT1(n))
822 { /* see if good enough */
824 if(!BOTTOM(fp) && SIZE(fp) >= size)
829 free(Lfree = DATA(fp));
833 { /**/ ASSERT(!Lfree && QVOID());
834 n = size/WORDSIZE - 1;
835 if(List[n] == NIL(TREE*))
836 { /* get a bunch of these small blocks */
837 if(!(tp = (TREE*) malloc((size+WORDSIZE)*LGET)))
845 for(i = LGET-1; i > 0; --i)
848 tp = LINK(tp) = NEXT(tp);
851 LINK(tp) = NIL(TREE*);
859 { /* normal malloc requests */
860 if(Root && (tp = t_search(size)) != NIL(TREE*))
861 CLRBIT1(SIZE(NEXT(tp)));
862 else if((tp = Bottom) != NIL(TREE*) && SIZE(tp) >= size)
864 else if((tp = morecore(size)) == NIL(TREE*))
871 } /**/ ASSERT(tp && !ISBITS01(SIZE(tp)));
873 if((n = SIZE(tp)-size) >= (MINSIZE+WORDSIZE))
874 { /* the leftover is enough for a new free piece */
877 SIZE(np) = n-WORDSIZE;
884 /* peel out a pending free block */
885 if(!Lfree && !QVOID())
888 /* return the allocated space */
890 mt_setinfo(tp,mtsize,MTMALLOC);
899 __V_* realloc __PARAM__((__V_* old, register size_t size), (old, size)) __OTORP__(__V_* old; register size_t size;)
902 register TREE *tp, *np;
906 register size_t mtsize = size;
909 size = size < (MINSIZE-MTSPACE) ? MINSIZE : size + MTSPACE;
912 if(MTISFREE(BLOCK((VOID*)old)))
913 mt_didfree((VOID*)old,MTREALLOC);
915 mt_trace((VOID*)old,MTREALLOC);
918 /**/ COUNT(N_realloc);
924 { /* free everything except old */
930 size = size == 0 ? WORDSIZE : ROUND(size,WORDSIZE);
931 tp = BLOCK((VOID*)old);
933 if(size >= MINSIZE && ts >= MINSIZE)
936 if((n = SIZE(tp)-size) < 0)
937 { /* growing, try forward merging */
941 if(n < 0 && BOTTOM(tp) && (VOID*)GETCORE(0) == Baddr)
942 { /* try extending core */
944 if((tp = morecore(size)) != NIL(TREE*))
949 if(n >= (MINSIZE+WORDSIZE))
950 { /* left over is enough for a new piece */
953 SIZE(np) = (n-WORDSIZE);
959 CPYBITS01(SIZE(tp),ts);
960 if(n >= 0) /* got it */
963 mt_setinfo(tp,mtsize,MTMALLOC);
969 MTSETFREE(BLOCK((VOID*)old));
971 /* call malloc to get a new block */
973 if((newp = (VOID*)malloc(size)) != NIL(VOID*))
974 memcpy(newp,old,ts < size ? ts : size);
976 /**/ ASSERT(!QFULL());
979 else ENQUEUE((VOID*)old);
981 mt_setinfo(BLOCK(newp),mtsize,MTNONE);
989 void free __PARAM__((__V_* aold), (aold)) __OTORP__(__V_* aold;)
992 register VOID* old = (VOID*)aold;
994 register TREE *tp, *np, *sp;
995 register VOID *dequeue;
1001 if(MTISFREE(BLOCK(old)))
1002 mt_didfree(old,MTFREE);
1003 else MTSETFREE(BLOCK(old));
1005 mt_trace(old,MTFREE);
1013 dequeue = NIL(VOID*);
1015 { /* this is a normal free call */
1017 { /* make queue space for current Lfree */
1019 DEQUEUE(dequeue); /**/ ASSERT(!QFULL());
1025 else Lfree = NIL(VOID*);
1033 if((size = SIZE(tp)) < MINSIZE)
1035 size = size/WORDSIZE - 1;
1036 LINK(tp) = List[size];
1041 /* merge adjacent free blocks */
1042 CLRBITS01(SIZE(tp));
1048 { /* bottom block */
1051 if(M_brksize > 0 && (VOID*)GETCORE(0) == Baddr)
1053 M_brksize = ROUND(M_brksize,CORESIZE);
1054 if((size = SIZE(tp)-MINSIZE) >= M_brksize)
1056 size = (size/CORESIZE)*CORESIZE;
1058 if((old = (VOID*)GETCORE(0)) != Baddr)
1061 SIZE(tp) = (old-WORDSIZE) - DATA(tp);
1062 SIZE(NEXT(tp)) = BIT0;
1070 /* tell next block that this one is free */
1071 SETBIT1(SIZE(NEXT(tp))); /**/ ASSERT(ISBIT0(SIZE(NEXT(tp))));
1073 /* leaf insert into the free tree */
1074 LEFT(tp) = RIGHT(tp) = LINK(tp) = NIL(TREE*);
1076 /**/ COUNT(N_block);
1079 { /**/ COUNT(N_list);
1090 if((sp = LEFT(np)) != NIL(TREE*))
1093 { /**/ COUNT(N_list);
1098 else if(SIZE(np) < size)
1100 if((sp = RIGHT(np)) != NIL(TREE*))
1103 { /**/ COUNT(N_list);
1108 else /* SIZE(np) == size */
1110 if((sp = LINK(np)) != NIL(TREE*))
1122 if(dequeue || QVOID())