Add GNU LGPL headers to all .c .C and .h files
[oweals/cde.git] / cde / programs / dtksh / ksh93 / src / lib / libast / vmalloc / vmbest.c
1 /*
2  * CDE - Common Desktop Environment
3  *
4  * Copyright (c) 1993-2012, The Open Group. All rights reserved.
5  *
6  * These libraries and programs are free software; you can
7  * redistribute them and/or modify them under the terms of the GNU
8  * Lesser General Public License as published by the Free Software
9  * Foundation; either version 2 of the License, or (at your option)
10  * any later version.
11  *
12  * These libraries and programs are distributed in the hope that
13  * they will be useful, but WITHOUT ANY WARRANTY; without even the
14  * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
15  * PURPOSE. See the GNU Lesser General Public License for more
16  * details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with these libraries and programs; if not, write
20  * to the Free Software Foundation, Inc., 51 Franklin Street, Fifth
21  * Floor, Boston, MA 02110-1301 USA
22  */
23 /* $XConsortium: vmbest.c /main/2 1996/05/08 20:01:03 drk $ */
24 /***************************************************************
25 *                                                              *
26 *                      AT&T - PROPRIETARY                      *
27 *                                                              *
28 *         THIS IS PROPRIETARY SOURCE CODE LICENSED BY          *
29 *                          AT&T CORP.                          *
30 *                                                              *
31 *                Copyright (c) 1995 AT&T Corp.                 *
32 *                     All Rights Reserved                      *
33 *                                                              *
34 *           This software is licensed by AT&T Corp.            *
35 *       under the terms and conditions of the license in       *
36 *       http://www.research.att.com/orgs/ssr/book/reuse        *
37 *                                                              *
38 *               This software was created by the               *
39 *           Software Engineering Research Department           *
40 *                    AT&T Bell Laboratories                    *
41 *                                                              *
42 *               For further information contact                *
43 *                     gsf@research.att.com                     *
44 *                                                              *
45 ***************************************************************/
46 #include        "vmhdr.h"
47
48 /*      Best-fit allocation method. This is based on a best-fit strategy
49 **      using a splay tree for storage of lists of free blocks of the same
50 **      size. Recent free blocks may be cached for fast reuse.
51 **
52 **      Written by (Kiem-)Phong Vo, kpv@research.att.com, 01/16/94.
53 */
54
#ifdef DEBUG
/* Allocation statistics and consistency-checking state. These are only
** compiled in under DEBUG and are maintained by the COUNT() calls in the
** allocation routines and recomputed wholesale by vmcheck() below.
*/
static int      N_free;         /* # of free calls                      */
static int      N_alloc;        /* # of alloc calls                     */
static int      N_resize;       /* # of resize calls                    */
static int      N_wild;         /* # allocated from the wild block      */
static int      N_cache;        /* # allocated from cache               */
static int      N_last;         /* # allocated from last free block     */
static int      P_junk;         /* # of semi-free pieces                */
static int      P_free;         /* # of free pieces                     */
static int      P_busy;         /* # of busy pieces                     */
static size_t   M_junk;         /* max size of a junk piece             */
static size_t   M_free;         /* max size of a free piece             */
static size_t   M_busy;         /* max size of a busy piece             */
static size_t   S_free;         /* total free space                     */
static size_t   S_junk;         /* total junk space                     */
static int      Vmcheck=0;      /* 1 if checking                        */
71
72 /* Check to see if a block is in the free tree */
73 #if __STD_C
74 static vmintree(Block_t* node, Block_t* b)
75 #else
76 static vmintree(node,b)
77 Block_t*        node;
78 Block_t*        b;
79 #endif
80 {       Block_t*        t;
81
82         for(t = node; t; t = LINK(t))
83                 if(t == b)
84                         return 1;
85         if(LEFT(node) && vmintree(LEFT(node),b))
86                 return 1;
87         if(RIGHT(node) && vmintree(RIGHT(node),b))
88                 return 1;
89         return 0;
90 }
91
92 /* check to see if the tree is in good shape */
93 #if __STD_C
94 static vmchktree(Block_t* node)
95 #else
96 static vmchktree(node)
97 Block_t*        node;
98 #endif
99 {       Block_t*        t;
100
101         for(t = LINK(node); t; t = LINK(t))
102                 /**/ASSERT(SIZE(t) == SIZE(node));
103         if((t = LEFT(node)) )
104         {       /**/ASSERT(SIZE(t) < SIZE(node));
105                 vmchktree(t);
106         }
107         if((t = RIGHT(node)) )
108         {       /**/ASSERT(SIZE(t) > SIZE(node));
109                 vmchktree(t);
110         }
111         return 1;
112 }
113
114 #if __STD_C
115 static vmcheck(Vmdata_t* vd, size_t size, int wild)
116 #else
117 static vmcheck(vd, size, wild)
118 Vmdata_t*       vd;
119 size_t          size;   /* if > 0, checking that no large free block >size      */
120 int             wild;   /* if != 0, do above but allow wild to be >size         */
121 #endif
122 {
123         reg Seg_t       *seg, *sp;
124         reg Block_t     *b, *endb, *t, *np;
125         reg size_t      s;
126
127         if(!Vmcheck)
128                 return 1;
129
130         /**/ASSERT(size <= 0 || !vd->free);
131         /**/ASSERT(!vd->root || vmchktree(vd->root));
132
133         P_junk = P_free = P_busy = 0;
134         M_junk = M_free = M_busy = S_free = 0;
135         for(seg = vd->seg; seg; seg = seg->next)
136         {       b = SEGBLOCK(seg);
137                 endb = (Block_t*)(seg->baddr - sizeof(Head_t));
138                 while(b < endb )
139                 {       s = SIZE(b)&~BITS;
140                         np = (Block_t*)((uchar*)DATA(b) + s);
141
142                         if(!ISBUSY(SIZE(b)) )
143                         {       /**/ ASSERT(!ISJUNK(SIZE(b)));
144                                 /**/ ASSERT(!ISPFREE(SIZE(b)));
145                                 /**/ ASSERT(TINIEST(b) || SEG(b)==seg);
146                                 /**/ ASSERT(ISBUSY(SIZE(np)));
147                                 /**/ ASSERT(ISPFREE(SIZE(np)));
148                                 /**/ ASSERT(*SELF(b) == b);
149                                 /**/ ASSERT(size<=0 || SIZE(b)<size ||
150                                             SIZE(b) < MAXTINY ||
151                                             (wild && b==vd->wild));
152                                 P_free += 1;
153                                 S_free += s;
154                                 if(s > M_free)
155                                         M_free = s;
156
157                                 if(s < MAXTINY)
158                                 {       for(t = TINY(vd)[INDEX(s)]; t; t = LINK(t))
159                                                 if(b == t)
160                                                         goto fine;
161                                 }
162                                 if(b == vd->wild)
163                                 {       /**/ASSERT(VMWILD(vd,b));
164                                         goto fine;
165                                 }
166                                 if(vd->root && vmintree(vd->root,b))
167                                         goto fine;
168
169                                 /**/ ASSERT(0);
170                         }
171                         else if(ISJUNK(SIZE(b)) )
172                         {       /**/ ASSERT(ISBUSY(SIZE(b)));
173                                 /**/ ASSERT(!ISPFREE(SIZE(np)));
174                                 P_junk += 1;
175                                 S_junk += s;
176                                 if(s > M_junk)
177                                         M_junk = s;
178
179                                 if(b == vd->free)
180                                         goto fine;
181                                 if(s < MAXCACHE)
182                                 {       for(t = CACHE(vd)[INDEX(s)]; t; t = LINK(t))
183                                                 if(b == t)
184                                                         goto fine;
185                                 }
186                                 for(t = CACHE(vd)[S_CACHE]; t; t = LINK(t))
187                                         if(b == t)
188                                                 goto fine;
189                                 /**/ ASSERT(0);
190                         }
191                         else
192                         {       /**/ ASSERT(!ISPFREE(SIZE(b)) || !ISBUSY(SIZE(LAST(b))));
193                                 /**/ ASSERT(SEG(b) == seg);
194                                 /**/ ASSERT(!ISPFREE(SIZE(np)));
195                                 P_busy += 1;
196                                 if(s > M_busy)
197                                         M_busy = s;
198                                 goto fine;
199                         }
200                 fine:
201                         b = np;
202                 }
203         }
204
205         return 1;
206 }
207
208 #endif /*DEBUG*/
209
/* Tree rotation functions */
/* Top-down splay primitives used by bestsearch(): RROTATE/LROTATE are the
** usual single right/left rotations; RLINK/LLINK hang a detached subtree
** onto the growing right/left assembly tree of the top-down splay.
*/
#define RROTATE(x,y)    (LEFT(x) = RIGHT(y), RIGHT(y) = (x), (x) = (y))
#define LROTATE(x,y)    (RIGHT(x) = LEFT(y), LEFT(y) = (x), (x) = (y))
#define RLINK(s,x)      ((s) = LEFT(s) = (x))
#define LLINK(s,x)      ((s) = RIGHT(s) = (x))
/* Find and delete a suitable element in the free tree.
** The free tree is a splay tree keyed on block size; equal-size blocks
** hang off a single tree node on its LINK chain.  A top-down splay for
** `size' detaches the best-fitting node: an exact match if one exists,
** otherwise the smallest block larger than `size'.  If `wanted' is
** non-nil the caller (via the DELETE macro) requires that exact block
** to be the one removed.  Returns the detached block, or NIL if no
** block of at least `size' bytes exists.
*/
#if __STD_C
static Block_t* bestsearch(Vmdata_t* vd, reg size_t size, Block_t* wanted)
#else
static Block_t* bestsearch(vd, size, wanted)
Vmdata_t*       vd;
reg size_t      size;
Block_t*        wanted;
#endif
{
        reg size_t      s;
        reg Block_t     *t, *root, *l, *r;
        Block_t         link;

        /* extracting a tiniest block from its list */
        if((root = wanted) && size == TINYSIZE)
        {       reg Seg_t*      seg;

                /* unlink from the doubly-linked TINY(vd)[0] list */
                l = TLEFT(root);
                if((r = LINK(root)) )
                        TLEFT(r) = l;
                if(l)
                        LINK(l) = r;
                else    TINY(vd)[0] = r;

                /* tiniest blocks carry no SEG field (the space is used for
                ** list links), so the owning segment must be recomputed by
                ** an address-range search.
                */
                seg = vd->seg;
                if(!seg->next)
                        SEG(root) = seg;
                else for(;; seg = seg->next)
                {       if((uchar*)root > (uchar*)seg->addr && (uchar*)root < seg->baddr)
                        {       SEG(root) = seg;
                                break;
                        }
                }

                return root;
        }

        /* find the right one to delete */
        l = r = &link;
        if((root = vd->root) ) do
        {       /**/ ASSERT(!ISBITS(size) && !ISBITS(SIZE(root)));
                if(size == (s = SIZE(root)) )
                        break;
                if(size < s)
                {       if((t = LEFT(root)) )
                        {       if(size <= (s = SIZE(t)) )
                                {       RROTATE(root,t);
                                        if(size == s)
                                                break;
                                        t = LEFT(root);
                                }
                                else
                                {       LLINK(l,t);
                                        t = RIGHT(t);
                                }
                        }
                        RLINK(r,root);
                }
                else
                {       if((t = RIGHT(root)) )
                        {       if(size >= (s = SIZE(t)) )
                                {       LROTATE(root,t);
                                        if(size == s)
                                                break;
                                        t = RIGHT(root);
                                }
                                else
                                {       RLINK(r,t);
                                        t = LEFT(t);
                                }
                        }
                        LLINK(l,root);
                }
        } while((root = t) );

        if(root)        /* found it, now isolate it */
        {       RIGHT(l) = LEFT(root);
                LEFT(r) = RIGHT(root);
        }
        else            /* nothing exactly fit  */
        {       LEFT(r) = NIL(Block_t*);
                RIGHT(l) = NIL(Block_t*);

                /* grab the least one from the right tree */
                if((root = LEFT(&link)) )
                {       while((t = LEFT(root)) )
                                RROTATE(root,t);
                        LEFT(&link) = RIGHT(root);
                }
        }

        /* reassemble: promote the next same-size block if any, otherwise
        ** graft the left assembly tree onto the right one.
        */
        if(root && (r = LINK(root)) )
        {       /* head of a link list, use next one for root */
                LEFT(r) = RIGHT(&link);
                RIGHT(r) = LEFT(&link);
        }
        else if(!(r = LEFT(&link)) )
                r = RIGHT(&link);
        else /* graft left tree to right tree */
        {       while((t = LEFT(r)) )
                        RROTATE(r,t);
                LEFT(r) = RIGHT(&link);
        }
        vd->root = r;

        /**/ ASSERT(!wanted || wanted == root);
        return root;
}
325
/* Reclaim all delayed free blocks into the free tree.
** Cached (JUNK) blocks from vd->free and from every cache list with
** index >= c are coalesced with adjacent free neighbors, then filed
** into the tiny lists, the wild-block slot, or the free tree.  A block
** equal to `wanted' is merged but deliberately left out of all free
** structures so the caller (bestresize) can consume it directly.
** Returns the number of cached blocks examined.
*/
#if __STD_C
static bestreclaim(reg Vmdata_t* vd, Block_t* wanted, int c)
#else
static bestreclaim(vd, wanted, c)
reg Vmdata_t*   vd;
Block_t*        wanted;
int             c;
#endif
{
        reg size_t      size, s;
        reg Block_t     *fp, *np, *t, *list, **cache;
        reg int         n, count;
        Block_t         tree;

        /* fold the "last freed" block into its cache list first */
        if((fp = vd->free) )
        {       LINK(fp) = *(cache = CACHE(vd) + C_INDEX(SIZE(fp)) );
                *cache = fp;
                vd->free = NIL(Block_t*);
        }

        LINK(&tree) = NIL(Block_t*);
        count = 0;
        for(n = S_CACHE; n >= c; --n)
        {       list = *(cache = CACHE(vd) + n);
                *cache = NIL(Block_t*);
                while((fp = list) )
                {       /* Note that below here we allow ISJUNK blocks to be
                        ** forward-merged even though they are not removed from
                        ** the list immediately. In this way, the list is
                        ** scanned only once. It works because the LINK and SIZE
                        ** fields are not destroyed during the merging. This can
                        ** be seen by observing that a tiniest block has a 2-word
                        ** header and a 2-word body. Merging a tiniest block
                        ** (1seg) and the next block (2seg) looks like this:
                        **      1seg  size  link  left  2seg size link left ....
                        **      1seg  size  link  left  rite xxxx xxxx .... self
                        ** After the merge, the 2seg word is replaced by the RIGHT
                        ** pointer of the new block and somewhere beyond the
                        ** two xxxx fields, the SELF pointer will replace some
                        ** other word. The important part is that the two xxxx
                        ** fields are kept intact.
                        */
                        count += 1;
                        list = LINK(list);
                        size = SIZE(fp);
                        if(!ISJUNK(size))       /* already done */
                                continue;

                        if(ISPFREE(size))       /* backward merge */
                        {       fp = LAST(fp);
                                s = SIZE(fp);
                                DELETE(vd,fp,INDEX(s),t,bestsearch);
                                size = (size&~BITS) + s + sizeof(Head_t);
                        }
                        else    size &= ~BITS;

                        for(;;) /* forward merge */
                        {       np = (Block_t*)((uchar*)fp+size+sizeof(Head_t));
                                s = SIZE(np);   /**/ASSERT(s > 0);
                                if(!ISBUSY(s))
                                {       /* a truly free neighbor: pull it out of the
                                        ** wild slot or the free structures.
                                        */
                                        if(np == vd->wild)
                                                vd->wild = NIL(Block_t*);
                                        else    DELETE(vd,np,INDEX(s),t,bestsearch);
                                }
                                else if(ISJUNK(s) && C_INDEX(s) >= c )
                                {       /* a junk neighbor still on some list being
                                        ** processed: absorb it and mark it done by
                                        ** zeroing its size (see note above).
                                        */
                                        SIZE(np) = 0;
                                        CLRBITS(s);
                                }
                                else    break;
                                size += s + sizeof(Head_t);
                        }
                        SIZE(fp) = size;

                        if(fp == wanted) /* about to be consumed by bestresize */
                                continue;

                        /* tell next block that this one is free */
                        SETPFREE(SIZE(np)); /**/ ASSERT(ISBUSY(SIZE(np)) );
                        *(SELF(fp)) = fp;

                        /* a block that reaches the segment end becomes the wild block */
                        if(np->body.data >= vd->seg->baddr)
                        {       vd->wild = fp;
                                continue;
                        }

                        /* tiny block goes to tiny list */
                        if(size < MAXTINY)
                        {       s = INDEX(size);
                                np = LINK(fp) = TINY(vd)[s];
                                if(s == 0)      /* TINIEST block */
                                {       if(np)
                                                TLEFT(np) = fp;
                                        TLEFT(fp) = NIL(Block_t*);
                                }
                                else
                                {       if(np)
                                                LEFT(np)  = fp;
                                        LEFT(fp) = NIL(Block_t*);
                                        SETLINK(fp);
                                }
                                TINY(vd)[s] = fp;
                                continue;
                        }

                        /* don't put in free tree yet because they may be merged soon */
                        np = &tree;
                        if((LINK(fp) = LINK(np)) )
                                LEFT(LINK(fp)) = fp;
                        LINK(np) = fp;
                        LEFT(fp) = np;
                        SETLINK(fp);
                }
        }

        /* insert all free blocks into the free tree */
        for(list = LINK(&tree); list; )
        {       fp = list;
                list = LINK(list);

                /**/ASSERT(!ISBITS(SIZE(fp)));
                /**/ASSERT(ISBUSY(SIZE(NEXT(fp))) );
                /**/ASSERT(ISPFREE(SIZE(NEXT(fp))) );
                LEFT(fp) = RIGHT(fp) = LINK(fp) = NIL(Block_t*);
                if(!(np = vd->root) )   /* inserting into an empty tree */
                {       vd->root = fp;
                        continue;
                }

                size = SIZE(fp);
                while(1)        /* leaf insertion */
                {       if((s = SIZE(np)) > size)
                        {       if((t = LEFT(np)) )
                                        np = t;
                                else
                                {       LEFT(np) = fp;
                                        break;
                                }
                        }
                        else if(s < size)
                        {       if((t = RIGHT(np)) )
                                        np = t;
                                else
                                {       RIGHT(np) = fp;
                                        break;
                                }
                        }
                        else /* s == size */
                        {       /* same size: join the node's LINK chain */
                                if((t = LINK(np)) )
                                {       LINK(fp) = t;
                                        LEFT(t) = fp;
                                }
                                LINK(np) = fp;
                                LEFT(fp) = np;
                                SETLINK(fp);
                                break;
                        }
                }
        }

        return count;
}
488
/* Allocate a block of at least `size' bytes from region vm.
** Search order: an exact-size cached (JUNK) block, the most recently
** freed block (vd->free), the tiny lists, the free tree (after
** reclaiming cached pieces), the wild block at the end of the region,
** and finally new memory obtained through _Vmextend.  An oversized
** winner is split and the remainder kept for future allocations.
** Returns a pointer to the data area, or NIL on failure or when the
** region is locked.
*/
#if __STD_C
static Void_t* bestalloc(Vmalloc_t* vm, reg size_t size )
#else
static Void_t* bestalloc(vm,size)
Vmalloc_t*      vm;     /* region allocating from       */
reg size_t      size;   /* desired block size           */
#endif
{
        reg Vmdata_t*   vd = vm->data;
        reg size_t      s;
        reg Block_t     *tp, *np, **cache;
        reg int         local;
        size_t          orgsize;

        /**/ COUNT(N_alloc);

        /* non-trusted regions are locked; orgsize is kept for tracing only */
        if(!(local = vd->mode&VM_TRUST))
        {       GETLOCAL(vd,local);
                if(ISLOCK(vd,local) )
                        return NIL(Void_t*);
                SETLOCK(vd,local);
                orgsize = size;
        }

        /**/ ASSERT(HEADSIZE == sizeof(Head_t));
        /**/ ASSERT(BODYSIZE == sizeof(Body_t));
        /**/ ASSERT((ALIGN%(BITS+1)) == 0 );
        /**/ ASSERT((sizeof(Head_t)%ALIGN) == 0 );
        /**/ ASSERT((sizeof(Body_t)%ALIGN) == 0 );
        /**/ ASSERT((TINYSIZE%ALIGN) == 0 );
        /**/ ASSERT(sizeof(Block_t) == (sizeof(Body_t)+sizeof(Head_t)) );

        /* for ANSI requirement that malloc(0) returns non-NULL pointer */
        size = size <= TINYSIZE ? TINYSIZE : ROUND(size,ALIGN);

        /* fastest path: an exact-size block on the cache list */
        if(size < MAXCACHE && (tp = *(cache = CACHE(vd) + INDEX(size)) ) )
        {       *cache = LINK(tp);
                CLRJUNK(SIZE(tp));
                /**/COUNT(N_cache);
                goto done;
        }

        if((tp = vd->free) )    /* allocate from last free piece */
        {       /**/ASSERT(ISBUSY(SIZE(tp)) );
                /**/ASSERT(ISJUNK(SIZE(tp)) );
                /**/COUNT(N_last);

                vd->free = NIL(Block_t*);
                if((s = SIZE(tp)) < size)
                {       /* too small: demote it to a cache list */
                        LINK(tp) = *(cache = CACHE(vd)+C_INDEX(s));
                        *cache = tp;
                }
                else
                {       /* split off the remainder if it can hold a tiny block */
                        if(s >= size + (sizeof(Head_t)+TINYSIZE) )
                        {       SIZE(tp) = size;
                                np = NEXT(tp);
                                SEG(np) = SEG(tp);
                                SIZE(np) = ((s&~BITS) - (size+sizeof(Head_t)))|JUNK|BUSY;
                                vd->free = np;
                                SIZE(tp) |= s&BITS;
                        }
                        CLRJUNK(SIZE(tp));
                        goto done;
                }
        }

        for(;;)
        {       for(;;) /* best-fit - more or less */
                {       /* try the tiny lists from the needed size upward */
                        for(s = INDEX(size); s < S_TINY; ++s)
                        {       if((tp = TINY(vd)[s]) )
                                {       DELETE(vd,tp,s,np,bestsearch);
                                        CLRPFREE(SIZE(NEXT(tp)));
                                        goto got_block;
                                }
                        }

                        if(CACHE(vd)[S_CACHE])  /* reclaim big pieces */
                                bestreclaim(vd,NIL(Block_t*),S_CACHE);
                        if(vd->root && (tp = bestsearch(vd,size,NIL(Block_t*))) )
                                goto got_block;
                        /* nothing suitable: reclaim everything and retry once */
                        if(bestreclaim(vd,NIL(Block_t*),0) == 0)
                                break;
                }

                /**/ASSERT(!vd->free);
                if((tp = vd->wild) && SIZE(tp) >= size)
                {       /**/ASSERT(vmcheck(vd,size,1));
                        /**/COUNT(N_wild);
                        vd->wild = NIL(Block_t*);
                        goto got_block;
                }

                /* out of usable space: grow the region; VM_AGAIN means the
                ** discipline asked for one more full retry before failing.
                */
                /**/ASSERT(vmcheck(vd,size,0) );
                if((tp = (*_Vmextend)(vm,size,bestsearch)) )
                        goto got_block;
                else if(vd->mode&VM_AGAIN)
                        vd->mode &= ~VM_AGAIN;
                else
                {       CLRLOCK(vd,local);
                        return NIL(Void_t*);
                }
        }

got_block:
        /**/ ASSERT(!ISBITS(SIZE(tp)));
        /**/ ASSERT(SIZE(tp) >= size);
        /**/ ASSERT((SIZE(tp)%ALIGN) == 0);
        /**/ ASSERT(!vd->free);

        /* tell next block that we are no longer a free block */
        CLRPFREE(SIZE(NEXT(tp)));       /**/ ASSERT(ISBUSY(SIZE(NEXT(tp))));

        /* split off the excess if it can hold at least a tiny block */
        if((s = SIZE(tp)-size) >= (sizeof(Head_t)+TINYSIZE) )
        {       SIZE(tp) = size;

                np = NEXT(tp);
                SEG(np) = SEG(tp);
                SIZE(np) = (s - sizeof(Head_t)) | BUSY|JUNK;

                if(!vd->root)
                        vd->free = np;
                else if(VMWILD(vd,np))
                {       /* the remainder reaches the segment end: file it as wild */
                        SIZE(np) &= ~BITS;
                        *SELF(np) = np;
                        SETPFREE(SIZE(NEXT(np)));
                        vd->wild = np;
                }
                else
                {       LINK(np) = *(cache = CACHE(vd) + C_INDEX(SIZE(np)));
                        *cache = np;
                }
        }

        SETBUSY(SIZE(tp));

done:
        if(!local && (vd->mode&VM_TRACE) && _Vmtrace && VMETHOD(vd) == VM_MTBEST)
                (*_Vmtrace)(vm,NIL(uchar*),(uchar*)DATA(tp),orgsize);

        CLRLOCK(vd,local);
        return DATA(tp);
}
631
/* Check whether addr is a valid address in region vm.
** External callers get the byte offset of addr within its enclosing
** busy block, or -1L if addr does not belong to a live allocation (or
** the region is locked).  Internal callers (bestfree/bestresize via
** KPVADDR, detected by GETLOCAL) get 0 only when addr is exactly the
** data address of a busy, non-junk block; anything else raises the
** VM_BADADDR exception through the region's discipline.
*/
#if __STD_C
static long bestaddr(Vmalloc_t* vm, Void_t* addr )
#else
static long bestaddr(vm, addr)
Vmalloc_t*      vm;     /* region allocating from       */
Void_t*         addr;   /* address to check             */
#endif
{
        reg Seg_t*      seg;
        reg Block_t     *b, *endb;
        reg long        offset;
        reg Vmdata_t*   vd = vm->data;
        reg int         local;

        if(!(local = vd->mode&VM_TRUST) )
        {       GETLOCAL(vd,local);
                if(ISLOCK(vd,local))
                        return -1L;
                SETLOCK(vd,local);
        }

        /* locate the segment whose address range contains addr */
        offset = -1L;
        for(seg = vd->seg; seg; seg = seg->next)
        {       b = SEGBLOCK(seg);
                endb = (Block_t*)(seg->baddr - sizeof(Head_t));
                if((uchar*)addr > (uchar*)b && (uchar*)addr < (uchar*)endb)
                        break;
        }

        if(local && !(vd->mode&VM_TRUST) ) /* from bestfree or bestresize */
        {       b = BLOCK(addr);
                if(seg && SEG(b) == seg && ISBUSY(SIZE(b)) && !ISJUNK(SIZE(b)) )
                        offset = 0;
                if(offset != 0 && vm->disc->exceptf)
                        (void)(*vm->disc->exceptf)(vm,VM_BADADDR,addr,vm->disc);
        }
        else if(seg)
        {       /* external call: walk the segment to find the enclosing block */
                while(b < endb)
                {       reg uchar*      data = (uchar*)DATA(b);
                        reg size_t      size = SIZE(b)&~BITS;

                        if((uchar*)addr >= data && (uchar*)addr < data+size)
                        {       if(ISJUNK(SIZE(b)) || !ISBUSY(SIZE(b)))
                                        offset = -1L;
                                else    offset = (uchar*)addr - data;
                                goto done;
                        }

                        b = (Block_t*)((uchar*)DATA(b) + size);
                }
        }

done:
        CLRLOCK(vd,local);
        return offset;
}
688
/* Free the block holding `data' back to region vm.
** The block is only marked JUNK and cached for fast reuse; actual
** coalescing into the free tree is deferred to bestreclaim.  Small
** blocks go onto the size-indexed cache list; larger ones become
** vd->free (the "last freed" slot) or join the large-piece cache.
** Returns 0 on success (including data == NIL, per ANSI free(NULL)),
** -1 if the region is locked or data is not a valid address.
*/
#if __STD_C
static bestfree(Vmalloc_t* vm, Void_t* data )
#else
static bestfree(vm, data )
Vmalloc_t*      vm;
Void_t*         data;
#endif
{
        reg Vmdata_t*   vd = vm->data;
        reg Block_t     *bp, *fp, **cache;
        reg size_t      s;
        reg int         local;

        /**/COUNT(N_free);

        if(!data)       /* ANSI-ism */
                return 0;

        if(!(local = vd->mode&VM_TRUST) )
        {       if(ISLOCK(vd,0) )
                        return -1;
                /* validate data before touching the block header */
                if(KPVADDR(vm,data,bestaddr) != 0 )
                        return -1;
                SETLOCK(vd,0);
        }

        bp = BLOCK(data);       /**/ASSERT(ISBUSY(SIZE(bp)) && !ISJUNK(SIZE(bp)));
        SETJUNK(SIZE(bp));
        if((s = SIZE(bp)) < MAXCACHE)
        {       LINK(bp) = *(cache = CACHE(vd) + INDEX(s));
                *cache = bp;
        }
        else
        {       /* a non-wild large block may take over the "last freed" slot;
                ** whatever it displaces (or the block itself) is cached.
                */
                if((!(fp = vd->free) || VMWILD(vd,fp)) && !VMWILD(vd,bp) )
                        vd->free = bp;
                else    fp = bp;
                if(fp)
                {       LINK(fp) = *(cache = CACHE(vd) + C_INDEX(SIZE(fp)));
                        *cache = fp;
                }
        }

        if(!local && _Vmtrace && (vd->mode&VM_TRACE) && VMETHOD(vd) == VM_MTBEST )
                (*_Vmtrace)(vm,(uchar*)data,NIL(uchar*),SIZE(BLOCK(data))&~BITS);

        CLRLOCK(vd,0);
        return 0;
}
737
/* Resize the block at data to at least size bytes.
** Flags (VM_RS*): VM_RSZERO zero-fills any extension, VM_RSCOPY copies
** old contents into a newly allocated block, VM_RSFREE permits freeing
** the old block when the data must move.  Without VM_RSFREE, a resize
** that cannot be done in place returns NIL.
** Returns the (possibly moved) data address, or NIL on failure.
*/
#if __STD_C
static Void_t* bestresize(Vmalloc_t* vm, Void_t* data, reg size_t size, int flags)
#else
static Void_t* bestresize(vm,data,size,flags)
Vmalloc_t*	vm;		/* region allocating from	*/
Void_t*		data;		/* old block of data		*/
reg size_t	size;		/* new size			*/
int		flags;		/* VM_RS*			*/
#endif
{
	reg Vmdata_t*	vd = vm->data;
	reg Block_t	*rp, *np, *t, **cache;
	reg size_t	s, bs;
	reg int		local, *nd, *od;
	Void_t*		orgdata;
	size_t		orgsize;

	/**/ COUNT(N_resize);

	/* ANSI realloc semantics: resizing NIL is an allocation */
	if(!data)
	{	if((data = bestalloc(vm,size)) && (flags & VM_RSZERO))
		{	s = (size+sizeof(int)-1)/sizeof(int);
			for(nd = (int*)data; s-- > 0; )
				*nd++ = 0;
		}
		return data;
	}
	/* resizing to zero is a free */
	if(size == 0)
	{	(void)bestfree(vm,data);
		return NIL(Void_t*);
	}

	/* untrusted region: validate the address and lock out reentry */
	if(!(local = vd->mode&VM_TRUST) )
	{	GETLOCAL(vd,local);
		if(ISLOCK(vd,local) )
			return NIL(Void_t*);
		if(!local && KPVADDR(vm,data,bestaddr) != 0 )
			return NIL(Void_t*);
		SETLOCK(vd,local);

		orgdata = data;	/* for tracing */
		orgsize = size;
	}

	size = size <= TINYSIZE ? TINYSIZE : ROUND(size,ALIGN);
	rp = BLOCK(data);	/**/ASSERT(ISBUSY(SIZE(rp)) && !ISJUNK(SIZE(rp)));
	if((bs = SIZE(rp)) < size)	/* block too small: try to grow in place */
	{	CLRBITS(SIZE(rp));
		np = NEXT(rp);
		do	/* forward merge as much as possible */
		{	s = SIZE(np);
			if(np == vd->free)	/* absorb the preferred free block */
			{	vd->free = NIL(Block_t*);
				CLRBITS(s);
			}
			else if(ISJUNK(s) )	/* cached junk: reclaim, then retry */
			{	CPYBITS(SIZE(rp),bs);
				bestreclaim(vd,np,C_INDEX(s));
				s = SIZE(np);
				bs = SIZE(rp);
				CLRBITS(SIZE(rp));
			}
			else if(!ISBUSY(s) )	/* genuine free block: unlink it */
			{	if(np == vd->wild)
					vd->wild = NIL(Block_t*);
				else	DELETE(vd,np,INDEX(s),t,bestsearch); 
			}
			else	break;		/* busy neighbor stops the merge */

			SIZE(rp) += (s += sizeof(Head_t));
			np = (Block_t*)((uchar*)np + s);
			CLRPFREE(SIZE(np));
		} while(SIZE(rp) < size);

		/* still short: if rp ends its segment, try extending the segment
		   through the discipline's memory function */
		if(SIZE(rp) < size && size > vd->incr && SEGWILD(rp) )
		{	reg Seg_t*	seg;

			s = (size - SIZE(rp)) + sizeof(Head_t);
			s = ROUND(s,vd->incr);
			seg = SEG(rp);
			if((*vm->disc->memoryf)(vm,seg->addr,seg->extent,seg->extent+s,
				      vm->disc) == seg->addr )
			{	SIZE(rp) += s;
				seg->extent += s;
				seg->size += s;
				seg->baddr += s;
				SEG(NEXT(rp)) = seg;
				SIZE(NEXT(rp)) = BUSY;
			}
		}

		CPYBITS(SIZE(rp),bs);
	}

	/* If a buffer is resized, it is likely to be resized again. So we
	   make the increment a reasonable size to reduce future work */
#define INCREMENT	128
	if((s = SIZE(rp)) >= (size + (INCREMENT+TINYSIZE+sizeof(Head_t))) )
	{	/* block is much larger than needed: split off the excess */
		SIZE(rp) = size;
		np = NEXT(rp);
		SEG(np) = SEG(rp);
		SIZE(np) = (((s&~BITS)-size) - sizeof(Head_t))|BUSY|JUNK;
		CPYBITS(SIZE(rp),s);
		rp = np;
		goto do_free;
	}
	else if(s < size)	/* in-place growth failed: must move the data */
	{	if(!(flags & VM_RSFREE))
			data = NIL(Void_t*);	/* caller forbids freeing the old block */
		else
		{	od = (int*)data;
			if(size < (s&~BITS)+INCREMENT)
				size = (s&~BITS)+INCREMENT;
			if((data = KPVALLOC(vm,size,bestalloc)) )
			{	if(flags & VM_RSZERO)
				{	bs = (size-s+sizeof(int)-1)/sizeof(int);
					for(nd = (int*)((char*)data+s); bs-- > 0; )
						*nd++ = 0;
				}
				if(flags & VM_RSCOPY)
				{	nd = (int*)data;
					for(s /= sizeof(int); s-- > 0; )
						*nd++ = *od++;
				}
				SETJUNK(SIZE(rp));
			do_free:	/* push rp onto the junk cache; keep wild out of vd->free */
				if((np = vd->free) && VMWILD(vd,np) )
				{	vd->free = rp;
					rp = np;
				}
				cache = CACHE(vd) + C_INDEX(SIZE(rp));
				LINK(rp) = *cache;
				*cache = rp;
			}
		}
	}

	if(!local && _Vmtrace && data && (vd->mode&VM_TRACE) && VMETHOD(vd) == VM_MTBEST)
		(*_Vmtrace)(vm, (uchar*)orgdata, (uchar*)data, orgsize);

	CLRLOCK(vd,local);
	return data;
}
881
/* Return the usable size of the busy block whose data address is addr,
** or -1L if addr is not the data address of a live busy block in this
** region (or the region is locked).
*/
#if __STD_C
static long bestsize(Vmalloc_t* vm, Void_t* addr )
#else
static long bestsize(vm, addr)
Vmalloc_t*	vm;	/* region allocating from	*/
Void_t*		addr;	/* address to check		*/
#endif
{
	reg Seg_t*	seg;
	reg Block_t	*b, *endb;
	reg long	size;
	reg Vmdata_t*	vd = vm->data;

	if(!(vd->mode&VM_TRUST) )
	{	if(ISLOCK(vd,0))
			return -1L;
		SETLOCK(vd,0);
	}

	size = -1L;
	for(seg = vd->seg; seg; seg = seg->next)
	{	b = SEGBLOCK(seg);
		endb = (Block_t*)(seg->baddr - sizeof(Head_t));
		/* skip segments whose address range cannot contain addr */
		if((uchar*)addr <= (uchar*)b || (uchar*)addr >= (uchar*)endb)
			continue;
		/* walk blocks of this segment in address order */
		while(b < endb)
		{	if(addr == DATA(b))
			{	/* found: only a busy, non-junked block has a valid size */
				if(!ISBUSY(SIZE(b)) || ISJUNK(SIZE(b)) )
					size = -1L;
				else	size = (long)SIZE(b)&~BITS;
				goto done;
			}
			else if((uchar*)addr <= (uchar*)b)
				break;	/* walked past addr: not a block data address */

			b = (Block_t*)((uchar*)DATA(b) + (SIZE(b)&~BITS) );
		}
	}

done:
	CLRLOCK(vd,0);
	return size;
}
925
926 #if __STD_C
927 static bestcompact(Vmalloc_t* vm)
928 #else
929 static bestcompact(vm)
930 Vmalloc_t*      vm;
931 #endif
932 {
933         reg Seg_t       *seg, *next;
934         reg Block_t     *bp, *t;
935         reg size_t      size;
936         reg Vmdata_t*   vd = vm->data;
937
938         if(!(vd->mode&VM_TRUST) )
939         {       if(ISLOCK(vd,0))
940                         return -1;
941                 SETLOCK(vd,0);
942         }
943
944         bestreclaim(vd,NIL(Block_t*),0);
945
946         for(seg = vd->seg; seg; )
947         {       next = seg->next;
948                 bp = BLOCK(seg->baddr);
949                 if(!ISPFREE(SIZE(bp)) )
950                         goto loop;
951
952                 bp = LAST(bp);  /**/ ASSERT(!ISBUSY(SIZE(bp)));
953                 size = SIZE(bp);
954                 if(VMWILD(vd,bp))
955                         vd->wild = NIL(Block_t*);
956                 else    DELETE(vd,bp,INDEX(size),t,bestsearch);
957                 CLRPFREE(SIZE(NEXT(bp)));
958
959                 if(size < seg->size)
960                         size += sizeof(Head_t);
961
962                 if((*_Vmtruncate)(vm,seg,size,1) >= 0)
963                 {       if((size = (seg->baddr - ((uchar*)bp) - sizeof(Head_t))) > 0)
964                                 SIZE(bp) = size - sizeof(Head_t);
965                         else    bp = NIL(Block_t*);
966                 }
967
968                 if(bp)
969                 {       /**/ ASSERT(SIZE(bp) >= TINYSIZE);
970                         /**/ ASSERT(SEGWILD(bp));
971                         SIZE(bp) |= BUSY|JUNK;
972                         LINK(bp) = CACHE(vd)[C_INDEX(SIZE(bp))];
973                         CACHE(vd)[C_INDEX(SIZE(bp))] = bp;
974                 }
975
976         loop:
977                 seg = next;
978         }
979
980         CLRLOCK(vd,0);
981         return 0;
982 }
983
/* Allocate a block of at least size bytes whose data address is a
** multiple of align.  Returns the aligned data address, or NIL on
** failure or if either argument is zero.
*/
#if __STD_C
static Void_t* bestalign(Vmalloc_t* vm, size_t size, size_t align)
#else
static Void_t* bestalign(vm, size, align)
Vmalloc_t*	vm;
size_t		size;
size_t		align;
#endif
{
	reg uchar	*data;
	reg Block_t	*tp, *np;
	reg Seg_t*	seg;
	reg size_t	s;
	reg Vmdata_t*	vd = vm->data;

	/* NOTE(review): size/align are unsigned, so "<= 0" means "== 0" */
	if(size <= 0 || align <= 0)
		return NIL(Void_t*);

	if(!(vd->mode&VM_TRUST) )
	{	if(ISLOCK(vd,0) )
			return NIL(Void_t*);
		SETLOCK(vd,0);
	}

	/* over-allocate by align so an aligned address is guaranteed to exist */
	align = MULTIPLE(align,ALIGN);
	s = (size <= TINYSIZE ? TINYSIZE : ROUND(size,ALIGN)) + align;
	if(!(data = (uchar*)KPVALLOC(vm,s,bestalloc)) )
		goto done;

	tp = BLOCK(data);
	seg = SEG(tp);

	/* get an aligned address that we can live with */
	if((s = (size_t)(ULONG(data)%align)) != 0)
		data += align-s;
	np = BLOCK(data);
	/* ensure the gap before np can hold a complete block header */
	if(((uchar*)np - (uchar*)tp) < sizeof(Block_t) )
		data += align;

	/* np is the usable block of data */
	np = BLOCK(data);
	s  = (uchar*)np - (uchar*)tp;
	SIZE(np) = ((SIZE(tp)&~BITS) - s)|BUSY;
	SEG(np) = seg;
	data = (uchar*)DATA(np);

	/* now free the left part */
	SIZE(tp) = (s - sizeof(Head_t)) | (SIZE(tp)&BITS) | JUNK;
	/**/ ASSERT(SIZE(tp) >= sizeof(Body_t) );
	if(!vd->free)
		vd->free = tp;
	else
	{	LINK(tp) = CACHE(vd)[C_INDEX(SIZE(tp))];
		CACHE(vd)[C_INDEX(SIZE(tp))] = tp;
	}

	if(!(vd->mode&VM_TRUST) && _Vmtrace && (vd->mode&VM_TRACE) &&
	   VMETHOD(vd) == VM_MTBEST )
		(*_Vmtrace)(vm,NIL(uchar*),data,size);

done:
	CLRLOCK(vd,0);
	return (Void_t*)data;
}
1048
/* The world knows us by this */
/* Exported method table binding the best-fit entry points above. */
Vmethod_t	_Vmbest =
{
	bestalloc,	/* allocate			*/
	bestresize,	/* resize			*/
	bestfree,	/* free				*/
	bestaddr,	/* address validation		*/
	bestsize,	/* block size query		*/
	bestcompact,	/* compact region		*/
	bestalign,	/* aligned allocation		*/
	VM_MTBEST	/* method identifier		*/
};
1061
/* The heap region */
/* Statically initialized region data for the default heap; Vmregion
** below points at _Vmheap, making it the region used for the
** malloc/free/realloc interface.
*/
static Vmdata_t	_Vmdata =
{
	VM_MTBEST|VM_TRUST,		/* mode		*/
	0,				/* incr		*/
	0,				/* pool		*/
	NIL(Seg_t*),			/* seg		*/
	NIL(Block_t*),			/* free		*/
	NIL(Block_t*),			/* wild		*/
	NIL(Block_t*),			/* root		*/
};
Vmalloc_t _Vmheap =
{
	{ bestalloc,			/* best-fit method table	*/
	  bestresize,
	  bestfree,
	  bestaddr,
	  bestsize,
	  bestcompact,
	  bestalign,
	  VM_MTBEST
	},
	Vmdcsbrk,			/* disc		*/
	&_Vmdata			/* data		*/
};

Vmalloc_t*	Vmregion = &_Vmheap;	/* region for malloc/free/realloc	*/