--- src/lib/libast/vmalloc/vmbest.c.orig	2016-02-28 17:09:23 UTC
+++ src/lib/libast/vmalloc/vmbest.c
@@ -64,19 +64,19 @@ void _STUB_vmbest(){}
 #define PK_ALLOW	128	/* min #packs allowed to be created	*/
 
 /* Small requests are rounded to 0%SM_RNDx */
-#define SM_RND0	(1*ALIGN)	/* round value: 1*ALIGN == 16	*/
+#define SM_RND0	(1*MEM_ALIGN)	/* round value: 1*MEM_ALIGN == 16	*/
 #define SM_BIT0	4	/* (1<<SM_BIT0) == SM_RND0	*/
 #define SM_CNT0	16	/* # caches as rounded by SM_RND0	*/
 #define SM_IDX0	0	/* starting cache index of this group	*/
 #define SM_MAX0	(SM_CNT0*SM_RND0)
 
-#define SM_RND1	(2*ALIGN)	/* round value: 2*ALIGN == 32	*/
+#define SM_RND1	(2*MEM_ALIGN)	/* round value: 2*MEM_ALIGN == 32	*/
 #define SM_CNT1	8
 #define SM_BIT1	5
 #define SM_IDX1	(SM_IDX0+SM_CNT0)
 #define SM_MAX1	(SM_MAX0 + SM_CNT1*SM_RND1)
 
-#define SM_RND2	(4*ALIGN)	/* round value: 4*ALIGN == 64	*/
+#define SM_RND2	(4*MEM_ALIGN)	/* round value: 4*MEM_ALIGN == 64	*/
 #define SM_BIT2	6
 #define SM_CNT2	8
 #define SM_IDX2	(SM_IDX1+SM_CNT1)
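For context, the SM_* constants in the hunk above carve small requests into
three bands of size caches: with MEM_ALIGN == 16, sizes up to SM_MAX0 (256)
round to multiples of SM_RND0 (16), the next band up to SM_MAX1 (512) rounds
to SM_RND1 (32), and the third band rounds to SM_RND2 (64). The sketch below
is illustrative only (small_cache_index is not a vmbest.c function, and the
third band's upper limit is not shown in this hunk, so it is left unchecked):

	/* map a small request size z to its cache index; minimal sketch */
	static int small_cache_index(size_t z)
	{	if(z <= SM_MAX0)		/* 1..256: multiples of 16 */
			return SM_IDX0 + (int)((z - 1) >> SM_BIT0);
		else if(z <= SM_MAX1)		/* 257..512: multiples of 32 */
			return SM_IDX1 + (int)((z - SM_MAX0 - 1) >> SM_BIT1);
		else				/* 513..: multiples of 64 */
			return SM_IDX2 + (int)((z - SM_MAX1 - 1) >> SM_BIT2);
	}

For example, a 24-byte request rounds up to 32 and lands in cache index 1,
while a 300-byte request rounds up to 320 and lands in cache index 17.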
@@ -167,7 +167,7 @@ static int chktree(Pack_t* pack, Block_t
 	if(_Vmassert & VM_check_reg)
 	{	if(!node) /* the empty tree is always good */
 			return 0;
-		/**/DEBUG_ASSERT(BDSZ(node) >= BODYSIZE && (BDSZ(node)%ALIGN) == 0 );
+		/**/DEBUG_ASSERT(BDSZ(node) >= BODYSIZE && (BDSZ(node)%MEM_ALIGN) == 0 );
 
 		if(SIZE(node) & (BUSY|PFREE)) /* should be BITS-free */
 		{	/**/DEBUG_MESSAGE("Free block corrupted"); /**/DEBUG_ASSERT(0); return -1; }
@@ -252,7 +252,7 @@ static int bestfree(Vmalloc_t* vm, Void_
 	if((Vmuchar_t*)data < vm->data->segmin || (Vmuchar_t*)data >= vm->data->segmax)
 		return -1;
 
-	blk = BLOCK(data); /**/DEBUG_ASSERT((SIZE(blk)&BUSY) && (BDSZ(blk)%ALIGN) == 0 );
+	blk = BLOCK(data); /**/DEBUG_ASSERT((SIZE(blk)&BUSY) && (BDSZ(blk)%MEM_ALIGN) == 0 );
 	pack = PACK(blk); /**/DEBUG_ASSERT(pack->best == (Vmbest_t*)vm->data);
 	if((sz = SIZE(blk))&SMALL )
 		listp = &pack->small[SMDECODE(sz)].free;
@@ -339,7 +339,7 @@ static Block_t* bestpackextend(Vmalloc_t
 
 	/**/DEBUG_ASSERT(!wild || (PACK(wild) == pack && BDSZ(wild) < size && PACKWILD(pack,wild)) );
 	blkz = BDSZ(pack->pblk); /**/DEBUG_ASSERT(blkz >= _Vmpagesize);
-	size += blkz - (wild ? BDSZ(wild) : 0) + EXTRA(pack); /**/DEBUG_ASSERT(size%ALIGN == 0);
+	size += blkz - (wild ? BDSZ(wild) : 0) + EXTRA(pack); /**/DEBUG_ASSERT(size%MEM_ALIGN == 0);
 	if(_Vmassert & VM_debug) debug_printf(2, "%s:%d: PACK(%p) WILD(%p)=%zd BDSZ(%p)=%zd blkz=%zd size=%zu\n", __FILE__, __LINE__, pack, wild, wild ? BDSZ(wild) : 0, pack->pblk, BDSZ(pack->pblk), blkz, size);
 	if(!(pblk = (*_Vmsegalloc)(vm, pack->pblk, size, segtype)) )
 		pblk = pack->pblk;
@@ -383,7 +383,7 @@ static Block_t* bestpackextract(Pack_t*
 
 	l = r = &link;
 	if((root = pack->root) ) do /* top-down splay tree search */
-	{	/**/DEBUG_ASSERT((size%ALIGN) == 0 && !(SIZE(root)&(BUSY|PFREE)) );
+	{	/**/DEBUG_ASSERT((size%MEM_ALIGN) == 0 && !(SIZE(root)&(BUSY|PFREE)) );
 		if(size == (sz = BDSZ(root)) )
 			break;
 		if(size < sz)
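The comment in this hunk names the technique: bestpackextract() walks the
size-sorted free tree with a top-down splay search, where the l/r pointers
rooted at a scratch link node accumulate the left and right remainder trees
as the search descends. A generic, self-contained sketch of that pattern
(int keys in place of block sizes; this is not vmalloc code):

	typedef struct node_s
	{	struct node_s	*left, *right;
		int		key;
	} Node;

	/* splay the node nearest key to the root; Sleator's top-down method */
	static Node* splay(Node* root, int key)
	{	Node	link = {0}, *l = &link, *r = &link, *t;

		if(!root)
			return 0;
		for(;;)
		{	if(key < root->key)
			{	if(!root->left)
					break;
				if(key < root->left->key)
				{	t = root->left;		/* rotate right */
					root->left = t->right;
					t->right = root;
					root = t;
					if(!root->left)
						break;
				}
				r->left = root;			/* link right */
				r = root;
				root = root->left;
			}
			else if(key > root->key)
			{	if(!root->right)
					break;
				if(key > root->right->key)
				{	t = root->right;	/* rotate left */
					root->right = t->left;
					t->left = root;
					root = t;
					if(!root->right)
						break;
				}
				l->right = root;		/* link left */
				l = root;
				root = root->right;
			}
			else	break;
		}
		l->right = root->left;				/* reassemble */
		r->left = root->right;
		root->left = link.right;
		root->right = link.left;
		return root;
	}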
@@ -508,7 +508,7 @@ static int bestlistreclaim(Vmalloc_t* vm
 			continue;
 
 		/**/DEBUG_ASSERT((SIZE(fp)&(BUSY|MARK)) == (BUSY|MARK) );
-		/**/DEBUG_ASSERT(BDSZ(fp) >= sizeof(Body_t) && BDSZ(fp)%ALIGN == 0);
+		/**/DEBUG_ASSERT(BDSZ(fp) >= sizeof(Body_t) && BDSZ(fp)%MEM_ALIGN == 0);
 		SIZE(fp) &= ~BITS;
 		t = NEXT(fp);
 		SIZE(t) |= PFREE; /**/DEBUG_ASSERT(SIZE(NEXT(fp))&BUSY);
@@ -563,7 +563,7 @@ static Block_t* bestpackalloc(Vmalloc_t*
 	ssize_t		sz;
 	Block_t		*tp, *np, *pblk;
 	Vmbest_t	*best = (Vmbest_t*)vm->data;
-	/**/DEBUG_ASSERT(size >= sizeof(Body_t) && size%ALIGN == 0);
+	/**/DEBUG_ASSERT(size >= sizeof(Body_t) && size%MEM_ALIGN == 0);
 
 	if((tp = pack->alloc) ) /* fast allocation from recent memory */
 	{	pack->alloc = NIL(Block_t*);
@@ -719,9 +719,9 @@ static Void_t* bestalloc(Vmalloc_t* vm,
 	asospindecl();
 
 	/**/DEBUG_COUNT(N_alloc);
-	/**/DEBUG_ASSERT((ALIGN%(BITS+1)) == 0 );
-	/**/DEBUG_ASSERT((sizeof(Head_t)%ALIGN) == 0 );
-	/**/DEBUG_ASSERT((sizeof(Body_t)%ALIGN) == 0 );
+	/**/DEBUG_ASSERT((MEM_ALIGN%(BITS+1)) == 0 );
+	/**/DEBUG_ASSERT((sizeof(Head_t)%MEM_ALIGN) == 0 );
+	/**/DEBUG_ASSERT((sizeof(Body_t)%MEM_ALIGN) == 0 );
 	/**/DEBUG_ASSERT(sizeof(Block_t) == (sizeof(Body_t)+sizeof(Head_t)) );
 	/**/DEBUG_ASSERT(chkregion((Vmbest_t*)vm->data, local) >= 0);
 
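The three renamed asserts above are layout invariants: MEM_ALIGN must be a
multiple of BITS+1 (assuming BITS is the mask of the low status bits
BUSY|PFREE|SMALL|MARK, as suggested by SIZE(fp) &= ~BITS earlier in this
patch, the flag bits of an aligned size are then always zero), and both the
header and body structs must be multiples of MEM_ALIGN. In C11 these could
equally be compile-time checks; a hypothetical sketch, assuming the macros
and types are in scope:

	#include <assert.h>	/* static_assert, C11 */

	static_assert((MEM_ALIGN % (BITS+1)) == 0, "status bits exceed alignment");
	static_assert(sizeof(Head_t) % MEM_ALIGN == 0, "Head_t not aligned");
	static_assert(sizeof(Body_t) % MEM_ALIGN == 0, "Body_t not aligned");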
@@ -902,7 +902,7 @@ static Void_t* bestalign(Vmalloc_t* vm,
 		return NIL(Void_t*);
 
 	algz = LGROUND(size);
-	algn = (*_Vmlcm)(align,ALIGN);
+	algn = (*_Vmlcm)(align,MEM_ALIGN);
 
 	/* non-Vmbest methods may require extra header space */
 	if(METHOD(best) != VM_MTBEST && vm->meth.eventf)
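In this last hunk, bestalign() computes the effective alignment as the least
common multiple of the caller's requested alignment and the allocator's
native MEM_ALIGN, via the _Vmlcm function pointer. A self-contained sketch of
that computation (helper names are illustrative, not from vmalloc):

	static size_t gcd(size_t a, size_t b)
	{	while(b)
		{	size_t t = b; b = a % b; a = t;
		}
		return a;
	}

	static size_t lcm(size_t a, size_t b)
	{	return (a / gcd(a,b)) * b;	/* e.g. lcm(24,16) == 48 */
	}

So a request for 24-byte alignment on a 16-byte-aligned heap is served with
48-byte alignment, the smallest value satisfying both.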