// gsMemory.c
// GameSpy SDK chunk-pool memory manager (OpenMoHAA 0.82.0).
5#include "gsPlatform.h"
6#include "gsPlatformUtil.h"
7#include "gsMemory.h"
8#include "gsAssert.h"
9#include "gsDebug.h"
10
#ifdef _PSP
	#include <malloc.h> // memalign() is declared here on PSP
#endif

// toDo: move some of this to platform.h
// Per-platform pointer width (gsi_uint) and default chunk alignment.
#ifdef _PS3
	#if(0)
		typedef gsi_u64 gsi_uint;
		#define PTR_ALIGNMENT 32
		#define GSI_64BIT (1)
		#define GS_BIG_ENDIAN
	#else
		// changed as of SDK 0.8 Sony moved back to using 32 bit pointers
		typedef gsi_u32 gsi_uint;
		#define PTR_ALIGNMENT 16
		#define GSI_64BIT (0)
		#define GS_BIG_ENDIAN
	#endif
#else
	// all other platforms: 32-bit pointer arithmetic, 16-byte alignment
	typedef gsi_u32 gsi_uint;
	#define PTR_ALIGNMENT 16
	#define GSI_64BIT (0)
#endif
34
35
36
37// To Do:
38// Small block optimization using fixed size mempool.
39// add multi-threaded support
40
// MEM_PROFILE: compiles in extra bookkeeping (high-water marks, usage totals).
#define MEM_PROFILE (1) // if on additional memprofiling code will be enabled for such things as high water mark calcs
#if defined(MEM_PROFILE)
	#define IF_MEM_PROFILE_ISON(a) a
#else
	#define IF_MEM_PROFILE_ISON(a)
#endif

// Disable compiler warnings for issues that are unavoidable.
#if defined(_MSC_VER) // DevStudio
	// Level4, "conditional expression is constant".
	// Occurs with use of the MS provided macro FD_SET
	#pragma warning ( disable: 4127 )
#include <malloc.h> // _aligned_malloc
#endif // _MSC_VER

// Calling convention applied to the pluggable allocator callbacks.
#ifdef _WIN32
	#define MEM_MANAGER_CALL _cdecl
#else
	#define MEM_MANAGER_CALL
#endif

// Define MEM_MANAGER_DIRECT to point the callback table straight at the
// CRT allocator functions instead of the _gsi_* wrappers below.
//#if !defined(_WIN32)
// #define MEM_MANAGER_DIRECT
//#endif
66
67typedef struct
68{
69 void* (MEM_MANAGER_CALL *malloc )(size_t size);
70 void (MEM_MANAGER_CALL *free )(void* ptr);
71 void* (MEM_MANAGER_CALL *realloc )(void* ptr, size_t size);
72 void* (MEM_MANAGER_CALL *memalign)(size_t boundary, size_t size);
74
75static void* MEM_MANAGER_CALL _gsi_malloc(size_t size)
76{
77 return malloc(size);
78}
79
// Default free callback: forwards straight to the CRT.
static void MEM_MANAGER_CALL _gsi_free(void* ptr)
{
	free(ptr);
}
84
85static void* MEM_MANAGER_CALL _gsi_realloc(void* ptr, size_t size)
86{
87 return realloc(ptr, size);
88}
89
90#if defined(_PS2) || defined(_PSP) || defined(_PS3)
91 static void* _gsi_memalign(size_t boundary, size_t size)
92 {
93 return memalign(boundary, size);
94 }
95#elif defined (_WIN32)
96 #if (_MSC_VER < 1300)
97 //extern added for vc6 compatability.
98 extern void* __cdecl _aligned_malloc(size_t size, int boundary);
99 #endif
100 static void* __cdecl _gsi_memalign(size_t boundary, size_t size)
101 {
102 return _aligned_malloc(size, (int)boundary);
103 }
104#else
105 // no built in system memalign
106 static void* _gsi_memalign(size_t boundary, size_t size)
107 {
108 void *ptr = calloc((size)/(boundary), (boundary));
109 // check alignment
110 GS_ASSERT((((gsi_u32)ptr)% boundary)==0);
111 return ptr;
112 }
113#endif
114
// The active allocator table. Defaults either to the CRT functions
// directly (MEM_MANAGER_DIRECT) or to the _gsi_* shunts above.
static MemManagerCallbacks memmanagercallbacks =
{
#ifdef MEM_MANAGER_DIRECT
	&malloc,
	&free,
	&realloc,
	#if defined(_PS2) || defined(_PSP) || defined(_PS3)
		&memalign, // a version already exists on this platform
	#else
		&_gsi_memalign, //wrote our own
	#endif
#else
	&_gsi_malloc,
	&_gsi_free,
	&_gsi_realloc,
	&_gsi_memalign
#endif
};
133
134
135void gsiMemoryCallbacksSet(gsMallocCB p_malloc, gsFreeCB p_free, gsReallocCB p_realloc, gsMemalignCB p_memalign)
136{
137
138 memmanagercallbacks.malloc = p_malloc;
139 memmanagercallbacks.free = p_free;
140 memmanagercallbacks.realloc = p_realloc;
141 memmanagercallbacks.memalign = p_memalign;
142}
143
144
145
146
147
148
149// These functions shunt to virtual function pointer
150void* gsimalloc (size_t size)
151{
152 return (*memmanagercallbacks.malloc)(size);
153}
154void* gsirealloc (void* ptr, size_t size)
155{
156 return (*memmanagercallbacks.realloc)(ptr,size);
157}
158void gsifree (void* ptr)
159{
160 if(ptr == NULL)
161 return;
162 (*memmanagercallbacks.free)(ptr);
163}
164void* gsimemalign (size_t boundary, size_t size)
165{
166 return (*memmanagercallbacks.memalign)(boundary,size);
167}
168
169
170
171#ifdef GSI_MEM_MANAGED
172
173
174
175
176/***************************************************************************/
177/*
178
179 Random Access Memory Pool
180
181*/
182/***************************************************************************/
183
184
// Context Stack
#define MEM_CONTEXT_STACK_MAX 10 // max stack depth
// Stack of memory-pool contexts; the top selects the active pool.
static gsMemMgrContext MemTypeStack [MEM_CONTEXT_STACK_MAX] = {gsMemMgrContext_Default};
static gsi_u32 MemTypeStackIndex = 0;
extern gsMemMgrContext gsMemMgrContextCurrent;

// Memtype Tag stack
#define MEM_TAG_STACK_MAX 10 // max stack depth
// Stack of memtype tags; the top is stamped into each new allocation
// (see MEM_CHUNK_POOLAllocChunk).
static gsi_u8 MemTagStack [MEM_TAG_STACK_MAX] = {0};
static gsi_u32 MemTagStackIndex = 0;
195
196
197// ToDo:
198// - Add 64 bit pointer support
199
200
201
// Default pointer alignment. Must be 16, 32, 64, 128, or 256 bytes.
// i.e. malloc (x) = memalign(default alignment,x);

// True when x is zero or a power of two.
#define MEM_IS_POWER_OF_2(x) (((x) & ((x)-1)) == 0)
// Round x up to the next multiple of a (a must be a power of two).
#define MEMALIGN_POWEROF2(x,a) (((gsi_uint)(x)+(a-1)) &~ ( ((gsi_uint)(a)) -1))

#if(1) // enable assert, otherwise this runs faster
	#define MP_ASSERT(x) GS_ASSERT(x)
#else
	#define MP_ASSERT(x)
#endif

// Number of distinct memtype tags tracked by MEM_STATS.
#define MEM_TYPES_MAX 127
218
219
// Aggregate usage snapshot for a pool (see MEM_CHUNK_POOLMemStatsGet).
typedef struct
{
	gsi_u32 MemTotal;
	gsi_u32 MemAvail;
	gsi_u32 MemUsed;
	gsi_u32 MemUsed_At_HighWater; // peak MemUsed observed
	gsi_u32 MemWasted; // overhead memory + memory lost due to fragmentation.

	gsi_u32 ChunksCount; // number of ChunkHeaders in linked list.
	gsi_u32 ChunksFreeCount; // number of free ChunkHeaders in linked list.
	gsi_u32 ChunksFreeLargestAvail;
	// these are the same as handles
	gsi_u32 ChunksUsedCount; // number of ChunkHeaders which are in use.
	gsi_u32 ChunksUsedCount_At_HighWater; // the most handles used at any one time

	// memtype specifics (indexed by the chunk's memtype tag)
	gsi_u32 MemType_ChunksCount [MEM_TYPES_MAX];
	gsi_u32 MemType_MemUsed [MEM_TYPES_MAX];
	gsi_u32 MemType_MemUsed_At_HighWater [MEM_TYPES_MAX];


} MEM_STATS;
242
// Accumulate every counter of ms into _this.
void MEM_STATSAddAll (MEM_STATS *_this, const MEM_STATS *ms);
// Reset current counters, except high-water (HW) marks.
void MEM_STATSClear (MEM_STATS *_this);
// Reset everything, including high-water marks.
void MEM_STATSClearAll (MEM_STATS *_this);
247
248
// RA_MEM_CHUNK
// Header placed immediately before every block of user memory.
// MemUsed packs two fields into one word: the 4-byte-aligned user size
// (shifted left 6 bits) and, in the low-order byte, the chunk's memtype
// tag (the union overlays MemType on that byte, endian-dependent).
// MemUsed == 0 marks the chunk as free.
typedef struct tMEM_CHUNK
{

	// private
	union
	{
		gsi_uint MemUsed; // size used by application. ex// malloc(size)
		#ifdef GS_BIG_ENDIAN
		struct
		{
			#if (GSI_64BIT)
				char pad[7],MemType;
			#else
				char pad[3],MemType;
			#endif
		}MEM_TypeStruct;
		#else
		struct
		{
			#if (GSI_64BIT)
				char MemType,pad[7];
			#else
				char MemType,pad[3];
			#endif
		} MEM_TypeStruct;
		#endif
	} MEM_UsageStat;

	// public:
	// double linked list of all chunks
	struct tMEM_CHUNK *prev;
	struct tMEM_CHUNK *next; // next chunk
	// single linked list of free chunks
	struct tMEM_CHUNK *NextFree; // next free chunk
} MEM_CHUNK;
285
286
287
288/***************************************/
289// flag as in use, set size, memtype
// Mark the chunk in-use: store the 4-byte-aligned user size in the upper
// bits of MemUsed (<<6 leaves the low byte clear) and the memtype tag in
// the low byte.
void MEM_CHUNKAlloc (MEM_CHUNK *_this, gsi_u8 _MemType, size_t _UsedSize)
{
	_UsedSize = MEMALIGN_POWEROF2(_UsedSize,4); //The lower 2 bits are zero, so we don't store them.
	// size<<6 must fit in the word alongside the type byte
	GS_ASSERT_STR(_UsedSize < 0x3FFFFFC, "Alloc Memory size is too big.");
	_this->MEM_UsageStat.MemUsed = _UsedSize<<6;
	_this->MEM_UsageStat.MEM_TypeStruct.MemType = _MemType;
}
// Mark the chunk free (zeroing MemUsed also wipes the memtype byte).
void MEM_CHUNKFree (MEM_CHUNK *_this)
{
	_this->MEM_UsageStat.MemUsed = 0;
}
301
302/***************************************/
303// returns true if not in use
304gsi_bool MEM_CHUNKIsFree (MEM_CHUNK *_this)
305{
306 return (_this->MEM_UsageStat.MemUsed == 0);
307}
308
309/***************************************/
gsi_u32 MEM_CHUNKTotalSizeGet(MEM_CHUNK *_this)
// Total size chunk is using up, including header.
{
	// the end nub has no successor; report a fixed nominal size
	if (!_this->next)
	{
		return PTR_ALIGNMENT + sizeof(MEM_CHUNK)/*Nub*/;
	}
	// distance to the next header = header + payload + padding
	return (gsi_uint) _this->next - (gsi_uint) _this;
}
319
320/***************************************/
321gsi_u32 MEM_CHUNKChunkSizeGet(MEM_CHUNK *_this)
322// size of chunk, without header. "Available memory"
323{
324 if (!_this->next)
325 return PTR_ALIGNMENT;/*Nub*/;
326 return (gsi_uint) _this->next - (gsi_uint) _this - sizeof(MEM_CHUNK);
327}
328
// Application-requested size: mask off the memtype byte, undo the <<6.
gsi_u32 MEM_CHUNKMemUsedGet (MEM_CHUNK *_this)
{
	return (_this->MEM_UsageStat.MemUsed & ~0xFF)>>6;
}
333
// Re-encode the used size while preserving the memtype byte.
void MEM_CHUNKMemUsedSet (MEM_CHUNK *_this, gsi_u32 size)
{
	_this->MEM_UsageStat.MemUsed = (MEMALIGN_POWEROF2(size,4)<<6) + _this->MEM_UsageStat.MEM_TypeStruct.MemType;
}
338
339gsi_u32 MEM_CHUNKMemAvailGet(MEM_CHUNK *_this)
340{
341 return MEM_CHUNKChunkSizeGet(_this) - MEM_CHUNKMemUsedGet(_this);
342}
343
// Read the memtype tag from the low byte of the packed usage word.
char MEM_CHUNKMemTypeGet (MEM_CHUNK *_this)
{
	return _this->MEM_UsageStat.MEM_TypeStruct.MemType;
}
348
// Overwrite the memtype tag; size bits are untouched.
void MEM_CHUNKMemTypeSet (MEM_CHUNK *_this, char _MemType)
{
	GS_ASSERT(_MemType < MEM_TYPES_MAX);
	_this->MEM_UsageStat.MEM_TypeStruct.MemType = _MemType;
}
354
// User memory begins immediately after the chunk header.
void* MEM_CHUNKMemPtrGet (MEM_CHUNK *_this)
{
	return (void*)((gsi_uint) _this + sizeof(MEM_CHUNK));
}
359
// Map a user pointer back to its chunk header (header precedes payload).
/*inline */MEM_CHUNK *Ptr_To_MEM_CHUNK(void *ptr)
{
	return ((MEM_CHUNK *)ptr)-1;
}
364
365/***************************************/
366/***************************************/
// A heap carved from one contiguous buffer: a doubly linked chain of
// MEM_CHUNK headers bounded by two permanently-allocated "nub" chunks,
// plus a singly linked list threading the free chunks.
typedef struct MEM_CHUNK_POOL
{
	// public:
	char Name[20]; // name of this pool. Used for debug purposes
	// private:
	MEM_CHUNK *HeaderStart; // start nub, never freed
	MEM_CHUNK *HeaderEnd; // end nub, never freed
	MEM_CHUNK *pFirstFree; // head of the free-chunk list
	gsi_u32 HeapSize; // 0 means "pool not created" (see IsValid)
	#if MEM_PROFILE
		gsi_u32 HWMemUsed; // high-water mark of MemUsed
		gsi_u32 MemUsed;
	#endif
} MEM_CHUNK_POOL;
381
// private
MEM_CHUNK *MEM_CHUNK_POOLFindPreviousFreeChunk (MEM_CHUNK_POOL *_this, MEM_CHUNK *header);
MEM_CHUNK *MEM_CHUNK_POOLFindNextFreeChunk (MEM_CHUNK_POOL *_this, MEM_CHUNK *header);
void MEM_CHUNK_POOLSplitChunk (MEM_CHUNK_POOL *_this, MEM_CHUNK *header,gsi_bool ReAlloc);
void MEM_CHUNK_POOLFreeChunk (MEM_CHUNK_POOL *_this, MEM_CHUNK *header);
MEM_CHUNK *MEM_CHUNK_POOLAllocChunk (MEM_CHUNK_POOL *_this, size_t Size,int Alignment , gsi_bool Backwards );//int Alignment = PTR_ALIGNMENT, gsi_bool Backwards = gsi_false);

// move a chunk within the limits of prev + prev_size and next - this_size
void MEM_CHUNK_POOLChunkMove (MEM_CHUNK_POOL *_this, MEM_CHUNK *oldpos, MEM_CHUNK *newpos);

// public
/***************************************/
void MEM_CHUNK_POOLCreate (MEM_CHUNK_POOL *_this, const char *szName, char *ptr, gsi_u32 _size);
void MEM_CHUNK_POOLDestroy (MEM_CHUNK_POOL *_this) ;
396gsi_bool MEM_CHUNK_POOLIsValid (MEM_CHUNK_POOL *_this)
397{
398 return _this->HeapSize > 0;
399}
400
401
/***************************************/
void *MEM_CHUNK_POOLmalloc (MEM_CHUNK_POOL *_this, size_t Size, gsi_i32 Alignment );//= PTR_ALIGNMENT);
// allocated backwards from top of heap
void *MEM_CHUNK_POOLmalloc_backwards (MEM_CHUNK_POOL *_this, size_t Size, gsi_i32 Alignment );//= PTR_ALIGNMENT);
void *MEM_CHUNK_POOLrealloc (MEM_CHUNK_POOL *_this, void *oldmem, size_t newSize);
void MEM_CHUNK_POOLfree (MEM_CHUNK_POOL *_this, void *mem);

/***************************************/
void MEM_CHUNK_POOLCheckValidity (MEM_CHUNK_POOL *_this );
void MEM_CHUNK_POOLMemStatsGet (MEM_CHUNK_POOL *_this, MEM_STATS *stats);
// Sum sizes of all chunks with the given memtype, optionally logging each.
gsi_u32 MEM_CHUNK_POOLWalkForType (MEM_CHUNK_POOL *_this, int _MemType, gsi_bool _LogUse);

// returns true if this is a valid heap ptr
gsi_bool MEM_CHUNK_POOLIsHeapPtr (MEM_CHUNK_POOL *_this, void * mem);

/***************************************/
// add to table, filling in memtype .
void MEM_CHUNK_POOLFillMemoryTable (MEM_CHUNK_POOL *_this, char *Table, const int TableSize, gsi_u32 _HeapStart, gsi_u32 _HeapSize);
420
421/***************************************/
422// returns true if mem handle is in range of heap
gsi_bool MEM_CHUNK_POOLItemIsInPoolMemory (MEM_CHUNK_POOL *_this, void *ptr)
{
	GS_ASSERT(MEM_CHUNK_POOLIsValid(_this));
	// inclusive range: first payload address .. end-nub payload address
	return (((gsi_uint)ptr >= (gsi_uint)MEM_CHUNKMemPtrGet(_this->HeaderStart)) &&((gsi_uint)ptr <= (gsi_uint)MEM_CHUNKMemPtrGet(_this->HeaderEnd)));
}
428
429
430
431
432
433
434
435
436
437
// Accumulate every counter of ms into _this.
// NOTE(review): MemType_MemUsed_At_HighWater is NOT accumulated here —
// presumably intentional (high-water arrays handled elsewhere), verify
// against callers.
void MEM_STATSAddAll(MEM_STATS *_this, const MEM_STATS *ms)
{
	int i;
	_this->MemTotal += ms->MemTotal ;
	_this->MemAvail += ms->MemAvail ;
	_this->MemUsed += ms->MemUsed ;
	_this->MemUsed_At_HighWater += ms->MemUsed_At_HighWater ;
	_this->MemWasted += ms->MemWasted ;
	_this->ChunksCount += ms->ChunksCount ;
	_this->ChunksFreeCount += ms->ChunksFreeCount ;
	_this->ChunksFreeLargestAvail += ms->ChunksFreeLargestAvail ;
	_this->ChunksUsedCount += ms->ChunksUsedCount ;
	_this->ChunksUsedCount_At_HighWater += ms->ChunksUsedCount_At_HighWater;
	for (i =0; i<MEM_TYPES_MAX;i++)
	{
		_this->MemType_ChunksCount[i] +=ms->MemType_ChunksCount[i];
		_this->MemType_MemUsed[i] +=ms->MemType_MemUsed[i] ;
	}

}
458
459void MEM_STATSClear(MEM_STATS *_this )
460// except HW
461{
462 _this->MemTotal = 0;
463 _this->MemAvail = 0;
464 _this->MemUsed = 0;
465 _this->MemWasted = 0;
466 _this->ChunksCount = 0;
467 _this->ChunksFreeCount = 0;
468 _this->ChunksFreeLargestAvail = 0;
469 _this->ChunksUsedCount = 0;
470
471 memset(_this->MemType_ChunksCount, 0,4 * MEM_TYPES_MAX);
472 memset(_this->MemType_MemUsed, 0,4 * MEM_TYPES_MAX);
473
474}
475
476void MEM_STATSClearAll(MEM_STATS *_this )
477{
478 int i;
479 MEM_STATSClear(_this);
480 _this->MemUsed_At_HighWater = 0;
481 for (i=0;i< MEM_TYPES_MAX;i++ )
482 _this->MemType_MemUsed_At_HighWater[i] = 0;
483 _this->ChunksUsedCount_At_HighWater = 0;
484}
485
486
487
488//--------------------------------------------------------------------------
void MEM_CHUNK_POOLChunkMove (MEM_CHUNK_POOL *_this, MEM_CHUNK *oldpos, MEM_CHUNK *newpos)
// Relocate a chunk header to newpos (used by AllocChunk to satisfy
// alignment), then repair both the all-chunks list and, if the chunk is
// free, the free-chunk list.
//--------------------------------------------------------------------------
{
	MEM_CHUNK *firstfree;
	//todo!!!
	MEM_CHUNK temp = *oldpos;

	// can not be end/start chunk
	MP_ASSERT(oldpos->prev)
	MP_ASSERT(oldpos->next)

	// check if within movement limits
	MP_ASSERT((gsi_uint) newpos <= (gsi_uint)oldpos->next - MEM_CHUNKMemUsedGet(oldpos) - sizeof(MEM_CHUNK))
	MP_ASSERT((gsi_uint) newpos >= (gsi_uint)oldpos->prev + MEM_CHUNKMemUsedGet(oldpos->prev) + sizeof(MEM_CHUNK))

	// check if alignment is valid
	MP_ASSERT((((gsi_uint) newpos) % sizeof(MEM_CHUNK)) == 0)

	// copy the saved header into its new position
	*newpos = temp;

	// link into chunk list
	newpos->prev->next = newpos;
	newpos->next->prev = newpos;

	// Fix links in free chunk list
	if (MEM_CHUNKIsFree(newpos))
	{

		if (_this->pFirstFree == oldpos)
			_this->pFirstFree = newpos;
		else
		{
			// find whichever free chunk pointed at oldpos and retarget it
			firstfree = MEM_CHUNK_POOLFindPreviousFreeChunk(_this,newpos->prev);
			if (firstfree != newpos)
				firstfree->NextFree = newpos;
			else
			{
				// first in list.
				_this->pFirstFree = newpos;
			}

			MP_ASSERT((newpos->NextFree==NULL) || ((gsi_uint)newpos->NextFree > (gsi_uint)newpos))
		}
	}


}
536
537void MEM_CHUNK_POOLDestroy(MEM_CHUNK_POOL *_this)
538{
539 memset(_this, 0, sizeof (MEM_CHUNK_POOL));
540}
541//--------------------------------------------------------------------------
void MEM_CHUNK_POOLCreate(MEM_CHUNK_POOL *_this, const char * szNameIn, char *ptr, gsi_u32 size)
// Initialize a pool over the caller-supplied buffer [ptr, ptr+size):
// a permanently-allocated nub at each end with one big free chunk between.
//--------------------------------------------------------------------------
{
	int len;
	MEM_CHUNK *HeaderMid;
	MP_ASSERT(((gsi_uint)ptr & 15 )==0) // ensure 16 byte alignment

	//Copy limited length name
	len = strlen(szNameIn)+1;
	if (len > 20) len = 20;
	memcpy(_this->Name,szNameIn, len);
	_this->Name[19]='\0'; // in case str is too long.

	// create two nubs, at start, and end, with a chunk in between
	MP_ASSERT(size > 48 + 3 * sizeof(MEM_CHUNK))

	_this->HeaderStart = (MEM_CHUNK *) (ptr);
	HeaderMid = (MEM_CHUNK *) (ptr + 2 * sizeof(MEM_CHUNK));
	_this->HeaderEnd = (MEM_CHUNK *) (ptr + size - 2 * sizeof(MEM_CHUNK));

	// Bogus nub which is never freed.
	_this->HeaderStart->prev = NULL;
	_this->HeaderStart->next = HeaderMid;
	_this->HeaderStart->NextFree = HeaderMid;
	MEM_CHUNKAlloc (_this->HeaderStart,0,sizeof(MEM_CHUNK)); // don't mark as free

	// Here is our real heap, after before and after overhead
	HeaderMid->prev = _this->HeaderStart;
	HeaderMid->next = _this->HeaderEnd;
	HeaderMid->NextFree = 0;
	MEM_CHUNKFree(HeaderMid);

	// Bogus nub which is never freed.
	_this->HeaderEnd->prev = HeaderMid;
	_this->HeaderEnd->next = NULL;
	_this->HeaderEnd->NextFree = NULL;
	MEM_CHUNKAlloc (_this->HeaderEnd,0,sizeof(MEM_CHUNK)); // don't mark as free

	_this->HeapSize = size;
	_this->pFirstFree = HeaderMid;

}
584
585
586//--------------------------------------------------------------------------
587MEM_CHUNK *MEM_CHUNK_POOLFindPreviousFreeChunk(MEM_CHUNK_POOL *_this, MEM_CHUNK *header)
588// find previous free chunk
589// return NULL if start header is not free, and there is nothing free before it.
590// return header if start header is first free chunk
591{
592 while ((header) && (!MEM_CHUNKIsFree(header)))
593 {
594 //GS_ASSERT(header->prev == NULL || (header->prev >= _this->HeaderStart && header->prev <= _this->HeaderEnd));
595 header = header->prev;
596 }
597
598 GSI_UNUSED(_this);
599 return header;
600}
601
602//--------------------------------------------------------------------------
603MEM_CHUNK *MEM_CHUNK_POOLFindNextFreeChunk(MEM_CHUNK_POOL *_this, MEM_CHUNK *header_in)
604// find previous free chunk
605// return NULL if no next free chunk.
606{
607 MEM_CHUNK *header = header_in;
608 while ((header) && (!MEM_CHUNKIsFree(header)))
609 {
610 header = header->next;
611 }
612 if (header == header_in)
613 return NULL;
614
615 GSI_UNUSED(_this);
616 return header;
617}
618
619
620
621
622//--------------------------------------------------------------------------
void MEM_CHUNK_POOLSplitChunk(MEM_CHUNK_POOL *_this, MEM_CHUNK *header, gsi_bool ReAlloc)
// split a used chunk into two if the UsedSize is smaller then the ChunkSize
// A new free chunk header is placed just past header's used memory and
// spliced into both the all-chunks and free-chunks lists. ReAlloc=true
// means header was shrunk by realloc, so the new free chunk may need to
// merge with an already-free successor and the profile stats re-counted.
//--------------------------------------------------------------------------
{
	MEM_CHUNK *next;
	MEM_CHUNK *PrevFree;
	MEM_CHUNK *NewHeader;

	// calc new position at end of used mem
	NewHeader = (MEM_CHUNK *) ((gsi_u8*)header + MEM_CHUNKMemUsedGet(header) + sizeof(MEM_CHUNK));
	NewHeader = (MEM_CHUNK *)MEMALIGN_POWEROF2(NewHeader,sizeof(MEM_CHUNK));

	//assert we have enough room for this new chunk
	MP_ASSERT ((gsi_uint)NewHeader + 2 * sizeof(MEM_CHUNK) <= (gsi_uint)header->next)

	// update some stats
	#if (MEM_PROFILE)
	if(ReAlloc)
	{
		//09-OCT-07 BED: Since we're splitting the chunk, it seems more accurate
		//               to use the full size of the chunk, not just the used portion
		_this->MemUsed -= MEM_CHUNKChunkSizeGet(header);
		//_this->MemUsed -= MEM_CHUNKMemUsedGet(header);
		GS_ASSERT(_this->MemUsed >= 0);
	}
	#endif

	// Can this new chunk fit in the current one?
	// create a new chunk header, at the end of used space, plus enough to align us to 16 bytes

	// Splice into linked list
	NewHeader->prev = header;
	NewHeader->next = header->next;
	MEM_CHUNKFree(NewHeader);

	if (NewHeader->next)
	{
		NewHeader->next->prev = NewHeader;
	}

	header->next = NewHeader;

	// Splice into free chunks linked list

	// this need to merge can happen on a realloc before a free chunk
	if (MEM_CHUNKIsFree(NewHeader->next))
	{
		MP_ASSERT(ReAlloc)

		// merge and splice: absorb the free successor into NewHeader
		next = NewHeader->next->next;
		next->prev = NewHeader;

		NewHeader->NextFree = NewHeader->next->NextFree;
		NewHeader->next = next;
	}
	else
	{
		if (ReAlloc)
		{
			// on a realloc, this next value is useless
			NewHeader->NextFree = MEM_CHUNK_POOLFindNextFreeChunk(_this,NewHeader->next);
		}
		else
			NewHeader->NextFree = header->NextFree;
	}

	if (_this->pFirstFree == header)
	{
		// this is first free chunk
		_this->pFirstFree = NewHeader;
	}
	else
	{
		// link previous free chunk to this one.
		PrevFree = MEM_CHUNK_POOLFindPreviousFreeChunk(_this,header);
		if (PrevFree)
			PrevFree->NextFree = NewHeader;
		else
			// this is first free chunk
			_this->pFirstFree = NewHeader;
	}

	#if (MEM_PROFILE)
	if(ReAlloc)
	{
		_this->MemUsed += MEM_CHUNKMemUsedGet(header);
		// update highwater mark
		if(_this->MemUsed > _this->HWMemUsed)
			_this->HWMemUsed = _this->MemUsed;

		GS_ASSERT(_this->MemUsed <= _this->HeapSize);
	}
	#endif

#ifdef _DEBUG_
	header->NextFree = NULL;
#endif

}
723
724
725//--------------------------------------------------------------------------
726gsi_bool MEM_CHUNK_POOLIsHeapPtr(MEM_CHUNK_POOL *_this, void * mem)
727// returns true if this is a valid heap ptr
728{
729 MEM_CHUNK *headertofind = Ptr_To_MEM_CHUNK(mem);
730 MEM_CHUNK *header = _this->HeaderStart;
731
732 while (header)
733 {
734 header = header->next;
735 if (headertofind == header)
736 return gsi_true;
737 }
738
739 return gsi_false;
740
741}
742
743
744
745
746
747
748
749//--------------------------------------------------------------------------
MEM_CHUNK *MEM_CHUNK_POOLAllocChunk(MEM_CHUNK_POOL *_this,size_t Size, gsi_i32 Alignment, gsi_bool Backwards)
// size = requested size from app.

// Find first chunk that will fit,
// allocate from it, splitting it
// merge split with next free chunk, if next chunk is free
// Backwards=true scans from the top of the heap instead of pFirstFree.
// Returns the allocated chunk header, or NULL (after asserting) on OOM.
//--------------------------------------------------------------------------
{
	gsi_u32 Ptr ;
	gsi_u32 AlignedPtr ;
	int delta ;
	MEM_CHUNK *PrevFree ;
	int total_size ;
	int MemRemain ;
	MEM_CHUNK *alignedheader;


	MEM_CHUNK *header;
	gsi_u32 SizeNeeded = Size + sizeof(MEM_CHUNK);
	SizeNeeded = MEMALIGN_POWEROF2(SizeNeeded,sizeof(MEM_CHUNK)); // must be aligned to this at least!!!

	MP_ASSERT(Size)
	MP_ASSERT(MEM_IS_POWER_OF_2(Alignment)) // must be power of two!!!
	MP_ASSERT(Alignment >= PTR_ALIGNMENT)


//	Backwards = gsi_false;

	if(Backwards)
		header = MEM_CHUNK_POOLFindPreviousFreeChunk(_this,_this->HeaderEnd);
	else
		header = _this->pFirstFree;


	// should all be free chunks linked from here in.
	while (header)
	{
		// is this chunk available
		MP_ASSERT (MEM_CHUNKIsFree(header))

		// Calc memory left in this chunk after we alloc
		total_size = MEM_CHUNKTotalSizeGet(header);
		MemRemain = total_size - SizeNeeded;

		// can we fit?
		if (MemRemain >= 0 )
		{
			// are we aligned properly?
			Ptr = (gsi_uint)MEM_CHUNKMemPtrGet(header);
			AlignedPtr = MEMALIGN_POWEROF2(Ptr,Alignment);
			delta = AlignedPtr - Ptr;
			if (delta)
			{
				// we need to move free chunk over by ptr.
				if (MemRemain < delta)
				{
					// not enough space in this chunk
					header = header->NextFree;
					continue;
				}

				// move the chunk over so that the pointer is aligned.
				alignedheader = Ptr_To_MEM_CHUNK((void*)(gsi_uint)AlignedPtr);
				MEM_CHUNK_POOLChunkMove (_this,header,alignedheader);
				header = alignedheader;
				MemRemain -= delta;

			}


			// at this point we've taken this chunk, and need to split off the unused part
			// in theory, there should be no other free chunk ahead of us.

			// stamp with the current memtype tag (top of the tag stack)
			MEM_CHUNKAlloc(header,MemTagStack[MemTagStackIndex],Size);

			// split as needed
			if (MemRemain > sizeof(MEM_CHUNK)*2)
			{

				// split chunk, this will handle free chunk pointer list
				MEM_CHUNK_POOLSplitChunk(_this,header, gsi_false);
			}
			else
			{
				// remainder too small to split; unlink from the free list
				if (_this->pFirstFree == header)
				{
					// this is first free chunk
					_this->pFirstFree = header->NextFree;

				}
				else
				{
					// link previous free chunk to this one.
					PrevFree = MEM_CHUNK_POOLFindPreviousFreeChunk(_this,header);
					if (PrevFree)
						PrevFree->NextFree = header->NextFree;
					else
						_this->pFirstFree = header->NextFree;

				}
			}
			{
				#if (MEM_PROFILE)
					_this->MemUsed += MEM_CHUNKMemUsedGet(header);
					// update highwater mark
					if(_this->MemUsed > _this->HWMemUsed)
						_this->HWMemUsed = _this->MemUsed;

					GS_ASSERT(_this->MemUsed <= _this->HeapSize);
				#endif
			}
			return header;

		}
		if (Backwards)
			header = MEM_CHUNK_POOLFindPreviousFreeChunk(_this,header);
		else
			header = header->NextFree;
	}
	// not crashing here.
	gsDebugFormat(GSIDebugCat_App, GSIDebugType_Misc, GSIDebugLevel_Notice," Could not allocate %i bytes\n", Size);
	GS_ASSERT_STR(0,"Out of memory");//(_this->Name);


	return NULL;

}
878
879
880
881//--------------------------------------------------------------------------
void MEM_CHUNK_POOLFreeChunk(MEM_CHUNK_POOL *_this,MEM_CHUNK *header)
// set chunk as free
// merge if possible with prev and next
// adding chunk to free chunks list.
//--------------------------------------------------------------------------
{

	MEM_CHUNK *prev = header;
	MEM_CHUNK *next = header;
	MEM_CHUNK *PrevFree;

	#if (MEM_PROFILE)
		_this->MemUsed -= MEM_CHUNKMemUsedGet(header);
		GS_ASSERT(_this->MemUsed >= 0);
	#endif

	// extend 'next' forward over any run of free successors
	while (next->next && (MEM_CHUNKIsFree(next->next)))
	{
		next = next->next;
	}

	// extend 'prev' backward over any run of free predecessors
	while (prev->prev && (MEM_CHUNKIsFree(prev->prev)))
	{
		prev = prev->prev;
	}

	if (prev != next)
	{
		// merge
		// prev becomes the new chunk.
		prev->next = next->next;

		if (next->next)
			next->next->prev = prev;

	}

	// since this is now a free chunk, we must add it to the free chunk list

	// find previous free
	PrevFree = MEM_CHUNK_POOLFindPreviousFreeChunk(_this,prev);
	if (PrevFree == NULL)
	{
		// this is first free chunk
		_this->pFirstFree = prev;

	}
	else
	{
		// link previous free chunk to this one.
		PrevFree->NextFree = prev;
	}

	// find and set next free chunk
	if(next->next)
		prev->NextFree = MEM_CHUNK_POOLFindNextFreeChunk(_this,next->next);
	else
		prev->NextFree = NULL;

	MEM_CHUNKFree(prev);


#if(0)
	//ToDo: steal unused memory from previous used chunk
	gsi_u32 destptr = (gsi_u32)prev->prev + prev->prev->MemAvailGet() + sizeof(MEM_CHUNK);
	destptr = MEMALIGN_POWEROF2(destptr,sizeof(MEM_CHUNK));

	// we can move back to this ptr. Is it worth it?
	if (destptr < (gsi_u32)prev )
		ChunkMove(prev,(MEM_CHUNK *)destptr);
#endif
}
954
955
956
957
958//--------------------------------------------------------------------------
959void *MEM_CHUNK_POOLmalloc(MEM_CHUNK_POOL *_this,size_t Size, gsi_i32 Alignment)
960//--------------------------------------------------------------------------
961{
962 void *mem;
963
964 // return ptr to the first block big enough
965 MEM_CHUNK *header = MEM_CHUNK_POOLAllocChunk( _this,Size, Alignment, gsi_false);
966
967 if (header)
968 {
969 // alloc new chunk
970 mem = MEM_CHUNKMemPtrGet(header);
971 return mem;
972 }
973
974 return NULL;
975}
976
977
978//--------------------------------------------------------------------------
979void *MEM_CHUNK_POOLmalloc_backwards(MEM_CHUNK_POOL *_this,size_t Size, gsi_i32 Alignment)
980//--------------------------------------------------------------------------
981{
982 void *mem;
983
984 // return ptr to the first block big enough
985 MEM_CHUNK *header = MEM_CHUNK_POOLAllocChunk( _this,Size, Alignment, gsi_true);
986
987 if (header)
988 {
989 // alloc new chunk
990 mem = MEM_CHUNKMemPtrGet(header);
991 return mem;
992 }
993
994 return NULL;
995}
996
997
998//--------------------------------------------------------------------------
void MEM_CHUNK_POOLfree(MEM_CHUNK_POOL *_this,void *mem)
// Release a pool allocation: map the user pointer back to its chunk
// header and return it to the free list (merging adjacent free chunks).
// (The old "return 0 if memory freed" comment was stale — this is void.)
//--------------------------------------------------------------------------
{
	MEM_CHUNK *header = Ptr_To_MEM_CHUNK(mem);
	MEM_CHUNK_POOLFreeChunk(_this,header);
}
1007
1008
1009//--------------------------------------------------------------------------
void *MEM_CHUNK_POOLrealloc(MEM_CHUNK_POOL *_this,void *oldmem, size_t newSize)
// Pool-local realloc: NULL oldmem acts as malloc; shrink splits the
// chunk in place; grow frees the old chunk, allocates a new one and
// memmoves the payload across.
//--------------------------------------------------------------------------
{
	MEM_CHUNK *oldheader;
	MEM_CHUNK *NewHeader;
	gsi_u32 OldSize;
	char MemType;

	MP_ASSERT(newSize)

	if (!oldmem)
	{
		return MEM_CHUNK_POOLmalloc( _this, newSize,PTR_ALIGNMENT);
	}


	oldheader = Ptr_To_MEM_CHUNK(oldmem);
	OldSize = MEM_CHUNKMemUsedGet(oldheader);

	if (newSize == OldSize)
		return oldmem;

	if (newSize < OldSize )
	{

		if ((newSize + 2 * sizeof(MEM_CHUNK))> OldSize )
		{
			// not enough room to create another chunk, can't shrink
			return oldmem;
		}

		// shrink it
		MEM_CHUNKMemUsedSet(oldheader,newSize);
		MEM_CHUNK_POOLSplitChunk(_this,oldheader, gsi_true);
		return MEM_CHUNKMemPtrGet(oldheader);
	}
	else
	{
		// get a new chunk
		// NOTE(review): the old chunk is freed BEFORE the new one is
		// allocated (relies on free not scrubbing the payload), and the
		// AllocChunk result is not checked for NULL before use — on OOM
		// (release builds, asserts off) this dereferences NULL. Verify.
		MemType = MEM_CHUNKMemTypeGet(oldheader);
		MEM_CHUNK_POOLFreeChunk(_this,oldheader);
		NewHeader = MEM_CHUNK_POOLAllocChunk( _this,newSize,PTR_ALIGNMENT,gsi_false);
		MEM_CHUNKMemTypeSet(NewHeader,MemType);

		memmove(MEM_CHUNKMemPtrGet(NewHeader),oldmem,OldSize);

		return MEM_CHUNKMemPtrGet(NewHeader);
	}

}
1060
1061//--------------------------------------------------------------------------
1062void MEM_CHUNK_POOLMEM_CHUNK_POOL(MEM_CHUNK_POOL *_this)
1063//--------------------------------------------------------------------------
1064{
1065 _this->Name[0] = 0;
1066 _this->HeaderEnd = NULL;
1067 _this->HeaderStart = NULL;
1068 _this->HeapSize = 0;
1069 _this->pFirstFree = NULL;
1070}
1071
1072
1073
1074
1075
1076//--------------------------------------------------------------------------
gsi_u32 MEM_CHUNK_POOLWalkForType(MEM_CHUNK_POOL *_this,int type, gsi_bool _LogUse)
// Walk the whole chunk chain, sanity-asserting the list links, and sum
// the total size of in-use chunks whose memtype matches 'type';
// optionally log each match.
//--------------------------------------------------------------------------
{
	MEM_CHUNK *header;
	gsi_u32 Total = 0;
	header = _this->HeaderStart;

	while (header)
	{
		MP_ASSERT((header->next == NULL) || ((gsi_uint)header < (gsi_uint)header->next )) // infinite loop or out of place
		MP_ASSERT((header->prev == NULL) || ((gsi_uint)header->prev < (gsi_uint)header )) // infinite loop or out of place
		MP_ASSERT((header->prev == NULL) || (header->prev->next == header)) // previous linked correctly to us
		MP_ASSERT((header->next == NULL) || (header->next->prev == header)) // next linked correctly to us
		MP_ASSERT( MEM_CHUNKMemUsedGet(header) <= MEM_CHUNKChunkSizeGet(header) ) // using too much mem

		if (!MEM_CHUNKIsFree(header) && (MEM_CHUNKMemTypeGet(header) == type))
		{
			//Don't log a message for the HeaderStart and HeaderEnd blocks.
			if ((header != _this->HeaderStart) && (header != _this->HeaderEnd))
			{
				// Used Chunk
				Total += MEM_CHUNKTotalSizeGet(header);
				if (_LogUse)
				{
					gsDebugFormat(GSIDebugCat_App, GSIDebugType_Misc, GSIDebugLevel_Notice,"MemFound ptr:0x%8x size:%8u %s\n", MEM_CHUNKMemPtrGet(header),
						MEM_CHUNKMemUsedGet(header),MemMgrBufferGetName((gsMemMgrContext) type));
				}
			}

		}

		// make sure we hit the correct end
		MP_ASSERT (header->next || (header == _this->HeaderEnd))
		header = header->next;

	}
	return Total;
}
1115
1116
//--------------------------------------------------------------------------
// Walk the pool's full chunk chain and its free-chunk chain, validating both,
// and fill *pS with usage statistics: totals, per-type usage, waste/overhead,
// chunk counts, largest free chunk, and high-water marks. MP_ASSERTs fire on
// any structural corruption (bad links, uncoalesced free neighbors, free
// chunks missing from the free list).
void MEM_CHUNK_POOLMemStatsGet(MEM_CHUNK_POOL *_this,MEM_STATS *pS)
{
	int ChunksFreeLostCount ;
	int i,type;
	MEM_CHUNK *header ;
	MEM_CHUNK *NextFree;
	MEM_STATSClear(pS);

	// check free chunk linked list
	header = _this->HeaderStart;
	NextFree = _this->pFirstFree;



	/*** Test validity of all chunks chain ***/
	while (header)
	{
		// Chunks must be address-ordered and correctly doubly linked.
		MP_ASSERT((header->next == NULL) || ((gsi_uint)header < (gsi_uint)header->next )) // infinite loop or out of place
		MP_ASSERT((header->prev == NULL) || ((gsi_uint)header->prev < (gsi_uint)header )) // infinite loop or out of place
		MP_ASSERT((header->prev == NULL) || (header->prev->next == header)) // previous linked correctly to us
		MP_ASSERT((header->next == NULL) || (header->next->prev == header)) // next linked correctly to us
		MP_ASSERT( MEM_CHUNKMemUsedGet(header) <= MEM_CHUNKChunkSizeGet(header) ) // using too much mem

		pS->MemTotal += MEM_CHUNKTotalSizeGet(header);
		if (!MEM_CHUNKIsFree(header))
		{
			// Used Chunk
			pS->ChunksUsedCount++;
			if (pS->ChunksUsedCount_At_HighWater < pS->ChunksUsedCount)
				pS->ChunksUsedCount_At_HighWater = pS->ChunksUsedCount;

			// calc overhead and waste
			// (waste = slack between what the caller asked for and the chunk capacity)
			pS->MemWasted += MEM_CHUNKTotalSizeGet(header) - MEM_CHUNKMemUsedGet(header);
			pS->MemUsed += MEM_CHUNKTotalSizeGet(header);

			// Per-tag accounting, bucketed by the chunk's memory type.
			type = MEM_CHUNKMemTypeGet(header);
			pS->MemType_MemUsed[type] += MEM_CHUNKTotalSizeGet(header);
			pS->MemType_ChunksCount[type]++;

		}
		else
		{
			// free chunk
			MP_ASSERT((header->NextFree == NULL) || ((gsi_uint)header < (gsi_uint)header->NextFree )) // infinite loop or out of place

			// make sure we aren't fragmented, as this ruins some algorithm assumptions
			// (adjacent free chunks should have been coalesced into one)
			MP_ASSERT((header->next == NULL) || (!MEM_CHUNKIsFree(header->next))) // infinite loop or out of place
			MP_ASSERT((header->prev == NULL) || (!MEM_CHUNKIsFree(header->prev))) // infinite loop or out of place

			// previous free chunk linked correctly to us, we aren't a lost chunk
			MP_ASSERT(header == NextFree)
			NextFree = header->NextFree;

			// calc overhead and waste (in this case, the same value...sizeof(MEM_CHUNK) header)
			pS->MemWasted += MEM_CHUNKTotalSizeGet(header) - MEM_CHUNKChunkSizeGet(header);
			pS->MemUsed += MEM_CHUNKTotalSizeGet(header) - MEM_CHUNKChunkSizeGet(header);

			pS->ChunksFreeCount++;
			if (pS->ChunksFreeLargestAvail < MEM_CHUNKChunkSizeGet(header))
				pS->ChunksFreeLargestAvail = MEM_CHUNKChunkSizeGet(header);
		}

		pS->ChunksCount++;

		// make sure we hit the correct end
		MP_ASSERT (header->next || (header == _this->HeaderEnd))
		header = header->next;

	}

	// Check free chunks
	header = _this->HeaderStart;


	/*** Test validity of free chunks chain ***/
	// Walk heap looking for first free chunk,
	while(header && (!MEM_CHUNKIsFree(header)))
		header = header->next;

	// make sure the first free one is linked correctly
	MP_ASSERT(_this->pFirstFree == header)

	// Every free chunk counted in the full walk must be reachable through the
	// free list; a non-zero remainder at the end means lost free blocks.
	ChunksFreeLostCount = pS->ChunksFreeCount;
	while (header)
	{
		// add up sizes
		ChunksFreeLostCount --;
		pS->MemAvail +=MEM_CHUNKChunkSizeGet(header);
		header = header->NextFree;

	}


	// Update stats
	if (pS->MemUsed_At_HighWater < pS->MemUsed)
		pS->MemUsed_At_HighWater = pS->MemUsed;

	for ( i=0;i< MEM_TYPES_MAX;i++ )
	{
		if (pS->MemType_MemUsed_At_HighWater[i] < pS->MemType_MemUsed[i] )
			pS->MemType_MemUsed_At_HighWater[i] = pS->MemType_MemUsed[i];
	}

	MP_ASSERT(ChunksFreeLostCount == 0) // lost free blocks
}
1223
1224//--------------------------------------------------------------------------
1225void MEM_CHUNK_POOLCheckValidity(MEM_CHUNK_POOL *_this)
1226{
1227 MEM_STATS stats;
1228 MEM_CHUNK_POOLMemStatsGet(_this,&stats);
1229
1230}
1231
1232
//--------------------------------------------------------------------------
// Paint a coarse "memory map" of the pool into Table: each table cell covers
// _HeapSize/TableSize bytes of the address range starting at _HeapStart, and
// every cell overlapped by a used chunk is stamped with that chunk's memory
// type/tag. Cells covering only free space are left untouched.
void MEM_CHUNK_POOLFillMemoryTable(MEM_CHUNK_POOL *_this,char *Table, const int TableSize, gsi_u32 _HeapStart, gsi_u32 _HeapSize)
//--------------------------------------------------------------------------
{
	int s,e,j;
	gsi_u32 start_address;
	gsi_u32 end_address ;
	MEM_CHUNK *pChunk = _this->HeaderStart;
	MP_ASSERT(_this->HeapSize)


	while (pChunk)
	{
		if (!MEM_CHUNKIsFree(pChunk))
		{
			// A used chunk spans from its header to just before the next header.
			// NOTE(review): assumes every used chunk has a non-NULL ->next
			// (i.e. the last chunk is the HeaderEnd sentinel); a NULL next here
			// would wrap end_address to 0xFFFFFFFF — confirm against pool layout.
			start_address = (gsi_uint)pChunk;
			end_address = ((gsi_uint)pChunk->next)-1;

			// translate address into table positions
			// (the >>4 on both factors is presumably to keep the 32-bit multiply
			// from overflowing on large heaps — TODO confirm; also note a
			// _HeapSize below 16 would make the divisor zero)
			s= ((start_address - _HeapStart) * (TableSize>>4)) / (_HeapSize>>4);
			MP_ASSERT(s < TableSize)
			MP_ASSERT(s >= 0)

			e= (( end_address - _HeapStart) * (TableSize>>4)) / (_HeapSize>>4);
			MP_ASSERT(e < TableSize)
			MP_ASSERT(e >= 0)

			for ( j= s; j<= e; j++)
			{
				// if(Table[j] != -2)
				// Table[j] = -1;
				// else
				Table[j] = MEM_CHUNKMemTypeGet(pChunk);
			}

		}
		pChunk = pChunk->next;
	}


}
1274
1275
1276
1277static MEM_CHUNK_POOL gChunkPool [gsMemMgrContext_Count] ;
1278
1279
1280
1281// Use this to determine which pool and subsequent allocations will be taken from.
1282gsMemMgrContext gsMemMgrContextCurrent = gsMemMgrContext_Default;
1283
1284//static GSICriticalSection gMemCrit;
1285
1286//--------------------------------------------------------------------------
1287gsMemMgrContext gsMemMgrContextFind (void *ptr)
1288// find pool corresponding to mem ptr.
1289{
1290 int i;
1291 // find which pool owns this pointer!!!!, this is kind of a hack.... but here goes.
1292 for (i=0; i< gsMemMgrContext_Count;i++)
1293 {
1294 if (
1295 MEM_CHUNK_POOLIsValid(&gChunkPool[i]) &&
1296 MEM_CHUNK_POOLItemIsInPoolMemory(&gChunkPool[i],ptr)
1297 )
1298 {
1299 return (gsMemMgrContext) i;
1300 }
1301
1302 }
1303 return gsMemMgrContext_Invalid;
1304}
1305
1306void *gs_malloc(size_t size)
1307{
1308 GS_ASSERT(size)
1309 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[gsMemMgrContextCurrent]),"malloc: context is invalid mempool");
1310
1311 return MEM_CHUNK_POOLmalloc(&gChunkPool[gsMemMgrContextCurrent], size,PTR_ALIGNMENT);
1312}
1313
1314void *gs_calloc(size_t size,size_t size2)
1315{
1316 GS_ASSERT(size)
1317 GS_ASSERT(size2)
1318 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[gsMemMgrContextCurrent]),"calloc: context is invalid mempool");
1319
1320 return MEM_CHUNK_POOLmalloc(&gChunkPool[gsMemMgrContextCurrent], size*size2,PTR_ALIGNMENT);
1321}
1322
1323void *gs_realloc(void* ptr,size_t size)
1324{
1325 GS_ASSERT(size)
1326 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[gsMemMgrContextCurrent]),"realloc: context is invalid mempool");
1327
1328 return MEM_CHUNK_POOLrealloc(&gChunkPool[gsMemMgrContextCurrent],ptr, size);
1329}
1330
1331void *gs_memalign(size_t boundary,size_t size)
1332{
1333 GS_ASSERT(size)
1334 GS_ASSERT(boundary)
1335 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[gsMemMgrContextCurrent]),"memalign: context is invalid mempool");
1336
1337 return MEM_CHUNK_POOLmalloc(&gChunkPool[gsMemMgrContextCurrent], size,boundary);
1338}
1339
1340void gs_free(void *ptr)
1341{
1342 gsMemMgrContext context;
1343
1344 context = gsMemMgrContextFind(ptr);
1345 GS_ASSERT_STR(context != gsMemMgrContext_Invalid,"Attempt to free invalid ptr")
1346
1347 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[context]),"free: ptr context is invalid mempool");
1348 MEM_CHUNK_POOLfree(&gChunkPool[context],ptr);
1349}
1350
1351//--------------------------------------------------------------------------
1352const char *MemMgrBufferGetName(gsMemMgrContext context)
1353{
1354 GS_ASSERT_STR(context != gsMemMgrContext_Invalid, "Invalid Context");
1355 GS_ASSERT_STR(context < gsMemMgrContext_Count, "Context out of range");
1356 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[context ]),"Invalid mempool");
1357
1358 return gChunkPool[context].Name;
1359}
1360
1361
1362void gsMemMgrContextSet(gsMemMgrContext context)
1363{
1364 GS_ASSERT_STR(context != gsMemMgrContext_Invalid, "Invalid Context");
1365 GS_ASSERT_STR(context < gsMemMgrContext_Count, "Context out of range");
1366 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[context]),"Setting context to invalid mempool");
1367
1368 gsMemMgrContextCurrent = context;
1369}
1370
1371
1372
1373//--------------------------------------------------------------------------
1374// call this to enable GameSpy's provided memory manager
1375// Create a mem pool for the given context. If that context is in use, it will return the next available
1376// if none are available it will return gsMemMgrContext_Invalid
1377// ex use: gQR2MemContext = gsMemMgrCreate (0,0,16 * 1024);
1378// will find the first avaiable spot, create a mem pool of 16k, and return the context handle.
1379// then later in your API
1380// enter an API function
1381// gsMemMgrContextPush(gQR2MemContext);
1382// do some allocs
1383// gQR2MemContextPop()
1384// return from function.
1385gsMemMgrContext gsMemMgrCreate (gsMemMgrContext context, const char *PoolName,void* thePoolBuffer, size_t thePoolSize)
1386{
1387 char *ptr = (char *)thePoolBuffer;
1388
1389 GS_ASSERT_STR(thePoolSize,"Cannnot create a pool of size 0")
1390 GS_ASSERT_STR(thePoolSize,"thePoolBuffer ptr is inivalid");
1391 GS_ASSERT_STR(((((gsi_uint)thePoolSize) &15) ==0) ,"PoolSize must be aligned to 16 bytes");
1392 GS_ASSERT_STR(((((gsi_uint)thePoolBuffer)&15) ==0) ,"thePoolBuffer must be aligned to 16 bytes");
1393
1394
1395 while (MEM_CHUNK_POOLIsValid(&gChunkPool[context]))
1396 {
1397 context = (gsMemMgrContext)(context + 1);
1398 }
1399 if (context == gsMemMgrContext_Count)
1400 {
1401 // Warn!!!!
1402 gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
1403 "Out of memory context handles!\n");
1404 GS_ASSERT(0);
1405 return gsMemMgrContext_Invalid; // ran out of context slots
1406 }
1407
1408 MEM_CHUNK_POOLCreate(&gChunkPool[context],PoolName,ptr,thePoolSize);
1409 // Set call backs.
1410 gsiMemoryCallbacksSet(gs_malloc, gs_free, gs_realloc, gs_memalign);
1411 return context;
1412}
1413
1416void gsMemMgrDestroy(gsMemMgrContext context)
1417{
1418 GS_ASSERT(gChunkPool[context].HeapSize != 0);
1419 MEM_CHUNK_POOLDestroy(&gChunkPool[context]);
1420
1421 // if this is the last one,
1422#if(0)
1423 {
1424 // Set call backs.
1425 gsiMemoryCallbacksSet(malloc,free,realloc,memalign);
1426
1427 // Reset memmgr
1428 gsiDeleteCriticalSection(&gMemCrit);
1429
1430 // #ifdef _GSI_MULTI_THREADED_
1431 // gsiLeaveCriticalSection(&gMemCrit);
1432 // gsiEnterCriticalSection(&gMemCrit);
1433 // #endif
1434 }
1435#endif
1436}
1437
1438
1439//--------------------------------------------------------------------------
1440void gsMemMgrTagPush (gsi_u8 tag)
1441{
1442 GS_ASSERT(MemTagStackIndex < MEM_TAG_STACK_MAX-1)
1443 MemTagStackIndex++;
1444 MemTagStack[MemTagStackIndex] = tag;
1445}
1446//--------------------------------------------------------------------------
1447void gsMemMgrTagPop ()
1448{
1449 GS_ASSERT(MemTagStackIndex > 0)
1450 MemTagStackIndex--;
1451}
1452//--------------------------------------------------------------------------
1453gsi_u8 gsMemMgrTagGet (void *ptr)
1454{
1455 GS_ASSERT(ptr);
1456 return MEM_CHUNKMemTypeGet( Ptr_To_MEM_CHUNK(ptr));
1457}
1458//--------------------------------------------------------------------------
1459gsi_u32 gsMemMgrMemUsedByTagGet(gsi_u8 tag)
1460{
1461 int i;
1462 gsi_u32 used = 0;
1463 for ( i=0;i< gsMemMgrContext_Count;i++)
1464 {
1465 used+= MEM_CHUNK_POOLWalkForType(&gChunkPool[i] ,tag, gsi_false);
1466 }
1467 return used;
1468
1469}
1470
1471//--------------------------------------------------------------------------
1472void gsMemMgrContextPush(gsMemMgrContext NewType)
1473{
1474// PARANOID_MemProfilerCheck();
1475 GS_ASSERT(MemTypeStackIndex < MEM_CONTEXT_STACK_MAX)
1476 GS_ASSERT(NewType < gsMemMgrContext_Count)
1477
1478// gsDebugFormat(GSIDebugCat_App, GSIDebugType_State, GSIDebugLevel_Comment,"MemProfilerStart: %s\n",MemProfiler.MemPool[NewType].Name);
1479 MemTypeStack[MemTypeStackIndex++] = gsMemMgrContextCurrent;
1480 gsMemMgrContextCurrent = NewType;
1481}
1482
1483//--------------------------------------------------------------------------
1484gsMemMgrContext gsMemMgrContextPop()
1485{
1486// PARANOID_MemProfilerCheck();
1487 GS_ASSERT(MemTypeStackIndex > 0)
1488// gsDebugFormat(GSIDebugCat_App, GSIDebugType_State, GSIDebugLevel_Comment,"MemProfilerEnd: %s\n",MemProfiler.MemPool[OldType].Name);
1489 gsMemMgrContextCurrent = MemTypeStack[--MemTypeStackIndex];
1490 return gsMemMgrContextCurrent;
1491}
1492
1493
1494//--------------------------------------------------------------------------
1495// return total available memory for the given memory pool
1496gsi_u32 gsMemMgrMemAvailGet (gsMemMgrContext context)
1497{
1498 MEM_STATS stats;
1499 MEM_STATSClearAll(&stats);
1500 GS_ASSERT_STR(context < gsMemMgrContext_Count, "gsMemMgrMemAvailGet: context out of range");
1501 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[context]), "gsMemMgrMemAvailGet: context is invalid mempool");
1502 MEM_CHUNK_POOLMemStatsGet (&gChunkPool[context], &stats);
1503 return stats.MemAvail;
1504}
1505
1506//--------------------------------------------------------------------------
1507// return total used memory for the given memory pool
1508gsi_u32 gsMemMgrMemUsedGet (gsMemMgrContext context)
1509{
1510 MEM_STATS stats;
1511 MEM_STATSClearAll(&stats);
1512 GS_ASSERT_STR(context < gsMemMgrContext_Count, "gsMemMgrMemUsedGet: context out of range");
1513 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[context]), "gsMemMgrMemUsedGet: context is invalid mempool");
1514 MEM_CHUNK_POOLMemStatsGet (&gChunkPool[context], &stats);
1515 return stats.MemUsed;
1516}
1517
1518
1519//--------------------------------------------------------------------------
1520// return largest allocatable chunk the given memory pool. This
1521// will be the same or probably smaller then the value returned by gsMemMgrMemAvailGet
1522// depending on degree of memory fragmentation.
1523gsi_u32 gsMemMgrMemLargestAvailGet (gsMemMgrContext context)
1524{
1525 MEM_STATS stats;
1526 MEM_STATSClearAll(&stats);
1527 GS_ASSERT_STR(context < gsMemMgrContext_Count, "gsMemMgrMemLargestAvailGet: context out of range");
1528 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[context]), "gsMemMgrMemLargestAvailGet: context is invalid mempool");
1529 MEM_CHUNK_POOLMemStatsGet (&gChunkPool[context], &stats);
1530 return stats.ChunksFreeLargestAvail;
1531}
1532
1533//--------------------------------------------------------------------------
1534gsi_u32 gsMemMgrMemHighwaterMarkGet (gsMemMgrContext context)
1535{
1536 GS_ASSERT_STR(context < gsMemMgrContext_Count, "gsMemMgrMemLargestAvailGet: context out of range");
1537 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[context]), "gsMemMgrMemLargestAvailGet: context is invalid mempool");
1538
1539 #if(MEM_PROFILE)
1540 return gChunkPool[context].HWMemUsed;
1541 #else
1542 // Display info - App type b/c it was requested by the app
1543 gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
1544 "gsMemMgrMemHighwaterMarkGet called without MEM_PROFILE enabled.");
1545 return 0;
1546 #endif
1547}
1548
1549//--------------------------------------------------------------------------
1550void gsMemMgrValidateMemoryPool()
1551{
1552 GS_ASSERT_STR(MEM_CHUNK_POOLIsValid(&gChunkPool[gsMemMgrContextCurrent]),"memalign: context is invalid mempool");
1553 MEM_CHUNK_POOLCheckValidity(&gChunkPool[gsMemMgrContextCurrent]);
1554}
1555
1556
// Show allocated, free, total memory, num blocks
// NOTE: the entire body is compiled out with #if(0); it targets an older
// GSIMemoryBlock-based manager (gMemoryMgr / gMemCrit) that the current
// chunk-pool implementation does not define. As shipped this is a no-op,
// kept for reference only.
void gsMemMgrDumpStats()
{
#if(0)
	int numUsed = 0;
	int numFree = 0;

	struct GSIMemoryBlock* aTempPtr = NULL;

	gsiEnterCriticalSection(&gMemCrit);

	// Display the number of free blocks
	// TODO: dump size statistics
	aTempPtr = gMemoryMgr->mFirstFreeBlock;
	while(aTempPtr != NULL)
	{
		numFree++;
		aTempPtr = aTempPtr->mNext;
	}

	// Display the number of used blocks
	// TODO: dump size statistics
	aTempPtr = gMemoryMgr->mFirstUsedBlock;
	while(aTempPtr != NULL)
	{
		numUsed++;
		aTempPtr = aTempPtr->mNext;
	}

	// Display info - App type b/c it was requested by the app
	gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
		"BytesUsed: %d, BlocksUsed: %d, BlocksFree: %d\r\n",
		gMemoryMgr->mMemUsed, numUsed, numFree);

	gsiLeaveCriticalSection(&gMemCrit);
#endif
}
1596
1597
// Dump every live allocation in the pool, in linear (address) order.
// NOTE: the entire body is compiled out with #if(0); like gsMemMgrDumpStats
// it targets an older GSIMemoryBlock-based manager (gMemoryMgr / gMemCrit)
// that the current chunk-pool implementation does not define. As shipped
// this is a no-op, kept for reference only.
void gsMemMgrDumpAllocations()
{
#if(0)
	struct GSIMemoryBlock* aBlockPtr = NULL;
	gsi_time aStartTime = 0;
	gsi_i32 aNumAllocations = 0;
	gsi_i32 aNumBytesAllocated = 0;

	gsiEnterCriticalSection(&gMemCrit);

	aStartTime = current_time();
	aBlockPtr = (GSIMemoryBlock*)gMemoryMgr->mPoolStart;

	// Announce start
	gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
		"Dumping allocations from pool - [0x%08x] %d bytes.\r\n",
		gMemoryMgr->mPoolStart, gMemoryMgr->mPoolSize);

	// Dump information about each allocated block
	// - Do this in linear order, not list order
	while(aBlockPtr != NULL)
	{
		// If it's in use, verify contents and dump info
		if (gsiMemBlockIsFlagged(aBlockPtr, BlockFlag_Used))
		{
			int anObjectSize = gsiMemBlockGetObjectSize(aBlockPtr);
			aNumAllocations++;
			aNumBytesAllocated += anObjectSize;

			if (aBlockPtr == gMemoryMgr->mPoolStart)
			{
				gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
					"\t[0x%08x] Size: %d (memmgr instance)\r\n", (gsi_u32)aBlockPtr, anObjectSize);
			}
			else
			{
				gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
					"\t[0x%08x] Size: %d\r\n", (gsi_u32)(gsiMemBlockGetObjectPtr(aBlockPtr)), anObjectSize);
			}
		}
		else
		{
			// Verify that the block has the correct memory fill
		}
		// Get linear next (not list next!)
		aBlockPtr = gsiMemBlockGetLinearNext(aBlockPtr);
	}

	// Announce finish
	gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
		"\t--%d allocations, %d bytes allocated.\r\n", aNumAllocations, aNumBytesAllocated);
	gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
		"\t--%d peak memory usage\r\n", gMemoryMgr->mPeakMemoryUsage);

	gsDebugFormat(GSIDebugCat_App, GSIDebugType_Memory, GSIDebugLevel_Comment,
		"Memory dump complete. (%d ms)\r\n", current_time() - aStartTime);

	gsiLeaveCriticalSection(&gMemCrit);

	GSI_UNUSED(aStartTime); // may be unused if common debug is not defined
#endif
}
1662
1663
1664
1665#if (1) // test stuff
1666
#define PTR_TABLE_SIZE 2048
// Scratch table of live test allocations for gsMemMgrSelfText.
static int PtrTableCount = 0;
static void *PtrTable[PTR_TABLE_SIZE]; // was a hard-coded 2048, now tied to the macro
1670
// Uniform random integer in the half-open range [0, x).
int Random(int x)
{
	return Util_RandInt(0, x);
}
1675//--------------------------------------------------------------------------
1676void gsMemMgrSelfText()
1677//--------------------------------------------------------------------------
1678{
1679
1680
1681 static MEM_CHUNK_POOL gChunkPool;
1682 int size = 32 * 1024 * 1024;
1683 int c= 0;
1684 int i,j,k;
1685
1686 char *ptr = (char *) ( ((gsi_uint)malloc(size-PTR_ALIGNMENT)+(PTR_ALIGNMENT-1))&~ (PTR_ALIGNMENT-1) ) ;
1687 MEM_CHUNK_POOLCreate(&gChunkPool,"",ptr,size);
1688
1689 while(1)
1690 {
1691
1692 i= Random(4);
1693 if ((i==0) &&(PtrTableCount < 1024))
1694 {
1695 // malloc
1696 j = Random(1024)+1;
1697 k = 32<< (Random(4));
1698
1699 if (c&1)
1700 PtrTable[PtrTableCount] = MEM_CHUNK_POOLmalloc(&gChunkPool, j,k);
1701 else
1702 PtrTable[PtrTableCount] = MEM_CHUNK_POOLmalloc_backwards(&gChunkPool, j,k);
1703
1704 if(PtrTable[PtrTableCount])
1705 {
1706 PtrTableCount++;
1707 }
1708 else
1709 {
1710 GS_ASSERT(0);
1711 }
1712
1713 }
1714 else
1715 if ((i==1) &&(PtrTableCount))
1716 {
1717 // free
1718 j = Random(PtrTableCount);
1719 MP_ASSERT(j < PtrTableCount)
1720
1721
1722 MEM_CHUNK_POOLfree(&gChunkPool,PtrTable[j]);
1723
1724 // swap with last.
1725 PtrTableCount--;
1726 PtrTable[j] = PtrTable[PtrTableCount];
1727
1728 }
1729 else
1730 if ((i==2) &&(PtrTableCount))
1731 {
1732 j = Random(PtrTableCount);
1733 MP_ASSERT(j < PtrTableCount)
1734
1735 // realloc
1736 k = Random(1024) +1;
1737 #if(1)
1738 PtrTable[j] = MEM_CHUNK_POOLrealloc(&gChunkPool,PtrTable[j], k);
1739 #else
1740 // skip
1741 PtrTable[j] = PtrTable[j];
1742 #endif
1743
1744 if(PtrTable[j])
1745 {
1746 }
1747 else
1748 {
1749 GS_ASSERT(0);
1750 }
1751
1752 }
1753 else
1754 continue; // skip count
1755
1756 c++;
1757 MEM_CHUNK_POOLCheckValidity(&gChunkPool);
1758 }
1759
1760}
1761
1762
1763#endif
1764
1765
1766
1767
1768
1769
1770
1773#endif // GSI_MEM_MANAGED
1774
Definition gsMemory.c:68