*/
static VOID
HEAP_InsertFreeBlock(PHEAP heap,
- ARENA_FREE *pArena)
+ ARENA_FREE *pArena,
+ BOOL last)
{
FREE_LIST_ENTRY *pEntry = heap->freeList;
while (pEntry->size < pArena->size) pEntry++;
- pArena->size |= ARENA_FLAG_FREE;
- pArena->next = pEntry->arena.next;
- pArena->next->prev = pArena;
- pArena->prev = &pEntry->arena;
- pEntry->arena.next = pArena;
+ if (last)
+ {
+ /* insert at end of free list, i.e. before next free list entry */
+ pEntry++;
+ if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS])
+ {
+ pEntry = heap->freeList;
+ }
+ pArena->prev = pEntry->arena.prev;
+ pArena->prev->next = pArena;
+ pArena->next = &pEntry->arena;
+ pEntry->arena.prev = pArena;
+ }
+ else
+ {
+ /* insert at head of free list */
+ pArena->next = pEntry->arena.next;
+ pArena->next->prev = pArena;
+ pArena->prev = &pEntry->arena;
+ pEntry->arena.next = pArena;
+ }
+ pArena->size |= ARENA_FLAG_FREE;
}
return FALSE;
}
}
- subheap->commitSize = size;
+ subheap->commitSize += commitsize;
return TRUE;
}
return FALSE;
}
}
- subheap->commitSize = size;
+ subheap->commitSize -= decommitsize;
return TRUE;
}
static void HEAP_CreateFreeBlock( SUBHEAP *subheap, void *ptr, DWORD size )
{
ARENA_FREE *pFree;
+ BOOL last;
/* Create a free arena */
/* Set the next block PREV_FREE flag and pointer */
- if ((char *)ptr + size < (char *)subheap + subheap->size)
+ last = ((char *)ptr + size >= (char *)subheap + subheap->size);
+ if (!last)
{
DWORD *pNext = (DWORD *)((char *)ptr + size);
*pNext |= ARENA_FLAG_PREV_FREE;
/* Last, insert the new block into the free list */
pFree->size = size - sizeof(*pFree);
- HEAP_InsertFreeBlock( subheap->heap, pFree );
+ HEAP_InsertFreeBlock( subheap->heap, pFree, last );
}
{
HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
(pArena->size & ARENA_SIZE_MASK) - size );
- pArena->size = (pArena->size & ~ARENA_SIZE_MASK) | size;
+ /* assign size plus previous arena flags */
+ pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
}
else
{
/* Fill the sub-heap structure */
+ subheap = (SUBHEAP *)address;
subheap->heap = heap;
subheap->selector = selector;
subheap->size = totalSize;
if (!HEAP_InitSubHeap( heap? heap : (HEAP *)address,
address, flags, commitSize, totalSize ))
{
- if (!(flags & HEAP_NO_VALLOC))
+ if (address && !(flags & HEAP_NO_VALLOC))
{
ULONG dummySize = 0;
ZwFreeVirtualMemory(NtCurrentProcess(),
&address,
&dummySize,
MEM_RELEASE);
- return NULL;
}
+ return NULL;
}
return (SUBHEAP *)address;
pArena = pEntry->arena.next;
while (pArena != &heap->freeList[0].arena)
{
- if (pArena->size > size)
+ DWORD arena_size = (pArena->size & ARENA_SIZE_MASK) +
+ sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
+ if (arena_size >= size)
{
subheap = HEAP_FindSubHeap( heap, pArena );
if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
(DWORD)heap, size );
return NULL;
}
- size += sizeof(SUBHEAP) + sizeof(ARENA_FREE);
+ /* Make sure that we have a big enough size *committed* to fit another
+ * last free arena in!
+ * So just one heap struct, one first free arena which will eventually
+ * become in-use, and HEAP_MIN_BLOCK_SIZE for the second free arena that
+ * might get assigned all remaining free space in HEAP_ShrinkBlock() */
+ size += sizeof(SUBHEAP) + sizeof(ARENA_FREE) + HEAP_MIN_BLOCK_SIZE;
if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
max( HEAP_DEF_SIZE, size ) )))
return NULL;
{
ERR("Heap %08lx: bad flags %lx for in-use arena %08lx\n",
(DWORD)subheap->heap, pArena->size & ~ARENA_SIZE_MASK, (DWORD)pArena );
+ return FALSE;
}
/* Check arena size */
if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
* RETURNS
* Handle of heap: Success
* NULL: Failure
+ *
+ * @implemented
*/
HANDLE STDCALL
RtlCreateHeap(ULONG flags,
* RETURNS
* TRUE: Success
* FALSE: Failure
+ *
+ * @implemented
*/
BOOL STDCALL
RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
* RETURNS
* Pointer to allocated memory block
* NULL: Failure
+ *
+ * @implemented
*/
PVOID STDCALL
RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
/* Validate the parameters */
- if (!heapPtr) return NULL;
+ if (!heapPtr)
+ {
+ if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
+ return NULL;
+ }
flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
flags |= heapPtr->flags;
if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
TRACE("(%08x,%08lx,%08lx): returning NULL\n",
heap, flags, size );
if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
+ if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
return NULL;
}
* RETURNS
* TRUE: Success
* FALSE: Failure
+ *
+ * @implemented
*/
BOOLEAN STDCALL RtlFreeHeap(
HANDLE heap, /* [in] Handle of heap */
* RETURNS
* Pointer to reallocated memory block
* NULL: Failure
+ *
+ * @implemented
*/
LPVOID STDCALL RtlReAllocateHeap(
HANDLE heap, /* [in] Handle of heap block */
if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
TRACE("(%08x,%08lx,%08lx,%08lx): returning NULL\n",
heap, flags, (DWORD)ptr, size );
+ if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
return NULL;
}
heapPtr->flags))
{
if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
+ if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
return NULL;
}
HEAP_ShrinkBlock( subheap, pArena, size );
!(pNew = HEAP_FindFreeBlock( heapPtr, size, &newsubheap )))
{
if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
+ if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
return NULL;
}
/***********************************************************************
* HeapCompact (KERNEL32.335)
+ *
+ * @unimplemented
*/
DWORD STDCALL RtlCompactHeap( HANDLE heap, DWORD flags )
{
* RETURNS
* TRUE: Success
* FALSE: Failure
+ *
+ * @implemented
*/
BOOL STDCALL RtlLockHeap(
HANDLE heap /* [in] Handle of heap to lock for exclusive access */
* RETURNS
* TRUE: Success
* FALSE: Failure
+ *
+ * @implemented
*/
BOOL STDCALL RtlUnlockHeap(
HANDLE heap /* [in] Handle to the heap to unlock */
* RETURNS
* Size in bytes of allocated memory
* 0xffffffff: Failure
+ *
+ * @implemented
*/
DWORD STDCALL RtlSizeHeap(
HANDLE heap, /* [in] Handle of heap */
* RETURNS
* TRUE: Success
* FALSE: Failure
+ *
+ * @implemented
*/
BOOL STDCALL RtlValidateHeap(
HANDLE heap, /* [in] Handle to the heap */
PVOID block /* [in] Optional pointer to memory block to validate */
) {
- return HEAP_IsRealArena( heap, flags, block, QUIET );
+ HEAP *heapPtr = HEAP_GetPtr( heap );
+ if (!heapPtr) return FALSE;
+ return HEAP_IsRealArena( heapPtr, flags, block, QUIET );
}
}
+/*
+ * @implemented
+ */
NTSTATUS STDCALL
RtlEnumProcessHeaps(DWORD STDCALL(*func)(void*,LONG),
LONG lParam)
}
+/*
+ * @implemented
+ */
ULONG STDCALL
RtlGetProcessHeaps(ULONG HeapCount,
HANDLE *HeapArray)
}
+/*
+ * @implemented
+ */
BOOLEAN STDCALL
RtlValidateProcessHeaps(VOID)
{