#if SPECIAL
#include "main.h"
#endif

// Carves a named table of the given size out of the single pre-allocated global
// block and advances the global position past it.
static void Memory_InitTable(global_memory *GlobalMemory, memory *Memory, uint64 Size,
                             memory_table_list TableName, char *Name, uint64 Block_ElementSize = 0)
{
    memory_table *Table = &Memory->Slot[TableName];
    Table->Name = Name;
    Table->Address = (ptrsize *)((uint8 *)GlobalMemory->Address + GlobalMemory->CurrentPosition);
    // Table->Address = malloc(Size);
    Table->Size = Size;
    Table->Block_ElementSize = Block_ElementSize;
    GlobalMemory->CurrentPosition += Size;
}

// Scans the table for the first unoccupied block, zeroes it, and returns its index.
static uint32 Memory_Block_AllocateNew(memory *Memory, memory_table_list TableName)
{
    memory_table *Table = &Memory->Slot[TableName];
    Assert(Table->Block_ElementSize != 0);
    uint32 Index = 0;
    uint8 *Address_Playhead = (uint8 *)Table->Address;
    while (*Address_Playhead != 0) {
        Address_Playhead += Table->Block_ElementSize;
        Index++;
    }
    Assert((Address_Playhead - (uint8 *)Table->Address) < Table->Size);
    Arbitrary_Zero(Address_Playhead, Table->Block_ElementSize);
    return Index;
}

// Returns the address of the block at Index; AssertExists additionally checks
// the block's leading Occupied byte.
static void * Memory_Block_AddressAtIndex(memory *Memory, memory_table_list TableName,
                                          uint32 Index, bool32 AssertExists = 1)
{
    memory_table *Table = &Memory->Slot[TableName];
    Assert(Table->Block_ElementSize != 0);
    uint8 *Address = (uint8 *)Table->Address + (Table->Block_ElementSize * Index);
    if (AssertExists)
        Assert(*Address != 0);
    Assert((Address - (uint8 *)Table->Address) < Table->Size);
    return (void *)Address;
}

// Reverse lookup: index of the block containing Address. Does not check occupancy.
static uint16 Memory_Block_LazyIndexAtAddress(memory *Memory, memory_table_list TableName, void *Address)
{
    memory_table *Table = &Memory->Slot[TableName];
    return ((uint8 *)Address - (uint8 *)Table->Address) / Table->Block_ElementSize;
}

// Allocates a new block and returns its address instead of its index.
static void * Memory_Block_AllocateAddress(memory *Memory, memory_table_list TableName)
{
    uint32 FileIndex = Memory_Block_AllocateNew(Memory, TableName);
    return Memory_Block_AddressAtIndex(Memory, TableName, FileIndex, 0);
}

// IMPORTANT(fox): All block data structs have to start with a uint8 Occupied variable!

// Iterates over the occupied blocks of a table. Returns 1 while there are
// occupied blocks left to visit and 0 once TotalCount of them have been seen;
// on each iteration Index points at the block to process.
static bool32 Block_Loop(memory *Memory, memory_table_list TableName, uint32 TotalCount,
                         int *HasIncremented, int *CurrentCount, int *Index)
{
    for (;;) {
        if (*CurrentCount == TotalCount) {
            return 0;
        }
        if (*HasIncremented) {
            *HasIncremented = 0;
            (*Index)++;
        }
        uint8 *Occupied = (uint8 *)Memory_Block_AddressAtIndex(Memory, TableName, *Index, 0);
        if (*Occupied) {
            *HasIncremented = 1;
            (*CurrentCount)++;
            return 1;
        }
        (*Index)++;
        Assert(*CurrentCount <= TotalCount);
        Assert(*Index <= TotalCount*100); // This can get triggered normally if 100+ items are added and the first 99 in memory are deleted.
    }
    Assert(0);
    return 0;
}

// Same loop, but over the bezier keyframe blocks owned by a property.
static bool32 Block_Loop(memory *Memory, property_channel *Property, uint32 TotalCount,
                         int *HasIncremented, int *CurrentCount, int *Index)
{
    for (;;) {
        if (*CurrentCount == TotalCount) {
            return 0;
        }
        if (*HasIncremented) {
            *HasIncremented = 0;
            (*Index)++;
        }
        uint8 *Occupied = (uint8 *)Bezier_LookupAddress(Memory, Property->Block_Bezier_Index, *Index, 0);
        if (*Occupied) {
            *HasIncremented = 1;
            (*CurrentCount)++;
            return 1;
        }
        (*Index)++;
        Assert(*CurrentCount <= TotalCount);
        Assert(*Index <= TotalCount*100); // This can get triggered normally if 100+ items are added and the first 99 in memory are deleted.
    }
    Assert(0);
    return 0;
}
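// NOTE: Sketch of the intended calling pattern for Block_Loop, assuming a table
// of block_source entries (the same pattern Memory_Block_PrincipalBitmap_AllocateNew
// uses below). The three ints must start at zero and belong to a single loop:
//
//     int h = 0, c = 0, i = 0;
//     while (Block_Loop(Memory, F_Sources, File->Source_Count, &h, &c, &i)) {
//         block_source *Source = (block_source *)Memory_Block_AddressAtIndex(Memory, F_Sources, i);
//         // ...work with Source; i is the block's index in the table...
//     }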
// Finds where the principal sources' bitmaps end in the cache and returns the
// next free block index; returns 0 if there are no principal sources yet.
static uint32 Memory_Block_PrincipalBitmap_AllocateNew(project_data *File, project_state *State, memory *Memory)
{
    int h = 0, c = 0, i = 0;
    int MaxBlockIndex = -1;
    int PrincipalCount = 0;
    while (Block_Loop(Memory, F_Sources, File->Source_Count, &h, &c, &i)) {
        block_source *Source = (block_source *)Memory_Block_AddressAtIndex(Memory, F_Sources, i);
        if (Source->Type == source_type_principal || Source->Type == source_type_principal_temp)
            PrincipalCount++;
        if (Source->Bitmap_Index > MaxBlockIndex)
            MaxBlockIndex = i;
    }
    if (!PrincipalCount)
        return 0;
    block_source Source = *(block_source *)Memory_Block_AddressAtIndex(Memory, F_Sources, MaxBlockIndex);
    uint32 LastBlock = Source.Bitmap_Index;
    uint32 BlockSize = ((Source.Width * Source.Height * Source.BytesPerPixel) / BitmapBlockSize) + 1;
    uint32 Blocks_Max = Memory->Slot[B_CachedBitmaps].Size / BitmapBlockSize;
    Assert(Blocks_Max > (LastBlock + BlockSize));
    return LastBlock + BlockSize;
}

// Finds the cache entry whose bitmap sits highest in the block table and
// returns the first block index past it.
static uint32 Memory_Block_Bitmap_AllocateNew(project_state *State, memory *Memory, cache_entry Entry, uint64 NewSize)
{
    uint32 LastVal = 0;
    uint32 LastBlock = 0;
    uint32 c = 0;
    cache_entry *EntryArray = State->Render.Entry;
    while (EntryArray[c].IsOccupied != 0) {
        if (EntryArray[c].Block_StartIndex > LastBlock) {
            LastBlock = EntryArray[c].Block_StartIndex;
            LastVal = c;
        }
        c++;
    }
    cache_entry LastEntry = EntryArray[LastVal];
    uint32 LastEntry_BlockCount = 0;
    switch (EntryArray[LastVal].Type) {
        case cache_entry_type_comp: {
            block_composition Comp = *(block_composition *)Memory_Block_AddressAtIndex(Memory, F_Precomps, LastEntry.TypeInfo);
            uint64 Size = Comp.Width * Comp.Height * Comp.BytesPerPixel;
            LastEntry_BlockCount = (Size / BitmapBlockSize) + 1;
        } break;
        case cache_entry_type_source: {
            block_source Source = *(block_source *)Memory_Block_AddressAtIndex(Memory, F_Sources, LastEntry.TypeInfo);
            uint64 Size = Source.Width * Source.Height * Source.BytesPerPixel;
            LastEntry_BlockCount = (Size / BitmapBlockSize) + 1;
        } break;
        case cache_entry_type_assert: {
            Assert(0);
        } break;
        default: {
            Assert(0);
        } break;
    }
    uint32 Blocks_Max = Memory->Slot[B_CachedBitmaps].Size / BitmapBlockSize;
    Assert(Blocks_Max > LastBlock);
    return LastBlock + LastEntry_BlockCount;
    /*
    uint32 Blocks_Needed = (NewSize / BitmapBlockSize) + 1;
    uint32 Block_Index_Available = 0;
    */
}
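// NOTE: The block math above works in whole BitmapBlockSize-sized chunks. As a
// purely hypothetical example, if BitmapBlockSize were 1048576 bytes, a
// 1920x1080 source at 4 bytes per pixel (1920*1080*4 = 8294400 bytes) would
// take (8294400 / 1048576) + 1 = 8 blocks.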
// Marks cached frames of the principal comp as invalid. SingleFrame == -1
// purges every frame; otherwise only the entry for that frame is purged.
static void Memory_Cache_Purge(project_data *File, project_state *State, memory *Memory, int32 SingleFrame)
{
    cache_entry *EntryArray = State->Render.Entry;
    int c = 0;
    int count = Memory->EntryCount;
    Assert(Memory->EntryCount < 10000);
    while (count != 0) {
        bool32 ExtraCheck = (SingleFrame == -1) ? 1 : EntryArray[c].TypeInfo_Sub == SingleFrame;
        if (EntryArray[c].Type == cache_entry_type_comp &&
            EntryArray[c].TypeInfo == File->PrincipalCompIndex && ExtraCheck)
        {
            EntryArray[c].IsCached = 0;
        }
        c++;
        count--;
    }
    State->LastCachedFrame = -10000;
    State->CachedFrameCount = 0;
}

// Looks up the cache entry matching the given type/info, creating and
// allocating a new one at the end of the array if no match exists.
static cache_entry * Memory_Cache_Search(project_state *State, memory *Memory, cache_entry_type Type, uint32 TypeInfo, uint32 TypeInfo_Sub)
{
    cache_entry *EntryArray = State->Render.Entry;
    int c = 0;
    int count = Memory->EntryCount;
    while (count != 0) {
        if (EntryArray[c].Type == Type &&
            EntryArray[c].TypeInfo == TypeInfo &&
            EntryArray[c].TypeInfo_Sub == TypeInfo_Sub)
        {
            return &EntryArray[c];
        }
        c++;
        count--;
    }
    if (c != 0)
        EntryArray[c].Block_StartIndex = Memory_Block_Bitmap_AllocateNew(State, Memory, EntryArray[c], 0);
    EntryArray[c].IsOccupied = true;
    EntryArray[c].Type = Type;
    EntryArray[c].TypeInfo = TypeInfo;
    EntryArray[c].TypeInfo_Sub = TypeInfo_Sub;
    Memory->EntryCount++;
    return &EntryArray[c];
}

static void * Memory_Block_Bitmap_AddressAtIndex(memory *Memory, uint32 Index)
{
    memory_table *Table = &Memory->Slot[B_CachedBitmaps];
    uint8 *Address = (uint8 *)Table->Address + Index*BitmapBlockSize;
    return (void *)Address;
}

// Linear push/pop allocator for transient data; pops must mirror pushes in
// reverse (LIFO) order and with the same sizes.
static void * Memory_PushScratch(memory *Memory, uint64 Size)
{
    memory_table *Table = &Memory->Slot[B_ScratchSpace];
    uint8 *Address = ((uint8 *)Table->Address + Memory->ScratchPos);
    Memory->ScratchPos += Size;
#if DEBUG
    Assert(Memory->ScratchPos > 0);
    Assert(Memory->ScratchPos < Table->Size);
    Debug.ScratchSize[Debug.ScratchState] = Size;
    Debug.ScratchState++;
#endif
    return (void *)Address;
}

static void Memory_PopScratch(memory *Memory, uint64 Size)
{
    memory_table *Table = &Memory->Slot[B_ScratchSpace];
    Assert(Memory->ScratchPos >= Size);
    Memory->ScratchPos -= Size;
#if DEBUG
    Debug.ScratchState--;
    Assert(Debug.ScratchSize[Debug.ScratchState] == Size);
#endif
}

static void * Memory_AddressAtOffset(memory *Memory, memory_table_list TableName, uint64 Offset)
{
    memory_table *Table = &Memory->Slot[TableName];
    Assert(Offset < Table->Size);
    return (void *)((uint8 *)Table->Address + Offset);
}

void Memory_Copy(uint8 *Address_Write, uint8 *Address_Read, uint64 Size)
{
    uint64 i = 0;
    while (i < Size) {
        *(Address_Write + i) = *(Address_Read + i);
        i++;
    }
}

// Fills WriteSize bytes by repeating the ReadSize-byte pattern at Address_Read.
void Memory_Fill(uint8 *Address_Write, uint8 *Address_Read, uint64 WriteSize, uint64 ReadSize)
{
    uint64 i = 0;
    while (i < WriteSize) {
        *(Address_Write + i) = *(Address_Read + (i % ReadSize));
        i++;
    }
}

// Zeroes Size bytes, using 64-byte AVX stores for the bulk when available.
void Arbitrary_Zero(uint8 *Address_Write, uint64 Size)
{
    uint64 i = 0;
#if ARM
#else
    __m256i Zero256 = _mm256_setzero_si256();
    if (Size > 64 && InstructionMode == instruction_mode_avx) {
        uint64 Size_Lane = Size - (Size % 64);
        while (i < Size_Lane) {
            _mm256_storeu_si256((__m256i *)(Address_Write + i), Zero256);
            _mm256_storeu_si256((__m256i *)(Address_Write + i + 32), Zero256);
            i += 64;
        }
    }
#endif
    while (i < Size) {
        *(Address_Write + i) = 0;
        i++;
    }
}

// Swaps two equally-sized regions through a temporary scratch buffer.
void Arbitrary_SwapData(memory *Memory, uint8 *Address_0, uint8 *Address_1, uint64 Size)
{
    uint8 *Buffer_Scratch = (uint8 *)Memory_PushScratch(Memory, Size);
    Memory_Copy(Buffer_Scratch, Address_0, Size);
    Memory_Copy(Address_0, Address_1, Size);
    Memory_Copy(Address_1, Buffer_Scratch, Size);
    Memory_PopScratch(Memory, Size);
}
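// NOTE: Sketch of the scratch-space discipline Arbitrary_SwapData relies on:
// every Memory_PushScratch is paired with a Memory_PopScratch of the same size,
// unwound in the reverse order of the pushes (sizes here are illustrative):
//
//     uint8 *A = (uint8 *)Memory_PushScratch(Memory, 4096);
//     uint8 *B = (uint8 *)Memory_PushScratch(Memory, 256);
//     // ...use A and B as transient buffers...
//     Memory_PopScratch(Memory, 256);   // pop B first
//     Memory_PopScratch(Memory, 4096);  // then A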
// Shifts the bytes between Address_Start and Address_End by ShiftAmount:
// toward higher addresses when Direction > 0, toward lower addresses otherwise.
// The copy order is chosen so overlapping ranges aren't clobbered.
static void Arbitrary_ShiftData(uint8 *Address_Start, uint8 *Address_End, uint64 ShiftAmount, int32 Direction)
{
    if (Direction > 0) {
        uint8 *AddressPlayhead = Address_End;
        while ((ptrsize)AddressPlayhead >= (ptrsize)Address_Start) {
            *(AddressPlayhead + ShiftAmount) = *AddressPlayhead;
            AddressPlayhead--;
        }
    } else {
        uint8 *AddressPlayhead = Address_Start;
        while ((ptrsize)AddressPlayhead < (ptrsize)Address_End) {
            *(AddressPlayhead - ShiftAmount) = *AddressPlayhead;
            AddressPlayhead++;
        }
    }
}

// TODO(fox): Just use zlib...
extern "C" {
#include "miniz.c"
}

// Deflates DataSize bytes from DataSource into DataBuffer and returns the
// compressed size; asserts if the buffer runs out or miniz reports an error.
static uint64 Data_Compress(memory *Memory, void *DataSource, uint64 DataSize,
                            void *DataBuffer, uint64 DataBufferSize, int CompressionLevel)
{
    z_stream stream = {};
    stream.next_in = (uint8 *)DataSource;
    stream.avail_in = DataSize;
    stream.next_out = (uint8 *)DataBuffer;
    stream.avail_out = DataBufferSize;
    if (deflateInit(&stream, CompressionLevel) != Z_OK) {
        Assert(0);
    }
    int status;
    for ( ; ; ) {
        status = deflate(&stream, Z_SYNC_FLUSH);
        if (status == Z_STREAM_END || !stream.avail_in)
            break;
        else
            Assert(0);
    }
    uint64 CompressedSize = stream.total_out;
    if (deflateEnd(&stream) != Z_OK) {
        Assert(0);
    }
    return CompressedSize;
}

// Inflates CompressedSize bytes into BitmapLocation. ExpectedSize is currently
// only used by the (disabled) size check.
static void Data_Decompress(memory *Memory, void *CompressedLocation, uint64 CompressedSize,
                            void *BitmapLocation, uint64 ExpectedSize)
{
    z_stream stream = {};
    stream.next_in = (uint8 *)CompressedLocation;
    stream.avail_in = CompressedSize;
    stream.next_out = (uint8 *)BitmapLocation;
    stream.avail_out = 2147483648;
    if (inflateInit(&stream)) {
        Assert(0);
    }
    int status;
    for ( ; ; ) {
        status = inflate(&stream, Z_NO_FLUSH);
        if (status == Z_STREAM_END || !stream.avail_in)
            break;
        else
            Assert(0);
    }
    // Assert(stream.total_out == ExpectedSize);
    if (inflateEnd(&stream) != Z_OK) {
        Assert(0);
    }
}
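// NOTE: Sketch of a round trip through the two calls above, assuming the
// scratch table is large enough to hold the compressed copy; the buffer size
// and compression level (Z_BEST_SPEED here) are illustrative, not fixed by
// this file:
//
//     uint64 BufferSize = SourceSize;  // size chosen for illustration
//     void *Buffer = Memory_PushScratch(Memory, BufferSize);
//     uint64 CompressedSize = Data_Compress(Memory, SourceBytes, SourceSize,
//                                           Buffer, BufferSize, Z_BEST_SPEED);
//     Data_Decompress(Memory, Buffer, CompressedSize, DestBytes, SourceSize);
//     Memory_PopScratch(Memory, BufferSize);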