/*************************************************************/

/* The kinds of allocation:
   malloc_sector = returns new SECTOR_SEGMENT_SIZE-aligned memory;
     relies on nothing else; the memory blocks must be explicitly
     freed with free_sector; all GC allocation is performed via
     sectors
   malloc_managed = malloc "atomic" block used by GC implementation
     itself; no GCing should occur during the malloc; the block is
     freed with free_managed
   realloc_collect_temp = temporary structures used during gc;
     no other allocation can take place during gc, and all memory
     will be freed when GC is done with free_collect_temp */

#if GET_MEM_VIA_SBRK
/* Allocate `count` segments of SECTOR_SEGMENT_SIZE bytes, aligned to
   SECTOR_SEGMENT_SIZE, by advancing the program break with sbrk().
   The `executable` flag is accepted for signature uniformity with the
   other platform back-ends, but sbrk() memory permissions cannot be
   controlled here, so it is ignored. Returns NULL on failure. */
static void *platform_plain_sector(int count, int executable)
{
  caddr_t cur_brk = (caddr_t)sbrk(0);
  intptr_t lsbs = (uintptr_t)cur_brk & TABLE_LO_MASK;
  void *result;

  /* If the current break is not segment-aligned, pad it forward so
     the allocation below starts on a SECTOR_SEGMENT_SIZE boundary. */
  if (lsbs != 0) {
    if ((caddr_t)sbrk(SECTOR_SEGMENT_SIZE - lsbs) == (caddr_t)(-1))
      return 0;
  }

  result = (caddr_t)sbrk((count << LOG_SECTOR_SEGMENT_SIZE));

  if (result == (caddr_t)(-1))
    return 0;

  return result;
}
#endif

#if GET_MEM_VIA_MMAP
/* Allocate `count` segments of SECTOR_SEGMENT_SIZE bytes, aligned to
   SECTOR_SEGMENT_SIZE, via mmap(). One extra segment is over-allocated
   so that the result can be aligned by trimming the excess at both
   ends with munmap(). If `executable` is non-zero, the pages are also
   mapped PROT_EXEC. Returns NULL on failure. */
static void *mmap_sector(int count, int executable)
{
  uintptr_t pre_extra;
  void *p;
#ifdef MAP_ANON
  int fd = -1;
  int flags = MAP_ANON;
#else
  static int fd;
  int flags = 0;
  if (!fd) {
    fd = open("/dev/zero", O_RDWR);
    if (fd == -1) {
      /* BUG FIX: previously a failed open() cached fd == -1 (truthy),
         so it was never retried and the bad descriptor was handed to
         mmap() on every call. Reset so a later call can retry. */
      fd = 0;
      return NULL;
    }
  }
#endif

  p = mmap(NULL, (count + 1) << LOG_SECTOR_SEGMENT_SIZE,
	   PROT_READ | PROT_WRITE | (executable ? PROT_EXEC : 0),
	   MAP_PRIVATE | flags, fd, 0);

  /* BUG FIX: the original fell through on mmap() failure, applying the
     alignment arithmetic below to MAP_FAILED ((void *)-1), issuing
     bogus munmap() calls, and returning a garbage non-NULL pointer. */
  if (p == MAP_FAILED)
    return NULL;

  /* Distance from p up to the next SECTOR_SEGMENT_SIZE boundary. */
  pre_extra = (uintptr_t)p & (SECTOR_SEGMENT_SIZE - 1);
  if (pre_extra)
    pre_extra = SECTOR_SEGMENT_SIZE - pre_extra;

  /* Trim the unaligned prefix, if any... */
  if (pre_extra)
    munmap(p, pre_extra);
  /* ...and the leftover tail of the over-allocated extra segment.
     pre_extra is always < SECTOR_SEGMENT_SIZE, so this unmaps a
     non-empty region in every case. */
  if (pre_extra < SECTOR_SEGMENT_SIZE)
    munmap((char *)p + pre_extra + (count << LOG_SECTOR_SEGMENT_SIZE),
	   SECTOR_SEGMENT_SIZE - pre_extra);

  return (char *)p + pre_extra;
}