15 #include "llvm/Config/config.h" 20 #ifdef HAVE_SYS_MMAN_H 25 #include <mach/mach.h> 29 #include <zircon/syscalls.h> 33 # if defined(__OpenBSD__) 34 # include <mips64/sysarch.h> 35 # elif !defined(__FreeBSD__) 36 # include <sys/cachectl.h> 40 #if defined(__APPLE__) 41 extern "C" void sys_icache_invalidate(
const void *Addr,
size_t len);
43 extern "C" void __clear_cache(
void *,
void*);
48 int getPosixProtectionFlags(
unsigned Flags) {
55 return PROT_READ | PROT_WRITE;
57 return PROT_READ | PROT_EXEC;
60 return PROT_READ | PROT_WRITE | PROT_EXEC;
62 #if defined(__FreeBSD__) 70 return PROT_READ | PROT_EXEC;
87 Memory::allocateMappedMemory(
size_t NumBytes,
88 const MemoryBlock *
const NearBlock,
90 std::error_code &EC) {
91 EC = std::error_code();
95 static const size_t PageSize = Process::getPageSize();
96 const size_t NumPages = (NumBytes+PageSize-1)/PageSize;
100 int MMFlags = MAP_PRIVATE |
108 int Protect = getPosixProtectionFlags(PFlags);
110 #if defined(__NetBSD__) && defined(PROT_MPROTECT) 111 Protect |= PROT_MPROTECT(PROT_READ | PROT_WRITE | PROT_EXEC);
115 uintptr_t Start = NearBlock ?
reinterpret_cast<uintptr_t
>(NearBlock->base()) +
116 NearBlock->size() : 0;
117 if (Start && Start % PageSize)
118 Start += PageSize - Start %
PageSize;
120 void *Addr = ::mmap(reinterpret_cast<void*>(Start), PageSize*NumPages,
121 Protect, MMFlags, fd, 0);
122 if (Addr == MAP_FAILED) {
124 return allocateMappedMemory(NumBytes,
nullptr, PFlags, EC);
126 EC = std::error_code(errno, std::generic_category());
127 return MemoryBlock();
131 Result.Address = Addr;
135 if (PFlags & MF_EXEC) {
136 EC = Memory::protectMappedMemory (Result, PFlags);
137 if (EC != std::error_code())
138 return MemoryBlock();
145 Memory::releaseMappedMemory(MemoryBlock &M) {
146 if (M.Address ==
nullptr || M.Size == 0)
147 return std::error_code();
149 if (0 != ::munmap(M.Address, M.Size))
150 return std::error_code(errno, std::generic_category());
155 return std::error_code();
159 Memory::protectMappedMemory(
const MemoryBlock &M,
unsigned Flags) {
160 static const size_t PageSize = Process::getPageSize();
161 if (M.Address ==
nullptr || M.Size == 0)
162 return std::error_code();
165 return std::error_code(EINVAL, std::generic_category());
167 int Protect = getPosixProtectionFlags(Flags);
168 uintptr_t Start =
alignAddr((uint8_t *)M.Address - PageSize + 1, PageSize);
169 uintptr_t End =
alignAddr((uint8_t *)M.Address + M.Size, PageSize);
171 bool InvalidateCache = (Flags & MF_EXEC);
173 #if defined(__arm__) || defined(__aarch64__) 177 if (InvalidateCache && !(Protect & PROT_READ)) {
178 int Result = ::mprotect((
void *)Start, End - Start, Protect | PROT_READ);
180 return std::error_code(errno, std::generic_category());
182 Memory::InvalidateInstructionCache(M.Address, M.Size);
183 InvalidateCache =
false;
187 int Result = ::mprotect((
void *)Start, End - Start, Protect);
190 return std::error_code(errno, std::generic_category());
193 Memory::InvalidateInstructionCache(M.Address, M.Size);
195 return std::error_code();
201 void Memory::InvalidateInstructionCache(
const void *Addr,
205 #if defined(__APPLE__) 207 # if (defined(__POWERPC__) || defined (__ppc__) || \ 208 defined(_POWER) || defined(_ARCH_PPC) || defined(__arm__) || \ 210 sys_icache_invalidate(const_cast<void *>(Addr), Len);
213 #elif defined(__Fuchsia__) 215 zx_status_t
Status = zx_cache_flush(Addr, Len, ZX_CACHE_FLUSH_INSN);
216 assert(Status == ZX_OK &&
"cannot invalidate instruction cache");
220 # if (defined(__POWERPC__) || defined (__ppc__) || \ 221 defined(_POWER) || defined(_ARCH_PPC)) && defined(__GNUC__) 222 const size_t LineSize = 32;
228 for (
intptr_t Line = StartLine; Line < EndLine; Line += LineSize)
229 asm volatile(
"dcbf 0, %0" : :
"r"(Line));
230 asm volatile(
"sync");
232 for (
intptr_t Line = StartLine; Line < EndLine; Line += LineSize)
233 asm volatile(
"icbi 0, %0" : :
"r"(Line));
234 asm volatile(
"isync");
235 # elif (defined(__arm__) || defined(__aarch64__) || defined(__mips__)) && \ 238 const char *Start =
static_cast<const char *
>(Addr);
239 const char *End = Start + Len;
240 __clear_cache(const_cast<char *>(Start), const_cast<char *>(End));
This class represents lattice values for constants.
void ValgrindDiscardTranslations(const void *Addr, size_t Len)
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
uintptr_t alignAddr(const void *Addr, size_t Alignment)
Aligns Addr to Alignment bytes, rounding up.
Provides a library for accessing information about this process and other processes on the operating system.
static cl::opt< int > PageSize("imp-null-check-page-size", cl::desc("The page size of the target in bytes"), cl::init(4096), cl::Hidden)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
std::underlying_type< E >::type Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.