Merge pull request #90 from gizmo98/patch-2
[libretro-ppsspp.git] / Core / MemMap.cpp
blob 9a90c2b8a82d4effbfcf0227d03a91fceaf8ef81
// Copyright (C) 2003 Dolphin Project / 2012 PPSSPP Project

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
#include <algorithm>

#include "base/mutex.h"
#include "Common/Common.h"
#include "Common/MemoryUtil.h"
#ifndef __SYMBIAN32__
#include "Common/MemArena.h"
#endif
#include "Common/ChunkFile.h"

#include "Core/MemMap.h"
#include "Core/HDRemaster.h"
#include "Core/MIPS/MIPS.h"
#include "Core/HLE/HLE.h"

#include "Core/Core.h"
#include "Core/Debugger/SymbolMap.h"
#include "Core/Debugger/Breakpoints.h"
#include "Core/Config.h"
#include "Core/HLE/ReplaceTables.h"
namespace Memory {

// The base pointer to the auto-mirrored arena.
u8* base = NULL;

#ifdef __SYMBIAN32__
RChunk* memmap;
#else
// The MemArena class
MemArena g_arena;
#endif
// ==============
// 64-bit: Pointers to low-mem (sub-0x10000000) mirror
// 32-bit: Same as the corresponding physical/virtual pointers.
u8 *m_pRAM;
u8 *m_pRAM2;
u8 *m_pRAM3;
u8 *m_pScratchPad;
u8 *m_pVRAM;

u8 *m_pPhysicalScratchPad;
u8 *m_pUncachedScratchPad;
// 64-bit: Pointers to high-mem mirrors
// 32-bit: Same as above
u8 *m_pPhysicalRAM;
u8 *m_pUncachedRAM;
u8 *m_pKernelRAM;	// RAM mirrored up to "kernel space". Fully accessible at all times currently.
u8 *m_pPhysicalRAM2;
u8 *m_pUncachedRAM2;
u8 *m_pKernelRAM2;
u8 *m_pPhysicalRAM3;
u8 *m_pUncachedRAM3;
u8 *m_pKernelRAM3;
// VRAM is mirrored 4 times. The second and fourth mirrors are swizzled.
// In practice, a game accessing the mirrors is most likely deswizzling the depth buffer.
u8 *m_pPhysicalVRAM1;
u8 *m_pPhysicalVRAM2;
u8 *m_pPhysicalVRAM3;
u8 *m_pPhysicalVRAM4;
u8 *m_pUncachedVRAM1;
u8 *m_pUncachedVRAM2;
u8 *m_pUncachedVRAM3;
u8 *m_pUncachedVRAM4;

// Holds the size of the PSP's memory (RAM), which determines the end of user space.
// Required for HD Remasters to work properly.
// This replaces RAM_NORMAL_SIZE at runtime.
u32 g_MemorySize;
// Used to store the PSP model on game startup.
u32 g_PSPModel;
recursive_mutex g_shutdownLock;

// We don't declare the IO region in here since it's handled by other means.
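// Each entry is a MemoryView: { out_ptr_low, out_ptr, virtual_address, size, flags }
// (field names as used by Memory_TryBase() below; see Common/MemArena.h for the struct).
// Entries flagged MV_MIRROR_PREVIOUS share the backing memory of the preceding
// non-mirror entry, so the cached, uncached and kernel-mirror ranges of a region all
// alias one allocation.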
static MemoryView views[] =
{
	{&m_pScratchPad, &m_pPhysicalScratchPad, 0x00010000, SCRATCHPAD_SIZE, 0},
	{NULL,           &m_pUncachedScratchPad, 0x40010000, SCRATCHPAD_SIZE, MV_MIRROR_PREVIOUS},
	{&m_pVRAM,       &m_pPhysicalVRAM1,      0x04000000, 0x00200000, 0},
	{NULL,           &m_pPhysicalVRAM2,      0x04200000, 0x00200000, MV_MIRROR_PREVIOUS},
	{NULL,           &m_pPhysicalVRAM3,      0x04400000, 0x00200000, MV_MIRROR_PREVIOUS},
	{NULL,           &m_pPhysicalVRAM4,      0x04600000, 0x00200000, MV_MIRROR_PREVIOUS},
	{NULL,           &m_pUncachedVRAM1,      0x44000000, 0x00200000, MV_MIRROR_PREVIOUS},
	{NULL,           &m_pUncachedVRAM2,      0x44200000, 0x00200000, MV_MIRROR_PREVIOUS},
	{NULL,           &m_pUncachedVRAM3,      0x44400000, 0x00200000, MV_MIRROR_PREVIOUS},
	{NULL,           &m_pUncachedVRAM4,      0x44600000, 0x00200000, MV_MIRROR_PREVIOUS},
	{&m_pRAM,        &m_pPhysicalRAM,        0x08000000, g_MemorySize, MV_IS_PRIMARY_RAM},	// only from 0x08800000 is it usable (last 24 megs)
	{NULL,           &m_pUncachedRAM,        0x48000000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_PRIMARY_RAM},
	{NULL,           &m_pKernelRAM,          0x88000000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_PRIMARY_RAM},
	// Starts at memory + 31 MB.
	{&m_pRAM2,       &m_pPhysicalRAM2,       0x09F00000, g_MemorySize, MV_IS_EXTRA1_RAM},
	{NULL,           &m_pUncachedRAM2,       0x49F00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA1_RAM},
	{NULL,           &m_pKernelRAM2,         0x89F00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA1_RAM},
	// Starts at memory + 31 * 2 MB.
	{&m_pRAM3,       &m_pPhysicalRAM3,       0x0BE00000, g_MemorySize, MV_IS_EXTRA2_RAM},
	{NULL,           &m_pUncachedRAM3,       0x4BE00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA2_RAM},
	{NULL,           &m_pKernelRAM3,         0x8BE00000, g_MemorySize, MV_MIRROR_PREVIOUS | MV_IS_EXTRA2_RAM},

	// TODO: There are a few swizzled mirrors of VRAM, not sure about the best way to
	// implement those.
};
static const int num_views = sizeof(views) / sizeof(MemoryView);
inline static bool CanIgnoreView(const MemoryView &view) {
#ifdef _ARCH_32
	// Basically, 32-bit platforms can ignore views that are masked out anyway.
	return (view.flags & MV_MIRROR_PREVIOUS) && (view.virtual_address & ~MEMVIEW32_MASK) != 0;
#else
	return false;
#endif
}
// yeah, this could also be done in like two bitwise ops...
#define SKIP(a_flags, b_flags)
//	if (!(a_flags & MV_WII_ONLY) && (b_flags & MV_WII_ONLY))
//		continue;
//	if (!(a_flags & MV_FAKE_VMEM) && (b_flags & MV_FAKE_VMEM))
//		continue;
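// Note: SKIP() currently expands to nothing; the commented-out checks above are
// disabled leftovers from the Dolphin memory map this file derives from.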
static bool Memory_TryBase(u32 flags) {
	// OK, we know where to find free space. Now grab it!
	// We just mimic the popular BAT setup.

#if defined(_XBOX)
	void *ptr;
#elif !defined(__SYMBIAN32__)
	size_t position = 0;
	size_t last_position = 0;
#endif
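	// Layout strategy (MemArena path): each non-mirror view gets a fresh slice of the
	// arena and `position` advances by the rounded-up view size, while MV_MIRROR_PREVIOUS
	// views reuse last_position so they alias the same backing memory at another address.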
	// Zero all the pointers to be sure.
	for (int i = 0; i < num_views; i++)
	{
		if (views[i].out_ptr_low)
			*views[i].out_ptr_low = 0;
		if (views[i].out_ptr)
			*views[i].out_ptr = 0;
	}
	int i;
	for (i = 0; i < num_views; i++)
	{
		const MemoryView &view = views[i];
		if (view.size == 0)
			continue;
		SKIP(flags, view.flags);

#ifdef __SYMBIAN32__
		if (!CanIgnoreView(view)) {
			memmap->Commit(view.virtual_address & MEMVIEW32_MASK, view.size);
		}
		*(view.out_ptr) = (u8*)base + (view.virtual_address & MEMVIEW32_MASK);
#elif defined(_XBOX)
		if (!CanIgnoreView(view)) {
			*(view.out_ptr_low) = (u8*)(base + view.virtual_address);
			ptr = VirtualAlloc(base + (view.virtual_address & MEMVIEW32_MASK), view.size, MEM_COMMIT, PAGE_READWRITE);
		}
		*(view.out_ptr) = (u8*)base + (view.virtual_address & MEMVIEW32_MASK);
#else
		if (view.flags & MV_MIRROR_PREVIOUS) {
			position = last_position;
		} else {
			*(view.out_ptr_low) = (u8*)g_arena.CreateView(position, view.size);
			if (!*view.out_ptr_low)
				goto bail;
		}
#if defined(_ARCH_64)
		*view.out_ptr = (u8*)g_arena.CreateView(
			position, view.size, base + view.virtual_address);
#else
		if (CanIgnoreView(view)) {
			// No need to create multiple identical views.
			*view.out_ptr = *views[i - 1].out_ptr;
		} else {
			*view.out_ptr = (u8*)g_arena.CreateView(
				position, view.size, base + (view.virtual_address & MEMVIEW32_MASK));
			if (!*view.out_ptr)
				goto bail;
		}
#endif
		last_position = position;
		position += g_arena.roundup(view.size);
#endif
	}

	return true;
#if !defined(__SYMBIAN32__)
bail:
	// Argh! ERROR! Free what we grabbed so far so we can try again.
	for (int j = 0; j <= i; j++)
	{
		if (views[j].size == 0)
			continue;
		SKIP(flags, views[j].flags);
		if (views[j].out_ptr_low && *views[j].out_ptr_low)
		{
			g_arena.ReleaseView(*views[j].out_ptr_low, views[j].size);
			*views[j].out_ptr_low = NULL;
		}
		if (*views[j].out_ptr)
		{
			if (!CanIgnoreView(views[j])) {
				g_arena.ReleaseView(*views[j].out_ptr, views[j].size);
			}
			*views[j].out_ptr = NULL;
		}
	}
	return false;
#endif
}
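// Reserves address space for the whole PSP memory map and creates the views listed
// above. On 32-bit Windows a usable base address is searched for by brute force;
// other platforms take the base chosen by the MemArena (or, on Xbox/Symbian, reserve
// it directly).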
void MemoryMap_Setup(u32 flags)
{
	// Find a base to reserve 256MB
#if defined(_XBOX)
	base = (u8*)VirtualAlloc(0, 0x10000000, MEM_RESERVE|MEM_LARGE_PAGES, PAGE_READWRITE);
#elif defined(__SYMBIAN32__)
	memmap = new RChunk();
	memmap->CreateDisconnectedLocal(0, 0, 0x10000000);
	base = memmap->Base();
#else
	size_t total_mem = 0;

	for (int i = 0; i < num_views; i++)
	{
		if (views[i].size == 0)
			continue;
		SKIP(flags, views[i].flags);
		if (!CanIgnoreView(views[i]))
			total_mem += g_arena.roundup(views[i].size);
	}
	// Grab some pagefile backed memory out of the void ...
	g_arena.GrabLowMemSpace(total_mem);
	// 32-bit Windows retrieves base a different way
#if defined(_M_X64) || !defined(_WIN32)
	// This really shouldn't fail - in 64-bit, there will always be enough address space.
	// Linux32 is fine with the x64 method, although limited to 32-bit with no automirrors.
	base = MemArena::Find4GBBase();
#endif
#endif
	// Now, create views in high memory where there's plenty of space.
#if defined(_WIN32) && !defined(_M_X64)
	// Try a whole range of possible bases. Return once we got a valid one.
	int base_attempts = 0;
	u32 max_base_addr = 0x7FFF0000 - 0x10000000;

	for (u32 base_addr = 0x01000000; base_addr < max_base_addr; base_addr += 0x400000)
	{
		base_attempts++;
		base = (u8 *)base_addr;
		if (Memory_TryBase(flags))
		{
			INFO_LOG(MEMMAP, "Found valid memory base at %p after %i tries.", base, base_attempts);
			base_attempts = 0;
			break;
		}
	}

	if (base_attempts)
		PanicAlert("No possible memory base pointer found!");
#else
	// Try the base we retrieved earlier
	if (!Memory_TryBase(flags))
	{
		ERROR_LOG(MEMMAP, "MemoryMap_Setup: Failed finding a memory base.");
		PanicAlert("MemoryMap_Setup: Failed finding a memory base.");
	}
#endif
	return;
}
void MemoryMap_Shutdown(u32 flags)
{
#ifdef __SYMBIAN32__
	memmap->Decommit(0, memmap->MaxSize());
	memmap->Close();
	delete memmap;
#else
	for (int i = 0; i < num_views; i++)
	{
		if (views[i].size == 0)
			continue;
		SKIP(flags, views[i].flags);
		if (views[i].out_ptr_low && *views[i].out_ptr_low)
			g_arena.ReleaseView(*views[i].out_ptr_low, views[i].size);
		if (*views[i].out_ptr && (!views[i].out_ptr_low || *views[i].out_ptr != *views[i].out_ptr_low))
			g_arena.ReleaseView(*views[i].out_ptr, views[i].size);
		*views[i].out_ptr = NULL;
		if (views[i].out_ptr_low)
			*views[i].out_ptr_low = NULL;
	}
	g_arena.ReleaseSpace();
#endif
}
void Init()
{
	int flags = 0;

	// On some 32-bit platforms, you can only map < 32 megs at a time.
	const static int MAX_MMAP_SIZE = 31 * 1024 * 1024;
	_dbg_assert_msg_(MEMMAP, g_MemorySize < MAX_MMAP_SIZE * 3, "ACK - too much memory for three mmap views.");
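	// Example (illustrative): with g_MemorySize = 64 MiB, the loop below would size
	// the views as primary = 31 MiB, extra1 = 31 MiB and extra2 = 2 MiB, since each
	// mmap view is capped at MAX_MMAP_SIZE.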
	for (size_t i = 0; i < ARRAY_SIZE(views); i++) {
		if (views[i].flags & MV_IS_PRIMARY_RAM)
			views[i].size = std::min((int)g_MemorySize, MAX_MMAP_SIZE);
		if (views[i].flags & MV_IS_EXTRA1_RAM)
			views[i].size = std::min(std::max((int)g_MemorySize - MAX_MMAP_SIZE, 0), MAX_MMAP_SIZE);
		if (views[i].flags & MV_IS_EXTRA2_RAM)
			views[i].size = std::min(std::max((int)g_MemorySize - MAX_MMAP_SIZE * 2, 0), MAX_MMAP_SIZE);
	}
	MemoryMap_Setup(flags);

	INFO_LOG(MEMMAP, "Memory system initialized. RAM at %p (mirror at 0 @ %p, uncached @ %p)",
		m_pRAM, m_pPhysicalRAM, m_pUncachedRAM);
}
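// Savestate serialization. Section version 2 added the PSP model; since the model
// determines the RAM size (outside remaster mode), a grown g_MemorySize forces a
// Shutdown()/Init() cycle before the RAM contents are read back.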
void DoState(PointerWrap &p)
{
	auto s = p.Section("Memory", 1, 2);
	if (!s)
		return;

	if (s < 2) {
		if (!g_RemasterMode)
			g_MemorySize = RAM_NORMAL_SIZE;
		g_PSPModel = PSP_MODEL_FAT;
	} else {
		u32 oldMemorySize = g_MemorySize;
		p.Do(g_PSPModel);
		p.DoMarker("PSPModel");
		if (!g_RemasterMode) {
			g_MemorySize = g_PSPModel == PSP_MODEL_FAT ? RAM_NORMAL_SIZE : RAM_DOUBLE_SIZE;
			if (oldMemorySize < g_MemorySize) {
				Shutdown();
				Init();
			}
		}
	}

	p.DoArray(GetPointer(PSP_GetKernelMemoryBase()), g_MemorySize);
	p.DoMarker("RAM");

	p.DoArray(m_pVRAM, VRAM_SIZE);
	p.DoMarker("VRAM");
	p.DoArray(m_pScratchPad, SCRATCHPAD_SIZE);
	p.DoMarker("ScratchPad");
}
void Shutdown()
{
	lock_guard guard(g_shutdownLock);
	u32 flags = 0;

	MemoryMap_Shutdown(flags);
	base = NULL;
	DEBUG_LOG(MEMMAP, "Memory system shut down.");
}
void Clear()
{
	if (m_pRAM)
		memset(GetPointerUnchecked(PSP_GetKernelMemoryBase()), 0, g_MemorySize);
	if (m_pScratchPad)
		memset(m_pScratchPad, 0, SCRATCHPAD_SIZE);
	if (m_pVRAM)
		memset(m_pVRAM, 0, VRAM_SIZE);
}
// Wanting to avoid include pollution, MemMap.h is included a lot.
MemoryInitedLock::MemoryInitedLock()
{
	g_shutdownLock.lock();
}

MemoryInitedLock::~MemoryInitedLock()
{
	g_shutdownLock.unlock();
}

MemoryInitedLock Lock()
{
	return MemoryInitedLock();
}
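// Instructions in RAM may have been replaced with "emuhack" pseudo-ops (JIT run-block
// markers or HLE replacement stubs). The readers below translate such an opcode back
// into the original MIPS instruction when callers need to see real code.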
__forceinline static Opcode Read_Instruction(u32 address, bool resolveReplacements, Opcode inst)
{
	if (!MIPS_IS_EMUHACK(inst.encoding)) {
		return inst;
	}

	if (MIPS_IS_RUNBLOCK(inst.encoding) && MIPSComp::jit) {
		JitBlockCache *bc = MIPSComp::jit->GetBlockCache();
		int block_num = bc->GetBlockNumberFromEmuHackOp(inst, true);
		if (block_num >= 0) {
			inst = bc->GetOriginalFirstOp(block_num);
			if (resolveReplacements && MIPS_IS_REPLACEMENT(inst)) {
				u32 op;
				if (GetReplacedOpAt(address, &op)) {
					if (MIPS_IS_EMUHACK(op)) {
						ERROR_LOG(HLE, "WTF 1");
						return Opcode(op);
					} else {
						return Opcode(op);
					}
				} else {
					ERROR_LOG(HLE, "Replacement, but no replacement op? %08x", inst.encoding);
				}
			}
			return inst;
		} else {
			return inst;
		}
	} else if (resolveReplacements && MIPS_IS_REPLACEMENT(inst.encoding)) {
		u32 op;
		if (GetReplacedOpAt(address, &op)) {
			if (MIPS_IS_EMUHACK(op)) {
				ERROR_LOG(HLE, "WTF 2");
				return Opcode(op);
			} else {
				return Opcode(op);
			}
		} else {
			return inst;
		}
	} else {
		return inst;
	}
}
Opcode Read_Instruction(u32 address, bool resolveReplacements)
{
	Opcode inst = Opcode(Read_U32(address));
	return Read_Instruction(address, resolveReplacements, inst);
}

Opcode ReadUnchecked_Instruction(u32 address, bool resolveReplacements)
{
	Opcode inst = Opcode(ReadUnchecked_U32(address));
	return Read_Instruction(address, resolveReplacements, inst);
}
Opcode Read_Opcode_JIT(u32 address)
{
	Opcode inst = Opcode(Read_U32(address));
	if (MIPS_IS_RUNBLOCK(inst.encoding) && MIPSComp::jit) {
		JitBlockCache *bc = MIPSComp::jit->GetBlockCache();
		int block_num = bc->GetBlockNumberFromEmuHackOp(inst, true);
		if (block_num >= 0) {
			return bc->GetOriginalFirstOp(block_num);
		} else {
			return inst;
		}
	} else {
		return inst;
	}
}
// WARNING! No checks!
// We assume that _Address is cached.
void Write_Opcode_JIT(const u32 _Address, const Opcode& _Value)
{
	Memory::WriteUnchecked_U32(_Value.encoding, _Address);
}
void Memset(const u32 _Address, const u8 _iValue, const u32 _iLength)
{
	u8 *ptr = GetPointer(_Address);
	if (ptr != NULL) {
		memset(ptr, _iValue, _iLength);
	}
	else
	{
		for (size_t i = 0; i < _iLength; i++)
			Write_U8(_iValue, (u32)(_Address + i));
	}

#ifndef MOBILE_DEVICE
	CBreakPoints::ExecMemCheck(_Address, true, _iLength, currentMIPS->pc);
#endif
}
} // namespace