// SPDX-FileCopyrightText: 2019-2022 Connor McLaughlin
// SPDX-License-Identifier: (GPL-3.0 OR CC-BY-NC-ND-4.0)

#include "bus.h"
#include "cdrom.h"
#include "cpu_code_cache.h"
#include "cpu_core.h"
#include "cpu_core_private.h"
#include "cpu_disasm.h"
#include "dma.h"
#include "gpu.h"
#include "host.h"
#include "interrupt_controller.h"
#include "mdec.h"
#include "pad.h"
#include "settings.h"
#include "sio.h"
#include "spu.h"
#include "system.h"
#include "timers.h"
#include "timing_event.h"

#include "util/state_wrapper.h"

#include "common/align.h"
#include "common/assert.h"
#include "common/log.h"
#include "common/make_array.h"
#include "common/memmap.h"

#include <cstdlib> // std::malloc/std::free
#include <cstring> // std::memcpy/std::memset
#include <tuple>   // std::tuple/std::tie

Log_SetChannel(Bus);

// Exports for external debugger access
namespace Exports {

extern "C" {
#ifdef _WIN32
__declspec(dllexport) uintptr_t RAM;
__declspec(dllexport) u32 RAM_SIZE, RAM_MASK;
#else
__attribute__((visibility("default"), used)) uintptr_t RAM;
__attribute__((visibility("default"), used)) u32 RAM_SIZE, RAM_MASK;
#endif
}

} // namespace Exports

namespace Bus {

namespace {

// Field layout follows the nocash PSX delay/size register description.
union MEMDELAY
{
  u32 bits;

  BitField<u32, u8, 4, 4> access_time; // cycles
  BitField<u32, bool, 8, 1> use_com0_time;
  BitField<u32, bool, 9, 1> use_com1_time;
  BitField<u32, bool, 10, 1> use_com2_time;
  BitField<u32, bool, 11, 1> use_com3_time;
  BitField<u32, bool, 12, 1> data_bus_16bit;
  BitField<u32, u8, 16, 5> memory_window_size;

  static constexpr u32 WRITE_MASK = 0b10101111'00011111'11111111'11111111;
};

union COMDELAY
{
  u32 bits;

  BitField<u32, u8, 0, 4> com0;
  BitField<u32, u8, 4, 4> com1;
  BitField<u32, u8, 8, 4> com2;
  BitField<u32, u8, 12, 4> com3;
  BitField<u32, u8, 16, 2> comunk;

  static constexpr u32 WRITE_MASK = 0b00000000'00000011'11111111'11111111;
};

union MEMCTRL
{
  u32 regs[MEMCTRL_REG_COUNT];

  struct
  {
    u32 exp1_base;
    u32 exp2_base;
    MEMDELAY exp1_delay_size;
    MEMDELAY exp3_delay_size;
    MEMDELAY bios_delay_size;
    MEMDELAY spu_delay_size;
    MEMDELAY cdrom_delay_size;
    MEMDELAY exp2_delay_size;
    COMDELAY common_delay;
  };
};

} // namespace

std::bitset<RAM_8MB_CODE_PAGE_COUNT> g_ram_code_bits{}; // page-count constant assumed to come from bus.h
u8* g_ram = nullptr;
u32 g_ram_size = 0;
u32 g_ram_mask = 0;
u8 g_bios[BIOS_SIZE]{};

static void* s_ram_handle = nullptr;

// Per-device access times, indexed by MemoryAccessSize (byte/halfword/word).
static std::array<TickCount, 3> s_exp1_access_time = {};
static std::array<TickCount, 3> s_exp2_access_time = {};
static std::array<TickCount, 3> s_bios_access_time = {};
static std::array<TickCount, 3> s_cdrom_access_time = {};
static std::array<TickCount, 3> s_spu_access_time = {};

static std::vector<u8> s_exp1_rom;

static MEMCTRL s_MEMCTRL = {};
static u32 s_ram_size_reg = 0;

static std::string s_tty_line_buffer;

static CPUFastmemMode s_fastmem_mode = CPUFastmemMode::Disabled;

#ifdef ENABLE_MMAP_FASTMEM
static SharedMemoryMappingArea s_fastmem_arena;
static std::vector<std::pair<u8*, size_t>> s_fastmem_ram_views;
#endif

static u8** s_fastmem_lut = nullptr;
static constexpr auto s_fastmem_ram_mirrors =
  make_array(0x00000000u, 0x00200000u, 0x00400000u, 0x00600000u, 0x80000000u, 0x80200000u, 0x80400000u, 0x80600000u,
             0xA0000000u, 0xA0200000u, 0xA0400000u, 0xA0600000u);

static void SetRAMSize(bool enable_8mb_ram);
static std::tuple<TickCount, TickCount, TickCount> CalculateMemoryTiming(MEMDELAY mem_delay, COMDELAY common_delay);
static void RecalculateMemoryTimings();
static void SetCodePageFastmemProtection(u32 page_index, bool writable);

} // namespace Bus

#define FIXUP_HALFWORD_OFFSET(size, offset) ((size >= MemoryAccessSize::HalfWord) ? (offset) : ((offset) & ~1u))
#define FIXUP_HALFWORD_READ_VALUE(size, offset, value)                                                                 \
  ((size >= MemoryAccessSize::HalfWord) ? (value) : ((value) >> (((offset)&u32(1)) * 8u)))
#define FIXUP_HALFWORD_WRITE_VALUE(size, offset, value)                                                                \
  ((size >= MemoryAccessSize::HalfWord) ? (value) : ((value) << (((offset)&u32(1)) * 8u)))
#define FIXUP_WORD_OFFSET(size, offset) ((size == MemoryAccessSize::Word) ?
(offset) : ((offset) & ~3u)) #define FIXUP_WORD_READ_VALUE(size, offset, value) \ ((size == MemoryAccessSize::Word) ? (value) : ((value) >> (((offset)&3u) * 8))) #define FIXUP_WORD_WRITE_VALUE(size, offset, value) \ ((size == MemoryAccessSize::Word) ? (value) : ((value) << (((offset)&3u) * 8))) bool Bus::AllocateMemory() { s_ram_handle = MemMap::CreateSharedMemory(MemMap::GetFileMappingName("duckstation_ram").c_str(), RAM_8MB_SIZE); if (!s_ram_handle) { Host::ReportErrorAsync("Error", "Failed to allocate memory"); return false; } g_ram = static_cast(MemMap::MapSharedMemory(s_ram_handle, 0, nullptr, RAM_8MB_SIZE, PageProtect::ReadWrite)); if (!g_ram) { Host::ReportErrorAsync("Error", "Failed to map memory"); ReleaseMemory(); return false; } Log_InfoPrintf("RAM is mapped at %p.", g_ram); #ifdef ENABLE_MMAP_FASTMEM if (!s_fastmem_arena.Create(FASTMEM_ARENA_SIZE)) { // TODO: maybe make this non-fatal? Host::ReportErrorAsync("Error", "Failed to create fastmem arena"); ReleaseMemory(); return false; } Log_InfoPrintf("Fastmem base: %p", s_fastmem_arena.BasePointer()); #endif return true; } void Bus::ReleaseMemory() { #ifdef ENABLE_MMAP_FASTMEM DebugAssert(s_fastmem_ram_views.empty()); s_fastmem_arena.Destroy(); #endif std::free(s_fastmem_lut); s_fastmem_lut = nullptr; if (g_ram) { MemMap::UnmapSharedMemory(g_ram, RAM_8MB_SIZE); g_ram = nullptr; } if (s_ram_handle) { MemMap::DestroySharedMemory(s_ram_handle); s_ram_handle = nullptr; } } bool Bus::Initialize() { SetRAMSize(g_settings.enable_8mb_ram); Reset(); return true; } void Bus::SetRAMSize(bool enable_8mb_ram) { g_ram_size = enable_8mb_ram ? RAM_8MB_SIZE : RAM_2MB_SIZE; g_ram_mask = enable_8mb_ram ? RAM_8MB_MASK : RAM_2MB_MASK; Exports::RAM = reinterpret_cast(g_ram); Exports::RAM_SIZE = g_ram_size; Exports::RAM_MASK = g_ram_mask; } void Bus::Shutdown() { UpdateFastmemViews(CPUFastmemMode::Disabled); CPU::g_state.fastmem_base = nullptr; g_ram_mask = 0; g_ram_size = 0; Exports::RAM = 0; Exports::RAM_SIZE = 0; Exports::RAM_MASK = 0; } void Bus::Reset() { std::memset(g_ram, 0, g_ram_size); s_MEMCTRL.exp1_base = 0x1F000000; s_MEMCTRL.exp2_base = 0x1F802000; s_MEMCTRL.exp1_delay_size.bits = 0x0013243F; s_MEMCTRL.exp3_delay_size.bits = 0x00003022; s_MEMCTRL.bios_delay_size.bits = 0x0013243F; s_MEMCTRL.spu_delay_size.bits = 0x200931E1; s_MEMCTRL.cdrom_delay_size.bits = 0x00020843; s_MEMCTRL.exp2_delay_size.bits = 0x00070777; s_MEMCTRL.common_delay.bits = 0x00031125; s_ram_size_reg = UINT32_C(0x00000B88); g_ram_code_bits = {}; RecalculateMemoryTimings(); } void Bus::AddTTYCharacter(char ch) { if (ch == '\r') { } else if (ch == '\n') { if (!s_tty_line_buffer.empty()) { Log::Writef("TTY", "", LOGLEVEL_INFO, "\033[1;34m%s\033[0m", s_tty_line_buffer.c_str()); #ifdef _DEBUG if (CPU::IsTraceEnabled()) CPU::WriteToExecutionLog("TTY: %s\n", s_tty_line_buffer.c_str()); #endif } s_tty_line_buffer.clear(); } else { s_tty_line_buffer += ch; } } void Bus::AddTTYString(const std::string_view& str) { for (char ch : str) AddTTYCharacter(ch); } bool Bus::DoState(StateWrapper& sw) { u32 ram_size = g_ram_size; sw.DoEx(&ram_size, 52, static_cast(RAM_2MB_SIZE)); if (ram_size != g_ram_size) { const bool using_8mb_ram = (ram_size == RAM_8MB_SIZE); SetRAMSize(using_8mb_ram); UpdateFastmemViews(s_fastmem_mode); CPU::UpdateFastmemBase(); } sw.Do(&s_exp1_access_time); sw.Do(&s_exp2_access_time); sw.Do(&s_bios_access_time); sw.Do(&s_cdrom_access_time); sw.Do(&s_spu_access_time); sw.DoBytes(g_ram, g_ram_size); if (sw.GetVersion() < 58) { Log_WarningPrint("Overwriting loaded BIOS 
with old save state."); sw.DoBytes(g_bios, BIOS_SIZE); } sw.DoArray(s_MEMCTRL.regs, countof(s_MEMCTRL.regs)); sw.Do(&s_ram_size_reg); sw.Do(&s_tty_line_buffer); return !sw.HasError(); } void Bus::SetExpansionROM(std::vector data) { s_exp1_rom = std::move(data); } std::tuple Bus::CalculateMemoryTiming(MEMDELAY mem_delay, COMDELAY common_delay) { // from nocash spec s32 first = 0, seq = 0, min = 0; if (mem_delay.use_com0_time) { first += s32(common_delay.com0) - 1; seq += s32(common_delay.com0) - 1; } if (mem_delay.use_com2_time) { first += s32(common_delay.com2); seq += s32(common_delay.com2); } if (mem_delay.use_com3_time) { min = s32(common_delay.com3); } if (first < 6) first++; first = first + s32(mem_delay.access_time) + 2; seq = seq + s32(mem_delay.access_time) + 2; if (first < (min + 6)) first = min + 6; if (seq < (min + 2)) seq = min + 2; const TickCount byte_access_time = first; const TickCount halfword_access_time = mem_delay.data_bus_16bit ? first : (first + seq); const TickCount word_access_time = mem_delay.data_bus_16bit ? (first + seq) : (first + seq + seq + seq); return std::tie(std::max(byte_access_time - 1, 0), std::max(halfword_access_time - 1, 0), std::max(word_access_time - 1, 0)); } void Bus::RecalculateMemoryTimings() { std::tie(s_bios_access_time[0], s_bios_access_time[1], s_bios_access_time[2]) = CalculateMemoryTiming(s_MEMCTRL.bios_delay_size, s_MEMCTRL.common_delay); std::tie(s_cdrom_access_time[0], s_cdrom_access_time[1], s_cdrom_access_time[2]) = CalculateMemoryTiming(s_MEMCTRL.cdrom_delay_size, s_MEMCTRL.common_delay); std::tie(s_spu_access_time[0], s_spu_access_time[1], s_spu_access_time[2]) = CalculateMemoryTiming(s_MEMCTRL.spu_delay_size, s_MEMCTRL.common_delay); Log_TracePrintf("BIOS Memory Timing: %u bit bus, byte=%d, halfword=%d, word=%d", s_MEMCTRL.bios_delay_size.data_bus_16bit ? 16 : 8, s_bios_access_time[0] + 1, s_bios_access_time[1] + 1, s_bios_access_time[2] + 1); Log_TracePrintf("CDROM Memory Timing: %u bit bus, byte=%d, halfword=%d, word=%d", s_MEMCTRL.cdrom_delay_size.data_bus_16bit ? 16 : 8, s_cdrom_access_time[0] + 1, s_cdrom_access_time[1] + 1, s_cdrom_access_time[2] + 1); Log_TracePrintf("SPU Memory Timing: %u bit bus, byte=%d, halfword=%d, word=%d", s_MEMCTRL.spu_delay_size.data_bus_16bit ? 
16 : 8, s_spu_access_time[0] + 1, s_spu_access_time[1] + 1, s_spu_access_time[2] + 1); } CPUFastmemMode Bus::GetFastmemMode() { return s_fastmem_mode; } u8* Bus::GetFastmemBase() { #ifdef ENABLE_MMAP_FASTMEM if (s_fastmem_mode == CPUFastmemMode::MMap) return s_fastmem_arena.BasePointer(); #endif if (s_fastmem_mode == CPUFastmemMode::LUT) return reinterpret_cast(s_fastmem_lut); return nullptr; } void Bus::UpdateFastmemViews(CPUFastmemMode mode) { #ifndef ENABLE_MMAP_FASTMEM Assert(mode != CPUFastmemMode::MMap); #else for (const auto& it : s_fastmem_ram_views) s_fastmem_arena.Unmap(it.first, it.second); s_fastmem_ram_views.clear(); #endif s_fastmem_mode = mode; if (mode == CPUFastmemMode::Disabled) return; #ifdef ENABLE_MMAP_FASTMEM if (mode == CPUFastmemMode::MMap) { auto MapRAM = [](u32 base_address) { u8* map_address = s_fastmem_arena.BasePointer() + base_address; if (!s_fastmem_arena.Map(s_ram_handle, 0, map_address, g_ram_size, PageProtect::ReadWrite)) { Log_ErrorPrintf("Failed to map RAM at fastmem area %p (offset 0x%08X)", map_address, g_ram_size); return; } // mark all pages with code as non-writable for (u32 i = 0; i < static_cast(g_ram_code_bits.size()); i++) { if (g_ram_code_bits[i]) { u8* page_address = map_address + (i * HOST_PAGE_SIZE); if (!MemMap::MemProtect(page_address, HOST_PAGE_SIZE, PageProtect::ReadOnly)) { Log_ErrorPrintf("Failed to write-protect code page at %p", page_address); s_fastmem_arena.Unmap(map_address, g_ram_size); return; } } } s_fastmem_ram_views.emplace_back(map_address, g_ram_size); }; // KUSEG - cached MapRAM(0x00000000); // KSEG0 - cached MapRAM(0x80000000); // KSEG1 - uncached MapRAM(0xA0000000); return; } #endif if (!s_fastmem_lut) { s_fastmem_lut = static_cast(std::malloc(sizeof(u8*) * FASTMEM_LUT_NUM_SLOTS)); Assert(s_fastmem_lut); Log_InfoPrintf("Fastmem base (software): %p", s_fastmem_lut); } std::memset(s_fastmem_lut, 0, sizeof(u8*) * FASTMEM_LUT_NUM_SLOTS); auto MapRAM = [](u32 base_address) { u8* ram_ptr = g_ram; for (u32 address = 0; address < g_ram_size; address += FASTMEM_LUT_PAGE_SIZE) { const u32 lut_index = (base_address + address) >> FASTMEM_LUT_PAGE_SHIFT; s_fastmem_lut[lut_index] = ram_ptr; s_fastmem_lut[FASTMEM_LUT_NUM_PAGES + lut_index] = g_ram_code_bits[address >> HOST_PAGE_SHIFT] ? nullptr : ram_ptr; ram_ptr += FASTMEM_LUT_PAGE_SIZE; } }; // KUSEG - cached MapRAM(0x00000000); MapRAM(0x00200000); MapRAM(0x00400000); MapRAM(0x00600000); // KSEG0 - cached MapRAM(0x80000000); MapRAM(0x80200000); MapRAM(0x80400000); MapRAM(0x80600000); // KSEG1 - uncached MapRAM(0xA0000000); MapRAM(0xA0200000); MapRAM(0xA0400000); MapRAM(0xA0600000); } bool Bus::CanUseFastmemForAddress(VirtualMemoryAddress address) { const PhysicalMemoryAddress paddr = address & CPU::PHYSICAL_MEMORY_ADDRESS_MASK; switch (s_fastmem_mode) { #ifdef ENABLE_MMAP_FASTMEM case CPUFastmemMode::MMap: { // Currently since we don't map the mirrors, don't use fastmem for them. // This is because the swapping of page code bits for SMC is too expensive. 
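      // Only the base KUSEG/KSEG0/KSEG1 views are mapped in the arena (see UpdateFastmemViews() above).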
      return (paddr < RAM_MIRROR_END);
    }
#endif

    case CPUFastmemMode::LUT:
      return (paddr < g_ram_size);

    case CPUFastmemMode::Disabled:
    default:
      return false;
  }
}

bool Bus::IsRAMCodePage(u32 index)
{
  return g_ram_code_bits[index];
}

void Bus::SetRAMCodePage(u32 index)
{
  if (g_ram_code_bits[index])
    return;

  // protect fastmem pages
  g_ram_code_bits[index] = true;
  SetCodePageFastmemProtection(index, false);
}

void Bus::ClearRAMCodePage(u32 index)
{
  if (!g_ram_code_bits[index])
    return;

  // unprotect fastmem pages
  g_ram_code_bits[index] = false;
  SetCodePageFastmemProtection(index, true);
}

void Bus::SetCodePageFastmemProtection(u32 page_index, bool writable)
{
#ifdef ENABLE_MMAP_FASTMEM
  if (s_fastmem_mode == CPUFastmemMode::MMap)
  {
    const PageProtect protect = writable ? PageProtect::ReadWrite : PageProtect::ReadOnly;

    // unprotect fastmem pages
    for (const auto& it : s_fastmem_ram_views)
    {
      u8* page_address = it.first + (page_index * HOST_PAGE_SIZE);
      if (!MemMap::MemProtect(page_address, HOST_PAGE_SIZE, protect))
      {
        Log_ErrorPrintf("Failed to %s code page %u (0x%08X) @ %p", writable ? "unprotect" : "protect", page_index,
                        page_index * static_cast<u32>(HOST_PAGE_SIZE), page_address);
      }
    }

    return;
  }
#endif

  if (s_fastmem_mode == CPUFastmemMode::LUT)
  {
    // mirrors... null out the write LUT entries while the page holds code, so stores take the slow path.
    const u32 code_addr = page_index << HOST_PAGE_SHIFT;
    u8* code_ptr = &g_ram[code_addr];
    for (u32 mirror_start : s_fastmem_ram_mirrors)
    {
      u32 ram_offset = code_addr;
      u8* ram_ptr = code_ptr;
      for (u32 i = 0; i < FASTMEM_LUT_PAGES_PER_CODE_PAGE; i++)
      {
        s_fastmem_lut[FASTMEM_LUT_NUM_PAGES + ((mirror_start + ram_offset) >> FASTMEM_LUT_PAGE_SHIFT)] =
          writable ? ram_ptr : nullptr;
        ram_offset += FASTMEM_LUT_PAGE_SIZE;
        ram_ptr += FASTMEM_LUT_PAGE_SIZE;
      }
    }
  }
}

void Bus::ClearRAMCodePageFlags()
{
  g_ram_code_bits.reset();

#ifdef ENABLE_MMAP_FASTMEM
  if (s_fastmem_mode == CPUFastmemMode::MMap)
  {
    // unprotect fastmem pages
    for (const auto& it : s_fastmem_ram_views)
    {
      if (!MemMap::MemProtect(it.first, it.second, PageProtect::ReadWrite))
      {
        Log_ErrorPrintf("Failed to unprotect code pages for fastmem view @ %p", it.first);
      }
    }
  }
#endif

  if (s_fastmem_mode == CPUFastmemMode::LUT)
  {
    for (u32 i = 0; i < static_cast<u32>(g_ram_code_bits.size()); i++)
    {
      const u32 code_addr = (i * HOST_PAGE_SIZE);
      for (u32 mirror_start : s_fastmem_ram_mirrors)
      {
        u32 ram_offset = code_addr;
        for (u32 j = 0; j < FASTMEM_LUT_PAGES_PER_CODE_PAGE; j++)
        {
          s_fastmem_lut[FASTMEM_LUT_NUM_PAGES + ((mirror_start + ram_offset) >> FASTMEM_LUT_PAGE_SHIFT)] =
            &g_ram[ram_offset];
          ram_offset += FASTMEM_LUT_PAGE_SIZE;
        }
      }
    }
  }
}

bool Bus::IsCodePageAddress(PhysicalMemoryAddress address)
{
  return IsRAMAddress(address) ? g_ram_code_bits[(address & g_ram_mask) / HOST_PAGE_SIZE] : false;
}

bool Bus::HasCodePagesInRange(PhysicalMemoryAddress start_address, u32 size)
{
  if (!IsRAMAddress(start_address))
    return false;

  start_address = (start_address & g_ram_mask);

  const u32 end_address = start_address + size;
  while (start_address < end_address)
  {
    const u32 code_page_index = start_address / HOST_PAGE_SIZE;
    if (g_ram_code_bits[code_page_index])
      return true;

    start_address += HOST_PAGE_SIZE;
  }

  return false;
}

std::optional<Bus::MemoryRegion> Bus::GetMemoryRegionForAddress(PhysicalMemoryAddress address)
{
  if (address < RAM_2MB_SIZE)
    return MemoryRegion::RAM;
  else if (address < RAM_MIRROR_END)
    return static_cast<MemoryRegion>(static_cast<u32>(MemoryRegion::RAM) + (address / RAM_2MB_SIZE));
  else if (address >= EXP1_BASE && address < (EXP1_BASE + EXP1_SIZE))
    return MemoryRegion::EXP1;
  else if (address >= CPU::DCACHE_LOCATION && address < (CPU::DCACHE_LOCATION + CPU::DCACHE_SIZE))
    return MemoryRegion::Scratchpad;
  else if (address >= BIOS_BASE && address < (BIOS_BASE + BIOS_SIZE))
    return MemoryRegion::BIOS;

  return std::nullopt;
}

static constexpr std::array<std::pair<PhysicalMemoryAddress, PhysicalMemoryAddress>,
                            static_cast<u32>(Bus::MemoryRegion::Count)>
  s_code_region_ranges = {{
    {0, Bus::RAM_2MB_SIZE},
    {Bus::RAM_2MB_SIZE, Bus::RAM_2MB_SIZE * 2},
    {Bus::RAM_2MB_SIZE * 2, Bus::RAM_2MB_SIZE * 3},
    {Bus::RAM_2MB_SIZE * 3, Bus::RAM_MIRROR_END},
    {Bus::EXP1_BASE, Bus::EXP1_BASE + Bus::EXP1_SIZE},
    {CPU::DCACHE_LOCATION, CPU::DCACHE_LOCATION + CPU::DCACHE_SIZE},
    {Bus::BIOS_BASE, Bus::BIOS_BASE + Bus::BIOS_SIZE},
  }};

PhysicalMemoryAddress Bus::GetMemoryRegionStart(MemoryRegion region)
{
  return s_code_region_ranges[static_cast<u32>(region)].first;
}

PhysicalMemoryAddress Bus::GetMemoryRegionEnd(MemoryRegion region)
{
  return s_code_region_ranges[static_cast<u32>(region)].second;
}

u8* Bus::GetMemoryRegionPointer(MemoryRegion region)
{
  switch (region)
  {
    case MemoryRegion::RAM:
      return g_ram;

    case MemoryRegion::RAMMirror1:
      return (g_ram + (RAM_2MB_SIZE & g_ram_mask));

    case MemoryRegion::RAMMirror2:
      return (g_ram + ((RAM_2MB_SIZE * 2) & g_ram_mask));

    case MemoryRegion::RAMMirror3:
      return (g_ram + ((RAM_2MB_SIZE * 3) & g_ram_mask));

    case MemoryRegion::EXP1:
      return nullptr;

    case MemoryRegion::Scratchpad:
      return CPU::g_state.dcache.data();

    case MemoryRegion::BIOS:
      return g_bios;

    default:
      return nullptr;
  }
}

static ALWAYS_INLINE_RELEASE bool MaskedMemoryCompare(const u8* pattern, const u8* mask, u32 pattern_length,
                                                      const u8* mem)
{
  if (!mask)
    return std::memcmp(mem, pattern, pattern_length) == 0;

  for (u32 i = 0; i < pattern_length; i++)
  {
    if ((mem[i] & mask[i]) != (pattern[i] & mask[i]))
      return false;
  }

  return true;
}

std::optional<PhysicalMemoryAddress> Bus::SearchMemory(PhysicalMemoryAddress start_address, const u8* pattern,
                                                       const u8* mask, u32 pattern_length)
{
  std::optional<MemoryRegion> region = GetMemoryRegionForAddress(start_address);
  if (!region.has_value())
    return std::nullopt;

  PhysicalMemoryAddress current_address = start_address;
  MemoryRegion current_region = region.value();
  while (current_region != MemoryRegion::Count)
  {
    const u8* mem = GetMemoryRegionPointer(current_region);
    const PhysicalMemoryAddress region_start = GetMemoryRegionStart(current_region);
    const PhysicalMemoryAddress region_end = GetMemoryRegionEnd(current_region);

    if (mem)
    {
      PhysicalMemoryAddress region_offset = current_address - region_start;
      PhysicalMemoryAddress bytes_remaining = region_end - current_address;
      while (bytes_remaining >= pattern_length)
      {
        if (MaskedMemoryCompare(pattern, mask, pattern_length, mem + region_offset))
          return region_start + region_offset;

        region_offset++;
        bytes_remaining--;
      }
    }
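    // Not found in this region; advance to the next searchable region.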
// skip RAM mirrors if (current_region == MemoryRegion::RAM) current_region = MemoryRegion::EXP1; else current_region = static_cast(static_cast(current_region) + 1); if (current_region != MemoryRegion::Count) current_address = GetMemoryRegionStart(current_region); } return std::nullopt; } static TickCount DoInvalidAccess(MemoryAccessType type, MemoryAccessSize size, PhysicalMemoryAddress address, u32& value) { SmallString str; str.append("Invalid bus "); if (size == MemoryAccessSize::Byte) str.append("byte"); if (size == MemoryAccessSize::HalfWord) str.append("word"); if (size == MemoryAccessSize::Word) str.append("dword"); str.append(' '); if (type == MemoryAccessType::Read) str.append("read"); else str.append("write"); str.append_fmt(" at address 0x{:08X}", address); if (type == MemoryAccessType::Write) str.append_fmt(" (value 0x{:08X})", value); Log_ErrorPrint(str); if (type == MemoryAccessType::Read) value = UINT32_C(0xFFFFFFFF); return (type == MemoryAccessType::Read) ? 1 : 0; } template ALWAYS_INLINE static TickCount DoRAMAccess(u32 offset, u32& value) { using namespace Bus; offset &= g_ram_mask; if constexpr (type == MemoryAccessType::Read) { if constexpr (size == MemoryAccessSize::Byte) { value = ZeroExtend32(g_ram[offset]); } else if constexpr (size == MemoryAccessSize::HalfWord) { u16 temp; std::memcpy(&temp, &g_ram[offset], sizeof(u16)); value = ZeroExtend32(temp); } else if constexpr (size == MemoryAccessSize::Word) { std::memcpy(&value, &g_ram[offset], sizeof(u32)); } } else { const u32 page_index = offset / HOST_PAGE_SIZE; if constexpr (skip_redundant_writes) { if constexpr (size == MemoryAccessSize::Byte) { if (g_ram[offset] != Truncate8(value)) { g_ram[offset] = Truncate8(value); if (g_ram_code_bits[page_index]) CPU::CodeCache::InvalidateBlocksWithPageIndex(page_index); } } else if constexpr (size == MemoryAccessSize::HalfWord) { const u16 new_value = Truncate16(value); u16 old_value; std::memcpy(&old_value, &g_ram[offset], sizeof(old_value)); if (old_value != new_value) { std::memcpy(&g_ram[offset], &new_value, sizeof(u16)); if (g_ram_code_bits[page_index]) CPU::CodeCache::InvalidateBlocksWithPageIndex(page_index); } } else if constexpr (size == MemoryAccessSize::Word) { u32 old_value; std::memcpy(&old_value, &g_ram[offset], sizeof(u32)); if (old_value != value) { std::memcpy(&g_ram[offset], &value, sizeof(u32)); if (g_ram_code_bits[page_index]) CPU::CodeCache::InvalidateBlocksWithPageIndex(page_index); } } } else { if (g_ram_code_bits[page_index]) CPU::CodeCache::InvalidateBlocksWithPageIndex(page_index); if constexpr (size == MemoryAccessSize::Byte) { g_ram[offset] = Truncate8(value); } else if constexpr (size == MemoryAccessSize::HalfWord) { const u16 temp = Truncate16(value); std::memcpy(&g_ram[offset], &temp, sizeof(u16)); } else if constexpr (size == MemoryAccessSize::Word) { std::memcpy(&g_ram[offset], &value, sizeof(u32)); } } } return (type == MemoryAccessType::Read) ? RAM_READ_TICKS : 0; } template ALWAYS_INLINE static TickCount DoBIOSAccess(u32 offset, u32& value) { using namespace Bus; // TODO: Configurable mirroring. if constexpr (type == MemoryAccessType::Read) { offset &= UINT32_C(0x7FFFF); if constexpr (size == MemoryAccessSize::Byte) { value = ZeroExtend32(g_bios[offset]); } else if constexpr (size == MemoryAccessSize::HalfWord) { u16 temp; std::memcpy(&temp, &g_bios[offset], sizeof(u16)); value = ZeroExtend32(temp); } else { std::memcpy(&value, &g_bios[offset], sizeof(u32)); } } else { // Writes are ignored. 
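    // The BIOS occupies a ROM region, so stores complete without modifying memory.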
} return s_bios_access_time[static_cast(size)]; } template static TickCount DoEXP1Access(u32 offset, u32& value) { using namespace Bus; if constexpr (type == MemoryAccessType::Read) { if (s_exp1_rom.empty()) { // EXP1 not present. value = UINT32_C(0xFFFFFFFF); } else if (offset == 0x20018) { // Bit 0 - Action Replay On/Off value = UINT32_C(1); } else { const u32 transfer_size = u32(1) << static_cast(size); if ((offset + transfer_size) > s_exp1_rom.size()) { value = UINT32_C(0); } else { if constexpr (size == MemoryAccessSize::Byte) { value = ZeroExtend32(s_exp1_rom[offset]); } else if constexpr (size == MemoryAccessSize::HalfWord) { u16 halfword; std::memcpy(&halfword, &s_exp1_rom[offset], sizeof(halfword)); value = ZeroExtend32(halfword); } else { std::memcpy(&value, &s_exp1_rom[offset], sizeof(value)); } // Log_DevPrintf("EXP1 read: 0x%08X -> 0x%08X", EXP1_BASE | offset, value); } } return s_exp1_access_time[static_cast(size)]; } else { Log_WarningPrintf("EXP1 write: 0x%08X <- 0x%08X", EXP1_BASE | offset, value); return 0; } } template static TickCount DoEXP2Access(u32 offset, u32& value) { using namespace Bus; if constexpr (type == MemoryAccessType::Read) { // rx/tx buffer empty if (offset == 0x21) { value = 0x04 | 0x08; } else if (offset >= 0x60 && offset <= 0x67) { // nocash expansion area value = UINT32_C(0xFFFFFFFF); } else { Log_WarningPrintf("EXP2 read: 0x%08X", EXP2_BASE | offset); value = UINT32_C(0xFFFFFFFF); } return s_exp2_access_time[static_cast(size)]; } else { if (offset == 0x23 || offset == 0x80) { AddTTYCharacter(static_cast(value)); } else if (offset == 0x41 || offset == 0x42) { Log_DevPrintf("BIOS POST status: %02X", value & UINT32_C(0x0F)); } else if (offset == 0x70) { Log_DevPrintf("BIOS POST2 status: %02X", value & UINT32_C(0x0F)); } #if 0 // TODO: Put behind configuration variable else if (offset == 0x81) { Log_WarningPrintf("pcsx_debugbreak()"); Host::ReportErrorAsync("Error", "pcsx_debugbreak()"); System::PauseSystem(true); CPU::ExitExecution(); } else if (offset == 0x82) { Log_WarningPrintf("pcsx_exit() with status 0x%02X", value & UINT32_C(0xFF)); Host::ReportErrorAsync("Error", fmt::format("pcsx_exit() with status 0x{:02X}", value & UINT32_C(0xFF))); System::ShutdownSystem(false); CPU::ExitExecution(); } #endif else { Log_WarningPrintf("EXP2 write: 0x%08X <- 0x%08X", EXP2_BASE | offset, value); } return 0; } } template ALWAYS_INLINE static TickCount DoEXP3Access(u32 offset, u32& value) { if constexpr (type == MemoryAccessType::Read) { Log_WarningPrintf("EXP3 read: 0x%08X -> 0x%08X", offset, Bus::EXP3_BASE | offset); value = UINT32_C(0xFFFFFFFF); return 0; } else { if (offset == 0) Log_WarningPrintf("BIOS POST3 status: %02X", value & UINT32_C(0x0F)); return 0; } } template ALWAYS_INLINE static TickCount DoUnknownEXPAccess(u32 address, u32& value) { if constexpr (type == MemoryAccessType::Read) { Log_ErrorPrintf("Unknown EXP read: 0x%08X", address); return -1; } else { Log_WarningPrintf("Unknown EXP write: 0x%08X <- 0x%08X", address, value); return 0; } } template ALWAYS_INLINE static TickCount DoMemoryControlAccess(u32 offset, u32& value) { using namespace Bus; if constexpr (type == MemoryAccessType::Read) { value = s_MEMCTRL.regs[FIXUP_WORD_OFFSET(size, offset) / 4]; value = FIXUP_WORD_READ_VALUE(size, offset, value); return 2; } else { const u32 index = FIXUP_WORD_OFFSET(size, offset) / 4; value = FIXUP_WORD_WRITE_VALUE(size, offset, value); const u32 write_mask = (index == 8) ? 
                               COMDELAY::WRITE_MASK : MEMDELAY::WRITE_MASK;
    const u32 new_value = (s_MEMCTRL.regs[index] & ~write_mask) | (value & write_mask);
    if (s_MEMCTRL.regs[index] != new_value)
    {
      s_MEMCTRL.regs[index] = new_value;
      RecalculateMemoryTimings();
    }
    return 0;
  }
}

template<MemoryAccessType type, MemoryAccessSize size>
ALWAYS_INLINE static TickCount DoMemoryControl2Access(u32 offset, u32& value)
{
  using namespace Bus;

  if constexpr (type == MemoryAccessType::Read)
  {
    if (offset == 0x00)
    {
      value = s_ram_size_reg;
    }
    else
    {
      return DoInvalidAccess(type, size, MEMCTRL2_BASE | offset, value);
    }

    return 2;
  }
  else
  {
    if (offset == 0x00)
    {
      s_ram_size_reg = value;
    }
    else
    {
      return DoInvalidAccess(type, size, MEMCTRL2_BASE | offset, value);
    }

    return 0;
  }
}

template<MemoryAccessType type, MemoryAccessSize size>
ALWAYS_INLINE static TickCount DoPadAccess(u32 offset, u32& value)
{
  if constexpr (type == MemoryAccessType::Read)
  {
    value = Pad::ReadRegister(FIXUP_HALFWORD_OFFSET(size, offset));
    value = FIXUP_HALFWORD_READ_VALUE(size, offset, value);
    return 2;
  }
  else
  {
    Pad::WriteRegister(FIXUP_HALFWORD_OFFSET(size, offset), FIXUP_HALFWORD_WRITE_VALUE(size, offset, value));
    return 0;
  }
}

template<MemoryAccessType type, MemoryAccessSize size>
ALWAYS_INLINE static TickCount DoSIOAccess(u32 offset, u32& value)
{
  if constexpr (type == MemoryAccessType::Read)
  {
    value = SIO::ReadRegister(FIXUP_HALFWORD_OFFSET(size, offset));
    value = FIXUP_HALFWORD_READ_VALUE(size, offset, value);
    return 2;
  }
  else
  {
    SIO::WriteRegister(FIXUP_HALFWORD_OFFSET(size, offset), FIXUP_HALFWORD_WRITE_VALUE(size, offset, value));
    return 0;
  }
}

template<MemoryAccessType type, MemoryAccessSize size>
ALWAYS_INLINE static TickCount DoCDROMAccess(u32 offset, u32& value)
{
  if constexpr (type == MemoryAccessType::Read)
  {
    switch (size)
    {
      case MemoryAccessSize::Word:
      {
        const u32 b0 = ZeroExtend32(CDROM::ReadRegister(offset));
        const u32 b1 = ZeroExtend32(CDROM::ReadRegister(offset + 1u));
        const u32 b2 = ZeroExtend32(CDROM::ReadRegister(offset + 2u));
        const u32 b3 = ZeroExtend32(CDROM::ReadRegister(offset + 3u));
        value = b0 | (b1 << 8) | (b2 << 16) | (b3 << 24);
      }
      break;

      case MemoryAccessSize::HalfWord:
      {
        const u32 lsb = ZeroExtend32(CDROM::ReadRegister(offset));
        const u32 msb = ZeroExtend32(CDROM::ReadRegister(offset + 1u));
        value = lsb | (msb << 8);
      }
      break;

      case MemoryAccessSize::Byte:
      default:
        value = ZeroExtend32(CDROM::ReadRegister(offset));
        break;
    }

    return Bus::s_cdrom_access_time[static_cast<u32>(size)];
  }
  else
  {
    switch (size)
    {
      case MemoryAccessSize::Word:
      {
        CDROM::WriteRegister(offset, Truncate8(value & 0xFFu));
        CDROM::WriteRegister(offset + 1u, Truncate8((value >> 8) & 0xFFu));
        CDROM::WriteRegister(offset + 2u, Truncate8((value >> 16) & 0xFFu));
        CDROM::WriteRegister(offset + 3u, Truncate8((value >> 24) & 0xFFu));
      }
      break;

      case MemoryAccessSize::HalfWord:
      {
        CDROM::WriteRegister(offset, Truncate8(value & 0xFFu));
        CDROM::WriteRegister(offset + 1u, Truncate8((value >> 8) & 0xFFu));
      }
      break;

      case MemoryAccessSize::Byte:
      default:
        CDROM::WriteRegister(offset, Truncate8(value));
        break;
    }

    return 0;
  }
}

template<MemoryAccessType type, MemoryAccessSize size>
ALWAYS_INLINE static TickCount DoGPUAccess(u32 offset, u32& value)
{
  if constexpr (type == MemoryAccessType::Read)
  {
    value = g_gpu->ReadRegister(FIXUP_WORD_OFFSET(size, offset));
    value = FIXUP_WORD_READ_VALUE(size, offset, value);
    return 2;
  }
  else
  {
    g_gpu->WriteRegister(FIXUP_WORD_OFFSET(size, offset), FIXUP_WORD_WRITE_VALUE(size, offset, value));
    return 0;
  }
}

template<MemoryAccessType type, MemoryAccessSize size>
ALWAYS_INLINE static TickCount DoMDECAccess(u32 offset, u32& value)
{
  if constexpr (type == MemoryAccessType::Read)
  {
    value = MDEC::ReadRegister(FIXUP_WORD_OFFSET(size, offset));
    value = FIXUP_WORD_READ_VALUE(size, offset, value);
    return 2;
  }
  else
  {
    MDEC::WriteRegister(FIXUP_WORD_OFFSET(size, offset),
FIXUP_WORD_WRITE_VALUE(size, offset, value)); return 0; } } template ALWAYS_INLINE static TickCount DoAccessInterruptController(u32 offset, u32& value) { if constexpr (type == MemoryAccessType::Read) { value = InterruptController::ReadRegister(FIXUP_WORD_OFFSET(size, offset)); value = FIXUP_WORD_READ_VALUE(size, offset, value); return 2; } else { InterruptController::WriteRegister(FIXUP_WORD_OFFSET(size, offset), FIXUP_WORD_WRITE_VALUE(size, offset, value)); return 0; } } template ALWAYS_INLINE static TickCount DoAccessTimers(u32 offset, u32& value) { if constexpr (type == MemoryAccessType::Read) { value = Timers::ReadRegister(FIXUP_WORD_OFFSET(size, offset)); value = FIXUP_WORD_READ_VALUE(size, offset, value); return 2; } else { Timers::WriteRegister(FIXUP_WORD_OFFSET(size, offset), FIXUP_WORD_WRITE_VALUE(size, offset, value)); return 0; } } template ALWAYS_INLINE static TickCount DoAccessSPU(u32 offset, u32& value) { if constexpr (type == MemoryAccessType::Read) { switch (size) { case MemoryAccessSize::Word: { // 32-bit reads are read as two 16-bit accesses. const u16 lsb = SPU::ReadRegister(offset); const u16 msb = SPU::ReadRegister(offset + 2); value = ZeroExtend32(lsb) | (ZeroExtend32(msb) << 16); } break; case MemoryAccessSize::HalfWord: { value = ZeroExtend32(SPU::ReadRegister(offset)); } break; case MemoryAccessSize::Byte: default: { const u16 value16 = SPU::ReadRegister(FIXUP_HALFWORD_OFFSET(size, offset)); value = FIXUP_HALFWORD_READ_VALUE(size, offset, value16); } break; } return Bus::s_spu_access_time[static_cast(size)]; } else { // 32-bit writes are written as two 16-bit writes. // TODO: Ignore if address is not aligned. switch (size) { case MemoryAccessSize::Word: { DebugAssert(Common::IsAlignedPow2(offset, 2)); SPU::WriteRegister(offset, Truncate16(value)); SPU::WriteRegister(offset + 2, Truncate16(value >> 16)); break; } case MemoryAccessSize::HalfWord: { DebugAssert(Common::IsAlignedPow2(offset, 2)); SPU::WriteRegister(offset, Truncate16(value)); break; } case MemoryAccessSize::Byte: { SPU::WriteRegister(FIXUP_HALFWORD_OFFSET(size, offset), Truncate16(FIXUP_HALFWORD_READ_VALUE(size, offset, value))); break; } } return 0; } } template ALWAYS_INLINE static TickCount DoDMAAccess(u32 offset, u32& value) { if constexpr (type == MemoryAccessType::Read) { value = DMA::ReadRegister(FIXUP_WORD_OFFSET(size, offset)); value = FIXUP_WORD_READ_VALUE(size, offset, value); return 2; } else { DMA::WriteRegister(FIXUP_WORD_OFFSET(size, offset), FIXUP_WORD_WRITE_VALUE(size, offset, value)); return 0; } } namespace CPU { template ALWAYS_INLINE_RELEASE bool DoInstructionRead(PhysicalMemoryAddress address, void* data) { using namespace Bus; address &= PHYSICAL_MEMORY_ADDRESS_MASK; if (address < RAM_MIRROR_END) { std::memcpy(data, &g_ram[address & g_ram_mask], sizeof(u32) * word_count); if constexpr (add_ticks) g_state.pending_ticks += (icache_read ? 
1 : RAM_READ_TICKS) * word_count; return true; } else if (address >= BIOS_BASE && address < (BIOS_BASE + BIOS_SIZE)) { std::memcpy(data, &g_bios[(address - BIOS_BASE) & BIOS_MASK], sizeof(u32) * word_count); if constexpr (add_ticks) g_state.pending_ticks += s_bios_access_time[static_cast(MemoryAccessSize::Word)] * word_count; return true; } else { if (raise_exceptions) CPU::RaiseException(address, Cop0Registers::CAUSE::MakeValueForException(Exception::IBE, false, false, 0)); std::memset(data, 0, sizeof(u32) * word_count); return false; } } TickCount GetInstructionReadTicks(VirtualMemoryAddress address) { using namespace Bus; address &= PHYSICAL_MEMORY_ADDRESS_MASK; if (address < RAM_MIRROR_END) { return RAM_READ_TICKS; } else if (address >= BIOS_BASE && address < (BIOS_BASE + BIOS_SIZE)) { return s_bios_access_time[static_cast(MemoryAccessSize::Word)]; } else { return 0; } } TickCount GetICacheFillTicks(VirtualMemoryAddress address) { using namespace Bus; address &= PHYSICAL_MEMORY_ADDRESS_MASK; if (address < RAM_MIRROR_END) { return 1 * ((ICACHE_LINE_SIZE - (address & (ICACHE_LINE_SIZE - 1))) / sizeof(u32)); } else if (address >= BIOS_BASE && address < (BIOS_BASE + BIOS_SIZE)) { return s_bios_access_time[static_cast(MemoryAccessSize::Word)] * ((ICACHE_LINE_SIZE - (address & (ICACHE_LINE_SIZE - 1))) / sizeof(u32)); } else { return 0; } } void CheckAndUpdateICacheTags(u32 line_count, TickCount uncached_ticks) { VirtualMemoryAddress current_pc = g_state.pc & ICACHE_TAG_ADDRESS_MASK; if (IsCachedAddress(current_pc)) { TickCount ticks = 0; TickCount cached_ticks_per_line = GetICacheFillTicks(current_pc); for (u32 i = 0; i < line_count; i++, current_pc += ICACHE_LINE_SIZE) { const u32 line = GetICacheLine(current_pc); if (g_state.icache_tags[line] != current_pc) { g_state.icache_tags[line] = current_pc; ticks += cached_ticks_per_line; } } g_state.pending_ticks += ticks; } else { g_state.pending_ticks += uncached_ticks; } } u32 FillICache(VirtualMemoryAddress address) { const u32 line = GetICacheLine(address); u8* line_data = &g_state.icache_data[line * ICACHE_LINE_SIZE]; u32 line_tag; switch ((address >> 2) & 0x03u) { case 0: DoInstructionRead(address & ~(ICACHE_LINE_SIZE - 1u), line_data); line_tag = GetICacheTagForAddress(address); break; case 1: DoInstructionRead(address & (~(ICACHE_LINE_SIZE - 1u) | 0x4), line_data + 0x4); line_tag = GetICacheTagForAddress(address) | 0x1; break; case 2: DoInstructionRead(address & (~(ICACHE_LINE_SIZE - 1u) | 0x8), line_data + 0x8); line_tag = GetICacheTagForAddress(address) | 0x3; break; case 3: default: DoInstructionRead(address & (~(ICACHE_LINE_SIZE - 1u) | 0xC), line_data + 0xC); line_tag = GetICacheTagForAddress(address) | 0x7; break; } g_state.icache_tags[line] = line_tag; const u32 offset = GetICacheLineOffset(address); u32 result; std::memcpy(&result, &line_data[offset], sizeof(result)); return result; } void ClearICache() { std::memset(g_state.icache_data.data(), 0, ICACHE_SIZE); g_state.icache_tags.fill(ICACHE_INVALID_BITS); } ALWAYS_INLINE_RELEASE static u32 ReadICache(VirtualMemoryAddress address) { const u32 line = GetICacheLine(address); const u8* line_data = &g_state.icache_data[line * ICACHE_LINE_SIZE]; const u32 offset = GetICacheLineOffset(address); u32 result; std::memcpy(&result, &line_data[offset], sizeof(result)); return result; } ALWAYS_INLINE_RELEASE static void WriteICache(VirtualMemoryAddress address, u32 value) { const u32 line = GetICacheLine(address); const u32 offset = GetICacheLineOffset(address); g_state.icache_tags[line] = 
GetICacheTagForAddress(address) | ICACHE_INVALID_BITS; std::memcpy(&g_state.icache_data[line * ICACHE_LINE_SIZE + offset], &value, sizeof(value)); } static void WriteCacheControl(u32 value) { Log_DevPrintf("Cache control <- 0x%08X", value); g_state.cache_control.bits = value; } template ALWAYS_INLINE static TickCount DoScratchpadAccess(PhysicalMemoryAddress address, u32& value) { const PhysicalMemoryAddress cache_offset = address & DCACHE_OFFSET_MASK; if constexpr (size == MemoryAccessSize::Byte) { if constexpr (type == MemoryAccessType::Read) value = ZeroExtend32(g_state.dcache[cache_offset]); else g_state.dcache[cache_offset] = Truncate8(value); } else if constexpr (size == MemoryAccessSize::HalfWord) { if constexpr (type == MemoryAccessType::Read) { u16 temp; std::memcpy(&temp, &g_state.dcache[cache_offset], sizeof(temp)); value = ZeroExtend32(temp); } else { u16 temp = Truncate16(value); std::memcpy(&g_state.dcache[cache_offset], &temp, sizeof(temp)); } } else if constexpr (size == MemoryAccessSize::Word) { if constexpr (type == MemoryAccessType::Read) std::memcpy(&value, &g_state.dcache[cache_offset], sizeof(value)); else std::memcpy(&g_state.dcache[cache_offset], &value, sizeof(value)); } return 0; } template static ALWAYS_INLINE_RELEASE TickCount DoMemoryAccess(VirtualMemoryAddress address, u32& value) { using namespace Bus; #if 0 if (type == MemoryAccessType::Write && address == 0x80113028) { if ((TimingEvents::GetGlobalTickCounter() + CPU::g_state.pending_ticks) == 5051485) __debugbreak(); Log_WarningPrintf("VAL %08X @ %u", value, (TimingEvents::GetGlobalTickCounter() + CPU::g_state.pending_ticks)); } #endif switch (address >> 29) { case 0x00: // KUSEG 0M-512M case 0x04: // KSEG0 - physical memory cached { if constexpr (type == MemoryAccessType::Write) { if (g_state.cop0_regs.sr.Isc) { WriteICache(address, value); return 0; } } address &= PHYSICAL_MEMORY_ADDRESS_MASK; if ((address & DCACHE_LOCATION_MASK) == DCACHE_LOCATION) return DoScratchpadAccess(address, value); } break; case 0x01: // KUSEG 512M-1024M case 0x02: // KUSEG 1024M-1536M case 0x03: // KUSEG 1536M-2048M { // Above 512mb raises an exception. 
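      // A negative tick count is the bus-error signal; callers turn it into Exception::DBE.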
if constexpr (type == MemoryAccessType::Read) value = UINT32_C(0xFFFFFFFF); return -1; } case 0x05: // KSEG1 - physical memory uncached { address &= PHYSICAL_MEMORY_ADDRESS_MASK; } break; case 0x06: // KSEG2 case 0x07: // KSEG2 { if (address == 0xFFFE0130) { if constexpr (type == MemoryAccessType::Read) value = g_state.cache_control.bits; else WriteCacheControl(value); return 0; } else { if constexpr (type == MemoryAccessType::Read) value = UINT32_C(0xFFFFFFFF); return -1; } } } if (address < RAM_MIRROR_END) { return DoRAMAccess(address, value); } else if (address >= BIOS_BASE && address < (BIOS_BASE + BIOS_SIZE)) { return DoBIOSAccess(static_cast(address - BIOS_BASE), value); } else if (address < EXP1_BASE) { return DoInvalidAccess(type, size, address, value); } else if (address < (EXP1_BASE + EXP1_SIZE)) { return DoEXP1Access(address & EXP1_MASK, value); } else if (address < MEMCTRL_BASE) { return DoInvalidAccess(type, size, address, value); } else if (address < (MEMCTRL_BASE + MEMCTRL_SIZE)) { return DoMemoryControlAccess(address & MEMCTRL_MASK, value); } else if (address < (PAD_BASE + PAD_SIZE)) { return DoPadAccess(address & PAD_MASK, value); } else if (address < (SIO_BASE + SIO_SIZE)) { return DoSIOAccess(address & SIO_MASK, value); } else if (address < (MEMCTRL2_BASE + MEMCTRL2_SIZE)) { return DoMemoryControl2Access(address & MEMCTRL2_MASK, value); } else if (address < (INTERRUPT_CONTROLLER_BASE + INTERRUPT_CONTROLLER_SIZE)) { return DoAccessInterruptController(address & INTERRUPT_CONTROLLER_MASK, value); } else if (address < (DMA_BASE + DMA_SIZE)) { return DoDMAAccess(address & DMA_MASK, value); } else if (address < (TIMERS_BASE + TIMERS_SIZE)) { return DoAccessTimers(address & TIMERS_MASK, value); } else if (address < CDROM_BASE) { return DoInvalidAccess(type, size, address, value); } else if (address < (CDROM_BASE + GPU_SIZE)) { return DoCDROMAccess(address & CDROM_MASK, value); } else if (address < (GPU_BASE + GPU_SIZE)) { return DoGPUAccess(address & GPU_MASK, value); } else if (address < (MDEC_BASE + MDEC_SIZE)) { return DoMDECAccess(address & MDEC_MASK, value); } else if (address < SPU_BASE) { return DoInvalidAccess(type, size, address, value); } else if (address < (SPU_BASE + SPU_SIZE)) { return DoAccessSPU(address & SPU_MASK, value); } else if (address < EXP2_BASE) { return DoInvalidAccess(type, size, address, value); } else if (address < (EXP2_BASE + EXP2_SIZE)) { return DoEXP2Access(address & EXP2_MASK, value); } else if (address < EXP3_BASE) { return DoUnknownEXPAccess(address, value); } else if (address < (EXP3_BASE + EXP3_SIZE)) { return DoEXP3Access(address & EXP3_MASK, value); } else { return DoInvalidAccess(type, size, address, value); } } template static bool DoAlignmentCheck(VirtualMemoryAddress address) { if constexpr (size == MemoryAccessSize::HalfWord) { if (Common::IsAlignedPow2(address, 2)) return true; } else if constexpr (size == MemoryAccessSize::Word) { if (Common::IsAlignedPow2(address, 4)) return true; } else { return true; } g_state.cop0_regs.BadVaddr = address; RaiseException(type == MemoryAccessType::Read ? 
Exception::AdEL : Exception::AdES); return false; } bool FetchInstruction() { DebugAssert(Common::IsAlignedPow2(g_state.npc, 4)); const PhysicalMemoryAddress address = g_state.npc; switch (address >> 29) { case 0x00: // KUSEG 0M-512M case 0x04: // KSEG0 - physical memory cached { #if 0 DoInstructionRead(address, &g_state.next_instruction.bits); #else if (CompareICacheTag(address)) g_state.next_instruction.bits = ReadICache(address); else g_state.next_instruction.bits = FillICache(address); #endif } break; case 0x05: // KSEG1 - physical memory uncached { if (!DoInstructionRead(address, &g_state.next_instruction.bits)) return false; } break; case 0x01: // KUSEG 512M-1024M case 0x02: // KUSEG 1024M-1536M case 0x03: // KUSEG 1536M-2048M case 0x06: // KSEG2 case 0x07: // KSEG2 default: { CPU::RaiseException(Cop0Registers::CAUSE::MakeValueForException(Exception::IBE, g_state.current_instruction_in_branch_delay_slot, g_state.current_instruction_was_branch_taken, 0), address); return false; } } g_state.pc = g_state.npc; g_state.npc += sizeof(g_state.next_instruction.bits); return true; } bool FetchInstructionForInterpreterFallback() { DebugAssert(Common::IsAlignedPow2(g_state.npc, 4)); const PhysicalMemoryAddress address = g_state.npc; switch (address >> 29) { case 0x00: // KUSEG 0M-512M case 0x04: // KSEG0 - physical memory cached case 0x05: // KSEG1 - physical memory uncached { // We don't use the icache when doing interpreter fallbacks, because it's probably stale. if (!DoInstructionRead(address, &g_state.next_instruction.bits)) return false; } break; case 0x01: // KUSEG 512M-1024M case 0x02: // KUSEG 1024M-1536M case 0x03: // KUSEG 1536M-2048M case 0x06: // KSEG2 case 0x07: // KSEG2 default: { CPU::RaiseException(Cop0Registers::CAUSE::MakeValueForException(Exception::IBE, g_state.current_instruction_in_branch_delay_slot, g_state.current_instruction_was_branch_taken, 0), address); return false; } } g_state.pc = g_state.npc; g_state.npc += sizeof(g_state.next_instruction.bits); return true; } bool SafeReadInstruction(VirtualMemoryAddress addr, u32* value) { switch (addr >> 29) { case 0x00: // KUSEG 0M-512M case 0x04: // KSEG0 - physical memory cached case 0x05: // KSEG1 - physical memory uncached { // TODO: Check icache. 
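      // Safe/debugger reads go straight to RAM/BIOS: no tick accounting, and no exception is raised on failure.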
return DoInstructionRead(addr, value); } case 0x01: // KUSEG 512M-1024M case 0x02: // KUSEG 1024M-1536M case 0x03: // KUSEG 1536M-2048M case 0x06: // KSEG2 case 0x07: // KSEG2 default: { return false; } } } bool ReadMemoryByte(VirtualMemoryAddress addr, u8* value) { u32 temp = 0; const TickCount cycles = DoMemoryAccess(addr, temp); *value = Truncate8(temp); if (cycles < 0) { RaiseException(Exception::DBE); return false; } g_state.pending_ticks += cycles; return true; } bool ReadMemoryHalfWord(VirtualMemoryAddress addr, u16* value) { if (!DoAlignmentCheck(addr)) return false; u32 temp = 0; const TickCount cycles = DoMemoryAccess(addr, temp); *value = Truncate16(temp); if (cycles < 0) { RaiseException(Exception::DBE); return false; } g_state.pending_ticks += cycles; return true; } bool ReadMemoryWord(VirtualMemoryAddress addr, u32* value) { if (!DoAlignmentCheck(addr)) return false; const TickCount cycles = DoMemoryAccess(addr, *value); if (cycles < 0) { RaiseException(Exception::DBE); return false; } g_state.pending_ticks += cycles; return true; } bool WriteMemoryByte(VirtualMemoryAddress addr, u32 value) { const TickCount cycles = DoMemoryAccess(addr, value); if (cycles < 0) { RaiseException(Exception::DBE); return false; } DebugAssert(cycles == 0); return true; } bool WriteMemoryHalfWord(VirtualMemoryAddress addr, u32 value) { if (!DoAlignmentCheck(addr)) return false; const TickCount cycles = DoMemoryAccess(addr, value); if (cycles < 0) { RaiseException(Exception::DBE); return false; } DebugAssert(cycles == 0); return true; } bool WriteMemoryWord(VirtualMemoryAddress addr, u32 value) { if (!DoAlignmentCheck(addr)) return false; const TickCount cycles = DoMemoryAccess(addr, value); if (cycles < 0) { RaiseException(Exception::DBE); return false; } DebugAssert(cycles == 0); return true; } template static ALWAYS_INLINE bool DoSafeMemoryAccess(VirtualMemoryAddress address, u32& value) { using namespace Bus; switch (address >> 29) { case 0x00: // KUSEG 0M-512M case 0x04: // KSEG0 - physical memory cached { address &= PHYSICAL_MEMORY_ADDRESS_MASK; if ((address & DCACHE_LOCATION_MASK) == DCACHE_LOCATION) { DoScratchpadAccess(address, value); return true; } } break; case 0x01: // KUSEG 512M-1024M case 0x02: // KUSEG 1024M-1536M case 0x03: // KUSEG 1536M-2048M case 0x06: // KSEG2 case 0x07: // KSEG2 { // Above 512mb raises an exception. 
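      // The safe accessor just fails here instead of raising the exception.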
      return false;
    }

    case 0x05: // KSEG1 - physical memory uncached
    {
      address &= PHYSICAL_MEMORY_ADDRESS_MASK;
    }
    break;
  }

  if (address < RAM_MIRROR_END)
  {
    // skip_redundant_writes=true is assumed here; it only affects whether unchanged stores invalidate code pages.
    DoRAMAccess<type, size, true>(address, value);
    return true;
  }

  if constexpr (type == MemoryAccessType::Read)
  {
    if (address >= BIOS_BASE && address < (BIOS_BASE + BIOS_SIZE))
    {
      DoBIOSAccess<type, size>(static_cast<u32>(address - BIOS_BASE), value);
      return true;
    }
  }

  return false;
}

bool SafeReadMemoryByte(VirtualMemoryAddress addr, u8* value)
{
  u32 temp = 0;
  if (!DoSafeMemoryAccess<MemoryAccessType::Read, MemoryAccessSize::Byte>(addr, temp))
    return false;

  *value = Truncate8(temp);
  return true;
}

bool SafeReadMemoryHalfWord(VirtualMemoryAddress addr, u16* value)
{
  if ((addr & 1) == 0)
  {
    u32 temp = 0;
    if (!DoSafeMemoryAccess<MemoryAccessType::Read, MemoryAccessSize::HalfWord>(addr, temp))
      return false;

    *value = Truncate16(temp);
    return true;
  }

  u8 low, high;
  if (!SafeReadMemoryByte(addr, &low) || !SafeReadMemoryByte(addr + 1, &high))
    return false;

  *value = (ZeroExtend16(high) << 8) | ZeroExtend16(low);
  return true;
}

bool SafeReadMemoryWord(VirtualMemoryAddress addr, u32* value)
{
  if ((addr & 3) == 0)
    return DoSafeMemoryAccess<MemoryAccessType::Read, MemoryAccessSize::Word>(addr, *value);

  u16 low, high;
  if (!SafeReadMemoryHalfWord(addr, &low) || !SafeReadMemoryHalfWord(addr + 2, &high))
    return false;

  *value = (ZeroExtend32(high) << 16) | ZeroExtend32(low);
  return true;
}

bool SafeReadMemoryCString(VirtualMemoryAddress addr, std::string* value, u32 max_length /*= 1024*/)
{
  value->clear();

  u8 ch;
  while (SafeReadMemoryByte(addr, &ch))
  {
    if (ch == 0)
      return true;

    value->push_back(ch);
    if (value->size() >= max_length)
      return true;

    addr++;
  }

  value->clear();
  return false;
}

bool SafeWriteMemoryByte(VirtualMemoryAddress addr, u8 value)
{
  u32 temp = ZeroExtend32(value);
  return DoSafeMemoryAccess<MemoryAccessType::Write, MemoryAccessSize::Byte>(addr, temp);
}

bool SafeWriteMemoryHalfWord(VirtualMemoryAddress addr, u16 value)
{
  if ((addr & 1) == 0)
  {
    u32 temp = ZeroExtend32(value);
    return DoSafeMemoryAccess<MemoryAccessType::Write, MemoryAccessSize::HalfWord>(addr, temp);
  }

  return SafeWriteMemoryByte(addr, Truncate8(value)) && SafeWriteMemoryByte(addr + 1, Truncate8(value >> 8));
}

bool SafeWriteMemoryWord(VirtualMemoryAddress addr, u32 value)
{
  if ((addr & 3) == 0)
    return DoSafeMemoryAccess<MemoryAccessType::Write, MemoryAccessSize::Word>(addr, value);

  return SafeWriteMemoryHalfWord(addr, Truncate16(value)) && SafeWriteMemoryHalfWord(addr + 2, Truncate16(value >> 16));
}

void* GetDirectReadMemoryPointer(VirtualMemoryAddress address, MemoryAccessSize size, TickCount* read_ticks)
{
  using namespace Bus;

  const u32 seg = (address >> 29);
  if (seg != 0 && seg != 4 && seg != 5)
    return nullptr;

  const PhysicalMemoryAddress paddr = address & PHYSICAL_MEMORY_ADDRESS_MASK;
  if (paddr < RAM_MIRROR_END)
  {
    if (read_ticks)
      *read_ticks = RAM_READ_TICKS;

    return &g_ram[paddr & g_ram_mask];
  }

  if ((paddr & DCACHE_LOCATION_MASK) == DCACHE_LOCATION)
  {
    if (read_ticks)
      *read_ticks = 0;

    return &g_state.dcache[paddr & DCACHE_OFFSET_MASK];
  }

  if (paddr >= BIOS_BASE && paddr < (BIOS_BASE + BIOS_SIZE))
  {
    if (read_ticks)
      *read_ticks = s_bios_access_time[static_cast<u32>(size)];

    return &g_bios[paddr & BIOS_MASK];
  }

  return nullptr;
}

void* GetDirectWriteMemoryPointer(VirtualMemoryAddress address, MemoryAccessSize size)
{
  using namespace Bus;

  const u32 seg = (address >> 29);
  if (seg != 0 && seg != 4 && seg != 5)
    return nullptr;

  const PhysicalMemoryAddress paddr = address & PHYSICAL_MEMORY_ADDRESS_MASK;

#if 0
  // Not enabled until we can protect code regions.
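  // A direct write pointer to RAM would bypass the code-page invalidation performed in DoRAMAccess().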
if (paddr < RAM_MIRROR_END) return &g_ram[paddr & RAM_MASK]; #endif if ((paddr & DCACHE_LOCATION_MASK) == DCACHE_LOCATION) return &g_state.dcache[paddr & DCACHE_OFFSET_MASK]; return nullptr; } namespace Recompiler::Thunks { u64 ReadMemoryByte(u32 address) { u32 temp; const TickCount cycles = DoMemoryAccess(address, temp); if (cycles < 0) return static_cast(-static_cast(Exception::DBE)); g_state.pending_ticks += cycles; return ZeroExtend64(temp); } u64 ReadMemoryHalfWord(u32 address) { if (!Common::IsAlignedPow2(address, 2)) { g_state.cop0_regs.BadVaddr = address; return static_cast(-static_cast(Exception::AdEL)); } u32 temp; const TickCount cycles = DoMemoryAccess(address, temp); if (cycles < 0) return static_cast(-static_cast(Exception::DBE)); g_state.pending_ticks += cycles; return ZeroExtend64(temp); } u64 ReadMemoryWord(u32 address) { if (!Common::IsAlignedPow2(address, 4)) { g_state.cop0_regs.BadVaddr = address; return static_cast(-static_cast(Exception::AdEL)); } u32 temp; const TickCount cycles = DoMemoryAccess(address, temp); if (cycles < 0) return static_cast(-static_cast(Exception::DBE)); g_state.pending_ticks += cycles; return ZeroExtend64(temp); } u32 WriteMemoryByte(u32 address, u32 value) { const TickCount cycles = DoMemoryAccess(address, value); if (cycles < 0) return static_cast(Exception::DBE); DebugAssert(cycles == 0); return 0; } u32 WriteMemoryHalfWord(u32 address, u32 value) { if (!Common::IsAlignedPow2(address, 2)) { g_state.cop0_regs.BadVaddr = address; return static_cast(Exception::AdES); } const TickCount cycles = DoMemoryAccess(address, value); if (cycles < 0) return static_cast(Exception::DBE); DebugAssert(cycles == 0); return 0; } u32 WriteMemoryWord(u32 address, u32 value) { if (!Common::IsAlignedPow2(address, 4)) { g_state.cop0_regs.BadVaddr = address; return static_cast(Exception::AdES); } const TickCount cycles = DoMemoryAccess(address, value); if (cycles < 0) return static_cast(Exception::DBE); DebugAssert(cycles == 0); return 0; } u32 UncheckedReadMemoryByte(u32 address) { u32 temp; g_state.pending_ticks += DoMemoryAccess(address, temp); return temp; } u32 UncheckedReadMemoryHalfWord(u32 address) { u32 temp; g_state.pending_ticks += DoMemoryAccess(address, temp); return temp; } u32 UncheckedReadMemoryWord(u32 address) { u32 temp; g_state.pending_ticks += DoMemoryAccess(address, temp); return temp; } void UncheckedWriteMemoryByte(u32 address, u32 value) { g_state.pending_ticks += DoMemoryAccess(address, value); } void UncheckedWriteMemoryHalfWord(u32 address, u32 value) { g_state.pending_ticks += DoMemoryAccess(address, value); } void UncheckedWriteMemoryWord(u32 address, u32 value) { g_state.pending_ticks += DoMemoryAccess(address, value); } } // namespace Recompiler::Thunks } // namespace CPU