From d54e53b76832b677867e705a3ac5c8fc45cb1a80 Mon Sep 17 00:00:00 2001
From: lizzie
Date: Mon, 10 Nov 2025 19:44:17 +0000
Subject: [PATCH] inline a32core/a64core

Signed-off-by: lizzie
---
 .../src/dynarmic/backend/ppc64/a32_core.h     |  27 -----
 .../dynarmic/backend/ppc64/a32_interface.cpp  |  89 ++++++++-------
 .../src/dynarmic/backend/ppc64/a64_core.h     |  29 -----
 .../dynarmic/backend/ppc64/a64_interface.cpp  | 105 ++++++++++--------
 4 files changed, 106 insertions(+), 144 deletions(-)

diff --git a/src/dynarmic/src/dynarmic/backend/ppc64/a32_core.h b/src/dynarmic/src/dynarmic/backend/ppc64/a32_core.h
index a84185dd37..2fb0268614 100644
--- a/src/dynarmic/src/dynarmic/backend/ppc64/a32_core.h
+++ b/src/dynarmic/src/dynarmic/backend/ppc64/a32_core.h
@@ -28,31 +28,4 @@ struct A32JitState {
     }
 };
 
-class A32AddressSpace final {
-public:
-    explicit A32AddressSpace(const A32::UserConfig& conf);
-    CodePtr GetOrEmit(IR::LocationDescriptor descriptor);
-    void ClearCache();
-private:
-    friend class A32Core;
-    EmittedBlockInfo Emit(IR::Block ir_block);
-    void Link(EmittedBlockInfo& block);
-    const A32::UserConfig conf;
-    CodeBlock cb;
-    powah::Context as;
-    ankerl::unordered_dense::map<u64, CodePtr> block_entries;
-    ankerl::unordered_dense::map<u64, EmittedBlockInfo> block_infos;
-};
-
-class A32Core final {
-public:
-    explicit A32Core(const A32::UserConfig&) {}
-    HaltReason Run(A32AddressSpace& process, A32JitState& thread_ctx, volatile u32* halt_reason) {
-        auto const loc = thread_ctx.GetLocationDescriptor();
-        auto const entry = process.GetOrEmit(loc);
-        using CodeFn = HaltReason (*)(A32JitState*, volatile u32*);
-        return (CodeFn(entry))(&thread_ctx, halt_reason);
-    }
-};
-
 } // namespace Dynarmic::Backend::PPC64
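The A32Core::Run helper removed from this header reappears below as a static member of the inlined struct in a32_interface.cpp; either way it is the entire dispatch path: fetch or compile the block for the current guest location, then cast the entry point to the JIT calling convention and call straight into it. A minimal stand-alone sketch of that pattern, with hypothetical names (FakeJitState, Dispatch) and assuming CodePtr is a raw pointer into executable memory:

    #include <cstdint>

    using HaltReason = std::uint32_t;   // stand-in; dynarmic's HaltReason is an enum class
    struct FakeJitState {};             // stand-in for A32JitState

    // Emitted blocks follow the (JitState*, volatile u32*) -> HaltReason ABI,
    // so entering JIT-compiled code is just a function-pointer cast and a call.
    HaltReason Dispatch(void* entry, FakeJitState* ctx, volatile std::uint32_t* halt) {
        using CodeFn = HaltReason (*)(FakeJitState*, volatile std::uint32_t*);
        return reinterpret_cast<CodeFn>(entry)(ctx, halt);
    }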
diff --git a/src/dynarmic/src/dynarmic/backend/ppc64/a32_interface.cpp b/src/dynarmic/src/dynarmic/backend/ppc64/a32_interface.cpp
index 6c50e1c647..b67fa50c10 100644
--- a/src/dynarmic/src/dynarmic/backend/ppc64/a32_interface.cpp
+++ b/src/dynarmic/src/dynarmic/backend/ppc64/a32_interface.cpp
@@ -17,71 +17,83 @@
 #include "dynarmic/ir/opt_passes.h"
 #include "dynarmic/interface/A32/a32.h"
 
-namespace Dynarmic::Backend::PPC64 {
+namespace Dynarmic::A32 {
 
-A32AddressSpace::A32AddressSpace(const A32::UserConfig& conf)
-    : conf(conf)
-    , cb(conf.code_cache_size)
-    , as(cb.ptr(), conf.code_cache_size) {
+using namespace Dynarmic::Backend::PPC64;
 
-}
+struct A32AddressSpace final {
+    explicit A32AddressSpace(const A32::UserConfig& conf)
+        : conf(conf)
+        , cb(conf.code_cache_size)
+        , as(cb.ptr(), conf.code_cache_size) {
 
-CodePtr A32AddressSpace::GetOrEmit(IR::LocationDescriptor desc) {
-    if (auto const it = block_entries.find(desc.Value()); it != block_entries.end())
-        return it->second;
+    }
 
-    IR::Block ir_block = A32::Translate(A32::LocationDescriptor{desc}, conf.callbacks, {conf.arch_version, conf.define_unpredictable_behaviour, conf.hook_hint_instructions});
-    Optimization::Optimize(ir_block, conf, {});
-    const EmittedBlockInfo block_info = Emit(std::move(ir_block));
+    CodePtr GetOrEmit(IR::LocationDescriptor desc) {
+        if (auto const it = block_entries.find(desc.Value()); it != block_entries.end())
+            return it->second;
 
-    block_infos.insert_or_assign(desc.Value(), block_info);
-    block_entries.insert_or_assign(desc.Value(), block_info.entry_point);
-    return block_info.entry_point;
-}
+        IR::Block ir_block = A32::Translate(A32::LocationDescriptor{desc}, conf.callbacks, {conf.arch_version, conf.define_unpredictable_behaviour, conf.hook_hint_instructions});
+        Optimization::Optimize(ir_block, conf, {});
+        const EmittedBlockInfo block_info = Emit(std::move(ir_block));
 
-void A32AddressSpace::ClearCache() {
-    block_entries.clear();
-    block_infos.clear();
-}
+        block_infos.insert_or_assign(desc.Value(), block_info);
+        block_entries.insert_or_assign(desc.Value(), block_info.entry_point);
+        return block_info.entry_point;
+    }
 
-EmittedBlockInfo A32AddressSpace::Emit(IR::Block block) {
-    EmittedBlockInfo block_info = EmitPPC64(as, std::move(block), {
-        .enable_cycle_counting = conf.enable_cycle_counting,
-        .always_little_endian = conf.always_little_endian,
-        .a64_variant = false
-    });
-    Link(block_info);
-    return block_info;
-}
+    void ClearCache() {
+        block_entries.clear();
+        block_infos.clear();
+    }
 
-void A32AddressSpace::Link(EmittedBlockInfo& block_info) {
-    //UNREACHABLE();
-}
+    EmittedBlockInfo Emit(IR::Block block) {
+        EmittedBlockInfo block_info = EmitPPC64(as, std::move(block), {
+            .enable_cycle_counting = conf.enable_cycle_counting,
+            .always_little_endian = conf.always_little_endian,
+            .a64_variant = false
+        });
+        Link(block_info);
+        return block_info;
+    }
 
-}
+    void Link(EmittedBlockInfo& block_info) {
+        //UNREACHABLE();
+    }
 
-namespace Dynarmic::A32 {
+    const A32::UserConfig conf;
+    CodeBlock cb;
+    powah::Context as;
+    ankerl::unordered_dense::map<u64, CodePtr> block_entries;
+    ankerl::unordered_dense::map<u64, EmittedBlockInfo> block_infos;
+};
 
-using namespace Dynarmic::Backend::PPC64;
+struct A32Core final {
+    static HaltReason Run(A32AddressSpace& process, A32JitState& thread_ctx, volatile u32* halt_reason) {
+        auto const loc = thread_ctx.GetLocationDescriptor();
+        auto const entry = process.GetOrEmit(loc);
+        using CodeFn = HaltReason (*)(A32JitState*, volatile u32*);
+        return (CodeFn(entry))(&thread_ctx, halt_reason);
+    }
+};
 
 struct Jit::Impl final {
     Impl(Jit* jit_interface, A32::UserConfig conf)
         : conf(conf)
         , current_address_space(conf)
-        , core(conf)
         , jit_interface(jit_interface) {}
 
     HaltReason Run() {
         ASSERT(!is_executing);
         is_executing = true;
-        HaltReason hr = core.Run(current_address_space, jit_state, &halt_reason);
+        HaltReason hr = A32Core::Run(current_address_space, jit_state, &halt_reason);
         is_executing = false;
         RequestCacheInvalidation();
         return hr;
     }
 
     HaltReason Step() {
-        // HaltReason hr = core.Step(current_address_space, jit_state, &halt_reason);
+        // HaltReason hr = A32Core::Step(current_address_space, jit_state, &halt_reason);
         // RequestCacheInvalidation();
         return HaltReason{};
     }
@@ -156,7 +168,6 @@ private:
     A32::UserConfig conf;
     A32JitState jit_state{};
     A32AddressSpace current_address_space;
-    A32Core core;
     Jit* jit_interface;
     volatile u32 halt_reason = 0;
     bool is_executing = false;
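GetOrEmit above is a memoised compile: look the block up by the u64 value of its location descriptor, translate and emit on a miss, then cache both the entry point (the hot path) and the full block metadata (kept for later linking and invalidation). A stand-alone sketch of the same shape, substituting std::unordered_map for ankerl::unordered_dense::map and dummy types for the real ones:

    #include <cstdint>
    #include <unordered_map>

    using CodePtr = const void*;
    struct BlockInfo { CodePtr entry_point; };

    class BlockCache {
    public:
        CodePtr GetOrEmit(std::uint64_t key) {
            if (auto it = entries.find(key); it != entries.end())
                return it->second;              // fast path: block already compiled
            BlockInfo info = Emit(key);         // slow path: translate + emit once
            infos.insert_or_assign(key, info);
            entries.insert_or_assign(key, info.entry_point);
            return info.entry_point;
        }
        void ClearCache() { entries.clear(); infos.clear(); }
    private:
        // Dummy emitter; the real code calls the backend (EmitPPC64) here.
        BlockInfo Emit(std::uint64_t) { return BlockInfo{nullptr}; }
        std::unordered_map<std::uint64_t, CodePtr> entries;
        std::unordered_map<std::uint64_t, BlockInfo> infos;
    };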
diff --git a/src/dynarmic/src/dynarmic/backend/ppc64/a64_core.h b/src/dynarmic/src/dynarmic/backend/ppc64/a64_core.h
index 0f955a9e5b..94ddd84dad 100644
--- a/src/dynarmic/src/dynarmic/backend/ppc64/a64_core.h
+++ b/src/dynarmic/src/dynarmic/backend/ppc64/a64_core.h
@@ -34,33 +34,4 @@ struct A64JitState {
     }
 };
 
-class A64AddressSpace final {
-public:
-    explicit A64AddressSpace(const A64::UserConfig& conf);
-    CodePtr GetOrEmit(IR::LocationDescriptor descriptor);
-    void ClearCache();
-private:
-    friend class A64Core;
-    void EmitPrelude();
-    EmittedBlockInfo Emit(IR::Block ir_block);
-    void Link(EmittedBlockInfo& block);
-
-    const A64::UserConfig conf;
-    CodeBlock cb;
-    powah::Context as;
-    ankerl::unordered_dense::map<u64, CodePtr> block_entries;
-    ankerl::unordered_dense::map<u64, EmittedBlockInfo> block_infos;
-};
-
-class A64Core final {
-public:
-    explicit A64Core(const A64::UserConfig&) {}
-    HaltReason Run(A64AddressSpace& process, A64JitState& thread_ctx, volatile u32* halt_reason) {
-        const auto loc = thread_ctx.GetLocationDescriptor();
-        const auto entry = process.GetOrEmit(loc);
-        using CodeFn = HaltReason (*)(A64JitState*, volatile u32*);
-        return (CodeFn(entry))(&thread_ctx, halt_reason);
-    }
-};
-
 } // namespace Dynarmic::Backend::PPC64
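Both address spaces key their caches on IR::LocationDescriptor::Value(), a single u64 that packs the guest PC together with the processor state that affects decoding, which is why one guest address can legitimately map to several compiled blocks. A purely hypothetical packing, for illustration only (the real descriptor layouts differ):

    #include <cstdint>

    struct LocationKey {
        std::uint64_t pc;    // guest program counter
        std::uint16_t mode;  // decode-relevant state, e.g. a Thumb bit or FPCR mode

        // Assumes the PC fits in 48 bits; illustrative, not dynarmic's layout.
        std::uint64_t Value() const {
            return (pc & 0xFFFF'FFFF'FFFFull) | (std::uint64_t(mode) << 48);
        }
    };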
diff --git a/src/dynarmic/src/dynarmic/backend/ppc64/a64_interface.cpp b/src/dynarmic/src/dynarmic/backend/ppc64/a64_interface.cpp
index bc47dcedc1..accbceaaaf 100644
--- a/src/dynarmic/src/dynarmic/backend/ppc64/a64_interface.cpp
+++ b/src/dynarmic/src/dynarmic/backend/ppc64/a64_interface.cpp
@@ -16,79 +16,87 @@
 #include "dynarmic/ir/opt_passes.h"
 #include "dynarmic/interface/A64/a64.h"
 
-namespace Dynarmic::Backend::PPC64 {
-
-A64AddressSpace::A64AddressSpace(const A64::UserConfig& conf)
-    : conf(conf)
-    , cb(conf.code_cache_size)
-    , as(cb.ptr(), conf.code_cache_size) {
-
-}
-
-CodePtr A64AddressSpace::GetOrEmit(IR::LocationDescriptor desc) {
-    if (auto const it = block_entries.find(desc.Value()); it != block_entries.end())
-        return it->second;
+namespace Dynarmic::A64 {
 
-    const auto get_code = [this](u64 vaddr) {
-        return conf.callbacks->MemoryReadCode(vaddr);
-    };
-    IR::Block ir_block = A64::Translate(A64::LocationDescriptor{desc}, get_code, {conf.define_unpredictable_behaviour, conf.wall_clock_cntpct});
-    Optimization::Optimize(ir_block, conf, {});
+using namespace Dynarmic::Backend::PPC64;
 
-    fmt::print("IR:\n");
-    fmt::print("{}\n", IR::DumpBlock(ir_block));
+struct A64AddressSpace final {
+    explicit A64AddressSpace(const A64::UserConfig& conf)
+        : conf(conf)
+        , cb(conf.code_cache_size)
+        , as(cb.ptr(), conf.code_cache_size) {
 
-    const EmittedBlockInfo block_info = Emit(std::move(ir_block));
+    }
 
-    block_infos.insert_or_assign(desc.Value(), block_info);
-    block_entries.insert_or_assign(desc.Value(), block_info.entry_point);
-    return block_info.entry_point;
-}
+    CodePtr GetOrEmit(IR::LocationDescriptor desc) {
+        if (auto const it = block_entries.find(desc.Value()); it != block_entries.end())
+            return it->second;
+
+        const auto get_code = [this](u64 vaddr) {
+            return conf.callbacks->MemoryReadCode(vaddr);
+        };
+        IR::Block ir_block = A64::Translate(A64::LocationDescriptor{desc}, get_code, {conf.define_unpredictable_behaviour, conf.wall_clock_cntpct});
+        Optimization::Optimize(ir_block, conf, {});
+        fmt::print("IR:\n{}\n", IR::DumpBlock(ir_block));
+        const EmittedBlockInfo block_info = Emit(std::move(ir_block));
+        block_infos.insert_or_assign(desc.Value(), block_info);
+        block_entries.insert_or_assign(desc.Value(), block_info.entry_point);
+        return block_info.entry_point;
+    }
 
-void A64AddressSpace::ClearCache() {
-    block_entries.clear();
-    block_infos.clear();
-}
+    void ClearCache() {
+        block_entries.clear();
+        block_infos.clear();
+    }
 
-EmittedBlockInfo A64AddressSpace::Emit(IR::Block block) {
-    EmittedBlockInfo block_info = EmitPPC64(as, std::move(block), {
-        .enable_cycle_counting = conf.enable_cycle_counting,
-        .always_little_endian = true,
-        .a64_variant = true
-    });
-    Link(block_info);
-    return block_info;
-}
+    EmittedBlockInfo Emit(IR::Block block) {
+        EmittedBlockInfo block_info = EmitPPC64(as, std::move(block), {
+            .enable_cycle_counting = conf.enable_cycle_counting,
+            .always_little_endian = true,
+            .a64_variant = true
+        });
+        Link(block_info);
+        return block_info;
+    }
 
-void A64AddressSpace::Link(EmittedBlockInfo& block_info) {
-    // TODO(lizzie): Block linking
-    // UNREACHABLE();
-}
-}
+    void Link(EmittedBlockInfo& block_info) {
+        // TODO(lizzie): Block linking
+        // UNREACHABLE();
+    }
 
-namespace Dynarmic::A64 {
+    const A64::UserConfig conf;
+    CodeBlock cb;
+    powah::Context as;
+    ankerl::unordered_dense::map<u64, CodePtr> block_entries;
+    ankerl::unordered_dense::map<u64, EmittedBlockInfo> block_infos;
+};
 
-using namespace Dynarmic::Backend::PPC64;
+struct A64Core final {
+    static HaltReason Run(A64AddressSpace& process, A64JitState& thread_ctx, volatile u32* halt_reason) {
+        const auto loc = thread_ctx.GetLocationDescriptor();
+        const auto entry = process.GetOrEmit(loc);
+        using CodeFn = HaltReason (*)(A64JitState*, volatile u32*);
+        return (CodeFn(entry))(&thread_ctx, halt_reason);
+    }
+};
 
 struct Jit::Impl final {
     Impl(Jit* jit_interface, A64::UserConfig conf)
         : conf(conf)
         , current_address_space(conf)
-        , core(conf)
         , jit_interface(jit_interface) {}
 
     HaltReason Run() {
         ASSERT(!is_executing);
         is_executing = true;
-        HaltReason hr = core.Run(current_address_space, jit_state, &halt_reason);
-        current_address_space.ClearCache(); // TODO: dont just invalidate everything
+        HaltReason hr = A64Core::Run(current_address_space, jit_state, &halt_reason);
         is_executing = false;
         RequestCacheInvalidation();
         return hr;
     }
 
     HaltReason Step() {
-        // HaltReason hr = core.Step(current_address_space, jit_state, &halt_reason);
+        // HaltReason hr = A64Core::Step(current_address_space, jit_state, &halt_reason);
         // RequestCacheInvalidation();
         return HaltReason{};
     }
@@ -223,7 +231,6 @@ private:
     A64::UserConfig conf;
     A64JitState jit_state{};
     A64AddressSpace current_address_space;
-    A64Core core;
     Jit* jit_interface;
     volatile u32 halt_reason = 0;
     bool is_executing = false;
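Note that both Run() implementations flip is_executing around the call and defer cache maintenance to RequestCacheInvalidation() afterwards: clearing the block maps while emitted code is still on the stack would free instructions the guest is executing. A sketch of that deferral pattern with hypothetical names; the real invalidation bookkeeping lives elsewhere in Jit::Impl:

    #include <functional>
    #include <vector>

    class InvalidationQueue {
    public:
        // Record work to run once execution has stopped,
        // e.g. fn == [&] { current_address_space.ClearCache(); }.
        void Request(std::function<void()> fn) { pending.push_back(std::move(fn)); }

        // Called only while is_executing == false.
        void Drain() {
            for (auto& fn : pending) fn();
            pending.clear();
        }
    private:
        std::vector<std::function<void()>> pending;
    };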