Browse Source

[shader_recompiler, spir-v] verifying int64 emulation path activation

test-revert-gpu-optim
CamilleLaVey 1 month ago
committed by lizzie
parent
commit
1ca19af7fb
  1. 3
      src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp
  2. 6
      src/shader_recompiler/backend/spirv/spirv_emit_context.cpp
  3. 3
      src/shader_recompiler/backend/spirv/spirv_emit_context.h
  4. 27
      src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp

3
src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp

@@ -1,3 +1,6 @@
// SPDX-FileCopyrightText: Copyright 2025 Eden Emulator Project
// SPDX-License-Identifier: GPL-3.0-or-later
// SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

6
src/shader_recompiler/backend/spirv/spirv_emit_context.cpp

@@ -461,7 +461,11 @@ void VectorTypes::Define(Sirit::Module& sirit_ctx, Id base_type, std::string_view
EmitContext::EmitContext(const Profile& profile_, const RuntimeInfo& runtime_info_,
                         IR::Program& program, Bindings& bindings)
    : Sirit::Module(profile_.supported_spirv), profile{profile_}, runtime_info{runtime_info_},
stage{program.stage}, emulate_int64{program.info.uses_int64 && !profile.support_int64},
stage{program.stage},
// Enable int64 emulation if host lacks int64 but we either use int64 ops
// or we need 64-bit addressing for global memory operations.
emulate_int64{!profile.support_int64 &&
(program.info.uses_int64 || program.info.uses_global_memory)},
      texture_rescaling_index{bindings.texture_scaling_index},
      image_rescaling_index{bindings.image_scaling_index} {
    const bool is_unified{profile.unified_descriptor_binding};

3
src/shader_recompiler/backend/spirv/spirv_emit_context.h

@@ -1,3 +1,6 @@
// SPDX-FileCopyrightText: Copyright 2025 Eden Emulator Project
// SPDX-License-Identifier: GPL-3.0-or-later
// SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

27
src/shader_recompiler/ir_opt/global_memory_to_storage_buffer_pass.cpp

@@ -1,3 +1,6 @@
// SPDX-FileCopyrightText: Copyright 2025 Eden Emulator Project
// SPDX-License-Identifier: GPL-3.0-or-later
// SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later
@@ -293,6 +296,14 @@ std::optional<LowAddrInfo> TrackLowAddress(IR::Inst* inst) {
    }
    // This address is expected to either be a PackUint2x32, a IAdd64, or a CompositeConstructU32x2
    IR::Inst* addr_inst{addr.InstRecursive()};
// Unwrap Identity ops introduced by lowerings (e.g., PackUint2x32 -> Identity)
while (addr_inst->GetOpcode() == IR::Opcode::Identity) {
const IR::Value id_arg{addr_inst->Arg(0)};
if (id_arg.IsImmediate()) {
return std::nullopt;
}
addr_inst = id_arg.InstRecursive();
}
    s32 imm_offset{0};
    if (addr_inst->GetOpcode() == IR::Opcode::IAdd64) {
        // If it's an IAdd64, get the immediate offset it is applying and grab the address
@@ -308,6 +319,14 @@ std::optional<LowAddrInfo> TrackLowAddress(IR::Inst* inst) {
            return std::nullopt;
        }
        addr_inst = iadd_addr.InstRecursive();
// Unwrap Identity again if present after folding IAdd64
while (addr_inst->GetOpcode() == IR::Opcode::Identity) {
const IR::Value id_arg{addr_inst->Arg(0)};
if (id_arg.IsImmediate()) {
return std::nullopt;
}
addr_inst = id_arg.InstRecursive();
}
    }
    // With IAdd64 handled, now PackUint2x32 is expected
    if (addr_inst->GetOpcode() == IR::Opcode::PackUint2x32) {
@@ -317,6 +336,14 @@ std::optional<LowAddrInfo> TrackLowAddress(IR::Inst* inst) {
            return std::nullopt;
        }
        addr_inst = vector.InstRecursive();
// Unwrap Identity that may replace PackUint2x32
while (addr_inst->GetOpcode() == IR::Opcode::Identity) {
const IR::Value id_arg{addr_inst->Arg(0)};
if (id_arg.IsImmediate()) {
return std::nullopt;
}
addr_inst = id_arg.InstRecursive();
}
    }
    // The vector is expected to be a CompositeConstructU32x2
    if (addr_inst->GetOpcode() != IR::Opcode::CompositeConstructU32x2) {

Loading…
Cancel
Save