|
|
|
@@ -92,19 +92,25 @@ void MaxwellDMA::Launch() {
             }
         }
     } else {
-        // TODO: allow multisized components.
+        // TODO: xbzk: multisized components support.
+        // validate this widely!
+        // shipped in PR 3164.
         auto& accelerate = rasterizer->AccessAccelerateDMA();
         const bool is_const_a_dst = regs.remap_const.dst_x == RemapConst::Swizzle::CONST_A;
         if (regs.launch_dma.remap_enable != 0 && is_const_a_dst) {
-            ASSERT(regs.remap_const.component_size_minus_one == 3);
+            const u32 remap_components_size = regs.remap_const.component_size_minus_one + 1;
             accelerate.BufferClear(regs.offset_out, regs.line_length_in,
                                    regs.remap_const.remap_consta_value);
-            read_buffer.resize_destructive(regs.line_length_in * sizeof(u32));
-            std::span<u32> span(reinterpret_cast<u32*>(read_buffer.data()), regs.line_length_in);
-            std::ranges::fill(span, regs.remap_const.remap_consta_value);
+            read_buffer.resize_destructive(regs.line_length_in * remap_components_size);
+            for (u32 i = 0; i < regs.line_length_in; ++i) {
+                for (u32 j = 0; j < remap_components_size; ++j) {
+                    read_buffer[i * remap_components_size + j] =
+                        (regs.remap_const.remap_consta_value >> (j * 8)) & 0xFF;
+                }
+            }
             memory_manager.WriteBlockUnsafe(regs.offset_out,
-                                            reinterpret_cast<u8*>(read_buffer.data()),
-                                            regs.line_length_in * sizeof(u32));
+                                            read_buffer.data(),
+                                            regs.line_length_in * remap_components_size);
         } else {
             memory_manager.FlushCaching();
             const auto convert_linear_2_blocklinear_addr = [](u64 address) {
|
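For reference, a standalone sketch of the byte-wise replication of the remap constant that the added inner loops perform. It is not part of this diff; all names here (FillRemapConstant, line_length, component_size) are invented for illustration, and only the shift-and-mask pattern mirrors the added code. With a 4-byte component size it reproduces, on a little-endian target, the same bytes the removed std::ranges::fill over a u32 span used to write:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Replicate a 32-bit constant into `line_length` elements of `component_size`
// bytes each, least-significant byte first. component_size is expected to be
// at most 4, matching component_size_minus_one + 1 in the hunk above.
std::vector<std::uint8_t> FillRemapConstant(std::uint32_t constant, std::size_t line_length,
                                            std::size_t component_size) {
    std::vector<std::uint8_t> buffer(line_length * component_size);
    for (std::size_t i = 0; i < line_length; ++i) {
        for (std::size_t j = 0; j < component_size; ++j) {
            // Byte j of the constant, as in the diff's (value >> (j * 8)) & 0xFF.
            buffer[i * component_size + j] =
                static_cast<std::uint8_t>((constant >> (j * 8)) & 0xFF);
        }
    }
    return buffer;
}

int main() {
    // Two 4-byte elements filled with 0xAABBCCDD.
    const auto bytes = FillRemapConstant(0xAABBCCDD, 2, 4);
    for (const std::uint8_t b : bytes) {
        std::printf("%02X ", static_cast<unsigned>(b)); // prints DD CC BB AA DD CC BB AA
    }
    std::printf("\n");
    return 0;
}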