Browse Source
Merge pull request #6099 from bunnei/derive-mem
Merge pull request #6099 from bunnei/derive-mem
Kernel Rework: Derive memory regions from board layout.
committed by
GitHub
26 changed files with 2139 additions and 173 deletions
-
1src/common/CMakeLists.txt
-
43src/common/common_sizes.h
-
9src/core/CMakeLists.txt
-
20src/core/hle/kernel/arch/arm64/k_memory_region_device_types.inc
-
52src/core/hle/kernel/board/nintendo/nx/k_memory_region_device_types.inc
-
164src/core/hle/kernel/board/nintendo/nx/k_system_control.cpp
-
28src/core/hle/kernel/board/nintendo/nx/k_system_control.h
-
26src/core/hle/kernel/board/nintendo/nx/secure_monitor.h
-
43src/core/hle/kernel/k_address_space_info.cpp
-
199src/core/hle/kernel/k_memory_layout.board.nintendo_nx.cpp
-
166src/core/hle/kernel/k_memory_layout.cpp
-
397src/core/hle/kernel/k_memory_layout.h
-
12src/core/hle/kernel/k_memory_manager.cpp
-
18src/core/hle/kernel/k_memory_manager.h
-
350src/core/hle/kernel/k_memory_region.h
-
338src/core/hle/kernel/k_memory_region_type.h
-
22src/core/hle/kernel/k_scheduler.cpp
-
7src/core/hle/kernel/k_scheduler.h
-
11src/core/hle/kernel/k_scheduler_lock.h
-
6src/core/hle/kernel/k_spin_lock.h
-
42src/core/hle/kernel/k_system_control.cpp
-
18src/core/hle/kernel/k_system_control.h
-
4src/core/hle/kernel/k_thread.h
-
12src/core/hle/kernel/k_trace.h
-
322src/core/hle/kernel/kernel.cpp
-
2src/core/hle/kernel/kernel.h
@ -0,0 +1,43 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
#pragma once |
||||
|
|
||||
|
#include <limits> |
||||
|
|
||||
|
#include "common/common_types.h" |
||||
|
|
||||
|
namespace Common { |
||||
|
|
||||
|
// Common byte-size constants, expressed as unsigned 64-bit values.
// The irregular sizes (448 MB, 507 MB, 3285 MB, ...) are specific memory
// pool/partition sizes consumed by the kernel memory layout code.
enum : u64 {
    Size_1_KB = 0x400ULL,
    Size_64_KB = 64ULL * Size_1_KB,
    Size_128_KB = 128ULL * Size_1_KB,
    Size_1_MB = 0x100000ULL,
    Size_2_MB = 2ULL * Size_1_MB,
    Size_4_MB = 4ULL * Size_1_MB,
    Size_5_MB = 5ULL * Size_1_MB,
    Size_14_MB = 14ULL * Size_1_MB,
    Size_32_MB = 32ULL * Size_1_MB,
    Size_33_MB = 33ULL * Size_1_MB,
    Size_128_MB = 128ULL * Size_1_MB,
    Size_448_MB = 448ULL * Size_1_MB,
    Size_507_MB = 507ULL * Size_1_MB,
    Size_562_MB = 562ULL * Size_1_MB,
    Size_1554_MB = 1554ULL * Size_1_MB,
    Size_2048_MB = 2048ULL * Size_1_MB,
    Size_2193_MB = 2193ULL * Size_1_MB,
    Size_3285_MB = 3285ULL * Size_1_MB,
    Size_4916_MB = 4916ULL * Size_1_MB,
    Size_1_GB = 0x40000000ULL,
    Size_2_GB = 2ULL * Size_1_GB,
    Size_4_GB = 4ULL * Size_1_GB,
    Size_6_GB = 6ULL * Size_1_GB,
    Size_8_GB = 8ULL * Size_1_GB,
    Size_64_GB = 64ULL * Size_1_GB,
    Size_512_GB = 512ULL * Size_1_GB,
    // Sentinel meaning "no valid size".
    Size_Invalid = std::numeric_limits<u64>::max(),
};
||||
|
|
||||
|
} // namespace Common |
||||
@ -0,0 +1,20 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
// All architectures must define NumArchitectureDeviceRegions.
constexpr inline const auto NumArchitectureDeviceRegions = 3;

// Each region type is derived from the architecture device base type with a
// sparse (one-hot style) index; the static_asserts below pin the resulting
// encoded bit patterns so accidental re-numbering is caught at compile time.
constexpr inline const auto KMemoryRegionType_Uart =
    KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 0);
constexpr inline const auto KMemoryRegionType_InterruptCpuInterface =
    KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 1)
        .SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_InterruptDistributor =
    KMemoryRegionType_ArchDeviceBase.DeriveSparse(0, NumArchitectureDeviceRegions, 2)
        .SetAttribute(KMemoryRegionAttr_NoUserMap);
static_assert(KMemoryRegionType_Uart.GetValue() == (0x1D));
static_assert(KMemoryRegionType_InterruptCpuInterface.GetValue() ==
              (0x2D | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_InterruptDistributor.GetValue() ==
              (0x4D | KMemoryRegionAttr_NoUserMap));
||||
@ -0,0 +1,52 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
// All architectures must define NumBoardDeviceRegions.
constexpr inline const auto NumBoardDeviceRegions = 6;

// Board-specific device region types, derived from the board device base
// type. The static_asserts below pin each encoded value so re-numbering a
// derivation index is caught at compile time.
// UNUSED: .Derive(NumBoardDeviceRegions, 0);
constexpr inline const auto KMemoryRegionType_MemoryController =
    KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 1)
        .SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_MemoryController1 =
    KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 2)
        .SetAttribute(KMemoryRegionAttr_NoUserMap);
constexpr inline const auto KMemoryRegionType_MemoryController0 =
    KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 3)
        .SetAttribute(KMemoryRegionAttr_NoUserMap);
// NOTE(review): PowerManagementController uses DeriveTransition() rather than
// SetAttribute(NoUserMap) — presumably intentional (the PMC region is handled
// specially at map time); confirm against the k_memory_region_type.h helpers.
constexpr inline const auto KMemoryRegionType_PowerManagementController =
    KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 4).DeriveTransition();
constexpr inline const auto KMemoryRegionType_LegacyLpsDevices =
    KMemoryRegionType_BoardDeviceBase.Derive(NumBoardDeviceRegions, 5);
static_assert(KMemoryRegionType_MemoryController.GetValue() ==
              (0x55 | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_MemoryController1.GetValue() ==
              (0x65 | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_MemoryController0.GetValue() ==
              (0x95 | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_PowerManagementController.GetValue() == (0x1A5));

static_assert(KMemoryRegionType_LegacyLpsDevices.GetValue() == 0xC5);

// Sub-regions of the legacy low-power-subsystem device block.
constexpr inline const auto NumLegacyLpsDevices = 7;
constexpr inline const auto KMemoryRegionType_LegacyLpsExceptionVectors =
    KMemoryRegionType_LegacyLpsDevices.Derive(NumLegacyLpsDevices, 0);
constexpr inline const auto KMemoryRegionType_LegacyLpsIram =
    KMemoryRegionType_LegacyLpsDevices.Derive(NumLegacyLpsDevices, 1);
constexpr inline const auto KMemoryRegionType_LegacyLpsFlowController =
    KMemoryRegionType_LegacyLpsDevices.Derive(NumLegacyLpsDevices, 2);
constexpr inline const auto KMemoryRegionType_LegacyLpsPrimaryICtlr =
    KMemoryRegionType_LegacyLpsDevices.Derive(NumLegacyLpsDevices, 3);
constexpr inline const auto KMemoryRegionType_LegacyLpsSemaphore =
    KMemoryRegionType_LegacyLpsDevices.Derive(NumLegacyLpsDevices, 4);
constexpr inline const auto KMemoryRegionType_LegacyLpsAtomics =
    KMemoryRegionType_LegacyLpsDevices.Derive(NumLegacyLpsDevices, 5);
constexpr inline const auto KMemoryRegionType_LegacyLpsClkRst =
    KMemoryRegionType_LegacyLpsDevices.Derive(NumLegacyLpsDevices, 6);
static_assert(KMemoryRegionType_LegacyLpsExceptionVectors.GetValue() == 0x3C5);
static_assert(KMemoryRegionType_LegacyLpsIram.GetValue() == 0x5C5);
static_assert(KMemoryRegionType_LegacyLpsFlowController.GetValue() == 0x6C5);
static_assert(KMemoryRegionType_LegacyLpsPrimaryICtlr.GetValue() == 0x9C5);
static_assert(KMemoryRegionType_LegacyLpsSemaphore.GetValue() == 0xAC5);
static_assert(KMemoryRegionType_LegacyLpsAtomics.GetValue() == 0xCC5);
static_assert(KMemoryRegionType_LegacyLpsClkRst.GetValue() == 0x11C5);
||||
@ -0,0 +1,164 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project
|
||||
|
// Licensed under GPLv2 or any later version
|
||||
|
// Refer to the license.txt file included.
|
||||
|
|
||||
|
#include <random>
|
||||
|
|
||||
|
#include "common/common_sizes.h"
|
||||
|
#include "core/hle/kernel/board/nintendo/nx/k_system_control.h"
|
||||
|
#include "core/hle/kernel/board/nintendo/nx/secure_monitor.h"
|
||||
|
#include "core/hle/kernel/k_trace.h"
|
||||
|
|
||||
|
namespace Kernel::Board::Nintendo::Nx { |
||||
|
|
||||
|
namespace impl {

// Non-secure system memory each sysmodule requires, expressed as a count of
// 4 KiB pages times the page size.
constexpr const std::size_t RequiredNonSecureSystemMemorySizeVi = 0x2238 * 4 * 1024;
constexpr const std::size_t RequiredNonSecureSystemMemorySizeNvservices = 0x710 * 4 * 1024;
constexpr const std::size_t RequiredNonSecureSystemMemorySizeMisc = 0x80 * 4 * 1024;

} // namespace impl

// Total non-secure system memory required (vi + nvservices + misc).
constexpr const std::size_t RequiredNonSecureSystemMemorySize =
    impl::RequiredNonSecureSystemMemorySizeVi + impl::RequiredNonSecureSystemMemorySizeNvservices +
    impl::RequiredNonSecureSystemMemorySizeMisc;
||||
|
|
||||
|
namespace { |
||||
|
|
||||
|
// Returns the raw memory mode used during kernel initialization.
// 0x01 selects the standard 4GB arrangement (see GetMemoryArrangeForInit).
u32 GetMemoryModeForInit() {
    constexpr u32 DefaultMemoryMode = 0x01;
    return DefaultMemoryMode;
}
||||
|
|
||||
|
// Returns the memory size selector used during kernel initialization.
// 0 corresponds to Smc::MemorySize_4GB, the retail configuration.
u32 GetMemorySizeForInit() {
    return 0;
}
||||
|
|
||||
|
// Maps the low 6 bits of the init memory mode to a memory arrangement.
// Any unrecognized mode falls back to the standard 4GB arrangement.
Smc::MemoryArrangement GetMemoryArrangeForInit() {
    const u32 arrange_mode = GetMemoryModeForInit() & 0x3F;
    switch (arrange_mode) {
    case 0x02:
        return Smc::MemoryArrangement_4GBForAppletDev;
    case 0x03:
        return Smc::MemoryArrangement_4GBForSystemDev;
    case 0x11:
        return Smc::MemoryArrangement_6GB;
    case 0x12:
        return Smc::MemoryArrangement_6GBForAppletDev;
    case 0x21:
        return Smc::MemoryArrangement_8GB;
    case 0x01:
    default:
        return Smc::MemoryArrangement_4GB;
    }
}
||||
|
} // namespace
|
||||
|
|
||||
|
// Initialization.
|
||||
|
// Returns the total DRAM size (in bytes) implied by the init memory size
// selector. All invalid selectors map to 4GB.
size_t KSystemControl::Init::GetIntendedMemorySize() {
    const auto memory_size = GetMemorySizeForInit();
    if (memory_size == Smc::MemorySize_6GB) {
        return Common::Size_6_GB;
    }
    if (memory_size == Smc::MemorySize_8GB) {
        return Common::Size_8_GB;
    }
    // Smc::MemorySize_4GB and every invalid mode.
    return Common::Size_4_GB;
}
||||
|
|
||||
|
// Translates the kernel's base address to a physical address.
// On this board the mapping is the identity.
PAddr KSystemControl::Init::GetKernelPhysicalBaseAddress(u64 base_address) {
    return base_address;
}
||||
|
|
||||
|
// Whether the kernel should raise the thread resource limit (and therefore
// allocate additional slab heap — see KMemoryLayout::GetResourceRegionSizeForInit).
bool KSystemControl::Init::ShouldIncreaseThreadResourceLimit() {
    return true;
}
||||
|
|
||||
|
// Returns the application memory pool size (in bytes) for the current
// memory arrangement.
std::size_t KSystemControl::Init::GetApplicationPoolSize() {
    // Select the base pool size; arrangements that share a value are merged.
    size_t base_pool_size = Common::Size_3285_MB;
    switch (GetMemoryArrangeForInit()) {
    case Smc::MemoryArrangement_4GBForAppletDev:
        base_pool_size = Common::Size_2048_MB;
        break;
    case Smc::MemoryArrangement_6GB:
    case Smc::MemoryArrangement_8GB:
        base_pool_size = Common::Size_4916_MB;
        break;
    case Smc::MemoryArrangement_4GB:
    case Smc::MemoryArrangement_4GBForSystemDev:
    case Smc::MemoryArrangement_6GBForAppletDev:
    default:
        // Keep the 3285 MB default.
        break;
    }

    // Return (possibly) adjusted size.
    return base_pool_size;
}
||||
|
|
||||
|
// Returns the applet memory pool size (in bytes) for the current memory
// arrangement, minus memory reserved for Atmosphere and the KTrace buffer.
size_t KSystemControl::Init::GetAppletPoolSize() {
    // Select the base pool size; arrangements that share a value are merged.
    size_t base_pool_size = Common::Size_507_MB;
    switch (GetMemoryArrangeForInit()) {
    case Smc::MemoryArrangement_4GBForAppletDev:
        base_pool_size = Common::Size_1554_MB;
        break;
    case Smc::MemoryArrangement_4GBForSystemDev:
        base_pool_size = Common::Size_448_MB;
        break;
    case Smc::MemoryArrangement_6GB:
        base_pool_size = Common::Size_562_MB;
        break;
    case Smc::MemoryArrangement_6GBForAppletDev:
    case Smc::MemoryArrangement_8GB:
        base_pool_size = Common::Size_2193_MB;
        break;
    case Smc::MemoryArrangement_4GB:
    default:
        // Keep the 507 MB default.
        break;
    }

    // Return (possibly) adjusted size: carve out the extra system memory
    // Atmosphere reserves, plus the kernel trace buffer.
    constexpr size_t ExtraSystemMemoryForAtmosphere = Common::Size_33_MB;
    return base_pool_size - ExtraSystemMemoryForAtmosphere - KTraceBufferSize;
}
||||
|
|
||||
|
// Returns the minimum size of the non-secure system memory pool.
size_t KSystemControl::Init::GetMinimumNonSecureSystemPoolSize() {
    // Verify that our minimum is at least as large as Nintendo's.
    constexpr size_t MinimumSize = RequiredNonSecureSystemMemorySize;
    static_assert(MinimumSize >= 0x29C8000);

    return MinimumSize;
}
||||
|
|
||||
|
namespace { |
||||
|
// Draws a value uniformly distributed over [min, max] (inclusive) using the
// generator f, which must itself produce uniformly distributed u64 values
// over the full 64-bit range.
template <typename F>
u64 GenerateUniformRange(u64 min, u64 max, F f) {
    // Handle the case where the difference is too large to represent.
    if (max == std::numeric_limits<u64>::max() && min == std::numeric_limits<u64>::min()) {
        return f();
    }

    // Iterate until we get a value in range. Rejecting draws >= effective_max
    // (the largest multiple of range_size) avoids the modulo bias a plain
    // `f() % range_size` would introduce.
    const u64 range_size = ((max + 1) - min);
    const u64 effective_max = (std::numeric_limits<u64>::max() / range_size) * range_size;
    while (true) {
        if (const u64 rnd = f(); rnd < effective_max) {
            return min + (rnd % range_size);
        }
    }
}
||||
|
|
||||
|
} // Anonymous namespace
|
||||
|
|
||||
|
// Returns a pseudo-random 64-bit value, seeded once from the system entropy
// source. NOTE(review): the distribution's lower bound is 1, so this never
// returns 0 — confirm the exclusion of 0 is intentional.
u64 KSystemControl::GenerateRandomU64() {
    static std::random_device device;
    static std::mt19937 gen(device());
    static std::uniform_int_distribution<u64> distribution(1, std::numeric_limits<u64>::max());
    return distribution(gen);
}
||||
|
|
||||
|
// Returns a uniformly distributed random value in [min, max] (inclusive).
u64 KSystemControl::GenerateRandomRange(u64 min, u64 max) {
    return GenerateUniformRange(min, max, GenerateRandomU64);
}
||||
|
|
||||
|
} // namespace Kernel::Board::Nintendo::Nx
|
||||
@ -0,0 +1,28 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
#pragma once |
||||
|
|
||||
|
#include "common/common_types.h" |
||||
|
|
||||
|
namespace Kernel::Board::Nintendo::Nx { |
||||
|
|
||||
|
// Board-level system control: memory layout parameters used during kernel
// initialization, plus the kernel's random number source.
class KSystemControl {
public:
    class Init {
    public:
        // Initialization.
        // Total DRAM size (bytes) the kernel should manage.
        static std::size_t GetIntendedMemorySize();
        // Physical address of the kernel base (identity mapping on this board).
        static PAddr GetKernelPhysicalBaseAddress(u64 base_address);
        // Whether extra slab heap should be reserved for additional threads.
        static bool ShouldIncreaseThreadResourceLimit();
        // Pool sizes (bytes) for the current memory arrangement.
        static std::size_t GetApplicationPoolSize();
        static std::size_t GetAppletPoolSize();
        static std::size_t GetMinimumNonSecureSystemPoolSize();
    };

    // Uniformly distributed random value in [min, max] (inclusive).
    static u64 GenerateRandomRange(u64 min, u64 max);
    static u64 GenerateRandomU64();
};
||||
|
|
||||
|
} // namespace Kernel::Board::Nintendo::Nx |
||||
@ -0,0 +1,26 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
#pragma once |
||||
|
|
||||
|
#include "common/common_types.h" |
||||
|
|
||||
|
namespace Kernel::Board::Nintendo::Nx::Smc { |
||||
|
|
||||
|
// Memory size selectors as reported by the secure monitor.
enum MemorySize {
    MemorySize_4GB = 0,
    MemorySize_6GB = 1,
    MemorySize_8GB = 2,
};
||||
|
|
||||
|
// Memory arrangement selectors (how DRAM is split between application,
// applet, and system pools) as reported by the secure monitor.
enum MemoryArrangement {
    MemoryArrangement_4GB = 0,
    MemoryArrangement_4GBForAppletDev = 1,
    MemoryArrangement_4GBForSystemDev = 2,
    MemoryArrangement_6GB = 3,
    MemoryArrangement_6GBForAppletDev = 4,
    MemoryArrangement_8GB = 5,
};
||||
|
|
||||
|
} // namespace Kernel::Board::Nintendo::Nx::Smc |
||||
@ -0,0 +1,199 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project
|
||||
|
// Licensed under GPLv2 or any later version
|
||||
|
// Refer to the license.txt file included.
|
||||
|
|
||||
|
#include "common/alignment.h"
|
||||
|
#include "core/hle/kernel/k_memory_layout.h"
|
||||
|
#include "core/hle/kernel/k_memory_manager.h"
|
||||
|
#include "core/hle/kernel/k_system_control.h"
|
||||
|
#include "core/hle/kernel/k_trace.h"
|
||||
|
|
||||
|
namespace Kernel { |
||||
|
|
||||
|
namespace { |
||||
|
|
||||
|
// Alignment granularity (128 KiB) and maximum size of the kernel carveout,
// used when placing the DRAM pool partitions.
constexpr size_t CarveoutAlignment = 0x20000;
constexpr size_t CarveoutSizeMax = (512ULL * 1024 * 1024) - CarveoutAlignment;
||||
|
|
||||
|
// Inserts the PMC MMIO window into the physical region tree, split into a
// never-mapped prefix and the PMC register block proper. Returns false if
// either insertion fails.
bool SetupPowerManagementControllerMemoryRegion(KMemoryLayout& memory_layout) {
    // Above firmware 2.0.0, the PMC is not mappable.
    return memory_layout.GetPhysicalMemoryRegionTree().Insert(
               0x7000E000, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap) &&
           memory_layout.GetPhysicalMemoryRegionTree().Insert(
               0x7000E400, 0xC00,
               KMemoryRegionType_PowerManagementController | KMemoryRegionAttr_NoUserMap);
}
||||
|
|
||||
|
// Inserts a pool partition into the physical region tree and mirrors it into
// the virtual tree at the physical region's pair address. cur_attr is a
// running attribute counter used to uniquely tag (and later find) each
// partition; it is incremented on every call.
void InsertPoolPartitionRegionIntoBothTrees(KMemoryLayout& memory_layout, size_t start, size_t size,
                                            KMemoryRegionType phys_type,
                                            KMemoryRegionType virt_type, u32& cur_attr) {
    const u32 attr = cur_attr++;
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(start, size,
                                                              static_cast<u32>(phys_type), attr));
    // Re-find the region we just inserted so we can read its pair address.
    const KMemoryRegion* phys = memory_layout.GetPhysicalMemoryRegionTree().FindByTypeAndAttribute(
        static_cast<u32>(phys_type), attr);
    ASSERT(phys != nullptr);
    ASSERT(phys->GetEndAddress() != 0);
    ASSERT(memory_layout.GetVirtualMemoryRegionTree().Insert(phys->GetPairAddress(), size,
                                                             static_cast<u32>(virt_type), attr));
}
||||
|
|
||||
|
} // namespace
|
||||
|
|
||||
|
namespace Init { |
||||
|
|
||||
|
// Registers the board's device MMIO windows in the physical region tree.
// Addresses are Tegra X1 physical MMIO addresses; every insertion is
// asserted because a failure here means the layout is unusable.
void SetupDevicePhysicalMemoryRegions(KMemoryLayout& memory_layout) {
    ASSERT(SetupPowerManagementControllerMemoryRegion(memory_layout));
    // Memory controller register blocks (never user-mappable).
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x70019000, 0x1000, KMemoryRegionType_MemoryController | KMemoryRegionAttr_NoUserMap));
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x7001C000, 0x1000, KMemoryRegionType_MemoryController0 | KMemoryRegionAttr_NoUserMap));
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x7001D000, 0x1000, KMemoryRegionType_MemoryController1 | KMemoryRegionAttr_NoUserMap));
    // Interrupt controller: distributor and CPU interface must be mapped by
    // the kernel; the surrounding pages are reserved but untyped.
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x50040000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x50041000, 0x1000,
        KMemoryRegionType_InterruptDistributor | KMemoryRegionAttr_ShouldKernelMap));
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x50042000, 0x1000,
        KMemoryRegionType_InterruptCpuInterface | KMemoryRegionAttr_ShouldKernelMap));
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x50043000, 0x1D000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));

    // Map IRAM unconditionally, to support debug-logging-to-iram build config.
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x40000000, 0x40000, KMemoryRegionType_LegacyLpsIram | KMemoryRegionAttr_ShouldKernelMap));

    // Above firmware 2.0.0, prevent mapping the bpmp exception vectors or the ipatch region.
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x6000F000, 0x1000, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        0x6001DC00, 0x400, KMemoryRegionType_None | KMemoryRegionAttr_NoUserMap));
}
||||
|
|
||||
|
// Registers the DRAM extents in the physical region tree: the whole DRAM
// range, the early-reserved prefix, and (when enabled) the KTrace buffer at
// the very end of DRAM.
void SetupDramPhysicalMemoryRegions(KMemoryLayout& memory_layout) {
    const size_t intended_memory_size = KSystemControl::Init::GetIntendedMemorySize();
    const PAddr physical_memory_base_address =
        KSystemControl::Init::GetKernelPhysicalBaseAddress(DramPhysicalAddress);

    // Insert blocks into the tree.
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        physical_memory_base_address, intended_memory_size, KMemoryRegionType_Dram));
    ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
        physical_memory_base_address, ReservedEarlyDramSize, KMemoryRegionType_DramReservedEarly));

    // Insert the KTrace block at the end of Dram, if KTrace is enabled.
    static_assert(!IsKTraceEnabled || KTraceBufferSize > 0);
    if constexpr (IsKTraceEnabled) {
        const PAddr ktrace_buffer_phys_addr =
            physical_memory_base_address + intended_memory_size - KTraceBufferSize;
        ASSERT(memory_layout.GetPhysicalMemoryRegionTree().Insert(
            ktrace_buffer_phys_addr, KTraceBufferSize, KMemoryRegionType_KernelTraceBuffer));
    }
}
||||
|
|
||||
|
// Carves the DRAM pool partition region into the modern (5.0.0+) 4-pool
// layout: application, applet, non-secure system, pool-management, and
// system pools, laid out back-to-front from the end of DRAM. The statement
// order matters: each pool's start is computed from the previous one.
void SetupPoolPartitionMemoryRegions(KMemoryLayout& memory_layout) {
    // Start by identifying the extents of the DRAM memory region.
    const auto dram_extents = memory_layout.GetMainMemoryPhysicalExtents();
    ASSERT(dram_extents.GetEndAddress() != 0);

    // Determine the end of the pool region (the KTrace buffer sits past it).
    const u64 pool_end = dram_extents.GetEndAddress() - KTraceBufferSize;

    // Find the start of the kernel DRAM region.
    const KMemoryRegion* kernel_dram_region =
        memory_layout.GetPhysicalMemoryRegionTree().FindFirstDerived(
            KMemoryRegionType_DramKernelBase);
    ASSERT(kernel_dram_region != nullptr);

    const u64 kernel_dram_start = kernel_dram_region->GetAddress();
    ASSERT(Common::IsAligned(kernel_dram_start, CarveoutAlignment));

    // Find the start of the pool partitions region.
    const KMemoryRegion* pool_partitions_region =
        memory_layout.GetPhysicalMemoryRegionTree().FindByTypeAndAttribute(
            KMemoryRegionType_DramPoolPartition, 0);
    ASSERT(pool_partitions_region != nullptr);
    const u64 pool_partitions_start = pool_partitions_region->GetAddress();

    // Setup the pool partition layouts.
    // On 5.0.0+, setup modern 4-pool-partition layout.

    // Get Application and Applet pool sizes.
    const size_t application_pool_size = KSystemControl::Init::GetApplicationPoolSize();
    const size_t applet_pool_size = KSystemControl::Init::GetAppletPoolSize();
    const size_t unsafe_system_pool_min_size =
        KSystemControl::Init::GetMinimumNonSecureSystemPoolSize();

    // Decide on starting addresses for our pools. The non-secure system pool
    // is clamped so it never starts past the end of the kernel carveout.
    const u64 application_pool_start = pool_end - application_pool_size;
    const u64 applet_pool_start = application_pool_start - applet_pool_size;
    const u64 unsafe_system_pool_start = std::min(
        kernel_dram_start + CarveoutSizeMax,
        Common::AlignDown(applet_pool_start - unsafe_system_pool_min_size, CarveoutAlignment));
    const size_t unsafe_system_pool_size = applet_pool_start - unsafe_system_pool_start;

    // We want to arrange application pool depending on where the middle of dram is.
    const u64 dram_midpoint = (dram_extents.GetAddress() + dram_extents.GetEndAddress()) / 2;
    u32 cur_pool_attr = 0;
    size_t total_overhead_size = 0;
    if (dram_extents.GetEndAddress() <= dram_midpoint || dram_midpoint <= application_pool_start) {
        // The application pool lies entirely on one side of the midpoint:
        // insert it as a single region.
        InsertPoolPartitionRegionIntoBothTrees(
            memory_layout, application_pool_start, application_pool_size,
            KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool,
            cur_pool_attr);
        total_overhead_size +=
            KMemoryManager::CalculateManagementOverheadSize(application_pool_size);
    } else {
        // The pool straddles the midpoint: split it into two regions at the midpoint.
        const size_t first_application_pool_size = dram_midpoint - application_pool_start;
        const size_t second_application_pool_size =
            application_pool_start + application_pool_size - dram_midpoint;
        InsertPoolPartitionRegionIntoBothTrees(
            memory_layout, application_pool_start, first_application_pool_size,
            KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool,
            cur_pool_attr);
        InsertPoolPartitionRegionIntoBothTrees(
            memory_layout, dram_midpoint, second_application_pool_size,
            KMemoryRegionType_DramApplicationPool, KMemoryRegionType_VirtualDramApplicationPool,
            cur_pool_attr);
        total_overhead_size +=
            KMemoryManager::CalculateManagementOverheadSize(first_application_pool_size);
        total_overhead_size +=
            KMemoryManager::CalculateManagementOverheadSize(second_application_pool_size);
    }

    // Insert the applet pool.
    InsertPoolPartitionRegionIntoBothTrees(memory_layout, applet_pool_start, applet_pool_size,
                                           KMemoryRegionType_DramAppletPool,
                                           KMemoryRegionType_VirtualDramAppletPool, cur_pool_attr);
    total_overhead_size += KMemoryManager::CalculateManagementOverheadSize(applet_pool_size);

    // Insert the nonsecure system pool.
    InsertPoolPartitionRegionIntoBothTrees(
        memory_layout, unsafe_system_pool_start, unsafe_system_pool_size,
        KMemoryRegionType_DramSystemNonSecurePool, KMemoryRegionType_VirtualDramSystemNonSecurePool,
        cur_pool_attr);
    total_overhead_size += KMemoryManager::CalculateManagementOverheadSize(unsafe_system_pool_size);

    // Insert the pool management region (sized to cover the overhead of every
    // pool, including itself).
    total_overhead_size += KMemoryManager::CalculateManagementOverheadSize(
        (unsafe_system_pool_start - pool_partitions_start) - total_overhead_size);
    const u64 pool_management_start = unsafe_system_pool_start - total_overhead_size;
    const size_t pool_management_size = total_overhead_size;
    u32 pool_management_attr = 0;
    InsertPoolPartitionRegionIntoBothTrees(
        memory_layout, pool_management_start, pool_management_size,
        KMemoryRegionType_DramPoolManagement, KMemoryRegionType_VirtualDramPoolManagement,
        pool_management_attr);

    // Insert the system pool (whatever remains at the front of the partition region).
    const u64 system_pool_size = pool_management_start - pool_partitions_start;
    InsertPoolPartitionRegionIntoBothTrees(memory_layout, pool_partitions_start, system_pool_size,
                                           KMemoryRegionType_DramSystemPool,
                                           KMemoryRegionType_VirtualDramSystemPool, cur_pool_attr);
}
||||
|
|
||||
|
} // namespace Init
|
||||
|
|
||||
|
} // namespace Kernel
|
||||
@ -0,0 +1,166 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project
|
||||
|
// Licensed under GPLv2 or any later version
|
||||
|
// Refer to the license.txt file included.
|
||||
|
|
||||
|
#include <array>
|
||||
|
|
||||
|
#include "common/alignment.h"
|
||||
|
#include "core/hle/kernel/k_memory_layout.h"
|
||||
|
#include "core/hle/kernel/k_system_control.h"
|
||||
|
|
||||
|
namespace Kernel { |
||||
|
|
||||
|
namespace { |
||||
|
|
||||
|
// Perfect-forwarding shim over the region allocator; returns the newly
// allocated KMemoryRegion.
template <typename... Args>
KMemoryRegion* AllocateRegion(KMemoryRegionAllocator& memory_region_allocator, Args&&... args) {
    return memory_region_allocator.Allocate(std::forward<Args>(args)...);
}
||||
|
|
||||
|
} // namespace
|
||||
|
|
||||
|
// The tree does not own the allocator; it must outlive the tree.
KMemoryRegionTree::KMemoryRegionTree(KMemoryRegionAllocator& memory_region_allocator_)
    : memory_region_allocator{memory_region_allocator_} {}
||||
|
|
||||
|
// Allocates a region for [address, last_address] and inserts it without any
// of the split/derive validation that Insert() performs.
void KMemoryRegionTree::InsertDirectly(u64 address, u64 last_address, u32 attr, u32 type_id) {
    this->insert(*AllocateRegion(memory_region_allocator, address, last_address, attr, type_id));
}
||||
|
|
||||
|
// Carves a sub-region [address, address + size) of the given type out of the
// existing region that contains it, splitting the old region into up to three
// pieces (before / new / after). Returns false if the containing region has
// the wrong attributes, is too small, or cannot derive type_id.
// NOTE(review): `found` is dereferenced without a null check — presumably
// FindModifiable is guaranteed to succeed for any address inside the tree's
// span; confirm against the tree's invariants.
bool KMemoryRegionTree::Insert(u64 address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
    // Locate the memory region that contains the address.
    KMemoryRegion* found = this->FindModifiable(address);

    // We require that the old attr is correct.
    if (found->GetAttributes() != old_attr) {
        return false;
    }

    // We further require that the region can be split from the old region.
    const u64 inserted_region_end = address + size;
    const u64 inserted_region_last = inserted_region_end - 1;
    if (found->GetLastAddress() < inserted_region_last) {
        return false;
    }

    // Further, we require that the type id is a valid transformation.
    if (!found->CanDerive(type_id)) {
        return false;
    }

    // Cache information from the region before we remove it.
    const u64 old_address = found->GetAddress();
    const u64 old_last = found->GetLastAddress();
    const u64 old_pair = found->GetPairAddress();
    const u32 old_type = found->GetType();

    // Erase the existing region from the tree.
    this->erase(this->iterator_to(*found));

    // Insert the new region into the tree.
    if (old_address == address) {
        // Reuse the old object for the new region, if we can.
        found->Reset(address, inserted_region_last, old_pair, new_attr, type_id);
        this->insert(*found);
    } else {
        // If we can't re-use, adjust the old region.
        found->Reset(old_address, address - 1, old_pair, old_attr, old_type);
        this->insert(*found);

        // Insert a new region for the split. The pair address (if valid —
        // max() is the "no pair" sentinel) is offset by the same amount.
        const u64 new_pair = (old_pair != std::numeric_limits<u64>::max())
                                 ? old_pair + (address - old_address)
                                 : old_pair;
        this->insert(*AllocateRegion(memory_region_allocator, address, inserted_region_last,
                                     new_pair, new_attr, type_id));
    }

    // If we need to insert a region after the region, do so.
    if (old_last != inserted_region_last) {
        const u64 after_pair = (old_pair != std::numeric_limits<u64>::max())
                                   ? old_pair + (inserted_region_end - old_address)
                                   : old_pair;
        this->insert(*AllocateRegion(memory_region_allocator, inserted_region_end, old_last,
                                     after_pair, old_attr, old_type));
    }

    return true;
}
||||
|
|
||||
|
// Picks a random, `alignment`-aligned address for a `size`-byte region that
// lies entirely within a single region of type `type_id`. Loops (rejection
// sampling) until a candidate satisfies all constraints; assumes at least one
// valid placement exists, otherwise this never returns.
VAddr KMemoryRegionTree::GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id) {
    // We want to find the total extents of the type id.
    const auto extents = this->GetDerivedRegionExtents(static_cast<KMemoryRegionType>(type_id));

    // Ensure that our alignment is correct.
    ASSERT(Common::IsAligned(extents.GetAddress(), alignment));

    const u64 first_address = extents.GetAddress();
    const u64 last_address = extents.GetLastAddress();

    // Candidate addresses are multiples of `alignment` inside the extents.
    const u64 first_index = first_address / alignment;
    const u64 last_index = last_address / alignment;

    while (true) {
        const u64 candidate =
            KSystemControl::GenerateRandomRange(first_index, last_index) * alignment;

        // Ensure that the candidate doesn't overflow with the size.
        if (!(candidate < candidate + size)) {
            continue;
        }

        const u64 candidate_last = candidate + size - 1;

        // Ensure that the candidate fits within the region.
        if (candidate_last > last_address) {
            continue;
        }

        // Locate the candidate region, and ensure it fits and has the correct type id.
        if (const auto& candidate_region = *this->Find(candidate);
            !(candidate_last <= candidate_region.GetLastAddress() &&
              candidate_region.GetType() == type_id)) {
            continue;
        }

        return candidate;
    }
}
||||
|
|
||||
|
// All four region trees share the layout's single region allocator.
KMemoryLayout::KMemoryLayout()
    : virtual_tree{memory_region_allocator}, physical_tree{memory_region_allocator},
      virtual_linear_tree{memory_region_allocator}, physical_linear_tree{memory_region_allocator} {}
||||
|
|
||||
|
// Establishes the static virtual<->physical translation offsets for the kernel's
// linear (direct) mapping, then mirrors the relevant regions into the two
// linear-mapping trees for fast address translation lookups.
void KMemoryLayout::InitializeLinearMemoryRegionTrees(PAddr aligned_linear_phys_start,
                                                      VAddr linear_virtual_start) {
    // Set static differences.
    // The two diffs are negations of each other (mod 2^64), stored separately so
    // each direction of translation is a single unsigned addition.
    linear_phys_to_virt_diff = linear_virtual_start - aligned_linear_phys_start;
    linear_virt_to_phys_diff = aligned_linear_phys_start - linear_virtual_start;

    // Initialize linear trees.
    // Physical side: mirror every region carrying the linear-mapped attribute.
    for (auto& region : GetPhysicalMemoryRegionTree()) {
        if (region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
            GetPhysicalLinearMemoryRegionTree().InsertDirectly(
                region.GetAddress(), region.GetLastAddress(), region.GetAttributes(),
                region.GetType());
        }
    }

    // Virtual side: mirror every region derived from the DRAM root type.
    for (auto& region : GetVirtualMemoryRegionTree()) {
        if (region.IsDerivedFrom(KMemoryRegionType_Dram)) {
            GetVirtualLinearMemoryRegionTree().InsertDirectly(
                region.GetAddress(), region.GetLastAddress(), region.GetAttributes(),
                region.GetType());
        }
    }
}
||||
|
|
||||
|
// Computes the size of the kernel resource region to reserve at initialization.
// The base size is optionally extended with additional slab-heap space when the
// system control layer requests a raised thread resource limit.
size_t KMemoryLayout::GetResourceRegionSizeForInit() {
    // Start from the base kernel resource size.
    size_t region_size = KernelResourceSize;

    // Reserve extra slab heap space when additional threads are allowed.
    if (KSystemControl::Init::ShouldIncreaseThreadResourceLimit()) {
        region_size += KernelSlabHeapAdditionalSize;
    }

    return region_size;
}
||||
|
|
||||
|
} // namespace Kernel
|
||||
@ -0,0 +1,350 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
#pragma once |
||||
|
|
||||
|
#include "common/assert.h" |
||||
|
#include "common/common_types.h" |
||||
|
#include "common/intrusive_red_black_tree.h" |
||||
|
#include "core/hle/kernel/k_memory_region_type.h" |
||||
|
|
||||
|
namespace Kernel { |
||||
|
|
||||
|
class KMemoryRegionAllocator; |
||||
|
|
||||
|
// A single contiguous span of the address space, tracked as a node in a
// KMemoryRegionTree. A region stores inclusive [address, last_address] bounds,
// an optional "pair" address (its counterpart in the other address space),
// attribute flag bits, and a type id supporting derived-type subset checks.
class KMemoryRegion final : public Common::IntrusiveRedBlackTreeBaseNode<KMemoryRegion>,
                            NonCopyable {
    friend class KMemoryRegionTree;

public:
    constexpr KMemoryRegion() = default;
    constexpr KMemoryRegion(u64 address_, u64 last_address_)
        : address{address_}, last_address{last_address_} {}
    constexpr KMemoryRegion(u64 address_, u64 last_address_, u64 pair_address_, u32 attributes_,
                            u32 type_id_)
        : address(address_), last_address(last_address_), pair_address(pair_address_),
          attributes(attributes_), type_id(type_id_) {}
    // The four-argument form uses u64 max as a "no pair address" sentinel.
    constexpr KMemoryRegion(u64 address_, u64 last_address_, u32 attributes_, u32 type_id_)
        : KMemoryRegion(address_, last_address_, std::numeric_limits<u64>::max(), attributes_,
                        type_id_) {}

    // Tree ordering: a region compares equal to any region/address that overlaps
    // its [address, last_address] span, which is what lets the tree's find()
    // locate the region containing a given point address.
    static constexpr int Compare(const KMemoryRegion& lhs, const KMemoryRegion& rhs) {
        if (lhs.GetAddress() < rhs.GetAddress()) {
            return -1;
        } else if (lhs.GetAddress() <= rhs.GetLastAddress()) {
            return 0;
        } else {
            return 1;
        }
    }

private:
    // Reinitializes all fields in place; used by the tree when splitting regions.
    constexpr void Reset(u64 a, u64 la, u64 p, u32 r, u32 t) {
        address = a;
        pair_address = p;
        last_address = la;
        attributes = r;
        type_id = t;
    }

public:
    // Inclusive start address of the region.
    constexpr u64 GetAddress() const {
        return address;
    }

    // Counterpart address in the opposite (physical/virtual) space, when set.
    constexpr u64 GetPairAddress() const {
        return pair_address;
    }

    // Inclusive end address (address of the last byte).
    constexpr u64 GetLastAddress() const {
        return last_address;
    }

    // Exclusive end address (one past the last byte).
    constexpr u64 GetEndAddress() const {
        return this->GetLastAddress() + 1;
    }

    constexpr size_t GetSize() const {
        return this->GetEndAddress() - this->GetAddress();
    }

    constexpr u32 GetAttributes() const {
        return attributes;
    }

    constexpr u32 GetType() const {
        return type_id;
    }

    // Narrows the type; the new type must be derivable from the current one.
    constexpr void SetType(u32 type) {
        ASSERT(this->CanDerive(type));
        type_id = type;
    }

    // Whether the given address falls within [address, last_address].
    constexpr bool Contains(u64 address) const {
        ASSERT(this->GetEndAddress() != 0);
        return this->GetAddress() <= address && address <= this->GetLastAddress();
    }

    // True when this region's type id contains every bit of the ancestor type.
    constexpr bool IsDerivedFrom(u32 type) const {
        return (this->GetType() | type) == this->GetType();
    }

    // True when all bits of attr are already present in the type id.
    constexpr bool HasTypeAttribute(u32 attr) const {
        return (this->GetType() | attr) == this->GetType();
    }

    // True when the candidate type is a bit-superset of this region's type.
    constexpr bool CanDerive(u32 type) const {
        return (this->GetType() | type) == type;
    }

    constexpr void SetPairAddress(u64 a) {
        pair_address = a;
    }

    constexpr void SetTypeAttribute(u32 attr) {
        type_id |= attr;
    }

private:
    u64 address{};
    u64 last_address{};
    // Counterpart address; the delegating constructor uses u64 max as "unset".
    u64 pair_address{};
    u32 attributes{};
    u32 type_id{};
};
||||
|
|
||||
|
class KMemoryRegionTree final : NonCopyable { |
||||
|
public: |
||||
|
struct DerivedRegionExtents { |
||||
|
const KMemoryRegion* first_region{}; |
||||
|
const KMemoryRegion* last_region{}; |
||||
|
|
||||
|
constexpr DerivedRegionExtents() = default; |
||||
|
|
||||
|
constexpr u64 GetAddress() const { |
||||
|
return this->first_region->GetAddress(); |
||||
|
} |
||||
|
|
||||
|
constexpr u64 GetLastAddress() const { |
||||
|
return this->last_region->GetLastAddress(); |
||||
|
} |
||||
|
|
||||
|
constexpr u64 GetEndAddress() const { |
||||
|
return this->GetLastAddress() + 1; |
||||
|
} |
||||
|
|
||||
|
constexpr size_t GetSize() const { |
||||
|
return this->GetEndAddress() - this->GetAddress(); |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
private: |
||||
|
using TreeType = |
||||
|
Common::IntrusiveRedBlackTreeBaseTraits<KMemoryRegion>::TreeType<KMemoryRegion>; |
||||
|
|
||||
|
public: |
||||
|
using value_type = TreeType::value_type; |
||||
|
using size_type = TreeType::size_type; |
||||
|
using difference_type = TreeType::difference_type; |
||||
|
using pointer = TreeType::pointer; |
||||
|
using const_pointer = TreeType::const_pointer; |
||||
|
using reference = TreeType::reference; |
||||
|
using const_reference = TreeType::const_reference; |
||||
|
using iterator = TreeType::iterator; |
||||
|
using const_iterator = TreeType::const_iterator; |
||||
|
|
||||
|
private: |
||||
|
TreeType m_tree{}; |
||||
|
KMemoryRegionAllocator& memory_region_allocator; |
||||
|
|
||||
|
public: |
||||
|
explicit KMemoryRegionTree(KMemoryRegionAllocator& memory_region_allocator_); |
||||
|
|
||||
|
public: |
||||
|
KMemoryRegion* FindModifiable(u64 address) { |
||||
|
if (auto it = this->find(KMemoryRegion(address, address, 0, 0)); it != this->end()) { |
||||
|
return std::addressof(*it); |
||||
|
} else { |
||||
|
return nullptr; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
const KMemoryRegion* Find(u64 address) const { |
||||
|
if (auto it = this->find(KMemoryRegion(address, address, 0, 0)); it != this->cend()) { |
||||
|
return std::addressof(*it); |
||||
|
} else { |
||||
|
return nullptr; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
const KMemoryRegion* FindByType(KMemoryRegionType type_id) const { |
||||
|
for (auto it = this->cbegin(); it != this->cend(); ++it) { |
||||
|
if (it->GetType() == static_cast<u32>(type_id)) { |
||||
|
return std::addressof(*it); |
||||
|
} |
||||
|
} |
||||
|
return nullptr; |
||||
|
} |
||||
|
|
||||
|
const KMemoryRegion* FindByTypeAndAttribute(u32 type_id, u32 attr) const { |
||||
|
for (auto it = this->cbegin(); it != this->cend(); ++it) { |
||||
|
if (it->GetType() == type_id && it->GetAttributes() == attr) { |
||||
|
return std::addressof(*it); |
||||
|
} |
||||
|
} |
||||
|
return nullptr; |
||||
|
} |
||||
|
|
||||
|
const KMemoryRegion* FindFirstDerived(KMemoryRegionType type_id) const { |
||||
|
for (auto it = this->cbegin(); it != this->cend(); it++) { |
||||
|
if (it->IsDerivedFrom(type_id)) { |
||||
|
return std::addressof(*it); |
||||
|
} |
||||
|
} |
||||
|
return nullptr; |
||||
|
} |
||||
|
|
||||
|
const KMemoryRegion* FindLastDerived(KMemoryRegionType type_id) const { |
||||
|
const KMemoryRegion* region = nullptr; |
||||
|
for (auto it = this->begin(); it != this->end(); it++) { |
||||
|
if (it->IsDerivedFrom(type_id)) { |
||||
|
region = std::addressof(*it); |
||||
|
} |
||||
|
} |
||||
|
return region; |
||||
|
} |
||||
|
|
||||
|
DerivedRegionExtents GetDerivedRegionExtents(KMemoryRegionType type_id) const { |
||||
|
DerivedRegionExtents extents; |
||||
|
|
||||
|
ASSERT(extents.first_region == nullptr); |
||||
|
ASSERT(extents.last_region == nullptr); |
||||
|
|
||||
|
for (auto it = this->cbegin(); it != this->cend(); it++) { |
||||
|
if (it->IsDerivedFrom(type_id)) { |
||||
|
if (extents.first_region == nullptr) { |
||||
|
extents.first_region = std::addressof(*it); |
||||
|
} |
||||
|
extents.last_region = std::addressof(*it); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
ASSERT(extents.first_region != nullptr); |
||||
|
ASSERT(extents.last_region != nullptr); |
||||
|
|
||||
|
return extents; |
||||
|
} |
||||
|
|
||||
|
DerivedRegionExtents GetDerivedRegionExtents(u32 type_id) const { |
||||
|
return GetDerivedRegionExtents(static_cast<KMemoryRegionType>(type_id)); |
||||
|
} |
||||
|
|
||||
|
public: |
||||
|
void InsertDirectly(u64 address, u64 last_address, u32 attr = 0, u32 type_id = 0); |
||||
|
bool Insert(u64 address, size_t size, u32 type_id, u32 new_attr = 0, u32 old_attr = 0); |
||||
|
|
||||
|
VAddr GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id); |
||||
|
|
||||
|
VAddr GetRandomAlignedRegionWithGuard(size_t size, size_t alignment, u32 type_id, |
||||
|
size_t guard_size) { |
||||
|
return this->GetRandomAlignedRegion(size + 2 * guard_size, alignment, type_id) + guard_size; |
||||
|
} |
||||
|
|
||||
|
public: |
||||
|
// Iterator accessors. |
||||
|
iterator begin() { |
||||
|
return m_tree.begin(); |
||||
|
} |
||||
|
|
||||
|
const_iterator begin() const { |
||||
|
return m_tree.begin(); |
||||
|
} |
||||
|
|
||||
|
iterator end() { |
||||
|
return m_tree.end(); |
||||
|
} |
||||
|
|
||||
|
const_iterator end() const { |
||||
|
return m_tree.end(); |
||||
|
} |
||||
|
|
||||
|
const_iterator cbegin() const { |
||||
|
return this->begin(); |
||||
|
} |
||||
|
|
||||
|
const_iterator cend() const { |
||||
|
return this->end(); |
||||
|
} |
||||
|
|
||||
|
iterator iterator_to(reference ref) { |
||||
|
return m_tree.iterator_to(ref); |
||||
|
} |
||||
|
|
||||
|
const_iterator iterator_to(const_reference ref) const { |
||||
|
return m_tree.iterator_to(ref); |
||||
|
} |
||||
|
|
||||
|
// Content management. |
||||
|
bool empty() const { |
||||
|
return m_tree.empty(); |
||||
|
} |
||||
|
|
||||
|
reference back() { |
||||
|
return m_tree.back(); |
||||
|
} |
||||
|
|
||||
|
const_reference back() const { |
||||
|
return m_tree.back(); |
||||
|
} |
||||
|
|
||||
|
reference front() { |
||||
|
return m_tree.front(); |
||||
|
} |
||||
|
|
||||
|
const_reference front() const { |
||||
|
return m_tree.front(); |
||||
|
} |
||||
|
|
||||
|
iterator insert(reference ref) { |
||||
|
return m_tree.insert(ref); |
||||
|
} |
||||
|
|
||||
|
iterator erase(iterator it) { |
||||
|
return m_tree.erase(it); |
||||
|
} |
||||
|
|
||||
|
iterator find(const_reference ref) const { |
||||
|
return m_tree.find(ref); |
||||
|
} |
||||
|
|
||||
|
iterator nfind(const_reference ref) const { |
||||
|
return m_tree.nfind(ref); |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
// Fixed-capacity bump allocator for KMemoryRegion nodes. Regions are only ever
// allocated (never individually freed), so no deallocation interface exists.
class KMemoryRegionAllocator final : NonCopyable {
public:
    // Maximum number of regions the layout may ever create.
    static constexpr size_t MaxMemoryRegions = 200;

    constexpr KMemoryRegionAllocator() = default;

    // Constructs a KMemoryRegion in the next free heap slot and returns it.
    // Asserts (rather than failing gracefully) when the heap is exhausted.
    template <typename... Args>
    KMemoryRegion* Allocate(Args&&... args) {
        // Ensure we stay within the bounds of our heap.
        ASSERT(this->num_regions < MaxMemoryRegions);

        // Create the new region.
        // Placement-new over the default-constructed array element; safe because
        // KMemoryRegion is trivially destructible.
        KMemoryRegion* region = std::addressof(this->region_heap[this->num_regions++]);
        new (region) KMemoryRegion(std::forward<Args>(args)...);

        return region;
    }

private:
    // Backing storage; slots are re-initialized via placement new in Allocate().
    std::array<KMemoryRegion, MaxMemoryRegions> region_heap{};
    // Number of slots handed out so far (bump pointer).
    size_t num_regions{};
};
||||
|
|
||||
|
} // namespace Kernel |
||||
@ -0,0 +1,338 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
#pragma once |
||||
|
|
||||
|
#include "common/bit_util.h" |
||||
|
#include "common/common_funcs.h" |
||||
|
#include "common/common_types.h" |
||||
|
|
||||
|
#define ARCH_ARM64 |
||||
|
#define BOARD_NINTENDO_NX |
||||
|
|
||||
|
namespace Kernel { |
||||
|
|
||||
|
// Attribute flag bits OR'd into region type ids (high bits), combining with the
// derivation-encoded type bits (low bits) built by impl::KMemoryRegionTypeValue.
// NOTE(review): names mirror the reference kernel; exact per-flag semantics are
// not evidenced in this header — confirm against the KMemoryLayout users.
enum KMemoryRegionType : u32 {
    KMemoryRegionAttr_CarveoutProtected = 0x04000000,
    KMemoryRegionAttr_DidKernelMap = 0x08000000,
    KMemoryRegionAttr_ShouldKernelMap = 0x10000000,
    KMemoryRegionAttr_UserReadOnly = 0x20000000,
    KMemoryRegionAttr_NoUserMap = 0x40000000,
    KMemoryRegionAttr_LinearMapped = 0x80000000,
};
// Enables bitwise composition of the attribute values above.
DECLARE_ENUM_FLAG_OPERATORS(KMemoryRegionType);
||||
|
|
||||
|
namespace impl { |
||||
|
|
||||
|
// Number of type-id bits consumed by a sparse derivation of n children:
// one shared marker bit plus one selector bit per child.
constexpr size_t BitsForDeriveSparse(size_t n) {
    constexpr size_t marker_bits = 1;
    return marker_bits + n;
}
||||
|
|
||||
|
// Number of type-id bits consumed by a dense derivation of n children.
// Simulates assigning each child a distinct (low, high) bit pair with low < high;
// the answer is one past the highest bit index needed for the n-th pair.
constexpr size_t BitsForDeriveDense(size_t n) {
    size_t low_bit = 0;
    size_t high_bit = 1;
    for (size_t child = 0; child < n - 1; ++child) {
        ++low_bit;
        if (low_bit == high_bit) {
            high_bit += 1;
            low_bit = 0;
        }
    }
    return high_bit + 1;
}
||||
|
|
||||
|
// Builder for memory region type ids. A type id is a bit pattern in which every
// derivation step sets additional bits at or above m_next_bit, so that ancestry
// can be tested with a simple bitwise subset check (see IsAncestorOf).
class KMemoryRegionTypeValue {
public:
    using ValueType = std::underlying_type_t<KMemoryRegionType>;

    constexpr KMemoryRegionTypeValue() = default;

    constexpr operator KMemoryRegionType() const {
        return static_cast<KMemoryRegionType>(m_value);
    }

    constexpr ValueType GetValue() const {
        return m_value;
    }

    // NOTE(review): m_finalized/m_sparse_only/m_dense_only are set below but never
    // read in this implementation; in the reference kernel they guard (via asserts)
    // against further derivation of a sealed value — confirm intended use here.
    constexpr const KMemoryRegionTypeValue& Finalize() {
        m_finalized = true;
        return *this;
    }

    constexpr const KMemoryRegionTypeValue& SetSparseOnly() {
        m_sparse_only = true;
        return *this;
    }

    constexpr const KMemoryRegionTypeValue& SetDenseOnly() {
        m_dense_only = true;
        return *this;
    }

    // ORs attribute bits into this value in place.
    constexpr KMemoryRegionTypeValue& SetAttribute(u32 attr) {
        m_value |= attr;
        return *this;
    }

    // Creates a root type with only bit i set; subsequent derivations allocate
    // bits starting at 'next'.
    constexpr KMemoryRegionTypeValue DeriveInitial(
        size_t i, size_t next = Common::BitSize<ValueType>()) const {
        KMemoryRegionTypeValue new_type = *this;
        new_type.m_value = (ValueType{1} << i);
        new_type.m_next_bit = next;
        return new_type;
    }

    // Copy with extra attribute bits; consumes no derivation bits.
    constexpr KMemoryRegionTypeValue DeriveAttribute(u32 attr) const {
        KMemoryRegionTypeValue new_type = *this;
        new_type.m_value |= attr;
        return new_type;
    }

    // Sets a single marker bit at m_next_bit + ofs and advances the cursor by adv.
    constexpr KMemoryRegionTypeValue DeriveTransition(size_t ofs = 0, size_t adv = 1) const {
        KMemoryRegionTypeValue new_type = *this;
        new_type.m_value |= (ValueType{1} << (m_next_bit + ofs));
        new_type.m_next_bit += adv;
        return new_type;
    }

    // Sparse derivation: sets a shared marker bit plus the unique selector bit
    // for child i of n; consumes ofs + n + 1 derivation bits.
    constexpr KMemoryRegionTypeValue DeriveSparse(size_t ofs, size_t n, size_t i) const {
        KMemoryRegionTypeValue new_type = *this;
        new_type.m_value |= (ValueType{1} << (m_next_bit + ofs));
        new_type.m_value |= (ValueType{1} << (m_next_bit + ofs + 1 + i));
        new_type.m_next_bit += ofs + n + 1;
        return new_type;
    }

    // Dense derivation: encodes child i of n as a distinct (low, high) bit pair
    // (same enumeration as impl::BitsForDeriveDense), consuming
    // BitsForDeriveDense(n) bits regardless of i.
    constexpr KMemoryRegionTypeValue Derive(size_t n, size_t i) const {
        size_t low = 0, high = 1;
        for (size_t j = 0; j < i; ++j) {
            if ((++low) == high) {
                ++high;
                low = 0;
            }
        }

        KMemoryRegionTypeValue new_type = *this;
        new_type.m_value |= (ValueType{1} << (m_next_bit + low));
        new_type.m_value |= (ValueType{1} << (m_next_bit + high));
        new_type.m_next_bit += BitsForDeriveDense(n);
        return new_type;
    }

    // Skips n derivation bits without setting any.
    constexpr KMemoryRegionTypeValue Advance(size_t n) const {
        KMemoryRegionTypeValue new_type = *this;
        new_type.m_next_bit += n;
        return new_type;
    }

    // True when every bit of this value is present in v, i.e. v derives from this.
    constexpr bool IsAncestorOf(ValueType v) const {
        return (m_value | v) == v;
    }

private:
    constexpr KMemoryRegionTypeValue(ValueType v) : m_value(v) {}

private:
    // Accumulated type-id bit pattern.
    ValueType m_value{};
    // Index of the next free bit available to derivations.
    size_t m_next_bit{};
    bool m_finalized{};
    bool m_sparse_only{};
    bool m_dense_only{};
};
||||
|
|
||||
|
} // namespace impl |
||||
|
|
||||
|
// ============================================================================
// Region type id definitions. Each constant is built by deriving from a parent
// type, so ancestry is checkable via bitwise subset tests; the static_asserts
// pin the exact encoded values against the reference kernel's layout.
// ============================================================================

// Root types: bit 0 = kernel-owned regions, bit 1 = DRAM-backed regions.
constexpr auto KMemoryRegionType_None = impl::KMemoryRegionTypeValue();
constexpr auto KMemoryRegionType_Kernel = KMemoryRegionType_None.DeriveInitial(0, 2);
constexpr auto KMemoryRegionType_Dram = KMemoryRegionType_None.DeriveInitial(1, 2);
static_assert(KMemoryRegionType_Kernel.GetValue() == 0x1);
static_assert(KMemoryRegionType_Dram.GetValue() == 0x2);

// Top-level DRAM partitions: kernel carveout, reserved regions, and the heap.
constexpr auto KMemoryRegionType_DramKernelBase =
    KMemoryRegionType_Dram.DeriveSparse(0, 3, 0)
        .SetAttribute(KMemoryRegionAttr_NoUserMap)
        .SetAttribute(KMemoryRegionAttr_CarveoutProtected);
constexpr auto KMemoryRegionType_DramReservedBase = KMemoryRegionType_Dram.DeriveSparse(0, 3, 1);
constexpr auto KMemoryRegionType_DramHeapBase =
    KMemoryRegionType_Dram.DeriveSparse(0, 3, 2).SetAttribute(KMemoryRegionAttr_LinearMapped);
static_assert(KMemoryRegionType_DramKernelBase.GetValue() ==
              (0xE | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_DramReservedBase.GetValue() == (0x16));
static_assert(KMemoryRegionType_DramHeapBase.GetValue() == (0x26 | KMemoryRegionAttr_LinearMapped));

// Kernel carveout sub-regions (code, slab heap, page-table heaps).
constexpr auto KMemoryRegionType_DramKernelCode =
    KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 0);
constexpr auto KMemoryRegionType_DramKernelSlab =
    KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 1);
constexpr auto KMemoryRegionType_DramKernelPtHeap =
    KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 2).SetAttribute(
        KMemoryRegionAttr_LinearMapped);
constexpr auto KMemoryRegionType_DramKernelInitPt =
    KMemoryRegionType_DramKernelBase.DeriveSparse(0, 4, 3).SetAttribute(
        KMemoryRegionAttr_LinearMapped);
static_assert(KMemoryRegionType_DramKernelCode.GetValue() ==
              (0xCE | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_DramKernelSlab.GetValue() ==
              (0x14E | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_DramKernelPtHeap.GetValue() ==
              (0x24E | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap |
               KMemoryRegionAttr_LinearMapped));
static_assert(KMemoryRegionType_DramKernelInitPt.GetValue() ==
              (0x44E | KMemoryRegionAttr_CarveoutProtected | KMemoryRegionAttr_NoUserMap |
               KMemoryRegionAttr_LinearMapped));

// Reserved DRAM sub-regions.
constexpr auto KMemoryRegionType_DramReservedEarly =
    KMemoryRegionType_DramReservedBase.DeriveAttribute(KMemoryRegionAttr_NoUserMap);
static_assert(KMemoryRegionType_DramReservedEarly.GetValue() ==
              (0x16 | KMemoryRegionAttr_NoUserMap));

constexpr auto KMemoryRegionType_KernelTraceBuffer =
    KMemoryRegionType_DramReservedBase.DeriveSparse(0, 3, 0)
        .SetAttribute(KMemoryRegionAttr_LinearMapped)
        .SetAttribute(KMemoryRegionAttr_UserReadOnly);
constexpr auto KMemoryRegionType_OnMemoryBootImage =
    KMemoryRegionType_DramReservedBase.DeriveSparse(0, 3, 1);
constexpr auto KMemoryRegionType_DTB = KMemoryRegionType_DramReservedBase.DeriveSparse(0, 3, 2);
static_assert(KMemoryRegionType_KernelTraceBuffer.GetValue() ==
              (0xD6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_UserReadOnly));
static_assert(KMemoryRegionType_OnMemoryBootImage.GetValue() == 0x156);
static_assert(KMemoryRegionType_DTB.GetValue() == 0x256);

// DRAM heap pool partitioning (management metadata vs. user-visible pools).
constexpr auto KMemoryRegionType_DramPoolPartition =
    KMemoryRegionType_DramHeapBase.DeriveAttribute(KMemoryRegionAttr_NoUserMap);
static_assert(KMemoryRegionType_DramPoolPartition.GetValue() ==
              (0x26 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));

constexpr auto KMemoryRegionType_DramPoolManagement =
    KMemoryRegionType_DramPoolPartition.DeriveTransition(0, 2).DeriveTransition().SetAttribute(
        KMemoryRegionAttr_CarveoutProtected);
constexpr auto KMemoryRegionType_DramUserPool =
    KMemoryRegionType_DramPoolPartition.DeriveTransition(1, 2).DeriveTransition();
static_assert(KMemoryRegionType_DramPoolManagement.GetValue() ==
              (0x166 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap |
               KMemoryRegionAttr_CarveoutProtected));
static_assert(KMemoryRegionType_DramUserPool.GetValue() ==
              (0x1A6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));

// The four user-visible memory pools.
constexpr auto KMemoryRegionType_DramApplicationPool = KMemoryRegionType_DramUserPool.Derive(4, 0);
constexpr auto KMemoryRegionType_DramAppletPool = KMemoryRegionType_DramUserPool.Derive(4, 1);
constexpr auto KMemoryRegionType_DramSystemNonSecurePool =
    KMemoryRegionType_DramUserPool.Derive(4, 2);
constexpr auto KMemoryRegionType_DramSystemPool =
    KMemoryRegionType_DramUserPool.Derive(4, 3).SetAttribute(KMemoryRegionAttr_CarveoutProtected);
static_assert(KMemoryRegionType_DramApplicationPool.GetValue() ==
              (0x7A6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_DramAppletPool.GetValue() ==
              (0xBA6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_DramSystemNonSecurePool.GetValue() ==
              (0xDA6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap));
static_assert(KMemoryRegionType_DramSystemPool.GetValue() ==
              (0x13A6 | KMemoryRegionAttr_LinearMapped | KMemoryRegionAttr_NoUserMap |
               KMemoryRegionAttr_CarveoutProtected));

// Virtual-address-space counterparts of the DRAM regions above.
constexpr auto KMemoryRegionType_VirtualDramHeapBase = KMemoryRegionType_Dram.DeriveSparse(1, 3, 0);
constexpr auto KMemoryRegionType_VirtualDramKernelPtHeap =
    KMemoryRegionType_Dram.DeriveSparse(1, 3, 1);
constexpr auto KMemoryRegionType_VirtualDramKernelTraceBuffer =
    KMemoryRegionType_Dram.DeriveSparse(1, 3, 2);
static_assert(KMemoryRegionType_VirtualDramHeapBase.GetValue() == 0x1A);
static_assert(KMemoryRegionType_VirtualDramKernelPtHeap.GetValue() == 0x2A);
static_assert(KMemoryRegionType_VirtualDramKernelTraceBuffer.GetValue() == 0x4A);

constexpr auto KMemoryRegionType_VirtualDramKernelInitPt =
    KMemoryRegionType_VirtualDramHeapBase.Derive(3, 0);
constexpr auto KMemoryRegionType_VirtualDramPoolManagement =
    KMemoryRegionType_VirtualDramHeapBase.Derive(3, 1);
constexpr auto KMemoryRegionType_VirtualDramUserPool =
    KMemoryRegionType_VirtualDramHeapBase.Derive(3, 2);
static_assert(KMemoryRegionType_VirtualDramKernelInitPt.GetValue() == 0x19A);
static_assert(KMemoryRegionType_VirtualDramPoolManagement.GetValue() == 0x29A);
static_assert(KMemoryRegionType_VirtualDramUserPool.GetValue() == 0x31A);

// NOTE: For unknown reason, the pools are derived out-of-order here. It's worth eventually trying
// to understand why Nintendo made this choice.
// UNUSED: .Derive(6, 0);
// UNUSED: .Derive(6, 1);
constexpr auto KMemoryRegionType_VirtualDramAppletPool =
    KMemoryRegionType_VirtualDramUserPool.Derive(6, 2);
constexpr auto KMemoryRegionType_VirtualDramApplicationPool =
    KMemoryRegionType_VirtualDramUserPool.Derive(6, 3);
constexpr auto KMemoryRegionType_VirtualDramSystemNonSecurePool =
    KMemoryRegionType_VirtualDramUserPool.Derive(6, 4);
constexpr auto KMemoryRegionType_VirtualDramSystemPool =
    KMemoryRegionType_VirtualDramUserPool.Derive(6, 5);
static_assert(KMemoryRegionType_VirtualDramAppletPool.GetValue() == 0x1B1A);
static_assert(KMemoryRegionType_VirtualDramApplicationPool.GetValue() == 0x271A);
static_assert(KMemoryRegionType_VirtualDramSystemNonSecurePool.GetValue() == 0x2B1A);
static_assert(KMemoryRegionType_VirtualDramSystemPool.GetValue() == 0x331A);

// Device region bases; the arch/board includes below derive from these.
// Note: the two bases intentionally share value 0x5 — they differ only in their
// sparse/dense derivation mode.
constexpr auto KMemoryRegionType_ArchDeviceBase =
    KMemoryRegionType_Kernel.DeriveTransition(0, 1).SetSparseOnly();
constexpr auto KMemoryRegionType_BoardDeviceBase =
    KMemoryRegionType_Kernel.DeriveTransition(0, 2).SetDenseOnly();
static_assert(KMemoryRegionType_ArchDeviceBase.GetValue() == 0x5);
static_assert(KMemoryRegionType_BoardDeviceBase.GetValue() == 0x5);

// Architecture-specific device regions.
#if defined(ARCH_ARM64)
#include "core/hle/kernel/arch/arm64/k_memory_region_device_types.inc"
#elif defined(ARCH_ARM)
#error "Unimplemented"
#else
// Default to no architecture devices.
constexpr auto NumArchitectureDeviceRegions = 0;
#endif
static_assert(NumArchitectureDeviceRegions >= 0);

// Board-specific device regions.
#if defined(BOARD_NINTENDO_NX)
#include "core/hle/kernel/board/nintendo/nx/k_memory_region_device_types.inc"
#else
// Default to no board devices.
constexpr auto NumBoardDeviceRegions = 0;
#endif
static_assert(NumBoardDeviceRegions >= 0);

// Kernel virtual regions (code, stacks, miscellaneous mappings, slab heap).
constexpr auto KMemoryRegionType_KernelCode = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 0);
constexpr auto KMemoryRegionType_KernelStack = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 1);
constexpr auto KMemoryRegionType_KernelMisc = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 2);
constexpr auto KMemoryRegionType_KernelSlab = KMemoryRegionType_Kernel.DeriveSparse(1, 4, 3);
static_assert(KMemoryRegionType_KernelCode.GetValue() == 0x19);
static_assert(KMemoryRegionType_KernelStack.GetValue() == 0x29);
static_assert(KMemoryRegionType_KernelMisc.GetValue() == 0x49);
static_assert(KMemoryRegionType_KernelSlab.GetValue() == 0x89);

constexpr auto KMemoryRegionType_KernelMiscDerivedBase =
    KMemoryRegionType_KernelMisc.DeriveTransition();
static_assert(KMemoryRegionType_KernelMiscDerivedBase.GetValue() == 0x149);

// Kernel misc sub-regions (stacks and device mappings).
// UNUSED: .Derive(7, 0);
constexpr auto KMemoryRegionType_KernelMiscMainStack =
    KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 1);
constexpr auto KMemoryRegionType_KernelMiscMappedDevice =
    KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 2);
constexpr auto KMemoryRegionType_KernelMiscExceptionStack =
    KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 3);
constexpr auto KMemoryRegionType_KernelMiscUnknownDebug =
    KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 4);
// UNUSED: .Derive(7, 5);
constexpr auto KMemoryRegionType_KernelMiscIdleStack =
    KMemoryRegionType_KernelMiscDerivedBase.Derive(7, 6);
static_assert(KMemoryRegionType_KernelMiscMainStack.GetValue() == 0xB49);
static_assert(KMemoryRegionType_KernelMiscMappedDevice.GetValue() == 0xD49);
static_assert(KMemoryRegionType_KernelMiscExceptionStack.GetValue() == 0x1349);
static_assert(KMemoryRegionType_KernelMiscUnknownDebug.GetValue() == 0x1549);
static_assert(KMemoryRegionType_KernelMiscIdleStack.GetValue() == 0x2349);

// Temporary kernel mapping region.
constexpr auto KMemoryRegionType_KernelTemp = KMemoryRegionType_Kernel.Advance(2).Derive(2, 0);
static_assert(KMemoryRegionType_KernelTemp.GetValue() == 0x31);
||||
|
|
||||
|
// Maps a physical region's type id to the type used for its virtual linear
// mapping. Specific kernel regions get dedicated virtual types; everything
// else falls back to the generic DRAM type.
constexpr KMemoryRegionType GetTypeForVirtualLinearMapping(u32 type_id) {
    if (KMemoryRegionType_KernelTraceBuffer.IsAncestorOf(type_id)) {
        return KMemoryRegionType_VirtualDramKernelTraceBuffer;
    }
    if (KMemoryRegionType_DramKernelPtHeap.IsAncestorOf(type_id)) {
        return KMemoryRegionType_VirtualDramKernelPtHeap;
    }
    return KMemoryRegionType_Dram;
}
||||
|
|
||||
|
} // namespace Kernel |
||||
@ -1,42 +0,0 @@ |
|||||
// Copyright 2021 yuzu Emulator Project
|
|
||||
// Licensed under GPLv2 or any later version
|
|
||||
// Refer to the license.txt file included.
|
|
||||
|
|
||||
#include <random>
|
|
||||
|
|
||||
#include "core/hle/kernel/k_system_control.h"
|
|
||||
|
|
||||
namespace Kernel { |
|
||||
|
|
||||
namespace { |
|
||||
template <typename F> |
|
||||
u64 GenerateUniformRange(u64 min, u64 max, F f) { |
|
||||
// Handle the case where the difference is too large to represent.
|
|
||||
if (max == std::numeric_limits<u64>::max() && min == std::numeric_limits<u64>::min()) { |
|
||||
return f(); |
|
||||
} |
|
||||
|
|
||||
// Iterate until we get a value in range.
|
|
||||
const u64 range_size = ((max + 1) - min); |
|
||||
const u64 effective_max = (std::numeric_limits<u64>::max() / range_size) * range_size; |
|
||||
while (true) { |
|
||||
if (const u64 rnd = f(); rnd < effective_max) { |
|
||||
return min + (rnd % range_size); |
|
||||
} |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
} // Anonymous namespace
|
|
||||
|
|
||||
// Returns a uniformly distributed random 64-bit value.
u64 KSystemControl::GenerateRandomU64() {
    // Seed a Mersenne Twister once from the system entropy source.
    static std::random_device device;
    static std::mt19937 gen(device());
    // Cover the full [0, u64 max] range. The previous lower bound of 1 meant 0
    // could never be produced, subtly biasing GenerateRandomRange's reduction.
    static std::uniform_int_distribution<u64> distribution(0, std::numeric_limits<u64>::max());
    return distribution(gen);
}
|
||||
|
|
||||
// Returns a uniformly distributed random value in [min, max] (bounds inclusive),
// delegating bias-free reduction to GenerateUniformRange.
u64 KSystemControl::GenerateRandomRange(u64 min, u64 max) {
    return GenerateUniformRange(min, max, GenerateRandomU64);
}
|
||||
|
|
||||
} // namespace Kernel
|
|
||||
@ -0,0 +1,12 @@ |
|||||
|
// Copyright 2021 yuzu Emulator Project |
||||
|
// Licensed under GPLv2 or any later version |
||||
|
// Refer to the license.txt file included. |
||||
|
|
||||
|
#pragma once |
||||
|
|
||||
|
namespace Kernel { |
||||
|
|
||||
|
// Whether kernel trace (KTrace) support is enabled; kept disabled here.
constexpr bool IsKTraceEnabled = false;
// DRAM carved out for the kernel trace buffer: 16 MiB when enabled, 0 otherwise.
// NOTE(review): std::size_t is used without a visible <cstddef> include — relies
// on a transitive include; confirm.
constexpr std::size_t KTraceBufferSize = IsKTraceEnabled ? 16 * 1024 * 1024 : 0;
||||
|
|
||||
|
} // namespace Kernel |
||||
Write
Preview
Loading…
Cancel
Save
Reference in new issue