path: root/src/core/hle/kernel/k_page_table.h

// SPDX-FileCopyrightText: Copyright 2020 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#pragma once

#include <memory>

#include "common/common_funcs.h"
#include "common/common_types.h"
#include "common/page_table.h"
#include "core/file_sys/program_metadata.h"
#include "core/hle/kernel/k_dynamic_resource_manager.h"
#include "core/hle/kernel/k_light_lock.h"
#include "core/hle/kernel/k_memory_block.h"
#include "core/hle/kernel/k_memory_block_manager.h"
#include "core/hle/kernel/k_memory_layout.h"
#include "core/hle/kernel/k_memory_manager.h"
#include "core/hle/result.h"

namespace Core {
class System;
}

namespace Kernel {

class KMemoryBlockManager;

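// KPageTable models a single process' virtual address space for the emulated
// kernel: it tracks the layout regions (code, alias, heap, stack, kernel-map),
// keeps per-block state/permission/attribute bookkeeping via
// KMemoryBlockManager, and carries out the map/unmap and permission-change
// operations requested through the SVC layer.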
class KPageTable final {
public:
    enum class ICacheInvalidationStrategy : u32 { InvalidateRange, InvalidateAll };

    YUZU_NON_COPYABLE(KPageTable);
    YUZU_NON_MOVEABLE(KPageTable);

    explicit KPageTable(Core::System& system_);
    ~KPageTable();

    Result InitializeForProcess(FileSys::ProgramAddressSpaceType as_type, bool enable_aslr,
                                VAddr code_addr, std::size_t code_size,
                                KMemoryBlockSlabManager* mem_block_slab_manager,
                                KMemoryManager::Pool pool);

    void Finalize();

    Result MapProcessCode(VAddr addr, std::size_t pages_count, KMemoryState state,
                          KMemoryPermission perm);
    Result MapCodeMemory(VAddr dst_address, VAddr src_address, std::size_t size);
    Result UnmapCodeMemory(VAddr dst_address, VAddr src_address, std::size_t size,
                           ICacheInvalidationStrategy icache_invalidation_strategy);
    Result UnmapProcessMemory(VAddr dst_addr, std::size_t size, KPageTable& src_page_table,
                              VAddr src_addr);
    Result MapPhysicalMemory(VAddr addr, std::size_t size);
    Result UnmapPhysicalMemory(VAddr addr, std::size_t size);
    Result MapMemory(VAddr dst_addr, VAddr src_addr, std::size_t size);
    Result UnmapMemory(VAddr dst_addr, VAddr src_addr, std::size_t size);
    Result MapPages(VAddr addr, KPageGroup& page_linked_list, KMemoryState state,
                    KMemoryPermission perm);
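    // Convenience overload: the kernel chooses the destination address. It
    // constrains the search to the region associated with `state`, treats
    // `phys_addr` as a valid physical address to map, and defers to the private
    // MapPages() overload, writing the chosen virtual address to `out_addr`.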
    Result MapPages(VAddr* out_addr, std::size_t num_pages, std::size_t alignment, PAddr phys_addr,
                    KMemoryState state, KMemoryPermission perm) {
        return this->MapPages(out_addr, num_pages, alignment, phys_addr, true,
                              this->GetRegionAddress(state), this->GetRegionSize(state) / PageSize,
                              state, perm);
    }
    Result UnmapPages(VAddr addr, KPageGroup& page_linked_list, KMemoryState state);
    Result UnmapPages(VAddr address, std::size_t num_pages, KMemoryState state);
    Result SetProcessMemoryPermission(VAddr addr, std::size_t size, Svc::MemoryPermission svc_perm);
    KMemoryInfo QueryInfo(VAddr addr);
    Result SetMemoryPermission(VAddr addr, std::size_t size, Svc::MemoryPermission perm);
    Result SetMemoryAttribute(VAddr addr, std::size_t size, u32 mask, u32 attr);
    Result SetMaxHeapSize(std::size_t size);
    Result SetHeapSize(VAddr* out, std::size_t size);
    ResultVal<VAddr> AllocateAndMapMemory(std::size_t needed_num_pages, std::size_t align,
                                          bool is_map_only, VAddr region_start,
                                          std::size_t region_num_pages, KMemoryState state,
                                          KMemoryPermission perm, PAddr map_addr = 0);
    Result UnlockForDeviceAddressSpace(VAddr addr, std::size_t size);
    Result LockForCodeMemory(KPageGroup* out, VAddr addr, std::size_t size);
    Result UnlockForCodeMemory(VAddr addr, std::size_t size, const KPageGroup& pg);
    Result MakeAndOpenPageGroup(KPageGroup* out, VAddr address, size_t num_pages,
                                KMemoryState state_mask, KMemoryState state,
                                KMemoryPermission perm_mask, KMemoryPermission perm,
                                KMemoryAttribute attr_mask, KMemoryAttribute attr);

    Common::PageTable& PageTableImpl() {
        return page_table_impl;
    }

    const Common::PageTable& PageTableImpl() const {
        return page_table_impl;
    }

    bool CanContain(VAddr addr, std::size_t size, KMemoryState state) const;

private:
    enum class OperationType : u32 {
        Map,
        MapGroup,
        Unmap,
        ChangePermissions,
        ChangePermissionsAndRefresh,
    };

    static constexpr KMemoryAttribute DefaultMemoryIgnoreAttr =
        KMemoryAttribute::IpcLocked | KMemoryAttribute::DeviceShared;

    Result MapPages(VAddr addr, const KPageGroup& page_linked_list, KMemoryPermission perm);
    Result MapPages(VAddr* out_addr, std::size_t num_pages, std::size_t alignment, PAddr phys_addr,
                    bool is_pa_valid, VAddr region_start, std::size_t region_num_pages,
                    KMemoryState state, KMemoryPermission perm);
    Result UnmapPages(VAddr addr, const KPageGroup& page_linked_list);
    bool IsRegionContiguous(VAddr addr, u64 size) const;
    void AddRegionToPages(VAddr start, std::size_t num_pages, KPageGroup& page_linked_list);
    KMemoryInfo QueryInfoImpl(VAddr addr);
    VAddr AllocateVirtualMemory(VAddr start, std::size_t region_num_pages, u64 needed_num_pages,
                                std::size_t align);
    Result Operate(VAddr addr, std::size_t num_pages, const KPageGroup& page_group,
                   OperationType operation);
    Result Operate(VAddr addr, std::size_t num_pages, KMemoryPermission perm,
                   OperationType operation, PAddr map_addr = 0);
    VAddr GetRegionAddress(KMemoryState state) const;
    std::size_t GetRegionSize(KMemoryState state) const;

    VAddr FindFreeArea(VAddr region_start, std::size_t region_num_pages, std::size_t num_pages,
                       std::size_t alignment, std::size_t offset, std::size_t guard_pages);

    Result CheckMemoryStateContiguous(std::size_t* out_blocks_needed, VAddr addr, std::size_t size,
                                      KMemoryState state_mask, KMemoryState state,
                                      KMemoryPermission perm_mask, KMemoryPermission perm,
                                      KMemoryAttribute attr_mask, KMemoryAttribute attr) const;
    Result CheckMemoryStateContiguous(VAddr addr, std::size_t size, KMemoryState state_mask,
                                      KMemoryState state, KMemoryPermission perm_mask,
                                      KMemoryPermission perm, KMemoryAttribute attr_mask,
                                      KMemoryAttribute attr) const {
        return this->CheckMemoryStateContiguous(nullptr, addr, size, state_mask, state, perm_mask,
                                                perm, attr_mask, attr);
    }

    Result CheckMemoryState(const KMemoryInfo& info, KMemoryState state_mask, KMemoryState state,
                            KMemoryPermission perm_mask, KMemoryPermission perm,
                            KMemoryAttribute attr_mask, KMemoryAttribute attr) const;
    Result CheckMemoryState(KMemoryState* out_state, KMemoryPermission* out_perm,
                            KMemoryAttribute* out_attr, std::size_t* out_blocks_needed, VAddr addr,
                            std::size_t size, KMemoryState state_mask, KMemoryState state,
                            KMemoryPermission perm_mask, KMemoryPermission perm,
                            KMemoryAttribute attr_mask, KMemoryAttribute attr,
                            KMemoryAttribute ignore_attr = DefaultMemoryIgnoreAttr) const;
    Result CheckMemoryState(std::size_t* out_blocks_needed, VAddr addr, std::size_t size,
                            KMemoryState state_mask, KMemoryState state,
                            KMemoryPermission perm_mask, KMemoryPermission perm,
                            KMemoryAttribute attr_mask, KMemoryAttribute attr,
                            KMemoryAttribute ignore_attr = DefaultMemoryIgnoreAttr) const {
        return CheckMemoryState(nullptr, nullptr, nullptr, out_blocks_needed, addr, size,
                                state_mask, state, perm_mask, perm, attr_mask, attr, ignore_attr);
    }
    Result CheckMemoryState(VAddr addr, std::size_t size, KMemoryState state_mask,
                            KMemoryState state, KMemoryPermission perm_mask, KMemoryPermission perm,
                            KMemoryAttribute attr_mask, KMemoryAttribute attr,
                            KMemoryAttribute ignore_attr = DefaultMemoryIgnoreAttr) const {
        return this->CheckMemoryState(nullptr, addr, size, state_mask, state, perm_mask, perm,
                                      attr_mask, attr, ignore_attr);
    }
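    // The CheckMemoryState family follows the kernel's usual mask/value pattern:
    // a range passes when every block it spans satisfies
    // (block.state & state_mask) == state, (block.perm & perm_mask) == perm and
    // (block.attribute & attr_mask) == attr, with ignore_attr bits excluded from
    // the attribute comparison. A hypothetical caller verifying that a range is
    // ordinary read/write heap memory might look like the sketch below; the
    // enumerators are illustrative, see k_memory_block.h for the real values.
    //
    //   R_TRY(this->CheckMemoryState(addr, size,
    //                                KMemoryState::All, KMemoryState::Normal,
    //                                KMemoryPermission::All,
    //                                KMemoryPermission::UserReadWrite,
    //                                KMemoryAttribute::All, KMemoryAttribute::None));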

    Result LockMemoryAndOpen(KPageGroup* out_pg, PAddr* out_paddr, VAddr addr, size_t size,
                             KMemoryState state_mask, KMemoryState state,
                             KMemoryPermission perm_mask, KMemoryPermission perm,
                             KMemoryAttribute attr_mask, KMemoryAttribute attr,
                             KMemoryPermission new_perm, KMemoryAttribute lock_attr);
    Result UnlockMemory(VAddr addr, size_t size, KMemoryState state_mask, KMemoryState state,
                        KMemoryPermission perm_mask, KMemoryPermission perm,
                        KMemoryAttribute attr_mask, KMemoryAttribute attr,
                        KMemoryPermission new_perm, KMemoryAttribute lock_attr,
                        const KPageGroup* pg);

    Result MakePageGroup(KPageGroup& pg, VAddr addr, size_t num_pages);
    bool IsValidPageGroup(const KPageGroup& pg, VAddr addr, size_t num_pages);

    bool IsLockedByCurrentThread() const {
        return general_lock.IsLockedByCurrentThread();
    }

    bool IsHeapPhysicalAddress(const KMemoryLayout& layout, PAddr phys_addr) {
        ASSERT(this->IsLockedByCurrentThread());

        return layout.IsHeapPhysicalAddress(cached_physical_heap_region, phys_addr);
    }

    bool GetPhysicalAddressLocked(PAddr* out, VAddr virt_addr) const {
        ASSERT(this->IsLockedByCurrentThread());

        *out = GetPhysicalAddr(virt_addr);

        return *out != 0;
    }

    mutable KLightLock general_lock;
    mutable KLightLock map_physical_memory_lock;

public:
    constexpr VAddr GetAddressSpaceStart() const {
        return address_space_start;
    }
    constexpr VAddr GetAddressSpaceEnd() const {
        return address_space_end;
    }
    constexpr std::size_t GetAddressSpaceSize() const {
        return address_space_end - address_space_start;
    }
    constexpr VAddr GetHeapRegionStart() const {
        return heap_region_start;
    }
    constexpr VAddr GetHeapRegionEnd() const {
        return heap_region_end;
    }
    constexpr std::size_t GetHeapRegionSize() const {
        return heap_region_end - heap_region_start;
    }
    constexpr VAddr GetAliasRegionStart() const {
        return alias_region_start;
    }
    constexpr VAddr GetAliasRegionEnd() const {
        return alias_region_end;
    }
    constexpr std::size_t GetAliasRegionSize() const {
        return alias_region_end - alias_region_start;
    }
    constexpr VAddr GetStackRegionStart() const {
        return stack_region_start;
    }
    constexpr VAddr GetStackRegionEnd() const {
        return stack_region_end;
    }
    constexpr std::size_t GetStackRegionSize() const {
        return stack_region_end - stack_region_start;
    }
    constexpr VAddr GetKernelMapRegionStart() const {
        return kernel_map_region_start;
    }
    constexpr VAddr GetKernelMapRegionEnd() const {
        return kernel_map_region_end;
    }
    constexpr VAddr GetCodeRegionStart() const {
        return code_region_start;
    }
    constexpr VAddr GetCodeRegionEnd() const {
        return code_region_end;
    }
    constexpr VAddr GetAliasCodeRegionStart() const {
        return alias_code_region_start;
    }
    constexpr VAddr GetAliasCodeRegionSize() const {
        return alias_code_region_end - alias_code_region_start;
    }
    std::size_t GetNormalMemorySize() {
        KScopedLightLock lk(general_lock);
        return GetHeapSize() + mapped_physical_memory_size;
    }
    constexpr std::size_t GetAddressSpaceWidth() const {
        return address_space_width;
    }
    constexpr std::size_t GetHeapSize() const {
        return current_heap_end - heap_region_start;
    }
    constexpr bool IsInsideAddressSpace(VAddr address, std::size_t size) const {
        return address_space_start <= address && address + size - 1 <= address_space_end - 1;
    }
    constexpr bool IsOutsideAliasRegion(VAddr address, std::size_t size) const {
        return alias_region_start > address || address + size - 1 > alias_region_end - 1;
    }
    constexpr bool IsOutsideStackRegion(VAddr address, std::size_t size) const {
        return stack_region_start > address || address + size - 1 > stack_region_end - 1;
    }
    constexpr bool IsInvalidRegion(VAddr address, std::size_t size) const {
        return address + size - 1 > GetAliasCodeRegionStart() + GetAliasCodeRegionSize() - 1;
    }
    constexpr bool IsInsideHeapRegion(VAddr address, std::size_t size) const {
        return address + size > heap_region_start && heap_region_end > address;
    }
    constexpr bool IsInsideAliasRegion(VAddr address, std::size_t size) const {
        return address + size > alias_region_start && alias_region_end > address;
    }
    constexpr bool IsOutsideASLRRegion(VAddr address, std::size_t size) const {
        if (IsInvalidRegion(address, size)) {
            return true;
        }
        if (IsInsideHeapRegion(address, size)) {
            return true;
        }
        if (IsInsideAliasRegion(address, size)) {
            return true;
        }
        return false;
    }
    constexpr bool IsInsideASLRRegion(VAddr address, std::size_t size) const {
        return !IsOutsideASLRRegion(address, size);
    }
    constexpr std::size_t GetNumGuardPages() const {
        return IsKernel() ? 1 : 4;
    }
    PAddr GetPhysicalAddr(VAddr addr) const {
        const auto backing_addr = page_table_impl.backing_addr[addr >> PageBits];
        ASSERT(backing_addr);
        return backing_addr + addr;
    }
    constexpr bool Contains(VAddr addr) const {
        return address_space_start <= addr && addr <= address_space_end - 1;
    }
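    // Note: the `addr < addr + size` term below rejects zero-length ranges and
    // ranges whose end address computation wraps around.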
    constexpr bool Contains(VAddr addr, std::size_t size) const {
        return address_space_start <= addr && addr < addr + size &&
               addr + size - 1 <= address_space_end - 1;
    }

private:
    constexpr bool IsKernel() const {
        return is_kernel;
    }
    constexpr bool IsAslrEnabled() const {
        return is_aslr_enabled;
    }

    constexpr bool ContainsPages(VAddr addr, std::size_t num_pages) const {
        return (address_space_start <= addr) &&
               (num_pages <= (address_space_end - address_space_start) / PageSize) &&
               (addr + num_pages * PageSize - 1 <= address_space_end - 1);
    }

private:
    VAddr address_space_start{};
    VAddr address_space_end{};
    VAddr heap_region_start{};
    VAddr heap_region_end{};
    VAddr current_heap_end{};
    VAddr alias_region_start{};
    VAddr alias_region_end{};
    VAddr stack_region_start{};
    VAddr stack_region_end{};
    VAddr kernel_map_region_start{};
    VAddr kernel_map_region_end{};
    VAddr code_region_start{};
    VAddr code_region_end{};
    VAddr alias_code_region_start{};
    VAddr alias_code_region_end{};

    std::size_t mapped_physical_memory_size{};
    std::size_t max_heap_size{};
    std::size_t max_physical_memory_size{};
    std::size_t address_space_width{};

    KMemoryBlockManager memory_block_manager;

    bool is_kernel{};
    bool is_aslr_enabled{};

    KMemoryBlockSlabManager* memory_block_slab_manager{};

    u32 heap_fill_value{};
    const KMemoryRegion* cached_physical_heap_region{};

    KMemoryManager::Pool memory_pool{KMemoryManager::Pool::Application};
    KMemoryManager::Direction allocation_option{KMemoryManager::Direction::FromFront};

    Common::PageTable page_table_impl;

    Core::System& system;
};
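
// A minimal usage sketch (illustrative only; the argument values and enumerators
// below are assumptions, not taken from this header): a process-like caller
// initializes its table, maps code pages, and later queries a range roughly as
// follows.
//
//   KPageTable table{system};
//   R_TRY(table.InitializeForProcess(FileSys::ProgramAddressSpaceType::Is39Bit,
//                                    /*enable_aslr=*/true, code_addr, code_size,
//                                    std::addressof(slab_manager),
//                                    KMemoryManager::Pool::Application));
//   R_TRY(table.MapProcessCode(table.GetCodeRegionStart(), code_size / PageSize,
//                              KMemoryState::Code, KMemoryPermission::KernelRead));
//   const KMemoryInfo info = table.QueryInfo(table.GetCodeRegionStart());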

} // namespace Kernel