SMMU: Simplify and remove old code.
@@ -38,7 +38,6 @@ public:
     DAddr Allocate(size_t size);
     void AllocateFixed(DAddr start, size_t size);
-    DAddr AllocatePinned(size_t size);
     void Free(DAddr start, size_t size);
 
     void Map(DAddr address, VAddr virtual_address, size_t size, size_t process_id);

@@ -108,7 +107,6 @@ public:
     static constexpr size_t AS_BITS = Traits::device_virtual_bits;
 
 private:
-    static constexpr bool supports_pinning = Traits::supports_pinning;
     static constexpr size_t device_virtual_bits = Traits::device_virtual_bits;
     static constexpr size_t device_as_size = 1ULL << device_virtual_bits;
     static constexpr size_t physical_max_bits = 33;

@@ -167,28 +165,28 @@ private:
     }
 
     void InsertCPUBacking(size_t page_index, VAddr address, size_t process_id) {
-        cpu_backing_address[page_index] = address | (process_id << page_index);
+        cpu_backing_address[page_index] = address | (process_id << process_id_start_bit);
     }
 
     Common::VirtualBuffer<VAddr> cpu_backing_address;
-    static constexpr size_t subentries = 4;
+    static constexpr size_t subentries = 8 / sizeof(u8);
     static constexpr size_t subentries_mask = subentries - 1;
     class CounterEntry final {
     public:
         CounterEntry() = default;
 
-        std::atomic_uint16_t& Count(std::size_t page) {
+        std::atomic_uint8_t& Count(std::size_t page) {
             return values[page & subentries_mask];
         }
 
-        const std::atomic_uint16_t& Count(std::size_t page) const {
+        const std::atomic_uint8_t& Count(std::size_t page) const {
             return values[page & subentries_mask];
         }
 
     private:
-        std::array<std::atomic_uint16_t, subentries> values{};
+        std::array<std::atomic_uint8_t, subentries> values{};
     };
-    static_assert(sizeof(CounterEntry) == subentries * sizeof(u16),
+    static_assert(sizeof(CounterEntry) == subentries * sizeof(u8),
                   "CounterEntry should be 8 bytes!");
 
     static constexpr size_t num_counter_entries =

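The CounterEntry rework above keeps each entry at 8 bytes but packs eight 8-bit counters instead of four 16-bit ones, so a single entry now tracks eight consecutive pages; lookups use page >> 3 for the entry and page & 7 for the slot, which is what the later UpdatePagesCachedCount hunk switches to. A minimal self-contained sketch of that packing (illustrative names, not the project's types):

    #include <array>
    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Eight one-byte atomic counters packed into a single 8-byte entry.
    struct CounterEntrySketch {
        static constexpr std::size_t subentries = 8 / sizeof(std::uint8_t); // 8
        static constexpr std::size_t subentries_mask = subentries - 1;      // 0b111

        std::atomic_uint8_t& Count(std::size_t page) {
            // The low three bits of the page number select the slot within the entry.
            return values[page & subentries_mask];
        }

        std::array<std::atomic_uint8_t, subentries> values{};
    };
    static_assert(sizeof(CounterEntrySketch) == 8, "entry should stay 8 bytes");
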
@@ -131,81 +131,31 @@ struct EmptyAllocator {
 
 template <typename DTraits>
 struct DeviceMemoryManagerAllocator {
-    static constexpr bool supports_pinning = DTraits::supports_pinning;
     static constexpr size_t device_virtual_bits = DTraits::device_virtual_bits;
-    static constexpr size_t pin_bits = 32;
     static constexpr DAddr first_address = 1ULL << Memory::YUZU_PAGEBITS;
-    static constexpr DAddr max_pin_area = supports_pinning ? 1ULL << pin_bits : first_address;
     static constexpr DAddr max_device_area = 1ULL << device_virtual_bits;
 
-    DeviceMemoryManagerAllocator()
-        : pin_allocator(first_address),
-          main_allocator(supports_pinning ? 1ULL << pin_bits : first_address) {}
+    DeviceMemoryManagerAllocator() : main_allocator(first_address) {}
 
-    std::conditional_t<supports_pinning, Common::FlatAllocator<DAddr, 0, pin_bits>, EmptyAllocator>
-        pin_allocator;
     Common::FlatAllocator<DAddr, 0, device_virtual_bits> main_allocator;
     MultiAddressContainer multi_dev_address;
 
     /// Returns true when vaddr -> vaddr+size is fully contained in the buffer
-    template <bool pin_area>
     [[nodiscard]] bool IsInBounds(VAddr addr, u64 size) const noexcept {
-        if constexpr (pin_area) {
-            return addr >= 0 && addr + size <= max_pin_area;
-        } else {
-            return addr >= max_pin_area && addr + size <= max_device_area;
-        }
+        return addr >= 0 && addr + size <= max_device_area;
     }
 
     DAddr Allocate(size_t size) {
         return main_allocator.Allocate(size);
     }
 
-    DAddr AllocatePinned(size_t size) {
-        if constexpr (supports_pinning) {
-            return pin_allocator.Allocate(size);
-        } else {
-            return DAddr{};
-        }
-    }
-
-    void DoInRange(DAddr address, size_t size, auto pin_func, auto main_func) {
-        if (IsInBounds<true>(address, size)) {
-            pin_func(address, size);
-            return;
-        }
-        if (IsInBounds<false>(address, size)) {
-            main_func(address, size);
-            return;
-        }
-        DAddr end_size = address + size - max_pin_area;
-        DAddr end_size2 = max_pin_area - address;
-        pin_func(address, end_size2);
-        main_func(max_pin_area, end_size);
-    }
-
     void AllocateFixed(DAddr b_address, size_t b_size) {
-        if constexpr (supports_pinning) {
-            DoInRange(
-                b_address, b_size,
-                [this](DAddr address, size_t size) { pin_allocator.AllocateFixed(address, size); },
-                [this](DAddr address, size_t size) {
-                    main_allocator.AllocateFixed(address, size);
-                });
-        } else {
-            main_allocator.AllocateFixed(b_address, b_size);
-        }
+        main_allocator.AllocateFixed(b_address, b_size);
     }
 
     void Free(DAddr b_address, size_t b_size) {
-        if constexpr (supports_pinning) {
-            DoInRange(
-                b_address, b_size,
-                [this](DAddr address, size_t size) { pin_allocator.Free(address, size); },
-                [this](DAddr address, size_t size) { main_allocator.Free(address, size); });
-        } else {
-            main_allocator.Free(b_address, b_size);
-        }
+        main_allocator.Free(b_address, b_size);
     }
 };
 

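With the pin area removed, every path in the allocator above funnels into the single main_allocator covering the whole device address space, and allocations still start at first_address, one page above zero. A rough standalone sketch of the simplified shape, with a stub standing in for Common::FlatAllocator (only the call surface the diff relies on is assumed: Allocate, AllocateFixed, Free):

    #include <cstddef>
    #include <cstdint>

    using DAddr = std::uint64_t;

    // Stub with the same call surface; the real Common::FlatAllocator lives in the project.
    struct StubFlatAllocator {
        explicit StubFlatAllocator(DAddr first) : next{first} {}
        DAddr Allocate(std::size_t size) { const DAddr addr = next; next += size; return addr; }
        void AllocateFixed(DAddr, std::size_t) {}
        void Free(DAddr, std::size_t) {}
        DAddr next;
    };

    // One allocator, one address range: [first_address, max_device_area).
    struct AllocatorSketch {
        static constexpr std::size_t device_virtual_bits = 34; // value used by MaxwellDeviceTraits
        static constexpr DAddr first_address = 1ULL << 12;     // assumes the 4 KiB page size
        static constexpr DAddr max_device_area = 1ULL << device_virtual_bits;

        AllocatorSketch() : main_allocator(first_address) {}

        DAddr Allocate(std::size_t size) { return main_allocator.Allocate(size); }
        void AllocateFixed(DAddr addr, std::size_t size) { main_allocator.AllocateFixed(addr, size); }
        void Free(DAddr addr, std::size_t size) { main_allocator.Free(addr, size); }

        StubFlatAllocator main_allocator;
    };
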
@@ -237,11 +187,6 @@ void DeviceMemoryManager<Traits>::AllocateFixed(DAddr start, size_t size) {
     return impl->AllocateFixed(start, size);
 }
 
-template <typename Traits>
-DAddr DeviceMemoryManager<Traits>::AllocatePinned(size_t size) {
-    return impl->AllocatePinned(size);
-}
-
 template <typename Traits>
 void DeviceMemoryManager<Traits>::Free(DAddr start, size_t size) {
     impl->Free(start, size);

@@ -523,10 +468,10 @@ void DeviceMemoryManager<Traits>::UpdatePagesCachedCount(DAddr addr, size_t size
     size_t vpage = base_vaddress >> Memory::YUZU_PAGEBITS;
     auto* memory_interface = registered_processes[process_id];
     for (; page != page_end; ++page) {
-        std::atomic_uint16_t& count = cached_pages->at(page >> 2).Count(page);
+        std::atomic_uint8_t& count = cached_pages->at(page >> 3).Count(page);
 
         if (delta > 0) {
-            ASSERT_MSG(count.load(std::memory_order::relaxed) < std::numeric_limits<u16>::max(),
+            ASSERT_MSG(count.load(std::memory_order::relaxed) < std::numeric_limits<u8>::max(),
                        "Count may overflow!");
         } else if (delta < 0) {
             ASSERT_MSG(count.load(std::memory_order::relaxed) > 0, "Count may underflow!");

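The index arithmetic tracks the new entry layout from the header: with eight sub-entries per CounterEntry the entry index becomes page >> 3 and the slot page & 7, where the old layout used page >> 2 and page & 3. A quick worked example with a hypothetical page number:

    // page = 1234
    // entry index: 1234 >> 3 = 154   (old layout: 1234 >> 2 = 308)
    // slot in entry: 1234 & 7 = 2    (old layout: 1234 & 3 = 2)
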
@@ -535,7 +480,7 @@ void DeviceMemoryManager<Traits>::UpdatePagesCachedCount(DAddr addr, size_t size
         }
 
         // Adds or subtracts 1, as count is a unsigned 8-bit value
-        count.fetch_add(static_cast<u16>(delta), std::memory_order_release);
+        count.fetch_add(static_cast<u8>(delta), std::memory_order_release);
 
         // Assume delta is either -1 or 1
         if (count.load(std::memory_order::relaxed) == 0) {

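One subtlety in the cast above: delta can be -1, and static_cast<u8>(-1) is 255, so the fetch_add wraps modulo 256 and acts as a decrement, just as the 16-bit version did modulo 65536; the asserts in the previous hunk guard the 0 and 255 boundaries so the wrap never turns into a real over- or underflow. A tiny standalone check of that wraparound (not project code):

    #include <atomic>
    #include <cstdint>

    int main() {
        std::atomic_uint8_t count{3};
        // Adding 255 is the same as subtracting 1 in modulo-256 arithmetic.
        count.fetch_add(static_cast<std::uint8_t>(-1), std::memory_order_release);
        return count.load(std::memory_order_relaxed) == 2 ? 0 : 1; // exits 0: count is now 2
    }
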
@@ -14,7 +14,6 @@ namespace Tegra {
 struct MaxwellDeviceMethods;
 
 struct MaxwellDeviceTraits {
-    static constexpr bool supports_pinning = false;
     static constexpr size_t device_virtual_bits = 34;
     using DeviceInterface = typename VideoCore::RasterizerInterface;
     using DeviceMethods = MaxwellDeviceMethods;