common/atomic_ops: Don't cast away volatile from pointers

Preserves the volatility of the pointers being cast.
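For illustration only (not part of the change): a C-style cast such as (char*)pointer quietly combines a reinterpret_cast with a const_cast, so it compiles even though it strips the volatile qualifier, while reinterpret_cast is not allowed to cast qualifiers away and therefore forces volatile to appear in the target type. A minimal sketch of the difference; the Example function is hypothetical:

#include <cstdint>

void Example(volatile std::uint8_t* pointer) {
    // C-style cast: compiles, but silently casts away the volatile qualifier.
    char* stripped = (char*)pointer;

    // reinterpret_cast may not cast qualifiers away, so this would fail to compile:
    // char* rejected = reinterpret_cast<char*>(pointer);

    // The qualifier has to be preserved explicitly, as the change below now does.
    volatile char* preserved = reinterpret_cast<volatile char*>(pointer);

    (void)stripped;
    (void)preserved;
}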
@@ -14,50 +14,55 @@ namespace Common {
 
 #if _MSC_VER
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
-    u8 result = _InterlockedCompareExchange8((char*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
+    const u8 result =
+        _InterlockedCompareExchange8(reinterpret_cast<volatile char*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
-    u16 result = _InterlockedCompareExchange16((short*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
+    const u16 result =
+        _InterlockedCompareExchange16(reinterpret_cast<volatile short*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
-    u32 result = _InterlockedCompareExchange((long*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
+    const u32 result =
+        _InterlockedCompareExchange(reinterpret_cast<volatile long*>(pointer), value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
-    u64 result = _InterlockedCompareExchange64((__int64*)pointer, value, expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
+    const u64 result = _InterlockedCompareExchange64(reinterpret_cast<volatile __int64*>(pointer),
+                                                     value, expected);
     return result == expected;
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
-    return _InterlockedCompareExchange128((__int64*)pointer, value[1], value[0],
-                                          (__int64*)expected.data()) != 0;
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
+    return _InterlockedCompareExchange128(reinterpret_cast<volatile __int64*>(pointer), value[1],
+                                          value[0],
+                                          reinterpret_cast<__int64*>(expected.data())) != 0;
 }
 
 #else
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected) {
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected) {
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected) {
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected) {
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected) {
     return __sync_bool_compare_and_swap(pointer, expected, value);
 }
 
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected) {
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected) {
     unsigned __int128 value_a;
     unsigned __int128 expected_a;
     std::memcpy(&value_a, value.data(), sizeof(u128));

@@ -8,10 +8,10 @@
 
 namespace Common {
 
-bool AtomicCompareAndSwap(u8 volatile* pointer, u8 value, u8 expected);
-bool AtomicCompareAndSwap(u16 volatile* pointer, u16 value, u16 expected);
-bool AtomicCompareAndSwap(u32 volatile* pointer, u32 value, u32 expected);
-bool AtomicCompareAndSwap(u64 volatile* pointer, u64 value, u64 expected);
-bool AtomicCompareAndSwap(u64 volatile* pointer, u128 value, u128 expected);
+bool AtomicCompareAndSwap(volatile u8* pointer, u8 value, u8 expected);
+bool AtomicCompareAndSwap(volatile u16* pointer, u16 value, u16 expected);
+bool AtomicCompareAndSwap(volatile u32* pointer, u32 value, u32 expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u64 value, u64 expected);
+bool AtomicCompareAndSwap(volatile u64* pointer, u128 value, u128 expected);
 
 } // namespace Common

@@ -704,7 +704,7 @@ struct Memory::Impl {
         u8* page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
         if (page_pointer != nullptr) {
             // NOTE: Avoid adding any extra logic to this fast-path block
-            T volatile* pointer = reinterpret_cast<T volatile*>(&page_pointer[vaddr]);
+            auto* pointer = reinterpret_cast<volatile T*>(&page_pointer[vaddr]);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
         }
 
@@ -720,9 +720,8 @@
         case Common::PageType::RasterizerCachedMemory: {
             u8* host_ptr{GetPointerFromRasterizerCachedMemory(vaddr)};
             system.GPU().InvalidateRegion(vaddr, sizeof(T));
-            T volatile* pointer = reinterpret_cast<T volatile*>(&host_ptr);
+            auto* pointer = reinterpret_cast<volatile T*>(&host_ptr);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
-            break;
         }
         default:
             UNREACHABLE();
@@ -734,7 +733,7 @@ struct Memory::Impl {
         u8* const page_pointer = current_page_table->pointers[vaddr >> PAGE_BITS];
         if (page_pointer != nullptr) {
            // NOTE: Avoid adding any extra logic to this fast-path block
-            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&page_pointer[vaddr]);
+            auto* pointer = reinterpret_cast<volatile u64*>(&page_pointer[vaddr]);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
         }
 
@@ -750,9 +749,8 @@
         case Common::PageType::RasterizerCachedMemory: {
             u8* host_ptr{GetPointerFromRasterizerCachedMemory(vaddr)};
             system.GPU().InvalidateRegion(vaddr, sizeof(u128));
-            u64 volatile* pointer = reinterpret_cast<u64 volatile*>(&host_ptr);
+            auto* pointer = reinterpret_cast<volatile u64*>(&host_ptr);
             return Common::AtomicCompareAndSwap(pointer, data, expected);
-            break;
         }
         default:
             UNREACHABLE();
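For context, a minimal usage sketch of the helpers declared above; it is not part of this change, TryIncrement is a hypothetical caller, and the include paths are assumed. It relies only on AtomicCompareAndSwap returning true when the word at pointer still held expected and was replaced with value, as in the implementations above:

#include "common/atomic_ops.h"
#include "common/common_types.h"

// Hypothetical caller: retry the compare-and-swap until no other thread has
// modified the counter between the read and the swap attempt.
static u32 TryIncrement(volatile u32* counter) {
    u32 observed;
    do {
        observed = *counter;
    } while (!Common::AtomicCompareAndSwap(counter, observed + 1, observed));
    return observed + 1;
}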
Lioncash