shader_ir: Add local memory getters
This commit is contained in:
		| @@ -117,6 +117,10 @@ Node ShaderIR::GetInternalFlag(InternalFlag flag, bool negated) { | ||||
|     return node; | ||||
| } | ||||
|  | ||||
| /// Generates a node representing an access to local memory at the given address | ||||
| /// (wraps the address in an LmemNode and registers it in the IR's node storage). | ||||
| Node ShaderIR::GetLocalMemory(Node address) { | ||||
|     return StoreNode(LmemNode(address)); | ||||
| } | ||||
|  | ||||
| /*static*/ OperationCode ShaderIR::SignedToUnsignedCode(OperationCode operation_code, | ||||
|                                                         bool is_signed) { | ||||
|     if (is_signed) { | ||||
|   | ||||
| @@ -631,6 +631,9 @@ private: | ||||
|     Node GetOutputAttribute(Tegra::Shader::Attribute::Index index, u64 element, Node buffer); | ||||
|     /// Generates a node representing an internal flag | ||||
|     Node GetInternalFlag(InternalFlag flag, bool negated = false); | ||||
|     /// Generates a node representing a local memory address | ||||
|     Node GetLocalMemory(Node address); | ||||
|  | ||||
|  | ||||
|     template <typename... T> | ||||
|     inline Node Operation(OperationCode code, const T*... operands) { | ||||
|   | ||||
		Reference in New Issue
	
	Block a user
	 Author: ReinUsesLisp