commit     ae4e452759573d145738688d9284077934e61ae4
author     ameerj <52414509+ameerj@users.noreply.github.com>  2021-06-14 17:32:28 +0200
committer  ameerj <52414509+ameerj@users.noreply.github.com>  2021-07-23 03:51:38 +0200
tree       843caa97bc872322d3e80739492805062e6aee5e  /src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
parent     glsl: Add LoopSafety instructions
Diffstat (limited to 'src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp')
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp  13 ++++++-------
1 file changed, 6 insertions(+), 7 deletions(-)
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
index 850eee1e1..9152ace98 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_atomic.cpp
@@ -98,7 +98,7 @@ void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, std::string_vi
void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
std::string_view value) {
- // LOG_WARNING("Int64 Atomics not supported, fallback to non-atomic");
+ LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
ctx.AddU64("{}=packUint2x32(uvec2(smem[{}>>2],smem[({}+4)>>2]));", inst, pointer_offset,
pointer_offset);
ctx.Add("smem[{}>>2]=unpackUint2x32({}).x;smem[({}+4)>>2]=unpackUint2x32({}).y;",
@@ -171,7 +171,7 @@ void EmitStorageAtomicExchange32(EmitContext& ctx, IR::Inst& inst, const IR::Val
void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
const IR::Value& offset, std::string_view value) {
- // LOG_WARNING(..., "Op falling to non-atomic");
+ LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
binding.U32(), ctx.var_alloc.Consume(offset));
@@ -182,7 +182,7 @@ void EmitStorageAtomicIAdd64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
const IR::Value& offset, std::string_view value) {
- // LOG_WARNING(..., "Op falling to non-atomic");
+ LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
binding.U32(), ctx.var_alloc.Consume(offset));
@@ -195,7 +195,7 @@ void EmitStorageAtomicSMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
const IR::Value& offset, std::string_view value) {
- // LOG_WARNING(..., "Op falling to non-atomic");
+ LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
binding.U32(), ctx.var_alloc.Consume(offset));
@@ -207,7 +207,7 @@ void EmitStorageAtomicUMin64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
const IR::Value& offset, std::string_view value) {
- // LOG_WARNING(..., "Op falling to non-atomic");
+ LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
ctx.AddU64("{}=packInt2x32(ivec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
binding.U32(), ctx.var_alloc.Consume(offset));
@@ -220,8 +220,7 @@ void EmitStorageAtomicSMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value&
void EmitStorageAtomicUMax64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
const IR::Value& offset, std::string_view value) {
- // LOG_WARNING(..., "Op falling to non-atomic");
-
+ LOG_WARNING(Shader_GLSL, "Int64 Atomics not supported, fallback to non-atomic");
ctx.AddU64("{}=packUint2x32(uvec2({}_ssbo{}[{}>>2],{}_ssbo{}[({}>>2)+1]));", inst,
ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), ctx.stage_name,
binding.U32(), ctx.var_alloc.Consume(offset));
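The storage-buffer variants follow the same pattern against an SSBO: the old 64-bit value is read non-atomically from two adjacent 32-bit elements (with packInt2x32/ivec2 in the signed min/max cases), and the update that follows each hunk is likewise non-atomic, which is what the new warnings call out. With placeholder names (stage_ssbo0 standing in for the {}_ssbo{} binding and offset for the consumed offset variable), the emitted read looks roughly like:

    // non-atomic read of a 64-bit value from two adjacent 32-bit SSBO words
    uint64_t result = packUint2x32(uvec2(stage_ssbo0[offset >> 2], stage_ssbo0[(offset >> 2) + 1]));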