diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
index bc126cc60fc..bde21f7f7f5 100644
--- a/gcc/config/aarch64/aarch64.cc
+++ b/gcc/config/aarch64/aarch64.cc
@@ -21272,11 +21272,11 @@ aarch64_split_compare_and_swap (rtx operands[])
   mem = operands[1];
   oldval = operands[2];
   newval = operands[3];
-  is_weak = (operands[4] != const0_rtx);
   model_rtx = operands[5];
   scratch = operands[7];
   mode = GET_MODE (mem);
   model = memmodel_from_int (INTVAL (model_rtx));
+  is_weak = operands[4] != const0_rtx && mode != TImode;
 
   /* When OLDVAL is zero and we want the strong version we can emit a tighter
      loop:
@@ -21337,6 +21337,33 @@ aarch64_split_compare_and_swap (rtx operands[])
   else
     aarch64_gen_compare_reg (NE, scratch, const0_rtx);
 
+  /* 128-bit LDAXP is not atomic unless STLXP succeeds.  So for a mismatch,
+     store the returned value and loop if the STLXP fails.  */
+  if (mode == TImode)
+    {
+      rtx_code_label *label3 = gen_label_rtx ();
+      emit_jump_insn (gen_rtx_SET (pc_rtx, gen_rtx_LABEL_REF (Pmode, label3)));
+      emit_barrier ();
+
+      emit_label (label2);
+      aarch64_emit_store_exclusive (mode, scratch, mem, rval, model_rtx);
+
+      if (aarch64_track_speculation)
+	{
+	  /* Emit an explicit compare instruction, so that we can correctly
+	     track the condition codes.  */
+	  rtx cc_reg = aarch64_gen_compare_reg (NE, scratch, const0_rtx);
+	  x = gen_rtx_NE (GET_MODE (cc_reg), cc_reg, const0_rtx);
+	}
+      else
+	x = gen_rtx_NE (VOIDmode, scratch, const0_rtx);
+      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
+				gen_rtx_LABEL_REF (Pmode, label1), pc_rtx);
+      aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+
+      label2 = label3;
+    }
+
   emit_label (label2);
 
   /* If we used a CBNZ in the exchange loop emit an explicit compare with RVAL
diff --git a/libgcc/config/aarch64/lse.S b/libgcc/config/aarch64/lse.S
index f64a3e98bfa..046dd40659d 100644
--- a/libgcc/config/aarch64/lse.S
+++ b/libgcc/config/aarch64/lse.S
@@ -163,6 +163,8 @@ see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
 #define tmp0	16
 #define tmp1	17
 #define tmp2	15
+#define tmp3	14
+#define tmp4	13
 
 /* Start and end a function.  */
 .macro	STARTFN name
@@ -233,10 +235,11 @@ STARTFN	NAME(cas)
 0:	LDXP		x0, x1, [x4]
 	cmp		x0, x(tmp0)
 	ccmp		x1, x(tmp1), #0, eq
-	bne		1f
-	STXP		w(tmp2), x2, x3, [x4]
-	cbnz		w(tmp2), 0b
-1:	BARRIER
+	csel		x(tmp2), x2, x0, eq
+	csel		x(tmp3), x3, x1, eq
+	STXP		w(tmp4), x(tmp2), x(tmp3), [x4]
+	cbnz		w(tmp4), 0b
+	BARRIER
 	ret
 #endif
 
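
As a usage note (not part of the patch): the paths changed above are reached by a 16-byte __atomic_compare_exchange on aarch64.  A minimal caller is sketched below; the function name cas16 and the particular memory orders are illustrative only, and whether the compiler emits the inline TImode LDXP/STXP loop from aarch64_split_compare_and_swap or calls the __aarch64_cas16_* outline helper from lse.S depends on the outline-atomics setting and target options.

/* Illustrative caller only, not part of the patch.  After the fix, the
   128-bit value observed on a compare failure is still read atomically,
   because the exclusive store is always attempted (storing back the
   loaded value on mismatch) and retried until it succeeds.  */
#include <stdbool.h>

bool
cas16 (__int128 *ptr, __int128 *expected, __int128 desired)
{
  return __atomic_compare_exchange_n (ptr, expected, desired,
				      /* weak= */ false,
				      __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);
}

Note that is_weak is now forced to false for TImode: even a "weak" 16-byte compare-exchange must retry the exclusive store, since the value returned by LDAXP is only guaranteed to be single-copy atomic when the paired STLXP succeeds.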