From 4fda1ad1dcd40178e001a11b6cd82bce5eed7260 Mon Sep 17 00:00:00 2001
From: James Greenhalgh <james.greenhalgh@arm.com>
Date: Tue, 30 Oct 2012 12:31:49 +0000
Subject: [PATCH] 2012-10-30  James Greenhalgh  <james.greenhalgh@arm.com>
 Tejas Belagod  <tejas.belagod@arm.com>

	* config/aarch64/aarch64-simd.md
	(aarch64_simd_bsl<mode>_internal): New pattern.
	(aarch64_simd_bsl<mode>): Likewise.
	(aarch64_vcond_internal<mode>): Likewise.
	(vcondu<mode><mode>): Likewise.
	(vcond<mode><mode>): Likewise.
	* config/aarch64/iterators.md (UNSPEC_BSL): Add to define_constants.

Co-Authored-By: Tejas Belagod <tejas.belagod@arm.com>

From-SVN: r192985
---
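Note for readers: all of the new patterns build on the AArch64 bitwise-select
instructions. A minimal C sketch of the operation the new UNSPEC_BSL unspec
models, shown for a single 64-bit lane (the function name is illustrative
only, not part of the patch):

  /* Each result bit is taken from A where the corresponding bit of MASK
     is set, and from B where it is clear.  BSL, BIT and BIF all compute
     this; they differ only in which source operand shares a register with
     the destination, which is why the insn below offers all three
     alternatives to the register allocator.  */
  static inline unsigned long long
  bitwise_select (unsigned long long mask,
                  unsigned long long a, unsigned long long b)
  {
    return (mask & a) | (~mask & b);
  }
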
 gcc/ChangeLog                      |  11 +++
 gcc/config/aarch64/aarch64-simd.md | 144 +++++++++++++++++++++++++++++
 gcc/config/aarch64/iterators.md    |   1 +
 3 files changed, 156 insertions(+)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 6cfacde346f..467bec3c3c7 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,14 @@
+2012-10-30  James Greenhalgh  <james.greenhalgh@arm.com>
+	    Tejas Belagod  <tejas.belagod@arm.com>
+
+	* config/aarch64/aarch64-simd.md
+	(aarch64_simd_bsl<mode>_internal): New pattern.
+	(aarch64_simd_bsl<mode>): Likewise.
+	(aarch64_vcond_internal<mode>): Likewise.
+	(vcondu<mode><mode>): Likewise.
+	(vcond<mode><mode>): Likewise.
+	* config/aarch64/iterators.md (UNSPEC_BSL): Add to define_constants.
+
 2012-10-30  Richard Biener  <rguenther@suse.de>
 
 	PR tree-optimization/55111
diff --git a/gcc/config/aarch64/aarch64-simd.md b/gcc/config/aarch64/aarch64-simd.md
index a7ddfb1c1d3..b3d01c19199 100644
--- a/gcc/config/aarch64/aarch64-simd.md
+++ b/gcc/config/aarch64/aarch64-simd.md
@@ -1467,6 +1467,150 @@
    (set_attr "simd_mode" "V2SI")]
 )
 
+;; vbsl_* intrinsics may compile to any of vbsl/vbif/vbit depending on register
+;; allocation. For an intrinsic of form:
+;;   vD = bsl_* (vS, vN, vM)
+;; We can use any of:
+;;   bsl vS, vN, vM  (if D = S)
+;;   bit vD, vN, vS  (if D = M, so 1-bits in vS choose bits from vN, else vM)
+;;   bif vD, vM, vS  (if D = N, so 0-bits in vS choose bits from vM, else vN)
+
+(define_insn "aarch64_simd_bsl<mode>_internal"
+  [(set (match_operand:VDQ 0 "register_operand"		"=w,w,w")
+	(unspec:VDQ [(match_operand:VDQ 1 "register_operand"	" 0,w,w")
+		     (match_operand:VDQ 2 "register_operand"	" w,w,0")
+		     (match_operand:VDQ 3 "register_operand"	" w,0,w")]
+		    UNSPEC_BSL))]
+  "TARGET_SIMD"
+  "@
+  bsl\\t%0.<Vbtype>, %2.<Vbtype>, %3.<Vbtype>
+  bit\\t%0.<Vbtype>, %2.<Vbtype>, %1.<Vbtype>
+  bif\\t%0.<Vbtype>, %3.<Vbtype>, %1.<Vbtype>"
+)
+
+(define_expand "aarch64_simd_bsl<mode>"
+  [(set (match_operand:VDQ 0 "register_operand")
+	(unspec:VDQ [(match_operand:<V_cmp_result> 1 "register_operand")
+		     (match_operand:VDQ 2 "register_operand")
+		     (match_operand:VDQ 3 "register_operand")]
+		    UNSPEC_BSL))]
+  "TARGET_SIMD"
+{
+  /* We can't alias operands together if they have different modes.  */
+  operands[1] = gen_lowpart (<MODE>mode, operands[1]);
+})
+
+(define_expand "aarch64_vcond_internal<mode>"
+  [(set (match_operand:VDQ 0 "register_operand")
+	(if_then_else:VDQ
+	  (match_operator 3 "comparison_operator"
+	    [(match_operand:VDQ 4 "register_operand")
+	     (match_operand:VDQ 5 "nonmemory_operand")])
+	  (match_operand:VDQ 1 "register_operand")
+	  (match_operand:VDQ 2 "register_operand")))]
+  "TARGET_SIMD"
+{
+  int inverse = 0, has_zero_imm_form = 0;
+  rtx mask = gen_reg_rtx (<MODE>mode);
+
+  switch (GET_CODE (operands[3]))
+    {
+    case LE:
+    case LT:
+    case NE:
+      inverse = 1;
+      /* Fall through.  */
+
+    case GE:
+    case GT:
+    case EQ:
+      has_zero_imm_form = 1;
+      break;
+
+    case LEU:
+    case LTU:
+      inverse = 1;
+      break;
+
+    default:
+      break;
+    }
+
+  if (!REG_P (operands[5])
+      && (operands[5] != CONST0_RTX (<MODE>mode) || !has_zero_imm_form))
+    operands[5] = force_reg (<MODE>mode, operands[5]);
+
+  switch (GET_CODE (operands[3]))
+    {
+    case LT:
+    case GE:
+      emit_insn (gen_aarch64_cmge<mode> (mask, operands[4], operands[5]));
+      break;
+
+    case LE:
+    case GT:
+      emit_insn (gen_aarch64_cmgt<mode> (mask, operands[4], operands[5]));
+      break;
+
+    case LTU:
+    case GEU:
+      emit_insn (gen_aarch64_cmhs<mode> (mask, operands[4], operands[5]));
+      break;
+
+    case LEU:
+    case GTU:
+      emit_insn (gen_aarch64_cmhi<mode> (mask, operands[4], operands[5]));
+      break;
+
+    case NE:
+    case EQ:
+      emit_insn (gen_aarch64_cmeq<mode> (mask, operands[4], operands[5]));
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
+
+  if (inverse)
+    emit_insn (gen_aarch64_simd_bsl<mode> (operands[0], mask, operands[2],
+					   operands[1]));
+  else
+    emit_insn (gen_aarch64_simd_bsl<mode> (operands[0], mask, operands[1],
+					   operands[2]));
+
+  DONE;
+})
+
+(define_expand "vcond<mode><mode>"
+  [(set (match_operand:VDQ 0 "register_operand")
+	(if_then_else:VDQ
+	  (match_operator 3 "comparison_operator"
+	    [(match_operand:VDQ 4 "register_operand")
+	     (match_operand:VDQ 5 "nonmemory_operand")])
+	  (match_operand:VDQ 1 "register_operand")
+	  (match_operand:VDQ 2 "register_operand")))]
+  "TARGET_SIMD"
+{
+  emit_insn (gen_aarch64_vcond_internal<mode> (operands[0], operands[1],
+					       operands[2], operands[3],
+					       operands[4], operands[5]));
+  DONE;
+})
+
+(define_expand "vcondu<mode><mode>"
+  [(set (match_operand:VDQ 0 "register_operand")
+	(if_then_else:VDQ
+	  (match_operator 3 "comparison_operator"
+	    [(match_operand:VDQ 4 "register_operand")
+	     (match_operand:VDQ 5 "nonmemory_operand")])
+	  (match_operand:VDQ 1 "register_operand")
+	  (match_operand:VDQ 2 "register_operand")))]
+  "TARGET_SIMD"
+{
+  emit_insn (gen_aarch64_vcond_internal<mode> (operands[0], operands[1],
+					       operands[2], operands[3],
+					       operands[4], operands[5]));
+  DONE;
+})
+
 ;; Patterns for AArch64 SIMD Intrinsics.
 
 (define_expand "aarch64_create<mode>"
diff --git a/gcc/config/aarch64/iterators.md b/gcc/config/aarch64/iterators.md
index bf2041e78e5..8d5d4b0d5ed 100644
--- a/gcc/config/aarch64/iterators.md
+++ b/gcc/config/aarch64/iterators.md
@@ -227,6 +227,7 @@
     UNSPEC_CMTST	; Used in aarch64-simd.md.
     UNSPEC_FMAX		; Used in aarch64-simd.md.
     UNSPEC_FMIN		; Used in aarch64-simd.md.
+    UNSPEC_BSL		; Used in aarch64-simd.md.
 ])
 
 ;; -------------------------------------------------------------------
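
Note for readers: the vcond<mode><mode> and vcondu<mode><mode> expanders are
what let the vectorizer reach these patterns. An illustrative loop (not part
of the patch) of the shape they make vectorizable:

  void
  f (int *restrict d, const int *restrict a,
     const int *restrict b, const int *restrict c, int n)
  {
    for (int i = 0; i < n; i++)
      d[i] = a[i] > 0 ? b[i] : c[i];  /* compare + per-element select  */
  }

Here the comparison is GT against zero, so has_zero_imm_form applies and the
body can expand to a CMGT against #0, producing an all-ones/all-zeros mask
per element, followed by a BSL selecting between the two arms.  For LT, LE,
NE, LEU and LTU the expander instead emits the inverse comparison and swaps
the two select operands, which is what the `inverse' flag implements.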