diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 815c5824ac4..24839c42fab 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2012-10-23  Marc Glisse  <marc.glisse@inria.fr>
+
+	* tree-ssa-forwprop.c (forward_propagate_into_cond): Handle vectors.
+	* fold-const.c (fold_relational_const): Handle VECTOR_CST.
+	* doc/generic.texi (VEC_COND_EXPR): Document current policy.
+
 2012-10-23  Jan Hubicka  <jh@suse.cz>
 
 	PR middle-end/54937
diff --git a/gcc/doc/generic.texi b/gcc/doc/generic.texi
index 082a5282c8d..c739731de67 100644
--- a/gcc/doc/generic.texi
+++ b/gcc/doc/generic.texi
@@ -1780,8 +1780,9 @@
 operand is of signed integral vector type.  If an element of the first
 operand evaluates to a zero value, the corresponding element of the
 result is taken from the third operand. If it evaluates to a minus one
 value, it is taken from the second operand. It should never evaluate to
-any other value.  In contrast with a @code{COND_EXPR}, all operands are
-always evaluated.
+any other value currently, but optimizations should not rely on that
+property.  In contrast with a @code{COND_EXPR}, all operands are always
+evaluated.
 
 @end table
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 053b3f524c5..e3e4151ae60 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -16130,6 +16130,31 @@ fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
       return NULL_TREE;
     }
 
+  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
+    {
+      unsigned count = VECTOR_CST_NELTS (op0);
+      tree *elts = XALLOCAVEC (tree, count);
+      gcc_assert (VECTOR_CST_NELTS (op1) == count
+		  && TYPE_VECTOR_SUBPARTS (type) == count);
+
+      for (unsigned i = 0; i < count; i++)
+	{
+	  tree elem_type = TREE_TYPE (type);
+	  tree elem0 = VECTOR_CST_ELT (op0, i);
+	  tree elem1 = VECTOR_CST_ELT (op1, i);
+
+	  tree tem = fold_relational_const (code, elem_type,
+					    elem0, elem1);
+
+	  if (tem == NULL_TREE)
+	    return NULL_TREE;
+
+	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
+	}
+
+      return build_vector (type, elts);
+    }
+
   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
 
      To compute GT, swap the arguments and do LT.
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 7a6d25847b6..7c7ce7c0c4a 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,7 @@
+2012-10-23  Marc Glisse  <marc.glisse@inria.fr>
+
+	* gcc.dg/tree-ssa/foldconst-6.c: New testcase.
+
 2012-10-23  Jan Hubicka  <jh@suse.cz>
 
 	PR middle-end/54937
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/foldconst-6.c b/gcc/testsuite/gcc.dg/tree-ssa/foldconst-6.c
new file mode 100644
index 00000000000..0c08f8f258b
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/foldconst-6.c
@@ -0,0 +1,14 @@
+/* { dg-do compile } */
+/* { dg-options "-O -fdump-tree-ccp1" } */
+
+typedef long vec __attribute__ ((vector_size (2 * sizeof(long))));
+
+void f (vec *r)
+{
+  vec a = { -2, 666 };
+  vec b = { 3, 2 };
+  *r = a < b;
+}
+
+/* { dg-final { scan-tree-dump-not "666" "ccp1"} } */
+/* { dg-final { cleanup-tree-dump "ccp1" } } */
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index b6c5654da01..f193fa90208 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -551,6 +551,7 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
   gimple stmt = gsi_stmt (*gsi_p);
   tree tmp = NULL_TREE;
   tree cond = gimple_assign_rhs1 (stmt);
+  enum tree_code code = gimple_assign_rhs_code (stmt);
   bool swap = false;
 
   /* We can do tree combining on SSA_NAME and comparison expressions.  */
@@ -561,23 +562,24 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
 			   TREE_OPERAND (cond, 1));
   else if (TREE_CODE (cond) == SSA_NAME)
     {
-      enum tree_code code;
+      enum tree_code def_code;
       tree name = cond;
       gimple def_stmt = get_prop_source_stmt (name, true, NULL);
       if (!def_stmt || !can_propagate_from (def_stmt))
 	return 0;
 
-      code = gimple_assign_rhs_code (def_stmt);
-      if (TREE_CODE_CLASS (code) == tcc_comparison)
+      def_code = gimple_assign_rhs_code (def_stmt);
+      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
 	tmp = fold_build2_loc (gimple_location (def_stmt),
-			       code,
+			       def_code,
 			       TREE_TYPE (cond),
 			       gimple_assign_rhs1 (def_stmt),
 			       gimple_assign_rhs2 (def_stmt));
-      else if ((code == BIT_NOT_EXPR
-		&& TYPE_PRECISION (TREE_TYPE (cond)) == 1)
-	       || (code == BIT_XOR_EXPR
-		   && integer_onep (gimple_assign_rhs2 (def_stmt))))
+      else if (code == COND_EXPR
+	       && ((def_code == BIT_NOT_EXPR
+		    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
+		   || (def_code == BIT_XOR_EXPR
+		       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
 	{
 	  tmp = gimple_assign_rhs1 (def_stmt);
 	  swap = true;
@@ -596,7 +598,8 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
 	  fprintf (dump_file, "'\n");
 	}
 
-      if (integer_onep (tmp))
+      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
+	  : integer_onep (tmp))
 	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
       else if (integer_zerop (tmp))
 	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));