expr.h (emit_storent_insn, expand_expr_real_1, expand_expr_real_2): Declare.

	* expr.h (emit_storent_insn, expand_expr_real_1,
	expand_expr_real_2): Declare.
	* expr.c (emit_storent_insn, expand_expr_real_1,
	expand_expr_real_2): Export.
	(store_expr): Setting and evaluating dont_return_target is
	useless.
	(expand_expr_real_1, <case GOTO_EXPR, RETURN_EXPR, SWITCH_EXPR,
	LABEL_EXPR and ASM_EXPR>): Move to gcc_unreachable.
	* except.c (expand_resx_expr): Rename to ...
	(expand_resx_stmt): ... this.  Rewrite to take gimple statement.
	* except.h (expand_resx_stmt): Declare.
	* stmt.c: Add include gimple.h
	(expand_asm_expr): Rename to ...
	(expand_asm_stmt): ... this.  Rewrite to take gimple statement.
	(expand_case): Rewrite to take gimple statement.
	* tree.h (expand_asm_stmt): Declare.
	(expand_case): Change prototype.
	* Makefile.in (stmt.o): Depend on gimple.h.
	* builtins.c (expand_builtin_synchronize): Build gimple asm
	statement, not an ASM_EXPR.
	* cfgexpand.c (gimple_cond_pred_to_tree, set_expr_location_r,
	gimple_to_tree, release_stmt_tree): Remove.
	(expand_gimple_cond): Don't call gimple_cond_pred_to_tree or
	ggc_free, but hold comparison code and operands separately.
	Call jumpif_1 and jumpifnot_1 instead of jumpif and jumpifnot.
	(expand_call_stmt, expand_gimple_stmt_1,
	expand_gimple_stmt): New helpers.
	(expand_gimple_tailcall): Don't call gimple_to_tree, expand_expr_stmt,
	release_stmt_tree.  Call expand_gimple_stmt instead.
	(expand_gimple_basic_block): Ditto.

	* calls.c (emit_call_1): Don't look at EH regions here, make
	fntree parameter useless.
	(expand_call): New local rettype for TREE_TYPE(exp), use it
	throughout.  Remove local p, use addr instead.
	Don't look at EH regions here.

From-SVN: r151350
parent 2f6924a484
commit 28ed065ef9
11 changed files with 464 additions and 479 deletions
gcc/ChangeLog
@@ -1,3 +1,42 @@
2009-09-01  Michael Matz  <matz@suse.de>

	* expr.h (emit_storent_insn, expand_expr_real_1,
	expand_expr_real_2): Declare.
	* expr.c (emit_storent_insn, expand_expr_real_1,
	expand_expr_real_2): Export.
	(store_expr): Setting and evaluating dont_return_target is
	useless.
	(expand_expr_real_1, <case GOTO_EXPR, RETURN_EXPR, SWITCH_EXPR,
	LABEL_EXPR and ASM_EXPR>): Move to gcc_unreachable.
	* except.c (expand_resx_expr): Rename to ...
	(expand_resx_stmt): ... this.  Rewrite to take gimple statement.
	* except.h (expand_resx_stmt): Declare.
	* stmt.c: Add include gimple.h
	(expand_asm_expr): Rename to ...
	(expand_asm_stmt): ... this.  Rewrite to take gimple statement.
	(expand_case): Rewrite to take gimple statement.
	* tree.h (expand_asm_stmt): Declare.
	(expand_case): Change prototype.
	* Makefile.in (stmt.o): Depend on gimple.h.
	* builtins.c (expand_builtin_synchronize): Build gimple asm
	statement, not an ASM_EXPR.
	* cfgexpand.c (gimple_cond_pred_to_tree, set_expr_location_r,
	gimple_to_tree, release_stmt_tree): Remove.
	(expand_gimple_cond): Don't call gimple_cond_pred_to_tree or
	ggc_free, but hold comparison code and operands separately.
	Call jumpif_1 and jumpifnot_1 instead of jumpif and jumpifnot.
	(expand_call_stmt, expand_gimple_stmt_1,
	expand_gimple_stmt): New helpers.
	(expand_gimple_tailcall): Don't call gimple_to_tree, expand_expr_stmt,
	release_stmt_tree.  Call expand_gimple_stmt instead.
	(expand_gimple_basic_block): Ditto.

	* calls.c (emit_call_1): Don't look at EH regions here, make
	fntree parameter useless.
	(expand_call): New local rettype for TREE_TYPE(exp), use it
	throughout.  Remove local p, use addr instead.
	Don't look at EH regions here.

2009-09-02  Vladimir Makarov  <vmakarov@redhat.com>

	* doc/invoke.texi (-fsched-pressure): Document it.
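The heart of this change is that cfgexpand.c now expands GIMPLE statements to
RTL directly instead of first rebuilding a GENERIC tree for every statement.
As a rough orientation before the large diffs below, the per-statement work in
expand_gimple_basic_block changes shape roughly like this (a condensed sketch
using only GCC-internal functions that appear in this commit, not the literal
committed code):

    /* Before: each statement took a round trip through trees.  */
    tree stmt_tree = gimple_to_tree (stmt);   /* rebuild a GENERIC tree  */
    expand_expr_stmt (stmt_tree);             /* expand that tree to RTL */
    release_stmt_tree (stmt, stmt_tree);      /* free the temporary tree */

    /* After: one call that dispatches on gimple_code (stmt), sets the
       source location, expands, and attaches REG_EH_REGION notes.  */
    rtx last = expand_gimple_stmt (stmt);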
gcc/Makefile.in
@@ -2284,7 +2284,7 @@ tree-ssa-pre.o : tree-ssa-pre.c $(TREE_FLOW_H) $(CONFIG_H) \
$(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_PASS_H) $(FLAGS_H) langhooks.h \
$(CFGLOOP_H) alloc-pool.h $(BASIC_BLOCK_H) $(BITMAP_H) $(HASHTAB_H) \
$(GIMPLE_H) $(TREE_INLINE_H) tree-iterator.h tree-ssa-sccvn.h $(PARAMS_H) \
$(DBGCNT_H)
$(DBGCNT_H) tree-scalar-evolution.h
tree-ssa-sccvn.o : tree-ssa-sccvn.c $(TREE_FLOW_H) $(CONFIG_H) \
$(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) $(FIBHEAP_H) \
$(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_PASS_H) $(FLAGS_H) $(CFGLOOP_H) \

@@ -2675,7 +2675,7 @@ statistics.o : statistics.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
stmt.o : stmt.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) $(FUNCTION_H) insn-config.h hard-reg-set.h $(EXPR_H) \
libfuncs.h $(EXCEPT_H) $(RECOG_H) $(TOPLEV_H) output.h $(GGC_H) $(TM_P_H) \
langhooks.h $(PREDICT_H) $(OPTABS_H) $(TARGET_H) $(MACHMODE_H) \
langhooks.h $(PREDICT_H) $(OPTABS_H) $(TARGET_H) gimple.h $(MACHMODE_H) \
$(REGS_H) alloc-pool.h
except.o : except.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) $(EXCEPT_H) $(FUNCTION_H) $(EXPR_H) libfuncs.h \
gcc/builtins.c
@@ -6235,7 +6235,7 @@ expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
static void
expand_builtin_synchronize (void)
{
tree x;
gimple x;

#ifdef HAVE_memory_barrier
if (HAVE_memory_barrier)

@@ -6253,10 +6253,10 @@ expand_builtin_synchronize (void)

/* If no explicit memory barrier instruction is available, create an
empty asm stmt with a memory clobber.  */
x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
tree_cons (NULL, build_string (6, "memory"), NULL));
ASM_VOLATILE_P (x) = 1;
expand_asm_expr (x);
x = gimple_build_asm ("", 0, 0, 1,
tree_cons (NULL, build_string (6, "memory"), NULL));
gimple_asm_set_volatile (x, true);
expand_asm_stmt (x);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR. */
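The builtins.c hunk above shows the new pattern for call sites that used to
synthesize an ASM_EXPR: build a gimple asm tuple and hand it to the renamed
expand_asm_stmt.  A minimal sketch of that pattern, condensed from the hunk
(the empty volatile asm with a "memory" clobber is the usual compiler barrier,
i.e. asm volatile ("" : : : "memory"); the clobber list is still a plain
tree_cons chain here):

    gimple x;

    /* No inputs or outputs, one clobber, as in the call above.  */
    x = gimple_build_asm ("", 0, 0, 1,
                          tree_cons (NULL, build_string (6, "memory"), NULL));
    gimple_asm_set_volatile (x, true);
    expand_asm_stmt (x);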
gcc/calls.c (73 changed lines)
@ -238,7 +238,7 @@ prepare_call_address (rtx funexp, rtx static_chain_value,
|
|||
denote registers used by the called function. */
|
||||
|
||||
static void
|
||||
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
|
||||
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
|
||||
tree funtype ATTRIBUTE_UNUSED,
|
||||
HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
|
||||
HOST_WIDE_INT rounded_stack_size,
|
||||
|
@ -380,15 +380,6 @@ emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
|
|||
effect. */
|
||||
if (ecf_flags & ECF_NOTHROW)
|
||||
add_reg_note (call_insn, REG_EH_REGION, const0_rtx);
|
||||
else
|
||||
{
|
||||
int rn = lookup_expr_eh_region (fntree);
|
||||
|
||||
/* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
|
||||
throw, which we already took care of. */
|
||||
if (rn > 0)
|
||||
add_reg_note (call_insn, REG_EH_REGION, GEN_INT (rn));
|
||||
}
|
||||
|
||||
if (ecf_flags & ECF_NORETURN)
|
||||
add_reg_note (call_insn, REG_NORETURN, const0_rtx);
|
||||
|
@ -1927,6 +1918,7 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
/* Data type of the function. */
|
||||
tree funtype;
|
||||
tree type_arg_types;
|
||||
tree rettype;
|
||||
/* Declaration of the function being called,
|
||||
or 0 if the function is computed (not known by name). */
|
||||
tree fndecl = 0;
|
||||
|
@ -2022,7 +2014,6 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
int old_stack_pointer_delta = 0;
|
||||
|
||||
rtx call_fusage;
|
||||
tree p = CALL_EXPR_FN (exp);
|
||||
tree addr = CALL_EXPR_FN (exp);
|
||||
int i;
|
||||
/* The alignment of the stack, in bits. */
|
||||
|
@ -2045,15 +2036,16 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
}
|
||||
else
|
||||
{
|
||||
fntype = TREE_TYPE (TREE_TYPE (p));
|
||||
fntype = TREE_TYPE (TREE_TYPE (addr));
|
||||
flags |= flags_from_decl_or_type (fntype);
|
||||
}
|
||||
rettype = TREE_TYPE (exp);
|
||||
|
||||
struct_value = targetm.calls.struct_value_rtx (fntype, 0);
|
||||
|
||||
/* Warn if this value is an aggregate type,
|
||||
regardless of which calling convention we are using for it. */
|
||||
if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
|
||||
if (AGGREGATE_TYPE_P (rettype))
|
||||
warning (OPT_Waggregate_return, "function call has aggregate value");
|
||||
|
||||
/* If the result of a non looping pure or const function call is
|
||||
|
@ -2063,7 +2055,7 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
if ((flags & (ECF_CONST | ECF_PURE))
|
||||
&& (!(flags & ECF_LOOPING_CONST_OR_PURE))
|
||||
&& (ignore || target == const0_rtx
|
||||
|| TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
|
||||
|| TYPE_MODE (rettype) == VOIDmode))
|
||||
{
|
||||
bool volatilep = false;
|
||||
tree arg;
|
||||
|
@ -2106,7 +2098,7 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
}
|
||||
#else /* not PCC_STATIC_STRUCT_RETURN */
|
||||
{
|
||||
struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
|
||||
struct_value_size = int_size_in_bytes (rettype);
|
||||
|
||||
if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
|
||||
structure_value_addr = XEXP (target, 0);
|
||||
|
@ -2115,7 +2107,7 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
/* For variable-sized objects, we must be called with a target
|
||||
specified. If we were to allocate space on the stack here,
|
||||
we would have no way of knowing when to free it. */
|
||||
rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
|
||||
rtx d = assign_temp (rettype, 0, 1, 1);
|
||||
|
||||
mark_temp_addr_taken (d);
|
||||
structure_value_addr = XEXP (d, 0);
|
||||
|
@ -2286,7 +2278,6 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
if (currently_expanding_call++ != 0
|
||||
|| !flag_optimize_sibling_calls
|
||||
|| args_size.var
|
||||
|| lookup_expr_eh_region (exp) >= 0
|
||||
|| dbg_cnt (tail_call) == false)
|
||||
try_tail_call = 0;
|
||||
|
||||
|
@ -2693,14 +2684,14 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
|
||||
/* Figure out the register where the value, if any, will come back. */
|
||||
valreg = 0;
|
||||
if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
|
||||
if (TYPE_MODE (rettype) != VOIDmode
|
||||
&& ! structure_value_addr)
|
||||
{
|
||||
if (pcc_struct_value)
|
||||
valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
|
||||
valreg = hard_function_value (build_pointer_type (rettype),
|
||||
fndecl, NULL, (pass == 0));
|
||||
else
|
||||
valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
|
||||
valreg = hard_function_value (rettype, fndecl, fntype,
|
||||
(pass == 0));
|
||||
|
||||
/* If VALREG is a PARALLEL whose first member has a zero
|
||||
|
@ -2865,12 +2856,12 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
group load/store machinery below. */
|
||||
if (!structure_value_addr
|
||||
&& !pcc_struct_value
|
||||
&& TYPE_MODE (TREE_TYPE (exp)) != BLKmode
|
||||
&& targetm.calls.return_in_msb (TREE_TYPE (exp)))
|
||||
&& TYPE_MODE (rettype) != BLKmode
|
||||
&& targetm.calls.return_in_msb (rettype))
|
||||
{
|
||||
if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
|
||||
if (shift_return_value (TYPE_MODE (rettype), false, valreg))
|
||||
sibcall_failure = 1;
|
||||
valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
|
||||
valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
|
||||
}
|
||||
|
||||
if (pass && (flags & ECF_MALLOC))
|
||||
|
@ -2879,7 +2870,7 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
rtx last, insns;
|
||||
|
||||
/* The return value from a malloc-like function is a pointer. */
|
||||
if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
|
||||
if (TREE_CODE (rettype) == POINTER_TYPE)
|
||||
mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
|
||||
|
||||
emit_move_insn (temp, valreg);
|
||||
|
@ -2929,7 +2920,7 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
|
||||
/* If value type not void, return an rtx for the value. */
|
||||
|
||||
if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
|
||||
if (TYPE_MODE (rettype) == VOIDmode
|
||||
|| ignore)
|
||||
target = const0_rtx;
|
||||
else if (structure_value_addr)
|
||||
|
@ -2937,10 +2928,10 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
if (target == 0 || !MEM_P (target))
|
||||
{
|
||||
target
|
||||
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
|
||||
memory_address (TYPE_MODE (TREE_TYPE (exp)),
|
||||
= gen_rtx_MEM (TYPE_MODE (rettype),
|
||||
memory_address (TYPE_MODE (rettype),
|
||||
structure_value_addr));
|
||||
set_mem_attributes (target, exp, 1);
|
||||
set_mem_attributes (target, rettype, 1);
|
||||
}
|
||||
}
|
||||
else if (pcc_struct_value)
|
||||
|
@ -2948,9 +2939,9 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
/* This is the special C++ case where we need to
|
||||
know what the true target was. We take care to
|
||||
never use this value more than once in one expression. */
|
||||
target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
|
||||
target = gen_rtx_MEM (TYPE_MODE (rettype),
|
||||
copy_to_reg (valreg));
|
||||
set_mem_attributes (target, exp, 1);
|
||||
set_mem_attributes (target, rettype, 1);
|
||||
}
|
||||
/* Handle calls that return values in multiple non-contiguous locations.
|
||||
The Irix 6 ABI has examples of this. */
|
||||
|
@ -2959,22 +2950,22 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
if (target == 0)
|
||||
{
|
||||
/* This will only be assigned once, so it can be readonly. */
|
||||
tree nt = build_qualified_type (TREE_TYPE (exp),
|
||||
(TYPE_QUALS (TREE_TYPE (exp))
|
||||
tree nt = build_qualified_type (rettype,
|
||||
(TYPE_QUALS (rettype)
|
||||
| TYPE_QUAL_CONST));
|
||||
|
||||
target = assign_temp (nt, 0, 1, 1);
|
||||
}
|
||||
|
||||
if (! rtx_equal_p (target, valreg))
|
||||
emit_group_store (target, valreg, TREE_TYPE (exp),
|
||||
int_size_in_bytes (TREE_TYPE (exp)));
|
||||
emit_group_store (target, valreg, rettype,
|
||||
int_size_in_bytes (rettype));
|
||||
|
||||
/* We can not support sibling calls for this case. */
|
||||
sibcall_failure = 1;
|
||||
}
|
||||
else if (target
|
||||
&& GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
|
||||
&& GET_MODE (target) == TYPE_MODE (rettype)
|
||||
&& GET_MODE (target) == GET_MODE (valreg))
|
||||
{
|
||||
bool may_overlap = false;
|
||||
|
@ -3019,9 +3010,9 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
sibcall_failure = 1;
|
||||
}
|
||||
}
|
||||
else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
|
||||
else if (TYPE_MODE (rettype) == BLKmode)
|
||||
{
|
||||
target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
|
||||
target = copy_blkmode_from_reg (target, valreg, rettype);
|
||||
|
||||
/* We can not support sibling calls for this case. */
|
||||
sibcall_failure = 1;
|
||||
|
@ -3032,10 +3023,10 @@ expand_call (tree exp, rtx target, int ignore)
|
|||
/* If we promoted this return value, make the proper SUBREG.
|
||||
TARGET might be const0_rtx here, so be careful. */
|
||||
if (REG_P (target)
|
||||
&& TYPE_MODE (TREE_TYPE (exp)) != BLKmode
|
||||
&& GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
|
||||
&& TYPE_MODE (rettype) != BLKmode
|
||||
&& GET_MODE (target) != TYPE_MODE (rettype))
|
||||
{
|
||||
tree type = TREE_TYPE (exp);
|
||||
tree type = rettype;
|
||||
int unsignedp = TYPE_UNSIGNED (type);
|
||||
int offset = 0;
|
||||
enum machine_mode pmode;
|
||||
|
|
gcc/cfgexpand.c (643 changed lines)
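One detail worth noting before the large cfgexpand.c diff below:
expand_gimple_cond no longer builds a tree predicate.  It keeps the comparison
code and the two operands separately and, when TER would have made the same
substitution, looks through the feeding SSA definition so a single
compare-and-branch is emitted.  A worked example (source and GIMPLE are
illustrative only; the SSA name is made up):

    /* Source:   if (x < y) goto then_part;
       GIMPLE:   D.123_1 = x < y;
                 if (D.123_1 != 0) goto <then>; else goto <else>;
       Expansion now peeks through the definition of D.123_1 and calls
           jumpif_1 (LT_EXPR, x, y, label_rtx_for_bb (true_edge->dest));
       instead of separately expanding the NE_EXPR test of D.123_1.  */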
@ -86,326 +86,6 @@ gimple_assign_rhs_to_tree (gimple stmt)
|
|||
return t;
|
||||
}
|
||||
|
||||
/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
|
||||
statement STMT. */
|
||||
|
||||
static tree
|
||||
gimple_cond_pred_to_tree (gimple stmt)
|
||||
{
|
||||
/* We're sometimes presented with such code:
|
||||
D.123_1 = x < y;
|
||||
if (D.123_1 != 0)
|
||||
...
|
||||
This would expand to two comparisons which then later might
|
||||
be cleaned up by combine. But some pattern matchers like if-conversion
|
||||
work better when there's only one compare, so make up for this
|
||||
here as special exception if TER would have made the same change. */
|
||||
tree lhs = gimple_cond_lhs (stmt);
|
||||
if (SA.values
|
||||
&& TREE_CODE (lhs) == SSA_NAME
|
||||
&& bitmap_bit_p (SA.values, SSA_NAME_VERSION (lhs)))
|
||||
lhs = gimple_assign_rhs_to_tree (SSA_NAME_DEF_STMT (lhs));
|
||||
|
||||
return build2 (gimple_cond_code (stmt), boolean_type_node,
|
||||
lhs, gimple_cond_rhs (stmt));
|
||||
}
|
||||
|
||||
/* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression
|
||||
inside *TP. DATA is the location to set. */
|
||||
|
||||
static tree
|
||||
set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
|
||||
{
|
||||
location_t *loc = (location_t *) data;
|
||||
if (EXPR_P (*tp))
|
||||
SET_EXPR_LOCATION (*tp, *loc);
|
||||
|
||||
return NULL_TREE;
|
||||
}
|
||||
|
||||
|
||||
/* RTL expansion has traditionally been done on trees, so the
|
||||
transition to doing it on GIMPLE tuples is very invasive to the RTL
|
||||
expander. To facilitate the transition, this function takes a
|
||||
GIMPLE tuple STMT and returns the same statement in the form of a
|
||||
tree. */
|
||||
|
||||
static tree
|
||||
gimple_to_tree (gimple stmt)
|
||||
{
|
||||
tree t;
|
||||
int rn;
|
||||
tree_ann_common_t ann;
|
||||
location_t loc;
|
||||
|
||||
switch (gimple_code (stmt))
|
||||
{
|
||||
case GIMPLE_ASSIGN:
|
||||
{
|
||||
tree lhs = gimple_assign_lhs (stmt);
|
||||
|
||||
t = gimple_assign_rhs_to_tree (stmt);
|
||||
t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
|
||||
if (gimple_assign_nontemporal_move_p (stmt))
|
||||
MOVE_NONTEMPORAL (t) = true;
|
||||
}
|
||||
break;
|
||||
|
||||
case GIMPLE_COND:
|
||||
t = gimple_cond_pred_to_tree (stmt);
|
||||
t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
|
||||
break;
|
||||
|
||||
case GIMPLE_GOTO:
|
||||
t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
|
||||
break;
|
||||
|
||||
case GIMPLE_LABEL:
|
||||
t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
|
||||
break;
|
||||
|
||||
case GIMPLE_RETURN:
|
||||
{
|
||||
tree retval = gimple_return_retval (stmt);
|
||||
|
||||
if (retval && retval != error_mark_node)
|
||||
{
|
||||
tree result = DECL_RESULT (current_function_decl);
|
||||
|
||||
/* If we are not returning the current function's RESULT_DECL,
|
||||
build an assignment to it. */
|
||||
if (retval != result)
|
||||
{
|
||||
/* I believe that a function's RESULT_DECL is unique. */
|
||||
gcc_assert (TREE_CODE (retval) != RESULT_DECL);
|
||||
|
||||
retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
|
||||
result, retval);
|
||||
}
|
||||
}
|
||||
t = build1 (RETURN_EXPR, void_type_node, retval);
|
||||
}
|
||||
break;
|
||||
|
||||
case GIMPLE_ASM:
|
||||
{
|
||||
size_t i, n;
|
||||
tree out, in, cl;
|
||||
const char *s;
|
||||
|
||||
out = NULL_TREE;
|
||||
n = gimple_asm_noutputs (stmt);
|
||||
if (n > 0)
|
||||
{
|
||||
t = out = gimple_asm_output_op (stmt, 0);
|
||||
for (i = 1; i < n; i++)
|
||||
{
|
||||
TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
|
||||
t = gimple_asm_output_op (stmt, i);
|
||||
}
|
||||
}
|
||||
|
||||
in = NULL_TREE;
|
||||
n = gimple_asm_ninputs (stmt);
|
||||
if (n > 0)
|
||||
{
|
||||
t = in = gimple_asm_input_op (stmt, 0);
|
||||
for (i = 1; i < n; i++)
|
||||
{
|
||||
TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
|
||||
t = gimple_asm_input_op (stmt, i);
|
||||
}
|
||||
}
|
||||
|
||||
cl = NULL_TREE;
|
||||
n = gimple_asm_nclobbers (stmt);
|
||||
if (n > 0)
|
||||
{
|
||||
t = cl = gimple_asm_clobber_op (stmt, 0);
|
||||
for (i = 1; i < n; i++)
|
||||
{
|
||||
TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
|
||||
t = gimple_asm_clobber_op (stmt, i);
|
||||
}
|
||||
}
|
||||
|
||||
s = gimple_asm_string (stmt);
|
||||
t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
|
||||
out, in, cl);
|
||||
ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
|
||||
ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
|
||||
}
|
||||
break;
|
||||
|
||||
case GIMPLE_CALL:
|
||||
{
|
||||
size_t i;
|
||||
tree fn;
|
||||
tree_ann_common_t ann;
|
||||
|
||||
t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
|
||||
|
||||
CALL_EXPR_FN (t) = gimple_call_fn (stmt);
|
||||
TREE_TYPE (t) = gimple_call_return_type (stmt);
|
||||
CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);
|
||||
|
||||
for (i = 0; i < gimple_call_num_args (stmt); i++)
|
||||
CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);
|
||||
|
||||
if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
|
||||
TREE_SIDE_EFFECTS (t) = 1;
|
||||
|
||||
if (gimple_call_flags (stmt) & ECF_NOTHROW)
|
||||
TREE_NOTHROW (t) = 1;
|
||||
|
||||
CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
|
||||
CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
|
||||
CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
|
||||
CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
|
||||
CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);
|
||||
|
||||
/* If the call has a LHS then create a MODIFY_EXPR to hold it. */
|
||||
{
|
||||
tree lhs = gimple_call_lhs (stmt);
|
||||
|
||||
if (lhs)
|
||||
t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
|
||||
}
|
||||
|
||||
/* Record the original call statement, as it may be used
|
||||
to retrieve profile information during expansion. */
|
||||
|
||||
if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
|
||||
&& DECL_BUILT_IN (fn))
|
||||
{
|
||||
ann = get_tree_common_ann (t);
|
||||
ann->stmt = stmt;
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case GIMPLE_SWITCH:
|
||||
{
|
||||
tree label_vec;
|
||||
size_t i;
|
||||
tree elt = gimple_switch_label (stmt, 0);
|
||||
|
||||
label_vec = make_tree_vec (gimple_switch_num_labels (stmt));
|
||||
|
||||
if (!CASE_LOW (elt) && !CASE_HIGH (elt))
|
||||
{
|
||||
for (i = 1; i < gimple_switch_num_labels (stmt); i++)
|
||||
TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);
|
||||
|
||||
/* The default case in a SWITCH_EXPR must be at the end of
|
||||
the label vector. */
|
||||
TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
|
||||
}
|
||||
else
|
||||
{
|
||||
for (i = 0; i < gimple_switch_num_labels (stmt); i++)
|
||||
TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
|
||||
}
|
||||
|
||||
t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
|
||||
NULL, label_vec);
|
||||
}
|
||||
break;
|
||||
|
||||
case GIMPLE_NOP:
|
||||
case GIMPLE_PREDICT:
|
||||
t = build1 (NOP_EXPR, void_type_node, size_zero_node);
|
||||
break;
|
||||
|
||||
case GIMPLE_RESX:
|
||||
t = build_resx (gimple_resx_region (stmt));
|
||||
break;
|
||||
|
||||
default:
|
||||
if (errorcount == 0)
|
||||
{
|
||||
error ("Unrecognized GIMPLE statement during RTL expansion");
|
||||
print_gimple_stmt (stderr, stmt, 4, 0);
|
||||
gcc_unreachable ();
|
||||
}
|
||||
else
|
||||
{
|
||||
/* Ignore any bad gimple codes if we're going to die anyhow,
|
||||
so we can at least set TREE_ASM_WRITTEN and have the rest
|
||||
of compilation advance without sudden ICE death. */
|
||||
t = build1 (NOP_EXPR, void_type_node, size_zero_node);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/* If STMT is inside an exception region, record it in the generated
|
||||
expression. */
|
||||
rn = lookup_stmt_eh_region (stmt);
|
||||
if (rn >= 0)
|
||||
{
|
||||
tree call = get_call_expr_in (t);
|
||||
|
||||
ann = get_tree_common_ann (t);
|
||||
ann->rn = rn;
|
||||
|
||||
/* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
|
||||
the CALL_EXPR not the assignment statment for EH region number. */
|
||||
if (call && call != t)
|
||||
{
|
||||
ann = get_tree_common_ann (call);
|
||||
ann->rn = rn;
|
||||
}
|
||||
}
|
||||
|
||||
/* Set EXPR_LOCATION in all the embedded expressions. */
|
||||
loc = gimple_location (stmt);
|
||||
walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);
|
||||
|
||||
TREE_BLOCK (t) = gimple_block (stmt);
|
||||
|
||||
return t;
|
||||
}
|
||||
|
||||
|
||||
/* Release back to GC memory allocated by gimple_to_tree. */
|
||||
|
||||
static void
|
||||
release_stmt_tree (gimple stmt, tree stmt_tree)
|
||||
{
|
||||
tree_ann_common_t ann;
|
||||
|
||||
switch (gimple_code (stmt))
|
||||
{
|
||||
case GIMPLE_ASSIGN:
|
||||
if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
|
||||
ggc_free (TREE_OPERAND (stmt_tree, 1));
|
||||
break;
|
||||
case GIMPLE_COND:
|
||||
ggc_free (COND_EXPR_COND (stmt_tree));
|
||||
break;
|
||||
case GIMPLE_RETURN:
|
||||
if (TREE_OPERAND (stmt_tree, 0)
|
||||
&& TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
|
||||
ggc_free (TREE_OPERAND (stmt_tree, 0));
|
||||
break;
|
||||
case GIMPLE_CALL:
|
||||
if (gimple_call_lhs (stmt))
|
||||
{
|
||||
ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
|
||||
if (ann)
|
||||
ggc_free (ann);
|
||||
ggc_free (TREE_OPERAND (stmt_tree, 1));
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
ann = tree_common_ann (stmt_tree);
|
||||
if (ann)
|
||||
ggc_free (ann);
|
||||
ggc_free (stmt_tree);
|
||||
}
|
||||
|
||||
|
||||
/* Verify that there is exactly single jump instruction since last and attach
|
||||
REG_BR_PROB note specifying probability.
|
||||
|
@ -1935,7 +1615,6 @@ maybe_cleanup_end_of_block (edge e)
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
|
||||
Returns a new basic block if we've terminated the current basic
|
||||
block and created a new one. */
|
||||
|
@ -1947,8 +1626,36 @@ expand_gimple_cond (basic_block bb, gimple stmt)
|
|||
edge new_edge;
|
||||
edge true_edge;
|
||||
edge false_edge;
|
||||
tree pred = gimple_cond_pred_to_tree (stmt);
|
||||
rtx last2, last;
|
||||
enum tree_code code;
|
||||
tree op0, op1;
|
||||
|
||||
code = gimple_cond_code (stmt);
|
||||
op0 = gimple_cond_lhs (stmt);
|
||||
op1 = gimple_cond_rhs (stmt);
|
||||
/* We're sometimes presented with such code:
|
||||
D.123_1 = x < y;
|
||||
if (D.123_1 != 0)
|
||||
...
|
||||
This would expand to two comparisons which then later might
|
||||
be cleaned up by combine. But some pattern matchers like if-conversion
|
||||
work better when there's only one compare, so make up for this
|
||||
here as special exception if TER would have made the same change. */
|
||||
if (gimple_cond_single_var_p (stmt)
|
||||
&& SA.values
|
||||
&& TREE_CODE (op0) == SSA_NAME
|
||||
&& bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
|
||||
{
|
||||
gimple second = SSA_NAME_DEF_STMT (op0);
|
||||
if (gimple_code (second) == GIMPLE_ASSIGN
|
||||
&& TREE_CODE_CLASS (gimple_assign_rhs_code (second))
|
||||
== tcc_comparison)
|
||||
{
|
||||
code = gimple_assign_rhs_code (second);
|
||||
op0 = gimple_assign_rhs1 (second);
|
||||
op1 = gimple_assign_rhs2 (second);
|
||||
}
|
||||
}
|
||||
|
||||
last2 = last = get_last_insn ();
|
||||
|
||||
|
@ -1967,7 +1674,7 @@ expand_gimple_cond (basic_block bb, gimple stmt)
|
|||
two-way jump that needs to be decomposed into two basic blocks. */
|
||||
if (false_edge->dest == bb->next_bb)
|
||||
{
|
||||
jumpif (pred, label_rtx_for_bb (true_edge->dest));
|
||||
jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
|
||||
add_reg_br_prob_note (last, true_edge->probability);
|
||||
maybe_dump_rtl_for_gimple_stmt (stmt, last);
|
||||
if (true_edge->goto_locus)
|
||||
|
@ -1978,13 +1685,12 @@ expand_gimple_cond (basic_block bb, gimple stmt)
|
|||
}
|
||||
true_edge->goto_block = NULL;
|
||||
false_edge->flags |= EDGE_FALLTHRU;
|
||||
ggc_free (pred);
|
||||
maybe_cleanup_end_of_block (false_edge);
|
||||
return NULL;
|
||||
}
|
||||
if (true_edge->dest == bb->next_bb)
|
||||
{
|
||||
jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
|
||||
jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest));
|
||||
add_reg_br_prob_note (last, false_edge->probability);
|
||||
maybe_dump_rtl_for_gimple_stmt (stmt, last);
|
||||
if (false_edge->goto_locus)
|
||||
|
@ -1995,12 +1701,11 @@ expand_gimple_cond (basic_block bb, gimple stmt)
|
|||
}
|
||||
false_edge->goto_block = NULL;
|
||||
true_edge->flags |= EDGE_FALLTHRU;
|
||||
ggc_free (pred);
|
||||
maybe_cleanup_end_of_block (true_edge);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
jumpif (pred, label_rtx_for_bb (true_edge->dest));
|
||||
jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
|
||||
add_reg_br_prob_note (last, true_edge->probability);
|
||||
last = get_last_insn ();
|
||||
if (false_edge->goto_locus)
|
||||
|
@ -2040,10 +1745,275 @@ expand_gimple_cond (basic_block bb, gimple stmt)
|
|||
}
|
||||
true_edge->goto_block = NULL;
|
||||
|
||||
ggc_free (pred);
|
||||
return new_bb;
|
||||
}
|
||||
|
||||
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
|
||||
statement STMT. */
|
||||
|
||||
static void
|
||||
expand_call_stmt (gimple stmt)
|
||||
{
|
||||
tree exp;
|
||||
tree lhs = gimple_call_lhs (stmt);
|
||||
tree fndecl = gimple_call_fndecl (stmt);
|
||||
size_t i;
|
||||
|
||||
exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
|
||||
|
||||
CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
|
||||
TREE_TYPE (exp) = gimple_call_return_type (stmt);
|
||||
CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
|
||||
|
||||
for (i = 0; i < gimple_call_num_args (stmt); i++)
|
||||
CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i);
|
||||
|
||||
if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
|
||||
TREE_SIDE_EFFECTS (exp) = 1;
|
||||
|
||||
if (gimple_call_flags (stmt) & ECF_NOTHROW)
|
||||
TREE_NOTHROW (exp) = 1;
|
||||
|
||||
CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
|
||||
CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
|
||||
CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
|
||||
CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
|
||||
CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
|
||||
SET_EXPR_LOCATION (exp, gimple_location (stmt));
|
||||
TREE_BLOCK (exp) = gimple_block (stmt);
|
||||
|
||||
/* Record the original call statement, as it may be used
|
||||
to retrieve profile information during expansion. */
|
||||
|
||||
if (fndecl && DECL_BUILT_IN (fndecl))
|
||||
{
|
||||
tree_ann_common_t ann = get_tree_common_ann (exp);
|
||||
ann->stmt = stmt;
|
||||
}
|
||||
|
||||
if (lhs)
|
||||
expand_assignment (lhs, exp, false);
|
||||
else
|
||||
expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
|
||||
}
|
||||
|
||||
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
|
||||
STMT that doesn't require special handling for outgoing edges. That
|
||||
is no tailcalls and no GIMPLE_COND. */
|
||||
|
||||
static void
|
||||
expand_gimple_stmt_1 (gimple stmt)
|
||||
{
|
||||
tree op0;
|
||||
switch (gimple_code (stmt))
|
||||
{
|
||||
case GIMPLE_GOTO:
|
||||
op0 = gimple_goto_dest (stmt);
|
||||
if (TREE_CODE (op0) == LABEL_DECL)
|
||||
expand_goto (op0);
|
||||
else
|
||||
expand_computed_goto (op0);
|
||||
break;
|
||||
case GIMPLE_LABEL:
|
||||
expand_label (gimple_label_label (stmt));
|
||||
break;
|
||||
case GIMPLE_NOP:
|
||||
case GIMPLE_PREDICT:
|
||||
break;
|
||||
case GIMPLE_RESX:
|
||||
expand_resx_stmt (stmt);
|
||||
break;
|
||||
case GIMPLE_SWITCH:
|
||||
expand_case (stmt);
|
||||
break;
|
||||
case GIMPLE_ASM:
|
||||
expand_asm_stmt (stmt);
|
||||
break;
|
||||
case GIMPLE_CALL:
|
||||
expand_call_stmt (stmt);
|
||||
break;
|
||||
|
||||
case GIMPLE_RETURN:
|
||||
op0 = gimple_return_retval (stmt);
|
||||
|
||||
if (op0 && op0 != error_mark_node)
|
||||
{
|
||||
tree result = DECL_RESULT (current_function_decl);
|
||||
|
||||
/* If we are not returning the current function's RESULT_DECL,
|
||||
build an assignment to it. */
|
||||
if (op0 != result)
|
||||
{
|
||||
/* I believe that a function's RESULT_DECL is unique. */
|
||||
gcc_assert (TREE_CODE (op0) != RESULT_DECL);
|
||||
|
||||
/* ??? We'd like to use simply expand_assignment here,
|
||||
but this fails if the value is of BLKmode but the return
|
||||
decl is a register. expand_return has special handling
|
||||
for this combination, which eventually should move
|
||||
to common code. See comments there. Until then, let's
|
||||
build a modify expression :-/ */
|
||||
op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
|
||||
result, op0);
|
||||
}
|
||||
}
|
||||
if (!op0)
|
||||
expand_null_return ();
|
||||
else
|
||||
expand_return (op0);
|
||||
break;
|
||||
|
||||
case GIMPLE_ASSIGN:
|
||||
{
|
||||
tree lhs = gimple_assign_lhs (stmt);
|
||||
|
||||
/* Tree expand used to fiddle with |= and &= of two bitfield
|
||||
COMPONENT_REFs here. This can't happen with gimple, the LHS
|
||||
of binary assigns must be a gimple reg. */
|
||||
|
||||
if (TREE_CODE (lhs) != SSA_NAME
|
||||
|| get_gimple_rhs_class (gimple_expr_code (stmt))
|
||||
== GIMPLE_SINGLE_RHS)
|
||||
{
|
||||
tree rhs = gimple_assign_rhs1 (stmt);
|
||||
gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
|
||||
== GIMPLE_SINGLE_RHS);
|
||||
if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
|
||||
SET_EXPR_LOCATION (rhs, gimple_location (stmt));
|
||||
expand_assignment (lhs, rhs,
|
||||
gimple_assign_nontemporal_move_p (stmt));
|
||||
}
|
||||
else
|
||||
{
|
||||
rtx target, temp;
|
||||
bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
|
||||
struct separate_ops ops;
|
||||
bool promoted = false;
|
||||
|
||||
target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
|
||||
if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
|
||||
promoted = true;
|
||||
|
||||
ops.code = gimple_assign_rhs_code (stmt);
|
||||
ops.type = TREE_TYPE (lhs);
|
||||
switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
|
||||
{
|
||||
case GIMPLE_BINARY_RHS:
|
||||
ops.op1 = gimple_assign_rhs2 (stmt);
|
||||
/* Fallthru */
|
||||
case GIMPLE_UNARY_RHS:
|
||||
ops.op0 = gimple_assign_rhs1 (stmt);
|
||||
break;
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
}
|
||||
ops.location = gimple_location (stmt);
|
||||
|
||||
/* If we want to use a nontemporal store, force the value to
|
||||
register first. If we store into a promoted register,
|
||||
don't directly expand to target. */
|
||||
temp = nontemporal || promoted ? NULL_RTX : target;
|
||||
temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
|
||||
EXPAND_NORMAL);
|
||||
|
||||
if (temp == target)
|
||||
;
|
||||
else if (promoted)
|
||||
{
|
||||
bool unsigndp = SUBREG_PROMOTED_UNSIGNED_P (target);
|
||||
/* If TEMP is a VOIDmode constant, use convert_modes to make
|
||||
sure that we properly convert it. */
|
||||
if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
|
||||
{
|
||||
temp = convert_modes (GET_MODE (target),
|
||||
TYPE_MODE (ops.type),
|
||||
temp, unsigndp);
|
||||
temp = convert_modes (GET_MODE (SUBREG_REG (target)),
|
||||
GET_MODE (target), temp, unsigndp);
|
||||
}
|
||||
|
||||
convert_move (SUBREG_REG (target), temp, unsigndp);
|
||||
}
|
||||
else if (nontemporal && emit_storent_insn (target, temp))
|
||||
;
|
||||
else
|
||||
{
|
||||
temp = force_operand (temp, target);
|
||||
if (temp != target)
|
||||
emit_move_insn (target, temp);
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
}
|
||||
}
|
||||
|
||||
/* Expand one gimple statement STMT and return the last RTL instruction
|
||||
before any of the newly generated ones.
|
||||
|
||||
In addition to generating the necessary RTL instructions this also
|
||||
sets REG_EH_REGION notes if necessary and sets the current source
|
||||
location for diagnostics. */
|
||||
|
||||
static rtx
|
||||
expand_gimple_stmt (gimple stmt)
|
||||
{
|
||||
int rn = -1;
|
||||
rtx last = NULL;
|
||||
location_t saved_location = input_location;
|
||||
|
||||
last = get_last_insn ();
|
||||
|
||||
/* If this is an expression of some kind and it has an associated line
|
||||
number, then emit the line number before expanding the expression.
|
||||
|
||||
We need to save and restore the file and line information so that
|
||||
errors discovered during expansion are emitted with the right
|
||||
information. It would be better of the diagnostic routines
|
||||
used the file/line information embedded in the tree nodes rather
|
||||
than globals. */
|
||||
gcc_assert (cfun);
|
||||
|
||||
if (gimple_has_location (stmt))
|
||||
{
|
||||
input_location = gimple_location (stmt);
|
||||
set_curr_insn_source_location (input_location);
|
||||
|
||||
/* Record where the insns produced belong. */
|
||||
set_curr_insn_block (gimple_block (stmt));
|
||||
}
|
||||
|
||||
expand_gimple_stmt_1 (stmt);
|
||||
/* Free any temporaries used to evaluate this statement. */
|
||||
free_temp_slots ();
|
||||
|
||||
input_location = saved_location;
|
||||
|
||||
/* Mark all insns that may trap. */
|
||||
rn = lookup_stmt_eh_region (stmt);
|
||||
if (rn >= 0)
|
||||
{
|
||||
rtx insn;
|
||||
for (insn = next_real_insn (last); insn;
|
||||
insn = next_real_insn (insn))
|
||||
{
|
||||
if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
|
||||
/* If we want exceptions for non-call insns, any
|
||||
may_trap_p instruction may throw. */
|
||||
&& GET_CODE (PATTERN (insn)) != CLOBBER
|
||||
&& GET_CODE (PATTERN (insn)) != USE
|
||||
&& (CALL_P (insn)
|
||||
|| (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))))
|
||||
add_reg_note (insn, REG_EH_REGION, GEN_INT (rn));
|
||||
}
|
||||
}
|
||||
|
||||
return last;
|
||||
}
|
||||
|
||||
/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
|
||||
that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
|
||||
generated a tail call (something that might be denied by the ABI
|
||||
|
@ -2062,13 +2032,8 @@ expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
|
|||
edge_iterator ei;
|
||||
int probability;
|
||||
gcov_type count;
|
||||
tree stmt_tree = gimple_to_tree (stmt);
|
||||
|
||||
last2 = last = get_last_insn ();
|
||||
|
||||
expand_expr_stmt (stmt_tree);
|
||||
|
||||
release_stmt_tree (stmt, stmt_tree);
|
||||
last2 = last = expand_gimple_stmt (stmt);
|
||||
|
||||
for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
|
||||
if (CALL_P (last) && SIBLING_CALL_P (last))
|
||||
|
@ -3018,9 +2983,7 @@ expand_gimple_basic_block (basic_block bb)
|
|||
|
||||
if (stmt)
|
||||
{
|
||||
tree stmt_tree = gimple_to_tree (stmt);
|
||||
expand_expr_stmt (stmt_tree);
|
||||
release_stmt_tree (stmt, stmt_tree);
|
||||
expand_gimple_stmt (stmt);
|
||||
gsi_next (&gsi);
|
||||
}
|
||||
|
||||
|
@ -3126,7 +3089,6 @@ expand_gimple_basic_block (basic_block bb)
|
|||
else
|
||||
{
|
||||
def_operand_p def_p;
|
||||
tree stmt_tree;
|
||||
def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
|
||||
|
||||
if (def_p != NULL)
|
||||
|
@ -3138,11 +3100,8 @@ expand_gimple_basic_block (basic_block bb)
|
|||
SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
|
||||
continue;
|
||||
}
|
||||
stmt_tree = gimple_to_tree (stmt);
|
||||
last = get_last_insn ();
|
||||
expand_expr_stmt (stmt_tree);
|
||||
last = expand_gimple_stmt (stmt);
|
||||
maybe_dump_rtl_for_gimple_stmt (stmt, last);
|
||||
release_stmt_tree (stmt, stmt_tree);
|
||||
}
|
||||
}
|
||||
}
gcc/except.c
@@ -437,9 +437,9 @@ set_eh_region_tree_label (struct eh_region_d *region, tree lab)
}

void
expand_resx_expr (tree exp)
expand_resx_stmt (gimple stmt)
{
int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
int region_nr = gimple_resx_region (stmt);
rtx insn;
struct eh_region_d *reg = VEC_index (eh_region,
cfun->eh->region_array, region_nr);

gcc/except.h
@@ -198,7 +198,7 @@ extern void foreach_reachable_handler (int, bool, bool,
void *);

extern void collect_eh_region_array (void);
extern void expand_resx_expr (tree);
extern void expand_resx_stmt (gimple);
extern void verify_eh_tree (struct function *);
extern void dump_eh_tree (FILE *, struct function *);
void debug_eh_tree (struct function *);
gcc/expr.c (67 changed lines)
@ -4435,7 +4435,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
|
|||
/* Emits nontemporal store insn that moves FROM to TO. Returns true if this
|
||||
succeeded, false otherwise. */
|
||||
|
||||
static bool
|
||||
bool
|
||||
emit_storent_insn (rtx to, rtx from)
|
||||
{
|
||||
enum machine_mode mode = GET_MODE (to), imode;
|
||||
|
@ -4485,7 +4485,6 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
|
|||
{
|
||||
rtx temp;
|
||||
rtx alt_rtl = NULL_RTX;
|
||||
int dont_return_target = 0;
|
||||
location_t loc = EXPR_LOCATION (exp);
|
||||
|
||||
if (VOID_TYPE_P (TREE_TYPE (exp)))
|
||||
|
@ -4646,19 +4645,6 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
|
|||
(call_param_p
|
||||
? EXPAND_STACK_PARM : EXPAND_NORMAL),
|
||||
&alt_rtl);
|
||||
/* Return TARGET if it's a specified hardware register.
|
||||
If TARGET is a volatile mem ref, either return TARGET
|
||||
or return a reg copied *from* TARGET; ANSI requires this.
|
||||
|
||||
Otherwise, if TEMP is not TARGET, return TEMP
|
||||
if it is constant (for efficiency),
|
||||
or if we really want the correct value. */
|
||||
if (!(target && REG_P (target)
|
||||
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
|
||||
&& !(MEM_P (target) && MEM_VOLATILE_P (target))
|
||||
&& ! rtx_equal_p (temp, target)
|
||||
&& CONSTANT_P (temp))
|
||||
dont_return_target = 1;
|
||||
}
|
||||
|
||||
/* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
|
||||
|
@ -4707,15 +4693,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
|
|||
&& GET_MODE (temp) != VOIDmode)
|
||||
{
|
||||
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
|
||||
if (dont_return_target)
|
||||
{
|
||||
/* In this case, we will return TEMP,
|
||||
so make sure it has the proper mode.
|
||||
But don't forget to store the value into TARGET. */
|
||||
temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
|
||||
emit_move_insn (target, temp);
|
||||
}
|
||||
else if (GET_MODE (target) == BLKmode
|
||||
if (GET_MODE (target) == BLKmode
|
||||
|| GET_MODE (temp) == BLKmode)
|
||||
emit_block_move (target, temp, expr_size (exp),
|
||||
(call_param_p
|
||||
|
@ -7128,9 +7106,6 @@ expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
|
|||
COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
|
||||
recursively. */
|
||||
|
||||
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
|
||||
enum expand_modifier, rtx *);
|
||||
|
||||
rtx
|
||||
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
|
||||
enum expand_modifier modifier, rtx *alt_rtl)
|
||||
|
@ -7203,7 +7178,7 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
|
|||
return ret;
|
||||
}
|
||||
|
||||
static rtx
|
||||
rtx
|
||||
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
|
||||
enum expand_modifier modifier)
|
||||
{
|
||||
|
@ -8251,7 +8226,7 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
|
|||
}
|
||||
#undef REDUCE_BIT_FIELD
|
||||
|
||||
static rtx
|
||||
rtx
|
||||
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
||||
enum expand_modifier modifier, rtx *alt_rtl)
|
||||
{
|
||||
|
@ -8611,12 +8586,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
|||
return ret;
|
||||
}
|
||||
|
||||
case GOTO_EXPR:
|
||||
if (TREE_CODE (treeop0) == LABEL_DECL)
|
||||
expand_goto (treeop0);
|
||||
else
|
||||
expand_computed_goto (treeop0);
|
||||
return const0_rtx;
|
||||
|
||||
case CONSTRUCTOR:
|
||||
/* If we don't need the result, just ensure we evaluate any
|
||||
|
@ -9505,13 +9474,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
|||
return const0_rtx;
|
||||
}
|
||||
|
||||
case RETURN_EXPR:
|
||||
if (!treeop0)
|
||||
expand_null_return ();
|
||||
else
|
||||
expand_return (treeop0);
|
||||
return const0_rtx;
|
||||
|
||||
case ADDR_EXPR:
|
||||
return expand_expr_addr_expr (exp, target, tmode, modifier);
|
||||
|
||||
|
@ -9523,9 +9485,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
|||
op0 = expand_normal (treeop0);
|
||||
return read_complex_part (op0, true);
|
||||
|
||||
case RETURN_EXPR:
|
||||
case LABEL_EXPR:
|
||||
case GOTO_EXPR:
|
||||
case SWITCH_EXPR:
|
||||
case ASM_EXPR:
|
||||
case RESX_EXPR:
|
||||
expand_resx_expr (exp);
|
||||
return const0_rtx;
|
||||
/* Expanded in cfgexpand.c. */
|
||||
gcc_unreachable ();
|
||||
|
||||
case TRY_CATCH_EXPR:
|
||||
case CATCH_EXPR:
|
||||
|
@ -9563,18 +9530,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
|||
initialization constants, and should not be expanded. */
|
||||
gcc_unreachable ();
|
||||
|
||||
case SWITCH_EXPR:
|
||||
expand_case (exp);
|
||||
return const0_rtx;
|
||||
|
||||
case LABEL_EXPR:
|
||||
expand_label (treeop0);
|
||||
return const0_rtx;
|
||||
|
||||
case ASM_EXPR:
|
||||
expand_asm_expr (exp);
|
||||
return const0_rtx;
|
||||
|
||||
case WITH_SIZE_EXPR:
|
||||
/* WITH_SIZE_EXPR expands to its first argument. The caller should
|
||||
have pulled out the size to use in whatever context it needed. */
gcc/expr.h
@@ -426,6 +426,7 @@ extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
extern rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
unsigned int, HOST_WIDE_INT);
extern bool emit_storent_insn (rtx to, rtx from);

/* Copy all or part of a value X into registers starting at REGNO.
The number of registers to be filled is NREGS.  */

@@ -539,9 +540,13 @@ extern rtx store_expr (tree, rtx, int, bool);
Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);

/* Work horse for expand_expr.  */
/* Work horses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, enum machine_mode,
enum expand_modifier, rtx *);
extern rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
enum expand_modifier, rtx *);
extern rtx expand_expr_real_2 (sepops, rtx, enum machine_mode,
enum expand_modifier);

/* Generate code for computing expression EXP.
An rtx for the computed value is returned.  The value is never null.
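expand_expr_real_1 and expand_expr_real_2 are exported so that cfgexpand.c can
expand the right-hand side of a gimple assignment without rebuilding a GENERIC
tree; the operation is described by a struct separate_ops instead.  A sketch
of the caller pattern, condensed from the new GIMPLE_ASSIGN handling in
cfgexpand.c above:

    struct separate_ops ops;

    ops.code = gimple_assign_rhs_code (stmt);  /* e.g. PLUS_EXPR     */
    ops.type = TREE_TYPE (gimple_assign_lhs (stmt));
    ops.op0 = gimple_assign_rhs1 (stmt);
    ops.op1 = gimple_assign_rhs2 (stmt);       /* binary codes only  */
    ops.location = gimple_location (stmt);

    temp = expand_expr_real_2 (&ops, target, GET_MODE (target),
                               EXPAND_NORMAL);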
gcc/stmt.c (90 changed lines)
@ -48,6 +48,7 @@ along with GCC; see the file COPYING3. If not see
|
|||
#include "predict.h"
|
||||
#include "optabs.h"
|
||||
#include "target.h"
|
||||
#include "gimple.h"
|
||||
#include "regs.h"
|
||||
#include "alloc-pool.h"
|
||||
#include "pretty-print.h"
|
||||
|
@ -1075,20 +1076,65 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
|
|||
}
|
||||
|
||||
void
|
||||
expand_asm_expr (tree exp)
|
||||
expand_asm_stmt (gimple stmt)
|
||||
{
|
||||
int noutputs, i;
|
||||
tree outputs, tail;
|
||||
int noutputs;
|
||||
tree outputs, tail, t;
|
||||
tree *o;
|
||||
size_t i, n;
|
||||
const char *s;
|
||||
tree str, out, in, cl;
|
||||
|
||||
if (ASM_INPUT_P (exp))
|
||||
/* Meh... convert the gimple asm operands into real tree lists.
|
||||
Eventually we should make all routines work on the vectors instead
|
||||
of relying on TREE_CHAIN. */
|
||||
out = NULL_TREE;
|
||||
n = gimple_asm_noutputs (stmt);
|
||||
if (n > 0)
|
||||
{
|
||||
expand_asm_loc (ASM_STRING (exp), ASM_VOLATILE_P (exp), input_location);
|
||||
t = out = gimple_asm_output_op (stmt, 0);
|
||||
for (i = 1; i < n; i++)
|
||||
{
|
||||
TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
|
||||
t = gimple_asm_output_op (stmt, i);
|
||||
}
|
||||
}
|
||||
|
||||
in = NULL_TREE;
|
||||
n = gimple_asm_ninputs (stmt);
|
||||
if (n > 0)
|
||||
{
|
||||
t = in = gimple_asm_input_op (stmt, 0);
|
||||
for (i = 1; i < n; i++)
|
||||
{
|
||||
TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
|
||||
t = gimple_asm_input_op (stmt, i);
|
||||
}
|
||||
}
|
||||
|
||||
cl = NULL_TREE;
|
||||
n = gimple_asm_nclobbers (stmt);
|
||||
if (n > 0)
|
||||
{
|
||||
t = cl = gimple_asm_clobber_op (stmt, 0);
|
||||
for (i = 1; i < n; i++)
|
||||
{
|
||||
TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
|
||||
t = gimple_asm_clobber_op (stmt, i);
|
||||
}
|
||||
}
|
||||
|
||||
s = gimple_asm_string (stmt);
|
||||
str = build_string (strlen (s), s);
|
||||
|
||||
if (gimple_asm_input_p (stmt))
|
||||
{
|
||||
expand_asm_loc (str, gimple_asm_volatile_p (stmt), input_location);
|
||||
return;
|
||||
}
|
||||
|
||||
outputs = ASM_OUTPUTS (exp);
|
||||
noutputs = list_length (outputs);
|
||||
outputs = out;
|
||||
noutputs = gimple_asm_noutputs (stmt);
|
||||
/* o[I] is the place that output number I should be written. */
|
||||
o = (tree *) alloca (noutputs * sizeof (tree));
|
||||
|
||||
|
@ -1098,8 +1144,7 @@ expand_asm_expr (tree exp)
|
|||
|
||||
/* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
|
||||
OUTPUTS some trees for where the values were actually stored. */
|
||||
expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
|
||||
ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
|
||||
expand_asm_operands (str, outputs, in, cl, gimple_asm_volatile_p (stmt),
|
||||
input_location);
|
||||
|
||||
/* Copy all the intermediate outputs into the specified outputs. */
|
||||
|
@ -2154,7 +2199,7 @@ emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
|
|||
Generate the code to test it and jump to the right place. */
|
||||
|
||||
void
|
||||
expand_case (tree exp)
|
||||
expand_case (gimple stmt)
|
||||
{
|
||||
tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
|
||||
rtx default_label = 0;
|
||||
|
@ -2167,9 +2212,7 @@ expand_case (tree exp)
|
|||
int i;
|
||||
rtx before_case, end, lab;
|
||||
|
||||
tree vec = SWITCH_LABELS (exp);
|
||||
tree orig_type = TREE_TYPE (exp);
|
||||
tree index_expr = SWITCH_COND (exp);
|
||||
tree index_expr = gimple_switch_index (stmt);
|
||||
tree index_type = TREE_TYPE (index_expr);
|
||||
int unsignedp = TYPE_UNSIGNED (index_type);
|
||||
|
||||
|
@ -2188,11 +2231,6 @@ expand_case (tree exp)
|
|||
sizeof (struct case_node),
|
||||
100);
|
||||
|
||||
/* The switch body is lowered in gimplify.c, we should never have
|
||||
switches with a non-NULL SWITCH_BODY here. */
|
||||
gcc_assert (!SWITCH_BODY (exp));
|
||||
gcc_assert (SWITCH_LABELS (exp));
|
||||
|
||||
do_pending_stack_adjust ();
|
||||
|
||||
/* An ERROR_MARK occurs for various reasons including invalid data type. */
|
||||
|
@ -2200,24 +2238,24 @@ expand_case (tree exp)
|
|||
{
|
||||
tree elt;
|
||||
bitmap label_bitmap;
|
||||
int vl = TREE_VEC_LENGTH (vec);
|
||||
int stopi = 0;
|
||||
|
||||
/* cleanup_tree_cfg removes all SWITCH_EXPR with their index
|
||||
expressions being INTEGER_CST. */
|
||||
gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
|
||||
|
||||
/* The default case, if ever taken, is at the end of TREE_VEC. */
|
||||
elt = TREE_VEC_ELT (vec, vl - 1);
|
||||
/* The default case, if ever taken, is the first element. */
|
||||
elt = gimple_switch_label (stmt, 0);
|
||||
if (!CASE_LOW (elt) && !CASE_HIGH (elt))
|
||||
{
|
||||
default_label_decl = CASE_LABEL (elt);
|
||||
--vl;
|
||||
stopi = 1;
|
||||
}
|
||||
|
||||
for (i = vl - 1; i >= 0; --i)
|
||||
for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
|
||||
{
|
||||
tree low, high;
|
||||
elt = TREE_VEC_ELT (vec, i);
|
||||
elt = gimple_switch_label (stmt, i);
|
||||
|
||||
low = CASE_LOW (elt);
|
||||
gcc_assert (low);
|
||||
|
@ -2371,9 +2409,7 @@ expand_case (tree exp)
|
|||
decision tree an unconditional jump to the
|
||||
default code is emitted. */
|
||||
|
||||
use_cost_table
|
||||
= (TREE_CODE (orig_type) != ENUMERAL_TYPE
|
||||
&& estimate_case_costs (case_list));
|
||||
use_cost_table = estimate_case_costs (case_list);
|
||||
balance_case_nodes (&case_list, NULL);
|
||||
emit_case_nodes (index, case_list, default_label, index_type);
|
||||
if (default_label)
|
||||
gcc/tree.h
@@ -5054,9 +5054,9 @@ extern bool parse_output_constraint (const char **, int, int, int,
bool *, bool *, bool *);
extern bool parse_input_constraint (const char **, int, int, int, int,
const char * const *, bool *, bool *);
extern void expand_asm_expr (tree);
extern void expand_asm_stmt (gimple);
extern tree resolve_asm_operand_names (tree, tree, tree);
extern void expand_case (tree);
extern void expand_case (gimple);
extern void expand_decl (tree);
#ifdef HARD_CONST
/* Silly ifdef to avoid having all includers depend on hard-reg-set.h.  */