basic-block.h: Remove the prototypes for can_hoist_insn_p...
	* basic-block.h: Remove the prototypes for can_hoist_insn_p,
	hoist_insn_after, and hoist_insn_to_edge.
	* rtl.h: Remove the prototypes for reg_referenced_between_p,
	no_jumps_between_p, and insn_dependent_p.
	* rtlanal.c (no_jumps_between_p, reg_referenced_between_p,
	insn_dependent_p, insn_dependent_p_1, hoist_test_store,
	can_hoist_insn_p, hoist_update_store, hoist_insn_after,
	hoist_insn_to_edge): Remove.

From-SVN: r88860

parent 382c6e2df8
commit 9b1f72c413
4 changed files with 11 additions and 332 deletions
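Note for any out-of-tree code that still calls one of the removed predicates: the simplest fix is to open-code the helper locally. A minimal sketch, assuming only NEXT_INSN and JUMP_P; the body mirrors the deleted rtlanal.c definition shown in the diff below, and the local_ name is illustrative:

/* Local stand-in for the removed no_jumps_between_p: return 1 if there
   is no JUMP_INSN strictly between BEG and END.  */
static int
local_no_jumps_between_p (rtx beg, rtx end)
{
  rtx p;

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (JUMP_P (p))
      return 0;
  return 1;
}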
gcc/ChangeLog
@@ -1,3 +1,14 @@
+2004-10-10  Kazu Hirata  <kazu@cs.umass.edu>
+
+	* basic-block.h: Remove the prototypes for can_hoist_insn_p,
+	hoist_insn_after, and hoist_insn_to_edge.
+	* rtl.h: Remove the prototypes for reg_referenced_between_p,
+	no_jumps_between_p, and insn_dependent_p.
+	* rtlanal.c (no_jumps_between_p, reg_referenced_between_p,
+	insn_dependent_p, insn_dependent_p_1, hoist_test_store,
+	can_hoist_insn_p, hoist_update_store, hoist_insn_after,
+	hoist_insn_to_edge): Remove.
+
 2004-10-10  Joseph S. Myers  <jsm@polyomino.org.uk>
 
 	PR c/17881
gcc/basic-block.h
@@ -815,9 +815,6 @@ extern bool mark_dfs_back_edges (void);
 extern void set_edge_can_fallthru_flag (void);
 extern void update_br_prob_note (basic_block);
 extern void fixup_abnormal_edges (void);
-extern bool can_hoist_insn_p (rtx, rtx, regset);
-extern rtx hoist_insn_after (rtx, rtx, rtx, rtx);
-extern rtx hoist_insn_to_edge (rtx, edge, rtx, rtx);
 extern bool inside_basic_block_p (rtx);
 extern bool control_flow_insn_p (rtx);
 
gcc/rtl.h
@@ -1607,16 +1607,13 @@ extern int reg_mentioned_p (rtx, rtx);
 extern int count_occurrences (rtx, rtx, int);
 extern int reg_referenced_p (rtx, rtx);
 extern int reg_used_between_p (rtx, rtx, rtx);
-extern int reg_referenced_between_p (rtx, rtx, rtx);
 extern int reg_set_between_p (rtx, rtx, rtx);
 extern int regs_set_between_p (rtx, rtx, rtx);
 extern int commutative_operand_precedence (rtx);
 extern int swap_commutative_operands_p (rtx, rtx);
 extern int modified_between_p (rtx, rtx, rtx);
 extern int no_labels_between_p (rtx, rtx);
-extern int no_jumps_between_p (rtx, rtx);
 extern int modified_in_p (rtx, rtx);
-extern int insn_dependent_p (rtx, rtx);
 extern int reg_set_p (rtx, rtx);
 extern rtx single_set_2 (rtx, rtx);
 extern int multiple_sets (rtx);
gcc/rtlanal.c
@@ -41,12 +41,9 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 /* Forward declarations */
 static int global_reg_mentioned_p_1 (rtx *, void *);
 static void set_of_1 (rtx, rtx, void *);
-static void insn_dependent_p_1 (rtx, rtx, void *);
 static int rtx_referenced_p_1 (rtx *, void *);
 static int computed_jump_p_1 (rtx);
 static void parms_set (rtx, rtx, void *);
-static bool hoist_test_store (rtx, rtx, regset);
-static void hoist_update_store (rtx, rtx *, rtx, rtx);
 
 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                    rtx, enum machine_mode,
@@ -650,19 +647,6 @@ no_labels_between_p (rtx beg, rtx end)
   return 1;
 }
 
-/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
-   no JUMP_INSN insn.  */
-
-int
-no_jumps_between_p (rtx beg, rtx end)
-{
-  rtx p;
-  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
-    if (JUMP_P (p))
-      return 0;
-  return 1;
-}
-
 /* Nonzero if register REG is used in an insn between
    FROM_INSN and TO_INSN (exclusive of those two).  */
 
@@ -760,27 +744,6 @@ reg_referenced_p (rtx x, rtx body)
       return 0;
     }
 }
 
-/* Nonzero if register REG is referenced in an insn between
-   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
-   not count.  */
-
-int
-reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
-{
-  rtx insn;
-
-  if (from_insn == to_insn)
-    return 0;
-
-  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
-    if (INSN_P (insn)
-        && (reg_referenced_p (reg, PATTERN (insn))
-            || (CALL_P (insn)
-                && find_reg_fusage (insn, USE, reg))))
-      return 1;
-  return 0;
-}
-
 /* Nonzero if register REG is set or clobbered in an insn between
    FROM_INSN and TO_INSN (exclusive of those two).  */
@@ -982,41 +945,6 @@ modified_in_p (rtx x, rtx insn)
 
   return 0;
 }
 
-/* Return true if anything in insn X is (anti,output,true) dependent on
-   anything in insn Y.  */
-
-int
-insn_dependent_p (rtx x, rtx y)
-{
-  rtx tmp;
-
-  gcc_assert (INSN_P (x));
-  gcc_assert (INSN_P (y));
-
-  tmp = PATTERN (y);
-  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
-  if (tmp == NULL_RTX)
-    return 1;
-
-  tmp = PATTERN (x);
-  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
-  if (tmp == NULL_RTX)
-    return 1;
-
-  return 0;
-}
-
-/* A helper routine for insn_dependent_p called through note_stores.  */
-
-static void
-insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
-{
-  rtx * pinsn = (rtx *) data;
-
-  if (*pinsn && reg_mentioned_p (x, *pinsn))
-    *pinsn = NULL_RTX;
-}
-
 /* Helper function for set_of.  */
 struct set_of_data
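The removed insn_dependent_p above also illustrates the usual note_stores callback idiom in rtlanal.c. A minimal sketch of one direction of that check, assuming only note_stores, PATTERN, and reg_mentioned_p; the store_hits_pattern_1 and stores_feed_insn_p names are illustrative, not part of the tree:

/* note_stores callback: clear *(rtx *) DATA when the stored location X
   is mentioned in the pattern being tracked.  */
static void
store_hits_pattern_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx *ppat = (rtx *) data;

  if (*ppat && reg_mentioned_p (x, *ppat))
    *ppat = NULL_RTX;
}

/* Return nonzero if some location stored by insn X is mentioned in
   insn Y, i.e. one half of the removed insn_dependent_p check.  */
static int
stores_feed_insn_p (rtx x, rtx y)
{
  rtx tmp = PATTERN (y);

  note_stores (PATTERN (x), store_hits_pattern_1, &tmp);
  return tmp == NULL_RTX;
}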
@@ -3303,260 +3231,6 @@ keep_with_call_p (rtx insn)
   return false;
 }
 
-/* Return true when store to register X can be hoisted to the place
-   with LIVE registers (can be NULL).  Value VAL contains destination
-   whose value will be used.  */
-
-static bool
-hoist_test_store (rtx x, rtx val, regset live)
-{
-  if (GET_CODE (x) == SCRATCH)
-    return true;
-
-  if (rtx_equal_p (x, val))
-    return true;
-
-  /* Allow subreg of X in case it is not writing just part of multireg pseudo.
-     Then we would need to update all users to care hoisting the store too.
-     Caller may represent that by specifying whole subreg as val.  */
-
-  if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
-    {
-      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
-          && GET_MODE_BITSIZE (GET_MODE (x)) <
-          GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
-        return false;
-      return true;
-    }
-  if (GET_CODE (x) == SUBREG)
-    x = SUBREG_REG (x);
-
-  /* Anything except register store is not hoistable.  This includes the
-     partial stores to registers.  */
-
-  if (!REG_P (x))
-    return false;
-
-  /* Pseudo registers can be always replaced by another pseudo to avoid
-     the side effect, for hard register we must ensure that they are dead.
-     Eventually we may want to add code to try turn pseudos to hards, but it
-     is unlikely useful.  */
-
-  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
-    {
-      int regno = REGNO (x);
-      int n = hard_regno_nregs[regno][GET_MODE (x)];
-
-      if (!live)
-        return false;
-      if (REGNO_REG_SET_P (live, regno))
-        return false;
-      while (--n > 0)
-        if (REGNO_REG_SET_P (live, regno + n))
-          return false;
-    }
-  return true;
-}
-
-
-/* Return true if INSN can be hoisted to place with LIVE hard registers
-   (LIVE can be NULL when unknown).  VAL is expected to be stored by the insn
-   and used by the hoisting pass.  */
-
-bool
-can_hoist_insn_p (rtx insn, rtx val, regset live)
-{
-  rtx pat = PATTERN (insn);
-  int i;
-
-  /* It probably does not worth the complexity to handle multiple
-     set stores.  */
-  if (!single_set (insn))
-    return false;
-  /* We can move CALL_INSN, but we need to check that all caller clobbered
-     regs are dead.  */
-  if (CALL_P (insn))
-    return false;
-  /* In future we will handle hoisting of libcall sequences, but
-     give up for now.  */
-  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
-    return false;
-  switch (GET_CODE (pat))
-    {
-    case SET:
-      if (!hoist_test_store (SET_DEST (pat), val, live))
-        return false;
-      break;
-    case USE:
-      /* USES do have sick semantics, so do not move them.  */
-      return false;
-      break;
-    case CLOBBER:
-      if (!hoist_test_store (XEXP (pat, 0), val, live))
-        return false;
-      break;
-    case PARALLEL:
-      for (i = 0; i < XVECLEN (pat, 0); i++)
-        {
-          rtx x = XVECEXP (pat, 0, i);
-          switch (GET_CODE (x))
-            {
-            case SET:
-              if (!hoist_test_store (SET_DEST (x), val, live))
-                return false;
-              break;
-            case USE:
-              /* We need to fix callers to really ensure availability
-                 of all values insn uses, but for now it is safe to prohibit
-                 hoisting of any insn having such a hidden uses.  */
-              return false;
-              break;
-            case CLOBBER:
-              if (!hoist_test_store (SET_DEST (x), val, live))
-                return false;
-              break;
-            default:
-              break;
-            }
-        }
-      break;
-    default:
-      gcc_unreachable ();
-    }
-  return true;
-}
-
-/* Update store after hoisting - replace all stores to pseudo registers
-   by new ones to avoid clobbering of values except for store to VAL that will
-   be updated to NEW.  */
-
-static void
-hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
-{
-  rtx x = *xp;
-
-  if (GET_CODE (x) == SCRATCH)
-    return;
-
-  if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
-    validate_change (insn, xp,
-                     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
-                                          SUBREG_BYTE (x)), 1);
-  if (rtx_equal_p (x, val))
-    {
-      validate_change (insn, xp, new, 1);
-      return;
-    }
-  if (GET_CODE (x) == SUBREG)
-    {
-      xp = &SUBREG_REG (x);
-      x = *xp;
-    }
-
-  gcc_assert (REG_P (x));
-
-  /* We've verified that hard registers are dead, so we may keep the side
-     effect.  Otherwise replace it by new pseudo.  */
-  if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
-    validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
-  REG_NOTES (insn)
-    = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
-}
-
-/* Create a copy of INSN after AFTER replacing store of VAL to NEW
-   and each other side effect to pseudo register by new pseudo register.  */
-
-rtx
-hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
-{
-  rtx pat;
-  int i;
-  rtx note;
-  int applied;
-
-  insn = emit_copy_of_insn_after (insn, after);
-  pat = PATTERN (insn);
-
-  /* Remove REG_UNUSED notes as we will re-emit them.  */
-  while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
-    remove_note (insn, note);
-
-  /* To get this working callers must ensure to move everything referenced
-     by REG_EQUAL/REG_EQUIV notes too.  Lets remove them, it is probably
-     easier.  */
-  while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
-    remove_note (insn, note);
-  while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
-    remove_note (insn, note);
-
-  /* Remove REG_DEAD notes as they might not be valid anymore in case
-     we create redundancy.  */
-  while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
-    remove_note (insn, note);
-  switch (GET_CODE (pat))
-    {
-    case SET:
-      hoist_update_store (insn, &SET_DEST (pat), val, new);
-      break;
-    case USE:
-      break;
-    case CLOBBER:
-      hoist_update_store (insn, &XEXP (pat, 0), val, new);
-      break;
-    case PARALLEL:
-      for (i = 0; i < XVECLEN (pat, 0); i++)
-        {
-          rtx x = XVECEXP (pat, 0, i);
-          switch (GET_CODE (x))
-            {
-            case SET:
-              hoist_update_store (insn, &SET_DEST (x), val, new);
-              break;
-            case USE:
-              break;
-            case CLOBBER:
-              hoist_update_store (insn, &SET_DEST (x), val, new);
-              break;
-            default:
-              break;
-            }
-        }
-      break;
-    default:
-      gcc_unreachable ();
-    }
-  applied = apply_change_group ();
-  gcc_assert (applied);
-
-  return insn;
-}
-
-rtx
-hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
-{
-  rtx new_insn;
-
-  /* We cannot insert instructions on an abnormal critical edge.
-     It will be easier to find the culprit if we die now.  */
-  gcc_assert (!(e->flags & EDGE_ABNORMAL) || !EDGE_CRITICAL_P (e));
-
-  /* Do not use emit_insn_on_edge as we want to preserve notes and similar
-     stuff.  We also emit CALL_INSNS and firends.  */
-  if (e->insns.r == NULL_RTX)
-    {
-      start_sequence ();
-      emit_note (NOTE_INSN_DELETED);
-    }
-  else
-    push_to_sequence (e->insns.r);
-
-  new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
-
-  e->insns.r = get_insns ();
-  end_sequence ();
-  return new_insn;
-}
-
 /* Return true if LABEL is a target of JUMP_INSN.  This applies only
    to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
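For historical context, a caller of the removed hoisting interface was expected to queue the copied insn on the edge and then commit the queued insertions itself. A hedged sketch of such a hypothetical caller, assuming the pre-removal API shown above plus the existing commit_edge_insertions; try_hoist_to_edge and new_reg are illustrative names:

/* Hypothetical caller of the removed API: copy the single-set INSN,
   which stores VAL, onto edge E with NEW_REG as the new destination.
   LIVE describes hard registers live at the target, or NULL if unknown.  */
static bool
try_hoist_to_edge (rtx insn, edge e, rtx val, rtx new_reg, regset live)
{
  if (!can_hoist_insn_p (insn, val, live))
    return false;

  hoist_insn_to_edge (insn, e, val, new_reg);
  /* The copy is only queued on E->insns; materialize it in the CFG.  */
  commit_edge_insertions ();
  return true;
}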