bt-load.c (add_btr_def, [...]): Avoid C++ keywords.

	* bt-load.c (add_btr_def, migrate_btr_def,
	branch_target_load_optimize): Avoid C++ keywords.
	* caller-save.c (insert_restore, insert_save, insert_one_insn):
	Likewise.
	* combine.c (subst, simplify_set, make_extraction,
	make_compound_operation, known_cond, simplify_shift_const_1):
	Likewise.
	* cse.c (make_regs_eqv, merge_equiv_classes, validate_canon_reg,
	fold_rtx, equiv_constant, cse_insn, cse_process_notes_1):
	Likewise.

From-SVN: r137847

parent ac7ee6adbe
commit 32e9fa4804
5 changed files with 243 additions and 231 deletions
gcc/ChangeLog
@@ -1,3 +1,16 @@
2008-07-15  Kaveh R. Ghazi  <ghazi@caip.rutgers.edu>

	* bt-load.c (add_btr_def, migrate_btr_def,
	branch_target_load_optimize): Avoid C++ keywords.
	* caller-save.c (insert_restore, insert_save, insert_one_insn):
	Likewise.
	* combine.c (subst, simplify_set, make_extraction,
	make_compound_operation, known_cond, simplify_shift_const_1):
	Likewise.
	* cse.c (make_regs_eqv, merge_equiv_classes, validate_canon_reg,
	fold_rtx, equiv_constant, cse_insn, cse_process_notes_1):
	Likewise.

2008-07-15  Richard Guenther  <rguenther@suse.de>

	PR middle-end/36369
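The diffs that follow are mechanical renames: locals that happen to be C++ keywords (`new`, `this`, `try`, `class`) are given non-keyword names so each file stays valid when compiled by a C++ compiler. As a minimal sketch of the problem and the fix (illustrative only, the struct and functions below are invented and are not GCC code):

```c
/* Illustrative sketch only -- not code from this patch.  A local named
   after a C++ keyword is valid C but is a syntax error once the file is
   built with a C++ compiler, so such locals get renamed.  */
struct rtx_def { int code; };
typedef struct rtx_def *rtx;

static rtx
simplify_stub (rtx x)          /* hypothetical stand-in for simplify_rtx */
{
  return x;
}

rtx
fold_src (rtx src)
{
  /* Before the patch this pattern read "rtx new = ...;", fine in C89/C99
     but rejected by g++ because `new' is a keyword there.  */
  rtx new_rtx = simplify_stub (src);
  return new_rtx ? new_rtx : src;
}
```

The renamed file compiles identically as C and now also parses as C++, which is the whole point of the change; no behavior is altered.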
gcc/bt-load.c
@@ -301,31 +301,30 @@ add_btr_def (fibheap_t all_btr_defs, basic_block bb, int insn_luid, rtx insn,
unsigned int dest_reg, int other_btr_uses_before_def,
btr_def_group *all_btr_def_groups)
{
btr_def this
= XOBNEW (&migrate_btrl_obstack, struct btr_def_s);
this->bb = bb;
this->luid = insn_luid;
this->insn = insn;
this->btr = dest_reg;
this->cost = basic_block_freq (bb);
this->has_ambiguous_use = 0;
this->other_btr_uses_before_def = other_btr_uses_before_def;
this->other_btr_uses_after_use = 0;
this->next_this_bb = NULL;
this->next_this_group = NULL;
this->uses = NULL;
this->live_range = NULL;
find_btr_def_group (all_btr_def_groups, this);
btr_def this_def = XOBNEW (&migrate_btrl_obstack, struct btr_def_s);
this_def->bb = bb;
this_def->luid = insn_luid;
this_def->insn = insn;
this_def->btr = dest_reg;
this_def->cost = basic_block_freq (bb);
this_def->has_ambiguous_use = 0;
this_def->other_btr_uses_before_def = other_btr_uses_before_def;
this_def->other_btr_uses_after_use = 0;
this_def->next_this_bb = NULL;
this_def->next_this_group = NULL;
this_def->uses = NULL;
this_def->live_range = NULL;
find_btr_def_group (all_btr_def_groups, this_def);

fibheap_insert (all_btr_defs, -this->cost, this);
fibheap_insert (all_btr_defs, -this_def->cost, this_def);

if (dump_file)
fprintf (dump_file,
"Found target reg definition: sets %u { bb %d, insn %d }%s priority %d\n",
dest_reg, bb->index, INSN_UID (insn), (this->group ? "" : ":not const"),
this->cost);
dest_reg, bb->index, INSN_UID (insn),
(this_def->group ? "" : ":not const"), this_def->cost);

return this;
return this_def;
}

/* Create a new target register user structure, for a use in block BB,

@@ -1274,7 +1273,7 @@ migrate_btr_def (btr_def def, int min_cost)
HARD_REG_SET btrs_live_in_range;
int btr_used_near_def = 0;
int def_basic_block_freq;
basic_block try;
basic_block attempt;
int give_up = 0;
int def_moved = 0;
btr_user user;

@@ -1328,31 +1327,31 @@ migrate_btr_def (btr_def def, int min_cost)

def_basic_block_freq = basic_block_freq (def->bb);

for (try = get_immediate_dominator (CDI_DOMINATORS, def->bb);
!give_up && try && try != ENTRY_BLOCK_PTR && def->cost >= min_cost;
try = get_immediate_dominator (CDI_DOMINATORS, try))
for (attempt = get_immediate_dominator (CDI_DOMINATORS, def->bb);
!give_up && attempt && attempt != ENTRY_BLOCK_PTR && def->cost >= min_cost;
attempt = get_immediate_dominator (CDI_DOMINATORS, attempt))
{
/* Try to move the instruction that sets the target register into
basic block TRY. */
int try_freq = basic_block_freq (try);
basic block ATTEMPT. */
int try_freq = basic_block_freq (attempt);
edge_iterator ei;
edge e;

/* If TRY has abnormal edges, skip it. */
FOR_EACH_EDGE (e, ei, try->succs)
/* If ATTEMPT has abnormal edges, skip it. */
FOR_EACH_EDGE (e, ei, attempt->succs)
if (e->flags & EDGE_COMPLEX)
break;
if (e)
continue;

if (dump_file)
fprintf (dump_file, "trying block %d ...", try->index);
fprintf (dump_file, "trying block %d ...", attempt->index);

if (try_freq < def_basic_block_freq
|| (try_freq == def_basic_block_freq && btr_used_near_def))
{
int btr;
augment_live_range (live_range, &btrs_live_in_range, def->bb, try,
augment_live_range (live_range, &btrs_live_in_range, def->bb, attempt,
flag_btr_bb_exclusive);
if (dump_file)
{

@@ -1363,7 +1362,7 @@ migrate_btr_def (btr_def def, int min_cost)
btr = choose_btr (btrs_live_in_range);
if (btr != -1)
{
move_btr_def (try, btr, def, live_range, &btrs_live_in_range);
move_btr_def (attempt, btr, def, live_range, &btrs_live_in_range);
bitmap_copy(live_range, def->live_range);
btr_used_near_def = 0;
def_moved = 1;

@@ -1459,8 +1458,8 @@ migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
static void
branch_target_load_optimize (bool after_prologue_epilogue_gen)
{
enum reg_class class = targetm.branch_target_register_class ();
if (class != NO_REGS)
enum reg_class klass = targetm.branch_target_register_class ();
if (klass != NO_REGS)
{
/* Initialize issue_rate. */
if (targetm.sched.issue_rate)

@@ -1482,7 +1481,7 @@ branch_target_load_optimize (bool after_prologue_epilogue_gen)

/* Dominator info is also needed for migrate_btr_def. */
calculate_dominance_info (CDI_DOMINATORS);
migrate_btr_defs (class,
migrate_btr_defs (klass,
(targetm.branch_target_register_callee_saved
(after_prologue_epilogue_gen)));
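Besides `new`, bt-load.c also used `this`, `try` and `class` as identifiers; the hunks above rename them to `this_def`, `attempt` and `klass`. A tiny invented sketch of that naming pattern (illustrative only, nothing below is GCC code):

```c
/* Illustrative sketch only: the struct and function are made up to show
   the rename style used in bt-load.c, not taken from the patch.  */
enum reg_class_kind { NO_REGS_KIND, SOME_REGS_KIND };

struct btr_node
{
  int cost;
  struct btr_node *next;
};

static int
node_cost (struct btr_node *this_def,     /* was a local named `this'  */
           enum reg_class_kind klass)     /* was a local named `class' */
{
  struct btr_node *attempt;               /* was a local named `try'   */
  int total = 0;

  if (klass == NO_REGS_KIND)
    return 0;

  for (attempt = this_def; attempt != 0; attempt = attempt->next)
    total += attempt->cost;
  return total;
}
```

`klass` is the conventional spelling GCC uses when a variable really does denote a register class but the keyword `class` is unavailable under C++.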
gcc/caller-save.c
@@ -660,7 +660,7 @@ insert_restore (struct insn_chain *chain, int before_p, int regno,
rtx pat = NULL_RTX;
int code;
unsigned int numregs = 0;
struct insn_chain *new;
struct insn_chain *new_chain;
rtx mem;

/* A common failure mode if register status is not correct in the

@@ -713,13 +713,13 @@ insert_restore (struct insn_chain *chain, int before_p, int regno,
gen_rtx_REG (GET_MODE (mem),
regno), mem);
code = reg_restore_code (regno, GET_MODE (mem));
new = insert_one_insn (chain, before_p, code, pat);
new_chain = insert_one_insn (chain, before_p, code, pat);

/* Clear status for all registers we restored. */
for (k = 0; k < i; k++)
{
CLEAR_HARD_REG_BIT (hard_regs_saved, regno + k);
SET_REGNO_REG_SET (&new->dead_or_set, regno + k);
SET_REGNO_REG_SET (&new_chain->dead_or_set, regno + k);
n_regs_saved--;
}

@@ -738,7 +738,7 @@ insert_save (struct insn_chain *chain, int before_p, int regno,
rtx pat = NULL_RTX;
int code;
unsigned int numregs = 0;
struct insn_chain *new;
struct insn_chain *new_chain;
rtx mem;

/* A common failure mode if register status is not correct in the

@@ -790,13 +790,13 @@ insert_save (struct insn_chain *chain, int before_p, int regno,
gen_rtx_REG (GET_MODE (mem),
regno));
code = reg_save_code (regno, GET_MODE (mem));
new = insert_one_insn (chain, before_p, code, pat);
new_chain = insert_one_insn (chain, before_p, code, pat);

/* Set hard_regs_saved and dead_or_set for all the registers we saved. */
for (k = 0; k < numregs; k++)
{
SET_HARD_REG_BIT (hard_regs_saved, regno + k);
SET_REGNO_REG_SET (&new->dead_or_set, regno + k);
SET_REGNO_REG_SET (&new_chain->dead_or_set, regno + k);
n_regs_saved++;
}

@@ -809,7 +809,7 @@ static struct insn_chain *
insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
{
rtx insn = chain->insn;
struct insn_chain *new;
struct insn_chain *new_chain;

#ifdef HAVE_cc0
/* If INSN references CC0, put our insns in front of the insn that sets

@@ -824,23 +824,23 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
chain = chain->prev, insn = chain->insn;
#endif

new = new_insn_chain ();
new_chain = new_insn_chain ();
if (before_p)
{
rtx link;

new->prev = chain->prev;
if (new->prev != 0)
new->prev->next = new;
new_chain->prev = chain->prev;
if (new_chain->prev != 0)
new_chain->prev->next = new_chain;
else
reload_insn_chain = new;
reload_insn_chain = new_chain;

chain->prev = new;
new->next = chain;
new->insn = emit_insn_before (pat, insn);
chain->prev = new_chain;
new_chain->next = chain;
new_chain->insn = emit_insn_before (pat, insn);
/* ??? It would be nice if we could exclude the already / still saved
registers from the live sets. */
COPY_REG_SET (&new->live_throughout, &chain->live_throughout);
COPY_REG_SET (&new_chain->live_throughout, &chain->live_throughout);
/* Registers that die in CHAIN->INSN still live in the new insn. */
for (link = REG_NOTES (chain->insn); link; link = XEXP (link, 1))
{

@@ -857,7 +857,7 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
continue;
for (i = hard_regno_nregs[regno][GET_MODE (reg)] - 1;
i >= 0; i--)
SET_REGNO_REG_SET (&new->live_throughout, regno + i);
SET_REGNO_REG_SET (&new_chain->live_throughout, regno + i);
}
}

@@ -885,41 +885,41 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)

for (i = hard_regno_nregs[regno][GET_MODE (reg)] - 1;
i >= 0; i--)
SET_REGNO_REG_SET (&new->live_throughout, regno + i);
SET_REGNO_REG_SET (&new_chain->live_throughout, regno + i);
}
}
}

}

CLEAR_REG_SET (&new->dead_or_set);
CLEAR_REG_SET (&new_chain->dead_or_set);
if (chain->insn == BB_HEAD (BASIC_BLOCK (chain->block)))
BB_HEAD (BASIC_BLOCK (chain->block)) = new->insn;
BB_HEAD (BASIC_BLOCK (chain->block)) = new_chain->insn;
}
else
{
new->next = chain->next;
if (new->next != 0)
new->next->prev = new;
chain->next = new;
new->prev = chain;
new->insn = emit_insn_after (pat, insn);
new_chain->next = chain->next;
if (new_chain->next != 0)
new_chain->next->prev = new_chain;
chain->next = new_chain;
new_chain->prev = chain;
new_chain->insn = emit_insn_after (pat, insn);
/* ??? It would be nice if we could exclude the already / still saved
registers from the live sets, and observe REG_UNUSED notes. */
COPY_REG_SET (&new->live_throughout, &chain->live_throughout);
COPY_REG_SET (&new_chain->live_throughout, &chain->live_throughout);
/* Registers that are set in CHAIN->INSN live in the new insn.
(Unless there is a REG_UNUSED note for them, but we don't
look for them here.) */
note_stores (PATTERN (chain->insn), add_stored_regs,
&new->live_throughout);
CLEAR_REG_SET (&new->dead_or_set);
&new_chain->live_throughout);
CLEAR_REG_SET (&new_chain->dead_or_set);
if (chain->insn == BB_END (BASIC_BLOCK (chain->block)))
BB_END (BASIC_BLOCK (chain->block)) = new->insn;
BB_END (BASIC_BLOCK (chain->block)) = new_chain->insn;
}
new->block = chain->block;
new->is_caller_save_insn = 1;
new_chain->block = chain->block;
new_chain->is_caller_save_insn = 1;

INSN_CODE (new->insn) = code;
return new;
INSN_CODE (new_chain->insn) = code;
return new_chain;
}
#include "gt-caller-save.h"
gcc/combine.c
@@ -4278,7 +4278,7 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
enum machine_mode op0_mode = VOIDmode;
const char *fmt;
int len, i;
rtx new;
rtx new_rtx;

/* Two expressions are equal if they are identical copies of a shared
RTX or if they are both registers with the same register number

@@ -4333,14 +4333,14 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
&& GET_CODE (XVECEXP (x, 0, 0)) == SET
&& GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
{
new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

/* If this substitution failed, this whole thing fails. */
if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx)
return new;
if (GET_CODE (new_rtx) == CLOBBER
&& XEXP (new_rtx, 0) == const0_rtx)
return new_rtx;

SUBST (XVECEXP (x, 0, 0), new);
SUBST (XVECEXP (x, 0, 0), new_rtx);

for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
{

@@ -4350,14 +4350,14 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
&& GET_CODE (dest) != CC0
&& GET_CODE (dest) != PC)
{
new = subst (dest, from, to, 0, unique_copy);
new_rtx = subst (dest, from, to, 0, unique_copy);

/* If this substitution failed, this whole thing fails. */
if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx)
return new;
if (GET_CODE (new_rtx) == CLOBBER
&& XEXP (new_rtx, 0) == const0_rtx)
return new_rtx;

SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
}
}
}

@@ -4390,33 +4390,33 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
{
if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
{
new = (unique_copy && n_occurrences
new_rtx = (unique_copy && n_occurrences
? copy_rtx (to) : to);
n_occurrences++;
}
else
{
new = subst (XVECEXP (x, i, j), from, to, 0,
new_rtx = subst (XVECEXP (x, i, j), from, to, 0,
unique_copy);

/* If this substitution failed, this whole thing
fails. */
if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx)
return new;
if (GET_CODE (new_rtx) == CLOBBER
&& XEXP (new_rtx, 0) == const0_rtx)
return new_rtx;
}

SUBST (XVECEXP (x, i, j), new);
SUBST (XVECEXP (x, i, j), new_rtx);
}
}
else if (fmt[i] == 'e')
{
/* If this is a register being set, ignore it. */
new = XEXP (x, i);
new_rtx = XEXP (x, i);
if (in_dest
&& i == 0
&& (((code == SUBREG || code == ZERO_EXTRACT)
&& REG_P (new))
&& REG_P (new_rtx))
|| code == STRICT_LOW_PART))
;

@@ -4457,7 +4457,7 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
n_occurrences++;
}
else

@@ -4469,7 +4469,7 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
things aside from REG and MEM that should appear in a
SET_DEST. */
new = subst (XEXP (x, i), from, to,
new_rtx = subst (XEXP (x, i), from, to,
(((in_dest
&& (code == SUBREG || code == STRICT_LOW_PART
|| code == ZERO_EXTRACT))

@@ -4482,30 +4482,30 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
well as prevent accidents where two CLOBBERs are considered
to be equal, thus producing an incorrect simplification. */

if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
return new;
if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
return new_rtx;

if (GET_CODE (x) == SUBREG
&& (GET_CODE (new) == CONST_INT
|| GET_CODE (new) == CONST_DOUBLE))
&& (GET_CODE (new_rtx) == CONST_INT
|| GET_CODE (new_rtx) == CONST_DOUBLE))
{
enum machine_mode mode = GET_MODE (x);

x = simplify_subreg (GET_MODE (x), new,
x = simplify_subreg (GET_MODE (x), new_rtx,
GET_MODE (SUBREG_REG (x)),
SUBREG_BYTE (x));
if (! x)
x = gen_rtx_CLOBBER (mode, const0_rtx);
}
else if (GET_CODE (new) == CONST_INT
else if (GET_CODE (new_rtx) == CONST_INT
&& GET_CODE (x) == ZERO_EXTEND)
{
x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
new, GET_MODE (XEXP (x, 0)));
new_rtx, GET_MODE (XEXP (x, 0)));
gcc_assert (x);
}
else
SUBST (XEXP (x, i), new);
SUBST (XEXP (x, i), new_rtx);
}
}
}

@@ -5638,9 +5638,9 @@ simplify_set (rtx x)
/* Attempt to simplify CC user. */
if (GET_CODE (pat) == SET)
{
rtx new = simplify_rtx (SET_SRC (pat));
if (new != NULL_RTX)
SUBST (SET_SRC (pat), new);
rtx new_rtx = simplify_rtx (SET_SRC (pat));
if (new_rtx != NULL_RTX)
SUBST (SET_SRC (pat), new_rtx);
}

/* Convert X into a no-op move. */

@@ -6373,7 +6373,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
enum machine_mode pos_mode = word_mode;
enum machine_mode extraction_mode = word_mode;
enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
rtx new = 0;
rtx new_rtx = 0;
rtx orig_pos_rtx = pos_rtx;
HOST_WIDE_INT orig_pos;

@@ -6397,11 +6397,11 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
(ashift X (const_int C)), where LEN > C. Extract the
least significant (LEN - C) bits of X, giving an rtx
whose mode is MODE, then shift it left C times. */
new = make_extraction (mode, XEXP (inner, 0),
new_rtx = make_extraction (mode, XEXP (inner, 0),
0, 0, len - INTVAL (XEXP (inner, 1)),
unsignedp, in_dest, in_compare);
if (new != 0)
return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
if (new_rtx != 0)
return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
}

inner_mode = GET_MODE (inner);

@@ -6457,7 +6457,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
else
offset = pos / BITS_PER_UNIT;

new = adjust_address_nv (inner, tmode, offset);
new_rtx = adjust_address_nv (inner, tmode, offset);
}
else if (REG_P (inner))
{

@@ -6487,16 +6487,16 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
if (!validate_subreg (tmode, inner_mode, inner, final_word))
return NULL_RTX;

new = gen_rtx_SUBREG (tmode, inner, final_word);
new_rtx = gen_rtx_SUBREG (tmode, inner, final_word);
}
else
new = gen_lowpart (tmode, inner);
new_rtx = gen_lowpart (tmode, inner);
}
else
new = inner;
new_rtx = inner;
}
else
new = force_to_mode (inner, tmode,
new_rtx = force_to_mode (inner, tmode,
len >= HOST_BITS_PER_WIDE_INT
? ~(unsigned HOST_WIDE_INT) 0
: ((unsigned HOST_WIDE_INT) 1 << len) - 1,

@@ -6506,30 +6506,30 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
make a STRICT_LOW_PART unless we made a MEM. */

if (in_dest)
return (MEM_P (new) ? new
: (GET_CODE (new) != SUBREG
return (MEM_P (new_rtx) ? new_rtx
: (GET_CODE (new_rtx) != SUBREG
? gen_rtx_CLOBBER (tmode, const0_rtx)
: gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
: gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));

if (mode == tmode)
return new;
return new_rtx;

if (GET_CODE (new) == CONST_INT)
return gen_int_mode (INTVAL (new), mode);
if (GET_CODE (new_rtx) == CONST_INT)
return gen_int_mode (INTVAL (new_rtx), mode);

/* If we know that no extraneous bits are set, and that the high
bit is not set, convert the extraction to the cheaper of
sign and zero extension, that are equivalent in these cases. */
if (flag_expensive_optimizations
&& (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
&& ((nonzero_bits (new, tmode)
&& ((nonzero_bits (new_rtx, tmode)
& ~(((unsigned HOST_WIDE_INT)
GET_MODE_MASK (tmode))
>> 1))
== 0)))
{
rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);

/* Prefer ZERO_EXTENSION, since it gives more information to
backends. */

@@ -6542,7 +6542,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
proper mode. */

return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
mode, new));
mode, new_rtx));
}

/* Unless this is a COMPARE or we have a funny memory reference,

@@ -6746,12 +6746,12 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
pos_rtx = GEN_INT (pos);

/* Make the required operation. See if we can use existing rtx. */
new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
extraction_mode, inner, GEN_INT (len), pos_rtx);
if (! in_dest)
new = gen_lowpart (mode, new);
new_rtx = gen_lowpart (mode, new_rtx);

return new;
return new_rtx;
}

/* See if X contains an ASHIFT of COUNT or more bits that can be commuted

@@ -6827,7 +6827,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
rtx rhs, lhs;
enum rtx_code next_code;
int i;
rtx new = 0;
rtx new_rtx = 0;
rtx tem;
const char *fmt;

@@ -6852,8 +6852,8 @@ make_compound_operation (rtx x, enum rtx_code in_code)
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
&& INTVAL (XEXP (x, 1)) >= 0)
{
new = make_compound_operation (XEXP (x, 0), next_code);
new = gen_rtx_MULT (mode, new,
new_rtx = make_compound_operation (XEXP (x, 0), next_code);
new_rtx = gen_rtx_MULT (mode, new_rtx,
GEN_INT ((HOST_WIDE_INT) 1
<< INTVAL (XEXP (x, 1))));
}

@@ -6870,8 +6870,8 @@ make_compound_operation (rtx x, enum rtx_code in_code)
if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
&& (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
{
new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1,
0, in_code == COMPARE);
}

@@ -6881,9 +6881,9 @@ make_compound_operation (rtx x, enum rtx_code in_code)
&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
&& (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
{
new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
next_code);
new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
new_rtx = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new_rtx, 0,
XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
0, in_code == COMPARE);
}

@@ -6895,12 +6895,12 @@ make_compound_operation (rtx x, enum rtx_code in_code)
&& (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
{
/* Apply the distributive law, and then try to make extractions. */
new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
XEXP (x, 1)),
gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
XEXP (x, 1)));
new = make_compound_operation (new, in_code);
new_rtx = make_compound_operation (new_rtx, in_code);
}

/* If we are have (and (rotate X C) M) and C is larger than the number

@@ -6911,8 +6911,8 @@ make_compound_operation (rtx x, enum rtx_code in_code)
&& (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
&& i <= INTVAL (XEXP (XEXP (x, 0), 1)))
{
new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
new = make_extraction (mode, new,
new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
new_rtx = make_extraction (mode, new_rtx,
(GET_MODE_BITSIZE (mode)
- INTVAL (XEXP (XEXP (x, 0), 1))),
NULL_RTX, i, 1, 0, in_code == COMPARE);

@@ -6945,7 +6945,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
we are in a COMPARE. */
else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
new = make_extraction (mode,
new_rtx = make_extraction (mode,
make_compound_operation (XEXP (x, 0),
next_code),
0, NULL_RTX, i, 1, 0, in_code == COMPARE);

@@ -6954,7 +6954,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
convert this into the appropriate bit extract. */
else if (in_code == COMPARE
&& (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
new = make_extraction (mode,
new_rtx = make_extraction (mode,
make_compound_operation (XEXP (x, 0),
next_code),
i, NULL_RTX, 1, 1, 0, 1);

@@ -6969,7 +6969,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
&& mode_width <= HOST_BITS_PER_WIDE_INT
&& (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
{
new = gen_rtx_ASHIFTRT (mode,
new_rtx = gen_rtx_ASHIFTRT (mode,
make_compound_operation (XEXP (x, 0),
next_code),
XEXP (x, 1));

@@ -6989,8 +6989,8 @@ make_compound_operation (rtx x, enum rtx_code in_code)
&& GET_CODE (XEXP (lhs, 1)) == CONST_INT
&& INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
{
new = make_compound_operation (XEXP (lhs, 0), next_code);
new = make_extraction (mode, new,
new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
new_rtx = make_extraction (mode, new_rtx,
INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
NULL_RTX, mode_width - INTVAL (rhs),
code == LSHIFTRT, 0, in_code == COMPARE);

@@ -7007,8 +7007,8 @@ make_compound_operation (rtx x, enum rtx_code in_code)
&& (OBJECT_P (SUBREG_REG (lhs))))
&& GET_CODE (rhs) == CONST_INT
&& INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
&& (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
new = make_extraction (mode, make_compound_operation (new, next_code),
&& (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code),
0, NULL_RTX, mode_width - INTVAL (rhs),
code == LSHIFTRT, 0, in_code == COMPARE);

@@ -7053,9 +7053,9 @@ make_compound_operation (rtx x, enum rtx_code in_code)
break;
}

if (new)
if (new_rtx)
{
x = gen_lowpart (mode, new);
x = gen_lowpart (mode, new_rtx);
code = GET_CODE (x);
}

@@ -7064,8 +7064,8 @@ make_compound_operation (rtx x, enum rtx_code in_code)
for (i = 0; i < GET_RTX_LENGTH (code); i++)
if (fmt[i] == 'e')
{
new = make_compound_operation (XEXP (x, i), next_code);
SUBST (XEXP (x, i), new);
new_rtx = make_compound_operation (XEXP (x, i), next_code);
SUBST (XEXP (x, i), new_rtx);
}

/* If this is a commutative operation, the changes to the operands

@@ -8074,16 +8074,16 @@ known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
else if (code == SUBREG)
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);

if (SUBREG_REG (x) != r)
{
/* We must simplify subreg here, before we lose track of the
original inner_mode. */
new = simplify_subreg (GET_MODE (x), r,
new_rtx = simplify_subreg (GET_MODE (x), r,
inner_mode, SUBREG_BYTE (x));
if (new)
return new;
if (new_rtx)
return new_rtx;
else
SUBST (SUBREG_REG (x), r);
}

@@ -8099,16 +8099,16 @@ known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
else if (code == ZERO_EXTEND)
{
enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);

if (XEXP (x, 0) != r)
{
/* We must simplify the zero_extend here, before we lose
track of the original inner_mode. */
new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
r, inner_mode);
if (new)
return new;
if (new_rtx)
return new_rtx;
else
SUBST (XEXP (x, 0), r);
}

@@ -8961,7 +8961,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
enum rtx_code outer_op = UNKNOWN;
HOST_WIDE_INT outer_const = 0;
int complement_p = 0;
rtx new, x;
rtx new_rtx, x;

/* Make sure and truncate the "natural" shift on the way in. We don't
want to do this inside the loop as it makes it more difficult to

@@ -9083,10 +9083,10 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
case ZERO_EXTEND:
case SIGN_EXTRACT:
case ZERO_EXTRACT:
new = expand_compound_operation (varop);
if (new != varop)
new_rtx = expand_compound_operation (varop);
if (new_rtx != varop)
{
varop = new;
varop = new_rtx;
continue;
}
break;

@@ -9101,12 +9101,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
&& (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
MODE_INT, 1)) != BLKmode)
{
new = adjust_address_nv (varop, tmode,
new_rtx = adjust_address_nv (varop, tmode,
BYTES_BIG_ENDIAN ? 0
: count / BITS_PER_UNIT);

varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
: ZERO_EXTEND, mode, new);
: ZERO_EXTEND, mode, new_rtx);
count = 0;
continue;
}

@@ -9327,10 +9327,10 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
&& GET_CODE (XEXP (varop, 0)) == CONST_INT
&& GET_CODE (XEXP (varop, 1)) != CONST_INT)
{
rtx new = simplify_const_binary_operation (code, mode,
rtx new_rtx = simplify_const_binary_operation (code, mode,
XEXP (varop, 0),
GEN_INT (count));
varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1));
count = 0;
continue;
}

@@ -9384,12 +9384,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
&& !(code == ASHIFTRT && GET_CODE (varop) == XOR
&& 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
shift_mode))
&& (new = simplify_const_binary_operation (code, result_mode,
&& (new_rtx = simplify_const_binary_operation (code, result_mode,
XEXP (varop, 1),
GEN_INT (count))) != 0
&& GET_CODE (new) == CONST_INT
&& GET_CODE (new_rtx) == CONST_INT
&& merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
INTVAL (new), result_mode, &complement_p))
INTVAL (new_rtx), result_mode, &complement_p))
{
varop = XEXP (varop, 0);
continue;

@@ -9512,12 +9512,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
/* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
if (code == ASHIFT
&& GET_CODE (XEXP (varop, 1)) == CONST_INT
&& (new = simplify_const_binary_operation (ASHIFT, result_mode,
&& (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
XEXP (varop, 1),
GEN_INT (count))) != 0
&& GET_CODE (new) == CONST_INT
&& GET_CODE (new_rtx) == CONST_INT
&& merge_outer_ops (&outer_op, &outer_const, PLUS,
INTVAL (new), result_mode, &complement_p))
INTVAL (new_rtx), result_mode, &complement_p))
{
varop = XEXP (varop, 0);
continue;

@@ -9531,12 +9531,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
if (code == LSHIFTRT
&& GET_CODE (XEXP (varop, 1)) == CONST_INT
&& mode_signbit_p (result_mode, XEXP (varop, 1))
&& (new = simplify_const_binary_operation (code, result_mode,
&& (new_rtx = simplify_const_binary_operation (code, result_mode,
XEXP (varop, 1),
GEN_INT (count))) != 0
&& GET_CODE (new) == CONST_INT
&& GET_CODE (new_rtx) == CONST_INT
&& merge_outer_ops (&outer_op, &outer_const, XOR,
INTVAL (new), result_mode, &complement_p))
INTVAL (new_rtx), result_mode, &complement_p))
{
varop = XEXP (varop, 0);
continue;
gcc/cse.c
@@ -913,18 +913,18 @@ make_new_qty (unsigned int reg, enum machine_mode mode)
OLD is not changing; NEW is. */

static void
make_regs_eqv (unsigned int new, unsigned int old)
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
unsigned int lastr, firstr;
int q = REG_QTY (old);
int q = REG_QTY (old_reg);
struct qty_table_elem *ent;

ent = &qty_table[q];

/* Nothing should become eqv until it has a "non-invalid" qty number. */
gcc_assert (REGNO_QTY_VALID_P (old));
gcc_assert (REGNO_QTY_VALID_P (old_reg));

REG_QTY (new) = q;
REG_QTY (new_reg) = q;
firstr = ent->first_reg;
lastr = ent->last_reg;

@@ -937,19 +937,19 @@ make_regs_eqv (unsigned int new, unsigned int old)
that not only can they not be allocated by the compiler, but
they cannot be used in substitutions or canonicalizations
either. */
&& (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
&& ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
|| (new >= FIRST_PSEUDO_REGISTER
&& (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
&& ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
|| (new_reg >= FIRST_PSEUDO_REGISTER
&& (firstr < FIRST_PSEUDO_REGISTER
|| (bitmap_bit_p (cse_ebb_live_out, new)
|| (bitmap_bit_p (cse_ebb_live_out, new_reg)
&& !bitmap_bit_p (cse_ebb_live_out, firstr))
|| (bitmap_bit_p (cse_ebb_live_in, new)
|| (bitmap_bit_p (cse_ebb_live_in, new_reg)
&& !bitmap_bit_p (cse_ebb_live_in, firstr))))))
{
reg_eqv_table[firstr].prev = new;
reg_eqv_table[new].next = firstr;
reg_eqv_table[new].prev = -1;
ent->first_reg = new;
reg_eqv_table[firstr].prev = new_reg;
reg_eqv_table[new_reg].next = firstr;
reg_eqv_table[new_reg].prev = -1;
ent->first_reg = new_reg;
}
else
{

@@ -959,15 +959,15 @@ make_regs_eqv (unsigned int new, unsigned int old)
equivalent for anything. */
while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
&& (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
&& new >= FIRST_PSEUDO_REGISTER)
&& new_reg >= FIRST_PSEUDO_REGISTER)
lastr = reg_eqv_table[lastr].prev;
reg_eqv_table[new].next = reg_eqv_table[lastr].next;
reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
if (reg_eqv_table[lastr].next >= 0)
reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
else
qty_table[q].last_reg = new;
reg_eqv_table[lastr].next = new;
reg_eqv_table[new].prev = lastr;
qty_table[q].last_reg = new_reg;
reg_eqv_table[lastr].next = new_reg;
reg_eqv_table[new_reg].prev = lastr;
}
}

@@ -1584,7 +1584,7 @@ insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mo
static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
struct table_elt *elt, *next, *new;
struct table_elt *elt, *next, *new_elt;

/* Ensure we start with the head of the classes. */
class1 = class1->first_same_value;

@@ -1628,8 +1628,8 @@ merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
rehash_using_reg (exp);
hash = HASH (exp, mode);
}
new = insert (exp, class1, hash, mode);
new->in_memory = hash_arg_in_memory;
new_elt = insert (exp, class1, hash, mode);
new_elt->in_memory = hash_arg_in_memory;
}
}
}

@@ -2648,12 +2648,12 @@ validate_canon_reg (rtx *xloc, rtx insn)
{
if (*xloc)
{
rtx new = canon_reg (*xloc, insn);
rtx new_rtx = canon_reg (*xloc, insn);

/* If replacing pseudo with hard reg or vice versa, ensure the
insn remains valid. Likewise if the insn has MATCH_DUPs. */
gcc_assert (insn && new);
validate_change (insn, xloc, new, 1);
gcc_assert (insn && new_rtx);
validate_change (insn, xloc, new_rtx, 1);
}
}

@@ -2948,7 +2948,7 @@ fold_rtx (rtx x, rtx insn)
enum machine_mode mode;
const char *fmt;
int i;
rtx new = 0;
rtx new_rtx = 0;
int changed = 0;

/* Operands of X. */

@@ -2974,8 +2974,8 @@ fold_rtx (rtx x, rtx insn)
{
case MEM:
case SUBREG:
if ((new = equiv_constant (x)) != NULL_RTX)
return new;
if ((new_rtx = equiv_constant (x)) != NULL_RTX)
return new_rtx;
return x;

case CONST:

@@ -3150,7 +3150,7 @@ fold_rtx (rtx x, rtx insn)
if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
is_const = 1, const_arg0 = XEXP (const_arg0, 0);

new = simplify_unary_operation (code, mode,
new_rtx = simplify_unary_operation (code, mode,
const_arg0 ? const_arg0 : folded_arg0,
mode_arg0);
/* NEG of PLUS could be converted into MINUS, but that causes

@@ -3158,12 +3158,12 @@ fold_rtx (rtx x, rtx insn)
(CONST (MINUS (CONST_INT) (SYMBOL_REF)))
which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
FIXME: those ports should be fixed. */
if (new != 0 && is_const
&& GET_CODE (new) == PLUS
&& (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
|| GET_CODE (XEXP (new, 0)) == LABEL_REF)
&& GET_CODE (XEXP (new, 1)) == CONST_INT)
new = gen_rtx_CONST (mode, new);
if (new_rtx != 0 && is_const
&& GET_CODE (new_rtx) == PLUS
&& (GET_CODE (XEXP (new_rtx, 0)) == SYMBOL_REF
|| GET_CODE (XEXP (new_rtx, 0)) == LABEL_REF)
&& GET_CODE (XEXP (new_rtx, 1)) == CONST_INT)
new_rtx = gen_rtx_CONST (mode, new_rtx);
}
break;

@@ -3324,7 +3324,7 @@ fold_rtx (rtx x, rtx insn)
{
rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
}
break;

@@ -3560,7 +3560,7 @@ fold_rtx (rtx x, rtx insn)
break;
}

new = simplify_binary_operation (code, mode,
new_rtx = simplify_binary_operation (code, mode,
const_arg0 ? const_arg0 : folded_arg0,
const_arg1 ? const_arg1 : folded_arg1);
break;

@@ -3575,7 +3575,7 @@ fold_rtx (rtx x, rtx insn)

case RTX_TERNARY:
case RTX_BITFIELD_OPS:
new = simplify_ternary_operation (code, mode, mode_arg0,
new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
const_arg0 ? const_arg0 : folded_arg0,
const_arg1 ? const_arg1 : folded_arg1,
const_arg2 ? const_arg2 : XEXP (x, 2));

@@ -3585,7 +3585,7 @@ fold_rtx (rtx x, rtx insn)
break;
}

return new ? new : x;
return new_rtx ? new_rtx : x;
}

/* Return a constant value currently equivalent to X.

@@ -3609,16 +3609,16 @@ equiv_constant (rtx x)

if (GET_CODE (x) == SUBREG)
{
rtx new;
rtx new_rtx;

/* See if we previously assigned a constant value to this SUBREG. */
if ((new = lookup_as_function (x, CONST_INT)) != 0
|| (new = lookup_as_function (x, CONST_DOUBLE)) != 0
|| (new = lookup_as_function (x, CONST_FIXED)) != 0)
return new;
if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
|| (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
|| (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
return new_rtx;

if (REG_P (SUBREG_REG (x))
&& (new = equiv_constant (SUBREG_REG (x))) != 0)
&& (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
return simplify_subreg (GET_MODE (x), SUBREG_REG (x),
GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));

@@ -4161,9 +4161,9 @@ cse_insn (rtx insn)
{
rtx dest = SET_DEST (sets[i].rtl);
rtx src = SET_SRC (sets[i].rtl);
rtx new = canon_reg (src, insn);
rtx new_rtx = canon_reg (src, insn);

validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

if (GET_CODE (dest) == ZERO_EXTRACT)
{

@@ -4811,12 +4811,12 @@ cse_insn (rtx insn)
else if (validate_unshare_change
(insn, &SET_SRC (sets[i].rtl), trial, 0))
{
rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);

/* The result of apply_change_group can be ignored; see
canon_reg. */

validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
apply_change_group ();

break;

@@ -5016,10 +5016,10 @@ cse_insn (rtx insn)
and hope for the best. */
if (n_sets == 1)
{
rtx new, note;
rtx new_rtx, note;

new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
JUMP_LABEL (new) = XEXP (src, 0);
new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
JUMP_LABEL (new_rtx) = XEXP (src, 0);
LABEL_NUSES (XEXP (src, 0))++;

/* Make sure to copy over REG_NON_LOCAL_GOTO. */

@@ -5027,11 +5027,11 @@ cse_insn (rtx insn)
if (note)
{
XEXP (note, 1) = NULL_RTX;
REG_NOTES (new) = note;
REG_NOTES (new_rtx) = note;
}

delete_insn_and_edges (insn);
insn = new;
insn = new_rtx;
}
else
INSN_CODE (insn) = -1;

@@ -5674,11 +5674,11 @@ cse_process_notes_1 (rtx x, rtx object, bool *changed)
case ZERO_EXTEND:
case SUBREG:
{
rtx new = cse_process_notes (XEXP (x, 0), object, changed);
rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
/* We don't substitute VOIDmode constants into these rtx,
since they would impede folding. */
if (GET_MODE (new) != VOIDmode)
validate_change (object, &XEXP (x, 0), new, 0);
if (GET_MODE (new_rtx) != VOIDmode)
validate_change (object, &XEXP (x, 0), new_rtx, 0);
return x;
}

@@ -5694,9 +5694,9 @@ cse_process_notes_1 (rtx x, rtx object, bool *changed)
&& (CONSTANT_P (ent->const_rtx)
|| REG_P (ent->const_rtx)))
{
rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
if (new)
return copy_rtx (new);
rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
if (new_rtx)
return copy_rtx (new_rtx);
}
}

@@ -5718,10 +5718,10 @@ cse_process_notes_1 (rtx x, rtx object, bool *changed)
static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
rtx new = cse_process_notes_1 (x, object, changed);
if (new != x)
rtx new_rtx = cse_process_notes_1 (x, object, changed);
if (new_rtx != x)
*changed = true;
return new;
return new_rtx;
}