insn_t becomes an rtx_insn *
gcc/
* sel-sched-ir.h (insn_t): Strengthen from rtx to rtx_insn *.
(BND_TO): Delete this function and...
(SET_BND_TO): ...this function in favor of...
(BND_TO): ...reinstating this macro.
(struct _fence): Strengthen field "executing_insns" from
vec<rtx, va_gc> * to vec<rtx_insn *, va_gc> *.  Strengthen fields
"last_scheduled_insn" and "sched_next" from rtx to rtx_insn *.
(_succ_iter_cond): Update param "succp" from rtx * to insn_t *
and param "insn" from rtx to insn_t.
(create_vinsn_from_insn_rtx): Strengthen first param from rtx to
rtx_insn *.

* sched-int.h (insn_vec_t): Strengthen from vec<rtx> to
vec<rtx_insn *>.
(rtx_vec_t): Likewise.
(struct sched_deps_info_def): Strengthen param of "start_insn"
callback from rtx to rtx_insn *.  Likewise for param "insn2" of
"note_mem_dep" callback and first param of "note_dep" callback.

* haifa-sched.c (add_to_speculative_block): Strengthen param
"insn" from rtx to rtx_insn *.
(clear_priorities): Likewise.
(calc_priorities): Likewise for local "insn".

* sched-deps.c (haifa_start_insn): Likewise for param "insn".
Remove redundant checked cast.
(haifa_note_mem_dep): Likewise for param "pending_insn".
(haifa_note_dep): Likewise for param "elem".
(note_mem_dep): Likewise for param "e".
(sched_analyze_1): Add checked casts.
(sched_analyze_2): Likewise.

* sel-sched-dump.c (dump_insn_vector): Strengthen local "succ"
from rtx to rtx_insn *.
(debug): Update param from vec<rtx> & to vec<rtx_insn *>, and
from vec<rtx> * to vec<rtx_insn *> *.

* sel-sched-ir.c (blist_add): Remove use of SET_BND_TO
scaffolding.
(flist_add): Strengthen param "executing_insns" from
vec<rtx, va_gc> * to vec<rtx_insn *, va_gc> *.
(advance_deps_context): Remove now-redundant checked cast.
(init_fences): Replace uses of NULL_RTX with NULL.
(merge_fences): Strengthen params "last_scheduled_insn" and
"sched_next" from rtx to rtx_insn * and "executing_insns" from
vec<rtx, va_gc> * to vec<rtx_insn *, va_gc> *.
(add_clean_fence_to_fences): Replace uses of NULL_RTX with NULL.
(get_nop_from_pool): Add local "nop_pat" so that "nop" can be
an instruction, rather than doing double-duty as a pattern.
(return_nop_to_pool): Update for change of insn_t.
(deps_init_id): Remove now-redundant checked cast.
(struct sched_scan_info_def): Strengthen param of "init_insn"
callback from rtx to insn_t.
(sched_scan): Strengthen local "insn" from rtx to rtx_insn *.
(init_global_and_expr_for_insn): Replace uses of NULL_RTX with
NULL.
(get_seqno_by_succs): Strengthen param "insn" and locals "tmp",
"end" from rtx to rtx_insn *.
(create_vinsn_from_insn_rtx): Likewise for param "insn_rtx".
(rtx insn_rtx, bool force_unique_p)
(BND_TO): Delete function.
(SET_BND_TO): Delete function.

* sel-sched.c (advance_one_cycle): Strengthen local "insn" from
rtx to rtx_insn *.
(extract_new_fences_from): Replace uses of NULL_RTX with NULL.
(replace_dest_with_reg_in_expr): Strengthen local "insn_rtx" from
rtx to rtx_insn *.
(undo_transformations): Likewise for param "insn".
(update_liveness_on_insn): Likewise.
(compute_live_below_insn): Likewise for param "insn" and local
"succ".
(update_data_sets): Likewise for param "insn".
(fill_vec_av_set): Replace uses of NULL_RTX with NULL.
(convert_vec_av_set_to_ready): Drop now-redundant checked cast.
(invoke_aftermath_hooks): Strengthen param "best_insn" from rtx to
rtx_insn *.
(move_cond_jump): Likewise for param "insn".
(move_cond_jump): Drop use of SET_BND_TO.
(compute_av_set_on_boundaries): Likewise.
(update_fence_and_insn): Replace uses of NULL_RTX with NULL.
(update_and_record_unavailable_insns): Strengthen local "bb_end"
from rtx to rtx_insn *.
(maybe_emit_renaming_copy): Likewise for param "insn".
(maybe_emit_speculative_check): Likewise.
(handle_emitting_transformations): Likewise.
(remove_insn_from_stream): Likewise.
(code_motion_process_successors): Strengthen local "succ" from rtx
to insn_t.

/
* rtx-classes-status.txt (TODO): Remove SET_BND_TO.

From-SVN: r214528
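The pattern repeated across all ten files is the same: values that are statically known to be instructions move from the general rtx type to the more specific rtx_insn *, so callers can drop runtime-checked as_a <rtx_insn *> casts and let the compiler enforce the invariant; where the stronger type cannot be guaranteed yet (as in sched_analyze_1 and sched_analyze_2), a checked cast is added instead. A minimal standalone sketch of the idea follows; the types rtx_def, rtx_insn_def, rtx_insn_ptr and the two start_insn_* functions are simplified stand-ins, not GCC's real declarations.

/* Simplified stand-ins for the rtx / rtx_insn * relationship; these are
   not GCC's real types, only an illustration of the pattern.  */
#include <cassert>

struct rtx_def { bool is_insn; };            /* plays the role of rtx_def */
struct rtx_insn_def : rtx_def { int uid; };  /* plays the role of rtx_insn */

typedef rtx_def *rtx;
typedef rtx_insn_def *rtx_insn_ptr;

/* Checked downcast in the spirit of as_a <rtx_insn *> ().  */
template <typename T> T as_a (rtx x);

template <>
rtx_insn_ptr
as_a<rtx_insn_ptr> (rtx x)
{
  assert (x && x->is_insn);
  return static_cast<rtx_insn_ptr> (x);
}

/* Before: the parameter is a bare rtx, so the body needs a runtime cast.  */
static void
start_insn_weak (rtx insn)
{
  rtx_insn_ptr cur = as_a<rtx_insn_ptr> (insn);
  (void) cur;
}

/* After "strengthening": the signature guarantees an insn, no cast needed.  */
static void
start_insn_strong (rtx_insn_ptr insn)
{
  (void) insn->uid;   /* members of the derived type usable directly */
}

int
main ()
{
  rtx_insn_def i;
  i.is_insn = true;
  i.uid = 42;
  start_insn_weak (&i);      /* old-style call site, checked at runtime */
  start_insn_strong (&i);    /* new-style call site, checked at compile time */
  return 0;
}

This compile-time-versus-runtime trade-off is what the ChangeLog entries below mean by "Strengthen ... from rtx to rtx_insn *" and "Remove redundant checked cast".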
parent de8ea9631c
commit 6144a8363c
10 changed files with 175 additions and 85 deletions
ChangeLog
@@ -1,3 +1,7 @@
+2014-08-26  David Malcolm  <dmalcolm@redhat.com>
+
+* rtx-classes-status.txt (TODO): Remove SET_BND_TO.
+
 2014-08-25  David Malcolm  <dmalcolm@redhat.com>
 
 * rtx-classes-status.txt (TODO): Remove SET_BB_NOTE_LIST.
gcc/ChangeLog
@@ -1,3 +1,95 @@
+2014-08-26  David Malcolm  <dmalcolm@redhat.com>
+
+* sel-sched-ir.h (insn_t): Strengthen from rtx to rtx_insn *.
+(BND_TO): Delete this function and...
+(SET_BND_TO): ...this functions in favor of...
+(BND_TO): ...reinstating this macro.
+(struct _fence): Strengthen field "executing_insns" from
+vec<rtx, va_gc> * to vec<rtx_insn *, va_gc> *.  Strengthen fields
+"last_scheduled_insn" and "sched_next" from rtx to rtx_insn *.
+(_succ_iter_cond): Update param "succp" from rtx * to insn_t *
+and param "insn" from rtx to insn_t.
+(create_vinsn_from_insn_rtx): Strengthen first param from rtx to
+rtx_insn *.
+
+* sched-int.h (insn_vec_t): Strengthen from vec<rtx> to
+vec<rtx_insn *> .
+(rtx_vec_t): Likewise.
+(struct sched_deps_info_def): Strengthen param of "start_insn"
+callback from rtx to rtx_insn *.  Likewise for param "insn2" of
+"note_mem_dep" callback and first param of "note_dep" callback.
+
+* haifa-sched.c (add_to_speculative_block): Strengthen param
+"insn" from rtx to rtx_insn *.
+(clear_priorities): Likewise.
+(calc_priorities): Likewise for local "insn".
+
+* sched-deps.c (haifa_start_insn): Likewise for param "insn".
+Remove redundant checked cast.
+(haifa_note_mem_dep): Likewise for param "pending_insn".
+(haifa_note_dep): Likewise for param "elem".
+(note_mem_dep): Likewise for param "e".
+(sched_analyze_1): Add checked casts.
+(sched_analyze_2): Likewise.
+
+* sel-sched-dump.c (dump_insn_vector): Strengthen local "succ"
+from rtx to rtx_insn *.
+(debug): Update param from vec<rtx> & to vec<rtx_insn *>, and
+from vec<rtx> * to vec<rtx_insn *> *.
+
+* sel-sched-ir.c (blist_add): Remove use of SET_BND_TO
+scaffolding.
+(flist_add): Strengthen param "executing_insns" from
+vec<rtx, va_gc> * to vec<rtx_insn *, va_gc> *.
+(advance_deps_context): Remove now-redundant checked cast.
+(init_fences): Replace uses of NULL_RTX with NULL.
+(merge_fences): Strengthen params "last_scheduled_insn" and
+"sched_next" from rtx to rtx_insn * and "executing_insns" from
+vec<rtx, va_gc> * to vec<rtx_insn *, va_gc> *.
+(add_clean_fence_to_fences): Replace uses of NULL_RTX with NULL.
+(get_nop_from_pool): Add local "nop_pat" so that "nop" can be
+an instruction, rather than doing double-duty as a pattern.
+(return_nop_to_pool): Update for change of insn_t.
+(deps_init_id): Remove now-redundant checked cast.
+(struct sched_scan_info_def): Strengthen param of "init_insn"
+callback from rtx to insn_t.
+(sched_scan): Strengthen local "insn" from rtx to rtx_insn *.
+(init_global_and_expr_for_insn): Replace uses of NULL_RTX with
+NULL.
+(get_seqno_by_succs): Strengthen param "insn" and locals "tmp",
+"end" from rtx to rtx_insn *.
+(create_vinsn_from_insn_rtx): Likewise for param "insn_rtx".
+(rtx insn_rtx, bool force_unique_p)
+(BND_TO): Delete function.
+(SET_BND_TO): Delete function.
+
+* sel-sched.c (advance_one_cycle): Strengthen local "insn" from
+rtx to rtx_insn *.
+(extract_new_fences_from): Replace uses of NULL_RTX with NULL.
+(replace_dest_with_reg_in_expr): Strengthen local "insn_rtx" from
+rtx to rtx_insn *.
+(undo_transformations): Likewise for param "insn".
+(update_liveness_on_insn): Likewise.
+(compute_live_below_insn): Likewise for param "insn" and local
+"succ".
+(update_data_sets): Likewise for param "insn".
+(fill_vec_av_set): Replace uses of NULL_RTX with NULL.
+(convert_vec_av_set_to_ready): Drop now-redundant checked cast.
+(invoke_aftermath_hooks): Strengthen param "best_insn" from rtx to
+rtx_insn *.
+(move_cond_jump): Likewise for param "insn".
+(move_cond_jump): Drop use of SET_BND_TO.
+(compute_av_set_on_boundaries): Likewise.
+(update_fence_and_insn): Replace uses of NULL_RTX with NULL.
+(update_and_record_unavailable_insns): Strengthen local "bb_end"
+from rtx to rtx_insn *.
+(maybe_emit_renaming_copy): Likewise for param "insn".
+(maybe_emit_speculative_check): Likewise.
+(handle_emitting_transformations): Likewise.
+(remove_insn_from_stream): Likewise.
+(code_motion_process_successors): Strengthen local "succ" from rtx
+to insn_t.
+
 2014-08-26  David Malcolm  <dmalcolm@redhat.com>
 
 * sel-sched-ir.h (ilist_t): Redefine this typedef in terms of
gcc/haifa-sched.c
@@ -877,7 +877,7 @@ static int haifa_speculate_insn (rtx, ds_t, rtx *);
 static void generate_recovery_code (rtx_insn *);
 static void process_insn_forw_deps_be_in_spec (rtx, rtx, ds_t);
 static void begin_speculative_block (rtx_insn *);
-static void add_to_speculative_block (rtx);
+static void add_to_speculative_block (rtx_insn *);
 static void init_before_recovery (basic_block *);
 static void create_check_block_twin (rtx_insn *, bool);
 static void fix_recovery_deps (basic_block);
@@ -888,7 +888,7 @@ static void fix_jump_move (rtx);
 static void move_block_after_check (rtx);
 static void move_succs (vec<edge, va_gc> **, basic_block);
 static void sched_remove_insn (rtx_insn *);
-static void clear_priorities (rtx, rtx_vec_t *);
+static void clear_priorities (rtx_insn *, rtx_vec_t *);
 static void calc_priorities (rtx_vec_t);
 static void add_jump_dependencies (rtx, rtx);
@@ -7417,7 +7417,7 @@ static void haifa_init_insn (rtx);
 
 /* Generates recovery code for BE_IN speculative INSN. */
 static void
-add_to_speculative_block (rtx insn)
+add_to_speculative_block (rtx_insn *insn)
 {
 ds_t ts;
 sd_iterator_def sd_it;
@@ -8383,7 +8383,7 @@ sched_remove_insn (rtx_insn *insn)
 Store in vector pointed to by ROOTS_PTR insns on which priority () should
 be invoked to initialize all cleared priorities. */
 static void
-clear_priorities (rtx insn, rtx_vec_t *roots_ptr)
+clear_priorities (rtx_insn *insn, rtx_vec_t *roots_ptr)
 {
 sd_iterator_def sd_it;
 dep_t dep;
@@ -8419,7 +8419,7 @@ static void
 calc_priorities (rtx_vec_t roots)
 {
 int i;
-rtx insn;
+rtx_insn *insn;
 
 FOR_EACH_VEC_ELT (roots, i, insn)
 priority (insn);
gcc/sched-deps.c
@@ -1801,11 +1801,11 @@ static rtx_insn *cur_insn = NULL;
 /* Implement hooks for haifa scheduler. */
 
 static void
-haifa_start_insn (rtx insn)
+haifa_start_insn (rtx_insn *insn)
 {
 gcc_assert (insn && !cur_insn);
 
-cur_insn = as_a <rtx_insn *> (insn);
+cur_insn = insn;
 }
 
 static void
@@ -1833,7 +1833,7 @@ haifa_note_reg_use (int regno)
 }
 
 static void
-haifa_note_mem_dep (rtx mem, rtx pending_mem, rtx pending_insn, ds_t ds)
+haifa_note_mem_dep (rtx mem, rtx pending_mem, rtx_insn *pending_insn, ds_t ds)
 {
 if (!(ds & SPECULATIVE))
 {
@@ -1855,7 +1855,7 @@ haifa_note_mem_dep (rtx mem, rtx pending_mem, rtx pending_insn, ds_t ds)
 }
 
 static void
-haifa_note_dep (rtx elem, ds_t ds)
+haifa_note_dep (rtx_insn *elem, ds_t ds)
 {
 dep_def _dep;
 dep_t dep = &_dep;
@@ -1888,7 +1888,7 @@ note_reg_clobber (int r)
 }
 
 static void
-note_mem_dep (rtx m1, rtx m2, rtx e, ds_t ds)
+note_mem_dep (rtx m1, rtx m2, rtx_insn *e, ds_t ds)
 {
 if (sched_deps_info->note_mem_dep)
 sched_deps_info->note_mem_dep (m1, m2, e, ds);
@@ -2501,7 +2501,7 @@ sched_analyze_1 (struct deps_desc *deps, rtx x, rtx_insn *insn)
 {
 if (anti_dependence (XEXP (pending_mem, 0), t)
 && ! sched_insns_conditions_mutex_p (insn, XEXP (pending, 0)))
-note_mem_dep (t, XEXP (pending_mem, 0), XEXP (pending, 0),
+note_mem_dep (t, XEXP (pending_mem, 0), as_a <rtx_insn *> (XEXP (pending, 0)),
 DEP_ANTI);
 
 pending = XEXP (pending, 1);
@@ -2514,7 +2514,8 @@ sched_analyze_1 (struct deps_desc *deps, rtx x, rtx_insn *insn)
 {
 if (output_dependence (XEXP (pending_mem, 0), t)
 && ! sched_insns_conditions_mutex_p (insn, XEXP (pending, 0)))
-note_mem_dep (t, XEXP (pending_mem, 0), XEXP (pending, 0),
+note_mem_dep (t, XEXP (pending_mem, 0),
+as_a <rtx_insn *> (XEXP (pending, 0)),
 DEP_OUTPUT);
 
 pending = XEXP (pending, 1);
@@ -2646,7 +2647,8 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn)
 if (read_dependence (XEXP (pending_mem, 0), t)
 && ! sched_insns_conditions_mutex_p (insn,
 XEXP (pending, 0)))
-note_mem_dep (t, XEXP (pending_mem, 0), XEXP (pending, 0),
+note_mem_dep (t, XEXP (pending_mem, 0),
+as_a <rtx_insn *> (XEXP (pending, 0)),
 DEP_ANTI);
 
 pending = XEXP (pending, 1);
@@ -2660,7 +2662,8 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn)
 if (true_dependence (XEXP (pending_mem, 0), VOIDmode, t)
 && ! sched_insns_conditions_mutex_p (insn,
 XEXP (pending, 0)))
-note_mem_dep (t, XEXP (pending_mem, 0), XEXP (pending, 0),
+note_mem_dep (t, XEXP (pending_mem, 0),
+as_a <rtx_insn *> (XEXP (pending, 0)),
 sched_deps_info->generate_spec_deps
 ? BEGIN_DATA | DEP_TRUE : DEP_TRUE);
 
gcc/sched-int.h
@@ -41,8 +41,8 @@ enum sched_pressure_algorithm
 };
 
 typedef vec<basic_block> bb_vec_t;
-typedef vec<rtx> insn_vec_t;
-typedef vec<rtx> rtx_vec_t;
+typedef vec<rtx_insn *> insn_vec_t;
+typedef vec<rtx_insn *> rtx_vec_t;
 
 extern void sched_init_bbs (void);
 
@@ -1241,7 +1241,7 @@ struct sched_deps_info_def
 void (*compute_jump_reg_dependencies) (rtx, regset);
 
 /* Start analyzing insn. */
-void (*start_insn) (rtx);
+void (*start_insn) (rtx_insn *);
 
 /* Finish analyzing insn. */
 void (*finish_insn) (void);
@@ -1269,10 +1269,10 @@ struct sched_deps_info_def
 
 /* Note memory dependence of type DS between MEM1 and MEM2 (which is
 in the INSN2). */
-void (*note_mem_dep) (rtx mem1, rtx mem2, rtx insn2, ds_t ds);
+void (*note_mem_dep) (rtx mem1, rtx mem2, rtx_insn *insn2, ds_t ds);
 
 /* Note a dependence of type DS from the INSN. */
-void (*note_dep) (rtx, ds_t ds);
+void (*note_dep) (rtx_insn *, ds_t ds);
 
 /* Nonzero if we should use cselib for better alias analysis. This
 must be 0 if the dependency information is used after sched_analyze
gcc/sel-sched-dump.c
@@ -534,7 +534,7 @@ void
 dump_insn_vector (rtx_vec_t succs)
 {
 int i;
-rtx succ;
+rtx_insn *succ;
 
 FOR_EACH_VEC_ELT (succs, i, succ)
 if (succ)
@@ -996,7 +996,7 @@ debug_blist (blist_t bnds)
 
 /* Dump a rtx vector REF. */
 DEBUG_FUNCTION void
-debug (vec<rtx> &ref)
+debug (vec<rtx_insn *> &ref)
 {
 switch_dump (stderr);
 dump_insn_vector (ref);
@@ -1005,7 +1005,7 @@ debug (vec<rtx> &ref)
 }
 
 DEBUG_FUNCTION void
-debug (vec<rtx> *ptr)
+debug (vec<rtx_insn *> *ptr)
 {
 if (ptr)
 debug (*ptr);
gcc/sel-sched-ir.c
@@ -207,7 +207,7 @@ blist_add (blist_t *lp, insn_t to, ilist_t ptr, deps_t dc)
 _list_add (lp);
 bnd = BLIST_BND (*lp);
 
-SET_BND_TO (bnd) = to;
+BND_TO (bnd) = to;
 BND_PTR (bnd) = ptr;
 BND_AV (bnd) = NULL;
 BND_AV1 (bnd) = NULL;
@@ -262,7 +262,7 @@ init_fence_for_scheduling (fence_t f)
 /* Add new fence consisting of INSN and STATE to the list pointed to by LP. */
 static void
 flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc,
-insn_t last_scheduled_insn, vec<rtx, va_gc> *executing_insns,
+insn_t last_scheduled_insn, vec<rtx_insn *, va_gc> *executing_insns,
 int *ready_ticks, int ready_ticks_size, insn_t sched_next,
 int cycle, int cycle_issued_insns, int issue_more,
 bool starts_cycle_p, bool after_stall_p)
@@ -516,7 +516,7 @@ void
 advance_deps_context (deps_t dc, insn_t insn)
 {
 sched_deps_info = &advance_deps_context_sched_deps_info;
-deps_analyze_insn (dc, as_a <rtx_insn *> (insn));
+deps_analyze_insn (dc, insn);
 }
 
 
@@ -614,11 +614,11 @@ init_fences (insn_t old_fence)
 state_create (),
 create_deps_context () /* dc */,
 create_target_context (true) /* tc */,
-NULL_RTX /* last_scheduled_insn */,
+NULL /* last_scheduled_insn */,
 NULL, /* executing_insns */
 XCNEWVEC (int, ready_ticks_size), /* ready_ticks */
 ready_ticks_size,
-NULL_RTX /* sched_next */,
+NULL /* sched_next */,
 1 /* cycle */, 0 /* cycle_issued_insns */,
 issue_rate, /* issue_more */
 1 /* starts_cycle_p */, 0 /* after_stall_p */);
@@ -637,7 +637,8 @@ init_fences (insn_t old_fence)
 static void
 merge_fences (fence_t f, insn_t insn,
 state_t state, deps_t dc, void *tc,
-rtx last_scheduled_insn, vec<rtx, va_gc> *executing_insns,
+rtx_insn *last_scheduled_insn,
+vec<rtx_insn *, va_gc> *executing_insns,
 int *ready_ticks, int ready_ticks_size,
 rtx sched_next, int cycle, int issue_more, bool after_stall_p)
 {
@@ -802,9 +803,10 @@ merge_fences (fence_t f, insn_t insn,
 other parameters. */
 static void
 add_to_fences (flist_tail_t new_fences, insn_t insn,
-state_t state, deps_t dc, void *tc, rtx last_scheduled_insn,
-vec<rtx, va_gc> *executing_insns, int *ready_ticks,
-int ready_ticks_size, rtx sched_next, int cycle,
+state_t state, deps_t dc, void *tc,
+rtx_insn *last_scheduled_insn,
+vec<rtx_insn *, va_gc> *executing_insns, int *ready_ticks,
+int ready_ticks_size, rtx_insn *sched_next, int cycle,
 int cycle_issued_insns, int issue_rate,
 bool starts_cycle_p, bool after_stall_p)
 {
@@ -866,9 +868,9 @@ add_clean_fence_to_fences (flist_tail_t new_fences, insn_t succ, fence_t fence)
 add_to_fences (new_fences,
 succ, state_create (), create_deps_context (),
 create_target_context (true),
-NULL_RTX, NULL,
+NULL, NULL,
 XCNEWVEC (int, ready_ticks_size), ready_ticks_size,
-NULL_RTX, FENCE_CYCLE (fence) + 1,
+NULL, FENCE_CYCLE (fence) + 1,
 0, issue_rate, 1, FENCE_AFTER_STALL_P (fence));
 }
 
@@ -1036,16 +1038,17 @@ static vinsn_t nop_vinsn = NULL;
 insn_t
 get_nop_from_pool (insn_t insn)
 {
+rtx nop_pat;
 insn_t nop;
 bool old_p = nop_pool.n != 0;
 int flags;
 
 if (old_p)
-nop = nop_pool.v[--nop_pool.n];
+nop_pat = nop_pool.v[--nop_pool.n];
 else
-nop = nop_pattern;
+nop_pat = nop_pattern;
 
-nop = emit_insn_before (nop, insn);
+nop = emit_insn_before (nop_pat, insn);
 
 if (old_p)
 flags = INSN_INIT_TODO_SSID;
@@ -1069,7 +1072,7 @@ return_nop_to_pool (insn_t nop, bool full_tidying)
 INSN_DELETED_P (nop) = 0;
 
 if (nop_pool.n == nop_pool.s)
-nop_pool.v = XRESIZEVEC (rtx, nop_pool.v,
+nop_pool.v = XRESIZEVEC (rtx_insn *, nop_pool.v,
 (nop_pool.s = 2 * nop_pool.s + 1));
 nop_pool.v[nop_pool.n++] = nop;
 }
@@ -2748,7 +2751,7 @@ deps_init_id (idata_t id, insn_t insn, bool force_unique_p)
 
 sched_deps_info = &deps_init_id_sched_deps_info;
 
-deps_analyze_insn (dc, as_a <rtx_insn *> (insn));
+deps_analyze_insn (dc, insn);
 
 free_deps (dc);
 
@@ -2774,7 +2777,7 @@ struct sched_scan_info_def
 
 /* This hook makes scheduler frontend to initialize its internal data
 structures for the passed insn. */
-void (*init_insn) (rtx);
+void (*init_insn) (insn_t);
 };
 
 /* A driver function to add a set of basic blocks (BBS) to the
@@ -2798,7 +2801,7 @@ sched_scan (const struct sched_scan_info_def *ssi, bb_vec_t bbs)
 if (ssi->init_insn)
 FOR_EACH_VEC_ELT (bbs, i, bb)
 {
-rtx insn;
+rtx_insn *insn;
 
 FOR_BB_INSNS (bb, insn)
 ssi->init_insn (insn);
@@ -2937,7 +2940,7 @@ init_global_and_expr_for_insn (insn_t insn)
 
 if (NOTE_INSN_BASIC_BLOCK_P (insn))
 {
-init_global_data.prev_insn = NULL_RTX;
+init_global_data.prev_insn = NULL;
 return;
 }
 
@@ -2954,7 +2957,7 @@ init_global_and_expr_for_insn (insn_t insn)
 init_global_data.prev_insn = insn;
 }
 else
-init_global_data.prev_insn = NULL_RTX;
+init_global_data.prev_insn = NULL;
 
 if (GET_CODE (PATTERN (insn)) == ASM_INPUT
 || asm_noperands (PATTERN (insn)) >= 0)
@@ -3978,10 +3981,10 @@ sel_luid_for_non_insn (rtx x)
 /* Find the proper seqno for inserting at INSN by successors.
 Return -1 if no successors with positive seqno exist. */
 static int
-get_seqno_by_succs (rtx insn)
+get_seqno_by_succs (rtx_insn *insn)
 {
 basic_block bb = BLOCK_FOR_INSN (insn);
-rtx tmp = insn, end = BB_END (bb);
+rtx_insn *tmp = insn, *end = BB_END (bb);
 int seqno;
 insn_t succ = NULL;
 succ_iterator si;
@@ -5756,7 +5759,7 @@ create_insn_rtx_from_pattern (rtx pattern, rtx label)
 /* Create a new vinsn for INSN_RTX. FORCE_UNIQUE_P is true when the vinsn
 must not be clonable. */
 vinsn_t
-create_vinsn_from_insn_rtx (rtx insn_rtx, bool force_unique_p)
+create_vinsn_from_insn_rtx (rtx_insn *insn_rtx, bool force_unique_p)
 {
 gcc_assert (INSN_P (insn_rtx) && !INSN_IN_STREAM_P (insn_rtx));
 
@@ -6456,14 +6459,4 @@ rtx& SET_VINSN_INSN_RTX (vinsn_t vi)
 return vi->insn_rtx;
 }
 
-rtx_insn *BND_TO (bnd_t bnd)
-{
-return safe_as_a <rtx_insn *> (bnd->to);
-}
-
-insn_t& SET_BND_TO (bnd_t bnd)
-{
-return bnd->to;
-}
-
 #endif
gcc/sel-sched-ir.h
@@ -60,7 +60,7 @@ typedef _list_t _xlist_t;
 #define _XLIST_NEXT(L) (_LIST_NEXT (L))
 
 /* Instruction. */
-typedef rtx insn_t;
+typedef rtx_insn *insn_t;
 
 /* List of insns. */
 typedef _list_t ilist_t;
@@ -233,8 +233,7 @@ struct _bnd
 deps_t dc;
 };
 typedef struct _bnd *bnd_t;
-extern rtx_insn *BND_TO (bnd_t bnd);
-extern insn_t& SET_BND_TO (bnd_t bnd);
+#define BND_TO(B) ((B)->to)
 
 /* PTR stands not for pointer as you might think, but as a Path To Root of the
 current instruction group from boundary B. */
@@ -279,7 +278,7 @@ struct _fence
 tc_t tc;
 
 /* A vector of insns that are scheduled but not yet completed. */
-vec<rtx, va_gc> *executing_insns;
+vec<rtx_insn *, va_gc> *executing_insns;
 
 /* A vector indexed by UIDs that caches the earliest cycle on which
 an insn can be scheduled on this fence. */
@@ -289,13 +288,13 @@ struct _fence
 int ready_ticks_size;
 
 /* Insn, which has been scheduled last on this fence. */
-rtx last_scheduled_insn;
+rtx_insn *last_scheduled_insn;
 
 /* The last value of can_issue_more variable on this fence. */
 int issue_more;
 
 /* If non-NULL force the next scheduled insn to be SCHED_NEXT. */
-rtx sched_next;
+rtx_insn *sched_next;
 
 /* True if fill_insns processed this fence. */
 BOOL_BITFIELD processed_p : 1;
@@ -1255,7 +1254,7 @@ _succ_iter_start (insn_t *succp, insn_t insn, int flags)
 }
 
 static inline bool
-_succ_iter_cond (succ_iterator *ip, rtx *succp, rtx insn,
+_succ_iter_cond (succ_iterator *ip, insn_t *succp, insn_t insn,
 bool check (edge, succ_iterator *))
 {
 if (!ip->bb_end)
@@ -1661,7 +1660,7 @@ extern void sel_unregister_cfg_hooks (void);
 
 /* Expression transformation routines. */
 extern rtx_insn *create_insn_rtx_from_pattern (rtx, rtx);
-extern vinsn_t create_vinsn_from_insn_rtx (rtx, bool);
+extern vinsn_t create_vinsn_from_insn_rtx (rtx_insn *, bool);
 extern rtx_insn *create_copy_of_insn_rtx (rtx);
 extern void change_vinsn_in_expr (expr_t, vinsn_t);
 
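The sel-sched-ir.h hunks above also retire the transitional BND_TO / SET_BND_TO accessor functions in favor of the plain BND_TO macro, now that the underlying field is strongly typed (the matching function bodies are removed in the @@ -6456 hunk of sel-sched-ir.c). A minimal sketch of that "scaffolding" idiom, using simplified stand-in types rather than GCC's real declarations:

/* Simplified illustration of the scaffolding pattern; _bnd_stub and
   rtx_insn_stub are hypothetical stand-ins, not GCC's real types.  */
struct rtx_insn_stub { int uid; };          /* stand-in for rtx_insn */

struct _bnd_stub { rtx_insn_stub *to; };
typedef _bnd_stub *bnd_t;

/* Transitional scaffolding: a read accessor plus a SET_* lvalue accessor,
   so reads and writes could be tightened independently during conversion.  */
static rtx_insn_stub *BND_TO_fn (bnd_t bnd) { return bnd->to; }
static rtx_insn_stub *&SET_BND_TO_fn (bnd_t bnd) { return bnd->to; }

/* Once the field itself is strongly typed, one macro suffices again,
   usable both as an rvalue and as an assignment target.  */
#define BND_TO(B) ((B)->to)

int
main ()
{
  _bnd_stub b;
  rtx_insn_stub insn = { 7 };

  SET_BND_TO_fn (&b) = &insn;   /* old scaffolding style */
  BND_TO (&b) = &insn;          /* reinstated macro: plain field access */
  return BND_TO (&b)->uid == BND_TO_fn (&b)->uid ? 0 : 1;
}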
gcc/sel-sched.c
@@ -572,7 +572,7 @@ advance_one_cycle (fence_t fence)
 {
 unsigned i;
 int cycle;
-rtx insn;
+rtx_insn *insn;
 
 advance_state (FENCE_STATE (fence));
 cycle = ++FENCE_CYCLE (fence);
@@ -630,7 +630,7 @@ extract_new_fences_from (flist_t old_fences, flist_tail_t new_fences,
 int orig_max_seqno)
 {
 bool was_here_p = false;
-insn_t insn = NULL_RTX;
+insn_t insn = NULL;
 insn_t succ;
 succ_iterator si;
 ilist_iterator ii;
@@ -965,7 +965,7 @@ create_insn_rtx_with_lhs (vinsn_t vi, rtx lhs_rtx)
 static void
 replace_dest_with_reg_in_expr (expr_t expr, rtx new_reg)
 {
-rtx insn_rtx;
+rtx_insn *insn_rtx;
 vinsn_t vinsn;
 
 insn_rtx = create_insn_rtx_with_lhs (EXPR_VINSN (expr), new_reg);
@@ -1894,7 +1894,7 @@ identical_copy_p (rtx insn)
 /* Undo all transformations on *AV_PTR that were done when
 moving through INSN. */
 static void
-undo_transformations (av_set_t *av_ptr, rtx insn)
+undo_transformations (av_set_t *av_ptr, rtx_insn *insn)
 {
 av_set_iterator av_iter;
 expr_t expr;
@@ -3189,7 +3189,7 @@ compute_live (insn_t insn)
 
 /* Update liveness sets for INSN. */
 static inline void
-update_liveness_on_insn (rtx insn)
+update_liveness_on_insn (rtx_insn *insn)
 {
 ignore_first = true;
 compute_live (insn);
@@ -3197,9 +3197,9 @@ update_liveness_on_insn (rtx insn)
 
 /* Compute liveness below INSN and write it into REGS. */
 static inline void
-compute_live_below_insn (rtx insn, regset regs)
+compute_live_below_insn (rtx_insn *insn, regset regs)
 {
-rtx succ;
+rtx_insn *succ;
 succ_iterator si;
 
 FOR_EACH_SUCC_1 (succ, si, insn, SUCCS_ALL)
@@ -3208,7 +3208,7 @@ compute_live_below_insn (rtx insn, regset regs)
 
 /* Update the data gathered in av and lv sets starting from INSN. */
 static void
-update_data_sets (rtx insn)
+update_data_sets (rtx_insn *insn)
 {
 update_liveness_on_insn (insn);
 if (sel_bb_head_p (insn))
@@ -3955,7 +3955,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
 if (FENCE_SCHED_NEXT (fence))
 {
 gcc_assert (sched_next_worked == 1);
-FENCE_SCHED_NEXT (fence) = NULL_RTX;
+FENCE_SCHED_NEXT (fence) = NULL;
 }
 
 /* No need to stall if this variable was not initialized. */
@@ -4015,7 +4015,7 @@ convert_vec_av_set_to_ready (void)
 insn_t insn = VINSN_INSN_RTX (vi);
 
 ready_try[n] = 0;
-ready.vec[n] = as_a <rtx_insn *> (insn);
+ready.vec[n] = insn;
 }
 }
 
@@ -4281,7 +4281,7 @@ calculate_privileged_insns (void)
 number is ISSUE_MORE. FENCE and BEST_INSN are the current fence
 and the insn chosen for scheduling, respectively. */
 static int
-invoke_aftermath_hooks (fence_t fence, rtx best_insn, int issue_more)
+invoke_aftermath_hooks (fence_t fence, rtx_insn *best_insn, int issue_more)
 {
 gcc_assert (INSN_P (best_insn));
 
@@ -4922,7 +4922,7 @@ remove_insns_that_need_bookkeeping (fence_t fence, av_set_t *av_ptr)
 ...
 */
 static void
-move_cond_jump (rtx insn, bnd_t bnd)
+move_cond_jump (rtx_insn *insn, bnd_t bnd)
 {
 edge ft_edge;
 basic_block block_from, block_next, block_new, block_bnd, bb;
@@ -4955,7 +4955,7 @@ move_cond_jump (rtx insn, bnd_t bnd)
 
 /* Jump is moved to the boundary. */
 next = PREV_INSN (insn);
-SET_BND_TO (bnd) = insn;
+BND_TO (bnd) = insn;
 
 ft_edge = find_fallthru_edge_from (block_from);
 block_next = ft_edge->dest;
@@ -5096,7 +5096,7 @@ compute_av_set_on_boundaries (fence_t fence, blist_t bnds, av_set_t *av_vliw_p)
 {
 gcc_assert (FENCE_INSN (fence) == BND_TO (bnd));
 FENCE_INSN (fence) = bnd_to;
-SET_BND_TO (bnd) = bnd_to;
+BND_TO (bnd) = bnd_to;
 }
 
 av_set_clear (&BND_AV (bnd));
@@ -5373,7 +5373,7 @@ update_fence_and_insn (fence_t fence, insn_t insn, int need_stall)
 SCHED_GROUP_P (insn) = 0;
 }
 else
-FENCE_SCHED_NEXT (fence) = NULL_RTX;
+FENCE_SCHED_NEXT (fence) = NULL;
 if (INSN_UID (insn) < FENCE_READY_TICKS_SIZE (fence))
 FENCE_READY_TICKS (fence) [INSN_UID (insn)] = 0;
 
@@ -5707,7 +5707,7 @@ update_and_record_unavailable_insns (basic_block book_block)
 av_set_iterator i;
 av_set_t old_av_set = NULL;
 expr_t cur_expr;
-rtx bb_end = sel_bb_end (book_block);
+rtx_insn *bb_end = sel_bb_end (book_block);
 
 /* First, get correct liveness in the bookkeeping block. The problem is
 the range between the bookeeping insn and the end of block. */
@@ -5875,7 +5875,7 @@ track_scheduled_insns_and_blocks (rtx insn)
 /* Emit a register-register copy for INSN if needed. Return true if
 emitted one. PARAMS is the move_op static parameters. */
 static bool
-maybe_emit_renaming_copy (rtx insn,
+maybe_emit_renaming_copy (rtx_insn *insn,
 moveop_static_params_p params)
 {
 bool insn_emitted = false;
@@ -5915,7 +5915,7 @@ maybe_emit_renaming_copy (rtx insn,
 Return true if we've emitted one. PARAMS is the move_op static
 parameters. */
 static bool
-maybe_emit_speculative_check (rtx insn, expr_t expr,
+maybe_emit_speculative_check (rtx_insn *insn, expr_t expr,
 moveop_static_params_p params)
 {
 bool insn_emitted = false;
@@ -5944,7 +5944,7 @@ maybe_emit_speculative_check (rtx insn, expr_t expr,
 insn such as renaming/speculation. Return true if one of such
 transformations actually happened, and we have emitted this insn. */
 static bool
-handle_emitting_transformations (rtx insn, expr_t expr,
+handle_emitting_transformations (rtx_insn *insn, expr_t expr,
 moveop_static_params_p params)
 {
 bool insn_emitted = false;
@@ -6003,7 +6003,7 @@ need_nop_to_preserve_insn_bb (rtx insn)
 /* Remove INSN from stream. When ONLY_DISCONNECT is true, its data
 is not removed but reused when INSN is re-emitted. */
 static void
-remove_insn_from_stream (rtx insn, bool only_disconnect)
+remove_insn_from_stream (rtx_insn *insn, bool only_disconnect)
 {
 /* If there's only one insn in the BB, make sure that a nop is
 inserted into it, so the basic block won't disappear when we'll
@@ -6351,7 +6351,7 @@ code_motion_process_successors (insn_t insn, av_set_t orig_ops,
 {
 int res = 0;
 succ_iterator succ_i;
-rtx succ;
+insn_t succ;
 basic_block bb;
 int old_index;
 unsigned old_succs;
rtx-classes-status.txt
@@ -16,7 +16,6 @@ TODO: "Scaffolding" to be removed
 =================================
 * DF_REF_INSN
 * SET_BB_HEAD, SET_BB_END, SET_BB_HEADER
-* SET_BND_TO
 * SET_DEP_PRO, SET_DEP_CON
 * SET_NEXT_INSN, SET_PREV_INSN
 * SET_VINSN_INSN_RTX