re PR middle-end/37448 (cannot compile big function)
2008-10-11  Kenneth Zadeck  <zadeck@naturalbridge.com>

        PR rtl-optimization/37448
        * df.h: (df_ref_class): New enum.
        (DF_REF_TYPE_NAMES, df_ref_extract): Removed.
        (struct df_ref): Replaced with union df_ref_d.
        (df_base_ref, df_artificial_ref, df_regular_ref, df_extract_ref): New members of df_ref_d union.
        (DF_REF_REAL_REG, DF_REF_REGNO, DF_REF_REAL_LOC, DF_REF_REG, DF_REF_LOC, DF_REF_BB, DF_REF_INSN_INFO, DF_REF_INSN, DF_REF_CLASS, DF_REF_TYPE, DF_REF_CHAIN, DF_REF_ID, DF_REF_FLAGS, DF_REF_ORDER, DF_REF_IS_ARTIFICIAL, DF_REF_NEXT_REG, DF_REF_PREV_REG, DF_REF_EXTRACT_WIDTH, DF_REF_EXTRACT_OFFSET, DF_REF_EXTRACT_MODE): Replaced definition to access union df_ref_d.
        (DF_MWS_REG_DEF_P, DF_MWS_REG_USE_P, DF_MWS_TYPE): New macros.
        (df_scan_bb_info, df_bb_regno_first_def_find, df_bb_regno_last_def_find, df_find_def, df_find_use, df_refs_chain_dump, df_regs_chain_dump, df_ref_debug, debug_df_ref, df_chain_create, df_chain_unlink, df_chain_copy, df_ref_create, df_ref_remove, df_compute_accessed_bytes, df_get_artificial_defs, df_get_artificial_uses, union_defs): Replaced struct df_ref * with df_ref.
        * df-scan.c (df_collection_rec, df_null_ref_rec, df_ref_chain_delete_du_chain, df_ref_chain_delete, df_install_ref, df_grow_ref_info, df_ref_create, df_reg_chain_unlink, df_ref_compress_rec, df_ref_remove, df_ref_chain_delete_du_chain, df_ref_chain_delete, df_free_collection_rec, df_insn_rescan, df_reorganize_refs_by_reg_by_reg, df_reorganize_refs_by_reg_by_insn, df_reorganize_refs_by_reg, df_ref_change_reg_with_loc_1, df_notes_rescan, df_swap_refs, df_sort_and_compress_refs, df_install_ref, df_install_refs, df_ref_record, df_get_conditional_uses, df_get_call_refs, df_bb_refs_record, df_exit_block_uses_collect, df_record_exit_block_uses, df_reg_chain_mark, df_reg_chain_verify_unmarked, df_refs_verify): Replaced struct df_ref * with df_ref.
        (df_ref_record, df_uses_record, df_ref_create_structure): Added df_ref_class parameter.
        (df_scan_problem_data): Added new pools for different types of refs.
        (df_scan_free_internal, df_scan_alloc, df_free_ref, df_ref_create_structure): Processed new ref pools.
        (df_scan_start_dump): Added counts of refs and insns.
        (df_ref_create, df_notes_rescan, df_def_record_1, df_uses_record, df_get_call_refs, df_insn_refs_collect, df_bb_refs_collect, df_entry_block_defs_collect, df_exit_block_uses_collect): Added code to pass df_ref_class down to ref creation functions.
        (df_reg_chain_unlink, df_ref_remove, df_ref_change_reg_with_loc_1, df_reg_chain_mark): Use macros to hide references to df_refs.
        (df_ref_chain_change_bb): Removed.
        (df_insn_change_bb): Remove calls to df_ref_insn_change_bb.
        (df_ref_equal_p, df_ref_compare, df_ref_create_structure): Enhanced to understand df_ref union structure.
        * fwprop.c (local_ref_killed_between_p, use_killed_between, all_uses_available_at, update_df, try_fwprop_subst, forward_propagate_subreg, forward_propagate_and_simplify, forward_propagate_into, fwprop, fwprop_addr): Replaced struct df_ref * with df_ref.
        (use_killed_between, all_uses_available_at): Use macros to hide references to df_refs.
        * regstat.c (regstat_bb_compute_ri, regstat_bb_compute_calls_crossed): Replaced struct df_ref * with df_ref.
        * see.c (see_handle_relevant_defs, see_handle_relevant_uses, see_handle_relevant_refs, see_analyze_one_def, see_update_relevancy, see_propagate_extensions_to_uses): Replaced struct df_ref * with df_ref.
        * ra-conflict.c (record_one_conflict, clear_reg_in_live, global_conflicts): Replaced struct df_ref * with df_ref.
        * ddg.c (create_ddg_dep_from_intra_loop_link, add_cross_iteration_register_deps, build_inter_loop_deps): Replaced struct df_ref * with df_ref.
        (create_ddg_dep_from_intra_loop_link, add_cross_iteration_register_deps): Use macros to hide references to df_refs.
        * auto-inc-dec.c (find_inc, merge_in_block): Replaced struct df_ref * with df_ref.
        * df-core.c (df_bb_regno_first_def_find, df_bb_regno_last_def_find, df_find_def, df_find_use, df_refs_chain_dump, df_regs_chain_dump, df_ref_debug, debug_df_ref): Replaced struct df_ref * with df_ref.
        (df_mws_dump, df_ref_debug): Use macros to hide references to df_refs.
        * cse.c (cse_extended_basic_block): Replaced struct df_ref * with df_ref.
        * web.c (union_defs, entry_register, replace_ref, web_main): Replaced struct df_ref * with df_ref.
        (union_defs, replace_ref): Use macros to hide references to df_refs.
        * global.c (compute_regs_asm_clobbered, build_insn_chain): Replaced struct df_ref * with df_ref.
        * ifcvt.c (dead_or_predicable): Replaced struct df_ref * with df_ref.
        * sel-sched-ir.c (maybe_downgrade_id_to_use, setup_id_reg_sets): Replaced struct df_ref * with df_ref.
        * ira-lives.c (mark_ref_live, def_conflicts_with_inputs_p, mark_ref_dead, process_bb_node_lives): Replaced struct df_ref * with df_ref.
        * local-alloc.c (block_alloc): Replaced struct df_ref * with df_ref.
        * df-byte-scan.c (df_compute_accessed_bytes_extract, df_compute_accessed_bytes_strict_low_part, df_compute_accessed_bytes_subreg, df_compute_accessed_bytes): Replaced struct df_ref * with df_ref.
        (df_compute_accessed_bytes): Use macros to hide references to df_refs.
        * init-regs.c (initialize_uninitialized_regs): Replaced struct df_ref * with df_ref.
        * loop-invariant.c (invariant_for_use, hash_invariant_expr_1, check_dependency, check_dependencies, record_uses): Replaced struct df_ref * with df_ref.
        (invariant_for_use, check_dependency): Use macros to hide references to df_refs.
        * loop-iv.c (iv_analysis_loop_init, iv_get_reaching_def, get_biv_step_1, get_biv_step, record_iv, iv_analyze_def, iv_analyze, biv_p): Replaced struct df_ref * with df_ref.
        (iv_analysis_loop_init, iv_get_reaching_def): Use macros to hide references to df_refs.
        * ira.c (compute_regs_asm_clobbered): Replaced struct df_ref * with df_ref.
        * combine.c (create_log_links): Replaced struct df_ref * with df_ref.
        * df-problems.c (df_rd_bb_local_compute_process_def, df_lr_bb_local_compute, df_live_bb_local_compute, df_chain_create, df_chain_unlink_1, df_chain_unlink, df_chain_copy, df_chain_remove_problem, df_chain_create_bb_process_use, df_chain_create_bb, df_chain_top_dump, df_chain_bottom_dump, df_byte_lr_check_regs, df_byte_lr_bb_local_compute, df_byte_lr_simulate_defs, df_byte_lr_simulate_uses, df_byte_lr_simulate_artificial_refs_at_top, df_byte_lr_simulate_artificial_refs_at_end, df_create_unused_note, df_note_bb_compute, df_note_add_problem, df_simulate_defs, df_simulate_uses, df_simulate_artificial_refs_at_end, df_simulate_artificial_refs_at_top): Replaced struct df_ref * with df_ref.
        (df_chain_dump): Use macros to hide references to df_refs.
        * config/mips/mips.c (r10k_simplify_address): Replaced struct df_ref * with df_ref.
        * dce.c (mark_nonreg_stores, delete_corresponding_reg_eq_notes, mark_artificial_uses, mark_reg_dependencies, byte_dce_process_block): Replaced struct df_ref * with df_ref.

From-SVN: r141067
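As orientation for the mechanical rename described above, here is a minimal sketch in C, not taken verbatim from the patch. The union members are the ones the ChangeLog names; the typedef is an assumption inferred from the new declarations in the diffs below (where "struct df_ref **def_rec" becomes "df_ref *def_rec" and "struct df_ref *def = *def_rec" becomes "df_ref def = *def_rec"); the loop shows the client idiom that stays structurally unchanged.

    /* Assumed sketch of the new df.h handle type (not verbatim from the patch).  */
    union df_ref_d
    {
      struct df_base_ref base;              /* fields common to every ref class */
      struct df_artificial_ref artificial_ref;
      struct df_regular_ref regular_ref;
      struct df_extract_ref extract_ref;
    };
    typedef union df_ref_d *df_ref;          /* clients now write "df_ref", not "struct df_ref *" */

    /* Client loops keep the same shape; only the declarations change, and the
       DF_REF_* accessor macros hide the union layout, e.g.:  */
    df_ref *def_rec;
    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
      {
        df_ref def = *def_rec;
        unsigned int regno = DF_REF_REGNO (def);
        /* ... per-pass processing of DEF ... */
      }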
parent 4849e8364d
commit 57512f5363
26 changed files with 916 additions and 640 deletions
gcc/ChangeLog | 141 (the new entry added at the top of the file is identical to the commit message above)
gcc/auto-inc-dec.c:

@@ -1007,7 +1007,7 @@ find_inc (bool first_try)
rtx insn;
basic_block bb = BASIC_BLOCK (BLOCK_NUM (mem_insn.insn));
rtx other_insn;
struct df_ref **def_rec;
df_ref *def_rec;

/* Make sure this reg appears only once in this insn. */
if (count_occurrences (PATTERN (mem_insn.insn), mem_insn.reg0, 1) != 1)

@@ -1053,7 +1053,7 @@ find_inc (bool first_try)
assigned to by the mem insn. */
for (def_rec = DF_INSN_DEFS (mem_insn.insn); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if ((regno == REGNO (inc_insn.reg0))
|| (regno == REGNO (inc_insn.reg_res)))

@@ -1454,12 +1454,12 @@ merge_in_block (int max_reg, basic_block bb)
and there is noting to update. */
if (DF_INSN_UID_GET(uid))
{
struct df_ref **def_rec;
struct df_ref **use_rec;
df_ref *def_rec;
df_ref *use_rec;
/* Need to update next use. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
reg_next_use[DF_REF_REGNO (def)] = NULL;
reg_next_inc_use[DF_REF_REGNO (def)] = NULL;
reg_next_def[DF_REF_REGNO (def)] = insn;

@@ -1467,7 +1467,7 @@ merge_in_block (int max_reg, basic_block bb)
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
reg_next_use[DF_REF_REGNO (use)] = insn;
if (insn_is_add_or_inc)
reg_next_inc_use[DF_REF_REGNO (use)] = insn;
gcc/combine.c:

@@ -900,7 +900,7 @@ create_log_links (void)
{
basic_block bb;
rtx *next_use, insn;
struct df_ref **def_vec, **use_vec;
df_ref *def_vec, *use_vec;

next_use = XCNEWVEC (rtx, max_reg_num ());

@@ -925,7 +925,7 @@ create_log_links (void)
for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
{
struct df_ref *def = *def_vec;
df_ref def = *def_vec;
int regno = DF_REF_REGNO (def);
rtx use_insn;

@@ -979,7 +979,7 @@ create_log_links (void)
for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
{
struct df_ref *use = *use_vec;
df_ref use = *use_vec;
int regno = DF_REF_REGNO (use);

/* Do not consider the usage of the stack pointer
gcc/config/mips/mips.c:

@@ -12105,7 +12105,7 @@ static rtx
r10k_simplify_address (rtx x, rtx insn)
{
rtx newx, op0, op1, set, def_insn, note;
struct df_ref *use, *def;
df_ref use, def;
struct df_link *defs;

newx = NULL_RTX;
gcc/cse.c:

@@ -6008,11 +6008,11 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
edge pointing to that bb. */
if (bb_has_eh_pred (bb))
{
struct df_ref **def_rec;
df_ref *def_rec;

for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
}
gcc/dce.c | 18

@@ -218,10 +218,10 @@ mark_nonreg_stores (rtx body, rtx insn, bool fast)
static void
delete_corresponding_reg_eq_notes (rtx insn)
{
struct df_ref **def_rec;
df_ref *def_rec;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
/* This loop is a little tricky. We cannot just go down the
chain because it is being modified by the actions in the

@@ -229,7 +229,7 @@ delete_corresponding_reg_eq_notes (rtx insn)
anyway. */
while (DF_REG_EQ_USE_CHAIN (regno))
{
struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
df_ref eq_use = DF_REG_EQ_USE_CHAIN (regno);
rtx noted_insn = DF_REF_INSN (eq_use);
rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
if (!note)

@@ -330,7 +330,7 @@ mark_artificial_uses (void)
{
basic_block bb;
struct df_link *defs;
struct df_ref **use_rec;
df_ref *use_rec;

FOR_ALL_BB (bb)
{

@@ -349,11 +349,11 @@ static void
mark_reg_dependencies (rtx insn)
{
struct df_link *defs;
struct df_ref **use_rec;
df_ref *use_rec;

for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
if (dump_file)
{
fprintf (dump_file, "Processing use of ");

@@ -480,7 +480,7 @@ byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
struct df_ref **def_rec;
df_ref *def_rec;

if (redo_out)
{

@@ -511,7 +511,7 @@ byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
/* The insn is needed if there is someone who uses the output. */
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
unsigned int last;
unsigned int dregno = DF_REF_REGNO (def);
unsigned int start = df_byte_lr_get_regno_start (dregno);

@@ -584,7 +584,7 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au)
bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
struct df_ref **def_rec;
df_ref *def_rec;

if (redo_out)
{
gcc/ddg.c | 18

@@ -183,13 +183,13 @@ create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
if (set && REG_P (SET_DEST (set)))
{
int regno = REGNO (SET_DEST (set));
struct df_ref *first_def;
df_ref first_def;
struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

first_def = df_bb_regno_first_def_find (g->bb, regno);
gcc_assert (first_def);

if (bitmap_bit_p (bb_info->gen, first_def->id))
if (bitmap_bit_p (bb_info->gen, DF_REF_ID (first_def)))
return;
}
}

@@ -239,7 +239,7 @@ create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
and anti-dependences from its uses in the current iteration to the
first definition in the next iteration. */
static void
add_cross_iteration_register_deps (ddg_ptr g, struct df_ref *last_def)
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
int regno = DF_REF_REGNO (last_def);
struct df_link *r_use;

@@ -250,14 +250,14 @@ add_cross_iteration_register_deps (ddg_ptr g, struct df_ref *last_def)
#ifdef ENABLE_CHECKING
struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
struct df_ref *first_def = df_bb_regno_first_def_find (g->bb, regno);
df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

gcc_assert (last_def_node);
gcc_assert (first_def);

#ifdef ENABLE_CHECKING
if (last_def->id != first_def->id)
gcc_assert (!bitmap_bit_p (bb_info->gen, first_def->id));
if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
gcc_assert (!bitmap_bit_p (bb_info->gen, DF_REF_ID (first_def)));
#endif

/* Create inter-loop true dependences and anti dependences. */

@@ -293,7 +293,7 @@ add_cross_iteration_register_deps (ddg_ptr g, struct df_ref *last_def)
gcc_assert (first_def_node);

if (last_def->id != first_def->id
if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
|| !flag_modulo_sched_allow_regmoves)
create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
REG_DEP, 1);

@@ -311,7 +311,7 @@ add_cross_iteration_register_deps (ddg_ptr g, struct df_ref *last_def)
{
ddg_node_ptr dest_node;

if (last_def->id == first_def->id)
if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
return;

dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));

@@ -333,7 +333,7 @@ build_inter_loop_deps (ddg_ptr g)
/* Find inter-loop register output, true and anti deps. */
EXECUTE_IF_SET_IN_BITMAP (rd_bb_info->gen, 0, rd_num, bi)
{
struct df_ref *rd = DF_DEFS_GET (rd_num);
df_ref rd = DF_DEFS_GET (rd_num);

add_cross_iteration_register_deps (g, rd);
}
gcc/df-byte-scan.c:

@@ -46,7 +46,7 @@ along with GCC; see the file COPYING3. If not see
df_compute_accessed_bytes for a description of MM. */

static bool
df_compute_accessed_bytes_extract (struct df_ref *ref,
df_compute_accessed_bytes_extract (df_ref ref,
enum df_mm mm ,
unsigned int *start_byte,
unsigned int *last_byte)

@@ -148,7 +148,7 @@ df_compute_accessed_bytes_extract (struct df_ref *ref,
otherwise and set START_BYTE and LAST_BYTE. */

static bool
df_compute_accessed_bytes_strict_low_part (struct df_ref *ref,
df_compute_accessed_bytes_strict_low_part (df_ref ref,
unsigned int *start_byte,
unsigned int *last_byte)
{

@@ -200,7 +200,7 @@ df_compute_accessed_bytes_strict_low_part (struct df_ref *ref,
otherwise and set START_BYTE and LAST_BYTE. */

static bool
df_compute_accessed_bytes_subreg (struct df_ref *ref, unsigned int *start_byte,
df_compute_accessed_bytes_subreg (df_ref ref, unsigned int *start_byte,
unsigned int *last_byte)
{

@@ -229,7 +229,7 @@ df_compute_accessed_bytes_subreg (struct df_ref *ref, unsigned int *start_byte,
return true;

/* Defs and uses are different in the amount of the reg that touch. */
if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
if (DF_REF_REG_DEF_P (ref))
{
/* This is an lvalue. */

@@ -300,14 +300,14 @@ df_compute_accessed_bytes_subreg (struct df_ref *ref, unsigned int *start_byte,
This means that this use can be ignored. */

bool
df_compute_accessed_bytes (struct df_ref *ref, enum df_mm mm,
df_compute_accessed_bytes (df_ref ref, enum df_mm mm,
unsigned int *start_byte,
unsigned int *last_byte)
{
if (!dbg_cnt (df_byte_scan))
return true;

if (DF_REF_TYPE (ref) != DF_REF_REG_DEF
if (!DF_REF_REG_DEF_P (ref)
&& DF_REF_FLAGS_IS_SET (ref, DF_REF_READ_WRITE))
{
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_PRE_POST_MODIFY))
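Several of the call sites above replace the open-coded test DF_REF_TYPE (ref) == DF_REF_REG_DEF with DF_REF_REG_DEF_P (ref). The macro's definition lives in df.h and is not part of this excerpt; presumably it is equivalent to the comparison it replaces, along the lines of:

    /* Assumed expansion; the real definition is in df.h, outside this excerpt.  */
    #define DF_REF_REG_DEF_P(REF) (DF_REF_TYPE (REF) == DF_REF_REG_DEF)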
gcc/df-core.c:

@@ -1725,11 +1725,11 @@ df_set_clean_cfg (void)
/* Return first def of REGNO within BB. */

struct df_ref *
df_ref
df_bb_regno_first_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
struct df_ref **def_rec;
df_ref *def_rec;
unsigned int uid;

FOR_BB_INSNS (bb, insn)

@@ -1740,7 +1740,7 @@ df_bb_regno_first_def_find (basic_block bb, unsigned int regno)
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (DF_REF_REGNO (def) == regno)
return def;
}

@@ -1751,11 +1751,11 @@ df_bb_regno_first_def_find (basic_block bb, unsigned int regno)
/* Return last def of REGNO within BB. */

struct df_ref *
df_ref
df_bb_regno_last_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
struct df_ref **def_rec;
df_ref *def_rec;
unsigned int uid;

FOR_BB_INSNS_REVERSE (bb, insn)

@@ -1766,7 +1766,7 @@ df_bb_regno_last_def_find (basic_block bb, unsigned int regno)
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (DF_REF_REGNO (def) == regno)
return def;
}

@@ -1778,11 +1778,11 @@ df_bb_regno_last_def_find (basic_block bb, unsigned int regno)
/* Finds the reference corresponding to the definition of REG in INSN.
DF is the dataflow object. */

struct df_ref *
df_ref
df_find_def (rtx insn, rtx reg)
{
unsigned int uid;
struct df_ref **def_rec;
df_ref *def_rec;

if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);

@@ -1791,7 +1791,7 @@ df_find_def (rtx insn, rtx reg)
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (rtx_equal_p (DF_REF_REAL_REG (def), reg))
return def;
}

@@ -1812,11 +1812,11 @@ df_reg_defined (rtx insn, rtx reg)
/* Finds the reference corresponding to the use of REG in INSN.
DF is the dataflow object. */

struct df_ref *
df_ref
df_find_use (rtx insn, rtx reg)
{
unsigned int uid;
struct df_ref **use_rec;
df_ref *use_rec;

if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);

@@ -1825,14 +1825,14 @@ df_find_use (rtx insn, rtx reg)
uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
if (rtx_equal_p (DF_REF_REAL_REG (use), reg))
return use;
}
if (df->changeable_flags & DF_EQ_NOTES)
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
if (rtx_equal_p (DF_REF_REAL_REG (use), reg))
return use;
}
@@ -2064,12 +2064,12 @@ df_dump_bottom (basic_block bb, FILE *file)

void
df_refs_chain_dump (struct df_ref **ref_rec, bool follow_chain, FILE *file)
df_refs_chain_dump (df_ref *ref_rec, bool follow_chain, FILE *file)
{
fprintf (file, "{ ");
while (*ref_rec)
{
struct df_ref *ref = *ref_rec;
df_ref ref = *ref_rec;
fprintf (file, "%c%d(%d)",
DF_REF_REG_DEF_P (ref) ? 'd' : (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
DF_REF_ID (ref),

@@ -2085,7 +2085,7 @@ df_refs_chain_dump (struct df_ref **ref_rec, bool follow_chain, FILE *file)
/* Dump either a ref-def or reg-use chain. */

void
df_regs_chain_dump (struct df_ref *ref, FILE *file)
df_regs_chain_dump (df_ref ref, FILE *file)
{
fprintf (file, "{ ");
while (ref)

@@ -2094,7 +2094,7 @@ df_regs_chain_dump (struct df_ref *ref, FILE *file)
DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
DF_REF_ID (ref),
DF_REF_REGNO (ref));
ref = ref->next_reg;
ref = DF_REF_NEXT_REG (ref);
}
fprintf (file, "}");
}

@@ -2106,7 +2106,7 @@ df_mws_dump (struct df_mw_hardreg **mws, FILE *file)
while (*mws)
{
fprintf (file, "mw %c r[%d..%d]\n",
((*mws)->type == DF_REF_REG_DEF) ? 'd' : 'u',
(DF_MWS_REG_DEF_P (*mws)) ? 'd' : 'u',
(*mws)->start_regno, (*mws)->end_regno);
mws++;
}

@@ -2185,7 +2185,7 @@ df_regno_debug (unsigned int regno, FILE *file)

void
df_ref_debug (struct df_ref *ref, FILE *file)
df_ref_debug (df_ref ref, FILE *file)
{
fprintf (file, "%c%d ",
DF_REF_REG_DEF_P (ref) ? 'd' : 'u',

@@ -2193,7 +2193,7 @@ df_ref_debug (struct df_ref *ref, FILE *file)
fprintf (file, "reg %d bb %d insn %d flag 0x%x type 0x%x ",
DF_REF_REGNO (ref),
DF_REF_BBNO (ref),
DF_REF_INSN_INFO (ref) ? INSN_UID (DF_REF_INSN (ref)) : -1,
DF_REF_IS_ARTIFICIAL (ref) ? -1 : DF_REF_INSN_UID (ref),
DF_REF_FLAGS (ref),
DF_REF_TYPE (ref));
if (DF_REF_LOC (ref))

@@ -2229,7 +2229,7 @@ debug_df_regno (unsigned int regno)

void
debug_df_ref (struct df_ref *ref)
debug_df_ref (df_ref ref)
{
df_ref_debug (ref, stderr);
}
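df_mws_dump above now asks DF_MWS_REG_DEF_P (*mws) instead of reading (*mws)->type directly. The ChangeLog lists DF_MWS_TYPE, DF_MWS_REG_DEF_P and DF_MWS_REG_USE_P as new df.h macros; their bodies are not shown in this excerpt, but a sketch consistent with the call sites would be:

    /* Assumed sketch of the new multiword-hardreg accessors (not verbatim from the patch).  */
    #define DF_MWS_TYPE(MREF)       ((MREF)->type)
    #define DF_MWS_REG_DEF_P(MREF)  (DF_MWS_TYPE (MREF) == DF_REF_REG_DEF)
    #define DF_MWS_REG_USE_P(MREF)  (DF_MWS_TYPE (MREF) == DF_REF_REG_USE)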
gcc/df-problems.c:

@@ -128,7 +128,7 @@ df_chain_dump (struct df_link *link, FILE *file)
DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
DF_REF_ID (link->ref),
DF_REF_BBNO (link->ref),
DF_REF_INSN_INFO (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
DF_REF_IS_ARTIFICIAL (link->ref) ? -1 : DF_REF_INSN_UID (link->ref));
}
fprintf (file, "}");
}

@@ -320,12 +320,12 @@ df_rd_alloc (bitmap all_blocks)

static void
df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
struct df_ref **def_rec,
df_ref *def_rec,
enum df_ref_flags top_flag)
{
while (*def_rec)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
{
unsigned int regno = DF_REF_REGNO (def);

@@ -795,13 +795,13 @@ df_lr_bb_local_compute (unsigned int bb_index)
basic_block bb = BASIC_BLOCK (bb_index);
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
rtx insn;
struct df_ref **def_rec;
struct df_ref **use_rec;
df_ref *def_rec;
df_ref *use_rec;

/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -813,7 +813,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
/* Process the hardware registers that are always live. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

@@ -828,7 +828,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))

@@ -841,7 +841,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
}

@@ -852,7 +852,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
goto. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -865,7 +865,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

@@ -1415,7 +1415,7 @@ df_live_bb_local_compute (unsigned int bb_index)
basic_block bb = BASIC_BLOCK (bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
struct df_ref **def_rec;
df_ref *def_rec;
int luid = 0;

FOR_BB_INSNS (bb, insn)

@@ -1438,7 +1438,7 @@ df_live_bb_local_compute (unsigned int bb_index)
luid++;
for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);

if (DF_REF_FLAGS_IS_SET (def,

@@ -1457,7 +1457,7 @@ df_live_bb_local_compute (unsigned int bb_index)
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
}
}
@@ -1858,7 +1858,7 @@ df_live_verify_transfer_functions (void)
/* Create a du or ud chain from SRC to DST and link it into SRC. */

struct df_link *
df_chain_create (struct df_ref *src, struct df_ref *dst)
df_chain_create (df_ref src, df_ref dst)
{
struct df_link *head = DF_REF_CHAIN (src);
struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);

@@ -1873,7 +1873,7 @@ df_chain_create (struct df_ref *src, struct df_ref *dst)
/* Delete any du or ud chains that start at REF and point to
TARGET. */
static void
df_chain_unlink_1 (struct df_ref *ref, struct df_ref *target)
df_chain_unlink_1 (df_ref ref, df_ref target)
{
struct df_link *chain = DF_REF_CHAIN (ref);
struct df_link *prev = NULL;

@@ -1898,7 +1898,7 @@ df_chain_unlink_1 (struct df_ref *ref, struct df_ref *target)
/* Delete a du or ud chain that leave or point to REF. */

void
df_chain_unlink (struct df_ref *ref)
df_chain_unlink (df_ref ref)
{
struct df_link *chain = DF_REF_CHAIN (ref);
while (chain)

@@ -1917,7 +1917,7 @@ df_chain_unlink (struct df_ref *ref)
TO_REF. */

void
df_chain_copy (struct df_ref *to_ref,
df_chain_copy (df_ref to_ref,
struct df_link *from_ref)
{
while (from_ref)

@@ -1943,8 +1943,8 @@ df_chain_remove_problem (void)
EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
{
rtx insn;
struct df_ref **def_rec;
struct df_ref **use_rec;
df_ref *def_rec;
df_ref *use_rec;
basic_block bb = BASIC_BLOCK (bb_index);

if (df_chain_problem_p (DF_DU_CHAIN))

@@ -2015,7 +2015,7 @@ df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)

static void
df_chain_create_bb_process_use (bitmap local_rd,
struct df_ref **use_rec,
df_ref *use_rec,
enum df_ref_flags top_flag)
{
bitmap_iterator bi;

@@ -2023,7 +2023,7 @@ df_chain_create_bb_process_use (bitmap local_rd,
while (*use_rec)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (uregno >= FIRST_PSEUDO_REGISTER))

@@ -2039,7 +2039,7 @@ df_chain_create_bb_process_use (bitmap local_rd,
EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
{
struct df_ref *def;
df_ref def;
if (def_index > last_index)
break;

@@ -2067,7 +2067,7 @@ df_chain_create_bb (unsigned int bb_index)
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
bitmap cpy = BITMAP_ALLOC (NULL);
struct df_ref **def_rec;
df_ref *def_rec;

bitmap_copy (cpy, bb_info->in);
bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);

@@ -2088,7 +2088,7 @@ df_chain_create_bb (unsigned int bb_index)
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -2103,7 +2103,7 @@ df_chain_create_bb (unsigned int bb_index)
/* Process the regular instructions next. */
FOR_BB_INSNS (bb, insn)
{
struct df_ref **def_rec;
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);

if (!INSN_P (insn))

@@ -2122,7 +2122,7 @@ df_chain_create_bb (unsigned int bb_index)
pass only changes the bits in cpy. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
@@ -2183,14 +2183,14 @@ df_chain_top_dump (basic_block bb, FILE *file)
if (df_chain_problem_p (DF_DU_CHAIN))
{
rtx insn;
struct df_ref **def_rec = df_get_artificial_defs (bb->index);
df_ref *def_rec = df_get_artificial_defs (bb->index);
if (*def_rec)
{

fprintf (file, ";; DU chains for artificial defs\n");
while (*def_rec)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");

@@ -2211,9 +2211,9 @@ df_chain_top_dump (basic_block bb, FILE *file)
while (*def_rec)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
if (def->flags & DF_REF_READ_WRITE)
if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");

@@ -2232,14 +2232,14 @@ df_chain_bottom_dump (basic_block bb, FILE *file)
if (df_chain_problem_p (DF_UD_CHAIN))
{
rtx insn;
struct df_ref **use_rec = df_get_artificial_uses (bb->index);
df_ref *use_rec = df_get_artificial_uses (bb->index);

if (*use_rec)
{
fprintf (file, ";; UD chains for artificial uses\n");
while (*use_rec)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");

@@ -2252,7 +2252,7 @@ df_chain_bottom_dump (basic_block bb, FILE *file)
if (INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
struct df_ref **eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
use_rec = DF_INSN_INFO_USES (insn_info);
if (*use_rec || *eq_use_rec)
{

@@ -2261,9 +2261,9 @@ df_chain_bottom_dump (basic_block bb, FILE *file)
while (*use_rec)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
if (use->flags & DF_REF_READ_WRITE)
if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");

@@ -2271,7 +2271,7 @@ df_chain_bottom_dump (basic_block bb, FILE *file)
}
while (*eq_use_rec)
{
struct df_ref *use = *eq_use_rec;
df_ref use = *eq_use_rec;
fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");

@@ -2432,14 +2432,14 @@ df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
extracts, subregs or strict_low_parts. */

static void
df_byte_lr_check_regs (struct df_ref **ref_rec)
df_byte_lr_check_regs (df_ref *ref_rec)
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;

for (; *ref_rec; ref_rec++)
{
struct df_ref *ref = *ref_rec;
df_ref ref = *ref_rec;
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT
| DF_REF_ZERO_EXTRACT
| DF_REF_STRICT_LOW_PART)
@@ -2595,13 +2595,13 @@ df_byte_lr_bb_local_compute (unsigned int bb_index)
basic_block bb = BASIC_BLOCK (bb_index);
struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
rtx insn;
struct df_ref **def_rec;
struct df_ref **use_rec;
df_ref *def_rec;
df_ref *use_rec;

/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -2615,7 +2615,7 @@ df_byte_lr_bb_local_compute (unsigned int bb_index)
/* Process the hardware registers that are always live. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{

@@ -2635,7 +2635,7 @@ df_byte_lr_bb_local_compute (unsigned int bb_index)
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))

@@ -2660,7 +2660,7 @@ df_byte_lr_bb_local_compute (unsigned int bb_index)
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
unsigned int start = problem_data->regno_start[uregno];
unsigned int len = problem_data->regno_len[uregno];

@@ -2682,7 +2682,7 @@ df_byte_lr_bb_local_compute (unsigned int bb_index)
goto. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -2697,7 +2697,7 @@ df_byte_lr_bb_local_compute (unsigned int bb_index)
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
{

@@ -2911,12 +2911,12 @@ df_byte_lr_simulate_defs (rtx insn, bitmap live)
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
struct df_ref **def_rec;
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);

for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;

/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */

@@ -2947,12 +2947,12 @@ df_byte_lr_simulate_uses (rtx insn, bitmap live)
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
struct df_ref **use_rec;
df_ref *use_rec;
unsigned int uid = INSN_UID (insn);

for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
unsigned int start = problem_data->regno_start[uregno];
unsigned int len = problem_data->regno_len[uregno];

@@ -2980,16 +2980,16 @@ df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
struct df_ref **def_rec;
df_ref *def_rec;
#ifdef EH_USES
struct df_ref **use_rec;
df_ref *use_rec;
#endif
int bb_index = bb->index;

#ifdef EH_USES
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
{
unsigned int uregno = DF_REF_REGNO (use);

@@ -3002,7 +3002,7 @@ df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -3022,13 +3022,13 @@ df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
struct df_ref **def_rec;
struct df_ref **use_rec;
df_ref *def_rec;
df_ref *use_rec;
int bb_index = bb->index;

for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -3040,7 +3040,7 @@ df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int uregno = DF_REF_REGNO (use);
@@ -3351,7 +3351,7 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws,
LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */

static rtx
df_create_unused_note (rtx insn, rtx old, struct df_ref *def,
df_create_unused_note (rtx insn, rtx old, df_ref def,
bitmap live, bitmap artificial_uses)
{
unsigned int dregno = DF_REF_REGNO (def);

@@ -3391,8 +3391,8 @@ df_note_bb_compute (unsigned int bb_index,
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
struct df_ref **def_rec;
struct df_ref **use_rec;
df_ref *def_rec;
df_ref *use_rec;

bitmap_copy (live, df_get_live_out (bb));
bitmap_clear (artificial_uses);

@@ -3409,7 +3409,7 @@ df_note_bb_compute (unsigned int bb_index,
to begin processing. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));

@@ -3421,7 +3421,7 @@ df_note_bb_compute (unsigned int bb_index,
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int regno = DF_REF_REGNO (use);

@@ -3470,7 +3470,7 @@ df_note_bb_compute (unsigned int bb_index,
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
if ((mws->type == DF_REF_REG_DEF)
if ((DF_MWS_REG_DEF_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,

@@ -3483,7 +3483,7 @@ df_note_bb_compute (unsigned int bb_index,
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{

@@ -3504,7 +3504,7 @@ df_note_bb_compute (unsigned int bb_index,
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
if (mws->type == DF_REF_REG_DEF)
if (DF_MWS_REG_DEF_P (mws))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
mws, live, do_not_gen,

@@ -3514,7 +3514,7 @@ df_note_bb_compute (unsigned int bb_index,
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
old_unused_notes
= df_create_unused_note (insn, old_unused_notes,

@@ -3533,7 +3533,7 @@ df_note_bb_compute (unsigned int bb_index,
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
if ((mws->type != DF_REF_REG_DEF)
if ((DF_MWS_REG_DEF_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
old_dead_notes
= df_set_dead_notes_for_mw (insn, old_dead_notes,

@@ -3544,7 +3544,7 @@ df_note_bb_compute (unsigned int bb_index,
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);

#ifdef REG_DEAD_DEBUGGING
@ -3689,12 +3689,12 @@ df_note_add_problem (void)
|
|||
void
|
||||
df_simulate_find_defs (rtx insn, bitmap defs)
|
||||
{
|
||||
struct df_ref **def_rec;
|
||||
df_ref *def_rec;
|
||||
unsigned int uid = INSN_UID (insn);
|
||||
|
||||
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
|
||||
{
|
||||
struct df_ref *def = *def_rec;
|
||||
df_ref def = *def_rec;
|
||||
/* If the def is to only part of the reg, it does
|
||||
not kill the other defs that reach here. */
|
||||
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
|
||||
|
@ -3708,12 +3708,12 @@ df_simulate_find_defs (rtx insn, bitmap defs)
|
|||
void
|
||||
df_simulate_defs (rtx insn, bitmap live)
|
||||
{
|
||||
struct df_ref **def_rec;
|
||||
df_ref *def_rec;
|
||||
unsigned int uid = INSN_UID (insn);
|
||||
|
||||
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
|
||||
{
|
||||
struct df_ref *def = *def_rec;
|
||||
df_ref def = *def_rec;
|
||||
unsigned int dregno = DF_REF_REGNO (def);
|
||||
|
||||
/* If the def is to only part of the reg, it does
|
||||
|
@ -3729,12 +3729,12 @@ df_simulate_defs (rtx insn, bitmap live)
|
|||
void
|
||||
df_simulate_uses (rtx insn, bitmap live)
|
||||
{
|
||||
struct df_ref **use_rec;
|
||||
df_ref *use_rec;
|
||||
unsigned int uid = INSN_UID (insn);
|
||||
|
||||
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
|
||||
{
|
||||
struct df_ref *use = *use_rec;
|
||||
df_ref use = *use_rec;
|
||||
/* Add use to set of uses in this BB. */
|
||||
bitmap_set_bit (live, DF_REF_REGNO (use));
|
||||
}
|
||||
|
@ -3778,20 +3778,20 @@ df_simulate_fixup_sets (basic_block bb, bitmap live)
|
|||
void
|
||||
df_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
|
||||
{
|
||||
struct df_ref **def_rec;
|
||||
struct df_ref **use_rec;
|
||||
df_ref *def_rec;
|
||||
df_ref *use_rec;
|
||||
int bb_index = bb->index;
|
||||
|
||||
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
|
||||
{
|
||||
struct df_ref *def = *def_rec;
|
||||
df_ref def = *def_rec;
|
||||
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
|
||||
bitmap_clear_bit (live, DF_REF_REGNO (def));
|
||||
}
|
||||
|
||||
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
|
||||
{
|
||||
struct df_ref *use = *use_rec;
|
||||
df_ref use = *use_rec;
|
||||
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
|
||||
bitmap_set_bit (live, DF_REF_REGNO (use));
|
||||
}
|
||||
|
@ -3818,15 +3818,15 @@ df_simulate_one_insn (basic_block bb, rtx insn, bitmap live)
|
|||
void
|
||||
df_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
|
||||
{
|
||||
struct df_ref **def_rec;
|
||||
df_ref *def_rec;
|
||||
#ifdef EH_USES
|
||||
struct df_ref **use_rec;
|
||||
df_ref *use_rec;
|
||||
#endif
|
||||
int bb_index = bb->index;
|
||||
|
||||
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
|
||||
{
|
||||
struct df_ref *def = *def_rec;
|
||||
df_ref def = *def_rec;
|
||||
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
|
||||
bitmap_clear_bit (live, DF_REF_REGNO (def));
|
||||
}
|
||||
|
@ -3834,7 +3834,7 @@ df_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
|
|||
#ifdef EH_USES
|
||||
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
|
||||
{
|
||||
struct df_ref *use = *use_rec;
|
||||
df_ref use = *use_rec;
|
||||
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
|
||||
bitmap_set_bit (live, DF_REF_REGNO (use));
|
||||
}
|
||||
|
|
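Illustrative sketch, not part of the patch: the shape every converted loop above now takes.  DF_INSN_UID_DEFS yields a NULL-terminated vector, so the cursor is a "df_ref *" and each element a "df_ref"; all field access goes through the DF_REF_* macros.  The function name below is invented.

static void
collect_def_regnos (rtx insn, bitmap defs)
{
  df_ref *def_rec;
  unsigned int uid = INSN_UID (insn);

  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      /* The ref is an opaque union pointer; only the accessor macros
	 know its layout.  */
      bitmap_set_bit (defs, DF_REF_REGNO (def));
    }
}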
gcc/df-scan.c: 616 changes (file diff suppressed because it is too large)
gcc/df.h: 235 changes
@@ -34,6 +34,8 @@ struct dataflow;
struct df;
struct df_problem;
struct df_link;
+struct df_insn_info;
+union df_ref_d;

/* Data flow problems.  All problems must have a unique id here.  */

@@ -68,13 +70,14 @@ enum df_mm
  DF_MM_MUST
};

-/* The first of these is a set of a register.  The remaining three are
-   all uses of a register (the mem_load and mem_store relate to how
-   the register as an addressing operand).  */
-enum df_ref_type {DF_REF_REG_DEF, DF_REF_REG_USE, DF_REF_REG_MEM_LOAD,
-		  DF_REF_REG_MEM_STORE};
+/* Descriminator for the various df_ref types.  */
+enum df_ref_class {DF_REF_BASE, DF_REF_ARTIFICIAL, DF_REF_REGULAR, DF_REF_EXTRACT};

-#define DF_REF_TYPE_NAMES {"def", "use", "mem load", "mem store"}
+/* The first of these us a set of a registers.  The remaining three
+   are all uses of a register (the mem_load and mem_store relate to
+   how the register as an addressing operand).  */
+enum df_ref_type {DF_REF_REG_DEF, DF_REF_REG_USE,
+		  DF_REF_REG_MEM_LOAD, DF_REF_REG_MEM_STORE};

enum df_ref_flags
{
@@ -346,75 +349,106 @@ struct df_mw_hardreg
};

-/* One of these structures is allocated for every insn.  */
-struct df_insn_info
-{
-  rtx insn;                     /* The insn this info comes from.  */
-  struct df_ref **defs;         /* Head of insn-def chain.  */
-  struct df_ref **uses;         /* Head of insn-use chain.  */
-  /* Head of insn-use chain for uses in REG_EQUAL/EQUIV notes.  */
-  struct df_ref **eq_uses;
-  struct df_mw_hardreg **mw_hardregs;
-  /* The logical uid of the insn in the basic block.  This is valid
-     after any call to df_analyze but may rot after insns are added,
-     deleted or moved. */
-  int luid;
-};
-
/* Define a register reference structure.  One of these is allocated
-   for every register reference (use or def).  Note some register
-   references (e.g., post_inc, subreg) generate both a def and a use.  */
-struct df_ref
+    for every register reference (use or def).  Note some register
+    references (e.g., post_inc, subreg) generate both a def and a use.  */
+struct df_base_ref
{
+  /* These three bitfields are intentionally oversized, in the hope that
+     accesses to 8 and 16-bit fields will usually be quicker.  */
+  ENUM_BITFIELD(df_ref_class) cl : 8;
+
+  ENUM_BITFIELD(df_ref_type) type : 8;
+				/* Type of ref.  */
+  ENUM_BITFIELD(df_ref_flags) flags : 16;
+				/* Various flags.  */
  rtx reg;			/* The register referenced.  */
-  basic_block bb;               /* Basic block containing the instruction. */
-
-  /* Insn info for the insn containing ref. This will be null if this is
-     an artificial reference.  */
-  struct df_insn_info *insn_info;
-
-  rtx *loc;			/* The location of the reg.  */
  struct df_link *chain;	/* Head of def-use, use-def.  */
+  /* Pointer to the insn info of the containing instruction.  FIXME!
+     Currently this is NULL for artificial refs but this will be used
+     when FUDs are added.  */
+  struct df_insn_info *insn_info;
+  /* For each regno, there are three chains of refs, one for the uses,
+     the eq_uses and the defs.  These chains go thru the refs
+     themselves rather than using an external structure.  */
+  union df_ref_d *next_reg;     /* Next ref with same regno and type.  */
+  union df_ref_d *prev_reg;     /* Prev ref with same regno and type.  */
+  unsigned int regno;		/* The register number referenced.  */
  /* Location in the ref table.  This is only valid after a call to
     df_maybe_reorganize_[use,def]_refs which is an expensive operation.  */
  int id;
  /* The index at which the operand was scanned in the insn.  This is
     used to totally order the refs in an insn.  */
  unsigned int ref_order;
-
-  unsigned int regno;		/* The register number referenced.  */
-  /* These two bitfields are intentionally oversized, in the hope that
-     accesses to 16-bit fields will usually be quicker.  */
-  ENUM_BITFIELD(df_ref_type) type : 16;
-				/* Type of ref.  */
-  ENUM_BITFIELD(df_ref_flags) flags : 16;
-				/* Various flags.  */
-
-  /* For each regno, there are three chains of refs, one for the uses,
-     the eq_uses and the defs.  These chains go thru the refs
-     themselves rather than using an external structure.  */
-  struct df_ref *next_reg;     /* Next ref with same regno and type.  */
-  struct df_ref *prev_reg;     /* Prev ref with same regno and type.  */
};

+
+/* The three types of df_refs.  Note that the df_ref_extract is an
+   extension of the df_regular_ref, not the df_base_ref.  */
+struct df_artificial_ref
+{
+  struct df_base_ref base;
+
+  /* Artificial refs do not have an insn, so to get the basic block,
+     it must be explicitly here.  */
+  basic_block bb;
+};
+
+
+struct df_regular_ref
+{
+  struct df_base_ref base;
+  /* The loc is the address in the insn of the reg.  This is not
+     defined for special registers, such as clobbers and stack
+     pointers that are also associated with call insns and so those
+     just use the base.  */
+  rtx *loc;
+};
+
+
/* A df_ref_extract is just a df_ref with a width and offset field at
   the end of it.  It is used to hold this information if the ref was
   wrapped by a SIGN_EXTRACT or a ZERO_EXTRACT and to pass this info
   to passes that wish to process partial regs precisely.  */
-struct df_ref_extract
+struct df_extract_ref
{
-  struct df_ref ref;
+  struct df_regular_ref base;
  int width;
  int offset;
  enum machine_mode mode;
};

+/* Union of the different kinds of defs/uses placeholders.  */
+union df_ref_d
+{
+  struct df_base_ref base;
+  struct df_regular_ref regular_ref;
+  struct df_artificial_ref artificial_ref;
+  struct df_extract_ref extract_ref;
+};
+typedef union df_ref_d *df_ref;
+
+
+/* One of these structures is allocated for every insn.  */
+struct df_insn_info
+{
+  rtx insn;                     /* The insn this info comes from.  */
+  df_ref *defs;                 /* Head of insn-def chain.  */
+  df_ref *uses;                 /* Head of insn-use chain.  */
+  /* Head of insn-use chain for uses in REG_EQUAL/EQUIV notes.  */
+  df_ref *eq_uses;
+  struct df_mw_hardreg **mw_hardregs;
+  /* The logical uid of the insn in the basic block.  This is valid
+     after any call to df_analyze but may rot after insns are added,
+     deleted or moved. */
+  int luid;
+};
+
/* These links are used for ref-ref chains.  Currently only DEF-USE and
   USE-DEF chains can be built by DF.  */
struct df_link
{
-  struct df_ref *ref;
+  df_ref ref;
  struct df_link *next;
};
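Illustrative sketch, not part of the patch: with df_ref now a pointer to the union above, a pass asks which variant it has through DF_REF_CLASS (the 8-bit discriminator stored in the shared df_base_ref part) rather than by poking at fields.  Only regular and extract refs carry an in-insn location, which is why DF_REF_LOC (later in this df.h diff) returns NULL for the other classes.  The helper name is invented.

static bool
ref_has_loc_p (df_ref ref)
{
  /* Read the class discriminator; it is part of every variant.  */
  enum df_ref_class cl = DF_REF_CLASS (ref);

  /* Base and artificial refs have no loc field at all.  */
  return cl == DF_REF_REGULAR || cl == DF_REF_EXTRACT;
}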
@@ -454,7 +488,7 @@ enum df_changeable_flags
   boundary of the df_set_blocks if that has been defined.  */
struct df_ref_info
{
-  struct df_ref **refs;         /* Ref table, indexed by id.  */
+  df_ref *refs;                 /* Ref table, indexed by id.  */
  unsigned int *begin;          /* First ref_index for this pseudo.  */
  unsigned int *count;          /* Count of refs for this pseudo.  */
  unsigned int refs_size;       /* Size of currently allocated refs table.  */

@@ -476,7 +510,7 @@ struct df_ref_info
struct df_reg_info
{
  /* Head of chain for refs of that type and regno.  */
-  struct df_ref *reg_chain;
+  df_ref reg_chain;
  /* Number of refs in the chain.  */
  unsigned int n_refs;
};

@@ -604,46 +638,45 @@ struct df
/* Macros to access the elements within the ref structure.  */

-#define DF_REF_REAL_REG(REF) (GET_CODE ((REF)->reg) == SUBREG \
-				? SUBREG_REG ((REF)->reg) : ((REF)->reg))
-#define DF_REF_REGNO(REF) ((REF)->regno)
-#define DF_REF_REAL_LOC(REF) (GET_CODE (*((REF)->loc)) == SUBREG \
-			       ? &SUBREG_REG (*((REF)->loc)) : ((REF)->loc))
-#define DF_REF_REG(REF) ((REF)->reg)
-#define DF_REF_LOC(REF) ((REF)->loc)
-#define DF_REF_BB(REF) ((REF)->bb)
+#define DF_REF_REAL_REG(REF) (GET_CODE ((REF)->base.reg) == SUBREG \
+				? SUBREG_REG ((REF)->base.reg) : ((REF)->base.reg))
+#define DF_REF_REGNO(REF) ((REF)->base.regno)
+#define DF_REF_REAL_LOC(REF) (GET_CODE (*((REF)->regular_ref.loc)) == SUBREG \
+			       ? &SUBREG_REG (*((REF)->regular_ref.loc)) : ((REF)->regular_ref.loc))
+#define DF_REF_REG(REF) ((REF)->base.reg)
+#define DF_REF_LOC(REF) ((DF_REF_CLASS(REF) == DF_REF_REGULAR || DF_REF_CLASS(REF) == DF_REF_EXTRACT) ? \
+			 (REF)->regular_ref.loc : NULL)
+#define DF_REF_BB(REF) (DF_REF_IS_ARTIFICIAL(REF) ? \
+			(REF)->artificial_ref.bb : BLOCK_FOR_INSN (DF_REF_INSN(REF)))
#define DF_REF_BBNO(REF) (DF_REF_BB (REF)->index)
-#define DF_REF_INSN_INFO(REF) ((REF)->insn_info)
-#define DF_REF_INSN(REF) ((REF)->insn_info->insn)
+#define DF_REF_INSN_INFO(REF) ((REF)->base.insn_info)
+#define DF_REF_INSN(REF) ((REF)->base.insn_info->insn)
#define DF_REF_INSN_UID(REF) (INSN_UID (DF_REF_INSN(REF)))
-#define DF_REF_TYPE(REF) ((REF)->type)
-#define DF_REF_CHAIN(REF) ((REF)->chain)
-#define DF_REF_ID(REF) ((REF)->id)
-#define DF_REF_FLAGS(REF) ((REF)->flags)
+#define DF_REF_CLASS(REF) ((REF)->base.cl)
+#define DF_REF_TYPE(REF) ((REF)->base.type)
+#define DF_REF_CHAIN(REF) ((REF)->base.chain)
+#define DF_REF_ID(REF) ((REF)->base.id)
+#define DF_REF_FLAGS(REF) ((REF)->base.flags)
#define DF_REF_FLAGS_IS_SET(REF, v) ((DF_REF_FLAGS (REF) & (v)) != 0)
#define DF_REF_FLAGS_SET(REF, v) (DF_REF_FLAGS (REF) |= (v))
#define DF_REF_FLAGS_CLEAR(REF, v) (DF_REF_FLAGS (REF) &= ~(v))
-#define DF_REF_ORDER(REF) ((REF)->ref_order)
-/* If DF_REF_IS_ARTIFICIAL () is true, this is not a real definition/use,
-   but an artificial one created to model
-   always live registers, eh uses, etc.
-   ARTIFICIAL refs has NULL insn.  */
-#define DF_REF_IS_ARTIFICIAL(REF) ((REF)->insn_info == NULL)
+#define DF_REF_ORDER(REF) ((REF)->base.ref_order)
+/* If DF_REF_IS_ARTIFICIAL () is true, this is not a real
+   definition/use, but an artificial one created to model always live
+   registers, eh uses, etc.  */
+#define DF_REF_IS_ARTIFICIAL(REF) (DF_REF_CLASS(REF) == DF_REF_ARTIFICIAL)
#define DF_REF_REG_MARK(REF) (DF_REF_FLAGS_SET ((REF),DF_REF_REG_MARKER))
#define DF_REF_REG_UNMARK(REF) (DF_REF_FLAGS_CLEAR ((REF),DF_REF_REG_MARKER))
#define DF_REF_IS_REG_MARKED(REF) (DF_REF_FLAGS_IS_SET ((REF),DF_REF_REG_MARKER))
-#define DF_REF_NEXT_REG(REF) ((REF)->next_reg)
-#define DF_REF_PREV_REG(REF) ((REF)->prev_reg)
+#define DF_REF_NEXT_REG(REF) ((REF)->base.next_reg)
+#define DF_REF_PREV_REG(REF) ((REF)->base.prev_reg)
/* The following two macros may only be applied if one of
   DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT is true. */
-#define DF_REF_EXTRACT_WIDTH(REF) (((struct df_ref_extract *)(REF))->width)
-#define DF_REF_EXTRACT_WIDTH_CONST(REF) (((const struct df_ref_extract *)(REF))->width)
-#define DF_REF_EXTRACT_OFFSET(REF) (((struct df_ref_extract *)(REF))->offset)
-#define DF_REF_EXTRACT_OFFSET_CONST(REF) (((const struct df_ref_extract *)(REF))->offset)
-#define DF_REF_EXTRACT_MODE(REF) (((struct df_ref_extract *)(REF))->mode)
-#define DF_REF_EXTRACT_MODE_CONST(REF) (((const struct df_ref_extract *)(REF))->mode)
-/* Macros to determine the reference type.  */
+#define DF_REF_EXTRACT_WIDTH(REF) ((REF)->extract_ref.width)
+#define DF_REF_EXTRACT_OFFSET(REF) ((REF)->extract_ref.offset)
+#define DF_REF_EXTRACT_MODE(REF) ((REF)->extract_ref.mode)
+
+/* Macros to determine the reference type.  */
#define DF_REF_REG_DEF_P(REF) (DF_REF_TYPE (REF) == DF_REF_REG_DEF)
#define DF_REF_REG_USE_P(REF) ((REF) && !DF_REF_REG_DEF_P (REF))
#define DF_REF_REG_MEM_STORE_P(REF) (DF_REF_TYPE (REF) == DF_REF_REG_MEM_STORE)

@@ -651,6 +684,10 @@ struct df
#define DF_REF_REG_MEM_P(REF) (DF_REF_REG_MEM_STORE_P (REF) \
                               || DF_REF_REG_MEM_LOAD_P (REF))

+#define DF_MWS_REG_DEF_P(MREF) (DF_MWS_TYPE (MREF) == DF_REF_REG_DEF)
+#define DF_MWS_REG_USE_P(MREF) ((MREF) && !DF_MWS_REG_DEF_P (MREF))
+#define DF_MWS_TYPE(MREF) ((MREF)->type)
+
/* Macros to get the refs out of def_info or use_info refs table.  If
   the focus of the dataflow has been set to some subset of blocks
   with df_set_blocks, these macros will only find the uses and defs
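Illustrative sketch, not part of the patch: DF_REF_BB no longer reads a stored bb field; it distinguishes the two kinds of refs, which is roughly what the code below spells out.  DF_REF_IS_ARTIFICIAL and DF_REF_INSN come from the hunk above; the function name is invented.

static basic_block
ref_block (df_ref ref)
{
  if (DF_REF_IS_ARTIFICIAL (ref))
    /* Artificial refs have no insn, so the block is stored explicitly
       in the artificial_ref variant of the union.  */
    return ref->artificial_ref.bb;

  /* Every other ref belongs to an insn; recover the block from it.  */
  return BLOCK_FOR_INSN (DF_REF_INSN (ref));
}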
@@ -743,13 +780,13 @@ struct df_scan_bb_info
     Blocks that are the targets of non-local goto's have the hard
     frame pointer defined at the top of the block.  */
-  struct df_ref **artificial_defs;
+  df_ref *artificial_defs;

  /* Blocks that are targets of exception edges may have some
     artificial uses.  These are logically at the top of the block.

     Most blocks have artificial uses at the bottom of the block.  */
-  struct df_ref **artificial_uses;
+  df_ref *artificial_uses;
};

@@ -866,11 +903,11 @@ extern void df_verify (void);
#ifdef DF_DEBUG_CFG
extern void df_check_cfg_clean (void);
#endif
-extern struct df_ref *df_bb_regno_first_def_find (basic_block, unsigned int);
-extern struct df_ref *df_bb_regno_last_def_find (basic_block, unsigned int);
-extern struct df_ref *df_find_def (rtx, rtx);
+extern df_ref df_bb_regno_first_def_find (basic_block, unsigned int);
+extern df_ref df_bb_regno_last_def_find (basic_block, unsigned int);
+extern df_ref df_find_def (rtx, rtx);
extern bool df_reg_defined (rtx, rtx);
-extern struct df_ref *df_find_use (rtx, rtx);
+extern df_ref df_find_use (rtx, rtx);
extern bool df_reg_used (rtx, rtx);
extern void df_worklist_dataflow (struct dataflow *,bitmap, int *, int);
extern void df_print_regset (FILE *file, bitmap r);

@@ -880,25 +917,25 @@ extern void df_dump_region (FILE *);
extern void df_dump_start (FILE *);
extern void df_dump_top (basic_block, FILE *);
extern void df_dump_bottom (basic_block, FILE *);
-extern void df_refs_chain_dump (struct df_ref **, bool, FILE *);
-extern void df_regs_chain_dump (struct df_ref *, FILE *);
+extern void df_refs_chain_dump (df_ref *, bool, FILE *);
+extern void df_regs_chain_dump (df_ref, FILE *);
extern void df_insn_debug (rtx, bool, FILE *);
extern void df_insn_debug_regno (rtx, FILE *);
extern void df_regno_debug (unsigned int, FILE *);
-extern void df_ref_debug (struct df_ref *, FILE *);
+extern void df_ref_debug (df_ref, FILE *);
extern void debug_df_insn (rtx);
extern void debug_df_regno (unsigned int);
extern void debug_df_reg (rtx);
extern void debug_df_defno (unsigned int);
extern void debug_df_useno (unsigned int);
-extern void debug_df_ref (struct df_ref *);
+extern void debug_df_ref (df_ref);
extern void debug_df_chain (struct df_link *);

/* Functions defined in df-problems.c.  */

-extern struct df_link *df_chain_create (struct df_ref *, struct df_ref *);
-extern void df_chain_unlink (struct df_ref *);
-extern void df_chain_copy (struct df_ref *, struct df_link *);
+extern struct df_link *df_chain_create (df_ref, df_ref);
+extern void df_chain_unlink (df_ref);
+extern void df_chain_copy (df_ref, struct df_link *);
extern bitmap df_get_live_in (basic_block);
extern bitmap df_get_live_out (basic_block);
extern void df_grow_bb_info (struct dataflow *);

@@ -933,10 +970,10 @@ extern void df_scan_add_problem (void);
extern void df_grow_reg_info (void);
extern void df_grow_insn_info (void);
extern void df_scan_blocks (void);
-extern struct df_ref *df_ref_create (rtx, rtx *, rtx,basic_block,
+extern df_ref df_ref_create (rtx, rtx *, rtx,basic_block,
			     enum df_ref_type, enum df_ref_flags,
			     int, int, enum machine_mode);
-extern void df_ref_remove (struct df_ref *);
+extern void df_ref_remove (df_ref);
extern struct df_insn_info * df_insn_create_insn_record (rtx);
extern void df_insn_delete (basic_block, unsigned int);
extern void df_bb_refs_record (int, bool);

@@ -962,7 +999,7 @@ extern bool df_read_modify_subreg_p (rtx);
extern void df_scan_verify (void);

/* Functions defined in df-byte-scan.c.  */
-extern bool df_compute_accessed_bytes (struct df_ref *, enum df_mm,
+extern bool df_compute_accessed_bytes (df_ref, enum df_mm,
				       unsigned int *, unsigned int *);

@@ -1015,7 +1052,7 @@ df_byte_lr_get_bb_info (unsigned int index)

/* Get the artificial defs for a basic block.  */

-static inline struct df_ref **
+static inline df_ref *
df_get_artificial_defs (unsigned int bb_index)
{
  return df_scan_get_bb_info (bb_index)->artificial_defs;

@@ -1024,7 +1061,7 @@ df_get_artificial_defs (unsigned int bb_index)

/* Get the artificial uses for a basic block.  */

-static inline struct df_ref **
+static inline df_ref *
df_get_artificial_uses (unsigned int bb_index)
{
  return df_scan_get_bb_info (bb_index)->artificial_uses;

@@ -1045,7 +1082,7 @@ struct web_entry

extern struct web_entry *unionfind_root (struct web_entry *);
extern bool unionfind_union (struct web_entry *, struct web_entry *);
-extern void union_defs (struct df_ref *,
+extern void union_defs (df_ref,
			struct web_entry *, struct web_entry *,
			bool (*fun) (struct web_entry *, struct web_entry *));
gcc/fwprop.c: 46 changes
@@ -487,19 +487,19 @@ propagate_rtx (rtx x, enum machine_mode mode, rtx old_rtx, rtx new_rtx,
   between FROM to (but not including) TO.  */

static bool
-local_ref_killed_between_p (struct df_ref * ref, rtx from, rtx to)
+local_ref_killed_between_p (df_ref ref, rtx from, rtx to)
{
  rtx insn;

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
-      struct df_ref **def_rec;
+      df_ref *def_rec;
      if (!INSN_P (insn))
	continue;

      for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	{
-	  struct df_ref *def = *def_rec;
+	  df_ref def = *def_rec;
	  if (DF_REF_REGNO (ref) == DF_REF_REGNO (def))
	    return true;
	}

@@ -517,12 +517,12 @@ local_ref_killed_between_p (struct df_ref * ref, rtx from, rtx to)
   we check if the definition is killed after DEF_INSN or before
   TARGET_INSN insn, in their respective basic blocks.  */
static bool
-use_killed_between (struct df_ref *use, rtx def_insn, rtx target_insn)
+use_killed_between (df_ref use, rtx def_insn, rtx target_insn)
{
  basic_block def_bb = BLOCK_FOR_INSN (def_insn);
  basic_block target_bb = BLOCK_FOR_INSN (target_insn);
  int regno;
-  struct df_ref * def;
+  df_ref def;

  /* In some obscure situations we can have a def reaching a use
     that is _before_ the def.  In other words the def does not

@@ -543,7 +543,7 @@ use_killed_between (struct df_ref *use, rtx def_insn, rtx target_insn)
  regno = DF_REF_REGNO (use);
  def = DF_REG_DEF_CHAIN (regno);
  if (def
-      && def->next_reg == NULL
+      && DF_REF_NEXT_REG (def) == NULL
      && regno >= FIRST_PSEUDO_REGISTER)
    return false;

@@ -555,7 +555,7 @@ use_killed_between (struct df_ref *use, rtx def_insn, rtx target_insn)
  if (single_pred_p (target_bb)
      && single_pred (target_bb) == def_bb)
    {
-      struct df_ref *x;
+      df_ref x;

      /* See if USE is killed between DEF_INSN and the last insn in the
	 basic block containing DEF_INSN.  */

@@ -583,7 +583,7 @@ use_killed_between (struct df_ref *use, rtx def_insn, rtx target_insn)
static bool
all_uses_available_at (rtx def_insn, rtx target_insn)
{
-  struct df_ref **use_rec;
+  df_ref *use_rec;
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (def_insn);
  rtx def_set = single_set (def_insn);

@@ -600,14 +600,14 @@ all_uses_available_at (rtx def_insn, rtx target_insn)
	 invalid.  */
      for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
+	  df_ref use = *use_rec;
	  if (rtx_equal_p (DF_REF_REG (use), def_reg))
	    return false;
	}
      for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
-	  if (rtx_equal_p (use->reg, def_reg))
+	  df_ref use = *use_rec;
+	  if (rtx_equal_p (DF_REF_REG (use), def_reg))
	    return false;
	}
    }

@@ -617,13 +617,13 @@ all_uses_available_at (rtx def_insn, rtx target_insn)
	 killed between DEF_INSN and TARGET_INSN.  */
      for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
+	  df_ref use = *use_rec;
	  if (use_killed_between (use, def_insn, target_insn))
	    return false;
	}
      for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
+	  df_ref use = *use_rec;
	  if (use_killed_between (use, def_insn, target_insn))
	    return false;
	}

@@ -682,7 +682,7 @@ find_occurrence (rtx *px, rtx find)
   in the data flow object of the pass.  Mark any new uses as having the
   given TYPE.  */
static void
-update_df (rtx insn, rtx *loc, struct df_ref **use_rec, enum df_ref_type type,
+update_df (rtx insn, rtx *loc, df_ref *use_rec, enum df_ref_type type,
	   int new_flags)
{
  bool changed = false;

@@ -690,8 +690,8 @@ update_df (rtx insn, rtx *loc, struct df_ref **use_rec, enum df_ref_type type,
  /* Add a use for the registers that were propagated.  */
  while (*use_rec)
    {
-      struct df_ref *use = *use_rec;
-      struct df_ref *orig_use = use, *new_use;
+      df_ref use = *use_rec;
+      df_ref orig_use = use, new_use;
      int width = -1;
      int offset = -1;
      enum machine_mode mode = 0;

@@ -731,7 +731,7 @@ update_df (rtx insn, rtx *loc, struct df_ref **use_rec, enum df_ref_type type,
   performed.  */

static bool
-try_fwprop_subst (struct df_ref *use, rtx *loc, rtx new_rtx, rtx def_insn, bool set_reg_equal)
+try_fwprop_subst (df_ref use, rtx *loc, rtx new_rtx, rtx def_insn, bool set_reg_equal)
{
  rtx insn = DF_REF_INSN (use);
  enum df_ref_type type = DF_REF_TYPE (use);

@@ -821,7 +821,7 @@ try_fwprop_subst (struct df_ref *use, rtx *loc, rtx new_rtx, rtx def_insn, bool
/* If USE is a paradoxical subreg, see if it can be replaced by a pseudo.  */

static bool
-forward_propagate_subreg (struct df_ref *use, rtx def_insn, rtx def_set)
+forward_propagate_subreg (df_ref use, rtx def_insn, rtx def_set)
{
  rtx use_reg = DF_REF_REG (use);
  rtx use_insn, src;

@@ -856,7 +856,7 @@ forward_propagate_subreg (struct df_ref *use, rtx def_insn, rtx def_set)
   result.  */

static bool
-forward_propagate_and_simplify (struct df_ref *use, rtx def_insn, rtx def_set)
+forward_propagate_and_simplify (df_ref use, rtx def_insn, rtx def_set)
{
  rtx use_insn = DF_REF_INSN (use);
  rtx use_set = single_set (use_insn);

@@ -952,10 +952,10 @@ forward_propagate_and_simplify (struct df_ref *use, rtx def_insn, rtx def_set)
   definition, try to forward propagate it into that insn.  */

static void
-forward_propagate_into (struct df_ref *use)
+forward_propagate_into (df_ref use)
{
  struct df_link *defs;
-  struct df_ref *def;
+  df_ref def;
  rtx def_insn, def_set, use_insn;
  rtx parent;

@@ -1064,7 +1064,7 @@ fwprop (void)
  for (i = 0; i < DF_USES_TABLE_SIZE (); i++)
    {
-      struct df_ref *use = DF_USES_GET (i);
+      df_ref use = DF_USES_GET (i);
      if (use)
	if (DF_REF_TYPE (use) == DF_REF_REG_USE
	    || DF_REF_BB (use)->loop_father == NULL

@@ -1109,7 +1109,7 @@ fwprop_addr (void)
  for (i = 0; i < DF_USES_TABLE_SIZE (); i++)
    {
-      struct df_ref *use = DF_USES_GET (i);
+      df_ref use = DF_USES_GET (i);
      if (use)
	if (DF_REF_TYPE (use) != DF_REF_REG_USE
	    && DF_REF_BB (use)->loop_father != NULL
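Illustrative sketch, not part of the patch: the use_killed_between hunk above swaps a direct field walk (def->next_reg) for the accessor macro, because the per-register chain links now live inside the ref union.  A caller that wants to know whether a register has exactly one definition would now look like this; the helper name is invented.

static bool
regno_has_single_def_p (unsigned int regno)
{
  df_ref def = DF_REG_DEF_CHAIN (regno);

  /* DF_REF_NEXT_REG is the only sanctioned way to advance along the
     chain of refs that share a regno.  */
  return def != NULL && DF_REF_NEXT_REG (def) == NULL;
}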
gcc/global.c: 12 changes
@@ -165,11 +165,11 @@ compute_regs_asm_clobbered (char *regs_asm_clobbered)
      rtx insn;
      FOR_BB_INSNS_REVERSE (bb, insn)
	{
-	  struct df_ref **def_rec;
+	  df_ref *def_rec;
	  if (insn_contains_asm (insn))
	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	      {
-		struct df_ref *def = *def_rec;
+		df_ref def = *def_rec;
		unsigned int dregno = DF_REF_REGNO (def);
		if (dregno < FIRST_PSEUDO_REGISTER)
		  {

@@ -1448,8 +1448,8 @@ build_insn_chain (void)
	if (!NOTE_P (insn) && !BARRIER_P (insn))
	  {
	    unsigned int uid = INSN_UID (insn);
-	    struct df_ref **def_rec;
-	    struct df_ref **use_rec;
+	    df_ref *def_rec;
+	    df_ref *use_rec;

	    c = new_insn_chain ();
	    c->next = next;

@@ -1463,7 +1463,7 @@ build_insn_chain (void)
	    if (INSN_P (insn))
	      for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
		{
-		  struct df_ref *def = *def_rec;
+		  df_ref def = *def_rec;
		  unsigned int regno = DF_REF_REGNO (def);

		  /* Ignore may clobbers because these are generated

@@ -1555,7 +1555,7 @@ build_insn_chain (void)
	    if (INSN_P (insn))
	      for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
		{
-		  struct df_ref *use = *use_rec;
+		  df_ref use = *use_rec;
		  unsigned int regno = DF_REF_REGNO (use);
		  rtx reg = DF_REF_REG (use);

@@ -3922,10 +3922,10 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
	if (INSN_P (insn))
	  {
	    unsigned int uid = INSN_UID (insn);
-	    struct df_ref **def_rec;
+	    df_ref *def_rec;
	    for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
	      {
-		struct df_ref *def = *def_rec;
+		df_ref def = *def_rec;
		bitmap_set_bit (merge_set, DF_REF_REGNO (def));
	      }
	  }

@@ -70,13 +70,13 @@ initialize_uninitialized_regs (void)
      FOR_BB_INSNS (bb, insn)
	{
	  unsigned int uid = INSN_UID (insn);
-	  struct df_ref **use_rec;
+	  df_ref *use_rec;
	  if (!INSN_P (insn))
	    continue;

	  for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      unsigned int regno = DF_REF_REGNO (use);

	      /* Only do this for the pseudos.  */

@@ -266,7 +266,7 @@ mark_reg_live (rtx reg)

/* Mark the register referenced by use or def REF as live.  */
static void
-mark_ref_live (struct df_ref *ref)
+mark_ref_live (df_ref ref)
{
  rtx reg;

@@ -335,7 +335,7 @@ mark_reg_dead (rtx reg)
/* Mark the register referenced by definition DEF as dead, if the
   definition is a total one.  */
static void
-mark_ref_dead (struct df_ref *def)
+mark_ref_dead (df_ref def)
{
  rtx reg;

@@ -356,7 +356,7 @@ mark_early_clobbers (rtx insn, bool live_p)
{
  int alt;
  int def;
-  struct df_ref **def_rec;
+  df_ref *def_rec;
  bool set_p = false;

  for (def = 0; def < recog_data.n_operands; def++)

@@ -702,7 +702,7 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
     pessimistic, but it probably doesn't matter much in practice.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
-      struct df_ref **def_rec, **use_rec;
+      df_ref *def_rec, *use_rec;
      bool call_p;

      if (! INSN_P (insn))

@@ -1216,12 +1216,12 @@ compute_regs_asm_clobbered (char *regs_asm_clobbered)
      rtx insn;
      FOR_BB_INSNS_REVERSE (bb, insn)
	{
-	  struct df_ref **def_rec;
+	  df_ref *def_rec;

	  if (insn_contains_asm (insn))
	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	      {
-		struct df_ref *def = *def_rec;
+		df_ref def = *def_rec;
		unsigned int dregno = DF_REF_REGNO (def);
		if (dregno < FIRST_PSEUDO_REGISTER)
		  {

@@ -1279,7 +1279,7 @@ block_alloc (basic_block b)
  int insn_count = 0;
  int max_uid = get_max_uid ();
  int *qty_order;
-  struct df_ref ** def_rec;
+  df_ref *def_rec;

  /* Count the instructions in the basic block.  */

@@ -243,13 +243,13 @@ check_maybe_invariant (rtx x)
   invariant.  */

static struct invariant *
-invariant_for_use (struct df_ref *use)
+invariant_for_use (df_ref use)
{
  struct df_link *defs;
-  struct df_ref *def;
+  df_ref def;
  basic_block bb = DF_REF_BB (use), def_bb;

-  if (use->flags & DF_REF_READ_WRITE)
+  if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
    return NULL;

  defs = DF_REF_CHAIN (use);

@@ -276,7 +276,7 @@ hash_invariant_expr_1 (rtx insn, rtx x)
  const char *fmt;
  hashval_t val = code;
  int do_not_record_p;
-  struct df_ref *use;
+  df_ref use;
  struct invariant *inv;

  switch (code)

@@ -330,7 +330,7 @@ invariant_expr_equal_p (rtx insn1, rtx e1, rtx insn2, rtx e2)
  enum rtx_code code = GET_CODE (e1);
  int i, j;
  const char *fmt;
-  struct df_ref *use1, *use2;
+  df_ref use1, use2;
  struct invariant *inv1 = NULL, *inv2 = NULL;
  rtx sub1, sub2;

@@ -724,15 +724,15 @@ record_use (struct def *def, rtx *use, rtx insn)
   loop invariants, false otherwise.  */

static bool
-check_dependency (basic_block bb, struct df_ref *use, bitmap depends_on)
+check_dependency (basic_block bb, df_ref use, bitmap depends_on)
{
-  struct df_ref *def;
+  df_ref def;
  basic_block def_bb;
  struct df_link *defs;
  struct def *def_data;
  struct invariant *inv;

-  if (use->flags & DF_REF_READ_WRITE)
+  if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
    return false;

  defs = DF_REF_CHAIN (use);

@@ -772,7 +772,7 @@ static bool
check_dependencies (rtx insn, bitmap depends_on)
{
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
-  struct df_ref **use_rec;
+  df_ref *use_rec;
  basic_block bb = BLOCK_FOR_INSN (insn);

  for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)

@@ -792,7 +792,7 @@ check_dependencies (rtx insn, bitmap depends_on)
static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed)
{
-  struct df_ref *ref;
+  df_ref ref;
  struct def *def;
  bitmap depends_on;
  rtx set, dest;

@@ -855,19 +855,19 @@ static void
record_uses (rtx insn)
{
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
-  struct df_ref **use_rec;
+  df_ref *use_rec;
  struct invariant *inv;

  for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
    {
-      struct df_ref *use = *use_rec;
+      df_ref use = *use_rec;
      inv = invariant_for_use (use);
      if (inv)
	record_use (inv->def, DF_REF_REAL_LOC (use), DF_REF_INSN (use));
    }
  for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
    {
-      struct df_ref *use = *use_rec;
+      df_ref use = *use_rec;
      inv = invariant_for_use (use);
      if (inv)
	record_use (inv->def, DF_REF_REAL_LOC (use), DF_REF_INSN (use));

@@ -293,15 +293,15 @@ iv_analysis_loop_init (struct loop *loop)
   is set to NULL and true is returned.  */

static bool
-latch_dominating_def (rtx reg, struct df_ref **def)
+latch_dominating_def (rtx reg, df_ref *def)
{
-  struct df_ref *single_rd = NULL, *adef;
+  df_ref single_rd = NULL, adef;
  unsigned regno = REGNO (reg);
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);

-  for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = adef->next_reg)
+  for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
    {
-      if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (adef)->index)
+      if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
	  || !bitmap_bit_p (bb_info->out, DF_REF_ID (adef)))
	continue;

@@ -322,9 +322,9 @@ latch_dominating_def (rtx reg, struct df_ref **def)
/* Gets definition of REG reaching its use in INSN and stores it to DEF.  */

static enum iv_grd_result
-iv_get_reaching_def (rtx insn, rtx reg, struct df_ref **def)
+iv_get_reaching_def (rtx insn, rtx reg, df_ref *def)
{
-  struct df_ref *use, *adef;
+  df_ref use, adef;
  basic_block def_bb, use_bb;
  rtx def_insn;
  bool dom_p;

@@ -349,7 +349,7 @@ iv_get_reaching_def (rtx insn, rtx reg, struct df_ref **def)
  adef = DF_REF_CHAIN (use)->ref;

  /* We do not handle setting only part of the register.  */
-  if (adef->flags & DF_REF_READ_WRITE)
+  if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
    return GRD_INVALID;

  def_insn = DF_REF_INSN (adef);

@@ -616,7 +616,7 @@ iv_shift (struct rtx_iv *iv, rtx mby)
   at get_biv_step.  */

static bool
-get_biv_step_1 (struct df_ref *def, rtx reg,
+get_biv_step_1 (df_ref def, rtx reg,
		rtx *inner_step, enum machine_mode *inner_mode,
		enum rtx_code *extend, enum machine_mode outer_mode,
		rtx *outer_step)

@@ -625,7 +625,7 @@ get_biv_step_1 (struct df_ref *def, rtx reg,
  rtx next, nextr, tmp;
  enum rtx_code code;
  rtx insn = DF_REF_INSN (def);
-  struct df_ref *next_def;
+  df_ref next_def;
  enum iv_grd_result res;

  set = single_set (insn);

@@ -783,7 +783,7 @@ get_biv_step_1 (struct df_ref *def, rtx reg,
   LAST_DEF is the definition of REG that dominates loop latch.  */

static bool
-get_biv_step (struct df_ref *last_def, rtx reg, rtx *inner_step,
+get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
	      enum machine_mode *inner_mode, enum rtx_code *extend,
	      enum machine_mode *outer_mode, rtx *outer_step)
{

@@ -803,7 +803,7 @@ get_biv_step (struct df_ref *last_def, rtx reg, rtx *inner_step,
/* Records information that DEF is induction variable IV.  */

static void
-record_iv (struct df_ref *def, struct rtx_iv *iv)
+record_iv (df_ref def, struct rtx_iv *iv)
{
  struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);

@@ -849,7 +849,7 @@ iv_analyze_biv (rtx def, struct rtx_iv *iv)
  rtx inner_step, outer_step;
  enum machine_mode inner_mode, outer_mode;
  enum rtx_code extend;
-  struct df_ref *last_def;
+  df_ref last_def;

  if (dump_file)
    {

@@ -1042,7 +1042,7 @@ iv_analyze_expr (rtx insn, rtx rhs, enum machine_mode mode, struct rtx_iv *iv)
/* Analyzes iv DEF and stores the result to *IV.  */

static bool
-iv_analyze_def (struct df_ref *def, struct rtx_iv *iv)
+iv_analyze_def (df_ref def, struct rtx_iv *iv)
{
  rtx insn = DF_REF_INSN (def);
  rtx reg = DF_REF_REG (def);

@@ -1107,7 +1107,7 @@ iv_analyze_def (struct df_ref *def, struct rtx_iv *iv)
static bool
iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
{
-  struct df_ref *def = NULL;
+  df_ref def = NULL;
  enum iv_grd_result res;

  if (dump_file)

@@ -1190,7 +1190,7 @@ iv_analyze (rtx insn, rtx val, struct rtx_iv *iv)
bool
iv_analyze_result (rtx insn, rtx def, struct rtx_iv *iv)
{
-  struct df_ref *adef;
+  df_ref adef;

  adef = df_find_def (insn, def);
  if (!adef)

@@ -1207,7 +1207,7 @@ bool
biv_p (rtx insn, rtx reg)
{
  struct rtx_iv iv;
-  struct df_ref *def, *last_def;
+  df_ref def, last_def;

  if (!simple_reg_p (reg))
    return false;

@@ -54,7 +54,7 @@ HOST_WIDE_INT max_bitnum;
alloc_pool adjacency_pool;
adjacency_t **adjacency;

-typedef struct df_ref * df_ref_t;
+typedef df_ref df_ref_t;
DEF_VEC_P(df_ref_t);
DEF_VEC_ALLOC_P(df_ref_t,heap);

@@ -278,7 +278,7 @@ record_one_conflict (sparseset allocnos_live,
static void
mark_reg_store (sparseset allocnos_live,
		HARD_REG_SET *hard_regs_live,
-		struct df_ref *ref)
+		df_ref ref)
{
  rtx reg = DF_REF_REG (ref);
  unsigned int regno = DF_REF_REGNO (ref);

@@ -448,7 +448,7 @@ clear_reg_in_live (sparseset allocnos_live,
		   sbitmap *live_subregs,
		   int *live_subregs_used,
		   HARD_REG_SET *hard_regs_live,
-		   rtx reg, struct df_ref *def)
+		   rtx reg, df_ref def)
{
  unsigned int regno = (GET_CODE (reg) == SUBREG)
    ? REGNO (SUBREG_REG (reg)): REGNO (reg);

@@ -813,8 +813,8 @@ global_conflicts (void)
      FOR_BB_INSNS_REVERSE (bb, insn)
	{
	  unsigned int uid = INSN_UID (insn);
-	  struct df_ref **def_rec;
-	  struct df_ref **use_rec;
+	  df_ref *def_rec;
+	  df_ref *use_rec;

	  if (!INSN_P (insn))
	    continue;

@@ -849,7 +849,7 @@ global_conflicts (void)
	     later.  */
	  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
	    {
-	      struct df_ref *def = *def_rec;
+	      df_ref def = *def_rec;

	      /* FIXME: Ignoring may clobbers is technically the wrong
		 thing to do.  However the old version of the this

@@ -880,7 +880,7 @@ global_conflicts (void)
	  /* Add the interferences for the defs.  */
	  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
	    {
-	      struct df_ref *def = *def_rec;
+	      df_ref def = *def_rec;
	      if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
		mark_reg_store (allocnos_live, &renumbers_live, def);
	    }

@@ -891,7 +891,7 @@ global_conflicts (void)
	  VEC_truncate (df_ref_t, clobbers, 0);
	  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
	    {
-	      struct df_ref *def = *def_rec;
+	      df_ref def = *def_rec;

	      if (!DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
		{

@@ -931,7 +931,7 @@ global_conflicts (void)
	  VEC_truncate (df_ref_t, dying_regs, 0);
	  for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      unsigned int regno = DF_REF_REGNO (use);
	      bool added = false;
	      int renumber = reg_renumber[regno];

@@ -1079,12 +1079,12 @@ global_conflicts (void)
		fprintf (dump_file, "  clobber conflicts\n");
	      for (k = VEC_length (df_ref_t, clobbers) - 1; k >= 0; k--)
		{
-		  struct df_ref *def = VEC_index (df_ref_t, clobbers, k);
+		  df_ref def = VEC_index (df_ref_t, clobbers, k);
		  int j;

		  for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
		    {
-		      struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
+		      df_ref use = VEC_index (df_ref_t, dying_regs, j);
		      record_one_conflict_between_regnos (GET_MODE (DF_REF_REG (def)),
							  DF_REF_REGNO (def),
							  GET_MODE (DF_REF_REG (use)),

@@ -1143,7 +1143,7 @@ global_conflicts (void)
	  for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
	    {
	      int used_in_output = 0;
-	      struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
+	      df_ref use = VEC_index (df_ref_t, dying_regs, j);
	      rtx reg = DF_REF_REG (use);
	      int uregno = DF_REF_REGNO (use);
	      enum machine_mode umode = GET_MODE (DF_REF_REG (use));

@@ -108,8 +108,8 @@ regstat_bb_compute_ri (unsigned int bb_index,
{
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
-  struct df_ref **def_rec;
-  struct df_ref **use_rec;
+  df_ref *def_rec;
+  df_ref *use_rec;
  int luid = 0;
  bitmap_iterator bi;
  unsigned int regno;

@@ -126,14 +126,14 @@ regstat_bb_compute_ri (unsigned int bb_index,
     to begin processing.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
-      struct df_ref *def = *def_rec;
+      df_ref def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	bitmap_clear_bit (live, DF_REF_REGNO (def));
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
-      struct df_ref *use = *use_rec;
+      df_ref use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
	{
	  regno = DF_REF_REGNO (use);

@@ -205,7 +205,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
      for (mws_rec = DF_INSN_UID_MWS (uid); *mws_rec; mws_rec++)
	{
	  struct df_mw_hardreg *mws = *mws_rec;
-	  if (mws->type == DF_REF_REG_DEF)
+	  if (DF_MWS_REG_DEF_P (mws))
	    {
	      bool all_dead = true;
	      unsigned int r;

@@ -232,7 +232,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
	 clobber.  This code is for the return.  */
      for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
	{
-	  struct df_ref *def = *def_rec;
+	  df_ref def = *def_rec;
	  if ((!CALL_P (insn))
	      || (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
	    {

@@ -281,7 +281,7 @@ regstat_bb_compute_ri (unsigned int bb_index,

      for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
+	  df_ref use = *use_rec;
	  unsigned int uregno = DF_REF_REGNO (use);

	  if (uregno >= FIRST_PSEUDO_REGISTER)

@@ -412,8 +412,8 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
-  struct df_ref **def_rec;
-  struct df_ref **use_rec;
+  df_ref *def_rec;
+  df_ref *use_rec;

  bitmap_copy (live, df_get_live_out (bb));

@@ -421,14 +421,14 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
     to begin processing.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
-      struct df_ref *def = *def_rec;
+      df_ref def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	bitmap_clear_bit (live, DF_REF_REGNO (def));
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
-      struct df_ref *use = *use_rec;
+      df_ref use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
	bitmap_set_bit (live, DF_REF_REGNO (use));
    }

@@ -456,7 +456,7 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
	 clobber.  This code is for the return.  */
      for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
	{
-	  struct df_ref *def = *def_rec;
+	  df_ref def = *def_rec;
	  if ((!CALL_P (insn))
	      || (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
	    {

@@ -468,7 +468,7 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)

      for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
+	  df_ref use = *use_rec;
	  bitmap_set_bit (live, DF_REF_REGNO (use));
	}
    }
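Illustrative sketch, not part of the patch: the regstat change just above stops testing mws->type directly and goes through the new DF_MWS_* macros from df.h.  A caller counting multiword hardreg defs of an insn would now look like this; the function name is invented.

static int
count_mw_defs (unsigned int uid)
{
  struct df_mw_hardreg **mws_rec;
  int n = 0;

  for (mws_rec = DF_INSN_UID_MWS (uid); *mws_rec; mws_rec++)
    /* Same test as the old mws->type == DF_REF_REG_DEF.  */
    if (DF_MWS_REG_DEF_P (*mws_rec))
      n++;

  return n;
}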
gcc/see.c: 40 changes
@@ -3242,7 +3242,7 @@ see_store_reference_and_extension (rtx ref_insn, rtx se_insn,
   happened and the optimization should be aborted.  */

static int
-see_handle_relevant_defs (struct df_ref *ref, rtx insn)
+see_handle_relevant_defs (df_ref ref, rtx insn)
{
  struct web_entry *root_entry = NULL;
  rtx se_insn = NULL;

@@ -3311,7 +3311,7 @@ see_handle_relevant_defs (struct df_ref *ref, rtx insn)
   happened and the optimization should be aborted.  */

static int
-see_handle_relevant_uses (struct df_ref *ref, rtx insn)
+see_handle_relevant_uses (df_ref ref, rtx insn)
{
  struct web_entry *root_entry = NULL;
  rtx se_insn = NULL;

@@ -3367,12 +3367,12 @@ see_handle_relevant_refs (void)
	  if (INSN_P (insn))
	    {
-	      struct df_ref **use_rec;
-	      struct df_ref **def_rec;
+	      df_ref *use_rec;
+	      df_ref *def_rec;

	      for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
		{
-		  struct df_ref *use = *use_rec;
+		  df_ref use = *use_rec;
		  int result = see_handle_relevant_uses (use, insn);
		  if (result == -1)
		    return -1;

@@ -3380,7 +3380,7 @@ see_handle_relevant_refs (void)
		}
	      for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
		{
-		  struct df_ref *use = *use_rec;
+		  df_ref use = *use_rec;
		  int result = see_handle_relevant_uses (use, insn);
		  if (result == -1)
		    return -1;

@@ -3388,7 +3388,7 @@ see_handle_relevant_refs (void)
		}
	      for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
		{
-		  struct df_ref *def = *def_rec;
+		  df_ref def = *def_rec;
		  int result = see_handle_relevant_defs (def, insn);
		  if (result == -1)
		    return -1;

@@ -3404,7 +3404,7 @@ see_handle_relevant_refs (void)
/* Initialized the use_entry field for REF in INSN at INDEX with ET.  */

static void
-see_update_uses_relevancy (rtx insn, struct df_ref *ref,
+see_update_uses_relevancy (rtx insn, df_ref ref,
			   enum entry_type et, unsigned int index)
{
  struct see_entry_extra_info *curr_entry_extra_info;

@@ -3585,7 +3585,7 @@ see_analyze_one_def (rtx insn, enum machine_mode *source_mode,
/* Initialized the def_entry field for REF in INSN at INDEX with ET.  */

static void
-see_update_defs_relevancy (rtx insn, struct df_ref *ref,
+see_update_defs_relevancy (rtx insn, df_ref ref,
			   enum entry_type et,
			   enum machine_mode source_mode,
			   enum machine_mode source_mode_unsigned,

@@ -3685,8 +3685,8 @@ see_update_relevancy (void)

  FOR_ALL_BB (bb)
    {
-      struct df_ref **use_rec;
-      struct df_ref **def_rec;
+      df_ref *use_rec;
+      df_ref *def_rec;
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{

@@ -3697,14 +3697,14 @@ see_update_relevancy (void)

	      for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
		{
-		  struct df_ref *use = *use_rec;
+		  df_ref use = *use_rec;
		  see_update_uses_relevancy (insn, use, et, u);
		  u++;
		}

	      for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
		{
-		  struct df_ref *use = *use_rec;
+		  df_ref use = *use_rec;
		  see_update_uses_relevancy (insn, use, et, u);
		  u++;
		}

@@ -3712,7 +3712,7 @@ see_update_relevancy (void)
	      et = see_analyze_one_def (insn, &source_mode, &source_mode_unsigned);
	      for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
		{
-		  struct df_ref *def = *def_rec;
+		  df_ref def = *def_rec;
		  see_update_defs_relevancy (insn, def, et, source_mode,
					     source_mode_unsigned, d);
		  d++;

@@ -3722,14 +3722,14 @@ see_update_relevancy (void)

      for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
+	  df_ref use = *use_rec;
	  see_update_uses_relevancy (NULL, use, NOT_RELEVANT, u);
	  u++;
	}

      for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
	{
-	  struct df_ref *def = *def_rec;
+	  df_ref def = *def_rec;
	  see_update_defs_relevancy (NULL, def, NOT_RELEVANT,
				     MAX_MACHINE_MODE, MAX_MACHINE_MODE, d);
	  d++;

@@ -3766,7 +3766,7 @@ see_propagate_extensions_to_uses (void)
  FOR_ALL_BB (bb)
    {
      rtx insn;
-      struct df_ref **use_rec;
+      df_ref *use_rec;

      FOR_BB_INSNS (bb, insn)
	{

@@ -3775,13 +3775,13 @@ see_propagate_extensions_to_uses (void)
	    {
	      for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
		{
-		  struct df_ref *use = *use_rec;
+		  df_ref use = *use_rec;
		  union_defs (use, def_entry, use_entry, see_update_leader_extra_info);
		}

	      for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
		{
-		  struct df_ref *use = *use_rec;
+		  df_ref use = *use_rec;
		  union_defs (use, def_entry, use_entry, see_update_leader_extra_info);
		}
	    }

@@ -3789,7 +3789,7 @@ see_propagate_extensions_to_uses (void)

      for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
	{
-	  struct df_ref *use = *use_rec;
+	  df_ref use = *use_rec;
	  union_defs (use, def_entry, use_entry, see_update_leader_extra_info);
	}
    }

@@ -2552,7 +2552,7 @@ maybe_downgrade_id_to_use (idata_t id, insn_t insn)
{
  bool must_be_use = false;
  unsigned uid = INSN_UID (insn);
-  struct df_ref **rec;
+  df_ref *rec;
  rtx lhs = IDATA_LHS (id);
  rtx rhs = IDATA_RHS (id);

@@ -2568,7 +2568,7 @@ maybe_downgrade_id_to_use (idata_t id, insn_t insn)

  for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
    {
-      struct df_ref *def = *rec;
+      df_ref def = *rec;

      if (DF_REF_INSN (def)
	  && DF_REF_FLAGS_IS_SET (def, DF_REF_PRE_POST_MODIFY)

@@ -2598,12 +2598,12 @@ static void
setup_id_reg_sets (idata_t id, insn_t insn)
{
  unsigned uid = INSN_UID (insn);
-  struct df_ref **rec;
+  df_ref *rec;
  regset tmp = get_clear_regset_from_pool ();

  for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
    {
-      struct df_ref *def = *rec;
+      df_ref def = *rec;
      unsigned int regno = DF_REF_REGNO (def);

      /* Post modifies are treated like clobbers by sched-deps.c.  */

@@ -2629,7 +2629,7 @@ setup_id_reg_sets (idata_t id, insn_t insn)

  for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
    {
-      struct df_ref *use = *rec;
+      df_ref use = *rec;
      unsigned int regno = DF_REF_REGNO (use);

      /* When these refs are met for the first time, skip them, as
gcc/web.c: 44 changes
@@ -60,8 +60,8 @@ along with GCC; see the file COPYING3.  If not see
#include "tree-pass.h"


-static rtx entry_register (struct web_entry *, struct df_ref *, char *);
-static void replace_ref (struct df_ref *, rtx);
+static rtx entry_register (struct web_entry *, df_ref, char *);
+static void replace_ref (df_ref, rtx);

/* Find the root of unionfind tree (the representative of set).  */

@@ -101,15 +101,15 @@ unionfind_union (struct web_entry *first, struct web_entry *second)
   FUN is the function that does the union.  */

void
-union_defs (struct df_ref *use, struct web_entry *def_entry,
+union_defs (df_ref use, struct web_entry *def_entry,
	    struct web_entry *use_entry,
	    bool (*fun) (struct web_entry *, struct web_entry *))
{
  struct df_insn_info *insn_info = DF_REF_INSN_INFO (use);
  struct df_link *link = DF_REF_CHAIN (use);
-  struct df_ref **use_link;
-  struct df_ref **eq_use_link;
-  struct df_ref **def_link;
+  df_ref *use_link;
+  df_ref *eq_use_link;
+  df_ref *def_link;
  rtx set;

  if (insn_info)

@@ -178,9 +178,9 @@ union_defs (struct df_ref *use, struct web_entry *def_entry,

  /* A READ_WRITE use requires the corresponding def to be in the same
     register.  Find it and union.  */
-  if (use->flags & DF_REF_READ_WRITE)
+  if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
    {
-      struct df_ref **link;
+      df_ref *link;

      if (insn_info)
	link = DF_INSN_INFO_DEFS (insn_info);

@@ -201,7 +201,7 @@ union_defs (struct df_ref *use, struct web_entry *def_entry,
/* Find the corresponding register for the given entry.  */

static rtx
-entry_register (struct web_entry *entry, struct df_ref *ref, char *used)
+entry_register (struct web_entry *entry, df_ref ref, char *used)
{
  struct web_entry *root;
  rtx reg, newreg;

@@ -243,11 +243,11 @@ entry_register (struct web_entry *entry, struct df_ref *ref, char *used)
/* Replace the reference by REG.  */

static void
-replace_ref (struct df_ref *ref, rtx reg)
+replace_ref (df_ref ref, rtx reg)
{
  rtx oldreg = DF_REF_REAL_REG (ref);
  rtx *loc = DF_REF_REAL_LOC (ref);
-  unsigned int uid = INSN_UID (DF_REF_INSN (ref));
+  unsigned int uid = DF_REF_INSN_UID (ref);

  if (oldreg == reg)
    return;

@@ -290,16 +290,16 @@ web_main (void)
      unsigned int uid = INSN_UID (insn);
      if (INSN_P (insn))
	{
-	  struct df_ref **use_rec;
+	  df_ref *use_rec;
	  for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
		DF_REF_ID (use) = uses_num++;
	    }
	  for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
		DF_REF_ID (use) = uses_num++;
	    }

@@ -318,16 +318,16 @@ web_main (void)
      unsigned int uid = INSN_UID (insn);
      if (INSN_P (insn))
	{
-	  struct df_ref **use_rec;
+	  df_ref *use_rec;
	  for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
		union_defs (use, def_entry, use_entry, unionfind_union);
	    }
	  for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
		union_defs (use, def_entry, use_entry, unionfind_union);
	    }

@@ -342,23 +342,23 @@ web_main (void)
      unsigned int uid = INSN_UID (insn);
      if (INSN_P (insn))
	{
-	  struct df_ref **use_rec;
-	  struct df_ref **def_rec;
+	  df_ref *use_rec;
+	  df_ref *def_rec;
	  for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
		replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
	    }
	  for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
	    {
-	      struct df_ref *use = *use_rec;
+	      df_ref use = *use_rec;
	      if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
		replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
	    }
	  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
	    {
-	      struct df_ref *def = *def_rec;
+	      df_ref def = *def_rec;
	      if (DF_REF_REGNO (def) >= FIRST_PSEUDO_REGISTER)
		replace_ref (def, entry_register (def_entry + DF_REF_ID (def), def, used));
	    }
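Illustrative sketch, not part of the patch: replace_ref above switches from INSN_UID (DF_REF_INSN (ref)) to the one-step DF_REF_INSN_UID macro.  Both forms assume the ref belongs to an insn, so code that may also see artificial refs should guard with DF_REF_IS_ARTIFICIAL first; the helper name below is invented.

static void
mark_ref_insn (df_ref ref, bitmap insns)
{
  if (!DF_REF_IS_ARTIFICIAL (ref))
    /* Record the uid of the insn that contains this ref.  */
    bitmap_set_bit (insns, DF_REF_INSN_UID (ref));
}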