Merge dataflow branch into mainline

From-SVN: r125624
This commit is contained in:
Daniel Berlin 2007-06-11 18:02:15 +00:00
parent a91d32a4a6
commit 6fb5fa3cbc
168 changed files with 24203 additions and 13210 deletions

View file

@ -195,6 +195,7 @@ build machinery (*.in) DJ Delorie dj@redhat.com
build machinery (*.in) Nathanael Nerode neroden@gcc.gnu.org
build machinery (*.in) Alexandre Oliva aoliva@redhat.com
dataflow Paolo Bonzini bonzini@gnu.org
dataflow Daniel Berlin dberlin@dberlin.org
docs co-maintainer Gerald Pfeifer gerald@pfeifer.com
docs co-maintainer Joseph Myers jsm@polyomino.org.uk
predict.def Jan Hubicka jh@suse.cz

View file

@ -1,3 +1,7 @@
2007-06-11 Daniel Berlin <dberlin@dberlin.org>
* Merge dataflow-branch into mainline (see ChangeLog.dataflow)
2007-06-11 Uros Bizjak <ubizjak@gmail.com>
* config/i386/i386.md ("*movtf_internal): Penalize moves to and

4481
gcc/ChangeLog.dataflow Normal file

File diff suppressed because it is too large Load diff

View file

@ -749,7 +749,7 @@ EXPR_H = expr.h insn-config.h $(FUNCTION_H) $(RTL_H) $(FLAGS_H) $(TREE_H) $(MACH
OPTABS_H = optabs.h insn-codes.h
REGS_H = regs.h varray.h $(MACHMODE_H) $(OBSTACK_H) $(BASIC_BLOCK_H) $(FUNCTION_H)
RESOURCE_H = resource.h hard-reg-set.h
SCHED_INT_H = sched-int.h $(INSN_ATTR_H) $(BASIC_BLOCK_H) $(RTL_H)
SCHED_INT_H = sched-int.h $(INSN_ATTR_H) $(BASIC_BLOCK_H) $(RTL_H) $(DF_H)
INTEGRATE_H = integrate.h $(VARRAY_H)
CFGLAYOUT_H = cfglayout.h $(BASIC_BLOCK_H)
CFGLOOP_H = cfgloop.h $(BASIC_BLOCK_H) $(RTL_H) vecprim.h double-int.h
@ -758,6 +758,7 @@ IPA_REFERENCE_H = ipa-reference.h bitmap.h $(TREE_H)
IPA_TYPE_ESCAPE_H = ipa-type-escape.h $(TREE_H)
CGRAPH_H = cgraph.h $(TREE_H)
DF_H = df.h bitmap.h $(BASIC_BLOCK_H) alloc-pool.h
RESOURCE_H = resource.h hard-reg-set.h $(DF_H)
DDG_H = ddg.h sbitmap.h $(DF_H)
GCC_H = gcc.h version.h
GGC_H = ggc.h gtype-desc.h
@ -790,6 +791,7 @@ TREE_DATA_REF_H = tree-data-ref.h $(LAMBDA_H) omega.h
VARRAY_H = varray.h $(MACHMODE_H) $(SYSTEM_H) coretypes.h $(TM_H)
TREE_INLINE_H = tree-inline.h $(VARRAY_H) pointer-set.h
REAL_H = real.h $(MACHMODE_H)
DBGCNT_H = dbgcnt.h dbgcnt.def
EBIMAP_H = ebitmap.h sbitmap.h
#
@ -953,6 +955,7 @@ OBJS-common = \
$(GGC) \
alias.o \
alloc-pool.o \
auto-inc-dec.o \
bb-reorder.o \
bitmap.o \
bt-load.o \
@ -977,6 +980,8 @@ OBJS-common = \
cse.o \
cselib.o \
dbxout.o \
dbgcnt.o \
dce.o \
ddg.o \
debug.o \
df-core.o \
@ -988,6 +993,7 @@ OBJS-common = \
dominance.o \
domwalk.o \
double-int.o \
dse.o \
dwarf2asm.o \
dwarf2out.o \
ebitmap.o \
@ -998,7 +1004,6 @@ OBJS-common = \
expmed.o \
expr.o \
final.o \
flow.o \
fold-const.o \
function.o \
fwprop.o \
@ -1014,6 +1019,7 @@ OBJS-common = \
haifa-sched.o \
hooks.o \
ifcvt.o \
init-regs.o \
integrate.o \
intl.o \
jump.o \
@ -1055,6 +1061,7 @@ OBJS-common = \
regclass.o \
regmove.o \
regrename.o \
regstat.o \
reload.o \
reload1.o \
reorg.o \
@ -1073,6 +1080,7 @@ OBJS-common = \
see.o \
simplify-rtx.o \
sreal.o \
stack-ptr-mod.o \
stmt.o \
stor-layout.o \
stringpool.o \
@ -2016,7 +2024,8 @@ tree-cfgcleanup.o : tree-cfgcleanup.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
tree-ssa-propagate.h
rtl-factoring.o : rtl-factoring.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) \
coretypes.h $(TM_H) $(BASIC_BLOCK_H) $(GGC_H) $(REGS_H) $(PARAMS_H) $(EXPR_H) \
addresses.h $(TM_P_H) tree-pass.h $(TREE_FLOW_H) $(TIMEVAR_H) output.h
addresses.h $(TM_P_H) tree-pass.h $(TREE_FLOW_H) $(TIMEVAR_H) output.h \
$(DF_H)
tree-tailcall.o : tree-tailcall.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(RTL_H) $(TREE_H) $(TM_P_H) $(FUNCTION_H) $(TM_H) coretypes.h \
$(TREE_DUMP_H) $(DIAGNOSTIC_H) except.h tree-pass.h $(FLAGS_H) langhooks.h \
@ -2227,7 +2236,7 @@ diagnostic.o : diagnostic.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
opts.o : opts.c opts.h options.h toplev.h $(CONFIG_H) $(SYSTEM_H) \
coretypes.h $(TREE_H) $(TM_H) langhooks.h $(GGC_H) $(RTL_H) \
output.h $(DIAGNOSTIC_H) $(TM_P_H) $(INSN_ATTR_H) intl.h $(TARGET_H) \
$(FLAGS_H) $(PARAMS_H) tree-pass.h
$(FLAGS_H) $(PARAMS_H) tree-pass.h $(DBGCNT_H)
opts-common.o : opts-common.c opts.h $(CONFIG_H) $(SYSTEM_H) \
coretypes.h intl.h
targhooks.o : targhooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TREE_H) \
@ -2257,7 +2266,7 @@ passes.o : passes.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
langhooks.h insn-flags.h $(CFGLAYOUT_H) $(REAL_H) $(CFGLOOP_H) \
hosthooks.h $(CGRAPH_H) $(COVERAGE_H) tree-pass.h $(TREE_DUMP_H) \
$(GGC_H) $(INTEGRATE_H) $(CPPLIB_H) opts.h $(TREE_FLOW_H) $(TREE_INLINE_H) \
gt-passes.h $(PREDICT_H)
gt-passes.h $(DF_H) $(PREDICT_H)
main.o : main.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) toplev.h
@ -2276,7 +2285,8 @@ print-rtl.o : print-rtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(BCONFIG_H) $(REAL_H)
rtlanal.o : rtlanal.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) toplev.h \
$(RTL_H) hard-reg-set.h $(TM_P_H) insn-config.h $(RECOG_H) $(REAL_H) \
$(FLAGS_H) $(REGS_H) output.h $(TARGET_H) $(FUNCTION_H) $(TREE_H)
$(FLAGS_H) $(REGS_H) output.h $(TARGET_H) $(FUNCTION_H) $(TREE_H) \
$(DF_H)
varasm.o : varasm.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
$(RTL_H) $(FLAGS_H) $(FUNCTION_H) $(EXPR_H) hard-reg-set.h $(REGS_H) \
@ -2288,7 +2298,7 @@ function.o : function.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(OPTABS_H) libfuncs.h $(REGS_H) hard-reg-set.h insn-config.h $(RECOG_H) \
output.h toplev.h except.h $(HASHTAB_H) $(GGC_H) $(TM_P_H) langhooks.h \
gt-function.h $(TARGET_H) $(BASIC_BLOCK_H) $(INTEGRATE_H) $(PREDICT_H) \
tree-pass.h vecprim.h
tree-pass.h $(DF_H) timevar.h vecprim.h
stmt.o : stmt.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) $(FUNCTION_H) insn-config.h hard-reg-set.h $(EXPR_H) \
libfuncs.h except.h $(RECOG_H) toplev.h output.h $(GGC_H) $(TM_P_H) \
@ -2306,7 +2316,7 @@ expr.o : expr.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
typeclass.h hard-reg-set.h toplev.h hard-reg-set.h except.h reload.h \
$(GGC_H) langhooks.h intl.h $(TM_P_H) $(REAL_H) $(TARGET_H) \
tree-iterator.h gt-expr.h $(MACHMODE_H) $(TIMEVAR_H) $(TREE_FLOW_H) \
tree-pass.h
tree-pass.h $(DF_H)
dojump.o : dojump.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
$(FLAGS_H) $(FUNCTION_H) $(EXPR_H) $(OPTABS_H) $(INSN_ATTR_H) insn-config.h \
langhooks.h $(GGC_H) gt-dojump.h
@ -2319,10 +2329,10 @@ builtins.o : builtins.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
calls.o : calls.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) $(EXPR_H) $(OPTABS_H) langhooks.h $(TARGET_H) \
libfuncs.h $(REGS_H) toplev.h output.h $(FUNCTION_H) $(TIMEVAR_H) $(TM_P_H) \
$(CGRAPH_H) except.h sbitmap.h
$(CGRAPH_H) except.h sbitmap.h $(DBGCNT_H)
expmed.o : expmed.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
$(FLAGS_H) insn-config.h $(EXPR_H) $(OPTABS_H) $(RECOG_H) $(REAL_H) \
toplev.h $(TM_P_H) langhooks.h $(TARGET_H)
toplev.h $(TM_P_H) langhooks.h $(DF_H) $(TARGET_H)
explow.o : explow.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
$(FLAGS_H) hard-reg-set.h insn-config.h $(EXPR_H) $(OPTABS_H) $(RECOG_H) \
toplev.h $(FUNCTION_H) $(GGC_H) $(TM_P_H) langhooks.h gt-explow.h \
@ -2358,7 +2368,7 @@ emit-rtl.o : emit-rtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) $(FUNCTION_H) $(REGS_H) insn-config.h $(RECOG_H) \
$(GGC_H) $(EXPR_H) hard-reg-set.h bitmap.h toplev.h $(BASIC_BLOCK_H) \
$(HASHTAB_H) $(TM_P_H) debug.h langhooks.h tree-pass.h gt-emit-rtl.h \
$(REAL_H)
$(REAL_H) $(DF_H)
real.o : real.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
toplev.h $(TM_P_H) $(REAL_H)
dfp.o : dfp.c dfp.h $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
@ -2367,7 +2377,7 @@ integrate.o : integrate.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(TREE_H) $(FLAGS_H) debug.h $(INTEGRATE_H) insn-config.h \
$(EXPR_H) $(REAL_H) $(REGS_H) intl.h $(FUNCTION_H) output.h $(RECOG_H) \
except.h toplev.h $(PARAMS_H) $(TM_P_H) $(TARGET_H) langhooks.h \
gt-integrate.h $(GGC_H) tree-pass.h
gt-integrate.h $(GGC_H) tree-pass.h $(DF_H)
jump.o : jump.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) hard-reg-set.h $(REGS_H) insn-config.h $(RECOG_H) $(EXPR_H) \
$(REAL_H) except.h $(FUNCTION_H) tree-pass.h $(DIAGNOSTIC_H) \
@ -2441,7 +2451,15 @@ cselib.o : cselib.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
cse.o : cse.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(REGS_H) \
hard-reg-set.h $(FLAGS_H) insn-config.h $(RECOG_H) $(EXPR_H) toplev.h \
output.h $(FUNCTION_H) $(BASIC_BLOCK_H) $(GGC_H) $(TM_P_H) $(TIMEVAR_H) \
except.h $(TARGET_H) $(PARAMS_H) rtlhooks-def.h tree-pass.h $(REAL_H)
except.h $(TARGET_H) $(PARAMS_H) rtlhooks-def.h tree-pass.h $(REAL_H) \
$(DF_H) $(DBGCNT_H)
dce.o : dce.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) $(DF_H) cselib.h \
$(DBGCNT_H) dce.h timevar.h tree-pass.h $(DBGCNT_H)
dse.o : dse.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h $(RECOG_H) \
$(EXPR_H) $(DF_H) cselib.h $(DBGCNT_H) timevar.h tree-pass.h \
alloc-pool.h $(ALIAS_H) dse.h
fwprop.o : fwprop.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
toplev.h insn-config.h $(RECOG_H) $(FLAGS_H) $(OBSTACK_H) $(BASIC_BLOCK_H) \
output.h $(DF_H) alloc-pool.h $(TIMEVAR_H) tree-pass.h
@ -2455,9 +2473,9 @@ gcse.o : gcse.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(REGS_H) hard-reg-set.h $(FLAGS_H) $(REAL_H) insn-config.h $(GGC_H) \
$(RECOG_H) $(EXPR_H) $(BASIC_BLOCK_H) $(FUNCTION_H) output.h toplev.h \
$(TM_P_H) $(PARAMS_H) except.h gt-gcse.h $(TREE_H) cselib.h $(TIMEVAR_H) \
intl.h $(OBSTACK_H) tree-pass.h
intl.h $(OBSTACK_H) tree-pass.h $(DF_H) $(DBGCNT_H)
resource.o : resource.c $(CONFIG_H) $(RTL_H) hard-reg-set.h $(SYSTEM_H) \
coretypes.h $(TM_H) $(REGS_H) $(FLAGS_H) output.h $(RESOURCE_H) \
coretypes.h $(TM_H) $(REGS_H) $(FLAGS_H) output.h $(RESOURCE_H) $(DF_H) \
$(FUNCTION_H) toplev.h $(INSN_ATTR_H) except.h $(PARAMS_H) $(TM_P_H)
lcm.o : lcm.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(REGS_H) \
hard-reg-set.h $(FLAGS_H) insn-config.h $(INSN_ATTR_H) $(RECOG_H) \
@ -2465,7 +2483,7 @@ lcm.o : lcm.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(REGS_H) \
mode-switching.o : mode-switching.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
$(TM_H) $(RTL_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \
$(INSN_ATTR_H) $(RECOG_H) $(BASIC_BLOCK_H) $(TM_P_H) $(FUNCTION_H) \
output.h tree-pass.h $(TIMEVAR_H) $(REAL_H)
output.h tree-pass.h $(TIMEVAR_H) $(REAL_H) $(DF_H)
tree-ssa-dce.o : tree-ssa-dce.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
$(RTL_H) $(TM_P_H) $(TREE_FLOW_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) $(TM_H) \
coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) $(BASIC_BLOCK_H) \
@ -2496,11 +2514,14 @@ df-core.o : df-core.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
df-problems.o : df-problems.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) insn-config.h $(RECOG_H) $(FUNCTION_H) $(REGS_H) alloc-pool.h \
hard-reg-set.h $(BASIC_BLOCK_H) $(DF_H) bitmap.h sbitmap.h $(TM_P_H) \
$(FLAGS_H) output.h vecprim.h
$(FLAGS_H) output.h except.h dce.h vecprim.h
df-scan.o : df-scan.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
insn-config.h $(RECOG_H) $(FUNCTION_H) $(REGS_H) alloc-pool.h \
hard-reg-set.h $(BASIC_BLOCK_H) $(DF_H) bitmap.h sbitmap.h $(TM_P_H) \
$(FLAGS_H) $(TARGET_H) $(TARGET_DEF_H) $(TREE_H) output.h tree-pass.h
regstat.o : regstat.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TM_P_H) $(FLAGS_H) $(REGS_H) output.h except.h hard-reg-set.h \
$(BASIC_BLOCK_H) $(TIMEVAR_H) $(DF_H)
var-tracking.o : var-tracking.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(TREE_H) hard-reg-set.h insn-config.h reload.h $(FLAGS_H) \
$(BASIC_BLOCK_H) output.h sbitmap.h alloc-pool.h $(FIBHEAP_H) $(HASHTAB_H) \
@ -2522,14 +2543,14 @@ loop-doloop.o : loop-doloop.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(FLAGS_H) $(EXPR_H) hard-reg-set.h $(BASIC_BLOCK_H) $(TM_P_H) \
toplev.h $(CFGLOOP_H) output.h $(PARAMS_H) $(TARGET_H)
alloc-pool.o : alloc-pool.c $(CONFIG_H) $(SYSTEM_H) alloc-pool.h $(HASHTAB_H)
flow.o : flow.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) insn-config.h $(BASIC_BLOCK_H) $(REGS_H) \
hard-reg-set.h output.h toplev.h $(RECOG_H) $(FUNCTION_H) except.h \
$(EXPR_H) $(TM_P_H) $(OBSTACK_H) $(SPLAY_TREE_H) $(TIMEVAR_H) tree-pass.h
auto-inc-dec.o : auto-inc-dec.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(TREE_H) $(RTL_H) $(TM_P_H) hard-reg-set.h $(BASIC_BLOCK_H) insn-config.h \
$(REGS_H) $(FLAGS_H) output.h $(FUNCTION_H) except.h toplev.h $(RECOG_H) \
$(EXPR_H) $(TIMEVAR_H) tree-pass.h $(DF_H) $(DBGCNT_H)
cfg.o : cfg.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(FLAGS_H) \
$(REGS_H) hard-reg-set.h output.h toplev.h $(FUNCTION_H) except.h $(GGC_H) \
$(TM_P_H) $(TIMEVAR_H) $(OBSTACK_H) $(TREE_H) alloc-pool.h $(HASHTAB_H) \
$(CFGLOOP_H)
$(TM_P_H) $(TIMEVAR_H) $(OBSTACK_H) $(TREE_H) alloc-pool.h \
$(HASHTAB_H) $(DF_H) $(CFGLOOP_H)
cfghooks.o: cfghooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(BASIC_BLOCK_H) $(TREE_FLOW_H) $(TIMEVAR_H) toplev.h $(CFGLOOP_H)
cfgexpand.o : cfgexpand.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
@ -2541,7 +2562,7 @@ cfgrtl.o : cfgrtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) insn-config.h $(BASIC_BLOCK_H) $(REGS_H) hard-reg-set.h \
output.h toplev.h $(FUNCTION_H) except.h $(TM_P_H) insn-config.h $(EXPR_H) \
$(CFGLAYOUT_H) $(CFGLOOP_H) $(OBSTACK_H) $(TARGET_H) $(TREE_H) \
tree-pass.h
tree-pass.h $(DF_H)
cfganal.o : cfganal.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(BASIC_BLOCK_H) hard-reg-set.h insn-config.h $(RECOG_H) $(TM_P_H) \
$(TIMEVAR_H) $(OBSTACK_H) toplev.h
@ -2551,7 +2572,8 @@ cfgbuild.o : cfgbuild.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
cfgcleanup.o : cfgcleanup.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(TIMEVAR_H) hard-reg-set.h output.h $(FLAGS_H) $(RECOG_H) \
toplev.h insn-config.h cselib.h $(TARGET_H) $(TM_P_H) $(PARAMS_H) \
$(REGS_H) $(EMIT_RTL_H) $(CFGLAYOUT_H) tree-pass.h $(CFGLOOP_H) $(EXPR_H)
$(REGS_H) $(EMIT_RTL_H) $(CFGLAYOUT_H) tree-pass.h $(CFGLOOP_H) $(EXPR_H) \
$(DF_H) dce.h
cfgloop.o : cfgloop.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) coretypes.h $(TM_H) \
$(BASIC_BLOCK_H) hard-reg-set.h $(CFGLOOP_H) $(FLAGS_H) $(FUNCTION_H) \
$(OBSTACK_H) toplev.h $(TREE_FLOW_H) $(TREE_H) pointer-set.h output.h \
@ -2564,7 +2586,7 @@ graphds.o : graphds.c graphds.h $(CONFIG_H) $(SYSTEM_H) bitmap.h $(OBSTACK_H) \
struct-equiv.o : struct-equiv.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) hard-reg-set.h output.h $(FLAGS_H) $(RECOG_H) \
insn-config.h $(TARGET_H) $(TM_P_H) $(PARAMS_H) \
$(REGS_H) $(EMIT_RTL_H)
$(REGS_H) $(EMIT_RTL_H) $(DF_H)
loop-iv.o : loop-iv.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) $(BASIC_BLOCK_H) \
hard-reg-set.h $(CFGLOOP_H) $(EXPR_H) coretypes.h $(TM_H) $(OBSTACK_H) \
output.h intl.h $(DF_H) $(HASHTAB_H)
@ -2577,7 +2599,7 @@ cfgloopmanip.o : cfgloopmanip.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) \
coretypes.h $(TM_H) cfghooks.h $(OBSTACK_H)
loop-init.o : loop-init.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) $(GGC_H) \
$(BASIC_BLOCK_H) hard-reg-set.h $(CFGLOOP_H) $(CFGLAYOUT_H) \
coretypes.h $(TM_H) $(OBSTACK_H) tree-pass.h $(TIMEVAR_H) $(FLAGS_H)
coretypes.h $(TM_H) $(OBSTACK_H) tree-pass.h $(TIMEVAR_H) $(FLAGS_H) $(DF_H)
loop-unswitch.o : loop-unswitch.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) $(TM_H) \
$(BASIC_BLOCK_H) hard-reg-set.h $(CFGLOOP_H) $(CFGLAYOUT_H) $(PARAMS_H) \
output.h $(EXPR_H) coretypes.h $(TM_H) $(OBSTACK_H)
@ -2594,53 +2616,53 @@ combine.o : combine.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) $(FUNCTION_H) insn-config.h $(INSN_ATTR_H) $(REGS_H) $(EXPR_H) \
rtlhooks-def.h $(BASIC_BLOCK_H) $(RECOG_H) $(REAL_H) hard-reg-set.h \
toplev.h $(TM_P_H) $(TREE_H) $(TARGET_H) output.h $(PARAMS_H) $(OPTABS_H) \
insn-codes.h $(TIMEVAR_H) tree-pass.h
insn-codes.h $(TIMEVAR_H) tree-pass.h $(DF_H)
regclass.o : regclass.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
hard-reg-set.h $(FLAGS_H) $(BASIC_BLOCK_H) $(REGS_H) insn-config.h \
$(RECOG_H) reload.h $(REAL_H) toplev.h $(FUNCTION_H) output.h $(GGC_H) \
$(TM_P_H) $(EXPR_H) $(TIMEVAR_H) gt-regclass.h $(HASHTAB_H) \
$(TARGET_H)
$(TARGET_H) tree-pass.h $(DF_H)
local-alloc.o : local-alloc.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(FLAGS_H) $(REGS_H) hard-reg-set.h insn-config.h $(RECOG_H) \
output.h $(FUNCTION_H) $(INSN_ATTR_H) toplev.h except.h reload.h $(TM_P_H) \
$(GGC_H) $(INTEGRATE_H) $(TIMEVAR_H) tree-pass.h
$(GGC_H) $(INTEGRATE_H) $(TIMEVAR_H) tree-pass.h $(DF_H) $(DBGCNT_H)
bitmap.o : bitmap.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) $(GGC_H) gt-bitmap.h bitmap.h $(OBSTACK_H)
global.o : global.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) reload.h $(FUNCTION_H) $(RECOG_H) $(REGS_H) hard-reg-set.h \
insn-config.h output.h toplev.h $(TM_P_H) $(MACHMODE_H) tree-pass.h \
$(TIMEVAR_H) vecprim.h
$(TIMEVAR_H) vecprim.h $(DF_H)
varray.o : varray.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(GGC_H) \
$(HASHTAB_H) $(BCONFIG_H) $(VARRAY_H) toplev.h
vec.o : vec.c $(CONFIG_H) $(SYSTEM_H) coretypes.h vec.h $(GGC_H) \
toplev.h
reload.o : reload.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) output.h $(EXPR_H) $(OPTABS_H) reload.h $(RECOG_H) \
hard-reg-set.h insn-config.h $(REGS_H) $(FUNCTION_H) toplev.h \
addresses.h $(TM_P_H) $(PARAMS_H) $(TARGET_H) $(REAL_H)
hard-reg-set.h insn-config.h $(REGS_H) $(FUNCTION_H) real.h toplev.h \
addresses.h $(TM_P_H) $(PARAMS_H) $(TARGET_H) $(REAL_H) $(DF_H)
reload1.o : reload1.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(EXPR_H) $(OPTABS_H) reload.h $(REGS_H) hard-reg-set.h insn-config.h \
$(BASIC_BLOCK_H) $(RECOG_H) output.h $(FUNCTION_H) toplev.h $(TM_P_H) \
addresses.h except.h $(TREE_H) $(REAL_H) $(FLAGS_H) $(MACHMODE_H) \
$(OBSTACK_H) $(TARGET_H)
$(OBSTACK_H) $(DF_H) $(TARGET_H) dse.h
rtlhooks.o : rtlhooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
rtlhooks-def.h $(EXPR_H) $(RECOG_H)
postreload.o : postreload.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(REAL_H) $(FLAGS_H) $(EXPR_H) $(OPTABS_H) reload.h $(REGS_H) \
hard-reg-set.h insn-config.h $(BASIC_BLOCK_H) $(RECOG_H) output.h \
$(FUNCTION_H) toplev.h cselib.h $(TM_P_H) except.h $(TREE_H) $(MACHMODE_H) \
$(OBSTACK_H) $(TIMEVAR_H) tree-pass.h
$(OBSTACK_H) $(TIMEVAR_H) tree-pass.h $(DF_H)
postreload-gcse.o : postreload-gcse.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
$(TM_H) $(RTL_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \
$(RECOG_H) $(EXPR_H) $(BASIC_BLOCK_H) $(FUNCTION_H) output.h toplev.h \
$(TM_P_H) except.h $(TREE_H) $(TARGET_H) $(HASHTAB_H) intl.h $(OBSTACK_H) \
$(PARAMS_H) $(TIMEVAR_H) tree-pass.h $(REAL_H)
$(PARAMS_H) $(TIMEVAR_H) tree-pass.h $(REAL_H) $(DBGCNT_H)
caller-save.o : caller-save.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) $(REGS_H) hard-reg-set.h insn-config.h $(BASIC_BLOCK_H) $(FUNCTION_H) \
addresses.h $(RECOG_H) reload.h $(EXPR_H) toplev.h $(TM_P_H)
addresses.h $(RECOG_H) reload.h $(EXPR_H) toplev.h $(TM_P_H) $(DF_H)
bt-load.o : bt-load.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) except.h \
$(RTL_H) hard-reg-set.h $(REGS_H) $(TM_P_H) $(FIBHEAP_H) output.h $(EXPR_H) \
$(TARGET_H) $(FLAGS_H) $(INSN_ATTR_H) $(FUNCTION_H) tree-pass.h toplev.h
$(TARGET_H) $(FLAGS_H) $(INSN_ATTR_H) $(FUNCTION_H) tree-pass.h toplev.h $(DF_H)
reorg.o : reorg.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
conditions.h hard-reg-set.h $(BASIC_BLOCK_H) $(REGS_H) insn-config.h \
$(INSN_ATTR_H) except.h $(RECOG_H) $(FUNCTION_H) $(FLAGS_H) output.h \
@ -2650,9 +2672,15 @@ alias.o : alias.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) hard-reg-set.h $(BASIC_BLOCK_H) $(REGS_H) toplev.h output.h \
$(ALIAS_H) $(EMIT_RTL_H) $(GGC_H) $(FUNCTION_H) cselib.h $(TREE_H) $(TM_P_H) \
langhooks.h $(TARGET_H) gt-alias.h $(TIMEVAR_H) $(CGRAPH_H) \
$(SPLAY_TREE_H) $(VARRAY_H) $(IPA_TYPE_ESCAPE_H) tree-pass.h
$(SPLAY_TREE_H) $(VARRAY_H) $(IPA_TYPE_ESCAPE_H) $(DF_H) tree-pass.h
stack-ptr-mod.o : stack-ptr-mod.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
$(TM_H) $(TREE_H) $(RTL_H) $(REGS_H) $(EXPR_H) tree-pass.h \
$(BASIC_BLOCK_H) $(FLAGS_H) output.h $(DF_H)
init-regs.o : init-regs.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
$(TM_H) $(TREE_H) $(RTL_H) $(REGS_H) $(EXPR_H) tree-pass.h \
$(BASIC_BLOCK_H) $(FLAGS_H) $(DF_H)
regmove.o : regmove.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
insn-config.h $(TIMEVAR_H) tree-pass.h \
insn-config.h $(TIMEVAR_H) tree-pass.h $(DF_H)\
$(RECOG_H) output.h $(REGS_H) hard-reg-set.h $(FLAGS_H) $(FUNCTION_H) \
$(EXPR_H) $(BASIC_BLOCK_H) toplev.h $(TM_P_H) except.h reload.h
combine-stack-adj.o : combine-stack-adj.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
@ -2662,30 +2690,30 @@ combine-stack-adj.o : combine-stack-adj.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
ddg.o : ddg.c $(DDG_H) $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TARGET_H) \
toplev.h $(RTL_H) $(TM_P_H) $(REGS_H) $(FUNCTION_H) \
$(FLAGS_H) insn-config.h $(INSN_ATTR_H) except.h $(RECOG_H) \
$(SCHED_INT_H) $(CFGLAYOUT_H) $(CFGLOOP_H) $(EXPR_H) bitmap.h $(DF_H) \
$(SCHED_INT_H) $(CFGLAYOUT_H) $(CFGLOOP_H) $(EXPR_H) bitmap.h \
hard-reg-set.h sbitmap.h $(TM_H)
modulo-sched.o : modulo-sched.c $(DDG_H) $(CONFIG_H) $(CONFIG_H) $(SYSTEM_H) \
coretypes.h $(TARGET_H) toplev.h $(RTL_H) $(TM_P_H) $(REGS_H) $(FUNCTION_H) \
$(FLAGS_H) insn-config.h $(INSN_ATTR_H) except.h $(RECOG_H) \
$(SCHED_INT_H) $(CFGLAYOUT_H) $(CFGLOOP_H) $(EXPR_H) $(PARAMS_H) \
cfghooks.h $(DF_H) $(GCOV_IO_H) hard-reg-set.h $(TM_H) $(TIMEVAR_H) \
tree-pass.h
haifa-sched.o : haifa-sched.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(SCHED_INT_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \
$(FUNCTION_H) $(INSN_ATTR_H) toplev.h $(RECOG_H) except.h $(TM_P_H) \
$(TARGET_H) output.h $(PARAMS_H)
cfghooks.h $(GCOV_IO_H) hard-reg-set.h $(TM_H) timevar.h tree-pass.h \
$(DF_H)
haifa-sched.o : haifa-sched.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(SCHED_INT_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h $(FUNCTION_H) \
$(INSN_ATTR_H) toplev.h $(RECOG_H) except.h $(TM_P_H) $(TARGET_H) output.h \
$(PARAMS_H) $(DBGCNT_H)
sched-deps.o : sched-deps.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(SCHED_INT_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \
$(FUNCTION_H) $(INSN_ATTR_H) toplev.h $(RECOG_H) except.h cselib.h \
$(PARAMS_H) $(TM_P_H) $(DF_H)
$(PARAMS_H) $(TM_P_H)
sched-rgn.o : sched-rgn.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(SCHED_INT_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \
$(FUNCTION_H) $(INSN_ATTR_H) toplev.h $(RECOG_H) except.h $(PARAMS_H) \
$(TM_P_H) $(TARGET_H) $(CFGLAYOUT_H) $(TIMEVAR_H) tree-pass.h
$(TM_P_H) $(TARGET_H) $(CFGLAYOUT_H) $(TIMEVAR_H) tree-pass.h $(DBGCNT_H)
sched-ebb.o : sched-ebb.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(SCHED_INT_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \
$(FUNCTION_H) $(INSN_ATTR_H) toplev.h $(RECOG_H) except.h $(TM_P_H) \
$(PARAMS_H) $(CFGLAYOUT_H) $(TARGET_H) output.h
$(PARAMS_H) $(CFGLAYOUT_H) $(TARGET_H) output.h
sched-vis.o : sched-vis.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(SCHED_INT_H) hard-reg-set.h $(BASIC_BLOCK_H) $(OBSTACK_H) \
$(TM_P_H) $(REAL_H) toplev.h tree-pass.h
@ -2694,16 +2722,17 @@ final.o : final.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
insn-config.h $(INSN_ATTR_H) $(FUNCTION_H) output.h hard-reg-set.h \
except.h debug.h xcoffout.h toplev.h reload.h dwarf2out.h tree-pass.h \
$(BASIC_BLOCK_H) $(TM_P_H) $(TARGET_H) $(EXPR_H) $(CFGLAYOUT_H) dbxout.h \
$(TIMEVAR_H) $(CGRAPH_H) $(COVERAGE_H) $(REAL_H) vecprim.h
$(TIMEVAR_H) $(CGRAPH_H) $(COVERAGE_H) $(REAL_H) $(DF_H) vecprim.h
recog.o : recog.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FUNCTION_H) $(BASIC_BLOCK_H) $(REGS_H) $(RECOG_H) $(EXPR_H) \
$(FLAGS_H) insn-config.h $(INSN_ATTR_H) toplev.h output.h reload.h \
addresses.h $(TM_P_H) $(TIMEVAR_H) tree-pass.h hard-reg-set.h $(REAL_H)
addresses.h $(TM_P_H) $(TIMEVAR_H) tree-pass.h hard-reg-set.h $(REAL_H) \
$(DF_H) $(DBGCNT_H)
reg-stack.o : reg-stack.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(TREE_H) $(RECOG_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) \
insn-config.h toplev.h reload.h $(FUNCTION_H) $(TM_P_H) $(GGC_H) \
$(BASIC_BLOCK_H) output.h $(VARRAY_H) $(TIMEVAR_H) tree-pass.h \
$(TARGET_H) vecprim.h
$(TARGET_H) vecprim.h $(DF_H)
sreal.o: sreal.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) sreal.h
predict.o: predict.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) insn-config.h $(BASIC_BLOCK_H) $(REGS_H) \
@ -2724,17 +2753,19 @@ tracer.o : tracer.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
cfglayout.o : cfglayout.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(TREE_H) insn-config.h $(BASIC_BLOCK_H) hard-reg-set.h output.h \
$(FUNCTION_H) $(CFGLAYOUT_H) $(CFGLOOP_H) $(TARGET_H) gt-cfglayout.h \
$(GGC_H) alloc-pool.h $(FLAGS_H) $(OBSTACK_H) tree-pass.h vecprim.h
$(GGC_H) alloc-pool.h $(FLAGS_H) $(OBSTACK_H) tree-pass.h vecprim.h \
$(DF_H)
timevar.o : timevar.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(TIMEVAR_H) $(FLAGS_H) intl.h toplev.h $(RTL_H) timevar.def
regrename.o : regrename.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) insn-config.h $(BASIC_BLOCK_H) $(REGS_H) hard-reg-set.h \
output.h $(RECOG_H) $(FUNCTION_H) $(OBSTACK_H) $(FLAGS_H) $(TM_P_H) \
addresses.h reload.h toplev.h $(TIMEVAR_H) tree-pass.h
addresses.h reload.h toplev.h $(TIMEVAR_H) tree-pass.h $(DF_H)
ifcvt.o : ifcvt.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(REGS_H) toplev.h $(FLAGS_H) insn-config.h $(FUNCTION_H) $(RECOG_H) \
$(TARGET_H) $(BASIC_BLOCK_H) $(EXPR_H) output.h except.h $(TM_P_H) \
$(REAL_H) $(OPTABS_H) $(CFGLOOP_H) hard-reg-set.h $(TIMEVAR_H) tree-pass.h
$(REAL_H) $(OPTABS_H) $(CFGLOOP_H) hard-reg-set.h $(TIMEVAR_H) tree-pass.h \
$(DF_H)
lambda-mat.o : lambda-mat.c $(LAMBDA_H) $(GGC_H) $(SYSTEM_H) $(CONFIG_H) \
$(TM_H) coretypes.h $(TREE_H)
lambda-trans.o: lambda-trans.c $(LAMBDA_H) $(GGC_H) $(SYSTEM_H) $(CONFIG_H) \
@ -2750,10 +2781,11 @@ hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(HOOKS_H)
pretty-print.o: $(CONFIG_H) $(SYSTEM_H) coretypes.h intl.h $(PRETTY_PRINT_H) \
$(TREE_H)
errors.o : errors.c $(CONFIG_H) $(SYSTEM_H) errors.h $(BCONFIG_H)
dbgcnt.o: dbgcnt.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(DBGCNT_H)
lower-subreg.o : lower-subreg.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
$(MACHMODE_H) $(TM_H) $(RTL_H) $(TM_P_H) $(TIMEVAR_H) $(FLAGS_H) \
insn-config.h $(BASIC_BLOCK_H) $(RECOG_H) $(OBSTACK_H) bitmap.h \
$(EXPR_H) $(REGS_H) tree-pass.h
$(EXPR_H) $(REGS_H) tree-pass.h $(DF_H)
$(out_object_file): $(out_file) $(CONFIG_H) coretypes.h $(TM_H) $(TREE_H) \
$(RTL_H) $(REGS_H) hard-reg-set.h insn-config.h conditions.h \

View file

@ -46,6 +46,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "varray.h"
#include "tree-pass.h"
#include "ipa-type-escape.h"
#include "df.h"
/* The aliasing API provided here solves related but different problems:
@ -839,7 +840,7 @@ find_base_value (rtx src)
/* If we're inside init_alias_analysis, use new_reg_base_value
to reduce the number of relaxation iterations. */
if (new_reg_base_value && new_reg_base_value[regno]
&& REG_N_SETS (regno) == 1)
&& DF_REG_DEF_COUNT (regno) == 1)
return new_reg_base_value[regno];
if (VEC_index (rtx, reg_base_value, regno))
@ -1087,27 +1088,6 @@ record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
reg_seen[regno] = 1;
}
/* Clear alias info for a register. This is used if an RTL transformation
changes the value of a register. This is used in flow by AUTO_INC_DEC
optimizations. We don't need to clear reg_base_value, since flow only
changes the offset. */
void
clear_reg_alias_info (rtx reg)
{
unsigned int regno = REGNO (reg);
if (regno >= FIRST_PSEUDO_REGISTER)
{
regno -= FIRST_PSEUDO_REGISTER;
if (regno < reg_known_value_size)
{
reg_known_value[regno] = reg;
reg_known_equiv_p[regno] = false;
}
}
}
/* If a value is known for REGNO, return it. */
rtx
@ -2433,7 +2413,7 @@ init_alias_analysis (void)
the optimization level or flag_expensive_optimizations.
We could propagate more information in the first pass by making use
of REG_N_SETS to determine immediately that the alias information
of DF_REG_DEF_COUNT to determine immediately that the alias information
for a pseudo is "constant".
A program with an uninitialized variable can cause an infinite loop
@ -2514,7 +2494,7 @@ init_alias_analysis (void)
note = find_reg_equal_equiv_note (insn);
if (note && REG_NOTE_KIND (note) == REG_EQUAL
&& REG_N_SETS (regno) != 1)
&& DF_REG_DEF_COUNT (regno) != 1)
note = NULL_RTX;
if (note != NULL_RTX
@ -2527,7 +2507,7 @@ init_alias_analysis (void)
set_reg_known_equiv_p (regno,
REG_NOTE_KIND (note) == REG_EQUIV);
}
else if (REG_N_SETS (regno) == 1
else if (DF_REG_DEF_COUNT (regno) == 1
&& GET_CODE (src) == PLUS
&& REG_P (XEXP (src, 0))
&& (t = get_reg_known_value (REGNO (XEXP (src, 0))))
@ -2537,7 +2517,7 @@ init_alias_analysis (void)
set_reg_known_value (regno, t);
set_reg_known_equiv_p (regno, 0);
}
else if (REG_N_SETS (regno) == 1
else if (DF_REG_DEF_COUNT (regno) == 1
&& ! rtx_varies_p (src, 1))
{
set_reg_known_value (regno, src);

View file

@ -1,5 +1,5 @@
/* Functions to support a pool of allocatable objects.
Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006
Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@cgsoftware.com>
@ -161,7 +161,9 @@ create_alloc_pool (const char *name, size_t size, size_t num)
header_size = align_eight (sizeof (struct alloc_pool_list_def));
pool->block_size = (size * num) + header_size;
pool->free_list = NULL;
pool->returned_free_list = NULL;
pool->virgin_free_list = NULL;
pool->virgin_elts_remaining = 0;
pool->elts_allocated = 0;
pool->elts_free = 0;
pool->blocks_allocated = 0;
@ -223,7 +225,6 @@ void *
pool_alloc (alloc_pool pool)
{
alloc_pool_list header;
char *block;
#ifdef GATHER_STATISTICS
struct alloc_pool_descriptor *desc = alloc_pool_descriptor (pool->name);
@ -233,46 +234,57 @@ pool_alloc (alloc_pool pool)
gcc_assert (pool);
/* If there are no more free elements, make some more!. */
if (!pool->free_list)
if (!pool->returned_free_list)
{
size_t i;
alloc_pool_list block_header;
char *block;
if (!pool->virgin_elts_remaining)
{
alloc_pool_list block_header;
/* Make the block. */
block = XNEWVEC (char, pool->block_size);
block_header = (alloc_pool_list) block;
block += align_eight (sizeof (struct alloc_pool_list_def));
/* Make the block. */
block = XNEWVEC (char, pool->block_size);
block_header = (alloc_pool_list) block;
block += align_eight (sizeof (struct alloc_pool_list_def));
#ifdef GATHER_STATISTICS
desc->current += pool->block_size;
if (desc->peak < desc->current)
desc->peak = desc->current;
desc->current += pool->block_size;
if (desc->peak < desc->current)
desc->peak = desc->current;
#endif
/* Throw it on the block list. */
block_header->next = pool->block_list;
pool->block_list = block_header;
/* Throw it on the block list. */
block_header->next = pool->block_list;
pool->block_list = block_header;
/* Make the block available for allocation. */
pool->virgin_free_list = block;
pool->virgin_elts_remaining = pool->elts_per_block;
/* Now put the actual block pieces onto the free list. */
for (i = 0; i < pool->elts_per_block; i++, block += pool->elt_size)
{
/* Also update the number of elements we have free/allocated, and
increment the allocated block count. */
pool->elts_allocated += pool->elts_per_block;
pool->elts_free += pool->elts_per_block;
pool->blocks_allocated += 1;
}
/* We now know that we can take the first elt off the virgin list and
put it on the returned list. */
block = pool->virgin_free_list;
header = (alloc_pool_list) USER_PTR_FROM_ALLOCATION_OBJECT_PTR (block);
header->next = NULL;
#ifdef ENABLE_CHECKING
/* Mark the element to be free. */
((allocation_object *) block)->id = 0;
/* Mark the element to be free. */
((allocation_object *) block)->id = 0;
#endif
header = (alloc_pool_list) USER_PTR_FROM_ALLOCATION_OBJECT_PTR (block);
header->next = pool->free_list;
pool->free_list = header;
}
/* Also update the number of elements we have free/allocated, and
increment the allocated block count. */
pool->elts_allocated += pool->elts_per_block;
pool->elts_free += pool->elts_per_block;
pool->blocks_allocated += 1;
pool->returned_free_list = header;
pool->virgin_free_list += pool->elt_size;
pool->virgin_elts_remaining--;
}
/* Pull the first free element from the free list, and return it. */
header = pool->free_list;
pool->free_list = header->next;
header = pool->returned_free_list;
pool->returned_free_list = header->next;
pool->elts_free--;
#ifdef ENABLE_CHECKING
@ -305,8 +317,8 @@ pool_free (alloc_pool pool, void *ptr)
#endif
header = (alloc_pool_list) ptr;
header->next = pool->free_list;
pool->free_list = header;
header->next = pool->returned_free_list;
pool->returned_free_list = header;
pool->elts_free++;
}
/* Output per-alloc_pool statistics. */

View file

@ -1,5 +1,5 @@
/* Functions to support a pool of allocatable objects
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2007
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@cgsoftware.com>
@ -37,7 +37,18 @@ typedef struct alloc_pool_def
ALLOC_POOL_ID_TYPE id;
#endif
size_t elts_per_block;
alloc_pool_list free_list;
/* These are the elements that have been allocated at least once and freed. */
alloc_pool_list returned_free_list;
/* These are the elements that have not yet been allocated out of
the last block obtained from XNEWVEC. */
char* virgin_free_list;
/* The number of elements in the virgin_free_list that can be
allocated before needing another block. */
size_t virgin_elts_remaining;
size_t elts_allocated;
size_t elts_free;
size_t blocks_allocated;

1558
gcc/auto-inc-dec.c Normal file

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
/* Define control and data flow tables, and regsets.
Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -257,12 +257,6 @@ struct rtl_bb_info GTY(())
rtx head_;
rtx end_;
/* The registers that are live on entry to this block. */
bitmap GTY ((skip (""))) global_live_at_start;
/* The registers that are live on exit from this block. */
bitmap GTY ((skip (""))) global_live_at_end;
/* In CFGlayout mode points to insn notes/jumptables to be placed just before
and after the block. */
rtx header;
@ -299,46 +293,45 @@ DEF_VEC_ALLOC_P(basic_block,heap);
enum bb_flags
{
/* Set if insns in BB have are modified. Used for updating liveness info. */
BB_DIRTY = 1,
/* Only set on blocks that have just been created by create_bb. */
BB_NEW = 2,
BB_NEW = 1 << 0,
/* Set by find_unreachable_blocks. Do not rely on this being set in any
pass. */
BB_REACHABLE = 4,
BB_REACHABLE = 1 << 1,
/* Set for blocks in an irreducible loop by loop analysis. */
BB_IRREDUCIBLE_LOOP = 8,
BB_IRREDUCIBLE_LOOP = 1 << 2,
/* Set on blocks that may actually not be single-entry single-exit block. */
BB_SUPERBLOCK = 16,
BB_SUPERBLOCK = 1 << 3,
/* Set on basic blocks that the scheduler should not touch. This is used
by SMS to prevent other schedulers from messing with the loop schedule. */
BB_DISABLE_SCHEDULE = 32,
BB_DISABLE_SCHEDULE = 1 << 4,
/* Set on blocks that should be put in a hot section. */
BB_HOT_PARTITION = 64,
BB_HOT_PARTITION = 1 << 5,
/* Set on blocks that should be put in a cold section. */
BB_COLD_PARTITION = 128,
BB_COLD_PARTITION = 1 << 6,
/* Set on block that was duplicated. */
BB_DUPLICATED = 256,
BB_DUPLICATED = 1 << 7,
/* Set if the label at the top of this block is the target of a non-local goto. */
BB_NON_LOCAL_GOTO_TARGET = 1 << 8,
/* Set on blocks that are in RTL format. */
BB_RTL = 1024,
BB_RTL = 1 << 9 ,
/* Set on blocks that are forwarder blocks.
Only used in cfgcleanup.c. */
BB_FORWARDER_BLOCK = 2048,
BB_FORWARDER_BLOCK = 1 << 10,
/* Set on blocks that cannot be threaded through.
Only used in cfgcleanup.c. */
BB_NONTHREADABLE_BLOCK = 4096
BB_NONTHREADABLE_BLOCK = 1 << 11
};
/* Dummy flag for convenience in the hot/cold partitioning code. */
@ -435,11 +428,23 @@ struct control_flow_graph GTY(())
(INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
(INSN) = NEXT_INSN (INSN))
/* For iterating over insns in basic block when we might remove the
current insn. */
#define FOR_BB_INSNS_SAFE(BB, INSN, CURR) \
for ((INSN) = BB_HEAD (BB), (CURR) = (INSN) ? NEXT_INSN ((INSN)): NULL; \
(INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
(INSN) = (CURR), (CURR) = (INSN) ? NEXT_INSN ((INSN)) : NULL)
#define FOR_BB_INSNS_REVERSE(BB, INSN) \
for ((INSN) = BB_END (BB); \
(INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
(INSN) = PREV_INSN (INSN))
#define FOR_BB_INSNS_REVERSE_SAFE(BB, INSN, CURR) \
for ((INSN) = BB_END (BB),(CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL; \
(INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
(INSN) = (CURR), (CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL)
/* Cycles through _all_ basic blocks, even the fake ones (entry and
exit block). */
@ -451,18 +456,6 @@ struct control_flow_graph GTY(())
extern bitmap_obstack reg_obstack;
/* Indexed by n, gives number of basic block that (REG n) is used in.
If the value is REG_BLOCK_GLOBAL (-2),
it means (REG n) is used in more than one basic block.
REG_BLOCK_UNKNOWN (-1) means it hasn't been seen yet so we don't know.
This information remains valid for the rest of the compilation
of the current function; it is used to control register allocation. */
#define REG_BLOCK_UNKNOWN -1
#define REG_BLOCK_GLOBAL -2
#define REG_BASIC_BLOCK(N) \
(VEC_index (reg_info_p, reg_n_info, N)->basic_block)
/* Stuff for recording basic block info. */
@ -505,7 +498,8 @@ extern edge redirect_edge_succ_nodup (edge, basic_block);
extern void redirect_edge_pred (edge, basic_block);
extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
extern void clear_bb_flags (void);
extern int post_order_compute (int *, bool);
extern int post_order_compute (int *, bool, bool);
extern int inverted_post_order_compute (int *);
extern int pre_and_rev_post_order_compute (int *, int *, bool);
extern int dfs_enumerate_from (basic_block, int,
bool (*)(basic_block, void *),
@ -515,7 +509,6 @@ extern void dump_bb_info (basic_block, bool, bool, int, const char *, FILE *);
extern void dump_edge_info (FILE *, edge, int);
extern void brief_dump_cfg (FILE *);
extern void clear_edges (void);
extern rtx first_insn_after_basic_block_note (basic_block);
extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
gcov_type);
@ -788,76 +781,26 @@ void verify_edge_list (FILE *, struct edge_list *);
int find_edge_index (struct edge_list *, basic_block, basic_block);
edge find_edge (basic_block, basic_block);
enum update_life_extent
{
UPDATE_LIFE_LOCAL = 0,
UPDATE_LIFE_GLOBAL = 1,
UPDATE_LIFE_GLOBAL_RM_NOTES = 2
};
/* Flags for life_analysis and update_life_info. */
#define PROP_DEATH_NOTES 1 /* Create DEAD and UNUSED notes. */
#define PROP_LOG_LINKS 2 /* Create LOG_LINKS. */
#define PROP_REG_INFO 4 /* Update regs_ever_live et al. */
#define PROP_KILL_DEAD_CODE 8 /* Remove dead code. */
#define PROP_SCAN_DEAD_CODE 16 /* Scan for dead code. */
#define PROP_ALLOW_CFG_CHANGES 32 /* Allow the CFG to be changed
by dead code removal. */
#define PROP_AUTOINC 64 /* Create autoinc mem references. */
#define PROP_SCAN_DEAD_STORES 128 /* Scan for dead code. */
#define PROP_ASM_SCAN 256 /* Internal flag used within flow.c
to flag analysis of asms. */
#define PROP_DEAD_INSN 1024 /* Internal flag used within flow.c
to flag analysis of dead insn. */
#define PROP_POST_REGSTACK 2048 /* We run after reg-stack and need
to preserve REG_DEAD notes for
stack regs. */
#define PROP_FINAL (PROP_DEATH_NOTES | PROP_LOG_LINKS \
| PROP_REG_INFO | PROP_KILL_DEAD_CODE \
| PROP_SCAN_DEAD_CODE | PROP_AUTOINC \
| PROP_ALLOW_CFG_CHANGES \
| PROP_SCAN_DEAD_STORES)
#define PROP_POSTRELOAD (PROP_DEATH_NOTES \
| PROP_KILL_DEAD_CODE \
| PROP_SCAN_DEAD_CODE \
| PROP_SCAN_DEAD_STORES)
#define CLEANUP_EXPENSIVE 1 /* Do relatively expensive optimizations
except for edge forwarding */
#define CLEANUP_CROSSJUMP 2 /* Do crossjumping. */
#define CLEANUP_POST_REGSTACK 4 /* We run after reg-stack and need
to care REG_DEAD notes. */
#define CLEANUP_UPDATE_LIFE 8 /* Keep life information up to date. */
#define CLEANUP_THREADING 16 /* Do jump threading. */
#define CLEANUP_NO_INSN_DEL 32 /* Do not try to delete trivially dead
#define CLEANUP_THREADING 8 /* Do jump threading. */
#define CLEANUP_NO_INSN_DEL 16 /* Do not try to delete trivially dead
insns. */
#define CLEANUP_CFGLAYOUT 64 /* Do cleanup in cfglayout mode. */
#define CLEANUP_LOG_LINKS 128 /* Update log links. */
#define CLEANUP_CFGLAYOUT 32 /* Do cleanup in cfglayout mode. */
/* The following are ORed in on top of the CLEANUP* flags in calls to
struct_equiv_block_eq. */
#define STRUCT_EQUIV_START 256 /* Initializes the search range. */
#define STRUCT_EQUIV_RERUN 512 /* Rerun to find register use in
#define STRUCT_EQUIV_START 64 /* Initializes the search range. */
#define STRUCT_EQUIV_RERUN 128 /* Rerun to find register use in
found equivalence. */
#define STRUCT_EQUIV_FINAL 1024 /* Make any changes necessary to get
#define STRUCT_EQUIV_FINAL 256 /* Make any changes necessary to get
actual equivalence. */
#define STRUCT_EQUIV_NEED_FULL_BLOCK 2048 /* struct_equiv_block_eq is required
#define STRUCT_EQUIV_NEED_FULL_BLOCK 512 /* struct_equiv_block_eq is required
to match only full blocks */
#define STRUCT_EQUIV_MATCH_JUMPS 4096 /* Also include the jumps at the end of the block in the comparison. */
extern void life_analysis (int);
extern int update_life_info (sbitmap, enum update_life_extent, int);
extern int update_life_info_in_dirty_blocks (enum update_life_extent, int);
extern int count_or_remove_death_notes (sbitmap, int);
extern int propagate_block (basic_block, regset, regset, regset, int);
struct propagate_block_info;
extern rtx propagate_one_insn (struct propagate_block_info *, rtx);
extern struct propagate_block_info *init_propagate_block_info
(basic_block, regset, regset, regset, int);
extern void free_propagate_block_info (struct propagate_block_info *);
#define STRUCT_EQUIV_MATCH_JUMPS 1024 /* Also include the jumps at the end of the block in the comparison. */
/* In lcm.c */
extern struct edge_list *pre_edge_lcm (int, sbitmap *, sbitmap *,
@ -883,31 +826,19 @@ extern void remove_predictions_associated_with_edge (edge);
extern bool edge_probability_reliable_p (edge);
extern bool br_prob_note_reliable_p (rtx);
/* In flow.c */
/* In cfg.c */
extern void dump_regset (regset, FILE *);
extern void debug_regset (regset);
extern void init_flow (void);
extern void debug_bb (basic_block);
extern basic_block debug_bb_n (int);
extern void dump_regset (regset, FILE *);
extern void debug_regset (regset);
extern void allocate_reg_life_data (void);
extern void expunge_block (basic_block);
extern void link_block (basic_block, basic_block);
extern void unlink_block (basic_block);
extern void compact_blocks (void);
extern basic_block alloc_block (void);
extern void find_unreachable_blocks (void);
extern int delete_noop_moves (void);
extern basic_block force_nonfallthru (edge);
extern rtx block_label (basic_block);
extern bool forwarder_block_p (basic_block);
extern bool purge_all_dead_edges (void);
extern bool purge_dead_edges (basic_block);
extern void find_many_sub_basic_blocks (sbitmap);
extern void rtl_make_eh_edge (sbitmap, basic_block, rtx);
extern bool can_fallthru (basic_block, basic_block);
extern bool could_fall_through (basic_block, basic_block);
extern void flow_nodes_print (const char *, const sbitmap, FILE *);
extern void flow_edge_list_print (const char *, const edge *, int, FILE *);
extern void alloc_aux_for_block (basic_block, int);
extern void alloc_aux_for_blocks (int);
extern void clear_aux_for_blocks (void);
@ -916,7 +847,27 @@ extern void alloc_aux_for_edge (edge, int);
extern void alloc_aux_for_edges (int);
extern void clear_aux_for_edges (void);
extern void free_aux_for_edges (void);
/* In cfganal.c */
extern void find_unreachable_blocks (void);
extern bool forwarder_block_p (basic_block);
extern bool can_fallthru (basic_block, basic_block);
extern bool could_fall_through (basic_block, basic_block);
extern void flow_nodes_print (const char *, const sbitmap, FILE *);
extern void flow_edge_list_print (const char *, const edge *, int, FILE *);
/* In cfgrtl.c */
extern basic_block force_nonfallthru (edge);
extern rtx block_label (basic_block);
extern bool purge_all_dead_edges (void);
extern bool purge_dead_edges (basic_block);
/* In cfgbuild.c. */
extern void find_many_sub_basic_blocks (sbitmap);
extern void rtl_make_eh_edge (sbitmap, basic_block, rtx);
extern void find_basic_blocks (rtx);
/* In cfgcleanup.c. */
extern bool cleanup_cfg (int);
extern bool delete_unreachable_blocks (void);

View file

@ -85,6 +85,7 @@
#include "params.h"
#include "toplev.h"
#include "tree-pass.h"
#include "df.h"
#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
@ -1607,16 +1608,6 @@ fix_crossing_conditional_branches (void)
last_bb->aux = new_bb;
prev_bb = last_bb;
last_bb = new_bb;
/* Update register liveness information. */
new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
prev_bb->il.rtl->global_live_at_end);
COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
prev_bb->il.rtl->global_live_at_end);
/* Put appropriate instructions in new bb. */
new_label = gen_label_rtx ();
@ -1840,10 +1831,7 @@ fix_edges_for_rarely_executed_code (edge *crossing_edges,
well. */
if (!HAS_LONG_UNCOND_BRANCH)
{
fix_crossing_unconditional_branches ();
reg_scan (get_insns (), max_reg_num ());
}
fix_crossing_unconditional_branches ();
add_reg_crossing_jump_notes ();
}
@ -2205,13 +2193,11 @@ gate_handle_reorder_blocks (void)
static unsigned int
rest_of_handle_reorder_blocks (void)
{
unsigned int liveness_flags;
basic_block bb;
/* Last attempt to optimize CFG, as scheduling, peepholing and insn
splitting possibly introduced more crossjumping opportunities. */
liveness_flags = (!HAVE_conditional_execution ? CLEANUP_UPDATE_LIFE : 0);
cfg_layout_initialize (CLEANUP_EXPENSIVE | liveness_flags);
cfg_layout_initialize (CLEANUP_EXPENSIVE);
if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
{
@ -2224,14 +2210,7 @@ rest_of_handle_reorder_blocks (void)
reorder_basic_blocks ();
if (flag_reorder_blocks || flag_reorder_blocks_and_partition
|| (flag_sched2_use_traces && flag_schedule_insns_after_reload))
cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
/* On conditional execution targets we can not update the life cheaply, so
we deffer the updating to after both cleanups. This may lose some cases
but should not be terribly bad. */
if (HAVE_conditional_execution)
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_DEATH_NOTES);
cleanup_cfg (CLEANUP_EXPENSIVE);
FOR_EACH_BB (bb)
if (bb->next_bb != EXIT_BLOCK_PTR)
@ -2279,9 +2258,6 @@ rest_of_handle_partition_blocks (void)
{
no_new_pseudos = 0;
partition_hot_cold_basic_blocks ();
allocate_reg_life_data ();
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
no_new_pseudos = 1;
return 0;
}

View file

@ -1,6 +1,6 @@
/* Functions to support general ended bitmaps.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -852,72 +852,151 @@ bitmap_and_into (bitmap a, bitmap b)
gcc_assert (!a->current || a->indx == a->current->indx);
}
/* Insert an element equal to SRC_ELT after DST_PREV, overwriting DST_ELT
if non-NULL. CHANGED is true if the destination bitmap had already been
changed; the new value of CHANGED is returned. */
static inline bool
bitmap_elt_copy (bitmap dst, bitmap_element *dst_elt, bitmap_element *dst_prev,
bitmap_element *src_elt, bool changed)
{
if (!changed && dst_elt && dst_elt->indx == src_elt->indx)
{
unsigned ix;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
if (src_elt->bits[ix] != dst_elt->bits[ix])
{
dst_elt->bits[ix] = src_elt->bits[ix];
changed = true;
}
}
else
{
changed = true;
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, src_elt->indx);
else
dst_elt->indx = src_elt->indx;
memcpy (dst_elt->bits, src_elt->bits, sizeof (dst_elt->bits));
}
return changed;
}
/* DST = A & ~B */
void
bool
bitmap_and_compl (bitmap dst, bitmap a, bitmap b)
{
bitmap_element *dst_elt = dst->first;
bitmap_element *a_elt = a->first;
bitmap_element *b_elt = b->first;
bitmap_element *dst_prev = NULL;
bitmap_element **dst_prev_pnext = &dst->first;
bool changed = false;
gcc_assert (dst != a && dst != b);
if (a == b)
{
changed = !bitmap_empty_p (dst);
bitmap_clear (dst);
return;
return changed;
}
while (a_elt)
{
if (!b_elt || a_elt->indx < b_elt->indx)
while (b_elt && b_elt->indx < a_elt->indx)
b_elt = b_elt->next;
if (!b_elt || b_elt->indx > a_elt->indx)
{
/* Copy a_elt. */
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
else
dst_elt->indx = a_elt->indx;
memcpy (dst_elt->bits, a_elt->bits, sizeof (dst_elt->bits));
dst_prev = dst_elt;
dst_elt = dst_elt->next;
changed = bitmap_elt_copy (dst, dst_elt, dst_prev, a_elt, changed);
dst_prev = *dst_prev_pnext;
dst_prev_pnext = &dst_prev->next;
dst_elt = *dst_prev_pnext;
a_elt = a_elt->next;
}
else if (b_elt->indx < a_elt->indx)
b_elt = b_elt->next;
else
{
/* Matching elts, generate A & ~B. */
unsigned ix;
BITMAP_WORD ior = 0;
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
else
dst_elt->indx = a_elt->indx;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
if (!changed && dst_elt && dst_elt->indx == a_elt->indx)
{
BITMAP_WORD r = a_elt->bits[ix] & ~b_elt->bits[ix];
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] & ~b_elt->bits[ix];
dst_elt->bits[ix] = r;
ior |= r;
if (dst_elt->bits[ix] != r)
{
changed = true;
dst_elt->bits[ix] = r;
}
ior |= r;
}
}
else
{
bool new_element;
if (!dst_elt || dst_elt->indx > a_elt->indx)
{
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
new_element = true;
}
else
{
dst_elt->indx = a_elt->indx;
new_element = false;
}
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] & ~b_elt->bits[ix];
dst_elt->bits[ix] = r;
ior |= r;
}
if (ior)
changed = true;
else
{
changed |= !new_element;
bitmap_element_free (dst, dst_elt);
dst_elt = *dst_prev_pnext;
}
}
if (ior)
{
dst_prev = dst_elt;
dst_elt = dst_elt->next;
dst_prev = *dst_prev_pnext;
dst_prev_pnext = &dst_prev->next;
dst_elt = *dst_prev_pnext;
}
a_elt = a_elt->next;
b_elt = b_elt->next;
}
}
/* Ensure that dst->current is valid. */
dst->current = dst->first;
bitmap_elt_clear_from (dst, dst_elt);
if (dst_elt)
{
changed = true;
bitmap_elt_clear_from (dst, dst_elt);
}
gcc_assert (!dst->current == !dst->first);
if (dst->current)
dst->indx = dst->current->indx;
return changed;
}
/* A &= ~B. Returns true if A changes */
@ -974,15 +1053,120 @@ bitmap_and_compl_into (bitmap a, bitmap b)
return changed != 0;
}
/* Set COUNT bits from START in HEAD. */
void
bitmap_set_range (bitmap head, unsigned int start, unsigned int count)
{
unsigned int first_index, end_bit_plus1, last_index;
bitmap_element *elt, *elt_prev;
unsigned int i;
if (!count)
return;
first_index = start / BITMAP_ELEMENT_ALL_BITS;
end_bit_plus1 = start + count;
last_index = (end_bit_plus1 - 1) / BITMAP_ELEMENT_ALL_BITS;
elt = bitmap_find_bit (head, start);
/* If bitmap_find_bit returns zero, the current is the closest block
to the result. Otherwise, just use bitmap_element_allocate to
ensure ELT is set; in the loop below, ELT == NULL means "insert
at the end of the bitmap". */
if (!elt)
{
elt = bitmap_element_allocate (head);
elt->indx = first_index;
bitmap_element_link (head, elt);
}
gcc_assert (elt->indx == first_index);
elt_prev = elt->prev;
for (i = first_index; i <= last_index; i++)
{
unsigned elt_start_bit = i * BITMAP_ELEMENT_ALL_BITS;
unsigned elt_end_bit_plus1 = elt_start_bit + BITMAP_ELEMENT_ALL_BITS;
unsigned int first_word_to_mod;
BITMAP_WORD first_mask;
unsigned int last_word_to_mod;
BITMAP_WORD last_mask;
unsigned int ix;
if (!elt || elt->indx != i)
elt = bitmap_elt_insert_after (head, elt_prev, i);
if (elt_start_bit <= start)
{
/* The first bit to turn on is somewhere inside this
elt. */
first_word_to_mod = (start - elt_start_bit) / BITMAP_WORD_BITS;
/* This mask should have 1s in all bits >= start position. */
first_mask =
(((BITMAP_WORD) 1) << ((start % BITMAP_WORD_BITS))) - 1;
first_mask = ~first_mask;
}
else
{
/* The first bit to turn on is below this start of this elt. */
first_word_to_mod = 0;
first_mask = ~(BITMAP_WORD) 0;
}
if (elt_end_bit_plus1 <= end_bit_plus1)
{
/* The last bit to turn on is beyond this elt. */
last_word_to_mod = BITMAP_ELEMENT_WORDS - 1;
last_mask = ~(BITMAP_WORD) 0;
}
else
{
/* The last bit to turn on is inside to this elt. */
last_word_to_mod =
(end_bit_plus1 - elt_start_bit) / BITMAP_WORD_BITS;
/* The last mask should have 1s below the end bit. */
last_mask =
(((BITMAP_WORD) 1) << ((end_bit_plus1 % BITMAP_WORD_BITS))) - 1;
}
if (first_word_to_mod == last_word_to_mod)
{
BITMAP_WORD mask = first_mask & last_mask;
elt->bits[first_word_to_mod] |= mask;
}
else
{
elt->bits[first_word_to_mod] |= first_mask;
if (BITMAP_ELEMENT_WORDS > 2)
for (ix = first_word_to_mod + 1; ix < last_word_to_mod; ix++)
elt->bits[ix] = ~(BITMAP_WORD) 0;
elt->bits[last_word_to_mod] |= last_mask;
}
elt_prev = elt;
elt = elt->next;
}
head->current = elt ? elt : elt_prev;
head->indx = head->current->indx;
}
/* Clear COUNT bits from START in HEAD. */
void
bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
{
unsigned int first_index = start / BITMAP_ELEMENT_ALL_BITS;
unsigned int end_bit_plus1 = start + count;
unsigned int end_bit = end_bit_plus1 - 1;
unsigned int last_index = (end_bit) / BITMAP_ELEMENT_ALL_BITS;
bitmap_element *elt = bitmap_find_bit (head, start);
unsigned int first_index, end_bit_plus1, last_index;
bitmap_element *elt;
if (!count)
return;
first_index = start / BITMAP_ELEMENT_ALL_BITS;
end_bit_plus1 = start + count;
last_index = (end_bit_plus1 - 1) / BITMAP_ELEMENT_ALL_BITS;
elt = bitmap_find_bit (head, start);
/* If bitmap_find_bit returns zero, the current is the closest block
to the result. If the current is less than first index, find the
@ -1070,8 +1254,9 @@ bitmap_clear_range (bitmap head, unsigned int start, unsigned int count)
else
{
elt->bits[first_word_to_mod] &= ~first_mask;
for (i = first_word_to_mod + 1; i < last_word_to_mod; i++)
elt->bits[i] = 0;
if (BITMAP_ELEMENT_WORDS > 2)
for (i = first_word_to_mod + 1; i < last_word_to_mod; i++)
elt->bits[i] = 0;
elt->bits[last_word_to_mod] &= ~last_mask;
}
for (i = 0; i < BITMAP_ELEMENT_WORDS; i++)
@ -1163,6 +1348,66 @@ bitmap_compl_and_into (bitmap a, bitmap b)
return;
}
/* Insert an element corresponding to A_ELT | B_ELT after DST_PREV,
overwriting DST_ELT if non-NULL. CHANGED is true if the destination bitmap
had already been changed; the new value of CHANGED is returned. */
static inline bool
bitmap_elt_ior (bitmap dst, bitmap_element *dst_elt, bitmap_element *dst_prev,
bitmap_element *a_elt, bitmap_element *b_elt,
bool changed)
{
gcc_assert (a_elt || b_elt);
if (a_elt && b_elt && a_elt->indx == b_elt->indx)
{
/* Matching elts, generate A | B. */
unsigned ix;
if (!changed && dst_elt && dst_elt->indx == a_elt->indx)
{
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
if (r != dst_elt->bits[ix])
{
dst_elt->bits[ix] = r;
changed = true;
}
}
}
else
{
changed = true;
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
else
dst_elt->indx = a_elt->indx;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
dst_elt->bits[ix] = r;
}
}
}
else
{
/* Copy a single element. */
bitmap_element *src;
if (!b_elt || (a_elt && a_elt->indx < b_elt->indx))
src = a_elt;
else
src = b_elt;
gcc_assert (src);
changed = bitmap_elt_copy (dst, dst_elt, dst_prev, src, changed);
}
return changed;
}
/* DST = A | B. Return true if DST changes. */
bool
@ -1172,89 +1417,31 @@ bitmap_ior (bitmap dst, bitmap a, bitmap b)
bitmap_element *a_elt = a->first;
bitmap_element *b_elt = b->first;
bitmap_element *dst_prev = NULL;
bitmap_element **dst_prev_pnext = &dst->first;
bool changed = false;
gcc_assert (dst != a && dst != b);
while (a_elt || b_elt)
{
changed = bitmap_elt_ior (dst, dst_elt, dst_prev, a_elt, b_elt, changed);
if (a_elt && b_elt && a_elt->indx == b_elt->indx)
{
/* Matching elts, generate A | B. */
unsigned ix;
if (!changed && dst_elt && dst_elt->indx == a_elt->indx)
{
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
if (r != dst_elt->bits[ix])
{
dst_elt->bits[ix] = r;
changed = true;
}
}
}
else
{
changed = true;
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, a_elt->indx);
else
dst_elt->indx = a_elt->indx;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
dst_elt->bits[ix] = r;
}
}
a_elt = a_elt->next;
b_elt = b_elt->next;
dst_prev = dst_elt;
dst_elt = dst_elt->next;
}
else
{
/* Copy a single element. */
bitmap_element *src;
if (!b_elt || (a_elt && a_elt->indx < b_elt->indx))
{
src = a_elt;
a_elt = a_elt->next;
}
else
{
src = b_elt;
b_elt = b_elt->next;
}
if (!changed && dst_elt && dst_elt->indx == src->indx)
{
unsigned ix;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
if (src->bits[ix] != dst_elt->bits[ix])
{
dst_elt->bits[ix] = src->bits[ix];
changed = true;
}
}
else
{
changed = true;
if (!dst_elt)
dst_elt = bitmap_elt_insert_after (dst, dst_prev, src->indx);
else
dst_elt->indx = src->indx;
memcpy (dst_elt->bits, src->bits, sizeof (dst_elt->bits));
}
dst_prev = dst_elt;
dst_elt = dst_elt->next;
if (a_elt && (!b_elt || a_elt->indx <= b_elt->indx))
a_elt = a_elt->next;
else if (b_elt && (!a_elt || b_elt->indx <= a_elt->indx))
b_elt = b_elt->next;
}
dst_prev = *dst_prev_pnext;
dst_prev_pnext = &dst_prev->next;
dst_elt = *dst_prev_pnext;
}
if (dst_elt)
@ -1276,6 +1463,7 @@ bitmap_ior_into (bitmap a, bitmap b)
bitmap_element *a_elt = a->first;
bitmap_element *b_elt = b->first;
bitmap_element *a_prev = NULL;
bitmap_element **a_prev_pnext = &a->first;
bool changed = false;
if (a == b)
@ -1283,48 +1471,23 @@ bitmap_ior_into (bitmap a, bitmap b)
while (b_elt)
{
if (!a_elt || b_elt->indx < a_elt->indx)
/* If A lags behind B, just advance it. */
if (!a_elt || a_elt->indx == b_elt->indx)
{
/* Copy b_elt. */
bitmap_element *dst = bitmap_elt_insert_after (a, a_prev, b_elt->indx);
memcpy (dst->bits, b_elt->bits, sizeof (dst->bits));
a_prev = dst;
changed = bitmap_elt_ior (a, a_elt, a_prev, a_elt, b_elt, changed);
b_elt = b_elt->next;
changed = true;
}
else if (a_elt->indx < b_elt->indx)
else if (a_elt->indx > b_elt->indx)
{
a_prev = a_elt;
a_elt = a_elt->next;
}
else
{
/* Matching elts, generate A |= B. */
unsigned ix;
if (changed)
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
a_elt->bits[ix] = r;
}
else
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = a_elt->bits[ix] | b_elt->bits[ix];
if (a_elt->bits[ix] != r)
{
changed = true;
a_elt->bits[ix] = r;
}
}
changed = bitmap_elt_copy (a, NULL, a_prev, b_elt, changed);
b_elt = b_elt->next;
a_prev = a_elt;
a_elt = a_elt->next;
}
a_prev = *a_prev_pnext;
a_prev_pnext = &a_prev->next;
a_elt = *a_prev_pnext;
}
gcc_assert (!a->current == !a->first);
if (a->current)
a->indx = a->current->indx;
@ -1548,15 +1711,103 @@ bitmap_intersect_compl_p (bitmap a, bitmap b)
/* DST = A | (FROM1 & ~FROM2). Return true if DST changes. */
bool
bitmap_ior_and_compl (bitmap dst, bitmap a, bitmap from1, bitmap from2)
bitmap_ior_and_compl (bitmap dst, bitmap a, bitmap b, bitmap kill)
{
bitmap_head tmp;
bool changed;
bool changed = false;
bitmap_initialize (&tmp, &bitmap_default_obstack);
bitmap_and_compl (&tmp, from1, from2);
changed = bitmap_ior (dst, a, &tmp);
bitmap_clear (&tmp);
bitmap_element *dst_elt = dst->first;
bitmap_element *a_elt = a->first;
bitmap_element *b_elt = b->first;
bitmap_element *kill_elt = kill->first;
bitmap_element *dst_prev = NULL;
bitmap_element **dst_prev_pnext = &dst->first;
gcc_assert (dst != a && dst != b && dst != kill);
/* Special cases. We don't bother checking for bitmap_equal_p (b, kill). */
if (b == kill || bitmap_empty_p (b))
{
changed = !bitmap_equal_p (dst, a);
if (changed)
bitmap_copy (dst, a);
return changed;
}
if (bitmap_empty_p (kill))
return bitmap_ior (dst, a, b);
if (bitmap_empty_p (a))
return bitmap_and_compl (dst, b, kill);
while (a_elt || b_elt)
{
bool new_element = false;
if (b_elt)
while (kill_elt && kill_elt->indx < b_elt->indx)
kill_elt = kill_elt->next;
if (b_elt && kill_elt && kill_elt->indx == b_elt->indx
&& (!a_elt || a_elt->indx >= b_elt->indx))
{
bitmap_element tmp_elt;
unsigned ix;
BITMAP_WORD ior = 0;
tmp_elt.indx = b_elt->indx;
for (ix = BITMAP_ELEMENT_WORDS; ix--;)
{
BITMAP_WORD r = b_elt->bits[ix] & ~kill_elt->bits[ix];
ior |= r;
tmp_elt.bits[ix] = r;
}
if (ior)
{
changed = bitmap_elt_ior (dst, dst_elt, dst_prev,
a_elt, &tmp_elt, changed);
new_element = true;
if (a_elt && a_elt->indx == b_elt->indx)
a_elt = a_elt->next;
}
b_elt = b_elt->next;
kill_elt = kill_elt->next;
}
else
{
changed = bitmap_elt_ior (dst, dst_elt, dst_prev,
a_elt, b_elt, changed);
new_element = true;
if (a_elt && b_elt && a_elt->indx == b_elt->indx)
{
a_elt = a_elt->next;
b_elt = b_elt->next;
}
else
{
if (a_elt && (!b_elt || a_elt->indx <= b_elt->indx))
a_elt = a_elt->next;
else if (b_elt && (!a_elt || b_elt->indx <= a_elt->indx))
b_elt = b_elt->next;
}
}
if (new_element)
{
dst_prev = *dst_prev_pnext;
dst_prev_pnext = &dst_prev->next;
dst_elt = *dst_prev_pnext;
}
}
if (dst_elt)
{
changed = true;
bitmap_elt_clear_from (dst, dst_elt);
}
gcc_assert (!dst->current == !dst->first);
if (dst->current)
dst->indx = dst->current->indx;
return changed;
}
@ -1576,6 +1827,7 @@ bitmap_ior_and_compl_into (bitmap a, bitmap from1, bitmap from2)
return changed;
}
/* Debugging function to print out the contents of a bitmap. */

View file

@ -1,6 +1,6 @@
/* Functions to support general ended bitmaps.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -117,11 +117,12 @@ extern unsigned long bitmap_count_bits (bitmap);
The operations supported are &, & ~, |, ^. */
extern void bitmap_and (bitmap, bitmap, bitmap);
extern void bitmap_and_into (bitmap, bitmap);
extern void bitmap_and_compl (bitmap, bitmap, bitmap);
extern bool bitmap_and_compl (bitmap, bitmap, bitmap);
extern bool bitmap_and_compl_into (bitmap, bitmap);
#define bitmap_compl_and(DST, A, B) bitmap_and_compl (DST, B, A)
extern void bitmap_compl_and_into (bitmap, bitmap);
extern void bitmap_clear_range (bitmap, unsigned int, unsigned int);
extern void bitmap_set_range (bitmap, unsigned int, unsigned int);
extern bool bitmap_ior (bitmap, bitmap, bitmap);
extern bool bitmap_ior_into (bitmap, bitmap);
extern void bitmap_xor (bitmap, bitmap, bitmap);

View file

@ -1,5 +1,5 @@
/* Perform branch target register load optimizations.
Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
This file is part of GCC.
@ -37,6 +37,8 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "tm_p.h"
#include "toplev.h"
#include "tree-pass.h"
#include "recog.h"
#include "df.h"
/* Target register optimizations - these are performed after reload. */
@ -476,7 +478,7 @@ compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
CLEAR_HARD_REG_SET (info.btrs_written_in_block);
for (reg = first_btr; reg <= last_btr; reg++)
if (TEST_HARD_REG_BIT (all_btrs, reg)
&& REGNO_REG_SET_P (bb->il.rtl->global_live_at_start, reg))
&& REGNO_REG_SET_P (DF_LIVE_IN (bb), reg))
SET_HARD_REG_BIT (info.btrs_live_in_block, reg);
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
@ -508,7 +510,7 @@ compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
}
/* Check for the blockage emitted by expand_nl_goto_receiver. */
else if (current_function_has_nonlocal_label
&& GET_CODE (PATTERN (insn)) == ASM_INPUT)
&& GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE)
{
btr_user user;
@ -577,7 +579,7 @@ compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
COPY_HARD_REG_SET (btrs_live[i], info.btrs_live_in_block);
COPY_HARD_REG_SET (btrs_written[i], info.btrs_written_in_block);
REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], bb->il.rtl->global_live_at_end);
REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], DF_LIVE_OUT (bb));
/* If this block ends in a jump insn, add any uses or even clobbers
of branch target registers that it might have. */
for (insn = BB_END (bb); insn != BB_HEAD (bb) && ! INSN_P (insn); )
@ -1203,7 +1205,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
/* Insert target register initialization at head of basic block. */
def->insn = emit_insn_after (new_insn, insp);
regs_ever_live[btr] = 1;
df_set_regs_ever_live (btr, true);
if (dump_file)
fprintf (dump_file, "New pt is insn %d, inserted after insn %d\n",
@ -1226,7 +1228,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
replacement_rtx = btr_rtx;
else
replacement_rtx = gen_rtx_REG (GET_MODE (user->use), btr);
replace_rtx (user->insn, user->use, replacement_rtx);
validate_replace_rtx (user->insn, user->use, replacement_rtx);
user->use = replacement_rtx;
}
}
@ -1418,7 +1420,8 @@ migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
CLEAR_HARD_REG_SET (all_btrs);
for (first_btr = -1, reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
if (TEST_HARD_REG_BIT (reg_class_contents[(int) btr_class], reg)
&& (allow_callee_save || call_used_regs[reg] || regs_ever_live[reg]))
&& (allow_callee_save || call_used_regs[reg]
|| df_regs_ever_live_p (reg)))
{
SET_HARD_REG_BIT (all_btrs, reg);
last_btr = reg;
@ -1455,7 +1458,7 @@ migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
fibheap_delete (all_btr_defs);
}
void
static void
branch_target_load_optimize (bool after_prologue_epilogue_gen)
{
enum reg_class class = targetm.branch_target_register_class ();
@ -1467,14 +1470,17 @@ branch_target_load_optimize (bool after_prologue_epilogue_gen)
else
issue_rate = 1;
/* Build the CFG for migrate_btr_defs. */
if (!after_prologue_epilogue_gen)
{
/* Build the CFG for migrate_btr_defs. */
#if 1
/* This may or may not be needed, depending on where we
run this phase. */
cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
/* This may or may not be needed, depending on where we
run this phase. */
cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
#endif
}
df_analyze ();
life_analysis (0);
/* Dominator info is also needed for migrate_btr_def. */
calculate_dominance_info (CDI_DOMINATORS);
@ -1483,21 +1489,50 @@ branch_target_load_optimize (bool after_prologue_epilogue_gen)
(after_prologue_epilogue_gen)));
free_dominance_info (CDI_DOMINATORS);
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_DEATH_NOTES | PROP_REG_INFO);
}
}
static bool
gate_handle_branch_target_load_optimize (void)
gate_handle_branch_target_load_optimize1 (void)
{
return flag_branch_target_load_optimize;
}
/* Execute function for the first branch target load optimization pass
   ("btl1").  Forwards the current epilogue_completed state as the
   after_prologue_epilogue_gen argument of the optimizer.  Always
   returns 0 (no extra TODO flags).  */
static unsigned int
rest_of_handle_branch_target_load_optimize1 (void)
{
  branch_target_load_optimize (epilogue_completed);
  return 0;
}
/* Pass descriptor for the first branch target load optimization pass
   ("btl1").  Gated by gate_handle_branch_target_load_optimize1 and
   executed by rest_of_handle_branch_target_load_optimize1; dumps the
   function and garbage-collects when finished.  */
struct tree_opt_pass pass_branch_target_load_optimize1 =
{
  "btl1",                               /* name */
  gate_handle_branch_target_load_optimize1, /* gate */
  rest_of_handle_branch_target_load_optimize1, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'd'                                   /* letter */
};
/* Gate for the second branch target load optimization pass ("btl2"):
   run only when optimizing and -fbranch-target-load-optimize2 was
   requested.  */
static bool
gate_handle_branch_target_load_optimize2 (void)
{
  return (optimize > 0 && flag_branch_target_load_optimize2);
}
static unsigned int
rest_of_handle_branch_target_load_optimize (void)
rest_of_handle_branch_target_load_optimize2 (void)
{
static int warned = 0;
@ -1518,11 +1553,11 @@ rest_of_handle_branch_target_load_optimize (void)
return 0;
}
struct tree_opt_pass pass_branch_target_load_optimize =
struct tree_opt_pass pass_branch_target_load_optimize2 =
{
"btl", /* name */
gate_handle_branch_target_load_optimize, /* gate */
rest_of_handle_branch_target_load_optimize, /* execute */
"btl2", /* name */
gate_handle_branch_target_load_optimize2, /* gate */
rest_of_handle_branch_target_load_optimize2, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */

View file

@ -761,12 +761,10 @@ expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
#endif
{ /* Nothing */ }
/* @@@ This is a kludge. Not all machine descriptions define a blockage
insn, but we must not allow the code we just generated to be reordered
by scheduling. Specifically, the update of the frame pointer must
happen immediately, not later. So emit an ASM_INPUT to act as blockage
insn. */
emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
/* We must not allow the code we just generated to be reordered by
scheduling. Specifically, the update of the frame pointer must
happen immediately, not later. */
emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not

View file

@ -36,6 +36,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "toplev.h"
#include "tm_p.h"
#include "addresses.h"
#include "df.h"
#ifndef MAX_MOVE_MAX
#define MAX_MOVE_MAX MOVE_MAX
@ -189,8 +190,8 @@ init_caller_save (void)
savepat = gen_rtx_SET (VOIDmode, test_mem, test_reg);
restpat = gen_rtx_SET (VOIDmode, test_reg, test_mem);
saveinsn = gen_rtx_INSN (VOIDmode, 0, 0, 0, 0, 0, savepat, -1, 0, 0);
restinsn = gen_rtx_INSN (VOIDmode, 0, 0, 0, 0, 0, restpat, -1, 0, 0);
saveinsn = gen_rtx_INSN (VOIDmode, 0, 0, 0, 0, 0, savepat, -1, 0);
restinsn = gen_rtx_INSN (VOIDmode, 0, 0, 0, 0, 0, restpat, -1, 0);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
for (mode = 0 ; mode < MAX_MACHINE_MODE; mode++)
@ -200,7 +201,7 @@ init_caller_save (void)
/* Update the register number and modes of the register
and memory operand. */
REGNO (test_reg) = i;
SET_REGNO (test_reg, i);
PUT_MODE (test_reg, mode);
PUT_MODE (test_mem, mode);

View file

@ -41,6 +41,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "target.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
@ -2222,7 +2223,8 @@ expand_call (tree exp, rtx target, int ignore)
if (currently_expanding_call++ != 0
|| !flag_optimize_sibling_calls
|| args_size.var
|| lookup_stmt_eh_region (exp) >= 0)
|| lookup_stmt_eh_region (exp) >= 0
|| dbg_cnt (tail_call) == false)
try_tail_call = 0;
/* Rest of purposes for tail call optimizations to fail. */
@ -2855,9 +2857,10 @@ expand_call (tree exp, rtx target, int ignore)
valreg = temp;
}
/* For calls to `setjmp', etc., inform flow.c it should complain
if nonvolatile values are live. For functions that cannot return,
inform flow that control does not fall through. */
/* For calls to `setjmp', etc., inform
function.c:setjmp_warnings that it should complain if
nonvolatile values are live. For functions that cannot
return, inform flow that control does not fall through. */
if ((flags & ECF_NORETURN) || pass == 0)
{
@ -3816,9 +3819,10 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
valreg,
old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
/* For calls to `setjmp', etc., inform flow.c it should complain
if nonvolatile values are live. For functions that cannot return,
inform flow that control does not fall through. */
/* For calls to `setjmp', etc., inform function.c:setjmp_warnings
that it should complain if nonvolatile values are live. For
functions that cannot return, inform flow that control does not
fall through. */
if (flags & ECF_NORETURN)
{

221
gcc/cfg.c
View file

@ -1,6 +1,6 @@
/* Control flow graph manipulation code for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004, 2005
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
This file is part of GCC.
@ -66,6 +66,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "ggc.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "df.h"
#include "cfgloop.h"
/* The obstack on which the flow graph components are allocated. */
@ -163,24 +164,28 @@ void
compact_blocks (void)
{
int i;
basic_block bb;
SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
i = NUM_FIXED_BLOCKS;
FOR_EACH_BB (bb)
if (df)
df_compact_blocks ();
else
{
SET_BASIC_BLOCK (i, bb);
bb->index = i;
i++;
basic_block bb;
i = NUM_FIXED_BLOCKS;
FOR_EACH_BB (bb)
{
SET_BASIC_BLOCK (i, bb);
bb->index = i;
i++;
}
gcc_assert (i == n_basic_blocks);
for (; i < last_basic_block; i++)
SET_BASIC_BLOCK (i, NULL);
}
gcc_assert (i == n_basic_blocks);
for (; i < last_basic_block; i++)
SET_BASIC_BLOCK (i, NULL);
last_basic_block = n_basic_blocks;
}
@ -205,6 +210,7 @@ static inline void
connect_src (edge e)
{
  /* Append E to the successor vector of its source block.  */
  VEC_safe_push (edge, gc, e->src->succs, e);
  /* The CFG changed, so cached dataflow solutions are now stale.  */
  df_mark_solutions_dirty ();
}
/* Connect E to E->dest. */
@ -215,6 +221,7 @@ connect_dest (edge e)
basic_block dest = e->dest;
VEC_safe_push (edge, gc, dest->preds, e);
e->dest_idx = EDGE_COUNT (dest->preds) - 1;
df_mark_solutions_dirty ();
}
/* Disconnect edge E from E->src. */
@ -237,6 +244,7 @@ disconnect_src (edge e)
ei_next (&ei);
}
df_mark_solutions_dirty ();
gcc_unreachable ();
}
@ -254,6 +262,7 @@ disconnect_dest (edge e)
to update dest_idx of the edge that moved into the "hole". */
if (dest_idx < EDGE_COUNT (dest->preds))
EDGE_PRED (dest, dest_idx)->dest_idx = dest_idx;
df_mark_solutions_dirty ();
}
/* Create an edge connecting SRC and DEST with flags FLAGS. Return newly
@ -275,7 +284,6 @@ unchecked_make_edge (basic_block src, basic_block dst, int flags)
connect_dest (e);
execute_on_growing_pred (e);
return e;
}
@ -409,15 +417,16 @@ redirect_edge_pred (edge e, basic_block new_pred)
connect_src (e);
}
/* Clear all basic block flags, with the exception of partitioning. */
/* Clear all basic block flags, with the exception of partitioning and
setjmp_target. */
void
clear_bb_flags (void)
{
basic_block bb;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
bb->flags = (BB_PARTITION (bb) | (bb->flags & BB_DISABLE_SCHEDULE)
| (bb->flags & BB_RTL));
bb->flags = (BB_PARTITION (bb)
| (bb->flags & (BB_DISABLE_SCHEDULE + BB_RTL + BB_NON_LOCAL_GOTO_TARGET)));
}
/* Check the consistency of profile information. We can't do that
@ -469,6 +478,41 @@ check_bb_profile (basic_block bb, FILE * file)
}
}
/* Debugging dump helper: write the members of register set R to OUTF,
   one " %d" per register, appending the hard register's name in
   brackets for registers below FIRST_PSEUDO_REGISTER.  A null set is
   printed as " (nil)".  */

void
dump_regset (regset r, FILE *outf)
{
  if (r == NULL)
    fputs (" (nil)", outf);
  else
    {
      unsigned int regno;
      reg_set_iterator iter;

      EXECUTE_IF_SET_IN_REG_SET (r, 0, regno, iter)
	{
	  fprintf (outf, " %d", regno);
	  if (regno < FIRST_PSEUDO_REGISTER)
	    fprintf (outf, " [%s]", reg_names[regno]);
	}
    }
}
/* Dump register set R to stderr, followed by a newline.  Meant to be
   invoked by hand from within a debugger.  */

void
debug_regset (regset r)
{
  dump_regset (r, stderr);
  fputc ('\n', stderr);
}
/* Emit basic block information for BB. HEADER is true if the user wants
the generic information and the predecessors, FOOTER is true if they want
the successors. FLAGS is the dump flags of interest; TDF_DETAILS emit
@ -500,6 +544,14 @@ dump_bb_info (basic_block bb, bool header, bool footer, int flags,
fprintf (file, "%sPredecessors: ", prefix);
FOR_EACH_EDGE (e, ei, bb->preds)
dump_edge_info (file, e, 0);
if ((flags & TDF_DETAILS)
&& (bb->flags & BB_RTL)
&& df)
{
fprintf (file, "\n");
df_dump_top (bb, file);
}
}
if (footer)
@ -507,81 +559,92 @@ dump_bb_info (basic_block bb, bool header, bool footer, int flags,
fprintf (file, "\n%sSuccessors: ", prefix);
FOR_EACH_EDGE (e, ei, bb->succs)
dump_edge_info (file, e, 1);
}
if ((flags & TDF_DETAILS)
&& (bb->flags & BB_RTL))
{
if (bb->il.rtl->global_live_at_start && header)
if ((flags & TDF_DETAILS)
&& (bb->flags & BB_RTL)
&& df)
{
fprintf (file, "\n%sRegisters live at start:", prefix);
dump_regset (bb->il.rtl->global_live_at_start, file);
}
if (bb->il.rtl->global_live_at_end && footer)
{
fprintf (file, "\n%sRegisters live at end:", prefix);
dump_regset (bb->il.rtl->global_live_at_end, file);
fprintf (file, "\n");
df_dump_bottom (bb, file);
}
}
putc ('\n', file);
}
/* Dump the register info to FILE.  One line is emitted per pseudo
   register, listing its use/set counts, home block, lifetime
   properties and register class preferences.  */

void
dump_reg_info (FILE *file)
{
  unsigned int i, max = max_reg_num ();

  /* After reload the pseudo registers are gone; nothing to report.  */
  if (reload_completed)
    return;

  /* Do not index past the end of the reg_info_p array.  */
  if (reg_info_p_size < max)
    max = reg_info_p_size;

  fprintf (file, "%d registers.\n", max);
  for (i = FIRST_PSEUDO_REGISTER; i < max; i++)
    {
      enum reg_class class, altclass;

      /* Use/def counts come from the regstat package when it is
	 initialized, otherwise directly from the df machinery.  */
      if (regstat_n_sets_and_refs)
	fprintf (file, "\nRegister %d used %d times across %d insns",
		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
      else if (df)
	fprintf (file, "\nRegister %d used %d times across %d insns",
		 i, DF_REG_USE_COUNT (i) + DF_REG_DEF_COUNT (i), REG_LIVE_LENGTH (i));

      if (REG_BASIC_BLOCK (i) >= NUM_FIXED_BLOCKS)
	fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
      if (regstat_n_sets_and_refs)
	fprintf (file, "; set %d time%s", REG_N_SETS (i),
		 (REG_N_SETS (i) == 1) ? "" : "s");
      else if (df)
	fprintf (file, "; set %d time%s", DF_REG_DEF_COUNT (i),
		 (DF_REG_DEF_COUNT (i) == 1) ? "" : "s");
      if (regno_reg_rtx[i] != NULL && REG_USERVAR_P (regno_reg_rtx[i]))
	fprintf (file, "; user var");
      if (REG_N_DEATHS (i) != 1)
	fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
      if (REG_N_CALLS_CROSSED (i) == 1)
	fprintf (file, "; crosses 1 call");
      else if (REG_N_CALLS_CROSSED (i))
	fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
      if (regno_reg_rtx[i] != NULL
	  && PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
	fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));

      /* Report the class preferences only when they differ from the
	 default GENERAL_REGS / ALL_REGS pair.  */
      class = reg_preferred_class (i);
      altclass = reg_alternate_class (i);
      if (class != GENERAL_REGS || altclass != ALL_REGS)
	{
	  if (altclass == ALL_REGS || class == ALL_REGS)
	    fprintf (file, "; pref %s", reg_class_names[(int) class]);
	  else if (altclass == NO_REGS)
	    fprintf (file, "; %s or none", reg_class_names[(int) class]);
	  else
	    fprintf (file, "; pref %s, else %s",
		     reg_class_names[(int) class],
		     reg_class_names[(int) altclass]);
	}

      if (regno_reg_rtx[i] != NULL && REG_POINTER (regno_reg_rtx[i]))
	fprintf (file, "; pointer");
      fprintf (file, ".\n");
    }
}
void
dump_flow_info (FILE *file, int flags)
{
basic_block bb;
/* There are no pseudo registers after reload. Don't dump them. */
if (reg_n_info && !reload_completed
&& (flags & TDF_DETAILS) != 0)
{
unsigned int i, max = max_reg_num ();
fprintf (file, "%d registers.\n", max);
for (i = FIRST_PSEUDO_REGISTER; i < max; i++)
if (REG_N_REFS (i))
{
enum reg_class prefclass, altclass;
fprintf (file, "\nRegister %d used %d times across %d insns",
i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
if (REG_BASIC_BLOCK (i) >= 0)
fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
if (REG_N_SETS (i))
fprintf (file, "; set %d time%s", REG_N_SETS (i),
(REG_N_SETS (i) == 1) ? "" : "s");
if (regno_reg_rtx[i] != NULL && REG_USERVAR_P (regno_reg_rtx[i]))
fprintf (file, "; user var");
if (REG_N_DEATHS (i) != 1)
fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
if (REG_N_CALLS_CROSSED (i) == 1)
fprintf (file, "; crosses 1 call");
else if (REG_N_CALLS_CROSSED (i))
fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
if (regno_reg_rtx[i] != NULL
&& PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
prefclass = reg_preferred_class (i);
altclass = reg_alternate_class (i);
if (prefclass != GENERAL_REGS || altclass != ALL_REGS)
{
if (altclass == ALL_REGS || prefclass == ALL_REGS)
fprintf (file, "; pref %s", reg_class_names[(int) prefclass]);
else if (altclass == NO_REGS)
fprintf (file, "; %s or none", reg_class_names[(int) prefclass]);
else
fprintf (file, "; pref %s, else %s",
reg_class_names[(int) prefclass],
reg_class_names[(int) altclass]);
}
if (regno_reg_rtx[i] != NULL && REG_POINTER (regno_reg_rtx[i]))
fprintf (file, "; pointer");
fprintf (file, ".\n");
}
}
if (reg_info_p_size && (flags & TDF_DETAILS) != 0)
dump_reg_info (file);
fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
FOR_EACH_BB (bb)

View file

@ -1,6 +1,6 @@
/* Control flow graph analysis code for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -645,16 +645,20 @@ connect_infinite_loops_to_exit (void)
return;
}
/* Compute reverse top sort order.
This is computing a post order numbering of the graph. */
/* Compute reverse top sort order. This is computing a post order
numbering of the graph. If INCLUDE_ENTRY_EXIT is true, then
ENTRY_BLOCK and EXIT_BLOCK are included. If DELETE_UNREACHABLE is
true, unreachable blocks are deleted. */
int
post_order_compute (int *post_order, bool include_entry_exit)
post_order_compute (int *post_order, bool include_entry_exit,
bool delete_unreachable)
{
edge_iterator *stack;
int sp;
int post_order_num = 0;
sbitmap visited;
int count;
if (include_entry_exit)
post_order[post_order_num++] = EXIT_BLOCK;
@ -699,7 +703,7 @@ post_order_compute (int *post_order, bool include_entry_exit)
else
{
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
post_order[post_order_num++] = src->index;
post_order[post_order_num++] = src->index;
if (!ei_one_before_end_p (ei))
ei_next (&stack[sp - 1]);
@ -709,7 +713,220 @@ post_order_compute (int *post_order, bool include_entry_exit)
}
if (include_entry_exit)
post_order[post_order_num++] = ENTRY_BLOCK;
{
post_order[post_order_num++] = ENTRY_BLOCK;
count = post_order_num;
}
else
count = post_order_num + 2;
/* Delete the unreachable blocks if some were found and we are
supposed to do it. */
if (delete_unreachable && (count != n_basic_blocks))
{
basic_block b;
basic_block next_bb;
for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
{
next_bb = b->next_bb;
if (!(TEST_BIT (visited, b->index)))
delete_basic_block (b);
}
tidy_fallthru_edges ();
}
free (stack);
sbitmap_free (visited);
return post_order_num;
}
/* Helper routine for inverted_post_order_compute.
BB has to belong to a region of CFG
unreachable by inverted traversal from the exit.
i.e. there's no control flow path from ENTRY to EXIT
that contains this BB.
This can happen in two cases - if there's an infinite loop
or if there's a block that has no successor
(call to a function with no return).
Some RTL passes deal with this condition by
calling connect_infinite_loops_to_exit () and/or
add_noreturn_fake_exit_edges ().
However, those methods involve modifying the CFG itself
which may not be desirable.
Hence, we deal with the infinite loop/no return cases
by identifying a unique basic block that can reach all blocks
in such a region by inverted traversal.
This function returns a basic block that guarantees
that all blocks in the region are reachable
by starting an inverted traversal from the returned block. */
static basic_block
dfs_find_deadend (basic_block bb)
{
  /* Track the blocks this walk has already touched so that a cycle is
     detected instead of iterating forever.  */
  sbitmap seen = sbitmap_alloc (last_basic_block);
  sbitmap_zero (seen);

  /* Repeatedly step to the first successor, stopping at a block with
     no successors at all, or at one whose first successor was already
     visited (the walk has closed a cycle).  */
  while (1)
    {
      basic_block next;

      SET_BIT (seen, bb->index);
      if (EDGE_COUNT (bb->succs) == 0)
	break;

      next = EDGE_SUCC (bb, 0)->dest;
      if (TEST_BIT (seen, next->index))
	break;

      bb = next;
    }

  sbitmap_free (seen);
  return bb;
}
/* Compute the reverse top sort order of the inverted CFG
i.e. starting from the exit block and following the edges backward
(from successors to predecessors).
This ordering can be used for forward dataflow problems among others.
This function assumes that all blocks in the CFG are reachable
from the ENTRY (but not necessarily from EXIT).
If there's an infinite loop,
a simple inverted traversal starting from the blocks
with no successors can't visit all blocks.
To solve this problem, we first do inverted traversal
starting from the blocks with no successor.
And if there's any block left that's not visited by the regular
inverted traversal from EXIT,
those blocks are in such problematic region.
Among those, we find one block that has
any visited predecessor (which is an entry into such a region),
and start looking for a "dead end" from that block
and do another inverted traversal from that block. */
int
inverted_post_order_compute (int *post_order)
{
basic_block bb;
edge_iterator *stack;
int sp;
int post_order_num = 0;
sbitmap visited;
/* Allocate stack for back-tracking up CFG. */
stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
sbitmap_zero (visited);
/* Put all blocks that have no successor into the initial work list. */
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
if (EDGE_COUNT (bb->succs) == 0)
{
/* Push the initial edge on to the stack. */
if (EDGE_COUNT (bb->preds) > 0)
{
stack[sp++] = ei_start (bb->preds);
SET_BIT (visited, bb->index);
}
}
do
{
bool has_unvisited_bb = false;
/* The inverted traversal loop. */
while (sp)
{
edge_iterator ei;
basic_block pred;
/* Look at the edge on the top of the stack. */
ei = stack[sp - 1];
bb = ei_edge (ei)->dest;
pred = ei_edge (ei)->src;
/* Check if the predecessor has been visited yet. */
if (! TEST_BIT (visited, pred->index))
{
/* Mark that we have visited the destination. */
SET_BIT (visited, pred->index);
if (EDGE_COUNT (pred->preds) > 0)
/* Since the predecessor node has been visited for the first
time, check its predecessors. */
stack[sp++] = ei_start (pred->preds);
else
post_order[post_order_num++] = pred->index;
}
else
{
if (bb != EXIT_BLOCK_PTR && ei_one_before_end_p (ei))
post_order[post_order_num++] = bb->index;
if (!ei_one_before_end_p (ei))
ei_next (&stack[sp - 1]);
else
sp--;
}
}
/* Detect any infinite loop and activate the kludge.
Note that this doesn't check EXIT_BLOCK itself
since EXIT_BLOCK is always added after the outer do-while loop. */
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
if (!TEST_BIT (visited, bb->index))
{
has_unvisited_bb = true;
if (EDGE_COUNT (bb->preds) > 0)
{
edge_iterator ei;
edge e;
basic_block visited_pred = NULL;
/* Find an already visited predecessor. */
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (TEST_BIT (visited, e->src->index))
visited_pred = e->src;
}
if (visited_pred)
{
basic_block be = dfs_find_deadend (bb);
gcc_assert (be != NULL);
SET_BIT (visited, be->index);
stack[sp++] = ei_start (be->preds);
break;
}
}
}
if (has_unvisited_bb && sp == 0)
{
/* No blocks are reachable from EXIT at all.
Find a dead-end from the ENTRY, and restart the iteration. */
basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
gcc_assert (be != NULL);
SET_BIT (visited, be->index);
stack[sp++] = ei_start (be->preds);
}
/* The only case the below while fires is
when there's an infinite loop. */
}
while (sp);
/* EXIT_BLOCK is always included. */
post_order[post_order_num++] = EXIT_BLOCK;
free (stack);
sbitmap_free (visited);
@ -1076,4 +1293,3 @@ compute_dominance_frontiers (bitmap *frontiers)
timevar_pop (TV_DOM_FRONTIERS);
}

View file

@ -54,6 +54,8 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "tree-pass.h"
#include "cfgloop.h"
#include "expr.h"
#include "df.h"
#include "dce.h"
#define FORWARDER_BLOCK_P(BB) ((BB)->flags & BB_FORWARDER_BLOCK)
@ -70,7 +72,7 @@ static void merge_blocks_move_successor_nojumps (basic_block, basic_block);
static bool try_optimize_cfg (int);
static bool try_simplify_condjump (basic_block);
static bool try_forward_edges (int, basic_block);
static edge thread_jump (int, edge, basic_block);
static edge thread_jump (edge, basic_block);
static bool mark_effect (rtx, bitmap);
static void notice_new_block (basic_block);
static void update_forwarder_flag (basic_block);
@ -260,7 +262,7 @@ mentions_nonequal_regs (rtx *x, void *data)
if exist, NULL otherwise. */
static edge
thread_jump (int mode, edge e, basic_block b)
thread_jump (edge e, basic_block b)
{
rtx set1, set2, cond1, cond2, insn;
enum rtx_code code1, code2, reversed_code2;
@ -380,11 +382,6 @@ thread_jump (int mode, edge e, basic_block b)
if (for_each_rtx (&cond2, mentions_nonequal_regs, nonequal))
goto failed_exit;
/* In case liveness information is available, we need to prove equivalence
only of the live values. */
if (mode & CLEANUP_UPDATE_LIFE)
AND_REG_SET (nonequal, b->il.rtl->global_live_at_end);
EXECUTE_IF_SET_IN_REG_SET (nonequal, 0, i, rsi)
goto failed_exit;
@ -431,7 +428,7 @@ try_forward_edges (int mode, basic_block b)
int counter;
bool threaded = false;
int nthreaded_edges = 0;
bool may_thread = first_pass | (b->flags & BB_DIRTY);
bool may_thread = first_pass | df_get_bb_dirty (b);
/* Skip complex edges because we don't know how to update them.
@ -465,7 +462,7 @@ try_forward_edges (int mode, basic_block b)
{
basic_block new_target = NULL;
bool new_target_threaded = false;
may_thread |= target->flags & BB_DIRTY;
may_thread |= df_get_bb_dirty (target);
if (FORWARDER_BLOCK_P (target)
&& !(single_succ_edge (target)->flags & EDGE_CROSSING)
@ -481,7 +478,7 @@ try_forward_edges (int mode, basic_block b)
of probabilities. */
else if ((mode & CLEANUP_THREADING) && may_thread)
{
edge t = thread_jump (mode, e, target);
edge t = thread_jump (e, target);
if (t)
{
if (!threaded_edges)
@ -644,7 +641,7 @@ merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
/* Scramble the insn chain. */
if (BB_END (a) != PREV_INSN (BB_HEAD (b)))
reorder_insns_nobb (BB_HEAD (a), BB_END (a), PREV_INSN (BB_HEAD (b)));
a->flags |= BB_DIRTY;
df_set_bb_dirty (a);
if (dump_file)
fprintf (dump_file, "Moved block %d before %d and merged.\n",
@ -1707,7 +1704,7 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
redirect_to->count += src1->count;
redirect_to->frequency += src1->frequency;
/* We may have some registers visible through the block. */
redirect_to->flags |= BB_DIRTY;
df_set_bb_dirty (redirect_to);
/* Recompute the frequencies and counts of outgoing edges. */
FOR_EACH_EDGE (s, ei, redirect_to->succs)
@ -1856,8 +1853,8 @@ try_crossjump_bb (int mode, basic_block bb)
/* If nothing changed since the last attempt, there is nothing
we can do. */
if (!first_pass
&& (!(e->src->flags & BB_DIRTY)
&& !(fallthru->src->flags & BB_DIRTY)))
&& (!(df_get_bb_dirty (e->src))
&& !(df_get_bb_dirty (fallthru->src))))
continue;
if (try_crossjump_to_edge (mode, e, fallthru))
@ -1906,8 +1903,8 @@ try_crossjump_bb (int mode, basic_block bb)
/* If nothing changed since the last attempt, there is nothing
we can do. */
if (!first_pass
&& (!(e->src->flags & BB_DIRTY)
&& !(e2->src->flags & BB_DIRTY)))
&& (!(df_get_bb_dirty (e->src))
&& !(df_get_bb_dirty (e2->src))))
continue;
if (try_crossjump_to_edge (mode, e, e2))
@ -1937,7 +1934,7 @@ try_optimize_cfg (int mode)
if (mode & CLEANUP_CROSSJUMP)
add_noreturn_fake_exit_edges ();
if (mode & (CLEANUP_UPDATE_LIFE | CLEANUP_CROSSJUMP | CLEANUP_THREADING))
if (mode & (CLEANUP_CROSSJUMP | CLEANUP_THREADING))
clear_bb_flags ();
FOR_EACH_BB (bb)
@ -2000,7 +1997,7 @@ try_optimize_cfg (int mode)
rtx label = BB_HEAD (b);
delete_insn_chain (label, label, false);
/* In the case label is undeletable, move it after the
/* If the case label is undeletable, move it after the
BASIC_BLOCK note. */
if (NOTE_KIND (BB_HEAD (b)) == NOTE_INSN_DELETED_LABEL)
{
@ -2166,6 +2163,47 @@ delete_unreachable_blocks (void)
tidy_fallthru_edges ();
return changed;
}
/* Delete any jump tables never referenced.  We can't delete them at the
   time of removing tablejump insn as they are referenced by the preceding
   insns computing the destination, so we delay deleting and garbagecollect
   them once life information is computed.  */

void
delete_dead_jumptables (void)
{
  basic_block bb;

  /* A dead jump table does not belong to any basic block.  Scan insns
     between two adjacent basic blocks.  */
  FOR_EACH_BB (bb)
    {
      rtx insn, next;

      /* Walk the insns that sit after BB's end, up to the start of the
	 next basic block.  */
      for (insn = NEXT_INSN (BB_END (bb));
	   insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
	   insn = next)
	{
	  next = NEXT_INSN (insn);
	  /* A dead table is a label whose only "use" is its own
	     preserve bit (LABEL_NUSES == LABEL_PRESERVE_P), directly
	     followed by the jump table insn (ADDR_VEC/ADDR_DIFF_VEC).  */
	  if (LABEL_P (insn)
	      && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
	      && JUMP_P (next)
	      && (GET_CODE (PATTERN (next)) == ADDR_VEC
		  || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
	    {
	      rtx label = insn, jump = next;

	      if (dump_file)
		fprintf (dump_file, "Dead jumptable %i removed\n",
			 INSN_UID (insn));

	      /* Step past the table BEFORE deleting it, so the walk
		 resumes at the insn that follows.  */
	      next = NEXT_INSN (next);
	      delete_insn (jump);
	      delete_insn (label);
	    }
	}
    }
}
/* Tidy the CFG by deleting unreachable code and whatnot. */
@ -2186,7 +2224,7 @@ cleanup_cfg (int mode)
changed = true;
/* We've possibly created trivially dead code. Cleanup it right
now to introduce more opportunities for try_optimize_cfg. */
if (!(mode & (CLEANUP_NO_INSN_DEL | CLEANUP_UPDATE_LIFE))
if (!(mode & (CLEANUP_NO_INSN_DEL))
&& !reload_completed)
delete_trivially_dead_insns (get_insns (), max_reg_num ());
}
@ -2196,39 +2234,26 @@ cleanup_cfg (int mode)
while (try_optimize_cfg (mode))
{
delete_unreachable_blocks (), changed = true;
if (mode & CLEANUP_UPDATE_LIFE)
{
/* Cleaning up CFG introduces more opportunities for dead code
removal that in turn may introduce more opportunities for
cleaning up the CFG. */
if (!update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_DEATH_NOTES
| PROP_SCAN_DEAD_CODE
| PROP_KILL_DEAD_CODE
| ((mode & CLEANUP_LOG_LINKS)
? PROP_LOG_LINKS : 0)))
break;
}
else if (!(mode & CLEANUP_NO_INSN_DEL)
&& (mode & CLEANUP_EXPENSIVE)
&& !reload_completed)
if (!(mode & CLEANUP_NO_INSN_DEL)
&& (mode & CLEANUP_EXPENSIVE)
&& !reload_completed)
{
if (!delete_trivially_dead_insns (get_insns (), max_reg_num ()))
break;
}
else
break;
/* Don't call delete_dead_jumptables in cfglayout mode, because
that function assumes that jump tables are in the insns stream.
But we also don't _have_ to delete dead jumptables in cfglayout
mode because we shouldn't even be looking at things that are
not in a basic block. Dead jumptables are cleaned up when
going out of cfglayout mode. */
if (!(mode & CLEANUP_CFGLAYOUT))
delete_dead_jumptables ();
}
/* Don't call delete_dead_jumptables in cfglayout mode, because
that function assumes that jump tables are in the insns stream.
But we also don't _have_ to delete dead jumptables in cfglayout
mode because we shouldn't even be looking at things that are
not in a basic block. Dead jumptables are cleaned up when
going out of cfglayout mode. */
if (!(mode & CLEANUP_CFGLAYOUT))
delete_dead_jumptables ();
timevar_pop (TV_CLEANUP_CFG);
return changed;
@ -2267,7 +2292,6 @@ static unsigned int
rest_of_handle_jump2 (void)
{
delete_trivially_dead_insns (get_insns (), max_reg_num ());
reg_scan (get_insns (), max_reg_num ());
if (dump_file)
dump_flow_info (dump_file, dump_flags);
cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0)

View file

@ -1,5 +1,5 @@
/* Hooks for cfg representation specific functions.
Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Sebastian Pop <s.pop@laposte.net>
This file is part of GCC.
@ -57,7 +57,7 @@ struct cfg_hooks
/* Creates a new basic block just after basic block B by splitting
everything after specified instruction I. */
basic_block (*split_block) (basic_block b, void * i);
/* Move block B immediately after block A. */
bool (*move_block_after) (basic_block b, basic_block a);

View file

@ -1,5 +1,6 @@
/* Basic block reordering routines for the GNU compiler.
Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
This file is part of GCC.
@ -37,6 +38,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"
/* Holds the interesting trailing notes for the function. */
@ -883,7 +885,7 @@ fixup_reorder_chain (void)
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & EDGE_FALLTHRU)
break;
if (e && !can_fallthru (e->src, e->dest))
force_nonfallthru (e);
}
@ -1108,35 +1110,34 @@ cfg_layout_duplicate_bb (basic_block bb)
new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
}
if (bb->il.rtl->global_live_at_start)
{
new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
bb->il.rtl->global_live_at_start);
COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
bb->il.rtl->global_live_at_end);
}
return new_bb;
}
/* Main entry point to this module - initialize the datastructures for
CFG layout changes. It keeps LOOPS up-to-date if not null.
FLAGS is a set of additional flags to pass to cleanup_cfg(). It should
include CLEANUP_UPDATE_LIFE if liveness information must be kept up
to date. */
FLAGS is a set of additional flags to pass to cleanup_cfg(). */
void
cfg_layout_initialize (unsigned int flags)
{
rtx x;
basic_block bb;
initialize_original_copy_tables ();
cfg_layout_rtl_register_cfg_hooks ();
record_effective_endpoints ();
/* Make sure that the targets of non local gotos are marked. */
for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
{
bb = BLOCK_FOR_INSN (XEXP (x, 0));
bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
}
cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

View file

@ -1,5 +1,6 @@
/* Natural loop discovery code for GNU compiler.
Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
This file is part of GCC.
@ -284,9 +285,6 @@ establish_preds (struct loop *loop, struct loop *father)
unsigned depth = loop_depth (father) + 1;
unsigned i;
/* Remember the current loop depth if it is the largest seen so far. */
cfun->max_loop_depth = MAX (cfun->max_loop_depth, (int) depth);
VEC_truncate (loop_p, loop->superloops, 0);
VEC_reserve (loop_p, gc, loop->superloops, depth);
for (i = 0; VEC_iterate (loop_p, father->superloops, i, ploop); i++)
@ -364,10 +362,6 @@ flow_loops_find (struct loops *loops)
memset (loops, 0, sizeof *loops);
/* We are going to recount the maximum loop depth,
so throw away the last count. */
cfun->max_loop_depth = 0;
/* Taking care of this degenerate case makes the rest of
this code simpler. */
if (n_basic_blocks == NUM_FIXED_BLOCKS)

View file

@ -1,6 +1,6 @@
/* Natural loop functions
Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
2005, 2006, 2007Free Software Foundation, Inc.
This file is part of GCC.
@ -386,7 +386,6 @@ extern rtx get_iv_value (struct rtx_iv *, rtx);
extern bool biv_p (rtx, rtx);
extern void find_simple_exit (struct loop *, struct niter_desc *);
extern void iv_analysis_done (void);
extern struct df *iv_current_loop_df (void);
extern struct niter_desc *get_simple_loop_desc (struct loop *loop);
extern void free_simple_loop_desc (struct loop *loop);

View file

@ -1,6 +1,7 @@
/* Control flow graph manipulation code for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
This file is part of GCC.
@ -60,6 +61,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"
static int can_delete_note_p (rtx);
static int can_delete_label_p (rtx);
@ -307,6 +309,7 @@ create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
bb->flags = BB_NEW | BB_RTL;
link_block (bb, after);
SET_BASIC_BLOCK (bb->index, bb);
df_bb_refs_record (bb->index, false);
update_bb_for_insn (bb);
BB_SET_PARTITION (bb, BB_UNPARTITIONED);
@ -376,13 +379,10 @@ rtl_delete_block (basic_block b)
BB_HEAD (b) = NULL;
delete_insn_chain (insn, end, true);
if (b->il.rtl->global_live_at_start)
{
FREE_REG_SET (b->il.rtl->global_live_at_start);
FREE_REG_SET (b->il.rtl->global_live_at_end);
b->il.rtl->global_live_at_start = NULL;
b->il.rtl->global_live_at_end = NULL;
}
if (dump_file)
fprintf (dump_file, "deleting block %d\n", b->index);
df_bb_delete (b->index);
}
/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
@ -466,12 +466,35 @@ update_bb_for_insn (basic_block bb)
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (!BARRIER_P (insn))
set_block_for_insn (insn, bb);
{
set_block_for_insn (insn, bb);
df_insn_change_bb (insn);
}
if (insn == BB_END (bb))
break;
}
}
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
note associated with the BLOCK. */
static rtx
first_insn_after_basic_block_note (basic_block block)
{
rtx insn;
/* Get the first instruction in the block. */
insn = BB_HEAD (block);
if (insn == NULL_RTX)
return NULL_RTX;
if (LABEL_P (insn))
insn = NEXT_INSN (insn);
gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
return NEXT_INSN (insn);
}
/* Creates a new basic block just after basic block B by splitting
everything after specified instruction I. */
@ -510,32 +533,8 @@ rtl_split_block (basic_block bb, void *insnp)
FOR_EACH_EDGE (e, ei, new_bb->succs)
e->src = new_bb;
if (bb->il.rtl->global_live_at_start)
{
new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
COPY_REG_SET (new_bb->il.rtl->global_live_at_end, bb->il.rtl->global_live_at_end);
/* We now have to calculate which registers are live at the end
of the split basic block and at the start of the new basic
block. Start with those registers that are known to be live
at the end of the original basic block and get
propagate_block to determine which registers are live. */
COPY_REG_SET (new_bb->il.rtl->global_live_at_start, bb->il.rtl->global_live_at_end);
propagate_block (new_bb, new_bb->il.rtl->global_live_at_start, NULL, NULL, 0);
COPY_REG_SET (bb->il.rtl->global_live_at_end,
new_bb->il.rtl->global_live_at_start);
#ifdef HAVE_conditional_execution
/* In the presence of conditional execution we are not able to update
liveness precisely. */
if (reload_completed)
{
bb->flags |= BB_DIRTY;
new_bb->flags |= BB_DIRTY;
}
#endif
}
/* The new block starts off being dirty. */
df_set_bb_dirty (bb);
return new_bb;
}
@ -549,6 +548,9 @@ rtl_merge_blocks (basic_block a, basic_block b)
rtx del_first = NULL_RTX, del_last = NULL_RTX;
int b_empty = 0;
if (dump_file)
fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
/* If there was a CODE_LABEL beginning B, delete it. */
if (LABEL_P (b_head))
{
@ -621,17 +623,22 @@ rtl_merge_blocks (basic_block a, basic_block b)
rtx x;
for (x = a_end; x != b_end; x = NEXT_INSN (x))
set_block_for_insn (x, a);
{
set_block_for_insn (x, a);
df_insn_change_bb (x);
}
set_block_for_insn (b_end, a);
df_insn_change_bb (b_end);
a_end = b_end;
}
df_bb_delete (b->index);
BB_END (a) = a_end;
a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
}
/* Return true when block A and B can be merged. */
static bool
rtl_can_merge_blocks (basic_block a,basic_block b)
@ -830,7 +837,10 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
tmp = NEXT_INSN (tmp))
set_block_for_insn (tmp, src);
{
set_block_for_insn (tmp, src);
df_insn_change_bb (tmp);
}
NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
@ -860,7 +870,6 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
if (e->dest != target)
redirect_edge_succ (e, target);
return e;
}
@ -944,6 +953,7 @@ redirect_branch_edge (edge e, basic_block target)
if (e->dest != target)
e = redirect_edge_succ_nodup (e, target);
return e;
}
@ -972,7 +982,7 @@ rtl_redirect_edge_and_branch (edge e, basic_block target)
if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
{
src->flags |= BB_DIRTY;
df_set_bb_dirty (src);
return ret;
}
@ -980,7 +990,7 @@ rtl_redirect_edge_and_branch (edge e, basic_block target)
if (!ret)
return NULL;
src->flags |= BB_DIRTY;
df_set_bb_dirty (src);
return ret;
}
@ -1088,16 +1098,6 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
jump_block->frequency = EDGE_FREQUENCY (e);
jump_block->loop_depth = target->loop_depth;
if (target->il.rtl->global_live_at_start)
{
jump_block->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
jump_block->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
COPY_REG_SET (jump_block->il.rtl->global_live_at_start,
target->il.rtl->global_live_at_start);
COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
target->il.rtl->global_live_at_start);
}
/* Make sure new block ends up in correct hot/cold section. */
BB_COPY_PARTITION (jump_block, e->src);
@ -1149,6 +1149,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
if (abnormal_edge_flags)
make_edge (src, target, abnormal_edge_flags);
df_mark_solutions_dirty ();
return new_bb;
}
@ -1175,7 +1176,7 @@ rtl_redirect_edge_and_branch_force (edge e, basic_block target)
/* In case the edge redirection failed, try to force it to be non-fallthru
and redirect newly created simplejump. */
e->src->flags |= BB_DIRTY;
df_set_bb_dirty (e->src);
return force_nonfallthru_and_redirect (e, target);
}
@ -1289,17 +1290,6 @@ rtl_split_edge (edge edge_in)
BB_COPY_PARTITION (bb, edge_in->dest);
}
/* ??? This info is likely going to be out of date very soon. */
if (edge_in->dest->il.rtl->global_live_at_start)
{
bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
COPY_REG_SET (bb->il.rtl->global_live_at_start,
edge_in->dest->il.rtl->global_live_at_start);
COPY_REG_SET (bb->il.rtl->global_live_at_end,
edge_in->dest->il.rtl->global_live_at_start);
}
make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
/* For non-fallthru edges, we must adjust the predecessor's
@ -1433,11 +1423,11 @@ commit_one_edge_insertion (edge e)
if (before)
{
emit_insn_before_noloc (insns, before);
emit_insn_before_noloc (insns, before, bb);
last = prev_nonnote_insn (before);
}
else
last = emit_insn_after_noloc (insns, after);
last = emit_insn_after_noloc (insns, after, bb);
if (returnjump_p (last))
{
@ -1515,6 +1505,7 @@ commit_edge_insertions (void)
sbitmap_free (blocks);
}
/* Print out RTL-specific basic block information (live information
at start and end). */
@ -1528,18 +1519,23 @@ rtl_dump_bb (basic_block bb, FILE *outf, int indent)
s_indent = (char *) alloca ((size_t) indent + 1);
memset (s_indent, ' ', (size_t) indent);
s_indent[indent] = '\0';
fprintf (outf, ";;%s Registers live at start: ", s_indent);
dump_regset (bb->il.rtl->global_live_at_start, outf);
putc ('\n', outf);
if (df)
{
df_dump_top (bb, outf);
putc ('\n', outf);
}
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
insn = NEXT_INSN (insn))
print_rtl_single (outf, insn);
fprintf (outf, ";;%s Registers live at end: ", s_indent);
dump_regset (bb->il.rtl->global_live_at_end, outf);
putc ('\n', outf);
if (df)
{
df_dump_bottom (bb, outf);
putc ('\n', outf);
}
}
/* Like print_rtl, but also print out live information for the start of each
@ -1549,7 +1545,6 @@ void
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
rtx tmp_rtx;
if (rtx_first == 0)
fprintf (outf, "(nil)\n");
else
@ -1562,6 +1557,9 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
basic_block bb;
if (df)
df_dump_start (outf);
FOR_EACH_BB_REVERSE (bb)
{
rtx x;
@ -1584,15 +1582,21 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
{
int did_output;
edge_iterator ei;
edge e;
if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
{
fprintf (outf, ";; Start of basic block %d, registers live:",
bb->index);
dump_regset (bb->il.rtl->global_live_at_start, outf);
putc ('\n', outf);
edge e;
edge_iterator ei;
fprintf (outf, ";; Start of basic block (");
FOR_EACH_EDGE (e, ei, bb->preds)
fprintf (outf, " %d", e->src->index);
fprintf (outf, ") -> %d\n", bb->index);
if (df)
{
df_dump_top (bb, outf);
putc ('\n', outf);
}
FOR_EACH_EDGE (e, ei, bb->preds)
{
fputs (";; Pred edge ", outf);
@ -1612,9 +1616,19 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
{
fprintf (outf, ";; End of basic block %d, registers live:",
bb->index);
dump_regset (bb->il.rtl->global_live_at_end, outf);
edge e;
edge_iterator ei;
fprintf (outf, ";; End of basic block %d -> (", bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
fprintf (outf, " %d", e->dest->index);
fprintf (outf, ")\n");
if (df)
{
df_dump_bottom (bb, outf);
putc ('\n', outf);
}
putc ('\n', outf);
FOR_EACH_EDGE (e, ei, bb->succs)
{
@ -1623,7 +1637,6 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
fputc ('\n', outf);
}
}
if (did_output)
putc ('\n', outf);
}
@ -2163,7 +2176,7 @@ purge_dead_edges (basic_block bb)
}
remove_edge (e);
bb->flags |= BB_DIRTY;
df_set_bb_dirty (bb);
purged = true;
}
@ -2231,7 +2244,7 @@ purge_dead_edges (basic_block bb)
}
/* We do not need this edge. */
bb->flags |= BB_DIRTY;
df_set_bb_dirty (bb);
purged = true;
remove_edge (e);
}
@ -2303,7 +2316,7 @@ purge_dead_edges (basic_block bb)
{
if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
{
bb->flags |= BB_DIRTY;
df_set_bb_dirty (bb);
remove_edge (e);
purged = true;
}
@ -2355,7 +2368,6 @@ cfg_layout_split_block (basic_block bb, void *insnp)
return new_bb;
}
/* Redirect Edge to DEST. */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
@ -2372,7 +2384,7 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
if (e->src != ENTRY_BLOCK_PTR
&& (ret = try_redirect_by_replacing_jump (e, dest, true)))
{
src->flags |= BB_DIRTY;
df_set_bb_dirty (src);
return ret;
}
@ -2383,7 +2395,7 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
e->src->index, dest->index);
e->src->flags |= BB_DIRTY;
df_set_bb_dirty (e->src);
redirect_edge_succ (e, dest);
return e;
}
@ -2409,7 +2421,7 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
redirected = redirect_branch_edge (e, dest);
gcc_assert (redirected);
e->flags |= EDGE_FALLTHRU;
e->src->flags |= BB_DIRTY;
df_set_bb_dirty (e->src);
return e;
}
/* In case we are redirecting fallthru edge to the branch edge
@ -2435,7 +2447,7 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
/* We don't want simplejumps in the insn stream during cfglayout. */
gcc_assert (!simplejump_p (BB_END (src)));
src->flags |= BB_DIRTY;
df_set_bb_dirty (src);
return ret;
}
@ -2574,6 +2586,9 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
#endif
if (dump_file)
fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
/* If there was a CODE_LABEL beginning B, delete it. */
if (LABEL_P (BB_HEAD (b)))
{
@ -2595,7 +2610,7 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
{
rtx first = BB_END (a), last;
last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a));
last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
delete_insn_chain (NEXT_INSN (first), last, false);
b->il.rtl->header = NULL;
}
@ -2605,7 +2620,7 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
{
rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
emit_insn_after_noloc (first, BB_END (a));
emit_insn_after_noloc (first, BB_END (a), a);
/* Skip possible DELETED_LABEL insn. */
if (!NOTE_INSN_BASIC_BLOCK_P (first))
first = NEXT_INSN (first);
@ -2621,7 +2636,11 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
for (insn = BB_HEAD (b);
insn != NEXT_INSN (BB_END (b));
insn = NEXT_INSN (insn))
set_block_for_insn (insn, a);
{
set_block_for_insn (insn, a);
df_insn_change_bb (insn);
}
insn = BB_HEAD (b);
/* Skip possible DELETED_LABEL insn. */
if (!NOTE_INSN_BASIC_BLOCK_P (insn))
@ -2632,6 +2651,8 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
delete_insn (insn);
}
df_bb_delete (b->index);
/* Possible tablejumps and barriers should appear after the block. */
if (b->il.rtl->footer)
{
@ -2648,7 +2669,6 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
}
b->il.rtl->footer = NULL;
}
a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
if (dump_file)
fprintf (dump_file, "Merged blocks %d and %d.\n",
@ -2665,18 +2685,6 @@ cfg_layout_split_edge (edge e)
? NEXT_INSN (BB_END (e->src)) : get_insns (),
NULL_RTX, e->src);
/* ??? This info is likely going to be out of date very soon, but we must
create it to avoid getting an ICE later. */
if (e->dest->il.rtl->global_live_at_start)
{
new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
e->dest->il.rtl->global_live_at_start);
COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
e->dest->il.rtl->global_live_at_start);
}
make_edge (new_bb, e->dest, EDGE_FALLTHRU);
redirect_edge_and_branch_force (e, new_bb);
@ -2977,7 +2985,7 @@ insert_insn_end_bb_new (rtx pat, basic_block bb)
#endif
/* FIXME: What if something in cc0/jump uses value set in new
insn? */
new_insn = emit_insn_before_noloc (pat, insn);
new_insn = emit_insn_before_noloc (pat, insn, bb);
}
/* Likewise if the last insn is a call, as will happen in the presence
@ -3008,10 +3016,10 @@ insert_insn_end_bb_new (rtx pat, basic_block bb)
|| NOTE_INSN_BASIC_BLOCK_P (insn))
insn = NEXT_INSN (insn);
new_insn = emit_insn_before_noloc (pat, insn);
new_insn = emit_insn_before_noloc (pat, insn, bb);
}
else
new_insn = emit_insn_after_noloc (pat, insn);
new_insn = emit_insn_after_noloc (pat, insn, bb);
return new_insn;
}
@ -3089,7 +3097,7 @@ struct cfg_hooks rtl_cfg_hooks = {
/* We do not want to declare these functions in a header file, since they
should only be used through the cfghooks interface, and we do not want to
move them here since it would require also moving quite a lot of related
code. */
code. They are in cfglayout.c. */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);
extern basic_block cfg_layout_duplicate_bb (basic_block);

View file

@ -1,6 +1,7 @@
/* Combine stack adjustments.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
This file is part of GCC.
@ -55,6 +56,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "function.h"
#include "expr.h"
#include "basic-block.h"
#include "df.h"
#include "except.h"
#include "toplev.h"
#include "reload.h"
@ -454,9 +456,7 @@ gate_handle_stack_adjustments (void)
static unsigned int
rest_of_handle_stack_adjustments (void)
{
life_analysis (PROP_POSTRELOAD);
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
| (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
cleanup_cfg (flag_crossjumping ? CLEANUP_CROSSJUMP : 0);
/* This is kind of a heuristic. We need to run combine_stack_adjustments
even for machines with possibly nonzero RETURN_POPS_ARGS
@ -465,7 +465,11 @@ rest_of_handle_stack_adjustments (void)
#ifndef PUSH_ROUNDING
if (!ACCUMULATE_OUTGOING_ARGS)
#endif
combine_stack_adjustments ();
{
df_note_add_problem ();
df_analyze ();
combine_stack_adjustments ();
}
return 0;
}
@ -482,6 +486,7 @@ struct tree_opt_pass pass_stack_adjustments =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_df_finish |
TODO_dump_func |
TODO_ggc_collect, /* todo_flags_finish */
0 /* letter */

File diff suppressed because it is too large Load diff

View file

@ -313,6 +313,10 @@ fasynchronous-unwind-tables
Common Report Var(flag_asynchronous_unwind_tables) Optimization
Generate unwind tables that are exact at each instruction boundary
fauto-inc-dec
Common Report Var(flag_auto_inc_dec) Init(1)
Generate auto-inc/dec instructions
; -fcheck-bounds causes gcc to generate array bounds checks.
; For C, C++ and ObjC: defaults off.
; For Java: defaults to on.
@ -388,6 +392,10 @@ fdata-sections
Common Report Var(flag_data_sections) Optimization
Place data items into their own section
fdbg-cnt=
Common RejectNegative Joined
-fdbg-cnt=<counter>:<limit> Set the debug counter limit.
; Nonzero for -fdefer-pop: don't pop args after each function call
; instead save them up to pop many calls' args with one insns.
fdefer-pop
@ -656,6 +664,14 @@ fmudflapir
Common RejectNegative Report Var(flag_mudflap_ignore_reads)
Ignore read operations when inserting mudflap instrumentation
fdce
Common Var(flag_dce) Init(1)
Use the RTL dead code elimination pass
fdse
Common Var(flag_dse) Init(1)
Use the RTL dead store elimination pass
freschedule-modulo-scheduled-loops
Common Report Var(flag_resched_modulo_sched) Optimization
Enable/Disable the traditional scheduling in loops that already passed modulo scheduling

View file

@ -56,7 +56,7 @@ Boston, MA 02110-1301, USA. */
#include "tree-flow.h"
#include "tree-stdarg.h"
#include "tm-constrs.h"
#include "df.h"
/* Specify which cpu to schedule for. */
enum processor_type alpha_tune;
@ -4795,7 +4795,7 @@ alpha_ra_ever_killed (void)
rtx top;
if (!has_hard_reg_initial_val (Pmode, REG_RA))
return regs_ever_live[REG_RA];
return (int)df_regs_ever_live_p (REG_RA);
push_topmost_sequence ();
top = get_insns ();
@ -7091,7 +7091,7 @@ alpha_sa_mask (unsigned long *imaskP, unsigned long *fmaskP)
/* One for every register we have to save. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (! fixed_regs[i] && ! call_used_regs[i]
&& regs_ever_live[i] && i != REG_RA
&& df_regs_ever_live_p (i) && i != REG_RA
&& (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
{
if (i < 32)
@ -7199,7 +7199,7 @@ alpha_sa_size (void)
vms_save_fp_regno = -1;
if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
for (i = 0; i < 32; i++)
if (! fixed_regs[i] && call_used_regs[i] && ! regs_ever_live[i])
if (! fixed_regs[i] && call_used_regs[i] && ! df_regs_ever_live_p (i))
vms_save_fp_regno = i;
if (vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)

View file

@ -1,7 +1,6 @@
/* Subroutines used for code generation on the Argonaut ARC cpu.
Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
2005
Free Software Foundation, Inc.
Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -1057,9 +1056,9 @@ arc_compute_function_type (tree decl)
Don't consider them here. */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
&& (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
&& (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))
#define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM))
/* Return the bytes needed to compute the frame pointer from the current
stack pointer.

View file

@ -52,6 +52,7 @@
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
/* Forward definitions of types. */
typedef struct minipool_node Mnode;
@ -1641,7 +1642,7 @@ use_return_insn (int iscond, rtx sibling)
if (flag_pic
&& arm_pic_register != INVALID_REGNUM
&& regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
&& df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
return 0;
}
@ -1654,18 +1655,18 @@ use_return_insn (int iscond, rtx sibling)
since this also requires an insn. */
if (TARGET_HARD_FLOAT && TARGET_FPA)
for (regno = FIRST_FPA_REGNUM; regno <= LAST_FPA_REGNUM; regno++)
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
return 0;
/* Likewise VFP regs. */
if (TARGET_HARD_FLOAT && TARGET_VFP)
for (regno = FIRST_VFP_REGNUM; regno <= LAST_VFP_REGNUM; regno++)
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
return 0;
if (TARGET_REALLY_IWMMXT)
for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
if (regs_ever_live[regno] && ! call_used_regs [regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
return 0;
return 1;
@ -3460,7 +3461,7 @@ thumb_find_work_register (unsigned long pushed_regs_mask)
register allocation order means that sometimes r3 might be used
but earlier argument registers might not, so check them all. */
for (reg = LAST_ARG_REGNUM; reg >= 0; reg --)
if (!regs_ever_live[reg])
if (!df_regs_ever_live_p (reg))
return reg;
/* Before going on to check the call-saved registers we can try a couple
@ -9770,8 +9771,8 @@ arm_compute_save_reg0_reg12_mask (void)
max_reg = 12;
for (reg = 0; reg <= max_reg; reg++)
if (regs_ever_live[reg]
|| (! current_function_is_leaf && call_used_regs [reg]))
if (df_regs_ever_live_p (reg)
|| (! current_function_is_leaf && call_used_regs[reg]))
save_reg_mask |= (1 << reg);
/* Also save the pic base register if necessary. */
@ -9789,15 +9790,18 @@ arm_compute_save_reg0_reg12_mask (void)
/* In the normal case we only need to save those registers
which are call saved and which are used by this function. */
for (reg = 0; reg <= last_reg; reg++)
if (regs_ever_live[reg] && ! call_used_regs [reg])
if (df_regs_ever_live_p (reg) && ! call_used_regs[reg])
save_reg_mask |= (1 << reg);
/* Handle the frame pointer as a special case. */
if (TARGET_THUMB2 && frame_pointer_needed)
if (! TARGET_APCS_FRAME
&& ! frame_pointer_needed
&& df_regs_ever_live_p (HARD_FRAME_POINTER_REGNUM)
&& ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
else if (! TARGET_APCS_FRAME
&& ! frame_pointer_needed
&& regs_ever_live[HARD_FRAME_POINTER_REGNUM]
&& df_regs_ever_live_p (HARD_FRAME_POINTER_REGNUM)
&& ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
@ -9806,7 +9810,7 @@ arm_compute_save_reg0_reg12_mask (void)
if (flag_pic
&& !TARGET_SINGLE_PIC_BASE
&& arm_pic_register != INVALID_REGNUM
&& (regs_ever_live[PIC_OFFSET_TABLE_REGNUM]
&& (df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM)
|| current_function_uses_pic_offset_table))
save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
@ -9872,11 +9876,11 @@ arm_compute_save_reg_mask (void)
now and then popping it back into the PC. This incurs extra memory
accesses though, so we only do it when optimizing for size, and only
if we know that we will not need a fancy return sequence. */
if (regs_ever_live [LR_REGNUM]
|| (save_reg_mask
&& optimize_size
&& ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
&& !current_function_calls_eh_return))
if (df_regs_ever_live_p (LR_REGNUM)
|| (save_reg_mask
&& optimize_size
&& ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
&& !current_function_calls_eh_return))
save_reg_mask |= 1 << LR_REGNUM;
if (cfun->machine->lr_save_eliminated)
@ -9932,7 +9936,7 @@ thumb1_compute_save_reg_mask (void)
mask = 0;
for (reg = 0; reg < 12; reg ++)
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
mask |= 1 << reg;
if (flag_pic
@ -9986,8 +9990,8 @@ arm_get_vfp_saved_size (void)
regno < LAST_VFP_REGNUM;
regno += 2)
{
if ((!regs_ever_live[regno] || call_used_regs[regno])
&& (!regs_ever_live[regno + 1] || call_used_regs[regno + 1]))
if ((!df_regs_ever_live_p (regno) || call_used_regs[regno])
&& (!df_regs_ever_live_p (regno + 1) || call_used_regs[regno + 1]))
{
if (count > 0)
{
@ -10389,7 +10393,7 @@ arm_output_epilogue (rtx sibling)
if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
{
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
{
floats_offset += 12;
asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
@ -10402,7 +10406,7 @@ arm_output_epilogue (rtx sibling)
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
{
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
{
floats_offset += 12;
@ -10448,8 +10452,8 @@ arm_output_epilogue (rtx sibling)
start_reg = FIRST_VFP_REGNUM;
for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
{
if ((!regs_ever_live[reg] || call_used_regs[reg])
&& (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
if ((!df_regs_ever_live_p (reg) || call_used_regs[reg])
&& (!df_regs_ever_live_p (reg + 1) || call_used_regs[reg + 1]))
{
if (start_reg != reg)
vfp_output_fldmd (f, IP_REGNUM,
@ -10476,7 +10480,7 @@ arm_output_epilogue (rtx sibling)
lrm_count += (lrm_count % 2 ? 2 : 1);
for (reg = LAST_IWMMXT_REGNUM; reg >= FIRST_IWMMXT_REGNUM; reg--)
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
{
asm_fprintf (f, "\twldrd\t%r, [%r, #-%d]\n",
reg, FP_REGNUM, lrm_count * 4);
@ -10557,7 +10561,7 @@ arm_output_epilogue (rtx sibling)
if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
{
for (reg = FIRST_FPA_REGNUM; reg <= LAST_FPA_REGNUM; reg++)
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
reg, SP_REGNUM);
}
@ -10567,7 +10571,7 @@ arm_output_epilogue (rtx sibling)
for (reg = FIRST_FPA_REGNUM; reg <= LAST_FPA_REGNUM; reg++)
{
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
{
if (reg - start_reg == 3)
{
@ -10598,8 +10602,8 @@ arm_output_epilogue (rtx sibling)
start_reg = FIRST_VFP_REGNUM;
for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
{
if ((!regs_ever_live[reg] || call_used_regs[reg])
&& (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
if ((!df_regs_ever_live_p (reg) || call_used_regs[reg])
&& (!df_regs_ever_live_p (reg + 1) || call_used_regs[reg + 1]))
{
if (start_reg != reg)
vfp_output_fldmd (f, SP_REGNUM,
@ -10615,7 +10619,7 @@ arm_output_epilogue (rtx sibling)
}
if (TARGET_IWMMXT)
for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
asm_fprintf (f, "\twldrd\t%r, [%r], #8\n", reg, SP_REGNUM);
/* If we can, restore the LR into the PC. */
@ -10957,7 +10961,7 @@ thumb_force_lr_save (void)
return !cfun->machine->lr_save_eliminated
&& (!leaf_function_p ()
|| thumb_far_jump_used_p ()
|| regs_ever_live [LR_REGNUM]);
|| df_regs_ever_live_p (LR_REGNUM));
}
@ -11065,7 +11069,7 @@ arm_get_frame_offsets (void)
for (regno = FIRST_IWMMXT_REGNUM;
regno <= LAST_IWMMXT_REGNUM;
regno++)
if (regs_ever_live [regno] && ! call_used_regs [regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
saved += 8;
}
@ -11074,7 +11078,7 @@ arm_get_frame_offsets (void)
{
/* Space for saved FPA registers. */
for (regno = FIRST_FPA_REGNUM; regno <= LAST_FPA_REGNUM; regno++)
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
saved += 12;
/* Space for saved VFP registers. */
@ -11213,7 +11217,7 @@ arm_save_coproc_regs(void)
rtx insn;
for (reg = LAST_IWMMXT_REGNUM; reg >= FIRST_IWMMXT_REGNUM; reg--)
if (regs_ever_live[reg] && ! call_used_regs [reg])
if (df_regs_ever_live_p (reg) && ! call_used_regs[reg])
{
insn = gen_rtx_PRE_DEC (V2SImode, stack_pointer_rtx);
insn = gen_rtx_MEM (V2SImode, insn);
@ -11227,7 +11231,7 @@ arm_save_coproc_regs(void)
if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
{
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
{
insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
insn = gen_rtx_MEM (XFmode, insn);
@ -11242,7 +11246,7 @@ arm_save_coproc_regs(void)
for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
{
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
{
if (start_reg - reg == 3)
{
@ -11277,8 +11281,8 @@ arm_save_coproc_regs(void)
for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
{
if ((!regs_ever_live[reg] || call_used_regs[reg])
&& (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
if ((!df_regs_ever_live_p (reg) || call_used_regs[reg])
&& (!df_regs_ever_live_p (reg + 1) || call_used_regs[reg + 1]))
{
if (start_reg != reg)
saved_size += vfp_emit_fstmd (start_reg,
@ -11423,7 +11427,7 @@ arm_expand_prologue (void)
doesn't need to be unwound, as it doesn't contain a value
inherited from the caller. */
if (regs_ever_live[3] == 0)
if (df_regs_ever_live_p (3) == false)
insn = emit_set_insn (gen_rtx_REG (SImode, 3), ip_rtx);
else if (args_to_push == 0)
{
@ -11517,7 +11521,7 @@ arm_expand_prologue (void)
if (IS_NESTED (func_type))
{
/* Recover the static chain register. */
if (regs_ever_live [3] == 0
if (!df_regs_ever_live_p (3)
|| saved_pretend_args)
insn = gen_rtx_REG (SImode, 3);
else /* if (current_function_pretend_args_size == 0) */
@ -11586,10 +11590,7 @@ arm_expand_prologue (void)
/* If the link register is being kept alive, with the return address in it,
then make sure that it does not get reused by the ce2 pass. */
if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
{
emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
cfun->machine->lr_save_eliminated = 1;
}
cfun->machine->lr_save_eliminated = 1;
}
/* Print condition code to STREAM. Helper function for arm_print_operand. */
@ -13922,7 +13923,7 @@ thumb_exit (FILE *f, int reg_containing_return_addr)
{
/* If we can deduce the registers used from the function's
return value. This is more reliable that examining
regs_ever_live[] because that will be set if the register is
df_regs_ever_live_p () because that will be set if the register is
ever used in the function, not just if the register is used
to hold a return value. */
@ -14203,7 +14204,7 @@ thumb_far_jump_used_p (void)
If we need doubleword stack alignment this could affect the other
elimination offsets so we can't risk getting it wrong. */
if (regs_ever_live [ARG_POINTER_REGNUM])
if (df_regs_ever_live_p (ARG_POINTER_REGNUM))
cfun->machine->arg_pointer_live = 1;
else if (!cfun->machine->arg_pointer_live)
return 0;
@ -14267,7 +14268,7 @@ thumb_unexpanded_epilogue (void)
high_regs_pushed = bit_count (live_regs_mask & 0x0f00);
/* If we can deduce the registers used from the function's return value.
This is more reliable that examining regs_ever_live[] because that
This is more reliable that examining df_regs_ever_live_p () because that
will be set if the register is ever used in the function, not just if
the register is used to hold a return value. */
size = arm_size_return_regs ();
@ -14638,11 +14639,6 @@ thumb1_expand_prologue (void)
cfun->machine->lr_save_eliminated = !thumb_force_lr_save ();
if (live_regs_mask & 0xff)
cfun->machine->lr_save_eliminated = 0;
/* If the link register is being kept alive, with the return address in it,
then make sure that it does not get reused by the ce2 pass. */
if (cfun->machine->lr_save_eliminated)
emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
}
@ -14691,10 +14687,10 @@ thumb1_expand_epilogue (void)
/* Emit a clobber for each insn that will be restored in the epilogue,
so that flow2 will get register lifetimes correct. */
for (regno = 0; regno < 13; regno++)
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, regno)));
if (! regs_ever_live[LR_REGNUM])
if (! df_regs_ever_live_p (LR_REGNUM))
emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
}

View file

@ -985,7 +985,7 @@ extern int arm_structure_size_boundary;
call-clobbered. */
#define HARD_REGNO_RENAME_OK(SRC, DST) \
(! IS_INTERRUPT (cfun->machine->func_type) || \
regs_ever_live[DST])
df_regs_ever_live_p (DST))
/* Register and constant classes. */
@ -1600,7 +1600,7 @@ typedef struct
frame. */
#define EXIT_IGNORE_STACK 1
#define EPILOGUE_USES(REGNO) (reload_completed && (REGNO) == LR_REGNUM)
#define EPILOGUE_USES(REGNO) ((REGNO) == LR_REGNUM)
/* Determine if the epilogue should be output as RTL.
You should override this if you define FUNCTION_EXTRA_EPILOGUE. */

View file

@ -460,7 +460,7 @@ avr_regs_to_save (HARD_REG_SET *set)
continue;
if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
|| (regs_ever_live[reg]
|| (df_regs_ever_live_p (reg)
&& (int_or_sig_p || !call_used_regs[reg])
&& !(frame_pointer_needed
&& (reg == REG_Y || reg == (REG_Y+1)))))
@ -517,7 +517,7 @@ sequent_regs_live (void)
{
if (!call_used_regs[reg])
{
if (regs_ever_live[reg])
if (df_regs_ever_live_p (reg))
{
++live_seq;
++cur_seq;
@ -529,7 +529,7 @@ sequent_regs_live (void)
if (!frame_pointer_needed)
{
if (regs_ever_live[REG_Y])
if (df_regs_ever_live_p (REG_Y))
{
++live_seq;
++cur_seq;
@ -537,7 +537,7 @@ sequent_regs_live (void)
else
cur_seq = 0;
if (regs_ever_live[REG_Y+1])
if (df_regs_ever_live_p (REG_Y+1))
{
++live_seq;
++cur_seq;
@ -5815,7 +5815,7 @@ avr_peep2_scratch_safe (rtx scratch)
for (reg = first_reg; reg <= last_reg; reg++)
{
if (!regs_ever_live[reg])
if (!df_regs_ever_live_p (reg))
return 0;
}
}

View file

@ -1,5 +1,5 @@
/* The Blackfin code generation auxiliary output file.
Copyright (C) 2005, 2006 Free Software Foundation, Inc.
Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Analog Devices.
This file is part of GCC.
@ -250,7 +250,7 @@ n_dregs_to_save (bool is_inthandler)
for (i = REG_R0; i <= REG_R7; i++)
{
if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
if (df_regs_ever_live_p (i) && (is_inthandler || ! call_used_regs[i]))
return REG_R7 - i + 1;
if (current_function_calls_eh_return)
@ -278,7 +278,7 @@ n_pregs_to_save (bool is_inthandler)
unsigned i;
for (i = REG_P0; i <= REG_P5; i++)
if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
if ((df_regs_ever_live_p (i) && (is_inthandler || ! call_used_regs[i]))
|| (!TARGET_FDPIC
&& i == PIC_OFFSET_TABLE_REGNUM
&& (current_function_uses_pic_offset_table
@ -292,7 +292,7 @@ n_pregs_to_save (bool is_inthandler)
static bool
must_save_fp_p (void)
{
return frame_pointer_needed || regs_ever_live[REG_FP];
return frame_pointer_needed || df_regs_ever_live_p (REG_FP);
}
static bool
@ -513,7 +513,7 @@ n_regs_saved_by_prologue (void)
for (i = REG_P7 + 1; i < REG_CC; i++)
if (all
|| regs_ever_live[i]
|| df_regs_ever_live_p (i)
|| (!leaf_function_p () && call_used_regs[i]))
n += i == REG_A0 || i == REG_A1 ? 2 : 1;
}
@ -815,7 +815,7 @@ expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
for (i = REG_P7 + 1; i < REG_CC; i++)
if (all
|| regs_ever_live[i]
|| df_regs_ever_live_p (i)
|| (!leaf_function_p () && call_used_regs[i]))
{
if (i == REG_A0 || i == REG_A1)
@ -845,23 +845,11 @@ expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
rtx insn;
insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
NULL_RTX);
insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
NULL_RTX);
insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
NULL_RTX);
insn = emit_move_insn (r1reg, spreg);
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
NULL_RTX);
insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
NULL_RTX);
insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
NULL_RTX);
}
}
@ -899,7 +887,7 @@ expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
for (i = REG_CC - 1; i > REG_P7; i--)
if (all
|| regs_ever_live[i]
|| df_regs_ever_live_p (i)
|| (!leaf_function_p () && call_used_regs[i]))
{
if (i == REG_A0 || i == REG_A1)
@ -948,7 +936,6 @@ bfin_load_pic_reg (rtx dest)
gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
UNSPEC_LIBRARY_OFFSET));
insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
return dest;
}
@ -1068,7 +1055,7 @@ bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
call-clobbered. */
if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
&& !regs_ever_live[new_reg])
&& !df_regs_ever_live_p (new_reg))
return 0;
return 1;

View file

@ -1,6 +1,6 @@
/* Subroutines for assembler code output on the TMS320C[34]x
Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
2004, 2005
2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
@ -750,13 +750,13 @@ c4x_isr_reg_used_p (unsigned int regno)
/* Only save/restore regs in leaf function that are used. */
if (c4x_leaf_function)
return regs_ever_live[regno] && fixed_regs[regno] == 0;
return df_regs_ever_live_p (regno) && fixed_regs[regno] == 0;
/* Only save/restore regs that are used by the ISR and regs
that are likely to be used by functions the ISR calls
if they are not fixed. */
return IS_EXT_REGNO (regno)
|| ((regs_ever_live[regno] || call_used_regs[regno])
|| ((df_regs_ever_live_p (regno) || call_used_regs[regno])
&& fixed_regs[regno] == 0);
}
@ -890,9 +890,9 @@ c4x_expand_prologue (void)
/* We need to clear the repeat mode flag if the ISR is
going to use a RPTB instruction or uses the RC, RS, or RE
registers. */
if (regs_ever_live[RC_REGNO]
|| regs_ever_live[RS_REGNO]
|| regs_ever_live[RE_REGNO])
if (df_regs_ever_live_p (RC_REGNO)
|| df_regs_ever_live_p (RS_REGNO)
|| df_regs_ever_live_p (RE_REGNO))
{
insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
RTX_FRAME_RELATED_P (insn) = 1;
@ -983,7 +983,7 @@ c4x_expand_prologue (void)
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
{
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
{
if (IS_FLOAT_CALL_SAVED_REGNO (regno))
{
@ -1111,7 +1111,7 @@ c4x_expand_epilogue(void)
where required. */
for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
{
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
{
if (regno == AR3_REGNO && dont_pop_ar3)
continue;
@ -1220,7 +1220,7 @@ c4x_null_epilogue_p (void)
&& ! get_frame_size ())
{
for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
if (regs_ever_live[regno] && ! call_used_regs[regno]
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno]
&& (regno != AR3_REGNO))
return 1;
return 0;

View file

@ -1,6 +1,6 @@
/* Definitions of target machine for GNU compiler. TMS320C[34]x
Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
2003, 2004, 2005 Free Software Foundation, Inc.
2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
@ -887,7 +887,7 @@ enum reg_class
int regno; \
int offset = 0; \
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) \
if (regs_ever_live[regno] && ! call_used_regs[regno]) \
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno]) \
offset += TARGET_PRESERVE_FLOAT \
&& IS_FLOAT_CALL_SAVED_REGNO (regno) ? 2 : 1; \
(DEPTH) = -(offset + get_frame_size ()); \
@ -906,7 +906,7 @@ enum reg_class
int regno; \
int offset = 0; \
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) \
if (regs_ever_live[regno] && ! call_used_regs[regno]) \
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno]) \
offset += TARGET_PRESERVE_FLOAT \
&& IS_FLOAT_CALL_SAVED_REGNO (regno) ? 2 : 1; \
(OFFSET) = -(offset + get_frame_size ()); \

View file

@ -1,5 +1,5 @@
/* Definitions for GCC. Part of the machine description for CRIS.
Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Axis Communications. Written by Hans-Peter Nilsson.
@ -578,13 +578,13 @@ static int
cris_reg_saved_in_regsave_area (unsigned int regno, bool got_really_used)
{
return
(((regs_ever_live[regno]
(((df_regs_ever_live_p (regno)
&& !call_used_regs[regno])
|| (regno == PIC_OFFSET_TABLE_REGNUM
&& (got_really_used
/* It is saved anyway, if there would be a gap. */
|| (flag_pic
&& regs_ever_live[regno + 1]
&& df_regs_ever_live_p (regno + 1)
&& !call_used_regs[regno + 1]))))
&& (regno != FRAME_POINTER_REGNUM || !frame_pointer_needed)
&& regno != CRIS_SRP_REGNUM)
@ -1122,7 +1122,7 @@ cris_return_addr_rtx (int count, rtx frameaddr ATTRIBUTE_UNUSED)
bool
cris_return_address_on_stack (void)
{
return regs_ever_live[CRIS_SRP_REGNUM]
return df_regs_ever_live_p (CRIS_SRP_REGNUM)
|| cfun->machine->needs_return_address_on_stack;
}

View file

@ -1,6 +1,6 @@
/* Output routines for GCC for CRX.
Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
2002, 2003, 2004 Free Software Foundation, Inc.
2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -272,7 +272,7 @@ crx_compute_save_regs (void)
* for the sake of its sons. */
save_regs[regno] = 1;
else if (regs_ever_live[regno])
else if (df_regs_ever_live_p (regno))
/* This reg is used - save it. */
save_regs[regno] = 1;
else
@ -282,7 +282,7 @@ crx_compute_save_regs (void)
else
{
/* If this reg is used and not call-used (except RA), save it. */
if (regs_ever_live[regno]
if (df_regs_ever_live_p (regno)
&& (!call_used_regs[regno] || regno == RETURN_ADDRESS_REGNUM))
save_regs[regno] = 1;
else

View file

@ -1,6 +1,6 @@
/* Definitions of target machine for GNU compiler, for CRX.
Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
2001, 2002, 2003, 2004 Free Software Foundation, Inc.
2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -165,7 +165,7 @@ do { \
/* Interrupt functions can only use registers that have already been saved by
* the prologue, even if they would normally be call-clobbered. */
#define HARD_REGNO_RENAME_OK(SRC, DEST) \
(!crx_interrupt_function_p () || regs_ever_live[DEST])
(!crx_interrupt_function_p () || df_regs_ever_live_p (DEST))
#define MODES_TIEABLE_P(MODE1, MODE2) 1

View file

@ -1,6 +1,6 @@
/* Functions for generic Darwin as target machine for GNU C compiler.
Copyright (C) 1989, 1990, 1991, 1992, 1993, 2000, 2001, 2002, 2003, 2004,
2005
2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Apple Computer Inc.
@ -45,6 +45,7 @@ Boston, MA 02110-1301, USA. */
#include "tm_p.h"
#include "toplev.h"
#include "hashtab.h"
#include "df.h"
/* Darwin supports a feature called fix-and-continue, which is used
for rapid turn around debugging. When code is compiled with the
@ -777,7 +778,7 @@ machopic_legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
#endif
if (reload_in_progress)
regs_ever_live[REGNO (pic)] = 1;
df_set_regs_ever_live (REGNO (pic), true);
pic_ref = gen_rtx_PLUS (Pmode, pic,
gen_pic_offset (XEXP (orig, 0),
pic_base));
@ -848,7 +849,7 @@ machopic_legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
pic_offset_table_rtx));
#endif
if (reload_in_progress)
regs_ever_live[REGNO (pic)] = 1;
df_set_regs_ever_live (REGNO (pic), true);
pic_ref = gen_rtx_PLUS (Pmode,
pic,
gen_pic_offset (orig, pic_base));

View file

@ -137,11 +137,11 @@ static int fr30_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
#define MUST_SAVE_REGISTER(regno) \
( (regno) != RETURN_POINTER_REGNUM \
&& (regno) != FRAME_POINTER_REGNUM \
&& regs_ever_live [regno] \
&& df_regs_ever_live_p (regno) \
&& ! call_used_regs [regno] )
#define MUST_SAVE_FRAME_POINTER (regs_ever_live [FRAME_POINTER_REGNUM] || frame_pointer_needed)
#define MUST_SAVE_RETURN_POINTER (regs_ever_live [RETURN_POINTER_REGNUM] || current_function_profile)
#define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM) || frame_pointer_needed)
#define MUST_SAVE_RETURN_POINTER (df_regs_ever_live_p (RETURN_POINTER_REGNUM) || current_function_profile)
#if UNITS_PER_WORD == 4
#define WORD_ALIGN(SIZE) (((SIZE) + 3) & ~3)

View file

@ -1,4 +1,4 @@
/* Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005
/* Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Red Hat, Inc.
@ -51,6 +51,7 @@ Boston, MA 02110-1301, USA. */
#include "targhooks.h"
#include "integrate.h"
#include "langhooks.h"
#include "df.h"
#ifndef FRV_INLINE
#define FRV_INLINE inline
@ -1167,7 +1168,7 @@ frv_stack_info (void)
default:
for (regno = first; regno <= last; regno++)
{
if ((regs_ever_live[regno] && !call_used_regs[regno])
if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
|| (current_function_calls_eh_return
&& (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
|| (!TARGET_FDPIC && flag_pic
@ -1185,7 +1186,7 @@ frv_stack_info (void)
break;
case STACK_REGS_LR:
if (regs_ever_live[LR_REGNO]
if (df_regs_ever_live_p (LR_REGNO)
|| profile_flag
/* This is set for __builtin_return_address, etc. */
|| cfun->machine->frame_needed
@ -1498,7 +1499,7 @@ frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
rtx insn;
/* Just to check that the above comment is true. */
gcc_assert (!regs_ever_live[GPR_FIRST + 3]);
gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
/* Generate the instruction that saves the link register. */
fprintf (file, "\tmovsg lr,gr3\n");
@ -1518,7 +1519,7 @@ frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
REGNO (address) = GPR_FIRST + 3;
SET_REGNO (address, GPR_FIRST + 3);
}
}
}
@ -5293,15 +5294,15 @@ frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
for (j = CC_FIRST; j <= CC_LAST; j++)
if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
{
if (REGNO_REG_SET_P (then_bb->il.rtl->global_live_at_start, j))
if (REGNO_REG_SET_P (DF_LIVE_IN (rtl_df, then_bb), j))
continue;
if (else_bb
&& REGNO_REG_SET_P (else_bb->il.rtl->global_live_at_start, j))
&& REGNO_REG_SET_P (DF_LIVE_IN (rtl_df, else_bb), j))
continue;
if (join_bb
&& REGNO_REG_SET_P (join_bb->il.rtl->global_live_at_start, j))
&& REGNO_REG_SET_P (DF_LIVE_IN (rtl_df, join_bb), j))
continue;
SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
@ -5323,7 +5324,7 @@ frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
/* Remove anything live at the beginning of the join block from being
available for allocation. */
EXECUTE_IF_SET_IN_REG_SET (join_bb->il.rtl->global_live_at_start, 0, regno, rsi)
EXECUTE_IF_SET_IN_REG_SET (DF_LIVE_IN (rtl_df, join_bb), 0, regno, rsi)
{
if (regno < FIRST_PSEUDO_REGISTER)
CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
@ -5367,7 +5368,7 @@ frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
/* Anything live at the beginning of the block is obviously unavailable
for allocation. */
EXECUTE_IF_SET_IN_REG_SET (bb[j]->il.rtl->global_live_at_start, 0, regno, rsi)
EXECUTE_IF_SET_IN_REG_SET (DF_LIVE_IN (rtl_df, bb[j]), 0, regno, rsi)
{
if (regno < FIRST_PSEUDO_REGISTER)
CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
@ -6021,7 +6022,7 @@ frv_ifcvt_modify_insn (ce_if_block_t *ce_info,
severely. */
&& ce_info->join_bb
&& ! (REGNO_REG_SET_P
(ce_info->join_bb->il.rtl->global_live_at_start,
(DF_LIVE_IN (rtl_df, ce_info->join_bb),
REGNO (SET_DEST (set))))
/* Similarly, we must not unconditionally set a reg
used as scratch in the THEN branch if the same reg
@ -6029,7 +6030,7 @@ frv_ifcvt_modify_insn (ce_if_block_t *ce_info,
&& (! ce_info->else_bb
|| BLOCK_FOR_INSN (insn) == ce_info->else_bb
|| ! (REGNO_REG_SET_P
(ce_info->else_bb->il.rtl->global_live_at_start,
(DF_LIVE_IN (rtl_df, ce_info->else_bb),
REGNO (SET_DEST (set))))))
pattern = set;
@ -7641,7 +7642,7 @@ frv_reorder_packet (void)
for (from = 0; from < to - 1; from++)
{
remove_insn (insns[from]);
add_insn_before (insns[from], insns[to - 1]);
add_insn_before (insns[from], insns[to - 1], NULL);
SET_PACKING_FLAG (insns[from]);
}
}
@ -8632,7 +8633,7 @@ frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
ACC_FIRST + INTVAL (opval));
if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
REGNO (reg) = ACCG_FIRST + INTVAL (opval);
SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
{

View file

@ -1,6 +1,6 @@
/* Subroutines for insn-output.c for Renesas H8/300.
Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Steve Chamberlain (sac@cygnus.com),
Jim Wilson (wilson@cygnus.com), and Doug Evans (dje@cygnus.com).
@ -494,12 +494,12 @@ byte_reg (rtx x, int b)
&& ! TREE_THIS_VOLATILE (current_function_decl) \
&& (h8300_saveall_function_p (current_function_decl) \
/* Save any call saved register that was used. */ \
|| (regs_ever_live[regno] && !call_used_regs[regno]) \
|| (df_regs_ever_live_p (regno) && !call_used_regs[regno]) \
/* Save the frame pointer if it was used. */ \
|| (regno == HARD_FRAME_POINTER_REGNUM && regs_ever_live[regno]) \
|| (regno == HARD_FRAME_POINTER_REGNUM && df_regs_ever_live_p (regno)) \
/* Save any register used in an interrupt handler. */ \
|| (h8300_current_function_interrupt_function_p () \
&& regs_ever_live[regno]) \
&& df_regs_ever_live_p (regno)) \
/* Save call clobbered registers in non-leaf interrupt \
handlers. */ \
|| (h8300_current_function_interrupt_function_p () \
@ -5607,7 +5607,7 @@ h8300_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
call-clobbered. */
if (h8300_current_function_interrupt_function_p ()
&& !regs_ever_live[new_reg])
&& !df_regs_ever_live_p (new_reg))
return 0;
return 1;

View file

@ -1,6 +1,6 @@
;; GCC machine description for Renesas H8/300
;; Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
;; 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
;; 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
;; Contributed by Steve Chamberlain (sac@cygnus.com),
;; Jim Wilson (wilson@cygnus.com), and Doug Evans (dje@cygnus.com).
@ -1210,7 +1210,7 @@
[(set (match_operand:HI 0 "stack_pointer_operand" "")
(plus:HI (match_dup 0)
(match_operand 1 "const_int_gt_2_operand" "")))]
"TARGET_H8300 && flow2_completed"
"TARGET_H8300 && epilogue_completed"
[(const_int 0)]
"split_adds_subs (HImode, operands); DONE;")
@ -3017,7 +3017,7 @@
[(match_dup 0)
(match_operand:QI 1 "register_operand" "")]))
(clobber (match_operand:QI 3 "register_operand" ""))]
"flow2_completed
"epilogue_completed
&& find_regno_note (insn, REG_DEAD, REGNO (operands[1]))"
[(set (cc0)
(match_dup 1))
@ -3048,7 +3048,7 @@
[(match_dup 0)
(match_operand:QI 1 "register_operand" "")]))
(clobber (match_operand:QI 3 "register_operand" ""))]
"flow2_completed
"epilogue_completed
&& !find_regno_note (insn, REG_DEAD, REGNO (operands[1]))"
[(set (match_dup 3)
(match_dup 1))
@ -4183,7 +4183,7 @@
(match_dup 0)))
(clobber (match_operand:SI 2 "register_operand" ""))]
"(TARGET_H8300H || TARGET_H8300S)
&& flow2_completed
&& epilogue_completed
&& find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
&& REGNO (operands[0]) != REGNO (operands[1])"
[(parallel [(set (match_dup 3)
@ -4203,7 +4203,7 @@
(match_dup 0)))
(clobber (match_operand:SI 2 "register_operand" ""))]
"(TARGET_H8300H || TARGET_H8300S)
&& flow2_completed
&& epilogue_completed
&& !(find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
&& REGNO (operands[0]) != REGNO (operands[1]))"
[(set (match_dup 2)
@ -4286,7 +4286,7 @@
(match_dup 0)))
(clobber (match_operand:SI 2 "register_operand" ""))]
"(TARGET_H8300H || TARGET_H8300S)
&& flow2_completed
&& epilogue_completed
&& find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
&& REGNO (operands[0]) != REGNO (operands[1])"
[(parallel [(set (match_dup 3)
@ -4306,7 +4306,7 @@
(match_dup 0)))
(clobber (match_operand:SI 2 "register_operand" ""))]
"(TARGET_H8300H || TARGET_H8300S)
&& flow2_completed
&& epilogue_completed
&& !(find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
&& REGNO (operands[0]) != REGNO (operands[1]))"
[(set (match_dup 2)

View file

@ -49,6 +49,7 @@ Boston, MA 02110-1301, USA. */
#include "cgraph.h"
#include "tree-gimple.h"
#include "dwarf2.h"
#include "df.h"
#include "tm-constrs.h"
#include "params.h"
@ -3014,7 +3015,7 @@ ix86_eax_live_at_start_p (void)
to correct at this point. This gives false positives for broken
functions that might use uninitialized data that happens to be
allocated in eax, but who cares? */
return REGNO_REG_SET_P (ENTRY_BLOCK_PTR->il.rtl->global_live_at_end, 0);
return REGNO_REG_SET_P (DF_LIVE_OUT (ENTRY_BLOCK_PTR), 0);
}
/* Return true if TYPE has a variable argument list. */
@ -5580,7 +5581,7 @@ ix86_select_alt_pic_regnum (void)
{
int i;
for (i = 2; i >= 0; --i)
if (!regs_ever_live[i])
if (!df_regs_ever_live_p (i))
return i;
}
@ -5593,7 +5594,7 @@ ix86_save_reg (unsigned int regno, int maybe_eh_return)
{
if (pic_offset_table_rtx
&& regno == REAL_PIC_OFFSET_TABLE_REGNUM
&& (regs_ever_live[REAL_PIC_OFFSET_TABLE_REGNUM]
&& (df_regs_ever_live_p (REAL_PIC_OFFSET_TABLE_REGNUM)
|| current_function_profile
|| current_function_calls_eh_return
|| current_function_uses_const_pool))
@ -5620,7 +5621,7 @@ ix86_save_reg (unsigned int regno, int maybe_eh_return)
&& regno == REGNO (cfun->machine->force_align_arg_pointer))
return 1;
return (regs_ever_live[regno]
return (df_regs_ever_live_p (regno)
&& !call_used_regs[regno]
&& !fixed_regs[regno]
&& (regno != HARD_FRAME_POINTER_REGNUM || !frame_pointer_needed));
@ -6106,13 +6107,13 @@ ix86_expand_prologue (void)
pic_reg_used = false;
if (pic_offset_table_rtx
&& (regs_ever_live[REAL_PIC_OFFSET_TABLE_REGNUM]
&& (df_regs_ever_live_p (REAL_PIC_OFFSET_TABLE_REGNUM)
|| current_function_profile))
{
unsigned int alt_pic_reg_used = ix86_select_alt_pic_regnum ();
if (alt_pic_reg_used != INVALID_REGNUM)
REGNO (pic_offset_table_rtx) = alt_pic_reg_used;
SET_REGNO (pic_offset_table_rtx, alt_pic_reg_used);
pic_reg_used = true;
}
@ -6130,9 +6131,7 @@ ix86_expand_prologue (void)
LABEL_PRESERVE_P (label) = 1;
gcc_assert (REGNO (pic_offset_table_rtx) != REGNO (tmp_reg));
insn = emit_insn (gen_set_rip_rex64 (pic_offset_table_rtx, label));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
insn = emit_insn (gen_set_got_offset_rex64 (tmp_reg, label));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
insn = emit_insn (gen_adddi3 (pic_offset_table_rtx,
pic_offset_table_rtx, tmp_reg));
}
@ -6141,20 +6140,16 @@ ix86_expand_prologue (void)
}
else
insn = emit_insn (gen_set_got (pic_offset_table_rtx));
/* Even with accurate pre-reload life analysis, we can wind up
deleting all references to the pic register after reload.
Consider if cross-jumping unifies two sides of a branch
controlled by a comparison vs the only read from a global.
In which case, allow the set_got to be deleted, though we're
too late to do anything about the ebx save in the prologue. */
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
}
/* Prevent function calls from be scheduled before the call to mcount.
In the pic_reg_used case, make sure that the got load isn't deleted. */
if (current_function_profile)
emit_insn (gen_blockage (pic_reg_used ? pic_offset_table_rtx : const0_rtx));
{
if (pic_reg_used)
emit_insn (gen_prologue_use (pic_offset_table_rtx));
emit_insn (gen_blockage ());
}
}
/* Emit code to restore saved registers using MOV insns. First register
@ -6366,7 +6361,7 @@ ix86_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
if (pic_offset_table_rtx)
REGNO (pic_offset_table_rtx) = REAL_PIC_OFFSET_TABLE_REGNUM;
SET_REGNO (pic_offset_table_rtx, REAL_PIC_OFFSET_TABLE_REGNUM);
#if TARGET_MACHO
/* Mach-O doesn't support labels at the end of objects, so if
it looks like we might want one, insert a NOP. */
@ -7193,7 +7188,7 @@ legitimize_pic_address (rtx orig, rtx reg)
base address (@GOTOFF). */
if (reload_in_progress)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
if (GET_CODE (addr) == CONST)
addr = XEXP (addr, 0);
if (GET_CODE (addr) == PLUS)
@ -7225,7 +7220,7 @@ legitimize_pic_address (rtx orig, rtx reg)
base address (@GOTOFF). */
if (reload_in_progress)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
if (GET_CODE (addr) == CONST)
addr = XEXP (addr, 0);
if (GET_CODE (addr) == PLUS)
@ -7276,7 +7271,7 @@ legitimize_pic_address (rtx orig, rtx reg)
Global Offset Table (@GOT). */
if (reload_in_progress)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
new_rtx = gen_rtx_CONST (Pmode, new_rtx);
if (TARGET_64BIT)
@ -7329,7 +7324,7 @@ legitimize_pic_address (rtx orig, rtx reg)
if (!TARGET_64BIT)
{
if (reload_in_progress)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
UNSPEC_GOTOFF);
new_rtx = gen_rtx_PLUS (Pmode, new_rtx, op1);
@ -7489,7 +7484,7 @@ legitimize_tls_address (rtx x, enum tls_model model, int for_mov)
else if (flag_pic)
{
if (reload_in_progress)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
pic = pic_offset_table_rtx;
type = TARGET_ANY_GNU_TLS ? UNSPEC_GOTNTPOFF : UNSPEC_GOTTPOFF;
}

View file

@ -2431,7 +2431,7 @@ struct machine_function GTY(())
verify whether there's any such instruction live by testing that
REG_SP is live. */
#define ix86_current_function_calls_tls_descriptor \
(ix86_tls_descriptor_calls_expanded_in_cfun && regs_ever_live[SP_REG])
(ix86_tls_descriptor_calls_expanded_in_cfun && df_regs_ever_live_p (SP_REG))
/* Control behavior of x86_file_start. */
#define X86_FILE_START_VERSION_DIRECTIVE false

View file

@ -194,6 +194,7 @@
(UNSPECV_CMPXCHG_2 11)
(UNSPECV_XCHG 12)
(UNSPECV_LOCK 13)
(UNSPECV_PROLOGUE_USE 14)
])
;; Registers by name.
@ -1917,7 +1918,7 @@
[(set (match_operand:DI 0 "push_operand" "")
(match_operand:DI 1 "immediate_operand" ""))]
"TARGET_64BIT && ((optimize > 0 && flag_peephole2)
? flow2_completed : reload_completed)
? epilogue_completed : reload_completed)
&& !symbolic_operand (operands[1], DImode)
&& !x86_64_immediate_operand (operands[1], DImode)"
[(set (match_dup 0) (match_dup 1))
@ -2153,7 +2154,7 @@
[(set (match_operand:DI 0 "memory_operand" "")
(match_operand:DI 1 "immediate_operand" ""))]
"TARGET_64BIT && ((optimize > 0 && flag_peephole2)
? flow2_completed : reload_completed)
? epilogue_completed : reload_completed)
&& !symbolic_operand (operands[1], DImode)
&& !x86_64_immediate_operand (operands[1], DImode)"
[(set (match_dup 2) (match_dup 3))
@ -10573,7 +10574,7 @@
(match_operand:QI 2 "nonmemory_operand" "")))
(clobber (reg:CC FLAGS_REG))]
"!TARGET_64BIT && ((optimize > 0 && flag_peephole2)
? flow2_completed : reload_completed)"
? epilogue_completed : reload_completed)"
[(const_int 0)]
"ix86_split_ashl (operands, NULL_RTX, DImode); DONE;")
@ -11504,7 +11505,7 @@
(match_operand:QI 2 "nonmemory_operand" "")))
(clobber (reg:CC FLAGS_REG))]
"!TARGET_64BIT && ((optimize > 0 && flag_peephole2)
? flow2_completed : reload_completed)"
? epilogue_completed : reload_completed)"
[(const_int 0)]
"ix86_split_ashr (operands, NULL_RTX, DImode); DONE;")
@ -12182,7 +12183,7 @@
(match_operand:QI 2 "nonmemory_operand" "")))
(clobber (reg:CC FLAGS_REG))]
"!TARGET_64BIT && ((optimize > 0 && flag_peephole2)
? flow2_completed : reload_completed)"
? epilogue_completed : reload_completed)"
[(const_int 0)]
"ix86_split_lshr (operands, NULL_RTX, DImode); DONE;")
@ -14347,7 +14348,7 @@
registers we stored in the result block. We avoid problems by
claiming that all hard registers are used and clobbered at this
point. */
emit_insn (gen_blockage (const0_rtx));
emit_insn (gen_blockage ());
DONE;
})
@ -14358,7 +14359,15 @@
;; all of memory. This blocks insns from being moved across this point.
(define_insn "blockage"
[(unspec_volatile [(match_operand 0 "" "")] UNSPECV_BLOCKAGE)]
[(unspec_volatile [(const_int 0)] UNSPECV_BLOCKAGE)]
""
""
[(set_attr "length" "0")])
;; As USE insns aren't meaningful after reload, this is used instead
;; to prevent deleting instructions setting registers for PIC code
(define_insn "prologue_use"
[(unspec_volatile [(match_operand 0 "" "")] UNSPECV_PROLOGUE_USE)]
""
""
[(set_attr "length" "0")])

View file

@ -109,3 +109,4 @@ extern void ia64_output_function_profiler (FILE *, int);
extern void ia64_profile_hook (int);
extern void ia64_optimization_options (int, int);
extern void ia64_init_expanders (void);

View file

@ -53,8 +53,10 @@ Boston, MA 02110-1301, USA. */
#include "cfglayout.h"
#include "tree-gimple.h"
#include "intl.h"
#include "df.h"
#include "debug.h"
#include "params.h"
#include "dbgcnt.h"
#include "tm-constrs.h"
/* This is used for communication between ASM_OUTPUT_LABEL and
@ -122,6 +124,18 @@ unsigned int ia64_section_threshold;
TRUE if we do insn bundling instead of insn scheduling. */
int bundling_p = 0;
enum ia64_frame_regs
{
reg_fp,
reg_save_b0,
reg_save_pr,
reg_save_ar_pfs,
reg_save_ar_unat,
reg_save_ar_lc,
reg_save_gp,
number_of_ia64_frame_regs
};
/* Structure to be filled in by ia64_compute_frame_size with register
save masks and offsets for the current function. */
@ -136,13 +150,7 @@ struct ia64_frame_info
unsigned int gr_used_mask; /* mask of registers in use as gr spill
registers or long-term scratches. */
int n_spilled; /* number of spilled registers. */
int reg_fp; /* register for fp. */
int reg_save_b0; /* save register for b0. */
int reg_save_pr; /* save register for prs. */
int reg_save_ar_pfs; /* save register for ar.pfs. */
int reg_save_ar_unat; /* save register for ar.unat. */
int reg_save_ar_lc; /* save register for ar.lc. */
int reg_save_gp; /* save register for gp. */
int r[number_of_ia64_frame_regs]; /* Frame related registers. */
int n_input_regs; /* number of input registers used. */
int n_local_regs; /* number of local registers used. */
int n_output_regs; /* number of output registers used. */
@ -154,6 +162,8 @@ struct ia64_frame_info
/* Current frame information calculated by ia64_compute_frame_size. */
static struct ia64_frame_info current_frame_info;
/* The actual registers that are emitted. */
static int emitted_frame_related_regs[number_of_ia64_frame_regs];
static int ia64_first_cycle_multipass_dfa_lookahead (void);
static void ia64_dependencies_evaluation_hook (rtx, rtx);
@ -173,7 +183,7 @@ static int ia64_spec_check_p (rtx);
static int ia64_spec_check_src_p (rtx);
static rtx gen_tls_get_addr (void);
static rtx gen_thread_pointer (void);
static int find_gr_spill (int);
static int find_gr_spill (enum ia64_frame_regs, int);
static int next_scratch_gr_reg (void);
static void mark_reg_gr_used_mask (rtx, void *);
static void ia64_compute_frame_size (HOST_WIDE_INT);
@ -1872,13 +1882,42 @@ ia64_expand_call (rtx retval, rtx addr, rtx nextarg ATTRIBUTE_UNUSED,
use_reg (&CALL_INSN_FUNCTION_USAGE (insn), b0);
}
static void
reg_emitted (enum ia64_frame_regs r)
{
if (emitted_frame_related_regs[r] == 0)
emitted_frame_related_regs[r] = current_frame_info.r[r];
else
gcc_assert (emitted_frame_related_regs[r] == current_frame_info.r[r]);
}
static int
get_reg (enum ia64_frame_regs r)
{
reg_emitted (r);
return current_frame_info.r[r];
}
static bool
is_emitted (int regno)
{
enum ia64_frame_regs r;
for (r = reg_fp; r < number_of_ia64_frame_regs; r++)
if (emitted_frame_related_regs[r] == regno)
return true;
return false;
}
void
ia64_reload_gp (void)
{
rtx tmp;
if (current_frame_info.reg_save_gp)
tmp = gen_rtx_REG (DImode, current_frame_info.reg_save_gp);
if (current_frame_info.r[reg_save_gp])
{
tmp = gen_rtx_REG (DImode, get_reg (reg_save_gp));
}
else
{
HOST_WIDE_INT offset;
@ -2158,20 +2197,33 @@ ia64_globalize_decl_name (FILE * stream, tree decl)
TRY_LOCALS is true if we should attempt to locate a local regnum. */
static int
find_gr_spill (int try_locals)
find_gr_spill (enum ia64_frame_regs r, int try_locals)
{
int regno;
if (emitted_frame_related_regs[r] != 0)
{
regno = emitted_frame_related_regs[r];
if (regno >= LOC_REG (0) && regno < LOC_REG (80 - frame_pointer_needed))
current_frame_info.n_local_regs = regno - LOC_REG (0) + 1;
else if (current_function_is_leaf
&& regno >= GR_REG (1) && regno <= GR_REG (31))
current_frame_info.gr_used_mask |= 1 << regno;
return regno;
}
/* If this is a leaf function, first try an otherwise unused
call-clobbered register. */
if (current_function_is_leaf)
{
for (regno = GR_REG (1); regno <= GR_REG (31); regno++)
if (! regs_ever_live[regno]
if (! df_regs_ever_live_p (regno)
&& call_used_regs[regno]
&& ! fixed_regs[regno]
&& ! global_regs[regno]
&& ((current_frame_info.gr_used_mask >> regno) & 1) == 0)
&& ((current_frame_info.gr_used_mask >> regno) & 1) == 0
&& ! is_emitted (regno))
{
current_frame_info.gr_used_mask |= 1 << regno;
return regno;
@ -2243,6 +2295,7 @@ mark_reg_gr_used_mask (rtx reg, void *data ATTRIBUTE_UNUSED)
}
}
/* Returns the number of bytes offset between the frame pointer and the stack
pointer for the current function. SIZE is the number of bytes of space
needed for local variables. */
@ -2284,7 +2337,7 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
since we'll be adjusting that down later. */
regno = LOC_REG (78) + ! frame_pointer_needed;
for (; regno >= LOC_REG (0); regno--)
if (regs_ever_live[regno])
if (df_regs_ever_live_p (regno) && !is_emitted (regno))
break;
current_frame_info.n_local_regs = regno - LOC_REG (0) + 1;
@ -2299,13 +2352,13 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
else
{
for (regno = IN_REG (7); regno >= IN_REG (0); regno--)
if (regs_ever_live[regno])
if (df_regs_ever_live_p (regno))
break;
current_frame_info.n_input_regs = regno - IN_REG (0) + 1;
}
for (regno = OUT_REG (7); regno >= OUT_REG (0); regno--)
if (regs_ever_live[regno])
if (df_regs_ever_live_p (regno))
break;
i = regno - OUT_REG (0) + 1;
@ -2327,7 +2380,7 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
which will always wind up on the stack. */
for (regno = FR_REG (2); regno <= FR_REG (127); regno++)
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
{
SET_HARD_REG_BIT (mask, regno);
spill_size += 16;
@ -2336,7 +2389,7 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
}
for (regno = GR_REG (1); regno <= GR_REG (31); regno++)
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
{
SET_HARD_REG_BIT (mask, regno);
spill_size += 8;
@ -2345,7 +2398,7 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
}
for (regno = BR_REG (1); regno <= BR_REG (7); regno++)
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
{
SET_HARD_REG_BIT (mask, regno);
spill_size += 8;
@ -2357,15 +2410,15 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
if (frame_pointer_needed)
{
current_frame_info.reg_fp = find_gr_spill (1);
current_frame_info.r[reg_fp] = find_gr_spill (reg_fp, 1);
/* If we did not get a register, then we take LOC79. This is guaranteed
to be free, even if regs_ever_live is already set, because this is
HARD_FRAME_POINTER_REGNUM. This requires incrementing n_local_regs,
as we don't count loc79 above. */
if (current_frame_info.reg_fp == 0)
if (current_frame_info.r[reg_fp] == 0)
{
current_frame_info.reg_fp = LOC_REG (79);
current_frame_info.n_local_regs++;
current_frame_info.r[reg_fp] = LOC_REG (79);
current_frame_info.n_local_regs = LOC_REG (79) - LOC_REG (0) + 1;
}
}
@ -2376,8 +2429,8 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
able to unwind the stack. */
SET_HARD_REG_BIT (mask, BR_REG (0));
current_frame_info.reg_save_b0 = find_gr_spill (1);
if (current_frame_info.reg_save_b0 == 0)
current_frame_info.r[reg_save_b0] = find_gr_spill (reg_save_b0, 1);
if (current_frame_info.r[reg_save_b0] == 0)
{
extra_spill_size += 8;
n_spilled += 1;
@ -2385,8 +2438,8 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
/* Similarly for ar.pfs. */
SET_HARD_REG_BIT (mask, AR_PFS_REGNUM);
current_frame_info.reg_save_ar_pfs = find_gr_spill (1);
if (current_frame_info.reg_save_ar_pfs == 0)
current_frame_info.r[reg_save_ar_pfs] = find_gr_spill (reg_save_ar_pfs, 1);
if (current_frame_info.r[reg_save_ar_pfs] == 0)
{
extra_spill_size += 8;
n_spilled += 1;
@ -2394,9 +2447,9 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
/* Similarly for gp. Note that if we're calling setjmp, the stacked
registers are clobbered, so we fall back to the stack. */
current_frame_info.reg_save_gp
= (current_function_calls_setjmp ? 0 : find_gr_spill (1));
if (current_frame_info.reg_save_gp == 0)
current_frame_info.r[reg_save_gp]
= (current_function_calls_setjmp ? 0 : find_gr_spill (reg_save_gp, 1));
if (current_frame_info.r[reg_save_gp] == 0)
{
SET_HARD_REG_BIT (mask, GR_REG (1));
spill_size += 8;
@ -2405,18 +2458,19 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
}
else
{
if (regs_ever_live[BR_REG (0)] && ! call_used_regs[BR_REG (0)])
if (df_regs_ever_live_p (BR_REG (0)) && ! call_used_regs[BR_REG (0)])
{
SET_HARD_REG_BIT (mask, BR_REG (0));
extra_spill_size += 8;
n_spilled += 1;
}
if (regs_ever_live[AR_PFS_REGNUM])
if (df_regs_ever_live_p (AR_PFS_REGNUM))
{
SET_HARD_REG_BIT (mask, AR_PFS_REGNUM);
current_frame_info.reg_save_ar_pfs = find_gr_spill (1);
if (current_frame_info.reg_save_ar_pfs == 0)
current_frame_info.r[reg_save_ar_pfs]
= find_gr_spill (reg_save_ar_pfs, 1);
if (current_frame_info.r[reg_save_ar_pfs] == 0)
{
extra_spill_size += 8;
n_spilled += 1;
@ -2429,25 +2483,31 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
it is absolutely critical that FP get the only hard register that's
guaranteed to be free, so we allocated it first. If all three did
happen to be allocated hard regs, and are consecutive, rearrange them
into the preferred order now. */
if (current_frame_info.reg_fp != 0
&& current_frame_info.reg_save_b0 == current_frame_info.reg_fp + 1
&& current_frame_info.reg_save_ar_pfs == current_frame_info.reg_fp + 2)
into the preferred order now.
If we have already emitted code for any of those registers,
then it's already too late to change. */
if (current_frame_info.r[reg_fp] != 0
&& current_frame_info.r[reg_save_b0] == current_frame_info.r[reg_fp] + 1
&& current_frame_info.r[reg_save_ar_pfs] == current_frame_info.r[reg_fp] + 2
&& emitted_frame_related_regs[reg_save_b0] == 0
&& emitted_frame_related_regs[reg_save_ar_pfs] == 0
&& emitted_frame_related_regs[reg_fp] == 0)
{
current_frame_info.reg_save_b0 = current_frame_info.reg_fp;
current_frame_info.reg_save_ar_pfs = current_frame_info.reg_fp + 1;
current_frame_info.reg_fp = current_frame_info.reg_fp + 2;
current_frame_info.r[reg_save_b0] = current_frame_info.r[reg_fp];
current_frame_info.r[reg_save_ar_pfs] = current_frame_info.r[reg_fp] + 1;
current_frame_info.r[reg_fp] = current_frame_info.r[reg_fp] + 2;
}
/* See if we need to store the predicate register block. */
for (regno = PR_REG (0); regno <= PR_REG (63); regno++)
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
break;
if (regno <= PR_REG (63))
{
SET_HARD_REG_BIT (mask, PR_REG (0));
current_frame_info.reg_save_pr = find_gr_spill (1);
if (current_frame_info.reg_save_pr == 0)
current_frame_info.r[reg_save_pr] = find_gr_spill (reg_save_pr, 1);
if (current_frame_info.r[reg_save_pr] == 0)
{
extra_spill_size += 8;
n_spilled += 1;
@ -2456,30 +2516,32 @@ ia64_compute_frame_size (HOST_WIDE_INT size)
/* ??? Mark them all as used so that register renaming and such
are free to use them. */
for (regno = PR_REG (0); regno <= PR_REG (63); regno++)
regs_ever_live[regno] = 1;
df_set_regs_ever_live (regno, true);
}
/* If we're forced to use st8.spill, we're forced to save and restore
ar.unat as well. The check for existing liveness allows inline asm
to touch ar.unat. */
if (spilled_gr_p || cfun->machine->n_varargs
|| regs_ever_live[AR_UNAT_REGNUM])
|| df_regs_ever_live_p (AR_UNAT_REGNUM))
{
regs_ever_live[AR_UNAT_REGNUM] = 1;
df_set_regs_ever_live (AR_UNAT_REGNUM, true);
SET_HARD_REG_BIT (mask, AR_UNAT_REGNUM);
current_frame_info.reg_save_ar_unat = find_gr_spill (spill_size == 0);
if (current_frame_info.reg_save_ar_unat == 0)
current_frame_info.r[reg_save_ar_unat]
= find_gr_spill (reg_save_ar_unat, spill_size == 0);
if (current_frame_info.r[reg_save_ar_unat] == 0)
{
extra_spill_size += 8;
n_spilled += 1;
}
}
if (regs_ever_live[AR_LC_REGNUM])
if (df_regs_ever_live_p (AR_LC_REGNUM))
{
SET_HARD_REG_BIT (mask, AR_LC_REGNUM);
current_frame_info.reg_save_ar_lc = find_gr_spill (spill_size == 0);
if (current_frame_info.reg_save_ar_lc == 0)
current_frame_info.r[reg_save_ar_lc]
= find_gr_spill (reg_save_ar_lc, spill_size == 0);
if (current_frame_info.r[reg_save_ar_lc] == 0)
{
extra_spill_size += 8;
n_spilled += 1;
@ -2713,16 +2775,6 @@ spill_restore_mem (rtx reg, HOST_WIDE_INT cfa_off)
insn = emit_insn (seq);
}
spill_fill_data.init_after = insn;
/* If DISP is 0, we may or may not have a further adjustment
afterward. If we do, then the load/store insn may be modified
to be a post-modify. If we don't, then this copy may be
eliminated by copyprop_hardreg_forward, which makes this
insn garbage, which runs afoul of the sanity check in
propagate_one_insn. So mark this insn as legal to delete. */
if (disp == 0)
REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
REG_NOTES (insn));
}
mem = gen_rtx_MEM (GET_MODE (reg), spill_fill_data.iter_reg[iter]);
@ -2850,6 +2902,22 @@ ia64_expand_prologue (void)
ia64_compute_frame_size (get_frame_size ());
last_scratch_gr_reg = 15;
if (dump_file)
{
fprintf (dump_file, "ia64 frame related registers "
"recorded in current_frame_info.r[]:\n");
#define PRINTREG(a) if (current_frame_info.r[a]) \
fprintf(dump_file, "%s = %d\n", #a, current_frame_info.r[a])
PRINTREG(reg_fp);
PRINTREG(reg_save_b0);
PRINTREG(reg_save_pr);
PRINTREG(reg_save_ar_pfs);
PRINTREG(reg_save_ar_unat);
PRINTREG(reg_save_ar_lc);
PRINTREG(reg_save_gp);
#undef PRINTREG
}
/* If there is no epilogue, then we don't need some prologue insns.
We need to avoid emitting the dead prologue insns, because flow
will complain about them. */
@ -2892,12 +2960,12 @@ ia64_expand_prologue (void)
there is a frame pointer. loc79 gets wasted in this case, as it is
renamed to a register that will never be used. See also the try_locals
code in find_gr_spill. */
if (current_frame_info.reg_fp)
if (current_frame_info.r[reg_fp])
{
const char *tmp = reg_names[HARD_FRAME_POINTER_REGNUM];
reg_names[HARD_FRAME_POINTER_REGNUM]
= reg_names[current_frame_info.reg_fp];
reg_names[current_frame_info.reg_fp] = tmp;
= reg_names[current_frame_info.r[reg_fp]];
reg_names[current_frame_info.r[reg_fp]] = tmp;
}
/* We don't need an alloc instruction if we've used no outputs or locals. */
@ -2915,8 +2983,11 @@ ia64_expand_prologue (void)
{
current_frame_info.need_regstk = 0;
if (current_frame_info.reg_save_ar_pfs)
regno = current_frame_info.reg_save_ar_pfs;
if (current_frame_info.r[reg_save_ar_pfs])
{
regno = current_frame_info.r[reg_save_ar_pfs];
reg_emitted (reg_save_ar_pfs);
}
else
regno = next_scratch_gr_reg ();
ar_pfs_save_reg = gen_rtx_REG (DImode, regno);
@ -2926,7 +2997,7 @@ ia64_expand_prologue (void)
GEN_INT (current_frame_info.n_local_regs),
GEN_INT (current_frame_info.n_output_regs),
GEN_INT (current_frame_info.n_rotate_regs)));
RTX_FRAME_RELATED_P (insn) = (current_frame_info.reg_save_ar_pfs != 0);
RTX_FRAME_RELATED_P (insn) = (current_frame_info.r[reg_save_ar_pfs] != 0);
}
/* Set up frame pointer, stack pointer, and spill iterators. */
@ -2984,9 +3055,12 @@ ia64_expand_prologue (void)
/* Must copy out ar.unat before doing any integer spills. */
if (TEST_HARD_REG_BIT (current_frame_info.mask, AR_UNAT_REGNUM))
{
if (current_frame_info.reg_save_ar_unat)
ar_unat_save_reg
= gen_rtx_REG (DImode, current_frame_info.reg_save_ar_unat);
if (current_frame_info.r[reg_save_ar_unat])
{
ar_unat_save_reg
= gen_rtx_REG (DImode, current_frame_info.r[reg_save_ar_unat]);
reg_emitted (reg_save_ar_unat);
}
else
{
alt_regno = next_scratch_gr_reg ();
@ -2996,11 +3070,11 @@ ia64_expand_prologue (void)
reg = gen_rtx_REG (DImode, AR_UNAT_REGNUM);
insn = emit_move_insn (ar_unat_save_reg, reg);
RTX_FRAME_RELATED_P (insn) = (current_frame_info.reg_save_ar_unat != 0);
RTX_FRAME_RELATED_P (insn) = (current_frame_info.r[reg_save_ar_unat] != 0);
/* Even if we're not going to generate an epilogue, we still
need to save the register so that EH works. */
if (! epilogue_p && current_frame_info.reg_save_ar_unat)
if (! epilogue_p && current_frame_info.r[reg_save_ar_unat])
emit_insn (gen_prologue_use (ar_unat_save_reg));
}
else
@ -3026,9 +3100,10 @@ ia64_expand_prologue (void)
if (TEST_HARD_REG_BIT (current_frame_info.mask, PR_REG (0)))
{
reg = gen_rtx_REG (DImode, PR_REG (0));
if (current_frame_info.reg_save_pr != 0)
if (current_frame_info.r[reg_save_pr] != 0)
{
alt_reg = gen_rtx_REG (DImode, current_frame_info.reg_save_pr);
alt_reg = gen_rtx_REG (DImode, current_frame_info.r[reg_save_pr]);
reg_emitted (reg_save_pr);
insn = emit_move_insn (alt_reg, reg);
/* ??? Denote pr spill/fill by a DImode move that modifies all
@ -3056,7 +3131,7 @@ ia64_expand_prologue (void)
/* Handle AR regs in numerical order. All of them get special handling. */
if (TEST_HARD_REG_BIT (current_frame_info.mask, AR_UNAT_REGNUM)
&& current_frame_info.reg_save_ar_unat == 0)
&& current_frame_info.r[reg_save_ar_unat] == 0)
{
reg = gen_rtx_REG (DImode, AR_UNAT_REGNUM);
do_spill (gen_movdi_x, ar_unat_save_reg, cfa_off, reg);
@ -3067,7 +3142,7 @@ ia64_expand_prologue (void)
only thing we have to do now is copy that register to a stack slot
if we'd not allocated a local register for the job. */
if (TEST_HARD_REG_BIT (current_frame_info.mask, AR_PFS_REGNUM)
&& current_frame_info.reg_save_ar_pfs == 0)
&& current_frame_info.r[reg_save_ar_pfs] == 0)
{
reg = gen_rtx_REG (DImode, AR_PFS_REGNUM);
do_spill (gen_movdi_x, ar_pfs_save_reg, cfa_off, reg);
@ -3077,9 +3152,10 @@ ia64_expand_prologue (void)
if (TEST_HARD_REG_BIT (current_frame_info.mask, AR_LC_REGNUM))
{
reg = gen_rtx_REG (DImode, AR_LC_REGNUM);
if (current_frame_info.reg_save_ar_lc != 0)
if (current_frame_info.r[reg_save_ar_lc] != 0)
{
alt_reg = gen_rtx_REG (DImode, current_frame_info.reg_save_ar_lc);
alt_reg = gen_rtx_REG (DImode, current_frame_info.r[reg_save_ar_lc]);
reg_emitted (reg_save_ar_lc);
insn = emit_move_insn (alt_reg, reg);
RTX_FRAME_RELATED_P (insn) = 1;
@ -3102,9 +3178,10 @@ ia64_expand_prologue (void)
if (TEST_HARD_REG_BIT (current_frame_info.mask, BR_REG (0)))
{
reg = gen_rtx_REG (DImode, BR_REG (0));
if (current_frame_info.reg_save_b0 != 0)
if (current_frame_info.r[reg_save_b0] != 0)
{
alt_reg = gen_rtx_REG (DImode, current_frame_info.reg_save_b0);
alt_reg = gen_rtx_REG (DImode, current_frame_info.r[reg_save_b0]);
reg_emitted (reg_save_b0);
insn = emit_move_insn (alt_reg, reg);
RTX_FRAME_RELATED_P (insn) = 1;
@ -3123,17 +3200,12 @@ ia64_expand_prologue (void)
}
}
if (current_frame_info.reg_save_gp)
if (current_frame_info.r[reg_save_gp])
{
reg_emitted (reg_save_gp);
insn = emit_move_insn (gen_rtx_REG (DImode,
current_frame_info.reg_save_gp),
current_frame_info.r[reg_save_gp]),
pic_offset_table_rtx);
/* We don't know for sure yet if this is actually needed, since
we've not split the PIC call patterns. If all of the calls
are indirect, and not followed by any uses of the gp, then
this save is dead. Allow it to go away. */
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, REG_NOTES (insn));
}
/* We should now be at the base of the gr/br/fr spill area. */
@ -3217,8 +3289,11 @@ ia64_expand_epilogue (int sibcall_p)
/* Restore the predicate registers. */
if (TEST_HARD_REG_BIT (current_frame_info.mask, PR_REG (0)))
{
if (current_frame_info.reg_save_pr != 0)
alt_reg = gen_rtx_REG (DImode, current_frame_info.reg_save_pr);
if (current_frame_info.r[reg_save_pr] != 0)
{
alt_reg = gen_rtx_REG (DImode, current_frame_info.r[reg_save_pr]);
reg_emitted (reg_save_pr);
}
else
{
alt_regno = next_scratch_gr_reg ();
@ -3236,9 +3311,12 @@ ia64_expand_epilogue (int sibcall_p)
after the GRs have been restored. */
if (TEST_HARD_REG_BIT (current_frame_info.mask, AR_UNAT_REGNUM))
{
if (current_frame_info.reg_save_ar_unat != 0)
ar_unat_save_reg
= gen_rtx_REG (DImode, current_frame_info.reg_save_ar_unat);
if (current_frame_info.r[reg_save_ar_unat] != 0)
{
ar_unat_save_reg
= gen_rtx_REG (DImode, current_frame_info.r[reg_save_ar_unat]);
reg_emitted (reg_save_ar_unat);
}
else
{
alt_regno = next_scratch_gr_reg ();
@ -3251,9 +3329,10 @@ ia64_expand_epilogue (int sibcall_p)
else
ar_unat_save_reg = NULL_RTX;
if (current_frame_info.reg_save_ar_pfs != 0)
if (current_frame_info.r[reg_save_ar_pfs] != 0)
{
alt_reg = gen_rtx_REG (DImode, current_frame_info.reg_save_ar_pfs);
reg_emitted (reg_save_ar_pfs);
alt_reg = gen_rtx_REG (DImode, current_frame_info.r[reg_save_ar_pfs]);
reg = gen_rtx_REG (DImode, AR_PFS_REGNUM);
emit_move_insn (reg, alt_reg);
}
@ -3269,8 +3348,11 @@ ia64_expand_epilogue (int sibcall_p)
if (TEST_HARD_REG_BIT (current_frame_info.mask, AR_LC_REGNUM))
{
if (current_frame_info.reg_save_ar_lc != 0)
alt_reg = gen_rtx_REG (DImode, current_frame_info.reg_save_ar_lc);
if (current_frame_info.r[reg_save_ar_lc] != 0)
{
alt_reg = gen_rtx_REG (DImode, current_frame_info.r[reg_save_ar_lc]);
reg_emitted (reg_save_ar_lc);
}
else
{
alt_regno = next_scratch_gr_reg ();
@ -3285,8 +3367,11 @@ ia64_expand_epilogue (int sibcall_p)
/* Restore the return pointer. */
if (TEST_HARD_REG_BIT (current_frame_info.mask, BR_REG (0)))
{
if (current_frame_info.reg_save_b0 != 0)
alt_reg = gen_rtx_REG (DImode, current_frame_info.reg_save_b0);
if (current_frame_info.r[reg_save_b0] != 0)
{
alt_reg = gen_rtx_REG (DImode, current_frame_info.r[reg_save_b0]);
reg_emitted (reg_save_b0);
}
else
{
alt_regno = next_scratch_gr_reg ();
@ -3409,7 +3494,8 @@ ia64_expand_epilogue (int sibcall_p)
register, we may have swapped the names of r2 and HARD_FRAME_POINTER_REGNUM,
so we have to make sure we're using the string "r2" when emitting
the register name for the assembler. */
if (current_frame_info.reg_fp && current_frame_info.reg_fp == GR_REG (2))
if (current_frame_info.r[reg_fp]
&& current_frame_info.r[reg_fp] == GR_REG (2))
fp = HARD_FRAME_POINTER_REGNUM;
/* We must emit an alloc to force the input registers to become output
@ -3442,11 +3528,11 @@ ia64_direct_return (void)
return (current_frame_info.total_size == 0
&& current_frame_info.n_spilled == 0
&& current_frame_info.reg_save_b0 == 0
&& current_frame_info.reg_save_pr == 0
&& current_frame_info.reg_save_ar_pfs == 0
&& current_frame_info.reg_save_ar_unat == 0
&& current_frame_info.reg_save_ar_lc == 0);
&& current_frame_info.r[reg_save_b0] == 0
&& current_frame_info.r[reg_save_pr] == 0
&& current_frame_info.r[reg_save_ar_pfs] == 0
&& current_frame_info.r[reg_save_ar_unat] == 0
&& current_frame_info.r[reg_save_ar_lc] == 0);
}
return 0;
}
@ -3472,8 +3558,11 @@ ia64_split_return_addr_rtx (rtx dest)
if (TEST_HARD_REG_BIT (current_frame_info.mask, BR_REG (0)))
{
if (current_frame_info.reg_save_b0 != 0)
src = gen_rtx_REG (DImode, current_frame_info.reg_save_b0);
if (current_frame_info.r[reg_save_b0] != 0)
{
src = gen_rtx_REG (DImode, current_frame_info.r[reg_save_b0]);
reg_emitted (reg_save_b0);
}
else
{
HOST_WIDE_INT off;
@ -3520,21 +3609,14 @@ int
ia64_hard_regno_rename_ok (int from, int to)
{
/* Don't clobber any of the registers we reserved for the prologue. */
if (to == current_frame_info.reg_fp
|| to == current_frame_info.reg_save_b0
|| to == current_frame_info.reg_save_pr
|| to == current_frame_info.reg_save_ar_pfs
|| to == current_frame_info.reg_save_ar_unat
|| to == current_frame_info.reg_save_ar_lc)
return 0;
enum ia64_frame_regs r;
if (from == current_frame_info.reg_fp
|| from == current_frame_info.reg_save_b0
|| from == current_frame_info.reg_save_pr
|| from == current_frame_info.reg_save_ar_pfs
|| from == current_frame_info.reg_save_ar_unat
|| from == current_frame_info.reg_save_ar_lc)
return 0;
for (r = reg_fp; r <= reg_save_ar_lc; r++)
if (to == current_frame_info.r[r]
|| from == current_frame_info.r[r]
|| to == emitted_frame_related_regs[r]
|| from == emitted_frame_related_regs[r])
return 0;
/* Don't use output registers outside the register frame. */
if (OUT_REGNO_P (to) && to >= OUT_REG (current_frame_info.n_output_regs))
@ -3592,36 +3674,36 @@ ia64_output_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
mask = 0;
grsave = grsave_prev = 0;
if (current_frame_info.reg_save_b0 != 0)
if (current_frame_info.r[reg_save_b0] != 0)
{
mask |= 8;
grsave = grsave_prev = current_frame_info.reg_save_b0;
grsave = grsave_prev = current_frame_info.r[reg_save_b0];
}
if (current_frame_info.reg_save_ar_pfs != 0
if (current_frame_info.r[reg_save_ar_pfs] != 0
&& (grsave_prev == 0
|| current_frame_info.reg_save_ar_pfs == grsave_prev + 1))
|| current_frame_info.r[reg_save_ar_pfs] == grsave_prev + 1))
{
mask |= 4;
if (grsave_prev == 0)
grsave = current_frame_info.reg_save_ar_pfs;
grsave_prev = current_frame_info.reg_save_ar_pfs;
grsave = current_frame_info.r[reg_save_ar_pfs];
grsave_prev = current_frame_info.r[reg_save_ar_pfs];
}
if (current_frame_info.reg_fp != 0
if (current_frame_info.r[reg_fp] != 0
&& (grsave_prev == 0
|| current_frame_info.reg_fp == grsave_prev + 1))
|| current_frame_info.r[reg_fp] == grsave_prev + 1))
{
mask |= 2;
if (grsave_prev == 0)
grsave = HARD_FRAME_POINTER_REGNUM;
grsave_prev = current_frame_info.reg_fp;
grsave_prev = current_frame_info.r[reg_fp];
}
if (current_frame_info.reg_save_pr != 0
if (current_frame_info.r[reg_save_pr] != 0
&& (grsave_prev == 0
|| current_frame_info.reg_save_pr == grsave_prev + 1))
|| current_frame_info.r[reg_save_pr] == grsave_prev + 1))
{
mask |= 1;
if (grsave_prev == 0)
grsave = current_frame_info.reg_save_pr;
grsave = current_frame_info.r[reg_save_pr];
}
if (mask && TARGET_GNU_AS)
@ -3657,12 +3739,13 @@ ia64_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
{
int i;
if (current_frame_info.reg_fp)
if (current_frame_info.r[reg_fp])
{
const char *tmp = reg_names[HARD_FRAME_POINTER_REGNUM];
reg_names[HARD_FRAME_POINTER_REGNUM]
= reg_names[current_frame_info.reg_fp];
reg_names[current_frame_info.reg_fp] = tmp;
= reg_names[current_frame_info.r[reg_fp]];
reg_names[current_frame_info.r[reg_fp]] = tmp;
reg_emitted (reg_fp);
}
if (! TARGET_REG_NAMES)
{
@ -3683,11 +3766,11 @@ ia64_dbx_register_number (int regno)
/* In ia64_expand_prologue we quite literally renamed the frame pointer
from its home at loc79 to something inside the register frame. We
must perform the same renumbering here for the debug info. */
if (current_frame_info.reg_fp)
if (current_frame_info.r[reg_fp])
{
if (regno == HARD_FRAME_POINTER_REGNUM)
regno = current_frame_info.reg_fp;
else if (regno == current_frame_info.reg_fp)
regno = current_frame_info.r[reg_fp];
else if (regno == current_frame_info.r[reg_fp])
regno = HARD_FRAME_POINTER_REGNUM;
}
@ -5093,6 +5176,13 @@ ia64_override_options (void)
init_machine_status = ia64_init_machine_status;
}
/* Initialize the record of emitted frame related registers. */
void ia64_init_expanders (void)
{
memset (&emitted_frame_related_regs, 0, sizeof (emitted_frame_related_regs));
}
static struct machine_function *
ia64_init_machine_status (void)
{
@ -6650,11 +6740,12 @@ ia64_set_sched_flags (spec_info_t spec_info)
mask |= BE_IN_CONTROL;
}
gcc_assert (*flags & USE_GLAT);
if (mask)
{
*flags |= USE_DEPS_LIST | DETACH_LIFE_INFO | DO_SPECULATION;
*flags |= USE_DEPS_LIST | DO_SPECULATION;
if (mask & BE_IN_SPEC)
*flags |= NEW_BBS;
spec_info->mask = mask;
spec_info->flags = 0;
@ -8338,7 +8429,7 @@ emit_predicate_relation_info (void)
/* Skip p0, which may be thought to be live due to (reg:DI p0)
grabbing the entire block of predicate registers. */
for (r = PR_REG (2); r < PR_REG (64); r += 2)
if (REGNO_REG_SET_P (bb->il.rtl->global_live_at_start, r))
if (REGNO_REG_SET_P (df_get_live_in (bb), r))
{
rtx p = gen_rtx_REG (BImode, r);
rtx n = emit_insn_after (gen_pred_rel_mutex (p), head);
@ -8388,13 +8479,9 @@ ia64_reorg (void)
/* If optimizing, we'll have split before scheduling. */
if (optimize == 0)
split_all_insns (0);
split_all_insns ();
/* ??? update_life_info_in_dirty_blocks fails to terminate during
non-optimizing bootstrap. */
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
if (optimize && ia64_flag_schedule_insns2)
if (optimize && ia64_flag_schedule_insns2 && dbg_cnt (ia64_sched2))
{
timevar_push (TV_SCHED2);
ia64_final_schedule = 1;
@ -8469,6 +8556,8 @@ ia64_reorg (void)
_1mlx_ = get_cpu_unit_code ("1b_1mlx.");
}
schedule_ebbs ();
/* We cannot reuse this one because it has been corrupted by the
evil glat. */
finish_bundle_states ();
if (ia64_tune == PROCESSOR_ITANIUM)
{
@ -8485,6 +8574,8 @@ ia64_reorg (void)
else
emit_all_insn_group_barriers (dump_file);
df_analyze ();
/* A call must not be the last instruction in a function, so that the
return address is still within the function, so that unwinding works
properly. Note that IA-64 differs from dwarf2 on this point. */
@ -8521,6 +8612,7 @@ ia64_reorg (void)
variable_tracking_main ();
timevar_pop (TV_VAR_TRACKING);
}
df_finish_pass ();
}
/* Return true if REGNO is used by the epilogue. */
@ -8567,24 +8659,18 @@ ia64_epilogue_uses (int regno)
int
ia64_eh_uses (int regno)
{
enum ia64_frame_regs r;
if (! reload_completed)
return 0;
if (current_frame_info.reg_save_b0
&& regno == current_frame_info.reg_save_b0)
return 1;
if (current_frame_info.reg_save_pr
&& regno == current_frame_info.reg_save_pr)
return 1;
if (current_frame_info.reg_save_ar_pfs
&& regno == current_frame_info.reg_save_ar_pfs)
return 1;
if (current_frame_info.reg_save_ar_unat
&& regno == current_frame_info.reg_save_ar_unat)
return 1;
if (current_frame_info.reg_save_ar_lc
&& regno == current_frame_info.reg_save_ar_lc)
return 1;
if (regno == 0)
return 0;
for (r = reg_save_b0; r <= reg_save_ar_lc; r++)
if (regno == current_frame_info.r[r]
|| regno == emitted_frame_related_regs[r])
return 1;
return 0;
}
@ -8737,7 +8823,7 @@ process_set (FILE *asm_out_file, rtx pat, rtx insn, bool unwind, bool frame)
/* If this is the final destination for ar.pfs, then this must
be the alloc in the prologue. */
if (dest_regno == current_frame_info.reg_save_ar_pfs)
if (dest_regno == current_frame_info.r[reg_save_ar_pfs])
{
if (unwind)
fprintf (asm_out_file, "\t.save ar.pfs, r%d\n",
@ -8802,28 +8888,28 @@ process_set (FILE *asm_out_file, rtx pat, rtx insn, bool unwind, bool frame)
{
case BR_REG (0):
/* Saving return address pointer. */
gcc_assert (dest_regno == current_frame_info.reg_save_b0);
gcc_assert (dest_regno == current_frame_info.r[reg_save_b0]);
if (unwind)
fprintf (asm_out_file, "\t.save rp, r%d\n",
ia64_dbx_register_number (dest_regno));
return 1;
case PR_REG (0):
gcc_assert (dest_regno == current_frame_info.reg_save_pr);
gcc_assert (dest_regno == current_frame_info.r[reg_save_pr]);
if (unwind)
fprintf (asm_out_file, "\t.save pr, r%d\n",
ia64_dbx_register_number (dest_regno));
return 1;
case AR_UNAT_REGNUM:
gcc_assert (dest_regno == current_frame_info.reg_save_ar_unat);
gcc_assert (dest_regno == current_frame_info.r[reg_save_ar_unat]);
if (unwind)
fprintf (asm_out_file, "\t.save ar.unat, r%d\n",
ia64_dbx_register_number (dest_regno));
return 1;
case AR_LC_REGNUM:
gcc_assert (dest_regno == current_frame_info.reg_save_ar_lc);
gcc_assert (dest_regno == current_frame_info.r[reg_save_ar_lc]);
if (unwind)
fprintf (asm_out_file, "\t.save ar.lc, r%d\n",
ia64_dbx_register_number (dest_regno));
@ -8880,31 +8966,31 @@ process_set (FILE *asm_out_file, rtx pat, rtx insn, bool unwind, bool frame)
switch (src_regno)
{
case BR_REG (0):
gcc_assert (!current_frame_info.reg_save_b0);
gcc_assert (!current_frame_info.r[reg_save_b0]);
if (unwind)
fprintf (asm_out_file, "\t%s rp, %ld\n", saveop, off);
return 1;
case PR_REG (0):
gcc_assert (!current_frame_info.reg_save_pr);
gcc_assert (!current_frame_info.r[reg_save_pr]);
if (unwind)
fprintf (asm_out_file, "\t%s pr, %ld\n", saveop, off);
return 1;
case AR_LC_REGNUM:
gcc_assert (!current_frame_info.reg_save_ar_lc);
gcc_assert (!current_frame_info.r[reg_save_ar_lc]);
if (unwind)
fprintf (asm_out_file, "\t%s ar.lc, %ld\n", saveop, off);
return 1;
case AR_PFS_REGNUM:
gcc_assert (!current_frame_info.reg_save_ar_pfs);
gcc_assert (!current_frame_info.r[reg_save_ar_pfs]);
if (unwind)
fprintf (asm_out_file, "\t%s ar.pfs, %ld\n", saveop, off);
return 1;
case AR_UNAT_REGNUM:
gcc_assert (!current_frame_info.reg_save_ar_unat);
gcc_assert (!current_frame_info.r[reg_save_ar_unat]);
if (unwind)
fprintf (asm_out_file, "\t%s ar.unat, %ld\n", saveop, off);
return 1;

View file

@ -1,5 +1,5 @@
/* Definitions of target machine GNU compiler. IA-64 version.
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by James E. Wilson <wilson@cygnus.com> and
David Mosberger <davidm@hpl.hp.com>.
@ -985,6 +985,7 @@ enum reg_class
pointer is not 16-byte aligned like the stack pointer. */
#define INIT_EXPANDERS \
do { \
ia64_init_expanders (); \
if (cfun && cfun->emit->regno_pointer_align) \
REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = 64; \
} while (0)

View file

@ -1,5 +1,5 @@
/* Subroutines used for code generation on Vitesse IQ2000 processors
Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -1964,13 +1964,6 @@ iq2000_expand_prologue (void)
PUT_CODE (SET_SRC (pattern), ASHIFTRT);
insn = emit_insn (pattern);
/* Global life information isn't valid at this point, so we
can't check whether these shifts are actually used. Mark
them MAYBE_DEAD so that flow2 will remove them, and not
complain about dead code in the prologue. */
REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
REG_NOTES (insn));
}
}
@ -2119,7 +2112,7 @@ iq2000_can_use_return_insn (void)
if (! reload_completed)
return 0;
if (regs_ever_live[31] || profile_flag)
if (df_regs_ever_live_p (31) || profile_flag)
return 0;
if (cfun->machine->initialized)

View file

@ -1,6 +1,6 @@
/* Definitions of target machine for GNU compiler.
Vitesse IQ2000 processors
Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -974,9 +974,9 @@ enum processor_type
/* Tell prologue and epilogue if register REGNO should be saved / restored. */
#define MUST_SAVE_REGISTER(regno) \
((regs_ever_live[regno] && !call_used_regs[regno]) \
((df_regs_ever_live_p (regno) && !call_used_regs[regno]) \
|| (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed) \
|| (regno == (GP_REG_FIRST + 31) && regs_ever_live[GP_REG_FIRST + 31]))
|| (regno == (GP_REG_FIRST + 31) && df_regs_ever_live_p (GP_REG_FIRST + 31)))
/* ALIGN FRAMES on double word boundaries */
#ifndef IQ2000_STACK_ALIGN

View file

@ -1,5 +1,5 @@
/* Target Code for R8C/M16C/M32C
Copyright (C) 2005
Copyright (C) 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Red Hat.
@ -1248,7 +1248,7 @@ need_to_save (int regno)
if (cfun->machine->is_interrupt
&& (!cfun->machine->is_leaf || regno == A0_REGNO))
return 1;
if (regs_ever_live[regno]
if (df_regs_ever_live_p (regno)
&& (!call_used_regs[regno] || cfun->machine->is_interrupt))
return 1;
return 0;

View file

@ -1254,10 +1254,10 @@ static struct m32r_frame_info zero_frame_info;
Don't consider them here. */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
&& (regs_ever_live[regno] && (!call_really_used_regs[regno] || interrupt_p)))
&& (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
#define MUST_SAVE_FRAME_POINTER (regs_ever_live[FRAME_POINTER_REGNUM])
#define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM] || current_function_profile)
#define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || current_function_profile)
#define SHORT_INSN_SIZE 2 /* Size of small instructions. */
#define LONG_INSN_SIZE 4 /* Size of long instructions. */
@ -2449,7 +2449,7 @@ m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
{
/* Interrupt routines can't clobber any register that isn't already used. */
if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
&& !regs_ever_live[new_reg])
&& !df_regs_ever_live_p (new_reg))
return 0;
return 1;

View file

@ -1,5 +1,5 @@
/* Subroutines for code generation on Motorola 68HC11 and 68HC12.
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Stephane Carrez (stcarrez@nerim.fr)
@ -1336,7 +1336,7 @@ m68hc11_initial_elimination_offset (int from, int to)
/* Push any 2 byte pseudo hard registers that we need to save. */
for (regno = SOFT_REG_FIRST; regno < SOFT_REG_LAST; regno++)
{
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
{
size += 2;
}
@ -1550,7 +1550,7 @@ m68hc11_total_frame_size (void)
size += HARD_REG_SIZE;
for (regno = SOFT_REG_FIRST; regno <= SOFT_REG_LAST; regno++)
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
size += HARD_REG_SIZE;
return size;
@ -1662,7 +1662,7 @@ expand_prologue (void)
/* Push any 2 byte pseudo hard registers that we need to save. */
for (regno = SOFT_REG_FIRST; regno <= SOFT_REG_LAST; regno++)
{
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
{
emit_move_after_reload (stack_push_word,
gen_rtx_REG (HImode, regno), scratch);
@ -1700,7 +1700,7 @@ expand_epilogue (void)
/* Pop any 2 byte pseudo hard registers that we saved. */
for (regno = SOFT_REG_LAST; regno >= SOFT_REG_FIRST; regno--)
{
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
{
emit_move_after_reload (gen_rtx_REG (HImode, regno),
stack_pop_word, scratch);

View file

@ -43,6 +43,7 @@ Boston, MA 02110-1301, USA. */
#include "target-def.h"
#include "debug.h"
#include "flags.h"
#include "df.h"
enum reg_class regno_reg_class[] =
{
@ -818,7 +819,7 @@ m68k_save_reg (unsigned int regno, bool interrupt_handler)
if they are live or when calling nested functions. */
if (interrupt_handler)
{
if (regs_ever_live[regno])
if (df_regs_ever_live_p (regno))
return true;
if (!current_function_is_leaf && call_used_regs[regno])
@ -826,7 +827,7 @@ m68k_save_reg (unsigned int regno, bool interrupt_handler)
}
/* Never need to save registers that aren't touched. */
if (!regs_ever_live[regno])
if (!df_regs_ever_live_p (regno))
return false;
/* Otherwise save everything that isn't call-clobbered. */
@ -1037,12 +1038,7 @@ m68k_expand_prologue (void)
if (flag_pic
&& !TARGET_SEP_DATA
&& current_function_uses_pic_offset_table)
{
insn = emit_insn (gen_load_got (pic_offset_table_rtx));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
const0_rtx,
REG_NOTES (insn));
}
insn = emit_insn (gen_load_got (pic_offset_table_rtx));
}
/* Return true if a simple (return) instruction is sufficient for this
@ -4143,7 +4139,6 @@ m68k_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
/* Pretend to be a post-reload pass while generating rtl. */
no_new_pseudos = 1;
reload_completed = 1;
allocate_reg_info (FIRST_PSEUDO_REGISTER, true, true);
/* The "this" pointer is stored at 4(%sp). */
this_slot = gen_rtx_MEM (Pmode, plus_constant (stack_pointer_rtx, 4));
@ -4198,7 +4193,7 @@ m68k_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
/* Use the static chain register as a temporary (call-clobbered)
GOT pointer for this function. We can use the static chain
register because it isn't live on entry to the thunk. */
REGNO (pic_offset_table_rtx) = STATIC_CHAIN_REGNUM;
SET_REGNO (pic_offset_table_rtx, STATIC_CHAIN_REGNUM);
emit_insn (gen_load_got (pic_offset_table_rtx));
}
legitimize_pic_address (XEXP (mem, 0), Pmode, static_chain_rtx);
@ -4220,7 +4215,7 @@ m68k_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
/* Restore the original PIC register. */
if (flag_pic)
REGNO (pic_offset_table_rtx) = PIC_REG;
SET_REGNO (pic_offset_table_rtx, PIC_REG);
}
/* Worker function for TARGET_STRUCT_VALUE_RTX. */
@ -4244,7 +4239,7 @@ m68k_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
if ((m68k_get_function_kind (current_function_decl)
== m68k_fk_interrupt_handler)
&& !regs_ever_live[new_reg])
&& !df_regs_ever_live_p (new_reg))
return 0;
return 1;

View file

@ -267,7 +267,7 @@ calc_live_regs (int * count)
for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
{
if (regs_ever_live[reg] && !call_used_regs[reg])
if (df_regs_ever_live_p (reg) && !call_used_regs[reg])
{
(*count)++;
live_regs_mask |= (1 << reg);

View file

@ -1,6 +1,7 @@
/* Subroutines used for MIPS code generation.
Copyright (C) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by A. Lichnewsky, lich@inria.inria.fr.
Changes by Michael Meissner, meissner@osf.org.
64-bit r4000 support by Ian Lance Taylor, ian@cygnus.com, and
@ -6337,7 +6338,7 @@ mips_global_pointer (void)
In cases like these, reload will have added the constant to the pool
but no instruction will yet refer to it. */
if (!regs_ever_live[GLOBAL_POINTER_REGNUM]
if (!df_regs_ever_live_p (GLOBAL_POINTER_REGNUM)
&& !current_function_uses_const_pool
&& !mips_function_has_gp_insn ())
return 0;
@ -6346,7 +6347,7 @@ mips_global_pointer (void)
register instead of $gp. */
if (TARGET_CALL_SAVED_GP && current_function_is_leaf)
for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
if (!regs_ever_live[regno]
if (!df_regs_ever_live_p (regno)
&& call_used_regs[regno]
&& !fixed_regs[regno]
&& regno != PIC_FUNCTION_ADDR_REGNUM)
@ -6412,15 +6413,15 @@ mips_save_reg_p (unsigned int regno)
return TARGET_CALL_SAVED_GP && cfun->machine->global_pointer == regno;
/* Check call-saved registers. */
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
return true;
/* Save both registers in an FPR pair if either one is used. This is
needed for the case when MIN_FPRS_PER_FMT == 1, which allows the odd
register to be used without the even register. */
if (FP_REG_P (regno)
&& MAX_FPRS_PER_FMT == 2
&& regs_ever_live[regno + 1]
&& MAX_FPRS_PER_FMT == 2
&& df_regs_ever_live_p (regno + 1)
&& !call_used_regs[regno + 1])
return true;
@ -6430,7 +6431,7 @@ mips_save_reg_p (unsigned int regno)
/* We need to save the incoming return address if it is ever clobbered
within the function. */
if (regno == GP_REG_FIRST + 31 && regs_ever_live[regno])
if (regno == GP_REG_FIRST + 31 && df_regs_ever_live_p (regno))
return true;
if (TARGET_MIPS16)
@ -6438,7 +6439,7 @@ mips_save_reg_p (unsigned int regno)
/* $18 is a special case in mips16 code. It may be used to call
a function which returns a floating point value, but it is
marked in call_used_regs. */
if (regno == GP_REG_FIRST + 18 && regs_ever_live[regno])
if (regno == GP_REG_FIRST + 18 && df_regs_ever_live_p (regno))
return true;
/* $31 is also a special case. It will be used to copy a return
@ -6990,7 +6991,7 @@ mips_expand_prologue (void)
HOST_WIDE_INT size;
if (cfun->machine->global_pointer > 0)
REGNO (pic_offset_table_rtx) = cfun->machine->global_pointer;
SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
size = compute_frame_size (get_frame_size ());
@ -7099,7 +7100,7 @@ mips_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
/* Reinstate the normal $gp. */
REGNO (pic_offset_table_rtx) = GLOBAL_POINTER_REGNUM;
SET_REGNO (pic_offset_table_rtx, GLOBAL_POINTER_REGNUM);
mips_output_cplocal ();
if (cfun->machine->all_noreorder_p)
@ -7296,7 +7297,7 @@ mips_can_use_return_insn (void)
if (! reload_completed)
return 0;
if (regs_ever_live[31] || current_function_profile)
if (df_regs_ever_live_p (31) || current_function_profile)
return 0;
/* In mips16 mode, a function that returns a floating point value
@ -7331,9 +7332,13 @@ mips_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
/* Pick a global pointer. Use a call-clobbered register if
TARGET_CALL_SAVED_GP, so that we can use a sibcall. */
if (TARGET_USE_GOT)
cfun->machine->global_pointer
= REGNO (pic_offset_table_rtx)
= TARGET_CALL_SAVED_GP ? 15 : GLOBAL_POINTER_REGNUM;
{
cfun->machine->global_pointer =
TARGET_CALL_SAVED_GP ? 15 : GLOBAL_POINTER_REGNUM;
SET_REGNO (pic_offset_table_rtx, cfun->machine->global_pointer);
}
/* Set up the global pointer for n32 or n64 abicalls. If
LOADGP_ABSOLUTE then the thunk does not use the gp and there is
@ -8387,7 +8392,7 @@ build_mips16_call_stub (rtx retval, rtx fn, rtx arg_size, int fp_code)
/* If we are handling a floating point return value, we need to
save $18 in the function prologue. Putting a note on the
call will mean that regs_ever_live[$18] will be true if the
call will mean that df_regs_ever_live_p ($18) will be true if the
call is not eliminated, and we can check that in the prologue
code. */
if (fpret)

View file

@ -1,6 +1,7 @@
;; Mips.md Machine Description for MIPS based processors
;; Copyright (C) 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
;; 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
;; 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
;; Free Software Foundation, Inc.
;; Contributed by A. Lichnewsky, lich@inria.inria.fr
;; Changes by Michael Meissner, meissner@osf.org
;; 64-bit r4000 support by Ian Lance Taylor, ian@cygnus.com, and
@ -3094,7 +3095,7 @@
(high:DI (match_operand:DI 1 "general_symbolic_operand" "")))]
"TARGET_EXPLICIT_RELOCS && ABI_HAS_64BIT_SYMBOLS"
"#"
"&& flow2_completed"
"&& epilogue_completed"
[(set (match_dup 0) (high:DI (match_dup 2)))
(set (match_dup 0) (lo_sum:DI (match_dup 0) (match_dup 2)))
(set (match_dup 0) (ashift:DI (match_dup 0) (const_int 16)))

View file

@ -1,5 +1,5 @@
/* Definitions of target machine for GNU compiler, for MMIX.
Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Hans-Peter Nilsson (hp@bitrange.com)
@ -56,7 +56,7 @@ Boston, MA 02110-1301, USA. */
/* We have no means to tell DWARF 2 about the register stack, so we need
to store the return address on the stack if an exception can get into
this function. FIXME: Narrow condition. Before any whole-function
analysis, regs_ever_live[] isn't initialized. We know it's up-to-date
analysis, df_regs_ever_live_p () isn't initialized. We know it's up-to-date
after reload_completed; it may contain incorrect information some time
before that. Within a RTL sequence (after a call to start_sequence,
such as in RTL expanders), leaf_function_p doesn't see all insns
@ -66,7 +66,7 @@ Boston, MA 02110-1301, USA. */
preferable. */
#define MMIX_CFUN_NEEDS_SAVED_EH_RETURN_ADDRESS \
(flag_exceptions \
&& ((reload_completed && regs_ever_live[MMIX_rJ_REGNUM]) \
&& ((reload_completed && df_regs_ever_live_p (MMIX_rJ_REGNUM)) \
|| !leaf_function_p ()))
#define IS_MMIX_EH_RETURN_DATA_REG(REGNO) \
@ -547,7 +547,7 @@ mmix_initial_elimination_offset (int fromreg, int toreg)
for (regno = MMIX_FIRST_GLOBAL_REGNUM;
regno <= 255;
regno++)
if ((regs_ever_live[regno] && ! call_used_regs[regno])
if ((df_regs_ever_live_p (regno) && ! call_used_regs[regno])
|| IS_MMIX_EH_RETURN_DATA_REG (regno))
fp_sp_offset += 8;
@ -765,7 +765,7 @@ mmix_reorg (void)
for (regno = MMIX_LAST_STACK_REGISTER_REGNUM;
regno >= 0;
regno--)
if ((regs_ever_live[regno] && !call_used_regs[regno])
if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
|| (regno == MMIX_FRAME_POINTER_REGNUM && frame_pointer_needed))
break;
@ -774,7 +774,7 @@ mmix_reorg (void)
insns to see whether they're actually used (and indeed do other less
trivial register usage analysis and transformations), but it seems
wasteful to optimize for unused parameter registers. As of
2002-04-30, regs_ever_live[n] seems to be set for only-reads too, but
2002-04-30, df_regs_ever_live_p (n) seems to be set for only-reads too, but
that might change. */
if (!TARGET_ABI_GNU && regno < current_function_args_info.regs - 1)
{
@ -1836,7 +1836,7 @@ mmix_use_simple_return (void)
/* Note that we assume that the frame-pointer-register is one of these
registers, in which case we don't count it here. */
if ((((regno != MMIX_FRAME_POINTER_REGNUM || !frame_pointer_needed)
&& regs_ever_live[regno] && !call_used_regs[regno]))
&& df_regs_ever_live_p (regno) && !call_used_regs[regno]))
|| IS_MMIX_EH_RETURN_DATA_REG (regno))
return 0;
@ -1872,7 +1872,7 @@ mmix_expand_prologue (void)
/* Note that we assume that the frame-pointer-register is one of these
registers, in which case we don't count it here. */
if ((((regno != MMIX_FRAME_POINTER_REGNUM || !frame_pointer_needed)
&& regs_ever_live[regno] && !call_used_regs[regno]))
&& df_regs_ever_live_p (regno) && !call_used_regs[regno]))
|| IS_MMIX_EH_RETURN_DATA_REG (regno))
stack_space_to_allocate += 8;
@ -2057,7 +2057,7 @@ mmix_expand_prologue (void)
regno >= MMIX_FIRST_GLOBAL_REGNUM;
regno--)
if (((regno != MMIX_FRAME_POINTER_REGNUM || !frame_pointer_needed)
&& regs_ever_live[regno] && ! call_used_regs[regno])
&& df_regs_ever_live_p (regno) && ! call_used_regs[regno])
|| IS_MMIX_EH_RETURN_DATA_REG (regno))
{
rtx insn;
@ -2109,7 +2109,7 @@ mmix_expand_epilogue (void)
regno >= MMIX_FIRST_GLOBAL_REGNUM;
regno--)
if (((regno != MMIX_FRAME_POINTER_REGNUM || !frame_pointer_needed)
&& regs_ever_live[regno] && !call_used_regs[regno])
&& df_regs_ever_live_p (regno) && !call_used_regs[regno])
|| IS_MMIX_EH_RETURN_DATA_REG (regno))
stack_space_to_deallocate += 8;
@ -2138,7 +2138,7 @@ mmix_expand_epilogue (void)
regno <= 255;
regno++)
if (((regno != MMIX_FRAME_POINTER_REGNUM || !frame_pointer_needed)
&& regs_ever_live[regno] && !call_used_regs[regno])
&& df_regs_ever_live_p (regno) && !call_used_regs[regno])
|| IS_MMIX_EH_RETURN_DATA_REG (regno))
{
if (offset > 255)

View file

@ -1,6 +1,6 @@
/* Subroutines for insn-output.c for Matsushita MN10300 series
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Jeff Law (law@cygnus.com).
This file is part of GCC.
@ -60,12 +60,12 @@ enum processor_type mn10300_processor = PROCESSOR_DEFAULT;
/* The size of the callee register save area. Right now we save everything
on entry since it costs us nothing in code size. It does cost us from a
speed standpoint, so we want to optimize this sooner or later. */
#define REG_SAVE_BYTES (4 * regs_ever_live[2] \
+ 4 * regs_ever_live[3] \
+ 4 * regs_ever_live[6] \
+ 4 * regs_ever_live[7] \
+ 16 * (regs_ever_live[14] || regs_ever_live[15] \
|| regs_ever_live[16] || regs_ever_live[17]))
#define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
+ 4 * df_regs_ever_live_p (3) \
+ 4 * df_regs_ever_live_p (6) \
+ 4 * df_regs_ever_live_p (7) \
+ 16 * (df_regs_ever_live_p (14) || df_regs_ever_live_p (15) \
|| df_regs_ever_live_p (16) || df_regs_ever_live_p (17)))
static bool mn10300_handle_option (size_t, const char *, int);
@ -537,7 +537,7 @@ fp_regs_to_save (void)
return 0;
for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
++n;
return n;
@ -590,14 +590,14 @@ can_use_return_insn (void)
return (reload_completed
&& size == 0
&& !regs_ever_live[2]
&& !regs_ever_live[3]
&& !regs_ever_live[6]
&& !regs_ever_live[7]
&& !regs_ever_live[14]
&& !regs_ever_live[15]
&& !regs_ever_live[16]
&& !regs_ever_live[17]
&& !df_regs_ever_live_p (2)
&& !df_regs_ever_live_p (3)
&& !df_regs_ever_live_p (6)
&& !df_regs_ever_live_p (7)
&& !df_regs_ever_live_p (14)
&& !df_regs_ever_live_p (15)
&& !df_regs_ever_live_p (16)
&& !df_regs_ever_live_p (17)
&& fp_regs_to_save () == 0
&& !frame_pointer_needed);
}
@ -614,7 +614,7 @@ mn10300_get_live_callee_saved_regs (void)
mask = 0;
for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
mask |= (1 << i);
if ((mask & 0x3c000) != 0)
mask |= 0x3c000;
@ -907,7 +907,7 @@ expand_prologue (void)
/* Now actually save the FP registers. */
for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
rtx addr;
@ -944,24 +944,8 @@ expand_prologue (void)
emit_insn (gen_addsi3 (stack_pointer_rtx,
stack_pointer_rtx,
GEN_INT (-size)));
if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
{
rtx insn = get_last_insn ();
rtx last = emit_insn (gen_GOTaddr2picreg ());
/* Mark these insns as possibly dead. Sometimes, flow2 may
delete all uses of the PIC register. In this case, let it
delete the initialization too. */
do
{
insn = NEXT_INSN (insn);
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
const0_rtx,
REG_NOTES (insn));
}
while (insn != last);
}
if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
emit_insn (gen_GOTaddr2picreg ());
}
void
@ -1127,7 +1111,7 @@ expand_epilogue (void)
reg = gen_rtx_POST_INC (SImode, reg);
for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
rtx addr;
@ -1189,10 +1173,10 @@ expand_epilogue (void)
}
/* Adjust the stack and restore callee-saved registers, if any. */
if (size || regs_ever_live[2] || regs_ever_live[3]
|| regs_ever_live[6] || regs_ever_live[7]
|| regs_ever_live[14] || regs_ever_live[15]
|| regs_ever_live[16] || regs_ever_live[17]
if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
|| df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
|| df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
|| df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
|| frame_pointer_needed)
emit_jump_insn (gen_return_internal_regs
(GEN_INT (size + REG_SAVE_BYTES)));
@ -1401,10 +1385,10 @@ initial_offset (int from, int to)
is the size of the callee register save area. */
if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
{
if (regs_ever_live[2] || regs_ever_live[3]
|| regs_ever_live[6] || regs_ever_live[7]
|| regs_ever_live[14] || regs_ever_live[15]
|| regs_ever_live[16] || regs_ever_live[17]
if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
|| df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
|| df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
|| df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
|| fp_regs_to_save ()
|| frame_pointer_needed)
return REG_SAVE_BYTES
@ -1418,10 +1402,10 @@ initial_offset (int from, int to)
area, and the fixed stack space needed for function calls (if any). */
if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
{
if (regs_ever_live[2] || regs_ever_live[3]
|| regs_ever_live[6] || regs_ever_live[7]
|| regs_ever_live[14] || regs_ever_live[15]
|| regs_ever_live[16] || regs_ever_live[17]
if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
|| df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
|| df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
|| df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
|| fp_regs_to_save ()
|| frame_pointer_needed)
return (get_frame_size () + REG_SAVE_BYTES

View file

@ -1,5 +1,5 @@
/* Target definitions for the MorphoRISC1
Copyright (C) 2005 Free Software Foundation, Inc.
Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Red Hat, Inc.
This file is part of GCC.
@ -353,7 +353,7 @@ mt_print_operand_simple_address (FILE * file, rtx addr)
switch (GET_CODE (addr))
{
case REG:
fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
fprintf (file, "%s, #0", reg_names[REGNO (addr)]);
break;
case PLUS:
@ -375,11 +375,11 @@ mt_print_operand_simple_address (FILE * file, rtx addr)
reg = arg1, offset = arg0;
else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
{
fprintf (file, "%s, #", reg_names [GPR_R0]);
fprintf (file, "%s, #", reg_names[GPR_R0]);
output_addr_const (file, addr);
break;
}
fprintf (file, "%s, #", reg_names [REGNO (reg)]);
fprintf (file, "%s, #", reg_names[REGNO (reg)]);
output_addr_const (file, offset);
break;
}
@ -457,7 +457,7 @@ mt_print_operand (FILE * file, rtx x, int code)
switch (GET_CODE (x))
{
case REG:
fputs (reg_names [REGNO (x)], file);
fputs (reg_names[REGNO (x)], file);
break;
case CONST:
@ -884,10 +884,10 @@ mt_compute_frame_size (int size)
}
}
current_frame_info.save_fp = (regs_ever_live [GPR_FP]
current_frame_info.save_fp = (df_regs_ever_live_p (GPR_FP)
|| frame_pointer_needed
|| interrupt_handler);
current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
current_frame_info.save_lr = (df_regs_ever_live_p (GPR_LINK)
|| profile_flag
|| interrupt_handler);

View file

@ -1,5 +1,5 @@
/* Target Definitions for MorphoRISC1
Copyright (C) 2005 Free Software Foundation, Inc.
Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Red Hat, Inc.
This file is part of GCC.
@ -414,11 +414,11 @@ enum save_direction
&& (regno) != GPR_FP \
&& (regno) != GPR_SP \
&& (regno) != GPR_R0 \
&& (( regs_ever_live [regno] && ! call_used_regs [regno] ) \
&& (( df_regs_ever_live_p (regno) && ! call_used_regs[regno] ) \
/* Save ira register in an interrupt handler. */ \
|| (interrupt_handler && (regno) == GPR_INTERRUPT_LINK) \
/* Save any register used in an interrupt handler. */ \
|| (interrupt_handler && regs_ever_live [regno]) \
|| (interrupt_handler && df_regs_ever_live_p (regno)) \
/* Save call clobbered registers in non-leaf interrupt \
handlers. */ \
|| (interrupt_handler && call_used_regs[regno] \

View file

@ -1,6 +1,6 @@
/* Subroutines for insn-output.c for HPPA.
Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Tim Moore (moore@cs.utah.edu), based on sparc.c
This file is part of GCC.
@ -3491,13 +3491,13 @@ compute_frame_size (HOST_WIDE_INT size, int *fregs_live)
/* Account for space used by the callee general register saves. */
for (i = 18, j = frame_pointer_needed ? 4 : 3; i >= j; i--)
if (regs_ever_live[i])
if (df_regs_ever_live_p (i))
size += UNITS_PER_WORD;
/* Account for space used by the callee floating point register saves. */
for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
if (regs_ever_live[i]
|| (!TARGET_64BIT && regs_ever_live[i + 1]))
if (df_regs_ever_live_p (i)
|| (!TARGET_64BIT && df_regs_ever_live_p (i + 1)))
{
freg_saved = 1;
@ -3562,7 +3562,7 @@ pa_output_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
to output the assembler directives which denote the start
of a function. */
fprintf (file, "\t.CALLINFO FRAME=" HOST_WIDE_INT_PRINT_DEC, actual_fsize);
if (regs_ever_live[2])
if (df_regs_ever_live_p (2))
fputs (",CALLS,SAVE_RP", file);
else
fputs (",NO_CALLS", file);
@ -3626,7 +3626,7 @@ hppa_expand_prologue (void)
/* Save RP first. The calling conventions manual states RP will
always be stored into the caller's frame at sp - 20 or sp - 16
depending on which ABI is in use. */
if (regs_ever_live[2] || current_function_calls_eh_return)
if (df_regs_ever_live_p (2) || current_function_calls_eh_return)
store_reg (2, TARGET_64BIT ? -16 : -20, STACK_POINTER_REGNUM);
/* Allocate the local frame and set up the frame pointer if needed. */
@ -3737,7 +3737,7 @@ hppa_expand_prologue (void)
}
for (i = 18; i >= 4; i--)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
store_reg (i, offset, FRAME_POINTER_REGNUM);
offset += UNITS_PER_WORD;
@ -3777,7 +3777,7 @@ hppa_expand_prologue (void)
}
for (i = 18; i >= 3; i--)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
/* If merge_sp_adjust_with_store is nonzero, then we can
optimize the first GR save. */
@ -3840,8 +3840,8 @@ hppa_expand_prologue (void)
/* Now actually save the FP registers. */
for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
{
if (regs_ever_live[i]
|| (! TARGET_64BIT && regs_ever_live[i + 1]))
if (df_regs_ever_live_p (i)
|| (! TARGET_64BIT && df_regs_ever_live_p (i + 1)))
{
rtx addr, insn, reg;
addr = gen_rtx_MEM (DFmode, gen_rtx_POST_INC (DFmode, tmpreg));
@ -4029,7 +4029,7 @@ hppa_expand_epilogue (void)
/* Try to restore RP early to avoid load/use interlocks when
RP gets used in the return (bv) instruction. This appears to still
be necessary even when we schedule the prologue and epilogue. */
if (regs_ever_live [2] || current_function_calls_eh_return)
if (df_regs_ever_live_p (2) || current_function_calls_eh_return)
{
ret_off = TARGET_64BIT ? -16 : -20;
if (frame_pointer_needed)
@ -4071,7 +4071,7 @@ hppa_expand_epilogue (void)
}
for (i = 18; i >= 4; i--)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
load_reg (i, offset, FRAME_POINTER_REGNUM);
offset += UNITS_PER_WORD;
@ -4108,7 +4108,7 @@ hppa_expand_epilogue (void)
for (i = 18; i >= 3; i--)
{
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
/* Only for the first load.
merge_sp_adjust_with_load holds the register load
@ -4138,8 +4138,8 @@ hppa_expand_epilogue (void)
/* Actually do the restores now. */
for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
if (regs_ever_live[i]
|| (! TARGET_64BIT && regs_ever_live[i + 1]))
if (df_regs_ever_live_p (i)
|| (! TARGET_64BIT && df_regs_ever_live_p (i + 1)))
{
rtx src = gen_rtx_MEM (DFmode, gen_rtx_POST_INC (DFmode, tmpreg));
rtx dest = gen_rtx_REG (DFmode, i);
@ -4414,7 +4414,7 @@ hppa_can_use_return_insn_p (void)
{
return (reload_completed
&& (compute_frame_size (get_frame_size (), 0) ? 0 : 1)
&& ! regs_ever_live[2]
&& ! df_regs_ever_live_p (2)
&& ! frame_pointer_needed);
}
@ -6256,7 +6256,7 @@ output_lbranch (rtx dest, rtx insn, int xdelay)
for other purposes. */
if (TARGET_64BIT)
{
if (actual_fsize == 0 && !regs_ever_live[2])
if (actual_fsize == 0 && !df_regs_ever_live_p (2))
/* Use the return pointer slot in the frame marker. */
output_asm_insn ("std %%r1,-16(%%r30)", xoperands);
else
@ -6266,7 +6266,7 @@ output_lbranch (rtx dest, rtx insn, int xdelay)
}
else
{
if (actual_fsize == 0 && !regs_ever_live[2])
if (actual_fsize == 0 && !df_regs_ever_live_p (2))
/* Use the return pointer slot in the frame marker. */
output_asm_insn ("stw %%r1,-20(%%r30)", xoperands);
else
@ -6310,14 +6310,14 @@ output_lbranch (rtx dest, rtx insn, int xdelay)
/* Now restore the value of %r1 in the delay slot. */
if (TARGET_64BIT)
{
if (actual_fsize == 0 && !regs_ever_live[2])
if (actual_fsize == 0 && !df_regs_ever_live_p (2))
return "ldd -16(%%r30),%%r1";
else
return "ldd -40(%%r30),%%r1";
}
else
{
if (actual_fsize == 0 && !regs_ever_live[2])
if (actual_fsize == 0 && !df_regs_ever_live_p (2))
return "ldw -20(%%r30),%%r1";
else
return "ldw -12(%%r30),%%r1";

View file

@ -1,6 +1,6 @@
/* Definitions of target machine for GNU compiler, for the HP Spectrum.
Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Michael Tiemann (tiemann@cygnus.com) of Cygnus Support
and Tim Moore (moore@defmacro.cs.utah.edu) of the Center for
Software Science at the University of Utah.
@ -372,7 +372,7 @@ typedef struct machine_function GTY(())
is already live or already being saved (due to eh). */
#define HARD_REGNO_RENAME_OK(OLD_REG, NEW_REG) \
((NEW_REG) != 2 || regs_ever_live[2] || current_function_calls_eh_return)
((NEW_REG) != 2 || df_regs_ever_live_p (2) || current_function_calls_eh_return)
/* C statement to store the difference between the frame pointer
and the stack pointer values immediately after the function prologue.

View file

@ -1,6 +1,6 @@
/* Subroutines for gcc2 for pdp11.
Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2001, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2001, 2004, 2005,
2006, 2007 Free Software Foundation, Inc.
Contributed by Michael K. Gschwind (mike@vlsivie.tuwien.ac.at).
This file is part of GCC.
@ -291,7 +291,7 @@ pdp11_output_function_prologue (FILE *stream, HOST_WIDE_INT size)
/* save CPU registers */
for (regno = 0; regno < 8; regno++)
if (regs_ever_live[regno] && ! call_used_regs[regno])
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno])
if (! ((regno == FRAME_POINTER_REGNUM)
&& frame_pointer_needed))
fprintf (stream, "\tmov %s, -(sp)\n", reg_names[regno]);
@ -304,7 +304,7 @@ pdp11_output_function_prologue (FILE *stream, HOST_WIDE_INT size)
{
/* ac0 - ac3 */
if (LOAD_FPU_REG_P(regno)
&& regs_ever_live[regno]
&& df_regs_ever_live_p (regno)
&& ! call_used_regs[regno])
{
fprintf (stream, "\tstd %s, -(sp)\n", reg_names[regno]);
@ -314,7 +314,7 @@ pdp11_output_function_prologue (FILE *stream, HOST_WIDE_INT size)
/* maybe make ac4, ac5 call used regs?? */
/* ac4 - ac5 */
if (NO_LOAD_FPU_REG_P(regno)
&& regs_ever_live[regno]
&& df_regs_ever_live_p (regno)
&& ! call_used_regs[regno])
{
gcc_assert (via_ac != -1);
@ -373,10 +373,10 @@ pdp11_output_function_epilogue (FILE *stream, HOST_WIDE_INT size)
if (frame_pointer_needed)
{
/* hope this is safe - m68k does it also .... */
regs_ever_live[FRAME_POINTER_REGNUM] = 0;
df_regs_ever_live_p (FRAME_POINTER_REGNUM) = 0;
for (i =7, j = 0 ; i >= 0 ; i--)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
j++;
/* remember # of pushed bytes for CPU regs */
@ -384,14 +384,14 @@ pdp11_output_function_epilogue (FILE *stream, HOST_WIDE_INT size)
/* change fp -> r5 due to the compile error on libgcc2.c */
for (i =7 ; i >= 0 ; i--)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
fprintf(stream, "\tmov %#o(r5), %s\n",(-fsize-2*j--)&0xffff, reg_names[i]);
/* get ACs */
via_ac = FIRST_PSEUDO_REGISTER -1;
for (i = FIRST_PSEUDO_REGISTER; i > 7; i--)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
via_ac = i;
k += 8;
@ -400,7 +400,7 @@ pdp11_output_function_epilogue (FILE *stream, HOST_WIDE_INT size)
for (i = FIRST_PSEUDO_REGISTER; i > 7; i--)
{
if (LOAD_FPU_REG_P(i)
&& regs_ever_live[i]
&& df_regs_ever_live_p (i)
&& ! call_used_regs[i])
{
fprintf(stream, "\tldd %#o(r5), %s\n", (-fsize-k)&0xffff, reg_names[i]);
@ -408,7 +408,7 @@ pdp11_output_function_epilogue (FILE *stream, HOST_WIDE_INT size)
}
if (NO_LOAD_FPU_REG_P(i)
&& regs_ever_live[i]
&& df_regs_ever_live_p (i)
&& ! call_used_regs[i])
{
gcc_assert (LOAD_FPU_REG_P(via_ac));
@ -428,18 +428,18 @@ pdp11_output_function_epilogue (FILE *stream, HOST_WIDE_INT size)
/* get ACs */
for (i = FIRST_PSEUDO_REGISTER; i > 7; i--)
if (regs_ever_live[i] && call_used_regs[i])
if (df_regs_ever_live_p (i) && call_used_regs[i])
via_ac = i;
for (i = FIRST_PSEUDO_REGISTER; i > 7; i--)
{
if (LOAD_FPU_REG_P(i)
&& regs_ever_live[i]
&& df_regs_ever_live_p (i)
&& ! call_used_regs[i])
fprintf(stream, "\tldd (sp)+, %s\n", reg_names[i]);
if (NO_LOAD_FPU_REG_P(i)
&& regs_ever_live[i]
&& df_regs_ever_live_p (i)
&& ! call_used_regs[i])
{
gcc_assert (LOAD_FPU_REG_P(via_ac));
@ -450,7 +450,7 @@ pdp11_output_function_epilogue (FILE *stream, HOST_WIDE_INT size)
}
for (i=7; i >= 0; i--)
if (regs_ever_live[i] && !call_used_regs[i])
if (df_regs_ever_live_p (i) && !call_used_regs[i])
fprintf(stream, "\tmov (sp)+, %s\n", reg_names[i]);
if (fsize)

View file

@ -1,6 +1,6 @@
/* Definitions of target machine for GNU compiler, for the pdp-11
Copyright (C) 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2004, 2005,
2006, 2007 Free Software Foundation, Inc.
Contributed by Michael K. Gschwind (mike@vlsivie.tuwien.ac.at).
This file is part of GCC.
@ -566,10 +566,10 @@ extern int may_call_alloca;
int offset, regno; \
offset = get_frame_size(); \
for (regno = 0; regno < 8; regno++) \
if (regs_ever_live[regno] && ! call_used_regs[regno]) \
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno]) \
offset += 2; \
for (regno = 8; regno < 14; regno++) \
if (regs_ever_live[regno] && ! call_used_regs[regno]) \
if (df_regs_ever_live_p (regno) && ! call_used_regs[regno]) \
offset += 8; \
/* offset -= 2; no fp on stack frame */ \
(DEPTH_VAR) = offset; \

View file

@ -1,5 +1,5 @@
;; Predicate definitions for POWER and PowerPC.
;; Copyright (C) 2005, 2006 Free Software Foundation, Inc.
;; Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
@ -640,8 +640,8 @@
(match_operand 0 "reg_or_mem_operand")))
;; Return 1 if the operand is a general register or memory operand without
;; pre_inc or pre_dec, which produces invalid form of PowerPC lwa
;; instruction.
;; pre_inc or pre_dec or pre_modify, which produces invalid form of PowerPC
;; lwa instruction.
(define_predicate "lwa_operand"
(match_code "reg,subreg,mem")
{
@ -654,6 +654,8 @@
|| (memory_operand (inner, mode)
&& GET_CODE (XEXP (inner, 0)) != PRE_INC
&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
&& (GET_CODE (XEXP (inner, 0)) != PRE_MODIFY
|| legitimate_indexed_address_p (XEXP (XEXP (inner, 0), 1), 0))
&& (GET_CODE (XEXP (inner, 0)) != PLUS
|| GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
|| INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));

View file

@ -42,6 +42,7 @@ extern bool invalid_e500_subreg (rtx, enum machine_mode);
extern void validate_condition_mode (enum rtx_code, enum machine_mode);
extern bool legitimate_constant_pool_address_p (rtx);
extern bool legitimate_indirect_address_p (rtx, int);
extern bool legitimate_indexed_address_p (rtx, int);
extern rtx rs6000_got_register (rtx);
extern rtx find_addr_reg (rtx);

View file

@ -616,7 +616,6 @@ struct processor_costs power6_cost = {
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_maybe_dead (rtx);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
@ -631,7 +630,6 @@ static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_indexed_address_p (rtx, int);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
@ -3098,7 +3096,7 @@ rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
return (offset < 0x10000) && (offset + extra < 0x10000);
}
static bool
bool
legitimate_indexed_address_p (rtx x, int strict)
{
rtx op0, op1;
@ -3459,10 +3457,7 @@ rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
last = emit_move_insn (got, tmp3);
set_unique_reg_note (last, REG_EQUAL, gsym);
REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
REG_NOTES (first));
REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
REG_NOTES (last));
maybe_encapsulate_block (first, last, gsym);
}
}
}
@ -3832,6 +3827,24 @@ rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
&& (TARGET_POWERPC64 || mode != DImode)
&& legitimate_indexed_address_p (x, reg_ok_strict))
return 1;
if (GET_CODE (x) == PRE_MODIFY
&& mode != TImode
&& mode != TFmode
&& mode != TDmode
&& ((TARGET_HARD_FLOAT && TARGET_FPRS)
|| TARGET_POWERPC64
|| ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
&& (TARGET_POWERPC64 || mode != DImode)
&& !ALTIVEC_VECTOR_MODE (mode)
&& !SPE_VECTOR_MODE (mode)
/* Restrict addressing for DI because of our SUBREG hackery. */
&& !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
&& TARGET_UPDATE
&& legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
&& (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
|| legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
&& rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
return 1;
if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
return 1;
return 0;
@ -3864,7 +3877,10 @@ rs6000_mode_dependent_address (rtx addr)
case LO_SUM:
return true;
/* Auto-increment cases are now treated generically in recog.c. */
case PRE_INC:
case PRE_DEC:
case PRE_MODIFY:
return TARGET_UPDATE;
default:
break;
@ -10465,8 +10481,8 @@ rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
/* The second flow pass currently (June 1999) can't update
regs_ever_live without disturbing other parts of the compiler, so
update it here to make the prolog/epilogue code happy. */
if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
if (no_new_pseudos && ! df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
current_function_uses_pic_offset_table = 1;
@ -10833,6 +10849,9 @@ print_operand (FILE *file, rtx x, int code)
|| GET_CODE (XEXP (x, 0)) == PRE_DEC)
output_address (plus_constant (XEXP (XEXP (x, 0), 0),
UNITS_PER_WORD));
else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
output_address (plus_constant (XEXP (XEXP (x, 0), 0),
UNITS_PER_WORD));
else
output_address (XEXP (adjust_address_nv (x, SImode,
UNITS_PER_WORD),
@ -11033,7 +11052,8 @@ print_operand (FILE *file, rtx x, int code)
/* Print `u' if this has an auto-increment or auto-decrement. */
if (GET_CODE (x) == MEM
&& (GET_CODE (XEXP (x, 0)) == PRE_INC
|| GET_CODE (XEXP (x, 0)) == PRE_DEC))
|| GET_CODE (XEXP (x, 0)) == PRE_DEC
|| GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
putc ('u', file);
return;
@ -11120,7 +11140,9 @@ print_operand (FILE *file, rtx x, int code)
case 'X':
if (GET_CODE (x) == MEM
&& legitimate_indexed_address_p (XEXP (x, 0), 0))
&& (legitimate_indexed_address_p (XEXP (x, 0), 0)
|| (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
&& legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
putc ('x', file);
return;
@ -11133,6 +11155,8 @@ print_operand (FILE *file, rtx x, int code)
if (GET_CODE (XEXP (x, 0)) == PRE_INC
|| GET_CODE (XEXP (x, 0)) == PRE_DEC)
output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
else
output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
if (small_data_operand (x, GET_MODE (x)))
@ -11180,6 +11204,8 @@ print_operand (FILE *file, rtx x, int code)
if (GET_CODE (XEXP (x, 0)) == PRE_INC
|| GET_CODE (XEXP (x, 0)) == PRE_DEC)
output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
else
output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
if (small_data_operand (x, GET_MODE (x)))
@ -11260,6 +11286,8 @@ print_operand (FILE *file, rtx x, int code)
else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
output_address (XEXP (XEXP (x, 0), 1));
else
output_address (XEXP (x, 0));
}
@ -13120,7 +13148,7 @@ first_reg_to_save (void)
/* Find lowest numbered live register. */
for (first_reg = 13; first_reg <= 31; first_reg++)
if (regs_ever_live[first_reg]
if (df_regs_ever_live_p (first_reg)
&& (! call_used_regs[first_reg]
|| (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
@ -13147,7 +13175,7 @@ first_fp_reg_to_save (void)
/* Find lowest numbered live register. */
for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
if (regs_ever_live[first_reg])
if (df_regs_ever_live_p (first_reg))
break;
return first_reg;
@ -13173,7 +13201,7 @@ first_altivec_reg_to_save (void)
/* Find lowest numbered live register. */
for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
if (regs_ever_live[i])
if (df_regs_ever_live_p (i))
break;
return i;
@ -13197,7 +13225,7 @@ compute_vrsave_mask (void)
/* First, find out if we use _any_ altivec registers. */
for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
if (regs_ever_live[i])
if (df_regs_ever_live_p (i))
mask |= ALTIVEC_REG_BIT (i);
if (mask == 0)
@ -13458,13 +13486,13 @@ rs6000_stack_info (void)
|| rs6000_ra_ever_killed ())
{
info_ptr->lr_save_p = 1;
regs_ever_live[LINK_REGISTER_REGNUM] = 1;
df_set_regs_ever_live (LINK_REGISTER_REGNUM, true);
}
/* Determine if we need to save the condition code registers. */
if (regs_ever_live[CR2_REGNO]
|| regs_ever_live[CR3_REGNO]
|| regs_ever_live[CR4_REGNO])
if (df_regs_ever_live_p (CR2_REGNO)
|| df_regs_ever_live_p (CR3_REGNO)
|| df_regs_ever_live_p (CR4_REGNO))
{
info_ptr->cr_save_p = 1;
if (DEFAULT_ABI == ABI_V4)
@ -13988,15 +14016,6 @@ rs6000_ra_ever_killed (void)
return 0;
}
/* Add a REG_MAYBE_DEAD note to the insn. */
static void
rs6000_maybe_dead (rtx insn)
{
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
const0_rtx,
REG_NOTES (insn));
}
/* Emit instructions needed to load the TOC register.
This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
a constant pool; or for SVR4 -fpic. */
@ -14004,7 +14023,7 @@ rs6000_maybe_dead (rtx insn)
void
rs6000_emit_load_toc_table (int fromprolog)
{
rtx dest, insn;
rtx dest;
dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
@ -14024,29 +14043,16 @@ rs6000_emit_load_toc_table (int fromprolog)
tmp1 = gen_reg_rtx (Pmode);
tmp2 = gen_reg_rtx (Pmode);
}
insn = emit_insn (gen_load_toc_v4_PIC_1 (lab));
if (fromprolog)
rs6000_maybe_dead (insn);
insn = emit_move_insn (tmp1,
emit_insn (gen_load_toc_v4_PIC_1 (lab));
emit_move_insn (tmp1,
gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
if (fromprolog)
rs6000_maybe_dead (insn);
insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
if (fromprolog)
rs6000_maybe_dead (insn);
insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
if (fromprolog)
rs6000_maybe_dead (insn);
emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
}
else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
{
insn = emit_insn (gen_load_toc_v4_pic_si ());
if (fromprolog)
rs6000_maybe_dead (insn);
insn = emit_move_insn (dest,
gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
if (fromprolog)
rs6000_maybe_dead (insn);
emit_insn (gen_load_toc_v4_pic_si ());
emit_move_insn (dest, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
}
else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
{
@ -14065,13 +14071,10 @@ rs6000_emit_load_toc_table (int fromprolog)
ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (symF)));
rs6000_maybe_dead (emit_move_insn (dest,
gen_rtx_REG (Pmode,
LINK_REGISTER_REGNUM)));
rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
symL,
symF)));
emit_insn (gen_load_toc_v4_PIC_1 (symF));
emit_move_insn (dest,
gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
}
else
{
@ -14083,9 +14086,7 @@ rs6000_emit_load_toc_table (int fromprolog)
gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
}
insn = emit_insn (gen_addsi3 (dest, temp0, dest));
if (fromprolog)
rs6000_maybe_dead (insn);
emit_insn (gen_addsi3 (dest, temp0, dest));
}
else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
{
@ -14095,23 +14096,17 @@ rs6000_emit_load_toc_table (int fromprolog)
ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
insn = emit_insn (gen_elf_high (dest, realsym));
if (fromprolog)
rs6000_maybe_dead (insn);
insn = emit_insn (gen_elf_low (dest, dest, realsym));
if (fromprolog)
rs6000_maybe_dead (insn);
emit_insn (gen_elf_high (dest, realsym));
emit_insn (gen_elf_low (dest, dest, realsym));
}
else
{
gcc_assert (DEFAULT_ABI == ABI_AIX);
if (TARGET_32BIT)
insn = emit_insn (gen_load_toc_aix_si (dest));
emit_insn (gen_load_toc_aix_si (dest));
else
insn = emit_insn (gen_load_toc_aix_di (dest));
if (fromprolog)
rs6000_maybe_dead (insn);
emit_insn (gen_load_toc_aix_di (dest));
}
}
@ -14198,7 +14193,7 @@ rtx
create_TOC_reference (rtx symbol)
{
if (no_new_pseudos)
regs_ever_live[TOC_REGISTER] = 1;
df_set_regs_ever_live (TOC_REGISTER, true);
return gen_rtx_PLUS (Pmode,
gen_rtx_REG (Pmode, TOC_REGISTER),
gen_rtx_CONST (Pmode,
@ -14641,7 +14636,7 @@ no_global_regs_above (int first_greg)
static bool
rs6000_reg_live_or_pic_offset_p (int reg)
{
return ((regs_ever_live[reg]
return ((df_regs_ever_live_p (reg)
&& (!call_used_regs[reg]
|| (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
&& TARGET_TOC && TARGET_MINIMAL_TOC)))
@ -14881,7 +14876,7 @@ rs6000_emit_prologue (void)
{
int i;
for (i = 0; i < 64 - info->first_fp_reg_save; i++)
if ((regs_ever_live[info->first_fp_reg_save+i]
if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
&& ! call_used_regs[info->first_fp_reg_save+i]))
emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
info->first_fp_reg_save + i,
@ -14952,7 +14947,7 @@ rs6000_emit_prologue (void)
int i;
rtx spe_save_area_ptr;
int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
&& regs_ever_live[STATIC_CHAIN_REGNUM]
&& df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
&& !call_used_regs[STATIC_CHAIN_REGNUM]);
/* Determine whether we can address all of the registers that need
@ -15206,7 +15201,7 @@ rs6000_emit_prologue (void)
if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
|| (DEFAULT_ABI == ABI_V4
&& (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
&& regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
&& df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
{
/* If emit_load_toc_table will use the link register, we need to save
it. We use R12 for this purpose because emit_load_toc_table
@ -15222,13 +15217,11 @@ rs6000_emit_prologue (void)
rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
insn = emit_move_insn (frame_ptr_rtx, lr);
rs6000_maybe_dead (insn);
RTX_FRAME_RELATED_P (insn) = 1;
rs6000_emit_load_toc_table (TRUE);
insn = emit_move_insn (lr, frame_ptr_rtx);
rs6000_maybe_dead (insn);
RTX_FRAME_RELATED_P (insn) = 1;
}
else
@ -15244,17 +15237,16 @@ rs6000_emit_prologue (void)
/* Save and restore LR locally around this call (in R0). */
if (!info->lr_save_p)
rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (src)));
emit_insn (gen_load_macho_picbase (src));
insn = emit_move_insn (gen_rtx_REG (Pmode,
RS6000_PIC_OFFSET_TABLE_REGNUM),
lr);
rs6000_maybe_dead (insn);
emit_move_insn (gen_rtx_REG (Pmode,
RS6000_PIC_OFFSET_TABLE_REGNUM),
lr);
if (!info->lr_save_p)
rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
}
#endif
}
@ -15673,7 +15665,7 @@ rs6000_emit_epilogue (int sibcall)
/* Restore fpr's if we need to do it without calling a function. */
if (restoring_FPRs_inline)
for (i = 0; i < 64 - info->first_fp_reg_save; i++)
if ((regs_ever_live[info->first_fp_reg_save+i]
if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
&& ! call_used_regs[info->first_fp_reg_save+i]))
{
rtx addr, mem;
@ -15697,7 +15689,7 @@ rs6000_emit_epilogue (int sibcall)
if (using_mtcr_multiple)
{
for (i = 0; i < 8; i++)
if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
count++;
gcc_assert (count);
}
@ -15711,7 +15703,7 @@ rs6000_emit_epilogue (int sibcall)
ndx = 0;
for (i = 0; i < 8; i++)
if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
{
rtvec r = rtvec_alloc (2);
RTVEC_ELT (r, 0) = r12_rtx;
@ -15726,7 +15718,7 @@ rs6000_emit_epilogue (int sibcall)
}
else
for (i = 0; i < 8; i++)
if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
{
emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
CR0_REGNO+i),

View file

@ -888,7 +888,7 @@ extern enum rs6000_nop_insertion rs6000_sched_insert_nops;
emitted the vrsave mask. */
#define HARD_REGNO_RENAME_OK(SRC, DST) \
(! ALTIVEC_REGNO_P (DST) || regs_ever_live[DST])
(! ALTIVEC_REGNO_P (DST) || df_regs_ever_live_p (DST))
/* A C expression returning the cost of moving data from a register of class
CLASS1 to one of CLASS2. */
@ -1598,6 +1598,8 @@ typedef struct rs6000_args
#define HAVE_PRE_DECREMENT 1
#define HAVE_PRE_INCREMENT 1
#define HAVE_PRE_MODIFY_DISP 1
#define HAVE_PRE_MODIFY_REG 1
/* Macros to check register numbers against specific register classes. */

View file

@ -8364,7 +8364,8 @@
|| (GET_CODE (operands[1]) == MEM
&& (GET_CODE (XEXP (operands[1], 0)) == LO_SUM
|| GET_CODE (XEXP (operands[1], 0)) == PRE_INC
|| GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)))
|| GET_CODE (XEXP (operands[1], 0)) == PRE_DEC
|| GET_CODE (XEXP (operands[1], 0)) == PRE_MODIFY)))
{
/* If the low-address word is used in the address, we must load
it last. Otherwise, load it first. Note that we cannot have
@ -8374,7 +8375,7 @@
operands[1], 0))
return \"{l|lwz} %L0,%L1\;{l|lwz} %0,%1\";
else
return \"{l%U1|lwz%U1} %0,%1\;{l|lwz} %L0,%L1\";
return \"{l%U1%X1|lwz%U1%X1} %0,%1\;{l|lwz} %L0,%L1\";
}
else
{
@ -8404,8 +8405,9 @@
|| (GET_CODE (operands[0]) == MEM
&& (GET_CODE (XEXP (operands[0], 0)) == LO_SUM
|| GET_CODE (XEXP (operands[0], 0)) == PRE_INC
|| GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)))
return \"{st%U0|stw%U0} %1,%0\;{st|stw} %L1,%L0\";
|| GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
|| GET_CODE (XEXP (operands[0], 0)) == PRE_MODIFY)))
return \"{st%U0%X0|stw%U0%X0} %1,%0\;{st|stw} %L1,%L0\";
else
{
rtx addreg;
@ -8461,9 +8463,9 @@
operands[1], 0))
return \"{l|lwz} %L0,%L1\;{l|lwz} %0,%1\";
else
return \"{l%U1|lwz%U1} %0,%1\;{l|lwz} %L0,%L1\";
return \"{l%U1%X1|lwz%U1%X1} %0,%1\;{l|lwz} %L0,%L1\";
case 2:
return \"{st%U0|stw%U0} %1,%0\;{st|stw} %L1,%L0\";
return \"{st%U0%X0|stw%U0%X0} %1,%0\;{st|stw} %L1,%L0\";
case 3:
case 4:
case 5:

View file

@ -1,6 +1,6 @@
/* Subroutines used for code generation on IBM S/390 and zSeries
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
2007 Free Software Foundation, Inc.
Contributed by Hartmut Penner (hpenner@de.ibm.com) and
Ulrich Weigand (uweigand@de.ibm.com).
@ -51,6 +51,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "langhooks.h"
#include "optabs.h"
#include "tree-gimple.h"
#include "df.h"
/* Define the specific costs for a given cpu. */
@ -2890,7 +2891,7 @@ legitimize_pic_address (rtx orig, rtx reg)
rtx temp = reg? reg : gen_reg_rtx (Pmode);
if (reload_in_progress || reload_completed)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
addr = gen_rtx_CONST (Pmode, addr);
@ -2916,7 +2917,7 @@ legitimize_pic_address (rtx orig, rtx reg)
in both 31- and 64-bit code (@GOT). */
if (reload_in_progress || reload_completed)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
new = gen_rtx_CONST (Pmode, new);
@ -2948,7 +2949,7 @@ legitimize_pic_address (rtx orig, rtx reg)
rtx temp = gen_reg_rtx (Pmode);
if (reload_in_progress || reload_completed)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
addr = gen_rtx_CONST (Pmode, addr);
@ -2996,7 +2997,7 @@ legitimize_pic_address (rtx orig, rtx reg)
rtx temp = reg? reg : gen_reg_rtx (Pmode);
if (reload_in_progress || reload_completed)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
addr = XVECEXP (addr, 0, 0);
addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
@ -3076,7 +3077,7 @@ legitimize_pic_address (rtx orig, rtx reg)
rtx temp = reg? reg : gen_reg_rtx (Pmode);
if (reload_in_progress || reload_completed)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
UNSPEC_GOTOFF);
@ -3244,7 +3245,7 @@ legitimize_tls_address (rtx addr, rtx reg)
in both 31- and 64-bit code. */
if (reload_in_progress || reload_completed)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
new = gen_rtx_CONST (Pmode, new);
@ -3273,7 +3274,7 @@ legitimize_tls_address (rtx addr, rtx reg)
from the literal pool. */
if (reload_in_progress || reload_completed)
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
new = gen_rtx_CONST (Pmode, new);
@ -6325,7 +6326,7 @@ find_unused_clobbered_reg (void)
{
int i;
for (i = 0; i < 6; i++)
if (!regs_ever_live[i])
if (!df_regs_ever_live_p (i))
return i;
return 0;
}
@ -6391,7 +6392,7 @@ s390_regs_ever_clobbered (int *regs_ever_clobbered)
for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
if (current_function_calls_eh_return
|| (cfun->machine->has_landing_pad_p
&& regs_ever_live [EH_RETURN_DATA_REGNO (i)]))
&& df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
/* For nonlocal gotos all call-saved registers have to be saved.
@ -6470,7 +6471,7 @@ s390_register_info (int clobbered_regs[])
cfun_frame_layout.high_fprs = 0;
if (TARGET_64BIT)
for (i = 24; i < 32; i++)
if (regs_ever_live[i] && !global_regs[i])
if (df_regs_ever_live_p (i) && !global_regs[i])
{
cfun_set_fpr_bit (i - 16);
cfun_frame_layout.high_fprs++;
@ -6492,7 +6493,7 @@ s390_register_info (int clobbered_regs[])
if (flag_pic)
clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
|= regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
|= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
clobbered_regs[BASE_REGNUM]
|= (cfun->machine->base_reg
@ -6515,10 +6516,10 @@ s390_register_info (int clobbered_regs[])
|| current_function_stdarg);
for (i = 6; i < 16; i++)
if (regs_ever_live[i] || clobbered_regs[i])
if (df_regs_ever_live_p (i) || clobbered_regs[i])
break;
for (j = 15; j > i; j--)
if (regs_ever_live[j] || clobbered_regs[j])
if (df_regs_ever_live_p (j) || clobbered_regs[j])
break;
if (i == 16)
@ -6612,7 +6613,7 @@ s390_register_info (int clobbered_regs[])
if (!TARGET_64BIT)
for (i = 2; i < 4; i++)
if (regs_ever_live[i + 16] && !global_regs[i + 16])
if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
cfun_set_fpr_bit (i);
}
@ -6758,7 +6759,7 @@ s390_init_frame_layout (void)
as base register to avoid save/restore overhead. */
if (!base_used)
cfun->machine->base_reg = NULL_RTX;
else if (current_function_is_leaf && !regs_ever_live[5])
else if (current_function_is_leaf && !df_regs_ever_live_p (5))
cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
else
cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
@ -6781,12 +6782,15 @@ s390_update_frame_layout (void)
s390_register_info (clobbered_regs);
regs_ever_live[BASE_REGNUM] = clobbered_regs[BASE_REGNUM];
regs_ever_live[RETURN_REGNUM] = clobbered_regs[RETURN_REGNUM];
regs_ever_live[STACK_POINTER_REGNUM] = clobbered_regs[STACK_POINTER_REGNUM];
df_set_regs_ever_live (BASE_REGNUM,
clobbered_regs[BASE_REGNUM] ? true : false);
df_set_regs_ever_live (RETURN_REGNUM,
clobbered_regs[RETURN_REGNUM] ? true : false);
df_set_regs_ever_live (STACK_POINTER_REGNUM,
clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
if (cfun->machine->base_reg)
regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
}
/* Return true if it is legal to put a value with MODE into REGNO. */
@ -7169,7 +7173,10 @@ s390_emit_prologue (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
annotate_constant_pool_refs (&PATTERN (insn));
{
annotate_constant_pool_refs (&PATTERN (insn));
df_insn_rescan (insn);
}
pop_topmost_sequence ();
@ -7407,17 +7414,12 @@ s390_emit_prologue (void)
/* Set up got pointer, if needed. */
if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
{
rtx insns = s390_load_got ();
for (insn = insns; insn; insn = NEXT_INSN (insn))
{
annotate_constant_pool_refs (&PATTERN (insn));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
REG_NOTES (insn));
}
annotate_constant_pool_refs (&PATTERN (insn));
emit_insn (insns);
}

View file

@ -1,5 +1,5 @@
/* score-mdaux.c for Sunplus S+CORE processor
Copyright (C) 2005 Free Software Foundation, Inc.
Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Sunnorth
This file is part of GCC.
@ -126,7 +126,7 @@ static int
score_save_reg_p (unsigned int regno)
{
/* Check call-saved registers. */
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
return 1;
/* We need to save the old frame pointer before setting up a new one. */
@ -135,7 +135,7 @@ score_save_reg_p (unsigned int regno)
/* We need to save the incoming return address if it is ever clobbered
within the function. */
if (regno == RA_REGNUM && regs_ever_live[regno])
if (regno == RA_REGNUM && df_regs_ever_live_p (regno))
return 1;
return 0;

View file

@ -1,6 +1,6 @@
/* Output routines for GCC for Renesas / SuperH SH.
Copyright (C) 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
2003, 2004, 2005, 2006 Free Software Foundation, Inc.
2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Steve Chamberlain (sac@cygnus.com).
Improved by Jim Wilson (wilson@cygnus.com).
@ -47,6 +47,7 @@ Boston, MA 02110-1301, USA. */
#include "real.h"
#include "langhooks.h"
#include "basic-block.h"
#include "df.h"
#include "cfglayout.h"
#include "intl.h"
#include "sched-int.h"
@ -88,6 +89,9 @@ static short *regmode_weight[2];
/* Total SFmode and SImode weights of scheduled insns. */
static int curr_regmode_pressure[2];
/* Number of r0 life regions. */
static int r0_life_regions;
/* If true, skip cycles for Q -> R movement. */
static int skip_cycles = 0;
@ -195,6 +199,7 @@ static int sh_dfa_new_cycle (FILE *, int, rtx, int, int, int *sort_p);
static short find_set_regmode_weight (rtx, enum machine_mode);
static short find_insn_regmode_weight (rtx, enum machine_mode);
static void find_regmode_weight (basic_block, enum machine_mode);
static int find_r0_life_regions (basic_block);
static void sh_md_init_global (FILE *, int, int);
static void sh_md_finish_global (FILE *, int);
static int rank_for_reorder (const void *, const void *);
@ -4757,54 +4762,28 @@ sh_reorg (void)
if (GET_CODE (reg) != REG)
continue;
/* This is a function call via REG. If the only uses of REG
between the time that it is set and the time that it dies
are in function calls, then we can associate all the
function calls with the setting of REG. */
for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
/* Try scanning backward to find where the register is set. */
link = NULL;
for (scan = PREV_INSN (insn);
scan && GET_CODE (scan) != CODE_LABEL;
scan = PREV_INSN (scan))
{
rtx linked_insn;
if (! INSN_P (scan))
continue;
if (REG_NOTE_KIND (link) != 0)
continue;
linked_insn = XEXP (link, 0);
set = single_set (linked_insn);
if (set
&& rtx_equal_p (reg, SET_DEST (set))
&& ! INSN_DELETED_P (linked_insn))
if (! reg_mentioned_p (reg, scan))
continue;
if (noncall_uses_reg (reg, scan, &set))
break;
if (set)
{
link = linked_insn;
link = scan;
break;
}
}
if (! link)
{
/* ??? Sometimes global register allocation will have
deleted the insn pointed to by LOG_LINKS. Try
scanning backward to find where the register is set. */
for (scan = PREV_INSN (insn);
scan && GET_CODE (scan) != CODE_LABEL;
scan = PREV_INSN (scan))
{
if (! INSN_P (scan))
continue;
if (! reg_mentioned_p (reg, scan))
continue;
if (noncall_uses_reg (reg, scan, &set))
break;
if (set)
{
link = scan;
break;
}
}
}
if (! link)
continue;
@ -4833,7 +4812,7 @@ sh_reorg (void)
/* Don't try to trace forward past a CODE_LABEL if we haven't
seen INSN yet. Ordinarily, we will only find the setting insn
in LOG_LINKS if it is in the same basic block. However,
if it is in the same basic block. However,
cross-jumping can insert code labels in between the load and
the call, and can result in situations where a single call
insn may have two targets depending on where we came from. */
@ -4880,11 +4859,8 @@ sh_reorg (void)
later insn. */
/* ??? We shouldn't have to use FOUNDINSN here.
However, the LOG_LINKS fields are apparently not
entirely reliable around libcalls;
newlib/libm/math/e_pow.c is a test case. Sometimes
an insn will appear in LOG_LINKS even though it is
not the most recent insn which sets the register. */
This dates back to when we used LOG_LINKS to find
the most recent insn which sets the register. */
if (foundinsn
&& (scanset
@ -5849,12 +5825,12 @@ calc_live_regs (HARD_REG_SET *live_regs_mask)
CLEAR_HARD_REG_SET (*live_regs_mask);
if ((TARGET_SH4 || TARGET_SH2A_DOUBLE) && TARGET_FMOVD && interrupt_handler
&& regs_ever_live[FPSCR_REG])
&& df_regs_ever_live_p (FPSCR_REG))
target_flags &= ~MASK_FPU_SINGLE;
/* If we can save a lot of saves by switching to double mode, do that. */
else if ((TARGET_SH4 || TARGET_SH2A_DOUBLE) && TARGET_FMOVD && TARGET_FPU_SINGLE)
for (count = 0, reg = FIRST_FP_REG; reg <= LAST_FP_REG; reg += 2)
if (regs_ever_live[reg] && regs_ever_live[reg+1]
if (df_regs_ever_live_p (reg) && df_regs_ever_live_p (reg+1)
&& (! call_really_used_regs[reg]
|| interrupt_handler)
&& ++count > 2)
@ -5876,11 +5852,11 @@ calc_live_regs (HARD_REG_SET *live_regs_mask)
pr_live = (pr_initial
? (GET_CODE (pr_initial) != REG
|| REGNO (pr_initial) != (PR_REG))
: regs_ever_live[PR_REG]);
: df_regs_ever_live_p (PR_REG));
/* For Shcompact, if not optimizing, we end up with a memory reference
using the return address pointer for __builtin_return_address even
though there is no actual need to put the PR register on the stack. */
pr_live |= regs_ever_live[RETURN_ADDRESS_POINTER_REGNUM];
pr_live |= df_regs_ever_live_p (RETURN_ADDRESS_POINTER_REGNUM);
}
/* Force PR to be live if the prologue has to call the SHmedia
argument decoder or register saver. */
@ -5896,7 +5872,7 @@ calc_live_regs (HARD_REG_SET *live_regs_mask)
? pr_live
: interrupt_handler
? (/* Need to save all the regs ever live. */
(regs_ever_live[reg]
(df_regs_ever_live_p (reg)
|| (call_really_used_regs[reg]
&& (! fixed_regs[reg] || reg == MACH_REG || reg == MACL_REG
|| reg == PIC_OFFSET_TABLE_REGNUM)
@ -5914,7 +5890,7 @@ calc_live_regs (HARD_REG_SET *live_regs_mask)
&& flag_pic
&& current_function_args_info.call_cookie
&& reg == PIC_OFFSET_TABLE_REGNUM)
|| (regs_ever_live[reg]
|| (df_regs_ever_live_p (reg)
&& (!call_really_used_regs[reg]
|| (trapa_handler && reg == FPSCR_REG && TARGET_FPU_ANY)))
|| (current_function_calls_eh_return
@ -5923,7 +5899,7 @@ calc_live_regs (HARD_REG_SET *live_regs_mask)
|| reg == EH_RETURN_DATA_REGNO (2)
|| reg == EH_RETURN_DATA_REGNO (3)))
|| ((reg == MACL_REG || reg == MACH_REG)
&& regs_ever_live[reg]
&& df_regs_ever_live_p (reg)
&& sh_cfun_attr_renesas_p ())
))
{
@ -5935,7 +5911,7 @@ calc_live_regs (HARD_REG_SET *live_regs_mask)
{
if (FP_REGISTER_P (reg))
{
if (! TARGET_FPU_SINGLE && ! regs_ever_live[reg ^ 1])
if (! TARGET_FPU_SINGLE && ! df_regs_ever_live_p (reg ^ 1))
{
SET_HARD_REG_BIT (*live_regs_mask, (reg ^ 1));
count += GET_MODE_SIZE (REGISTER_NATURAL_MODE (reg ^ 1));
@ -6012,10 +5988,10 @@ sh_media_register_for_return (void)
if (sh_cfun_interrupt_handler_p ())
return -1;
tr0_used = flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
tr0_used = flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
for (regno = FIRST_TARGET_REG + tr0_used; regno <= LAST_TARGET_REG; regno++)
if (call_really_used_regs[regno] && ! regs_ever_live[regno])
if (call_really_used_regs[regno] && ! df_regs_ever_live_p (regno))
return regno;
return -1;
@ -6174,7 +6150,7 @@ sh_expand_prologue (void)
incoming-argument decoder and/or of the return trampoline from
the GOT, so make sure the PIC register is preserved and
initialized. */
regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
if (TARGET_SHCOMPACT
&& (current_function_args_info.call_cookie & ~ CALL_COOKIE_RET_TRAMP(1)))
@ -6207,19 +6183,8 @@ sh_expand_prologue (void)
int tr = sh_media_register_for_return ();
if (tr >= 0)
{
rtx insn = emit_move_insn (gen_rtx_REG (DImode, tr),
gen_rtx_REG (DImode, PR_MEDIA_REG));
/* ??? We should suppress saving pr when we don't need it, but this
is tricky because of builtin_return_address. */
/* If this function only exits with sibcalls, this copy
will be flagged as dead. */
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
const0_rtx,
REG_NOTES (insn));
}
emit_move_insn (gen_rtx_REG (DImode, tr),
gen_rtx_REG (DImode, PR_MEDIA_REG));
}
/* Emit the code for SETUP_VARARGS. */
@ -6467,24 +6432,8 @@ sh_expand_prologue (void)
else
push_regs (&live_regs_mask, current_function_interrupt);
if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
{
rtx insn = get_last_insn ();
rtx last = emit_insn (gen_GOTaddr2picreg ());
/* Mark these insns as possibly dead. Sometimes, flow2 may
delete all uses of the PIC register. In this case, let it
delete the initialization too. */
do
{
insn = NEXT_INSN (insn);
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
const0_rtx,
REG_NOTES (insn));
}
while (insn != last);
}
if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
emit_insn (gen_GOTaddr2picreg ());
if (SHMEDIA_REGS_STACK_ADJUST ())
{
@ -6499,16 +6448,7 @@ sh_expand_prologue (void)
}
if (target_flags != save_flags && ! current_function_interrupt)
{
rtx insn = emit_insn (gen_toggle_sz ());
/* If we're lucky, a mode switch in the function body will
overwrite fpscr, turning this insn dead. Tell flow this
insn is ok to delete. */
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
const0_rtx,
REG_NOTES (insn));
}
emit_insn (gen_toggle_sz ());
target_flags = save_flags;
@ -6729,11 +6669,6 @@ sh_expand_epilogue (bool sibcall_p)
}
insn = emit_move_insn (reg_rtx, mem_rtx);
if (reg == PR_MEDIA_REG && sh_media_register_for_return () >= 0)
/* This is dead, unless we return with a sibcall. */
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
const0_rtx,
REG_NOTES (insn));
}
gcc_assert (entry->offset + offset_base == d + d_rounding);
@ -6742,7 +6677,11 @@ sh_expand_epilogue (bool sibcall_p)
{
save_size = 0;
if (TEST_HARD_REG_BIT (live_regs_mask, PR_REG))
pop (PR_REG);
{
if (!frame_pointer_needed)
emit_insn (gen_blockage ());
pop (PR_REG);
}
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{
int j = (FIRST_PSEUDO_REGISTER - 1) - i;
@ -8799,7 +8738,7 @@ sh_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
saved by the prologue, even if they would normally be
call-clobbered. */
if (sh_cfun_interrupt_handler_p () && !regs_ever_live[new_reg])
if (sh_cfun_interrupt_handler_p () && !df_regs_ever_live_p (new_reg))
return 0;
return 1;
@ -9039,7 +8978,7 @@ flow_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
static int
sh_pr_n_sets (void)
{
return REG_N_SETS (TARGET_SHMEDIA ? PR_MEDIA_REG : PR_REG);
return DF_REG_DEF_COUNT (TARGET_SHMEDIA ? PR_MEDIA_REG : PR_REG);
}
/* Return where to allocate pseudo for a given hard register initial
@ -9206,6 +9145,56 @@ ready_reorder (rtx *ready, int nready)
SCHED_REORDER (ready, nready);
}
/* Count life regions of r0 for a block. */
static int
find_r0_life_regions (basic_block b)
{
rtx end, insn;
rtx pset;
rtx r0_reg;
int live;
int set;
int death = 0;
if (REGNO_REG_SET_P (DF_LIVE_IN (b), R0_REG))
{
set = 1;
live = 1;
}
else
{
set = 0;
live = 0;
}
insn = BB_HEAD (b);
end = BB_END (b);
r0_reg = gen_rtx_REG (SImode, R0_REG);
while (1)
{
if (INSN_P (insn))
{
if (find_regno_note (insn, REG_DEAD, R0_REG))
{
death++;
live = 0;
}
if (!live
&& (pset = single_set (insn))
&& reg_overlap_mentioned_p (r0_reg, SET_DEST (pset))
&& !find_regno_note (insn, REG_UNUSED, R0_REG))
{
set++;
live = 1;
}
}
if (insn == end)
break;
insn = NEXT_INSN (insn);
}
return set - death;
}
/* Calculate regmode weights for all insns of all basic block. */
static void
sh_md_init_global (FILE *dump ATTRIBUTE_UNUSED,
@ -9216,11 +9205,14 @@ sh_md_init_global (FILE *dump ATTRIBUTE_UNUSED,
regmode_weight[0] = (short *) xcalloc (old_max_uid, sizeof (short));
regmode_weight[1] = (short *) xcalloc (old_max_uid, sizeof (short));
r0_life_regions = 0;
FOR_EACH_BB_REVERSE (b)
{
find_regmode_weight (b, SImode);
find_regmode_weight (b, SFmode);
if (!reload_completed)
r0_life_regions += find_r0_life_regions (b);
}
CURR_REGMODE_PRESSURE (SImode) = 0;
@ -9281,7 +9273,6 @@ sh_md_init (FILE *dump ATTRIBUTE_UNUSED,
/* Pressure on register r0 can lead to spill failures. so avoid sched1 for
functions that already have high pressure on r0. */
#define R0_MAX_LIFE_REGIONS 2
#define R0_MAX_LIVE_LENGTH 12
/* Register Pressure thresholds for SImode and SFmode registers. */
#define SIMODE_MAX_WEIGHT 5
#define SFMODE_MAX_WEIGHT 10
@ -9292,9 +9283,8 @@ high_pressure (enum machine_mode mode)
{
/* Pressure on register r0 can lead to spill failures. so avoid sched1 for
functions that already have high pressure on r0. */
if ((REG_N_SETS (0) - REG_N_DEATHS (0)) >= R0_MAX_LIFE_REGIONS
&& REG_LIVE_LENGTH (0) >= R0_MAX_LIVE_LENGTH)
return 1;
if (r0_life_regions >= R0_MAX_LIFE_REGIONS)
return 1;
if (mode == SFmode)
return (CURR_REGMODE_PRESSURE (SFmode) > SFMODE_MAX_WEIGHT);
@ -10275,6 +10265,7 @@ sh_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
insn_locators_alloc ();
insns = get_insns ();
#if 0
if (optimize > 0)
{
/* Initialize the bitmap obstacks. */
@ -10301,6 +10292,14 @@ sh_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
else if (flag_pic)
split_all_insns_noflow ();
}
#else
if (optimize > 0)
{
if (! cfun->cfg)
init_flow ();
split_all_insns_noflow ();
}
#endif
sh_reorg ();
@ -10312,15 +10311,21 @@ sh_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
final (insns, file, 1);
final_end_function ();
#if 0
if (optimize > 0)
{
/* Release all memory allocated by flow. */
free_basic_block_vars ();
/* Release all memory allocated by df. */
if (rtl_df)
{
df_finish (rtl_df);
rtl_df = NULL;
}
/* Release the bitmap obstacks. */
bitmap_obstack_release (&reg_obstack);
bitmap_obstack_release (NULL);
}
#endif
reload_completed = 0;
epilogue_completed = 0;

View file

@ -1,6 +1,6 @@
;;- Machine description for Renesas / SuperH SH.
;; Copyright (C) 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
;; 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
;; 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
;; Contributed by Steve Chamberlain (sac@cygnus.com).
;; Improved by Jim Wilson (wilson@cygnus.com).
@ -135,7 +135,6 @@
(UNSPEC_FSINA 16)
(UNSPEC_NSB 17)
(UNSPEC_ALLOCO 18)
(UNSPEC_EH_RETURN 19)
(UNSPEC_TLSGD 20)
(UNSPEC_TLSLDM 21)
(UNSPEC_TLSIE 22)
@ -163,6 +162,7 @@
(UNSPECV_CONST8 6)
(UNSPECV_WINDOW_END 10)
(UNSPECV_CONST_END 11)
(UNSPECV_EH_RETURN 12)
])
;; -------------------------------------------------------------------------
@ -8110,15 +8110,197 @@ label:
DONE;
}")
(define_expand "sibcall_value"
[(set (match_operand 0 "" "")
(call (match_operand 1 "" "")
(define_insn "sibcall_valuei"
[(set (match_operand 0 "" "=rf")
(call (mem:SI (match_operand:SI 1 "register_operand" "k"))
(match_operand 2 "" "")))
(match_operand 3 "" "")]
(use (reg:PSI FPSCR_REG))
(return)]
"TARGET_SH1"
"jmp @%1%#"
[(set_attr "needs_delay_slot" "yes")
(set (attr "fp_mode")
(if_then_else (eq_attr "fpu_single" "yes")
(const_string "single") (const_string "double")))
(set_attr "type" "jump_ind")])
(define_insn "sibcall_valuei_pcrel"
[(set (match_operand 0 "" "=rf")
(call (mem:SI (match_operand:SI 1 "arith_reg_operand" "k"))
(match_operand 2 "" "")))
(use (match_operand 3 "" ""))
(use (reg:PSI FPSCR_REG))
(return)]
"TARGET_SH2"
"braf %1\\n%O3:%#"
[(set_attr "needs_delay_slot" "yes")
(set (attr "fp_mode")
(if_then_else (eq_attr "fpu_single" "yes")
(const_string "single") (const_string "double")))
(set_attr "type" "jump_ind")])
(define_insn_and_split "sibcall_value_pcrel"
[(set (match_operand 0 "" "=rf")
(call (mem:SI (match_operand:SI 1 "symbol_ref_operand" ""))
(match_operand 2 "" "")))
(use (reg:PSI FPSCR_REG))
(clobber (match_scratch:SI 3 "=k"))
(return)]
"TARGET_SH2"
"#"
"reload_completed"
[(const_int 0)]
"
{
rtx lab = PATTERN (gen_call_site ());
rtx call_insn;
emit_insn (gen_sym_label2reg (operands[3], operands[1], lab));
call_insn = emit_call_insn (gen_sibcall_valuei_pcrel (operands[0],
operands[3],
operands[2],
copy_rtx (lab)));
SIBLING_CALL_P (call_insn) = 1;
DONE;
}"
[(set_attr "needs_delay_slot" "yes")
(set (attr "fp_mode")
(if_then_else (eq_attr "fpu_single" "yes")
(const_string "single") (const_string "double")))
(set_attr "type" "jump_ind")])
(define_insn "sibcall_value_compact"
[(set (match_operand 0 "" "=rf,rf")
(call (mem:SI (match_operand:SI 1 "register_operand" "k,k"))
(match_operand 2 "" "")))
(return)
(use (match_operand:SI 3 "register_operand" "z,x"))
(use (reg:SI R1_REG))
(use (reg:PSI FPSCR_REG))
;; We want to make sure the `x' above will only match MACH_REG
;; because sibcall_epilogue may clobber MACL_REG.
(clobber (reg:SI MACL_REG))]
"TARGET_SHCOMPACT"
"@
jmp @%1%#
jmp @%1\\n sts %3, r0"
[(set_attr "needs_delay_slot" "yes,no")
(set_attr "length" "2,4")
(set (attr "fp_mode") (const_string "single"))
(set_attr "type" "jump_ind")])
(define_insn "sibcall_value_media"
[(set (match_operand 0 "" "=rf")
(call (mem:DI (match_operand 1 "target_reg_operand" "k"))
(match_operand 2 "" "")))
(use (reg:SI PR_MEDIA_REG))
(return)]
"TARGET_SHMEDIA"
"blink %1, r63"
[(set_attr "type" "jump_media")])
(define_expand "sibcall_value"
[(parallel
[(set (match_operand 0 "arith_reg_operand" "")
(call (mem:SI (match_operand 1 "arith_reg_operand" ""))
(match_operand 2 "" "")))
(match_operand 3 "" "")
(use (reg:PSI FPSCR_REG))
(return)])]
""
"
{
emit_call_insn (gen_sibcall (operands[1], operands[2], operands[3]));
if (TARGET_SHMEDIA)
{
operands[1] = shmedia_prepare_call_address (operands[1], 1);
emit_call_insn (gen_sibcall_value_media (operands[0], operands[1],
operands[2]));
DONE;
}
else if (TARGET_SHCOMPACT && operands[3]
&& (INTVAL (operands[3]) & ~ CALL_COOKIE_RET_TRAMP (1)))
{
rtx cookie_rtx = operands[3];
long cookie = INTVAL (cookie_rtx);
rtx func = XEXP (operands[1], 0);
rtx mach, r1;
if (flag_pic)
{
if (GET_CODE (func) == SYMBOL_REF && ! SYMBOL_REF_LOCAL_P (func))
{
rtx reg = gen_reg_rtx (Pmode);
emit_insn (gen_symGOT2reg (reg, func));
func = reg;
}
else
func = legitimize_pic_address (func, Pmode, 0);
}
/* FIXME: if we could tell whether all argument registers are
already taken, we could decide whether to force the use of
MACH_REG or to stick to R0_REG. Unfortunately, there's no
simple way to tell. We could use the CALL_COOKIE, but we
can't currently tell a register used for regular argument
passing from one that is unused. If we leave it up to reload
to decide which register to use, it seems to always choose
R0_REG, which leaves no available registers in SIBCALL_REGS
to hold the address of the trampoline. */
mach = gen_rtx_REG (SImode, MACH_REG);
r1 = gen_rtx_REG (SImode, R1_REG);
/* Since such a call function may use all call-clobbered
registers, we force a mode switch earlier, so that we don't
run out of registers when adjusting fpscr for the call. */
emit_insn (gen_force_mode_for_call ());
operands[1]
= function_symbol (NULL, \"__GCC_shcompact_call_trampoline\",
SFUNC_GOT);
operands[1] = force_reg (SImode, operands[1]);
/* We don't need a return trampoline, since the callee will
return directly to the upper caller. */
if (cookie & CALL_COOKIE_RET_TRAMP (1))
{
cookie &= ~ CALL_COOKIE_RET_TRAMP (1);
cookie_rtx = GEN_INT (cookie);
}
emit_move_insn (mach, func);
emit_move_insn (r1, cookie_rtx);
emit_call_insn (gen_sibcall_value_compact (operands[0], operands[1],
operands[2], mach));
DONE;
}
else if (TARGET_SHCOMPACT && flag_pic
&& GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
&& ! SYMBOL_REF_LOCAL_P (XEXP (operands[1], 0)))
{
rtx reg = gen_reg_rtx (Pmode);
emit_insn (gen_symGOT2reg (reg, XEXP (operands[1], 0)));
XEXP (operands[1], 0) = reg;
}
if (flag_pic && TARGET_SH2
&& GET_CODE (operands[1]) == MEM
&& GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
/* The PLT needs the PIC register, but the epilogue would have
to restore it, so we can only use PC-relative PIC calls for
static functions. */
&& SYMBOL_REF_LOCAL_P (XEXP (operands[1], 0)))
{
emit_call_insn (gen_sibcall_value_pcrel (operands[0],
XEXP (operands[1], 0),
operands[2]));
DONE;
}
else
operands[1] = force_reg (SImode, XEXP (operands[1], 0));
emit_call_insn (gen_sibcall_valuei (operands[0], operands[1], operands[2]));
DONE;
}")
@ -8239,19 +8421,14 @@ label:
{
rtx r0 = gen_rtx_REG (SImode, R0_REG);
rtx tmp = gen_rtx_REG (SImode, MACL_REG);
rtx i;
/* We can't tell at this point whether the sibcall is a
sibcall_compact and, if it is, whether it uses r0 or
mach as operand 2, so let the instructions that
preserve r0 be optimized away if r0 turns out to be
dead. */
i = emit_insn_before (gen_rtx_SET (SImode, tmp, r0), insn);
REG_NOTES (i) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
REG_NOTES (i));
i = emit_move_insn (r0, tmp);
REG_NOTES (i) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
REG_NOTES (i));
emit_insn_before (gen_rtx_SET (SImode, tmp, r0), insn);
emit_move_insn (r0, tmp);
break;
}
}
@ -9226,19 +9403,22 @@ mov.l\\t1f,r0\\n\\
;; until we know where it will be put in the stack frame.
(define_insn "eh_set_ra_si"
[(unspec [(match_operand:SI 0 "register_operand" "r")] UNSPEC_EH_RETURN)
[(unspec_volatile [(match_operand:SI 0 "register_operand" "r")]
UNSPECV_EH_RETURN)
(clobber (match_scratch:SI 1 "=&r"))]
"! TARGET_SHMEDIA64"
"#")
(define_insn "eh_set_ra_di"
[(unspec [(match_operand:DI 0 "register_operand" "r")] UNSPEC_EH_RETURN)
[(unspec_volatile [(match_operand:DI 0 "register_operand" "r")]
UNSPECV_EH_RETURN)
(clobber (match_scratch:DI 1 "=&r"))]
"TARGET_SHMEDIA64"
"#")
(define_split
[(unspec [(match_operand 0 "register_operand" "")] UNSPEC_EH_RETURN)
[(unspec_volatile [(match_operand 0 "register_operand" "")]
UNSPECV_EH_RETURN)
(clobber (match_scratch 1 ""))]
"reload_completed"
[(const_int 0)]
@ -10167,7 +10347,7 @@ mov.l\\t1f,r0\\n\\
[(set (reg:PSI FPSCR_REG)
(mem:PSI (match_operand:SI 0 "register_operand" "")))]
"(TARGET_SH4 || TARGET_SH2A_DOUBLE)
&& (flag_peephole2 ? flow2_completed : reload_completed)"
&& (flag_peephole2 ? epilogue_completed : reload_completed)"
[(const_int 0)]
{
rtx fpscr, mem, new_insn;

View file

@ -1,6 +1,6 @@
/* Subroutines for insn-output.c for SPARC.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Michael Tiemann (tiemann@cygnus.com)
64-bit SPARC-V9 support by Michael Tiemann, Jim Wilson, and Doug Evans,
@ -52,6 +52,7 @@ Boston, MA 02110-1301, USA. */
#include "tree-gimple.h"
#include "langhooks.h"
#include "params.h"
#include "df.h"
/* Processor costs */
static const
@ -3742,20 +3743,20 @@ sparc_compute_frame_size (HOST_WIDE_INT size, int leaf_function_p)
if (TARGET_ARCH64)
{
for (i = 0; i < 8; i++)
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
n_regs += 2;
}
else
{
for (i = 0; i < 8; i += 2)
if ((regs_ever_live[i] && ! call_used_regs[i])
|| (regs_ever_live[i+1] && ! call_used_regs[i+1]))
if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
|| (df_regs_ever_live_p (i+1) && ! call_used_regs[i+1]))
n_regs += 2;
}
for (i = 32; i < (TARGET_V9 ? 96 : 64); i += 2)
if ((regs_ever_live[i] && ! call_used_regs[i])
|| (regs_ever_live[i+1] && ! call_used_regs[i+1]))
if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
|| (df_regs_ever_live_p (i+1) && ! call_used_regs[i+1]))
n_regs += 2;
/* Set up values for use in prologue and epilogue. */
@ -3798,7 +3799,7 @@ sparc_output_scratch_registers (FILE *file ATTRIBUTE_UNUSED)
.register being printed for them already. */
for (i = 2; i < 8; i++)
{
if (regs_ever_live [i]
if (df_regs_ever_live_p (i)
&& ! sparc_hard_reg_printed [i])
{
sparc_hard_reg_printed [i] = 1;
@ -3829,7 +3830,7 @@ save_or_restore_regs (int low, int high, rtx base, int offset, int action)
{
for (i = low; i < high; i++)
{
if (regs_ever_live[i] && ! call_used_regs[i])
if (df_regs_ever_live_p (i) && ! call_used_regs[i])
{
mem = gen_rtx_MEM (DImode, plus_constant (base, offset));
set_mem_alias_set (mem, sparc_sr_alias_set);
@ -3848,8 +3849,8 @@ save_or_restore_regs (int low, int high, rtx base, int offset, int action)
{
for (i = low; i < high; i += 2)
{
bool reg0 = regs_ever_live[i] && ! call_used_regs[i];
bool reg1 = regs_ever_live[i+1] && ! call_used_regs[i+1];
bool reg0 = df_regs_ever_live_p (i) && ! call_used_regs[i];
bool reg1 = df_regs_ever_live_p (i+1) && ! call_used_regs[i+1];
enum machine_mode mode;
int regno;
@ -6509,7 +6510,7 @@ order_regs_for_local_alloc (void)
{
static int last_order_nonleaf = 1;
if (regs_ever_live[15] != last_order_nonleaf)
if (df_regs_ever_live_p (15) != last_order_nonleaf)
{
last_order_nonleaf = !last_order_nonleaf;
memcpy ((char *) reg_alloc_order,
@ -7673,7 +7674,7 @@ sparc_check_64 (rtx x, rtx insn)
y = gen_rtx_REG (SImode, REGNO (x) + WORDS_BIG_ENDIAN);
if (flag_expensive_optimizations
&& REG_N_SETS (REGNO (y)) == 1)
&& DF_REG_DEF_COUNT (REGNO (y)) == 1)
set_once = 1;
if (insn == 0)

View file

@ -1387,7 +1387,6 @@ print_operand (FILE * file, rtx x, int code)
}
extern char call_used_regs[];
extern char regs_ever_live[];
/* For PIC mode we've reserved PIC_OFFSET_TABLE_REGNUM, which is a
caller saved register. For leaf functions it is more efficient to
@ -1517,13 +1516,13 @@ spu_split_immediate (rtx * ops)
static int
need_to_save_reg (int regno, int saving)
{
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
return 1;
if (flag_pic
&& regno == PIC_OFFSET_TABLE_REGNUM
&& (!saving || current_function_uses_pic_offset_table)
&& (!saving
|| !current_function_is_leaf || regs_ever_live[LAST_ARG_REGNUM]))
|| !current_function_is_leaf || df_regs_ever_live_p (LAST_ARG_REGNUM)))
return 1;
return 0;
}
@ -1571,16 +1570,11 @@ frame_emit_add_imm (rtx dst, rtx src, HOST_WIDE_INT imm, rtx scratch)
}
else
{
insn = emit_insn (gen_movsi (scratch, gen_int_mode (imm, SImode)));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
REG_NOTES (insn));
emit_insn (gen_movsi (scratch, gen_int_mode (imm, SImode)));
insn = emit_insn (gen_addsi3 (dst, src, scratch));
if (REGNO (src) == REGNO (scratch))
abort ();
}
if (REGNO (dst) == REGNO (scratch))
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
REG_NOTES (insn));
return insn;
}
@ -1688,11 +1682,7 @@ spu_expand_prologue (void)
{
rtx pic_reg = get_pic_reg ();
insn = emit_insn (gen_load_pic_offset (pic_reg, scratch_reg_0));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
REG_NOTES (insn));
insn = emit_insn (gen_subsi3 (pic_reg, pic_reg, scratch_reg_0));
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
REG_NOTES (insn));
}
if (total_size > 0)
@ -2424,7 +2414,7 @@ immediate_load_p (rtx op, enum machine_mode mode)
{
enum immediate_class c = classify_immediate (op, mode);
return c == IC_IL1 || c == IC_IL1s
|| (!flow2_completed && (c == IC_IL2 || c == IC_IL2s));
|| (!epilogue_completed && (c == IC_IL2 || c == IC_IL2s));
}
return 0;
}
@ -3833,7 +3823,7 @@ fsmbi_const_p (rtx x)
/* We can always choose TImode for CONST_INT because the high bits
of an SImode will always be all 1s, i.e., valid for fsmbi. */
enum immediate_class c = classify_immediate (x, TImode);
return c == IC_FSMBI || (!flow2_completed && c == IC_FSMBI2);
return c == IC_FSMBI || (!epilogue_completed && c == IC_FSMBI2);
}
return 0;
}

View file

@ -1,6 +1,6 @@
/* Xstormy16 target functions.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
2006, 2007 Free Software Foundation, Inc.
Contributed by Red Hat, Inc.
This file is part of GCC.
@ -1000,10 +1000,10 @@ struct xstormy16_stack_layout
/* Does REGNO need to be saved? */
#define REG_NEEDS_SAVE(REGNUM, IFUN) \
((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
|| (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
&& (REGNO_REG_CLASS (REGNUM) != CARRY_REGS) \
&& (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
&& (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
/* Compute the stack layout. */
struct xstormy16_stack_layout

View file

@ -1,6 +1,6 @@
/* Subroutines for insn-output.c for NEC V850 series
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
2006, 2007 Free Software Foundation, Inc.
Contributed by Jeff Law (law@cygnus.com).
This file is part of GCC.
@ -1134,7 +1134,7 @@ substitute_ep_register (rtx first_insn,
if (!*p_r1)
{
regs_ever_live[1] = 1;
df_set_regs_ever_live_p (1, true);
*p_r1 = gen_rtx_REG (Pmode, 1);
*p_ep = gen_rtx_REG (Pmode, 30);
}
@ -1460,12 +1460,15 @@ compute_register_save_size (long * p_reg_saved)
int size = 0;
int i;
int interrupt_handler = v850_interrupt_function_p (current_function_decl);
int call_p = regs_ever_live [LINK_POINTER_REGNUM];
int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
long reg_saved = 0;
/* Count the return pointer if we need to save it. */
if (current_function_profile && !call_p)
regs_ever_live [LINK_POINTER_REGNUM] = call_p = 1;
{
df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
call_p = 1;
}
/* Count space for the register saves. */
if (interrupt_handler)
@ -1474,7 +1477,7 @@ compute_register_save_size (long * p_reg_saved)
switch (i)
{
default:
if (regs_ever_live[i] || call_p)
if (df_regs_ever_live_p (i) || call_p)
{
size += 4;
reg_saved |= 1L << i;
@ -1502,7 +1505,7 @@ compute_register_save_size (long * p_reg_saved)
{
/* Find the first register that needs to be saved. */
for (i = 0; i <= 31; i++)
if (regs_ever_live[i] && ((! call_used_regs[i])
if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
|| i == LINK_POINTER_REGNUM))
break;
@ -1534,7 +1537,7 @@ compute_register_save_size (long * p_reg_saved)
reg_saved |= 1L << i;
}
if (regs_ever_live [LINK_POINTER_REGNUM])
if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
{
size += 4;
reg_saved |= 1L << LINK_POINTER_REGNUM;
@ -1543,7 +1546,7 @@ compute_register_save_size (long * p_reg_saved)
else
{
for (; i <= 31; i++)
if (regs_ever_live[i] && ((! call_used_regs[i])
if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
|| i == LINK_POINTER_REGNUM))
{
size += 4;

View file

@ -1,6 +1,6 @@
/* Subroutines for insn-output.c for VAX.
Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
2004, 2005
2004, 2005, 2006, 2007
Free Software Foundation, Inc.
This file is part of GCC.
@ -116,7 +116,7 @@ vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
int mask = 0;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
mask |= 1 << regno;
fprintf (file, "\t.word 0x%x\n", mask);
@ -127,7 +127,7 @@ vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
int offset = 0;
for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
if (regs_ever_live[regno] && !call_used_regs[regno])
if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
dwarf2out_reg_save (label, regno, offset -= 4);
dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);

168
gcc/cse.c
View file

@ -45,6 +45,8 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
/* The basic idea of common subexpression elimination is to go
through the code, keeping a record of expressions that would
@ -347,27 +349,6 @@ static unsigned int cse_reg_info_timestamp;
static HARD_REG_SET hard_regs_in_table;
/* CUID of insn that starts the basic block currently being cse-processed. */
static int cse_basic_block_start;
/* CUID of insn that ends the basic block currently being cse-processed. */
static int cse_basic_block_end;
/* Vector mapping INSN_UIDs to cuids.
The cuids are like uids but increase monotonically always.
We use them to see whether a reg is used outside a given basic block. */
static int *uid_cuid;
/* Highest UID in UID_CUID. */
static int max_uid;
/* Get the cuid of an insn. */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
/* Nonzero if cse has altered conditional jump insns
in such a way that jump optimization should be redone. */
@ -538,10 +519,6 @@ static int constant_pool_entries_regcost;
struct cse_basic_block_data
{
/* Lowest CUID value of insns in block. */
int low_cuid;
/* Highest CUID value of insns in block. */
int high_cuid;
/* Total number of SETs in block. */
int nsets;
/* Size of current branch path, if any. */
@ -554,6 +531,11 @@ struct cse_basic_block_data
} *path;
};
/* Pointers to the live in/live out bitmaps for the boundaries of the
current EBB. */
static bitmap cse_ebb_live_in, cse_ebb_live_out;
/* A simple bitmap to track which basic blocks have been visited
already as part of an already processed extended basic block. */
static sbitmap cse_visited_basic_blocks;
@ -602,7 +584,7 @@ static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
static void cse_insn (rtx, rtx);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
@ -957,11 +939,10 @@ make_regs_eqv (unsigned int new, unsigned int old)
&& ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
|| (new >= FIRST_PSEUDO_REGISTER
&& (firstr < FIRST_PSEUDO_REGISTER
|| ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
|| (uid_cuid[REGNO_FIRST_UID (new)]
< cse_basic_block_start))
&& (uid_cuid[REGNO_LAST_UID (new)]
> uid_cuid[REGNO_LAST_UID (firstr)]))))))
|| (bitmap_bit_p (cse_ebb_live_out, new)
&& !bitmap_bit_p (cse_ebb_live_out, firstr))
|| (bitmap_bit_p (cse_ebb_live_in, new)
&& !bitmap_bit_p (cse_ebb_live_in, firstr))))))
{
reg_eqv_table[firstr].prev = new;
reg_eqv_table[new].next = firstr;
@ -2648,14 +2629,15 @@ cse_rtx_varies_p (rtx x, int from_alias)
static void
validate_canon_reg (rtx *xloc, rtx insn)
{
rtx new = canon_reg (*xloc, insn);
if (*xloc)
{
rtx new = canon_reg (*xloc, insn);
/* If replacing pseudo with hard reg or vice versa, ensure the
insn remains valid. Likewise if the insn has MATCH_DUPs. */
if (insn != 0 && new != 0)
validate_change (insn, xloc, new, 1);
else
*xloc = new;
/* If replacing pseudo with hard reg or vice versa, ensure the
insn remains valid. Likewise if the insn has MATCH_DUPs. */
gcc_assert (insn && new);
validate_change (insn, xloc, new, 1);
}
}
/* Canonicalize an expression:
@ -4151,12 +4133,12 @@ cse_insn (rtx insn, rtx libcall_insn)
This does nothing when a register is clobbered
because we have already invalidated the reg. */
if (MEM_P (XEXP (y, 0)))
canon_reg (XEXP (y, 0), NULL_RTX);
canon_reg (XEXP (y, 0), insn);
}
else if (GET_CODE (y) == USE
&& ! (REG_P (XEXP (y, 0))
&& REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
canon_reg (y, NULL_RTX);
canon_reg (y, insn);
else if (GET_CODE (y) == CALL)
{
/* The result of apply_change_group can be ignored; see
@ -4170,14 +4152,14 @@ cse_insn (rtx insn, rtx libcall_insn)
else if (GET_CODE (x) == CLOBBER)
{
if (MEM_P (XEXP (x, 0)))
canon_reg (XEXP (x, 0), NULL_RTX);
canon_reg (XEXP (x, 0), insn);
}
/* Canonicalize a USE of a pseudo register or memory location. */
else if (GET_CODE (x) == USE
&& ! (REG_P (XEXP (x, 0))
&& REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
canon_reg (XEXP (x, 0), NULL_RTX);
canon_reg (XEXP (x, 0), insn);
else if (GET_CODE (x) == CALL)
{
/* The result of apply_change_group can be ignored; see canon_reg. */
@ -4195,8 +4177,12 @@ cse_insn (rtx insn, rtx libcall_insn)
&& (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
|| GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
{
src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
/* The result of apply_change_group can be ignored; see canon_reg. */
canon_reg (XEXP (tem, 0), insn);
apply_change_group ();
src_eqv = fold_rtx (XEXP (tem, 0), insn);
XEXP (tem, 0) = src_eqv;
df_notes_rescan (insn);
}
/* Canonicalize sources and addresses of destinations.
@ -4861,6 +4847,7 @@ cse_insn (rtx insn, rtx libcall_insn)
XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
sets[i].orig_src,
copy_rtx (new));
df_notes_rescan (libcall_insn);
}
/* The result of apply_change_group can be ignored; see
@ -4979,6 +4966,7 @@ cse_insn (rtx insn, rtx libcall_insn)
/* Record the actual constant value in a REG_EQUAL note,
making a new one if one does not already exist. */
set_unique_reg_note (insn, REG_EQUAL, src_const);
df_notes_rescan (insn);
}
}
@ -5056,11 +5044,6 @@ cse_insn (rtx insn, rtx libcall_insn)
else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
&& !LABEL_REF_NONLOCAL_P (src))
{
/* Now emit a BARRIER after the unconditional jump. */
if (NEXT_INSN (insn) == 0
|| !BARRIER_P (NEXT_INSN (insn)))
emit_barrier_after (insn);
/* We reemit the jump in as many cases as possible just in
case the form of an unconditional jump is significantly
different than a computed jump or conditional jump.
@ -5086,11 +5069,6 @@ cse_insn (rtx insn, rtx libcall_insn)
delete_insn_and_edges (insn);
insn = new;
/* Now emit a BARRIER after the unconditional jump. */
if (NEXT_INSN (insn) == 0
|| !BARRIER_P (NEXT_INSN (insn)))
emit_barrier_after (insn);
}
else
INSN_CODE (insn) = -1;
@ -5716,7 +5694,7 @@ invalidate_from_clobbers (rtx x)
Return the replacement for X. */
static rtx
cse_process_notes (rtx x, rtx object)
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
enum rtx_code code = GET_CODE (x);
const char *fmt = GET_RTX_FORMAT (code);
@ -5737,22 +5715,22 @@ cse_process_notes (rtx x, rtx object)
case MEM:
validate_change (x, &XEXP (x, 0),
cse_process_notes (XEXP (x, 0), x), 0);
cse_process_notes (XEXP (x, 0), x, changed), 0);
return x;
case EXPR_LIST:
case INSN_LIST:
if (REG_NOTE_KIND (x) == REG_EQUAL)
XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
if (XEXP (x, 1))
XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
return x;
case SIGN_EXTEND:
case ZERO_EXTEND:
case SUBREG:
{
rtx new = cse_process_notes (XEXP (x, 0), object);
rtx new = cse_process_notes (XEXP (x, 0), object, changed);
/* We don't substitute VOIDmode constants into these rtx,
since they would impede folding. */
if (GET_MODE (new) != VOIDmode)
@ -5788,10 +5766,20 @@ cse_process_notes (rtx x, rtx object)
for (i = 0; i < GET_RTX_LENGTH (code); i++)
if (fmt[i] == 'e')
validate_change (object, &XEXP (x, i),
cse_process_notes (XEXP (x, i), object), 0);
cse_process_notes (XEXP (x, i), object, changed), 0);
return x;
}
/* Wrapper around cse_process_notes_1 that additionally records in
   *CHANGED whether the returned rtx differs from X.  Return the
   (possibly unchanged) replacement for X.  */

static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
  rtx result = cse_process_notes_1 (x, object, changed);

  if (result != x)
    *changed = true;

  return result;
}
/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
@ -5966,14 +5954,12 @@ have_eh_succ_edges (basic_block bb)
/* Scan to the end of the path described by DATA. Return an estimate of
the total number of SETs, and the lowest and highest insn CUID, of all
insns in the path. */
the total number of SETs of all insns in the path. */
static void
cse_prescan_path (struct cse_basic_block_data *data)
{
int nsets = 0;
int low_cuid = -1, high_cuid = -1; /* FIXME low_cuid not computed correctly */
int path_size = data->path_size;
int path_entry;
@ -5996,21 +5982,9 @@ cse_prescan_path (struct cse_basic_block_data *data)
nsets += XVECLEN (PATTERN (insn), 0);
else
nsets += 1;
/* Ignore insns made by CSE in a previous traversal of this
basic block. They cannot affect the boundaries of the
basic block.
FIXME: When we only visit each basic block at most once,
this can go away. */
if (INSN_UID (insn) <= max_uid && INSN_CUID (insn) > high_cuid)
high_cuid = INSN_CUID (insn);
if (INSN_UID (insn) <= max_uid && INSN_CUID (insn) < low_cuid)
low_cuid = INSN_CUID (insn);
}
}
data->low_cuid = low_cuid;
data->high_cuid = high_cuid;
data->nsets = nsets;
}
@ -6027,6 +6001,8 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
qty_table = XNEWVEC (struct qty_table_elem, max_qty);
new_basic_block ();
cse_ebb_live_in = DF_LIVE_IN (ebb_data->path[0].bb);
cse_ebb_live_out = DF_LIVE_OUT (ebb_data->path[path_size - 1].bb);
for (path_entry = 0; path_entry < path_size; path_entry++)
{
basic_block bb;
@ -6058,8 +6034,13 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
/* Process notes first so we have all notes in canonical forms
when looking for duplicate operations. */
if (REG_NOTES (insn))
REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
NULL_RTX);
{
bool changed = false;
REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
NULL_RTX, &changed);
if (changed)
df_notes_rescan (insn);
}
/* Track when we are inside in LIBCALL block. Inside such
a block we do not want to record destinations. The last
@ -6191,6 +6172,7 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
free (qty_table);
}
/* Perform cse on the instructions of a function.
F is the first instruction.
@ -6207,6 +6189,11 @@ cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
int *rc_order = XNEWVEC (int, last_basic_block);
int i, n_blocks;
df_set_flags (DF_LR_RUN_DCE);
df_analyze ();
df_set_flags (DF_DEFER_INSN_RESCAN);
reg_scan (get_insns (), max_reg_num ());
init_cse_reg_info (nregs);
ebb_data.path = XNEWVEC (struct branch_path,
@ -6229,19 +6216,6 @@ cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (cse_visited_basic_blocks);
/* Compute the mapping from uids to cuids.
CUIDs are numbers assigned to insns, like uids, except that
that cuids increase monotonically through the code. */
max_uid = get_max_uid ();
uid_cuid = XCNEWVEC (int, max_uid + 1);
i = 0;
FOR_EACH_BB (bb)
{
rtx insn;
FOR_BB_INSNS (bb, insn)
INSN_CUID (insn) = ++i;
}
/* Loop over basic blocks in reverse completion order (RPO),
excluding the ENTRY and EXIT blocks. */
n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
@ -6271,8 +6245,6 @@ cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
needed for this path. For this, we take the number of sets
and multiply that by MAX_RECOG_OPERANDS. */
max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
cse_basic_block_start = ebb_data.low_cuid;
cse_basic_block_end = ebb_data.high_cuid;
/* Dump the path we're about to process. */
if (dump_file)
@ -6284,7 +6256,6 @@ cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
/* Clean up. */
end_alias_analysis ();
free (uid_cuid);
free (reg_eqv_table);
free (ebb_data.path);
sbitmap_free (cse_visited_basic_blocks);
@ -6605,7 +6576,7 @@ delete_trivially_dead_insns (rtx insns, int nreg)
/* If this is a dead insn, delete it and show registers in it aren't
being used. */
if (! live_insn)
if (! live_insn && dbg_cnt (delete_trivial_dead))
{
count_reg_usage (insn, counts, NULL_RTX, -1);
delete_insn_and_edges (insn);
@ -7009,11 +6980,10 @@ static unsigned int
rest_of_handle_cse (void)
{
int tem;
if (dump_file)
dump_flow_info (dump_file, dump_flags);
reg_scan (get_insns (), max_reg_num ());
tem = cse_main (get_insns (), max_reg_num ());
/* If we are not running more CSE passes, then we are no longer
@ -7024,7 +6994,7 @@ rest_of_handle_cse (void)
rebuild_jump_labels (get_insns ());
if (tem || optimize > 1)
cleanup_cfg (CLEANUP_EXPENSIVE);
cleanup_cfg (0);
return 0;
}
@ -7042,6 +7012,7 @@ struct tree_opt_pass pass_cse =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_df_finish |
TODO_dump_func |
TODO_ggc_collect |
TODO_verify_flow, /* todo_flags_finish */
@ -7078,11 +7049,9 @@ rest_of_handle_cse2 (void)
{
timevar_push (TV_JUMP);
rebuild_jump_labels (get_insns ());
delete_dead_jumptables ();
cleanup_cfg (CLEANUP_EXPENSIVE);
cleanup_cfg (0);
timevar_pop (TV_JUMP);
}
reg_scan (get_insns (), max_reg_num ());
cse_not_expected = 1;
return 0;
}
@ -7101,6 +7070,7 @@ struct tree_opt_pass pass_cse2 =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_df_finish |
TODO_dump_func |
TODO_ggc_collect |
TODO_verify_flow, /* todo_flags_finish */

View file

@ -1,6 +1,6 @@
/* Common subexpression elimination library for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -131,6 +131,10 @@ static cselib_val dummy_val;
each time memory is invalidated. */
static cselib_val *first_containing_mem = &dummy_val;
static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
/* If nonnull, cselib will call this function before freeing useless
VALUEs. A VALUE is deemed useless if its "locs" field is null. */
void (*cselib_discard_hook) (cselib_val *);
/* Allocate a struct elt_list and fill in its two elements with the
@ -331,6 +335,9 @@ discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
if (v->locs == 0)
{
if (cselib_discard_hook)
cselib_discard_hook (v);
CSELIB_VAL_PTR (v->val_rtx) = NULL;
htab_clear_slot (cselib_hash_table, x);
unchain_one_value (v);
@ -823,6 +830,260 @@ cselib_lookup_mem (rtx x, int create)
return mem_elt;
}
/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the one with the lowest regno.  There may be
   several non-reg results; we just take the first one because they will
   all expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, bitmap regs_active, int max_depth)
{
  /* Best register location found so far; REGNO doubles as the
     "nothing found yet" flag via UINT_MAX.  */
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Avoid infinite recursion trying to expand a reg into the
	 same reg.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  /* A non-reg location: try to expand it recursively and
	     return the first successful expansion.  */
	  rtx result;
	  if (dump_file)
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  result = cselib_expand_value_rtx (p->loc, regs_active, max_depth - 1);
	  if (result)
	    return result;
	}
    }

  /* No non-reg location expanded; fall back to the best (lowest
     regno) register location found above.  */
  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file)
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx (reg_result, regs_active, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file)
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer equals point of
   view but from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  MAX_DEPTH bounds the recursion depth.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expand into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	/* The head of the list may be a placeholder with a NULL elt;
	   skip it.  */
	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      int regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is requirement of dse and if others potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM)
		return orig;

	      /* Mark the register active to stop recursive expansion
		 through it; cleared again right after expand_loc.  */
	      bitmap_set_bit (regs_active, regno);

	      if (dump_file)
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, regs_active, max_depth);
	      bitmap_clear_bit (regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
      }

      /* NOTE(review): if no value with a matching mode was found
	 above, control falls through to the constant cases below and
	 returns ORIG unchanged — this appears deliberate but is
	 undocumented; confirm.  */

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;

    case CLOBBER:
      /* Clobbers of hard registers are returned as-is.  */
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case VALUE:
      {
	rtx result;
	if (dump_file)
	  fprintf (dump_file, "expanding value %s into: ", GET_MODE_NAME (GET_MODE (orig)));

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, regs_active, max_depth);
	/* A bare CONST_INT carries no mode; wrap it so the mode of
	   the original VALUE is preserved.  */
	if (result
	    && GET_CODE (result) == CONST_INT
	    && GET_MODE (orig) != VOIDmode)
	  {
	    result = gen_rtx_CONST (GET_MODE (orig), result);
	    if (dump_file)
	      fprintf (dump_file, " wrapping const_int result in const to preserve mode %s\n",
		       GET_MODE_NAME (GET_MODE (orig)));
	  }
	return result;
      }

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Recursively expand every operand; any failure propagates up as
     NULL.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx (XEXP (orig, i), regs_active, max_depth - 1);
	    if (!result)
	      return NULL;
	    XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      {
		rtx result = cselib_expand_value_rtx (XVECEXP (orig, i, j), regs_active, max_depth - 1);
		if (!result)
		  return NULL;
		XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  /* Try to simplify the expanded form; fall back to the plain copy.  */
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;

  return copy;
}
/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
with VALUE expressions. This way, it becomes independent of changes
to registers and memory.
@ -1505,6 +1766,7 @@ cselib_init (bool record_memory)
void
cselib_finish (void)
{
cselib_discard_hook = NULL;
free_alloc_pool (elt_list_pool);
free_alloc_pool (elt_loc_list_pool);
free_alloc_pool (cselib_val_pool);

View file

@ -1,6 +1,6 @@
/* Common subexpression elimination for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2003, 2004, 2005 Free Software Foundation, Inc.
1999, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@ -59,6 +59,8 @@ struct elt_list GTY(())
cselib_val *elt;
};
extern void (*cselib_discard_hook) (cselib_val *);
extern cselib_val *cselib_lookup (rtx, enum machine_mode, int);
extern void cselib_init (bool record_memory);
extern void cselib_clear_table (void);
@ -67,5 +69,6 @@ extern void cselib_process_insn (rtx);
extern enum machine_mode cselib_reg_set_mode (rtx);
extern int rtx_equal_for_cselib_p (rtx, rtx);
extern int references_value_p (rtx, int);
extern rtx cselib_expand_value_rtx (rtx, bitmap, int);
extern rtx cselib_subst_to_values (rtx);
extern void cselib_invalidate_rtx (rtx);

107
gcc/dbgcnt.c Normal file
View file

@ -0,0 +1,107 @@
/* Debug counter for debugging support
Copyright (C) 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
See dbgcnt.def for usage information. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "dbgcnt.h"
/* Association of a counter name (as spelled in dbgcnt.def) with its
   enum debug_counter value.  */
struct string2counter_map {
  const char *name;
  enum debug_counter counter;
};

/* Expand dbgcnt.def once to build the name/enum table, indexed by
   enum debug_counter.  */
#define DEBUG_COUNTER(a) { #a , a },
static struct string2counter_map map[debug_counter_number_of_counters] =
{
#include "dbgcnt.def"
};
#undef DEBUG_COUNTER

/* Expand dbgcnt.def a second time to give every counter the default
   limit of UINT_MAX, i.e. effectively unlimited.  */
#define DEBUG_COUNTER(a) UINT_MAX,
static unsigned int limit[debug_counter_number_of_counters] =
{
#include "dbgcnt.def"
};
#undef DEBUG_COUNTER

/* Number of times each counter has fired so far (zero-initialized).  */
static unsigned int count[debug_counter_number_of_counters];
bool
dbg_cnt_is_enabled (enum debug_counter index)
{
return count[index] <= limit[index];
}
bool
dbg_cnt (enum debug_counter index)
{
count[index]++;
return dbg_cnt_is_enabled (index);
}
/* Set the limit of counter INDEX to VALUE and report the new setting
   on stderr.  */

static void
dbg_cnt_set_limit_by_index (enum debug_counter index, int value)
{
  const char *name = map[index].name;

  limit[index] = value;
  fprintf (stderr, "dbg_cnt '%s' set to %d\n", name, value);
}
/* Find the counter whose name matches the first LEN characters of
   NAME and set its limit to VALUE.  The table is scanned from the
   highest-numbered counter downward; if nothing matches, do
   nothing.  */

static void
dbg_cnt_set_limit_by_name (const char *name, int len, int value)
{
  int index;

  for (index = debug_counter_number_of_counters - 1; index >= 0; index--)
    if (strncmp (map[index].name, name, len) == 0)
      {
	dbg_cnt_set_limit_by_index (index, value);
	return;
      }
}
/* Parse ARG, a -fdbg-cnt= style option of the form
   "counter1:limit1,counter2:limit2,...", and apply each name:limit
   pair in turn.  The first entry only requires a ':' to be present;
   each subsequent entry must also have a digit right after its ':'
   or parsing stops.  */

void
dbg_cnt_process_opt (const char *arg)
{
  const char *entry;
  char *colon;
  char *comma;

  colon = strchr (arg, ':');
  if (colon == NULL)
    return;

  dbg_cnt_set_limit_by_name (arg, colon - arg, atoi (colon + 1));

  comma = strchr (colon + 1, ',');
  for (; comma != NULL; comma = strchr (colon + 1, ','))
    {
      entry = comma + 1;
      colon = strchr (entry, ':');
      if (colon == NULL || colon[1] < '0' || colon[1] > '9')
	return;
      dbg_cnt_set_limit_by_name (entry, colon - entry, atoi (colon + 1));
    }
}

84
gcc/dbgcnt.def Normal file
View file

@ -0,0 +1,84 @@
/* This file contains the list of the debug counter for GCC.
Copyright (C) 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA. */
/* A debug counter provides you a way to count an event
and return false after the counter has exceeded the threshold
specified by the option.
What is it used for ?
This is primarily used to speed up the search for the bad transformation
an optimization pass does. By doing a binary search on N,
you can quickly narrow down to one transformation
which is bad, or which triggers the bad behavior downstream
(usually in the form of the badly generated code).
How does it work ?
Every time dbg_cnt (named-counter) is called,
the counter is incremented for the named-counter.
And the incremented value is compared against the threshold (limit)
specified by the option.
dbg_cnt () returns true if it is at or below threshold, and false if above.
How to add a new one ?
To add a new counter, simply add an entry below with some descriptive name,
and add call(s) to dbg_cnt(your-counter-name) in appropriate places.
Usually, you want to control at the finest granularity at which
any particular transformation can happen.
e.g. for each instruction in a dead code elimination,
or for each copy instruction in register coalescing,
or constant-propagation for each insn,
or a block straightening, etc.
See dce.c for an example. With the dbg_cnt () call in dce.c,
now a developer can use -fdbg-cnt=dce:N
to stop doing the dead code elimination after N times.
How to use it ?
By default, all limits are UINT_MAX.
Since debug count is unsigned int, <= UINT_MAX returns true always.
i.e. dbg_cnt() returns true always regardless of the counter value
(although it still counts the event).
Use -fdbg-cnt=counter1:N,counter2:M,...
which sets the limit for counter1 to N, and the limit for counter2 to M, etc.
e.g. setting a limit to zero will make dbg_cnt () return false *always*.
*/
/* Debug counter definitions.
   Please keep the list in alphabetical order.  Each entry becomes
   both an enum debug_counter value (dbgcnt.h) and a name/limit table
   row (dbgcnt.c) via the DEBUG_COUNTER macro.  */
DEBUG_COUNTER (auto_inc_dec)
DEBUG_COUNTER (cse2_move2add)
DEBUG_COUNTER (dce)
DEBUG_COUNTER (delete_trivial_dead)
DEBUG_COUNTER (dse)
DEBUG_COUNTER (gcse2_delete)
DEBUG_COUNTER (ia64_sched2)
DEBUG_COUNTER (local_alloc_for_sched)
DEBUG_COUNTER (postreload_cse)
DEBUG_COUNTER (pre_insn)
DEBUG_COUNTER (sched2_func)
DEBUG_COUNTER (sched_block)
DEBUG_COUNTER (sched_func)
DEBUG_COUNTER (sched_insn)
DEBUG_COUNTER (sched_region)
DEBUG_COUNTER (split_for_sched2)
DEBUG_COUNTER (tail_call)

39
gcc/dbgcnt.h Normal file
View file

@ -0,0 +1,39 @@
/* Debug counter for debugging support
Copyright (C) 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
See dbgcnt.def for usage information. */
#ifndef GCC_DBGCNT_H
#define GCC_DBGCNT_H

/* Build the enum of all counters by expanding dbgcnt.def; the final
   sentinel debug_counter_number_of_counters is the table size.  */
#define DEBUG_COUNTER(a) a,

enum debug_counter {
#include "dbgcnt.def"
   debug_counter_number_of_counters
};

#undef DEBUG_COUNTER

/* Return true while counter INDEX is at or below its limit.  */
extern bool dbg_cnt_is_enabled (enum debug_counter index);
/* Increment counter INDEX; return true while it is at or below its
   limit.  */
extern bool dbg_cnt (enum debug_counter index);
/* Parse a "name:limit[,name:limit]..." option string.  */
extern void dbg_cnt_process_opt (const char *arg);

#endif /* GCC_DBGCNT_H */

789
gcc/dce.c Normal file
View file

@ -0,0 +1,789 @@
/* RTL dead code elimination.
Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "dce.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"
DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* When true, init_dce skips the df_analyze call because the dataflow
   information is already available.  NOTE(review): appears to be set
   outside this chunk — confirm against the rest of the file.  */
static bool df_in_progress = false;

/* True if we deleted at least one instruction.  */
static bool something_changed;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Obstacks backing the bitmaps this pass allocates.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* One bit per insn UID; set when the insn has been marked as needed
   (see mark_insn / marked_insn_p).  */
static sbitmap marked = NULL;
/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  FAST selects the fast-DCE variant, which affects how
   CLOBBERs are judged.  */

static bool
deletable_insn_p (rtx insn, bool fast)
{
  rtx x;

  switch (GET_CODE (PATTERN (insn)))
    {
    case USE:
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
	 USEs do not work after reload and generates UNSPECS rather
	 than USEs.  Since dce is run after reload we need to avoid
	 deleting these even if they are dead.  If it turns out that
	 USEs really do work after reload, the ia-64 should be
	 changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (PATTERN (insn), 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    default:
      /* Only plain (non-jump, non-call) insns are candidates.  */
      if (!NONJUMP_INSN_P (insn))
	return false;

      /* Volatile operations must be preserved.  */
      if (volatile_insn_p (PATTERN (insn)))
	return false;

      /* With -fnon-call-exceptions, a trapping insn may be live for
	 its EH side effect alone.  */
      if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
	return false;

      return true;
    }
}
/* Return true if INSN has been marked as needed.  (The original
   comment said "has NOT been marked", which contradicts the code:
   the bit is set by mark_insn for needed insns, and mark_insn guards
   with !marked_insn_p.)  */

static inline int
marked_insn_p (rtx insn)
{
  if (insn)
    return TEST_BIT (marked, INSN_UID (insn));
  else
    /* Artificial defs are always needed and they do not have an
       insn.  */
    return true;
}
/* Mark INSN as needed if it is not marked already.  On the slow path
   (FAST false) additionally queue it on the worklist so its
   dependencies get processed later.  */

static void
mark_insn (rtx insn, bool fast)
{
  if (marked_insn_p (insn))
    return;

  if (!fast)
    VEC_safe_push (rtx, heap, worklist, insn);
  SET_BIT (marked, INSN_UID (insn));
  if (dump_file)
    fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
}
/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  Fast-DCE variant: marks DATA with
   FAST == true.  */

static void
mark_nonreg_stores_1 (rtx dest, rtx pattern, void *data)
{
  if (GET_CODE (pattern) == CLOBBER || REG_P (dest))
    return;

  mark_insn ((rtx) data, true);
}
/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  Slow-DCE variant: marks DATA with
   FAST == false.  */

static void
mark_nonreg_stores_2 (rtx dest, rtx pattern, void *data)
{
  if (GET_CODE (pattern) == CLOBBER || REG_P (dest))
    return;

  mark_insn ((rtx) data, false);
}
/* Mark INSN if BODY stores to a non-register destination, selecting
   the fast or slow note_stores callback according to FAST.  */

static void
mark_nonreg_stores (rtx body, rtx insn, bool fast)
{
  note_stores (body, fast ? mark_nonreg_stores_1 : mark_nonreg_stores_2, insn);
}
/* Initialize global variables for a new DCE pass.  Runs the dataflow
   analysis (adding use-def chains on the slow path) unless the caller
   signalled via df_in_progress that it is already available, then
   allocates the obstacks and the MARKED bitmap.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      /* Slow DCE walks use-def chains, so request them before the
	 analysis.  */
      if (!fast)
	df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
  bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
  /* One bit per insn UID; starts all-clear (nothing marked yet).  */
  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}
/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
   bad dangling REG_EQUAL notes.  Walks every definition of INSN and
   drains the df equal-use chain of the defined register, removing the
   REG_EQUAL/REG_EQUIV note from each insn found there.  */

static void
delete_corresponding_reg_eq_notes (rtx insn)
{
  struct df_ref **def_rec;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      unsigned int regno = DF_REF_REGNO (def);
      /* This loop is a little tricky.  We cannot just go down the
	 chain because it is being modified by the actions in the
	 loop.  So we just get the head.  We plan to drain the list
	 anyway.  */
      while (DF_REG_EQ_USE_CHAIN (regno))
	{
	  struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
	  rtx noted_insn = DF_REF_INSN (eq_use);
	  rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
	  if (!note)
	    note = find_reg_note (noted_insn, REG_EQUIV, NULL_RTX);

	  /* This assert is generally triggered when someone deletes a
	     REG_EQUAL or REG_EQUIV note by hacking the list manually
	     rather than calling remove_note.  */
	  gcc_assert (note);
	  remove_note (noted_insn, note);
	}
    }
}
/* Delete every instruction that hasn't been marked as needed; no-op
   moves are deleted even when marked.  Sets SOMETHING_CHANGED if any
   insn was removed.  (The original comment referred to DCE_DF and a
   DF_DELETE parameter, neither of which exists here.)  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;

  something_changed = false;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS_SAFE (bb, insn, next)
      if (INSN_P (insn))
	{
	  /* A no-op move is always deleted; first repair the libcall
	     bookkeeping if it was the first insn of a libcall.  */
	  if (noop_move_p (insn))
	    {
	      /* Note that this code does not handle the case where
		 the last insn of libcall is deleted.  As it turns out
		 this case is excluded in the call to noop_move_p.  */
	      rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
	      if (note && (XEXP (note, 0) != insn))
		{
		  /* Move the REG_LIBCALL note onto the next real insn
		     and repoint the matching REG_RETVAL note at it.  */
		  rtx new_libcall_insn = next_real_insn (insn);
		  rtx retval_note = find_reg_note (XEXP (note, 0),
						   REG_RETVAL, NULL_RTX);
		  REG_NOTES (new_libcall_insn)
		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
					 REG_NOTES (new_libcall_insn));
		  XEXP (retval_note, 0) = new_libcall_insn;
		}
	    }
	  else if (marked_insn_p (insn))
	    continue;

	  /* WARNING, this debugging can itself cause problems if the
	     edge of the counter causes part of a libcall to be
	     deleted but not all of it.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn, we have to delete
	     REG_EQUAL of the destination regs of the deleted insn
	     to prevent dangling REG_EQUAL.  */
	  delete_corresponding_reg_eq_notes (insn);
	  delete_insn_and_edges (insn);
	  something_changed = true;
	}
}
/* Mark all insns in the libcall that contains START_INSN as needed,
   passing DELETE_PARM as the "fast" argument to mark_insn.  All
   members of a libcall share the same REG_LIBCALL_ID note value, so
   we scan forward and backward from START_INSN while the id matches.

   Fix: the original had a redundant "insn = NEXT_INSN (start_insn);"
   before the first loop; its value was immediately overwritten by the
   for-loop initializer, so it has been removed.  */
static void
mark_libcall (rtx start_insn, bool delete_parm)
{
  rtx note = find_reg_note (start_insn, REG_LIBCALL_ID, NULL_RTX);
  int id = INTVAL (XEXP (note, 0));
  rtx insn;

  mark_insn (start_insn, delete_parm);

  /* There are tales, long ago and far away, of the mystical nested
     libcall.  No one alive has actually seen one, but other parts of
     the compiler support them so we will here.  */
  for (insn = NEXT_INSN (start_insn); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* Stay in the loop as long as we are in any libcall.  */
	  if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
	    {
	      if (id == INTVAL (XEXP (note, 0)))
		{
		  mark_insn (insn, delete_parm);
		  if (dump_file)
		    fprintf (dump_file, "matching forward libcall %d[%d]\n",
			     INSN_UID (insn), id);
		}
	    }
	  else
	    break;
	}
    }

  for (insn = PREV_INSN (start_insn); insn; insn = PREV_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* Stay in the loop as long as we are in any libcall.  */
	  if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
	    {
	      if (id == INTVAL (XEXP (note, 0)))
		{
		  mark_insn (insn, delete_parm);
		  if (dump_file)
		    fprintf (dump_file, "matching backward libcall %d[%d]\n",
			     INSN_UID (insn), id);
		}
	    }
	  else
	    break;
	}
    }
}
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.

   Insns in a libcall are marked as a group.  Insns rejected by
   deletable_insn_p are marked needed outright; the remaining
   (potentially dead) insns are marked only if they store to a
   non-register destination.  FAST selects the fast-DCE variant of the
   marking helpers.  */
static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn;
  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	{
	  rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
	  if (note)
	    mark_libcall (insn, fast);
	  else if (deletable_insn_p (insn, fast))
	    mark_nonreg_stores (PATTERN (insn), insn, fast);
	  else
	    mark_insn (insn, fast);
	}
  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
/* UD-based DCE routines.  */

/* Mark the instructions that define the registers that are
   artificially used, such as the frame pointer and the stack pointer.
   The defining insns are found by walking the use-def chains of every
   block's artificial uses.  */
static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  struct df_ref **use_rec;
  FOR_ALL_BB (bb)
    {
      for (use_rec = df_get_artificial_uses (bb->index);
	   *use_rec; use_rec++)
	for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
	  mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
/* Mark every instruction that defines a register value that INSN
   uses, by walking INSN's use-def chains.  If INSN is part of a
   libcall, the whole libcall is marked first.  */
static void
mark_reg_dependencies (rtx insn)
{
  struct df_link *defs;
  struct df_ref **use_rec;
  /* If this is part of a libcall, mark the entire libcall.  */
  if (find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX))
    mark_libcall (insn, false);
  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if (dump_file)
	{
	  fprintf (dump_file, "Processing use of ");
	  print_simple_rtl (dump_file, DF_REF_REG (use));
	  fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
	}
      /* Every reaching def of this use becomes necessary.  */
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
	mark_insn (DF_REF_INSN (defs->ref), false);
    }
}
/* Release the data used by the UD-chain based DCE pass.  The worklist
   must already have been drained by the caller.  */
static void
end_ud_dce (void)
{
  sbitmap_free (marked);
  gcc_assert (VEC_empty (rtx, worklist));
}
/* UD-chain based DCE.  Mark the trivially necessary insns, then
   propagate necessity backwards through the use-def chains until the
   worklist is empty, and finally delete everything left unmarked.  */
static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;
  df_in_progress = false;
  init_dce (false);
  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  /* Worklist-driven closure: each popped insn marks (and enqueues)
     the insns whose defs it uses.  */
  while (VEC_length (rtx, worklist) > 0)
    {
      insn = VEC_pop (rtx, worklist);
      mark_reg_dependencies (insn);
    }
  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();
  end_ud_dce ();
  return 0;
}
static bool
gate_ud_dce (void)
{
return optimize > 1 && flag_dce;
}
/* Pass descriptor for the UD-chain based RTL DCE pass.  */
struct tree_opt_pass pass_ud_rtl_dce =
{
  "dce",                                /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};
/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Free the data allocated by init_dce: the marked-insn bitmap and the
   two bitmap obstacks.  Also clears the recursion guard.  */
static void
fini_dce (void)
{
  sbitmap_free (marked);
  bitmap_obstack_release (&dce_blocks_bitmap_obstack);
  bitmap_obstack_release (&dce_tmp_bitmap_obstack);
  df_in_progress = false;
}
/* Process basic block BB.  Return true if the live_in set has
   changed.  When REDO_OUT is true, first rebuild BB's live_out set
   from the live_in sets of its successors.

   Simulates liveness backwards through the block: an unmarked insn
   whose def is live becomes needed; the defs of every insn are then
   killed from the local live set, and only the uses of needed insns
   are added back.  */
static bool
dce_process_block (basic_block bb, bool redo_out)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  struct df_ref **def_rec, **use_rec;
  unsigned int bb_index = bb->index;
  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }
  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }
  /* LOCAL_LIVE tracks liveness backwards from the bottom of BB.  */
  bitmap_copy (local_live, DF_LR_OUT (bb));
  /* Process the artificial defs and uses at the bottom of the block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	  && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
	bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      struct df_ref *use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
	bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
	/* If this is a recursive call, the libcall will have already
	   been marked.  */
	if (!marked_insn_p (insn))
	  {
	    bool needed = false;
	    /* The insn is needed if there is someone who uses the output.  */
	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	      if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec)))
		{
		  needed = true;
		  break;
		}
	    if (needed)
	      {
		rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
		/* If we need to mark an insn in the middle of a
		   libcall, we need to back up to mark the entire
		   libcall.  Given that libcalls are rare, rescanning
		   the block should be a reasonable solution to trying
		   to figure out how to back up.  */
		if (note)
		  {
		    if (dump_file)
		      fprintf (dump_file, "needed libcall %d\n", INSN_UID (insn));
		    mark_libcall (insn, true);
		    BITMAP_FREE (local_live);
		    /* Restart from the bottom; live_out is unchanged.  */
		    return dce_process_block (bb, false);
		  }
		else
		  mark_insn (insn, true);
	      }
	  }
	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);
	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_simulate_uses (insn, local_live);
      }
  /* Process the artificial defs at the top of the block.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	  && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
	bitmap_clear_bit (local_live, DF_REF_REGNO (def));
    }
#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      /* Add use to set of uses in this BB.  */
      struct df_ref *use = *use_rec;
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
	bitmap_set_bit (local_live, DF_REF_REGNO (use));
    }
#endif
  /* The block changed iff the simulated live-in differs from the
     recorded DF_LR_IN.  */
  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);
  BITMAP_FREE (local_live);
  return block_changed;
}
/* Fast DCE driver.  Iterate over the blocks in postorder, running
   dce_process_block on each, until the live-in sets reach a fixed
   point, then delete the unmarked insns.

   When a block's live_in changes, already-processed predecessors force
   another global iteration, while not-yet-processed predecessors only
   need their live_out recomputed (via REDO_OUT) later this pass.

   Fix: removed the local variable loop_count, which was incremented
   each iteration but never read.  */
static void
fast_dce (void)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  int i;
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  while (global_changed)
    {
      global_changed = false;
      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK (index);
	  bool local_changed;

	  /* The fixed (entry/exit) blocks carry no insns to process.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  local_changed
	    = dce_process_block (bb, bitmap_bit_p (redo_out, index));
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  sbitmap_zero (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
/* Callback for running pass_rtl_dce.  Runs one complete fast-DCE
   cycle: initialize, iterate to a fixed point, tear down.  */
static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce ();
  fini_dce ();
  df_in_progress = false;
  return 0;
}
/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called by elsewhere but it will not update the bit
   vectors for any other problems than LR.

   Fix: the flag bits passed to df_clear_flags are now combined with
   "|" rather than "+"; they are bitmask values, and "|" is the
   correct, self-documenting idiom (and is safe if a flag is ever
   listed twice).  */
void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
	 immediately.  Otherwise there will be problems handling the
	 eq_notes.  */
      enum df_changeable_flags old_flags
	= df_clear_flags (DF_DEFER_INSN_RESCAN | DF_NO_INSN_RESCAN);
      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_set_flags (old_flags);
    }
}
static bool
gate_fast_dce (void)
{
return optimize > 0 && flag_dce;
}
/* Run a fast DCE pass and return true if any instructions were
deleted. */
bool
run_fast_dce (void)
{
return gate_fast_dce () && (rest_of_handle_fast_dce (), something_changed);
}
/* Pass descriptor for the fast RTL DCE pass.  */
struct tree_opt_pass pass_fast_rtl_dce =
{
  "dce",                                /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};

29
gcc/dce.h Normal file
View file

@ -0,0 +1,29 @@
/* RTL dead code elimination.
Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA. */
#ifndef GCC_DCE_H
#define GCC_DCE_H
struct df;
extern bool run_fast_dce (void);
extern void run_fast_df_dce (void);
#endif /* GCC_DCE_H */

View file

@ -1,5 +1,5 @@
/* DDG - Data Dependence Graph implementation.
Copyright (C) 2004, 2005, 2006
Copyright (C) 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>
@ -43,7 +43,6 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "sbitmap.h"
#include "expr.h"
#include "bitmap.h"
#include "df.h"
#include "ddg.h"
/* A flag indicating that a ddg edge belongs to an SCC or not. */
@ -230,10 +229,10 @@ create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
for all its uses in the next iteration, and an output dependence to the
first def of the next iteration. */
static void
add_deps_for_def (ddg_ptr g, struct df *df, struct df_ref *rd)
add_deps_for_def (ddg_ptr g, struct df_ref *rd)
{
int regno = DF_REF_REGNO (rd);
struct df_ru_bb_info *bb_info = DF_RU_BB_INFO (df, g->bb);
struct df_ru_bb_info *bb_info = DF_RU_BB_INFO (g->bb);
struct df_link *r_use;
int use_before_def = false;
rtx def_insn = DF_REF_INSN (rd);
@ -265,7 +264,7 @@ add_deps_for_def (ddg_ptr g, struct df *df, struct df_ref *rd)
there is a use between the two defs. */
if (! use_before_def)
{
struct df_ref *def = df_bb_regno_first_def_find (df, g->bb, regno);
struct df_ref *def = df_bb_regno_first_def_find (g->bb, regno);
int i;
ddg_node_ptr dest_node;
@ -274,7 +273,7 @@ add_deps_for_def (ddg_ptr g, struct df *df, struct df_ref *rd)
/* Check if there are uses after RD. */
for (i = src_node->cuid + 1; i < g->num_nodes; i++)
if (df_find_use (df, g->nodes[i].insn, rd->reg))
if (df_find_use (g->nodes[i].insn, DF_REF_REG (rd)))
return;
dest_node = get_node_of_insn (g, def->insn);
@ -286,16 +285,16 @@ add_deps_for_def (ddg_ptr g, struct df *df, struct df_ref *rd)
(nearest BLOCK_BEGIN) def of the next iteration, unless USE is followed
by a def in the block. */
static void
add_deps_for_use (ddg_ptr g, struct df *df, struct df_ref *use)
add_deps_for_use (ddg_ptr g, struct df_ref *use)
{
int i;
int regno = DF_REF_REGNO (use);
struct df_ref *first_def = df_bb_regno_first_def_find (df, g->bb, regno);
struct df_ref *first_def = df_bb_regno_first_def_find (g->bb, regno);
ddg_node_ptr use_node;
ddg_node_ptr def_node;
struct df_rd_bb_info *bb_info;
bb_info = DF_RD_BB_INFO (df, g->bb);
bb_info = DF_RD_BB_INFO (g->bb);
if (!first_def)
return;
@ -307,7 +306,7 @@ add_deps_for_use (ddg_ptr g, struct df *df, struct df_ref *use)
/* Make sure there are no defs after USE. */
for (i = use_node->cuid + 1; i < g->num_nodes; i++)
if (df_find_def (df, g->nodes[i].insn, use->reg))
if (df_find_def (g->nodes[i].insn, DF_REF_REG (use)))
return;
/* We must not add ANTI dep when there is an intra-loop TRUE dep in
the opposite direction. If the first_def reaches the USE then there is
@ -318,35 +317,35 @@ add_deps_for_use (ddg_ptr g, struct df *df, struct df_ref *use)
/* Build inter-loop dependencies, by looking at DF analysis backwards. */
static void
build_inter_loop_deps (ddg_ptr g, struct df *df)
build_inter_loop_deps (ddg_ptr g)
{
unsigned rd_num, u_num;
struct df_rd_bb_info *rd_bb_info;
struct df_ru_bb_info *ru_bb_info;
bitmap_iterator bi;
rd_bb_info = DF_RD_BB_INFO (df, g->bb);
rd_bb_info = DF_RD_BB_INFO (g->bb);
/* Find inter-loop output and true deps by connecting downward exposed defs
to the first def of the BB and to upwards exposed uses. */
EXECUTE_IF_SET_IN_BITMAP (rd_bb_info->gen, 0, rd_num, bi)
{
struct df_ref *rd = DF_DEFS_GET (df, rd_num);
struct df_ref *rd = DF_DEFS_GET (rd_num);
add_deps_for_def (g, df, rd);
add_deps_for_def (g, rd);
}
ru_bb_info = DF_RU_BB_INFO (df, g->bb);
ru_bb_info = DF_RU_BB_INFO (g->bb);
/* Find inter-loop anti deps. We are interested in uses of the block that
appear below all defs; this implies that these uses are killed. */
EXECUTE_IF_SET_IN_BITMAP (ru_bb_info->kill, 0, u_num, bi)
{
struct df_ref *use = DF_USES_GET (df, u_num);
/* We are interested in uses of this BB. */
if (BLOCK_FOR_INSN (use->insn) == g->bb)
add_deps_for_use (g, df, use);
struct df_ref *use = DF_USES_GET (u_num);
if (!(DF_REF_FLAGS (use) & DF_REF_IN_NOTE))
/* We are interested in uses of this BB. */
if (BLOCK_FOR_INSN (use->insn) == g->bb)
add_deps_for_use (g, use);
}
}
@ -443,7 +442,7 @@ build_intra_loop_deps (ddg_ptr g)
of ddg type that represents it.
Initialize the ddg structure fields to the appropriate values. */
ddg_ptr
create_ddg (basic_block bb, struct df *df, int closing_branch_deps)
create_ddg (basic_block bb, int closing_branch_deps)
{
ddg_ptr g;
rtx insn, first_note;
@ -520,7 +519,7 @@ create_ddg (basic_block bb, struct df *df, int closing_branch_deps)
/* Build the data dependency graph. */
build_intra_loop_deps (g);
build_inter_loop_deps (g, df);
build_inter_loop_deps (g);
return g;
}

View file

@ -1,5 +1,5 @@
/* DDG - Data Dependence Graph - interface.
Copyright (C) 2004
Copyright (C) 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>
@ -27,7 +27,6 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "sbitmap.h"
/* For basic_block. */
#include "basic-block.h"
/* For struct df. */
#include "df.h"
typedef struct ddg_node *ddg_node_ptr;
@ -166,7 +165,7 @@ struct ddg_all_sccs
};
ddg_ptr create_ddg (basic_block, struct df *, int closing_branch_deps);
ddg_ptr create_ddg (basic_block, int closing_branch_deps);
void free_ddg (ddg_ptr);
void print_ddg (FILE *, ddg_ptr);

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

831
gcc/df.h

File diff suppressed because it is too large Load diff

View file

@ -1,5 +1,5 @@
@c -*-texinfo-*-
@c Copyright (C) 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
@c Copyright (C) 2001, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
@c This is part of the GCC manual.
@c For copying conditions, see the file gcc.texi.
@ -626,41 +626,34 @@ may be used at a later point in the program. This information is
used, for instance, during register allocation, as the pseudo
registers only need to be assigned to a unique hard register or to a
stack slot if they are live. The hard registers and stack slots may
be freely reused for other values when a register is dead.
be freely reused for other values when a register is dead.
Liveness information is available in the back end starting with
@code{pass_df_initialize} and ending with @code{pass_df_finish}. Three
flavors of live analysis are available: With @code{LR}, it is possible
to determine at any point @code{P} in the function if the register may be
used on some path from @code{P} to the end of the function. With
@code{UR}, it is possible to determine if there is a path from the
beginning of the function to @code{P} that defines the variable.
@code{LIVE} is the intersection of the @code{LR} and @code{UR} and a
variable is live at @code{P} if there is both an assignment that reaches
it from the beginning of the function and a uses that can be reached on
some path from @code{P} to the end of the function.
In general @code{LIVE} is the most useful of the three. The macros
@code{DF_[LR,UR,LIVE]_[IN,OUT]} can be used to access this information.
The macros take a basic block number and return a bitmap that is indexed
by the register number. This information is only guaranteed to be up to
date after calls are made to @code{df_analyze}. See the file
@code{df-core.c} for details on using the dataflow.
@findex REG_DEAD, REG_UNUSED
The liveness information is stored partly in the RTL instruction
stream and partly in the flow graph. Local information is stored in
the instruction stream:
Each instruction may contain @code{REG_DEAD} notes representing that
the value of a given register is no longer needed, or
The liveness information is stored partly in the RTL instruction stream
and partly in the flow graph. Local information is stored in the
instruction stream: Each instruction may contain @code{REG_DEAD} notes
representing that the value of a given register is no longer needed, or
@code{REG_UNUSED} notes representing that the value computed by the
instruction is never used. The second is useful for instructions
computing multiple values at once.
@findex global_live_at_start, global_live_at_end
Global liveness information is stored in the control flow graph.
Each basic block contains two bitmaps, @code{global_live_at_start} and
@code{global_live_at_end} representing liveness of each register at
the entry and exit of the basic block. The file @code{flow.c}
contains functions to compute liveness of each register at any given
place in the instruction stream using this information.
@findex BB_DIRTY, clear_bb_flags, update_life_info_in_dirty_blocks
Liveness is expensive to compute and thus it is desirable to keep it
up to date during code modifying passes. This can be easily
accomplished using the @code{flags} field of a basic block. Functions
modifying the instruction stream automatically set the @code{BB_DIRTY}
flag of a modifies basic block, so the pass may simply
use@code{clear_bb_flags} before doing any modifications and then ask
the data flow module to have liveness updated via the
@code{update_life_info_in_dirty_blocks} function.
This scheme works reliably as long as no control flow graph
transformations are done. The task of updating liveness after control
flow graph changes is more difficult as normal iterative data flow
analysis may produce invalid results or get into an infinite cycle
when the initial solution is not below the desired one. Only simple
transformations, like splitting basic blocks or inserting on edges,
are safe, as functions to implement them already know how to update
liveness information locally.

View file

@ -1,5 +1,5 @@
@c Copyright (C) 1988, 1989, 1992, 1994, 1997, 1998, 1999, 2000, 2001, 2002,
@c 2003, 2004, 2005
@c 2003, 2004, 2005, 2006, 2007
@c Free Software Foundation, Inc.
@c This is part of the GCC manual.
@c For copying conditions, see the file gcc.texi.
@ -3250,7 +3250,9 @@ file as some small positive or negative offset from a named pattern.
@item LOG_LINKS (@var{i})
A list (chain of @code{insn_list} expressions) giving information about
dependencies between instructions within a basic block. Neither a jump
nor a label may come between the related insns.
nor a label may come between the related insns. These are only used by
the schedulers and by combine. This is a deprecated data structure.
Def-use and use-def chains are now preferred.
@findex REG_NOTES
@item REG_NOTES (@var{i})
@ -3531,6 +3533,12 @@ of the JUMP@. The format is a bitmask of ATTR_FLAG_* values.
This is used on an RTX_FRAME_RELATED_P insn wherein the attached expression
is used in place of the actual insn pattern. This is done in cases where
the pattern is either complex or misleading.
@findex REG_LIBCALL_ID
@item REG_LIBCALL_ID
This is used to specify that an insn is part of a libcall. Each libcall
in a function has a unique id, and all the insns that are part of that
libcall will have a REG_LIBCALL_ID note attached with the same ID.
@end table
For convenience, the machine mode in an @code{insn_list} or

View file

@ -1,5 +1,5 @@
/* Calculate (post)dominators in slightly super-linear time.
Copyright (C) 2000, 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2000, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Michael Matz (matz@ifh.de).
This file is part of GCC.
@ -152,6 +152,7 @@ static unsigned n_bbs_in_dom_tree[2];
static void
init_dom_info (struct dom_info *di, enum cdi_direction dir)
{
/* We need memory for n_basic_blocks nodes. */
unsigned int num = n_basic_blocks;
init_ar (di->dfs_parent, TBB, num, 0);
init_ar (di->path_min, TBB, num, i);

3108
gcc/dse.c Normal file

File diff suppressed because it is too large Load diff

33
gcc/dse.h Normal file
View file

@ -0,0 +1,33 @@
/* RTL dead store elimination.
Copyright (C) 2007 Free Software Foundation, Inc.
Contributed by Richard Sandiford <rsandifor@codesourcery.com>
and Kenneth Zadeck <zadeck@naturalbridge.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA. */
#ifndef GCC_DSE_H
#define GCC_DSE_H
struct df;
extern void dse_record_singleton_alias_set (HOST_WIDE_INT, enum machine_mode);
extern void dse_invalidate_singleton_alias_set (HOST_WIDE_INT);
#endif /* GCC_DSE_H */

View file

@ -57,6 +57,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
/* Commonly used modes. */
@ -358,6 +359,21 @@ get_reg_attrs (tree decl, int offset)
return *slot;
}
#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
across this insn. */
rtx
gen_blockage (void)
{
rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
MEM_VOLATILE_P (x) = true;
return x;
}
#endif
/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
don't attempt to share with the various global pieces of rtl (such as
frame_pointer_rtx). */
@ -2144,7 +2160,6 @@ unshare_all_rtl_again (rtx insn)
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
reset_used_flags (LOG_LINKS (p));
}
/* Make sure that virtual stack slots are not shared. */
@ -2222,11 +2237,7 @@ verify_rtx_sharing (rtx orig, rtx insn)
break;
case CONST:
/* CONST can be shared if it contains a SYMBOL_REF. If it contains
a LABEL_REF, it isn't sharable. */
if (GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
if (shared_const_p (orig))
return;
break;
@ -2307,7 +2318,6 @@ verify_rtl_sharing (void)
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
reset_used_flags (LOG_LINKS (p));
if (GET_CODE (PATTERN (p)) == SEQUENCE)
{
int i;
@ -2319,7 +2329,6 @@ verify_rtl_sharing (void)
gcc_assert (INSN_P (q));
reset_used_flags (PATTERN (q));
reset_used_flags (REG_NOTES (q));
reset_used_flags (LOG_LINKS (q));
}
}
}
@ -2329,7 +2338,6 @@ verify_rtl_sharing (void)
{
verify_rtx_sharing (PATTERN (p), p);
verify_rtx_sharing (REG_NOTES (p), p);
verify_rtx_sharing (LOG_LINKS (p), p);
}
}
@ -2344,7 +2352,6 @@ unshare_all_rtl_in_chain (rtx insn)
{
PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
}
}
@ -2421,11 +2428,7 @@ repeat:
break;
case CONST:
/* CONST can be shared if it contains a SYMBOL_REF. If it contains
a LABEL_REF, it isn't sharable. */
if (GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
if (shared_const_p (x))
return;
break;
@ -3160,6 +3163,10 @@ try_split (rtx pat, rtx trial, int last)
insn_last = NEXT_INSN (insn_last);
}
/* We will be adding the new sequence to the function. The splitters
may have introduced invalid RTL sharing, so unshare the sequence now. */
unshare_all_rtl_in_chain (seq);
/* Mark labels. */
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
{
@ -3307,7 +3314,6 @@ make_insn_raw (rtx pattern)
INSN_UID (insn) = cur_insn_uid++;
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
INSN_LOCATOR (insn) = curr_insn_locator ();
BLOCK_FOR_INSN (insn) = NULL;
@ -3339,7 +3345,6 @@ make_jump_insn_raw (rtx pattern)
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
JUMP_LABEL (insn) = NULL;
INSN_LOCATOR (insn) = curr_insn_locator ();
@ -3360,7 +3365,6 @@ make_call_insn_raw (rtx pattern)
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
CALL_INSN_FUNCTION_USAGE (insn) = NULL;
INSN_LOCATOR (insn) = curr_insn_locator ();
@ -3393,10 +3397,9 @@ add_insn (rtx insn)
SEQUENCE. */
void
add_insn_after (rtx insn, rtx after)
add_insn_after (rtx insn, rtx after, basic_block bb)
{
rtx next = NEXT_INSN (after);
basic_block bb;
gcc_assert (!optimize || !INSN_DELETED_P (after));
@ -3431,7 +3434,7 @@ add_insn_after (rtx insn, rtx after)
{
set_block_for_insn (insn, bb);
if (INSN_P (insn))
bb->flags |= BB_DIRTY;
df_insn_rescan (insn);
/* Should not happen as first in the BB is always
either NOTE or LABEL. */
if (BB_END (bb) == after
@ -3450,15 +3453,15 @@ add_insn_after (rtx insn, rtx after)
}
/* Add INSN into the doubly-linked list before insn BEFORE. This and
the previous should be the only functions called to insert an insn once
delay slots have been filled since only they know how to update a
SEQUENCE. */
the previous should be the only functions called to insert an insn
once delay slots have been filled since only they know how to
update a SEQUENCE. If BB is NULL, an attempt is made to infer the
bb from before. */
void
add_insn_before (rtx insn, rtx before)
add_insn_before (rtx insn, rtx before, basic_block bb)
{
rtx prev = PREV_INSN (before);
basic_block bb;
gcc_assert (!optimize || !INSN_DELETED_P (before));
@ -3490,13 +3493,16 @@ add_insn_before (rtx insn, rtx before)
gcc_assert (stack);
}
if (!BARRIER_P (before)
&& !BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (before)))
if (!bb
&& !BARRIER_P (before)
&& !BARRIER_P (insn))
bb = BLOCK_FOR_INSN (before);
if (bb)
{
set_block_for_insn (insn, bb);
if (INSN_P (insn))
bb->flags |= BB_DIRTY;
df_insn_rescan (insn);
/* Should not happen as first in the BB is always either NOTE or
LABEL. */
gcc_assert (BB_HEAD (bb) != insn
@ -3510,6 +3516,17 @@ add_insn_before (rtx insn, rtx before)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
/* Replace insn with an deleted instruction note. */
void set_insn_deleted (rtx insn)
{
df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
PUT_CODE (insn, NOTE);
NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Remove an insn from its doubly-linked list. This function knows how
to handle sequences. */
void
@ -3519,6 +3536,9 @@ remove_insn (rtx insn)
rtx prev = PREV_INSN (insn);
basic_block bb;
/* Later in the code, the block will be marked dirty. */
df_insn_delete (NULL, INSN_UID (insn));
if (prev)
{
NEXT_INSN (prev) = next;
@ -3569,7 +3589,7 @@ remove_insn (rtx insn)
&& (bb = BLOCK_FOR_INSN (insn)))
{
if (INSN_P (insn))
bb->flags |= BB_DIRTY;
df_set_bb_dirty (bb);
if (BB_HEAD (bb) == insn)
{
/* Never ever delete the basic block note without deleting whole
@ -3665,14 +3685,14 @@ reorder_insns (rtx from, rtx to, rtx after)
&& (bb = BLOCK_FOR_INSN (after)))
{
rtx x;
bb->flags |= BB_DIRTY;
df_set_bb_dirty (bb);
if (!BARRIER_P (from)
&& (bb2 = BLOCK_FOR_INSN (from)))
{
if (BB_END (bb2) == to)
BB_END (bb2) = prev;
bb2->flags |= BB_DIRTY;
df_set_bb_dirty (bb2);
}
if (BB_END (bb) == after)
@ -3680,7 +3700,10 @@ reorder_insns (rtx from, rtx to, rtx after)
for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
if (!BARRIER_P (x))
set_block_for_insn (x, bb);
{
set_block_for_insn (x, bb);
df_insn_change_bb (x);
}
}
}
@ -3713,7 +3736,7 @@ reorder_insns (rtx from, rtx to, rtx after)
/* Make X be output before the instruction BEFORE. */
rtx
emit_insn_before_noloc (rtx x, rtx before)
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
rtx last = before;
rtx insn;
@ -3735,7 +3758,7 @@ emit_insn_before_noloc (rtx x, rtx before)
while (insn)
{
rtx next = NEXT_INSN (insn);
add_insn_before (insn, before);
add_insn_before (insn, before, bb);
last = insn;
insn = next;
}
@ -3749,7 +3772,7 @@ emit_insn_before_noloc (rtx x, rtx before)
default:
last = make_insn_raw (x);
add_insn_before (last, before);
add_insn_before (last, before, bb);
break;
}
@ -3778,7 +3801,7 @@ emit_jump_insn_before_noloc (rtx x, rtx before)
while (insn)
{
rtx next = NEXT_INSN (insn);
add_insn_before (insn, before);
add_insn_before (insn, before, NULL);
last = insn;
insn = next;
}
@ -3792,7 +3815,7 @@ emit_jump_insn_before_noloc (rtx x, rtx before)
default:
last = make_jump_insn_raw (x);
add_insn_before (last, before);
add_insn_before (last, before, NULL);
break;
}
@ -3821,7 +3844,7 @@ emit_call_insn_before_noloc (rtx x, rtx before)
while (insn)
{
rtx next = NEXT_INSN (insn);
add_insn_before (insn, before);
add_insn_before (insn, before, NULL);
last = insn;
insn = next;
}
@ -3835,7 +3858,7 @@ emit_call_insn_before_noloc (rtx x, rtx before)
default:
last = make_call_insn_raw (x);
add_insn_before (last, before);
add_insn_before (last, before, NULL);
break;
}
@ -3852,7 +3875,7 @@ emit_barrier_before (rtx before)
INSN_UID (insn) = cur_insn_uid++;
add_insn_before (insn, before);
add_insn_before (insn, before, NULL);
return insn;
}
@ -3866,7 +3889,7 @@ emit_label_before (rtx label, rtx before)
if (INSN_UID (label) == 0)
{
INSN_UID (label) = cur_insn_uid++;
add_insn_before (label, before);
add_insn_before (label, before, NULL);
}
return label;
@ -3883,31 +3906,35 @@ emit_note_before (enum insn_note subtype, rtx before)
BLOCK_FOR_INSN (note) = NULL;
memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
add_insn_before (note, before);
add_insn_before (note, before, NULL);
return note;
}
/* Helper for emit_insn_after, handles lists of instructions
efficiently. */
static rtx emit_insn_after_1 (rtx, rtx);
static rtx
emit_insn_after_1 (rtx first, rtx after)
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
rtx last;
rtx after_after;
basic_block bb;
if (!bb && !BARRIER_P (after))
bb = BLOCK_FOR_INSN (after);
if (!BARRIER_P (after)
&& (bb = BLOCK_FOR_INSN (after)))
if (bb)
{
bb->flags |= BB_DIRTY;
df_set_bb_dirty (bb);
for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
if (!BARRIER_P (last))
set_block_for_insn (last, bb);
{
set_block_for_insn (last, bb);
df_insn_rescan (last);
}
if (!BARRIER_P (last))
set_block_for_insn (last, bb);
{
set_block_for_insn (last, bb);
df_insn_rescan (last);
}
if (BB_END (bb) == after)
BB_END (bb) = last;
}
@ -3928,10 +3955,11 @@ emit_insn_after_1 (rtx first, rtx after)
return last;
}
/* Make X be output after the insn AFTER. */
/* Make X be output after the insn AFTER and set the BB of insn. If
BB is NULL, an attempt is made to infer the BB from AFTER. */
rtx
emit_insn_after_noloc (rtx x, rtx after)
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
rtx last = after;
@ -3948,7 +3976,7 @@ emit_insn_after_noloc (rtx x, rtx after)
case CODE_LABEL:
case BARRIER:
case NOTE:
last = emit_insn_after_1 (x, after);
last = emit_insn_after_1 (x, after, bb);
break;
#ifdef ENABLE_RTL_CHECKING
@ -3959,7 +3987,7 @@ emit_insn_after_noloc (rtx x, rtx after)
default:
last = make_insn_raw (x);
add_insn_after (last, after);
add_insn_after (last, after, bb);
break;
}
@ -3985,7 +4013,7 @@ emit_jump_insn_after_noloc (rtx x, rtx after)
case CODE_LABEL:
case BARRIER:
case NOTE:
last = emit_insn_after_1 (x, after);
last = emit_insn_after_1 (x, after, NULL);
break;
#ifdef ENABLE_RTL_CHECKING
@ -3996,7 +4024,7 @@ emit_jump_insn_after_noloc (rtx x, rtx after)
default:
last = make_jump_insn_raw (x);
add_insn_after (last, after);
add_insn_after (last, after, NULL);
break;
}
@ -4021,7 +4049,7 @@ emit_call_insn_after_noloc (rtx x, rtx after)
case CODE_LABEL:
case BARRIER:
case NOTE:
last = emit_insn_after_1 (x, after);
last = emit_insn_after_1 (x, after, NULL);
break;
#ifdef ENABLE_RTL_CHECKING
@ -4032,7 +4060,7 @@ emit_call_insn_after_noloc (rtx x, rtx after)
default:
last = make_call_insn_raw (x);
add_insn_after (last, after);
add_insn_after (last, after, NULL);
break;
}
@ -4049,7 +4077,7 @@ emit_barrier_after (rtx after)
INSN_UID (insn) = cur_insn_uid++;
add_insn_after (insn, after);
add_insn_after (insn, after, NULL);
return insn;
}
@ -4064,7 +4092,7 @@ emit_label_after (rtx label, rtx after)
if (INSN_UID (label) == 0)
{
INSN_UID (label) = cur_insn_uid++;
add_insn_after (label, after);
add_insn_after (label, after, NULL);
}
return label;
@ -4080,7 +4108,7 @@ emit_note_after (enum insn_note subtype, rtx after)
NOTE_KIND (note) = subtype;
BLOCK_FOR_INSN (note) = NULL;
memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
add_insn_after (note, after);
add_insn_after (note, after, NULL);
return note;
}
@ -4088,7 +4116,7 @@ emit_note_after (enum insn_note subtype, rtx after)
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
rtx last = emit_insn_after_noloc (pattern, after);
rtx last = emit_insn_after_noloc (pattern, after, NULL);
if (pattern == NULL_RTX || !loc)
return last;
@ -4112,7 +4140,7 @@ emit_insn_after (rtx pattern, rtx after)
if (INSN_P (after))
return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
else
return emit_insn_after_noloc (pattern, after);
return emit_insn_after_noloc (pattern, after, NULL);
}
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
@ -4182,7 +4210,7 @@ rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
rtx first = PREV_INSN (before);
rtx last = emit_insn_before_noloc (pattern, before);
rtx last = emit_insn_before_noloc (pattern, before, NULL);
if (pattern == NULL_RTX || !loc)
return last;
@ -4209,7 +4237,7 @@ emit_insn_before (rtx pattern, rtx before)
if (INSN_P (before))
return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
else
return emit_insn_before_noloc (pattern, before);
return emit_insn_before_noloc (pattern, before, NULL);
}
/* like emit_insn_before_noloc, but set insn_locator according to scope. */
@ -4482,6 +4510,7 @@ rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
rtx note = find_reg_note (insn, kind, NULL_RTX);
rtx new_note = NULL;
switch (kind)
{
@ -4501,19 +4530,37 @@ set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
It serves no useful purpose and breaks eliminate_regs. */
if (GET_CODE (datum) == ASM_OPERANDS)
return NULL_RTX;
if (note)
{
XEXP (note, 0) = datum;
df_notes_rescan (insn);
return note;
}
break;
default:
if (note)
{
XEXP (note, 0) = datum;
return note;
}
break;
}
new_note = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
REG_NOTES (insn) = new_note;
switch (kind)
{
case REG_EQUAL:
case REG_EQUIV:
df_notes_rescan (insn);
break;
default:
break;
}
if (note)
{
XEXP (note, 0) = datum;
return note;
}
REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
return REG_NOTES (insn);
}
@ -4787,11 +4834,7 @@ copy_insn_1 (rtx orig)
break;
case CONST:
/* CONST can be shared if it contains a SYMBOL_REF. If it contains
a LABEL_REF, it isn't sharable. */
if (GET_CODE (XEXP (orig, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
&& GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
if (shared_const_p (orig))
return orig;
break;

Some files were not shown because too many files have changed in this diff Show more