Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
2016-07-11  Bernd Edlinger  <bernd.edlinger@hotmail.de>

	Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
	* tree-core.h (tree_base::nothrow_flag): Adjust comment.
	(tree_type_common::lang_flag_7): New.
	(tree_type_common::spare): Reduce size.
	* tree.h (TYPE_ALIGN_OK): Remove.
	(TYPE_LANG_FLAG_7): New.
	(get_inner_reference): Adjust header.
	* print-tree.c (print_node): Adjust.
	* expr.c (get_inner_reference): Remove parameter keep_aligning.
	(get_bit_range, expand_assignment, expand_expr_addr_expr_1): Adjust
	calls to get_inner_reference.
	(expand_expr_real_1): Adjust call to get_inner_reference.  Remove
	handling of TYPE_ALIGN_OK.
	* builtins.c (get_object_alignment_2): Adjust call to
	get_inner_reference.  Remove handling of VIEW_CONVERT_EXPR.
	* emit-rtl.c (set_mem_attributes_minus_bitpos): Remove handling of
	TYPE_ALIGN_OK.
	* asan.c (instrument_derefs): Adjust calls to get_inner_reference.
	* cfgexpand.c (expand_debug_expr): Likewise.
	* dbxout.c (dbxout_expand_expr): Likewise.
	* dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
	loc_list_from_tree, fortran_common): Likewise.
	* fold-const.c (optimize_bit_field_compare, decode_field_reference,
	fold_unary_loc, fold_comparison, split_address_to_core_and_offset):
	Likewise.
	* gimple-laddress.c (execute): Likewise.
	* gimple-ssa-strength-reduction.c (slsr_process_ref): Likewise.
	* gimplify.c (gimplify_scan_omp_clauses): Likewise.
	* hsa-gen.c (gen_hsa_addr): Likewise.
	* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
	* tsan.c (instrument_expr): Likewise.
	* ubsan.c (instrument_bool_enum_load, instrument_object_size): Likewise.
	* tree.c (verify_type_variant): Remove handling of TYPE_ALIGN_OK.
	* tree-affine.c (tree_to_aff_combination, get_inner_reference_aff):
	Adjust calls to get_inner_reference.
	* tree-data-ref.c (split_constant_offset_1, dr_analyze_innermost):
	Likewise.
	* tree-scalar-evolution.c (interpret_rhs_expr): Likewise.
	* tree-sra.c (ipa_sra_check_caller): Likewise.
	* tree-ssa-loop-ivopts.c (split_address_cost): Likewise.
	* tree-ssa-math-opts.c (find_bswap_or_nop_load, bswap_replace):
	Likewise.
	* tree-vect-data-refs.c (vect_check_gather, vect_analyze_data_refs):
	Likewise.
	* config/mips/mips.c (r10k_safe_mem_expr_p): Likewise.
	* config/pa/pa.c (pa_emit_move_sequence): Remove handling of
	TYPE_ALIGN_OK.

ada:
2016-07-11  Bernd Edlinger  <bernd.edlinger@hotmail.de>

	Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
	* gcc-interface/ada-tree.h (TYPE_ALIGN_OK): Define.
	* gcc-interface/trans.c (Attribute_to_gnu): Adjust call to
	get_inner_reference.
	* gcc-interface/utils2.c (build_unary_op): Likewise.

From-SVN: r238210
parent 9e34db2e03
commit 25b75a48d2

33 changed files with 124 additions and 120 deletions
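The one pattern repeated across almost every hunk below is mechanical: callers of get_inner_reference drop the trailing keep_aligning argument.  A minimal sketch of that shape (GCC-internal API; example_caller is a hypothetical illustration, not code taken from the patch):

/* Sketch only: assumes GCC's internal headers (tree.h, machmode.h, ...)
   are in scope; example_caller is a hypothetical wrapper for illustration.  */

/* Prototype after this patch: the trailing "bool keep_aligning" is gone.  */
extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                 tree *, machine_mode *, int *, int *, int *);

static tree
example_caller (tree exp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;

  /* Callers used to pass a final keep_aligning argument (false, or true in
     the RTL expanders); now the argument list simply ends at &volatilep.  */
  return get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                              &unsignedp, &reversep, &volatilep);
}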
gcc/ChangeLog
@@ -1,3 +1,53 @@
+2016-07-11  Bernd Edlinger  <bernd.edlinger@hotmail.de>
+
+	Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
+	* tree-core.h (tree_base::nothrow_flag): Adjust comment.
+	(tree_type_common::lang_flag_7): New.
+	(tree_type_common::spare): Reduce size.
+	* tree.h (TYPE_ALIGN_OK): Remove.
+	(TYPE_LANG_FLAG_7): New.
+	(get_inner_reference): Adjust header.
+	* print-tree.c (print_node): Adjust.
+	* expr.c (get_inner_reference): Remove parameter keep_aligning.
+	(get_bit_range, expand_assignment, expand_expr_addr_expr_1): Adjust
+	calls to get_inner_reference.
+	(expand_expr_real_1): Adjust call to get_inner_reference.  Remove
+	handling of TYPE_ALIGN_OK.
+	* builtins.c (get_object_alignment_2): Adjust call to
+	get_inner_reference.  Remove handling of VIEW_CONVERT_EXPR.
+	* emit-rtl.c (set_mem_attributes_minus_bitpos): Remove handling of
+	TYPE_ALIGN_OK.
+	* asan.c (instrument_derefs): Adjust calls to get_inner_reference.
+	* cfgexpand.c (expand_debug_expr): Likewise.
+	* dbxout.c (dbxout_expand_expr): Likewise.
+	* dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
+	loc_list_from_tree, fortran_common): Likewise.
+	* fold-const.c (optimize_bit_field_compare,
+	decode_field_reference, fold_unary_loc, fold_comparison,
+	split_address_to_core_and_offset): Likewise.
+	* gimple-laddress.c (execute): Likewise.
+	* gimple-ssa-strength-reduction.c (slsr_process_ref): Likewise.
+	* gimplify.c (gimplify_scan_omp_clauses): Likewise.
+	* hsa-gen.c (gen_hsa_addr): Likewise.
+	* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
+	* tsan.c (instrument_expr): Likewise.
+	* ubsan.c (instrument_bool_enum_load, instrument_object_size): Likewise.
+	* tree.c (verify_type_variant): Remove handling of TYPE_ALIGN_OK.
+	* tree-affine.c (tree_to_aff_combination,
+	get_inner_reference_aff): Adjust calls to get_inner_reference.
+	* tree-data-ref.c (split_constant_offset_1,
+	dr_analyze_innermost): Likewise.
+	* tree-scalar-evolution.c (interpret_rhs_expr): Likewise.
+	* tree-sra.c (ipa_sra_check_caller): Likewise.
+	* tree-ssa-loop-ivopts.c (split_address_cost): Likewise.
+	* tree-ssa-math-opts.c (find_bswap_or_nop_load,
+	bswap_replace): Likewise.
+	* tree-vect-data-refs.c (vect_check_gather,
+	vect_analyze_data_refs): Likewise.
+	* config/mips/mips.c (r10k_safe_mem_expr_p): Likewise.
+	* config/pa/pa.c (pa_emit_move_sequence): Remove handling of
+	TYPE_ALIGN_OK.
+
 2016-07-11  David Malcolm  <dmalcolm@redhat.com>
 
 	* Makefile.in (selftest-valgrind): New phony target.
@@ -32,7 +82,7 @@
 	* lra-constraints.c (process_alt_operands): Check combination of
 	reg class and mode.
 
-2016-06-25  Jason Merrill  <jason@redhat.com>
+2016-07-08  Jason Merrill  <jason@redhat.com>
 	    Richard Biener  <rguenther@suse.de>
 
 	P0145: Refining Expression Order for C++.
gcc/ada/ChangeLog
@@ -1,3 +1,11 @@
+2016-07-11  Bernd Edlinger  <bernd.edlinger@hotmail.de>
+
+	Convert TYPE_ALIGN_OK to a TYPE_LANG_FLAG.
+	* gcc-interface/ada-tree.h (TYPE_ALIGN_OK): Define.
+	* gcc-interface/trans.c (Attribute_to_gnu): Adjust call to
+	get_inner_reference.
+	* gcc-interface/utils2.c (build_unary_op): Likewise.
+
 2016-07-11  Eric Botcazou  <ebotcazou@adacore.com>
 
 	* gcc-interface/trans.c (add_decl_expr): Minor tweak.
gcc/ada/gcc-interface/ada-tree.h
@@ -199,6 +199,9 @@ do { \
    alignment value the type ought to have.  */
 #define TYPE_MAX_ALIGN(NODE) (TYPE_PRECISION (RECORD_OR_UNION_CHECK (NODE)))
 
+/* True if objects of tagged types are guaranteed to be properly aligned.  */
+#define TYPE_ALIGN_OK(NODE) TYPE_LANG_FLAG_7 (NODE)
+
 /* For an UNCONSTRAINED_ARRAY_TYPE, this is the record containing both the
    template and the object.
gcc/ada/gcc-interface/trans.c
@@ -2181,7 +2181,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
                   && TREE_CODE (gnu_prefix) == FIELD_DECL));
 
       get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset,
-                           &mode, &unsignedp, &reversep, &volatilep, false);
+                           &mode, &unsignedp, &reversep, &volatilep);
 
       if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
         {
gcc/ada/gcc-interface/utils2.c
@@ -1419,7 +1419,7 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand)
 
         inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &reversep,
-                                     &volatilep, false);
+                                     &volatilep);
 
         /* If INNER is a padding type whose field has a self-referential
            size, convert to that inner type.  We know the offset is zero
gcc/asan.c
@@ -1793,7 +1793,7 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
   machine_mode mode;
   int unsignedp, reversep, volatilep = 0;
   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
-                                    &unsignedp, &reversep, &volatilep, false);
+                                    &unsignedp, &reversep, &volatilep);
 
   if (TREE_CODE (t) == COMPONENT_REF
       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
gcc/builtins.c
@@ -259,7 +259,7 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
   /* Get the innermost object and the constant (bitpos) and possibly
      variable (offset) offset of the access.  */
   exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
-                             &unsignedp, &reversep, &volatilep, true);
+                             &unsignedp, &reversep, &volatilep);
 
   /* Extract alignment information from the innermost object and
      possibly adjust bitpos and offset.  */
@@ -289,10 +289,6 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
       align = DECL_ALIGN (exp);
       known_alignment = true;
     }
-  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
-    {
-      align = TYPE_ALIGN (TREE_TYPE (exp));
-    }
   else if (TREE_CODE (exp) == INDIRECT_REF
            || TREE_CODE (exp) == MEM_REF
            || TREE_CODE (exp) == TARGET_MEM_REF)
gcc/cfgexpand.c
@@ -4444,7 +4444,7 @@ expand_debug_expr (tree exp)
         int reversep, volatilep = 0;
         tree tem
           = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
-                                 &unsignedp, &reversep, &volatilep, false);
+                                 &unsignedp, &reversep, &volatilep);
         rtx orig_op0;
 
         if (bitsize == 0)
gcc/config/mips/mips.c
@@ -17470,7 +17470,7 @@ r10k_safe_mem_expr_p (tree expr, unsigned HOST_WIDE_INT offset)
   int unsigned_p, reverse_p, volatile_p;
 
   inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode,
-                               &unsigned_p, &reverse_p, &volatile_p, false);
+                               &unsigned_p, &reverse_p, &volatile_p);
   if (!DECL_P (inner) || !DECL_SIZE_UNIT (inner) || var_offset)
     return false;
 
gcc/config/pa/pa.c
@@ -1929,16 +1929,7 @@ pa_emit_move_sequence (rtx *operands, machine_mode mode, rtx scratch_reg)
             type = strip_array_types (type);
 
           if (POINTER_TYPE_P (type))
-            {
-              int align;
-
-              type = TREE_TYPE (type);
-              /* Using TYPE_ALIGN_OK is rather conservative as
-                 only the ada frontend actually sets it.  */
-              align = (TYPE_ALIGN_OK (type) ? TYPE_ALIGN (type)
-                       : BITS_PER_UNIT);
-              mark_reg_pointer (operand0, align);
-            }
+            mark_reg_pointer (operand0, BITS_PER_UNIT);
         }
     }
 
gcc/dbxout.c
@@ -2485,7 +2485,7 @@ dbxout_expand_expr (tree expr)
         rtx x;
 
         tem = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
-                                   &unsignedp, &reversep, &volatilep, true);
+                                   &unsignedp, &reversep, &volatilep);
 
         x = dbxout_expand_expr (tem);
         if (x == NULL || !MEM_P (x))
gcc/dwarf2out.c
@@ -15136,7 +15136,7 @@ loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
 
   obj = get_inner_reference (TREE_OPERAND (loc, 0),
                              &bitsize, &bitpos, &offset, &mode,
-                             &unsignedp, &reversep, &volatilep, false);
+                             &unsignedp, &reversep, &volatilep);
   STRIP_NOPS (obj);
   if (bitpos % BITS_PER_UNIT)
     {
@@ -16073,7 +16073,7 @@ loc_list_from_tree_1 (tree loc, int want_address,
         int unsignedp, reversep, volatilep = 0;
 
         obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
-                                   &unsignedp, &reversep, &volatilep, false);
+                                   &unsignedp, &reversep, &volatilep);
 
         gcc_assert (obj != loc);
 
@@ -17548,7 +17548,7 @@ fortran_common (tree decl, HOST_WIDE_INT *value)
     return NULL_TREE;
 
   cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
-                              &unsignedp, &reversep, &volatilep, true);
+                              &unsignedp, &reversep, &volatilep);
 
   if (cvar == NULL_TREE
       || TREE_CODE (cvar) != VAR_DECL
gcc/emit-rtl.c
@@ -1813,9 +1813,9 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
          able to simply always use TYPE_ALIGN?  */
     }
 
-  /* We can set the alignment from the type if we are making an object,
-     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
-  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
+  /* We can set the alignment from the type if we are making an object or if
+     this is an INDIRECT_REF.  */
+  if (objectp || TREE_CODE (t) == INDIRECT_REF)
     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
 
   /* If the size is known, we can set that.  */
gcc/expr.c
@@ -4828,7 +4828,7 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
       int unsignedp, reversep, volatilep = 0;
       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
                            &roffset, &rmode, &unsignedp, &reversep,
-                           &volatilep, false);
+                           &volatilep);
       if ((rbitpos % BITS_PER_UNIT) != 0)
         {
           *bitstart = *bitend = 0;
@@ -4984,7 +4984,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
 
       push_temp_slots ();
       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
-                                 &unsignedp, &reversep, &volatilep, true);
+                                 &unsignedp, &reversep, &volatilep);
 
       /* Make sure bitpos is not negative, it can wreak havoc later.  */
       if (bitpos < 0)
@@ -6971,27 +6971,13 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
 
    If the field describes a variable-sized object, *PMODE is set to
    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
-   this case, but the address of the object can be found.
-
-   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
-   look through nodes that serve as markers of a greater alignment than
-   the one that can be deduced from the expression.  These nodes make it
-   possible for front-ends to prevent temporaries from being created by
-   the middle-end on alignment considerations.  For that purpose, the
-   normal operating mode at high-level is to always pass FALSE so that
-   the ultimate containing object is really returned; moreover, the
-   associated predicate handled_component_p will always return TRUE
-   on these nodes, thus indicating that they are essentially handled
-   by get_inner_reference.  TRUE should only be passed when the caller
-   is scanning the expression in order to build another representation
-   and specifically knows how to handle these nodes; as such, this is
-   the normal operating mode in the RTL expanders.  */
+   this case, but the address of the object can be found.  */
 
 tree
 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                      HOST_WIDE_INT *pbitpos, tree *poffset,
                      machine_mode *pmode, int *punsignedp,
-                     int *preversep, int *pvolatilep, bool keep_aligning)
+                     int *preversep, int *pvolatilep)
 {
   tree size_tree = 0;
   machine_mode mode = VOIDmode;
@@ -7113,14 +7099,6 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
           break;
 
         case VIEW_CONVERT_EXPR:
-          if (keep_aligning && STRICT_ALIGNMENT
-              && (TYPE_ALIGN (TREE_TYPE (exp))
-                  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
-              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
-                  < BIGGEST_ALIGNMENT)
-              && (TYPE_ALIGN_OK (TREE_TYPE (exp))
-                  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
-            goto done;
           break;
 
         case MEM_REF:
@@ -7839,7 +7817,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
          they won't change the final object whose address will be returned
          (they actually exist only for that purpose).  */
       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                                   &unsignedp, &reversep, &volatilep, false);
+                                  &unsignedp, &reversep, &volatilep);
       break;
     }
 
@@ -10369,7 +10347,7 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
         int reversep, volatilep = 0, must_force_mem;
         tree tem
           = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
-                                 &unsignedp, &reversep, &volatilep, true);
+                                 &unsignedp, &reversep, &volatilep);
         rtx orig_op0, memloc;
         bool clear_mem_expr = false;
 
@@ -10767,7 +10745,7 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
         int unsignedp, reversep, volatilep = 0;
         tree tem
           = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
-                                 &unsignedp, &reversep, &volatilep, true);
+                                 &unsignedp, &reversep, &volatilep);
         rtx orig_op0;
 
         /* ??? We should work harder and deal with non-zero offsets.  */
@@ -10878,20 +10856,11 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
         {
           enum insn_code icode;
 
-          if (TYPE_ALIGN_OK (type))
-            {
-              /* ??? Copying the MEM without substantially changing it might
-                 run afoul of the code handling volatile memory references in
-                 store_expr, which assumes that TARGET is returned unmodified
-                 if it has been used.  */
-              op0 = copy_rtx (op0);
-              set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
-            }
-          else if (modifier != EXPAND_WRITE
-                   && modifier != EXPAND_MEMORY
-                   && !inner_reference_p
-                   && mode != BLKmode
-                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
+          if (modifier != EXPAND_WRITE
+              && modifier != EXPAND_MEMORY
+              && !inner_reference_p
+              && mode != BLKmode
+              && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
             {
               /* If the target does have special handling for unaligned
                  loads of mode then use them.  */
gcc/fold-const.c
@@ -3880,7 +3880,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
      do anything if the inner expression is a PLACEHOLDER_EXPR since we
      then will no longer be able to replace it.  */
   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
-                                &lunsignedp, &lreversep, &lvolatilep, false);
+                                &lunsignedp, &lreversep, &lvolatilep);
   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
     return 0;
@@ -3893,7 +3893,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
          sizes, signedness and storage order are the same.  */
       rinner
         = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
-                               &runsignedp, &rreversep, &rvolatilep, false);
+                               &runsignedp, &rreversep, &rvolatilep);
 
       if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
           || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
@@ -4075,7 +4075,7 @@ decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
     }
 
   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
-                               punsignedp, preversep, pvolatilep, false);
+                               punsignedp, preversep, pvolatilep);
   if ((inner == exp && and_mask == 0)
       || *pbitsize < 0 || offset != 0
       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
@@ -7730,7 +7730,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
           tree base
             = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
                                    &offset, &mode, &unsignedp, &reversep,
-                                   &volatilep, false);
+                                   &volatilep);
           /* If the reference was to a (constant) zero offset, we can use
              the address of the base if it has the same base type
              as the result type and the pointer type is unqualified.  */
@@ -8334,7 +8334,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
           base0
             = get_inner_reference (TREE_OPERAND (arg0, 0),
                                    &bitsize, &bitpos0, &offset0, &mode,
-                                   &unsignedp, &reversep, &volatilep, false);
+                                   &unsignedp, &reversep, &volatilep);
           if (TREE_CODE (base0) == INDIRECT_REF)
             base0 = TREE_OPERAND (base0, 0);
           else
@@ -8349,8 +8349,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
               base0
                 = get_inner_reference (TREE_OPERAND (base0, 0),
                                        &bitsize, &bitpos0, &offset0, &mode,
-                                       &unsignedp, &reversep, &volatilep,
-                                       false);
+                                       &unsignedp, &reversep, &volatilep);
               if (TREE_CODE (base0) == INDIRECT_REF)
                 base0 = TREE_OPERAND (base0, 0);
               else
@@ -8381,7 +8380,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
           base1
             = get_inner_reference (TREE_OPERAND (arg1, 0),
                                    &bitsize, &bitpos1, &offset1, &mode,
-                                   &unsignedp, &reversep, &volatilep, false);
+                                   &unsignedp, &reversep, &volatilep);
           if (TREE_CODE (base1) == INDIRECT_REF)
             base1 = TREE_OPERAND (base1, 0);
           else
@@ -8396,8 +8395,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
               base1
                 = get_inner_reference (TREE_OPERAND (base1, 0),
                                        &bitsize, &bitpos1, &offset1, &mode,
-                                       &unsignedp, &reversep, &volatilep,
-                                       false);
+                                       &unsignedp, &reversep, &volatilep);
               if (TREE_CODE (base1) == INDIRECT_REF)
                 base1 = TREE_OPERAND (base1, 0);
               else
@@ -14295,7 +14293,7 @@ split_address_to_core_and_offset (tree exp,
     {
       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                   poffset, &mode, &unsignedp, &reversep,
-                                  &volatilep, false);
+                                  &volatilep);
       core = build_fold_addr_expr_loc (loc, core);
     }
   else
gcc/gimple-laddress.c
@@ -105,7 +105,7 @@ pass_laddress::execute (function *fun)
           int volatilep = 0, reversep, unsignedp = 0;
           base = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize,
                                       &bitpos, &offset, &mode, &unsignedp,
-                                      &reversep, &volatilep, false);
+                                      &reversep, &volatilep);
           gcc_assert (base != NULL_TREE && (bitpos % BITS_PER_UNIT) == 0);
           if (offset != NULL_TREE)
             {
gcc/gimple-ssa-strength-reduction.c
@@ -987,7 +987,7 @@ slsr_process_ref (gimple *gs)
     return;
 
   base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
-                              &unsignedp, &reversep, &volatilep, false);
+                              &unsignedp, &reversep, &volatilep);
   if (reversep)
     return;
   widest_int index = bitpos;
gcc/gimplify.c
@@ -7071,7 +7071,7 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
                 base = TREE_OPERAND (base, 0);
               base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &reversep,
-                                          &volatilep, false);
+                                          &volatilep);
               tree orig_base = base;
               if ((TREE_CODE (base) == INDIRECT_REF
                    || (TREE_CODE (base) == MEM_REF
@@ -7207,8 +7207,7 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
                       base = get_inner_reference (base, &bitsize2,
                                                   &bitpos2, &offset2,
                                                   &mode, &unsignedp,
-                                                  &reversep, &volatilep,
-                                                  false);
+                                                  &reversep, &volatilep);
                       if ((TREE_CODE (base) == INDIRECT_REF
                            || (TREE_CODE (base) == MEM_REF
                                && integer_zerop (TREE_OPERAND (base,
gcc/hsa-gen.c
@@ -2045,7 +2045,7 @@ gen_hsa_addr (tree ref, hsa_bb *hbb, HOST_WIDE_INT *output_bitsize = NULL,
       int unsignedp, volatilep, preversep;
 
       ref = get_inner_reference (ref, &bitsize, &bitpos, &varoffset, &mode,
-                                 &unsignedp, &preversep, &volatilep, false);
+                                 &unsignedp, &preversep, &volatilep);
 
       offset = bitpos;
       offset = wi::rshift (offset, LOG2_BITS_PER_UNIT, SIGNED);
gcc/print-tree.c
@@ -316,7 +316,7 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
   if (TREE_USED (node))
     fputs (" used", file);
   if (TREE_NOTHROW (node))
-    fputs (TYPE_P (node) ? " align-ok" : " nothrow", file);
+    fputs (" nothrow", file);
   if (TREE_PUBLIC (node))
     fputs (" public", file);
   if (TREE_PRIVATE (node))
@@ -601,6 +601,8 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
         fputs (" type_5", file);
       if (TYPE_LANG_FLAG_6 (node))
         fputs (" type_6", file);
+      if (TYPE_LANG_FLAG_7 (node))
+        fputs (" type_7", file);
 
       mode = TYPE_MODE (node);
       fprintf (file, " %s", GET_MODE_NAME (mode));
gcc/simplify-rtx.c
@@ -305,7 +305,7 @@ delegitimize_mem_from_attrs (rtx x)
 
             decl
               = get_inner_reference (decl, &bitsize, &bitpos, &toffset, &mode,
-                                     &unsignedp, &reversep, &volatilep, false);
+                                     &unsignedp, &reversep, &volatilep);
             if (bitsize != GET_MODE_BITSIZE (mode)
                 || (bitpos % BITS_PER_UNIT)
                 || (toffset && !tree_fits_shwi_p (toffset)))
gcc/tree-affine.c
@@ -318,7 +318,7 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
         }
       core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                   &toffset, &mode, &unsignedp, &reversep,
-                                  &volatilep, false);
+                                  &volatilep);
       if (bitpos % BITS_PER_UNIT != 0)
         break;
       aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
@@ -888,7 +888,7 @@ get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
   int uns, rev, vol;
   aff_tree tmp;
   tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
-                                   &uns, &rev, &vol, false);
+                                   &uns, &rev, &vol);
   tree base_addr = build_fold_addr_expr (base);
 
   /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */
gcc/tree-core.h
@@ -1154,9 +1154,6 @@ struct GTY(()) tree_base {
            CALL_EXPR
            FUNCTION_DECL
 
-       TYPE_ALIGN_OK in
-           all types
-
        TREE_THIS_NOTRAP in
            INDIRECT_REF, MEM_REF, TARGET_MEM_REF, ARRAY_REF, ARRAY_RANGE_REF
 
@@ -1447,6 +1444,7 @@ struct GTY(()) tree_type_common {
   unsigned lang_flag_4 : 1;
   unsigned lang_flag_5 : 1;
   unsigned lang_flag_6 : 1;
+  unsigned lang_flag_7 : 1;
 
   /* TYPE_ALIGN in log2; this has to be large enough to hold values
      of the maximum of BIGGEST_ALIGNMENT and MAX_OFILE_ALIGNMENT,
@@ -1454,7 +1452,7 @@ struct GTY(()) tree_type_common {
      so we need to store the value 32 (not 31, as we need the zero
      as well), hence six bits.  */
   unsigned align : 6;
-  unsigned spare : 26;
+  unsigned spare : 25;
   alias_set_type alias_set;
   tree pointer_to;
   tree reference_to;
gcc/tree-data-ref.c
@@ -618,7 +618,7 @@ split_constant_offset_1 (tree type, tree op0, enum tree_code code, tree op1,
         op0 = TREE_OPERAND (op0, 0);
         base
           = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset, &pmode,
-                                 &punsignedp, &preversep, &pvolatilep, false);
+                                 &punsignedp, &preversep, &pvolatilep);
 
         if (pbitpos % BITS_PER_UNIT != 0)
           return false;
@@ -771,7 +771,7 @@ dr_analyze_innermost (struct data_reference *dr, struct loop *nest)
     fprintf (dump_file, "analyze_innermost: ");
 
   base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset, &pmode,
-                              &punsignedp, &preversep, &pvolatilep, false);
+                              &punsignedp, &preversep, &pvolatilep);
   gcc_assert (base != NULL_TREE);
 
   if (pbitpos % BITS_PER_UNIT != 0)
gcc/tree-scalar-evolution.c
@@ -1744,8 +1744,7 @@ interpret_rhs_expr (struct loop *loop, gimple *at_stmt,
 
       base = get_inner_reference (TREE_OPERAND (rhs1, 0),
                                   &bitsize, &bitpos, &offset, &mode,
-                                  &unsignedp, &reversep, &volatilep,
-                                  false);
+                                  &unsignedp, &reversep, &volatilep);
 
       if (TREE_CODE (base) == MEM_REF)
         {
gcc/tree-sra.c
@@ -5230,7 +5230,7 @@ ipa_sra_check_caller (struct cgraph_node *node, void *data)
           machine_mode mode;
           int unsignedp, reversep, volatilep = 0;
           get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
-                               &unsignedp, &reversep, &volatilep, false);
+                               &unsignedp, &reversep, &volatilep);
           if (bitpos % BITS_PER_UNIT)
             {
               iscc->bad_arg_alignment = true;
gcc/tree-ssa-loop-ivopts.c
@@ -4526,7 +4526,7 @@ split_address_cost (struct ivopts_data *data,
   int unsignedp, reversep, volatilep;
 
   core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
-                              &unsignedp, &reversep, &volatilep, false);
+                              &unsignedp, &reversep, &volatilep);
 
   if (toffset != 0
       || bitpos % BITS_PER_UNIT != 0
gcc/tree-ssa-math-opts.c
@@ -2097,7 +2097,7 @@ find_bswap_or_nop_load (gimple *stmt, tree ref, struct symbolic_number *n)
     return false;
 
   base_addr = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode,
-                                   &unsignedp, &reversep, &volatilep, false);
+                                   &unsignedp, &reversep, &volatilep);
 
   if (TREE_CODE (base_addr) == MEM_REF)
     {
@@ -2640,7 +2640,7 @@ bswap_replace (gimple *cur_stmt, gimple *src_stmt, tree fndecl,
       tree offset;
 
       get_inner_reference (src, &bitsize, &bitpos, &offset, &mode,
-                           &unsignedp, &reversep, &volatilep, false);
+                           &unsignedp, &reversep, &volatilep);
       if (n->range < (unsigned HOST_WIDE_INT) bitsize)
         {
           load_offset = (bitsize - n->range) / BITS_PER_UNIT;
gcc/tree-vect-data-refs.c
@@ -3239,7 +3239,7 @@ vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
      SSA_NAME OFF and put the loop invariants into a tree BASE
      that can be gimplified before the loop.  */
   base = get_inner_reference (base, &pbitsize, &pbitpos, &off, &pmode,
-                              &punsignedp, &reversep, &pvolatilep, false);
+                              &punsignedp, &reversep, &pvolatilep);
   gcc_assert (base && (pbitpos % BITS_PER_UNIT) == 0 && !reversep);
 
   if (TREE_CODE (base) == MEM_REF)
@@ -3709,7 +3709,7 @@ again:
 
           outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos,
                                             &poffset, &pmode, &punsignedp,
-                                            &preversep, &pvolatilep, false);
+                                            &preversep, &pvolatilep);
           gcc_assert (outer_base != NULL_TREE);
 
           if (pbitpos % BITS_PER_UNIT != 0)
gcc/tree.c
@@ -13204,7 +13204,6 @@ verify_type_variant (const_tree t, tree tv)
   verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
   /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build.  */
   verify_variant_match (TYPE_UNSIGNED);
-  verify_variant_match (TYPE_ALIGN_OK);
   verify_variant_match (TYPE_PACKED);
   if (TREE_CODE (t) == REFERENCE_TYPE)
     verify_variant_match (TYPE_REF_IS_RVALUE);
gcc/tree.h
@@ -916,14 +916,6 @@ extern void omp_clause_range_check_failed (const_tree, const char *, int,
 /* In a CALL_EXPR, means call was instrumented by Pointer Bounds Checker.  */
 #define CALL_WITH_BOUNDS_P(NODE) (CALL_EXPR_CHECK (NODE)->base.deprecated_flag)
 
-/* In a type, nonzero means that all objects of the type are guaranteed by the
-   language or front-end to be properly aligned, so we can indicate that a MEM
-   of this type is aligned at least to the alignment of the type, even if it
-   doesn't appear that it is.  We see this, for example, in object-oriented
-   languages where a tag field may show this is an object of a more-aligned
-   variant of the more generic type.  */
-#define TYPE_ALIGN_OK(NODE) (TYPE_CHECK (NODE)->base.nothrow_flag)
-
 /* Used in classes in C++.  */
 #define TREE_PRIVATE(NODE) ((NODE)->base.private_flag)
 /* Used in classes in C++.  */
@@ -1960,6 +1952,7 @@ extern machine_mode element_mode (const_tree t);
 #define TYPE_LANG_FLAG_4(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_4)
 #define TYPE_LANG_FLAG_5(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_5)
 #define TYPE_LANG_FLAG_6(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_6)
+#define TYPE_LANG_FLAG_7(NODE) (TYPE_CHECK (NODE)->type_common.lang_flag_7)
 
 /* Used to keep track of visited nodes in tree traversals.  This is set to
    0 by copy_node and make_node.  */
@@ -5387,8 +5380,7 @@ extern bool complete_ctor_at_level_p (const_tree, HOST_WIDE_INT, const_tree);
    look for the ultimate containing object, which is returned and specify
    the access position and size.  */
 extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
-                                 tree *, machine_mode *, int *, int *,
-                                 int *, bool);
+                                 tree *, machine_mode *, int *, int *, int *);
 
 extern tree build_personality_function (const char *);
 
gcc/tsan.c
@@ -110,7 +110,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
   machine_mode mode;
   int unsignedp, reversep, volatilep = 0;
   base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
-                              &unsignedp, &reversep, &volatilep, false);
+                              &unsignedp, &reversep, &volatilep);
 
   /* No need to instrument accesses to decls that don't escape,
      they can't escape to other threads then.  */
gcc/ubsan.c
@@ -1360,7 +1360,7 @@ instrument_bool_enum_load (gimple_stmt_iterator *gsi)
   machine_mode mode;
   int volatilep = 0, reversep, unsignedp = 0;
   tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
-                                   &unsignedp, &reversep, &volatilep, false);
+                                   &unsignedp, &reversep, &volatilep);
   tree utype = build_nonstandard_integer_type (modebitsize, 1);
 
   if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
@@ -1781,7 +1781,7 @@ instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
   machine_mode mode;
   int volatilep = 0, reversep, unsignedp = 0;
   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
-                                    &unsignedp, &reversep, &volatilep, false);
+                                    &unsignedp, &reversep, &volatilep);
 
   if (bitpos % BITS_PER_UNIT != 0
       || bitsize != size_in_bytes * BITS_PER_UNIT)