cselib+expr+bitmap: Change return type of predicate functions from int to bool
gcc/ChangeLog:

	* cselib.h (rtx_equal_for_cselib_1): Change return type from int
	to bool.
	(references_value_p): Ditto.
	(rtx_equal_for_cselib_p): Ditto.
	* expr.h (can_store_by_pieces): Ditto.
	(try_casesi): Ditto.
	(try_tablejump): Ditto.
	(safe_from_p): Ditto.
	* sbitmap.h (bitmap_equal_p): Ditto.
	* cselib.cc (references_value_p): Change return type from int
	to bool and adjust function body accordingly.
	(rtx_equal_for_cselib_1): Ditto.
	* expr.cc (is_aligning_offset): Ditto.
	(can_store_by_pieces): Ditto.
	(mostly_zeros_p): Ditto.
	(all_zeros_p): Ditto.
	(safe_from_p): Ditto.
	(is_aligning_offset): Ditto.
	(try_casesi): Ditto.
	(try_tablejump): Ditto.
	(store_constructor): Change "need_to_clear" and "const_bounds_p"
	variables to bool.
	* sbitmap.cc (bitmap_equal_p): Change return type from int to bool.
parent 5dfdf0ae4d
commit c41332abe7

6 changed files with 98 additions and 96 deletions
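The change applied across all six files is mechanical: predicates that used to return int 0/1 now return bool false/true, and int flag variables that only ever held 0 or 1 become bool. The stand-alone sketch below (the Node type and contains_value are invented for illustration, not GCC code) mirrors the shape of references_value_p — a recursive walk that returns as soon as a match is found — and shows what the conversion looks like in isolation:

// Illustrative only: a stand-alone analogue of the int -> bool conversion.
#include <iostream>
#include <vector>

struct Node
{
  bool is_value = false;          // analogue of GET_CODE (x) == VALUE
  std::vector<Node> operands;     // analogue of the XEXP/XVECEXP operands
};

// Before the change this would have been "int contains_value (...)"
// returning 0/1; with bool the intent is visible in the signature.
static bool
contains_value (const Node &n)
{
  if (n.is_value)
    return true;                  // was: return 1;

  for (const Node &op : n.operands)
    if (contains_value (op))
      return true;                // was: return 1;

  return false;                   // was: return 0;
}

int
main ()
{
  Node leaf;
  leaf.is_value = true;

  Node root;
  root.operands.push_back (Node {});
  root.operands.push_back (leaf);

  std::cout << std::boolalpha << contains_value (root) << '\n';  // prints: true
  return 0;
}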
gcc/cselib.cc

@@ -636,7 +636,7 @@ cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
    element has been set to zero, which implies the cselib_val will be
    removed.  */

-int
+bool
 references_value_p (const_rtx x, int only_useless)
 {
   const enum rtx_code code = GET_CODE (x);
@@ -646,19 +646,19 @@ references_value_p (const_rtx x, int only_useless)
   if (GET_CODE (x) == VALUE
       && (! only_useless
          || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
-    return 1;
+    return true;

   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
     {
       if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
-       return 1;
+       return true;
       else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
-           return 1;
+           return true;
     }

-  return 0;
+  return false;
 }

 /* Return true if V is a useless VALUE and can be discarded as such.  */
@@ -926,13 +926,13 @@ autoinc_split (rtx x, rtx *off, machine_mode memmode)
   return x;
 }

-/* Return nonzero if we can prove that X and Y contain the same value,
+/* Return true if we can prove that X and Y contain the same value,
    taking our gathered information into account.  MEMMODE holds the
    mode of the enclosing MEM, if any, as required to deal with autoinc
    addressing modes.  If X and Y are not (known to be) part of
    addresses, MEMMODE should be VOIDmode.  */

-int
+bool
 rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
 {
   enum rtx_code code;
@@ -956,7 +956,7 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
     }

   if (x == y)
-    return 1;
+    return true;

   if (GET_CODE (x) == VALUE)
     {
@@ -973,11 +973,11 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
          rtx yoff = NULL;
          rtx yr = autoinc_split (y, &yoff, memmode);
          if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
-           return 1;
+           return true;
        }

       if (depth == 128)
-       return 0;
+       return false;

       for (l = e->locs; l; l = l->next)
        {
@@ -989,10 +989,10 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
-           return 1;
+           return true;
        }

-      return 0;
+      return false;
     }
   else if (GET_CODE (y) == VALUE)
     {
@@ -1006,11 +1006,11 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
          rtx xoff = NULL;
          rtx xr = autoinc_split (x, &xoff, memmode);
          if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
-           return 1;
+           return true;
        }

       if (depth == 128)
-       return 0;
+       return false;

       for (l = e->locs; l; l = l->next)
        {
@@ -1019,14 +1019,14 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
-           return 1;
+           return true;
        }

-      return 0;
+      return false;
     }

   if (GET_MODE (x) != GET_MODE (y))
-    return 0;
+    return false;

   if (GET_CODE (x) != GET_CODE (y)
       || (GET_CODE (x) == PLUS
@@ -1044,16 +1044,16 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
       if (x != xorig || y != yorig)
        {
          if (!xoff != !yoff)
-           return 0;
+           return false;

          if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
-           return 0;
+           return false;

          return rtx_equal_for_cselib_1 (x, y, memmode, depth);
        }

       if (GET_CODE (xorig) != GET_CODE (yorig))
-       return 0;
+       return false;
     }

   /* These won't be handled correctly by the code below.  */
@@ -1061,7 +1061,7 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
     {
     CASE_CONST_UNIQUE:
     case DEBUG_EXPR:
-      return 0;
+      return false;

     case CONST_VECTOR:
       if (!same_vector_encodings_p (x, y))
@@ -1108,31 +1108,31 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
       {
       case 'w':
        if (XWINT (x, i) != XWINT (y, i))
-         return 0;
+         return false;
        break;

       case 'n':
       case 'i':
        if (XINT (x, i) != XINT (y, i))
-         return 0;
+         return false;
        break;

       case 'p':
        if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
-         return 0;
+         return false;
        break;

       case 'V':
       case 'E':
        /* Two vectors must have the same length.  */
        if (XVECLEN (x, i) != XVECLEN (y, i))
-         return 0;
+         return false;

        /* And the corresponding elements must match.  */
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                        XVECEXP (y, i, j), memmode, depth))
-           return 0;
+           return false;
        break;

       case 'e':
@@ -1142,16 +1142,16 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
                                      depth)
            && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
                                       depth))
-         return 1;
+         return true;
        if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
                                      depth))
-         return 0;
+         return false;
        break;

       case 'S':
       case 's':
        if (strcmp (XSTR (x, i), XSTR (y, i)))
-         return 0;
+         return false;
        break;

       case 'u':
@@ -1169,7 +1169,7 @@ rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
          gcc_unreachable ();
        }
     }
-  return 1;
+  return true;
 }

 /* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
gcc/cselib.h (10 changed lines)

@@ -82,9 +82,9 @@ extern void cselib_finish (void);
 extern void cselib_process_insn (rtx_insn *);
 extern bool fp_setter_insn (rtx_insn *);
 extern machine_mode cselib_reg_set_mode (const_rtx);
-extern int rtx_equal_for_cselib_1 (rtx, rtx, machine_mode, int);
+extern bool rtx_equal_for_cselib_1 (rtx, rtx, machine_mode, int);
 extern bool cselib_redundant_set_p (rtx);
-extern int references_value_p (const_rtx, int);
+extern bool references_value_p (const_rtx, int);
 extern rtx cselib_expand_value_rtx (rtx, bitmap, int);
 typedef rtx (*cselib_expand_callback)(rtx, bitmap, int, void *);
 extern rtx cselib_expand_value_rtx_cb (rtx, bitmap, int,
@@ -128,14 +128,14 @@ canonical_cselib_val (cselib_val *val)
   return canon;
 }

-/* Return nonzero if we can prove that X and Y contain the same value, taking
+/* Return true if we can prove that X and Y contain the same value, taking
    our gathered information into account.  */

-inline int
+inline bool
 rtx_equal_for_cselib_p (rtx x, rtx y)
 {
   if (x == y)
-    return 1;
+    return true;

   return rtx_equal_for_cselib_1 (x, y, VOIDmode, 0);
 }
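The cselib.h hunk also shows the wrapper idiom the patch keeps intact: rtx_equal_for_cselib_p retains its inline identity fast path and delegates to rtx_equal_for_cselib_1, which carries an explicit recursion-depth limit (the depth == 128 cutoff seen in cselib.cc). A stand-alone analogue of that structure, under invented names (Expr, expr_equal_p, expr_equal_1 are not GCC identifiers), reproduces only the shape, not the real RTL comparison:

// Stand-alone sketch of an inline fast-path wrapper over a depth-limited
// recursive worker; invented types and names, not GCC code.
#include <iostream>

struct Expr
{
  int op = 0;
  const Expr *lhs = nullptr;
  const Expr *rhs = nullptr;
};

static bool
expr_equal_1 (const Expr *x, const Expr *y, int depth)
{
  if (x == y)
    return true;
  if (!x || !y || x->op != y->op)
    return false;
  if (depth == 128)               // cap recursion, like the depth == 128
    return false;                 // check in rtx_equal_for_cselib_1
  return expr_equal_1 (x->lhs, y->lhs, depth + 1)
	 && expr_equal_1 (x->rhs, y->rhs, depth + 1);
}

inline bool
expr_equal_p (const Expr *x, const Expr *y)
{
  if (x == y)                     // cheap identity fast path in the wrapper
    return true;
  return expr_equal_1 (x, y, 0);
}

int
main ()
{
  Expr a {1}, b {1};
  Expr pa {2, &a, &a}, pb {2, &b, &b};
  std::cout << std::boolalpha << expr_equal_p (&pa, &pb) << '\n';  // prints: true
  return 0;
}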
gcc/expr.cc (104 changed lines)

@@ -89,7 +89,7 @@ static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,

 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

-static int is_aligning_offset (const_tree, const_tree);
+static bool is_aligning_offset (const_tree, const_tree);
 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
 static rtx do_store_flag (sepops, rtx, machine_mode);
 #ifdef PUSH_ROUNDING
@@ -1691,9 +1691,9 @@ store_by_pieces_d::finish_retmode (memop_ret retmode)
    a pointer which will be passed as argument in every CONSTFUN call.
    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
    a memset operation and false if it's a copy of a constant string.
-   Return nonzero if a call to store_by_pieces should succeed.  */
+   Return true if a call to store_by_pieces should succeed.  */

-int
+bool
 can_store_by_pieces (unsigned HOST_WIDE_INT len,
		      by_pieces_constfn constfun,
		      void *constfundata, unsigned int align, bool memsetp)
@@ -1707,14 +1707,14 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len,
   rtx cst ATTRIBUTE_UNUSED;

   if (len == 0)
-    return 1;
+    return true;

   if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
						 ? SET_BY_PIECES
						 : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
-    return 0;
+    return false;

   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

@@ -1749,7 +1749,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len,
	     vector mode for the memset expander.  */
	  if (!((memsetp && VECTOR_MODE_P (mode))
		|| targetm.legitimate_constant_p (mode, cst)))
-	    return 0;
+	    return false;

	  if (!reverse)
	    offset += size;
@@ -1765,7 +1765,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len,
       gcc_assert (!l);
     }

-  return 1;
+  return true;
 }

 /* Generate several move instructions to store LEN bytes generated by
@@ -6868,9 +6868,9 @@ complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
   return count_type_elements (type, true) == num_elts;
 }

-/* Return 1 if EXP contains mostly (3/4) zeros.  */
+/* Return true if EXP contains mostly (3/4) zeros.  */

-static int
+static bool
 mostly_zeros_p (const_tree exp)
 {
   if (TREE_CODE (exp) == CONSTRUCTOR)
@@ -6886,9 +6886,9 @@ mostly_zeros_p (const_tree exp)
   return initializer_zerop (exp);
 }

-/* Return 1 if EXP contains all zeros.  */
+/* Return true if EXP contains all zeros.  */

-static int
+static bool
 all_zeros_p (const_tree exp)
 {
   if (TREE_CODE (exp) == CONSTRUCTOR)
@@ -7146,10 +7146,10 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
-	int need_to_clear;
+	bool need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
-	int const_bounds_p;
+	bool const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

@@ -7173,9 +7173,9 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
-	  need_to_clear = 0;
+	  need_to_clear = false;
	else if (REG_P (target) && TREE_STATIC (exp))
-	  need_to_clear = 1;
+	  need_to_clear = true;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
@@ -7200,7 +7200,7 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
		if (! tree_fits_uhwi_p (lo_index)
		    || ! tree_fits_uhwi_p (hi_index))
		  {
-		    need_to_clear = 1;
+		    need_to_clear = true;
		    break;
		  }

@@ -7221,7 +7221,7 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
-	      need_to_clear = 1;
+	      need_to_clear = true;
	  }

	if (need_to_clear && maybe_gt (size, 0))
@@ -7413,7 +7413,7 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
-	int need_to_clear;
+	bool need_to_clear;
	insn_code icode = CODE_FOR_nothing;
	tree elt;
	tree elttype = TREE_TYPE (type);
@@ -7511,9 +7511,9 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
-	  need_to_clear = 0;
+	  need_to_clear = false;
	else if (REG_P (target) && TREE_STATIC (exp))
-	  need_to_clear = 1;
+	  need_to_clear = true;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
@@ -8283,15 +8283,15 @@ force_operand (rtx value, rtx target)
   return value;
 }

-/* Subroutine of expand_expr: return nonzero iff there is no way that
+/* Subroutine of expand_expr: return true iff there is no way that
    EXP can reference X, which is being modified.  TOP_P is nonzero if this
    call is going to be used to determine whether we need a temporary
    for EXP, as opposed to a recursive call to this function.

-   It is always safe for this routine to return zero since it merely
+   It is always safe for this routine to return false since it merely
    searches for optimization opportunities.  */

-int
+bool
 safe_from_p (const_rtx x, tree exp, int top_p)
 {
   rtx exp_rtl = 0;
@@ -8316,7 +8316,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
-    return 1;
+    return true;

   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
      find the underlying pseudo.  */
@@ -8324,7 +8324,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
     {
       x = SUBREG_REG (x);
       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
-	return 0;
+	return false;
     }

   /* Now look at our tree code and possibly recurse.  */
@@ -8335,7 +8335,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
       break;

     case tcc_constant:
-      return 1;
+      return true;

     case tcc_exceptional:
       if (TREE_CODE (exp) == TREE_LIST)
@@ -8343,10 +8343,10 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
-		return 0;
+		return false;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
-		return 1;
+		return true;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
@@ -8359,13 +8359,13 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
-	      return 0;
-	  return 1;
+	      return false;
+	  return true;
	}
       else if (TREE_CODE (exp) == ERROR_MARK)
-	return 1;	/* An already-visited SAVE_EXPR? */
+	return true;	/* An already-visited SAVE_EXPR? */
       else
-	return 0;
+	return false;

     case tcc_statement:
       /* The only case we look at here is the DECL_INITIAL inside a
@@ -8378,7 +8378,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
     case tcc_binary:
     case tcc_comparison:
       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
-	return 0;
+	return false;
       /* Fall through.  */

     case tcc_unary:
@@ -8400,7 +8400,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
-	    return 1;
+	    return true;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL a that address if part of X, which is
@@ -8410,7 +8410,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
-		return 0;
+		return false;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
@@ -8420,7 +8420,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
-	    return 0;
+	    return false;
	  break;

	case CALL_EXPR:
@@ -8428,7 +8428,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
-	    return 0;
+	    return false;
	  break;

	case WITH_CLEANUP_EXPR:
@@ -8451,7 +8451,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
-	  return 0;
+	  return false;

      break;

@@ -8469,7 +8469,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
-	    return 0;
+	    return false;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
@@ -8480,7 +8480,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
     }

   /* If we reach here, it is safe.  */
-  return 1;
+  return true;
 }


@@ -12195,11 +12195,11 @@ reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
     }
 }

-/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
+/* Subroutine of above: returns true if OFFSET corresponds to an offset that
    when applied to the address of EXP produces an address known to be
    aligned more than BIGGEST_ALIGNMENT.  */

-static int
+static bool
 is_aligning_offset (const_tree offset, const_tree exp)
 {
   /* Strip off any conversions.  */
@@ -12213,7 +12213,7 @@ is_aligning_offset (const_tree offset, const_tree exp)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
-    return 0;
+    return false;

   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
      It must be NEGATE_EXPR.  Then strip any more conversions.  */
@@ -12222,7 +12222,7 @@ is_aligning_offset (const_tree offset, const_tree exp)
     offset = TREE_OPERAND (offset, 0);

   if (TREE_CODE (offset) != NEGATE_EXPR)
-    return 0;
+    return false;

   offset = TREE_OPERAND (offset, 0);
   while (CONVERT_EXPR_P (offset))
@@ -13220,12 +13220,12 @@ do_store_flag (sepops ops, rtx target, machine_mode mode)
	  && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
 }

-/* Attempt to generate a casesi instruction.  Returns 1 if successful,
-   0 otherwise (i.e. if there is no casesi instruction).
+/* Attempt to generate a casesi instruction.  Returns true if successful,
+   false otherwise (i.e. if there is no casesi instruction).

    DEFAULT_PROBABILITY is the probability of jumping to the default
    label.  */
-int
+bool
 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    profile_probability default_probability)
@@ -13235,7 +13235,7 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
   rtx op1, op2, index;

   if (! targetm.have_casesi ())
-    return 0;
+    return false;

   /* The index must be some form of integer.  Convert it to SImode.  */
   scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
@@ -13279,7 +13279,7 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
			  ? default_label
			  : fallback_label));
   expand_jump_insn (targetm.code_for_casesi, 5, ops);
-  return 1;
+  return true;
 }

 /* Attempt to generate a tablejump instruction; same concept.  */
@@ -13374,7 +13374,7 @@ do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
       emit_barrier ();
     }

-int
+bool
 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label,
	       profile_probability default_probability)
@@ -13382,7 +13382,7 @@ try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
   rtx index;

   if (! targetm.have_tablejump ())
-    return 0;
+    return false;

   index_expr = fold_build2 (MINUS_EXPR, index_type,
			     fold_convert (index_type, index_expr),
@@ -13396,7 +13396,7 @@ try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		  table_label, default_label, default_probability);
-  return 1;
+  return true;
 }

 /* Return a CONST_VECTOR rtx representing vector mask for
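One of the spots touched in store_constructor derives the now-bool need_to_clear flag from a zero-density test, 4 * zero_count >= 3 * count, i.e. clear the whole object up front when at least three quarters of the elements are zero. The stand-alone illustration below isolates that arithmetic under an invented name (mostly_zeros here is not the mostly_zeros_p from expr.cc, just an analogue of the test feeding the bool flag):

// Stand-alone illustration (invented names, not GCC code) of the
// three-quarters-zeros test that now feeds a bool need_to_clear flag.
#include <iostream>
#include <vector>

static bool
mostly_zeros (const std::vector<int> &elts)
{
  std::size_t zero_count = 0;
  for (int v : elts)
    if (v == 0)
      ++zero_count;
  // Same integer form as in store_constructor: true when at least
  // 3/4 of the elements are zero, with no floating point involved.
  return 4 * zero_count >= 3 * elts.size ();
}

int
main ()
{
  std::cout << std::boolalpha
	    << mostly_zeros ({0, 0, 0, 5})   // true: 3 of 4 elements are zero
	    << ' '
	    << mostly_zeros ({1, 2, 0, 4})   // false: only 1 of 4 is zero
	    << '\n';
  return 0;
}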
gcc/expr.h (16 changed lines)

@@ -222,16 +222,16 @@ extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
				    unsigned HOST_WIDE_INT,
				    unsigned HOST_WIDE_INT);

-/* Return nonzero if it is desirable to store LEN bytes generated by
+/* Return true if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.  */
-extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
-				by_pieces_constfn,
-				void *, unsigned int, bool);
+extern bool can_store_by_pieces (unsigned HOST_WIDE_INT,
+				 by_pieces_constfn,
+				 void *, unsigned int, bool);

 /* Generate several move instructions to store LEN bytes generated by
    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
@@ -329,10 +329,12 @@ extern enum tree_code maybe_optimize_mod_cmp (enum tree_code, tree *, tree *);
 extern void maybe_optimize_sub_cmp_0 (enum tree_code, tree *, tree *);

 /* Two different ways of generating switch statements.  */
-extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, profile_probability);
-extern int try_tablejump (tree, tree, tree, tree, rtx, rtx, profile_probability);
+extern bool try_casesi (tree, tree, tree, tree, rtx, rtx, rtx,
+			profile_probability);
+extern bool try_tablejump (tree, tree, tree, tree, rtx, rtx,
+			   profile_probability);

-extern int safe_from_p (const_rtx, tree, int);
+extern bool safe_from_p (const_rtx, tree, int);

 /* Get the personality libfunc for a function decl.  */
 rtx get_personality_function (tree);
gcc/sbitmap.cc

@@ -187,7 +187,7 @@ bitmap_copy (sbitmap dst, const_sbitmap src)
 }

 /* Determine if a == b.  */
-int
+bool
 bitmap_equal_p (const_sbitmap a, const_sbitmap b)
 {
   bitmap_check_sizes (a, b);
gcc/sbitmap.h

@@ -264,7 +264,7 @@ extern sbitmap sbitmap_alloc (unsigned int);
 extern sbitmap *sbitmap_vector_alloc (unsigned int, unsigned int);
 extern sbitmap sbitmap_resize (sbitmap, unsigned int, int);
 extern void bitmap_copy (sbitmap, const_sbitmap);
-extern int bitmap_equal_p (const_sbitmap, const_sbitmap);
+extern bool bitmap_equal_p (const_sbitmap, const_sbitmap);
 extern unsigned int bitmap_count_bits (const_sbitmap);
 extern bool bitmap_empty_p (const_sbitmap);
 extern void bitmap_clear (sbitmap);
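For callers, the visible effect of the sbitmap change is that bitmap_equal_p now reads directly as a condition. The sketch below is a hypothetical helper, not part of the patch, and only compiles inside the GCC source tree; it follows the usual GCC include convention and uses routines declared in sbitmap.h (sbitmap_free is assumed to be the matching deallocator there):

/* Hypothetical caller sketch; assumes the GCC source tree, not standalone.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "sbitmap.h"

/* Return true iff two freshly cleared bitmaps of N_BITS compare equal --
   trivially so, but it shows bitmap_equal_p used as a bool condition.  */
static bool
cleared_bitmaps_equal_p (unsigned int n_bits)
{
  sbitmap a = sbitmap_alloc (n_bits);
  sbitmap b = sbitmap_alloc (n_bits);
  bitmap_clear (a);                 /* sbitmap_alloc does not zero the bits  */
  bitmap_copy (b, a);               /* b becomes a copy of the cleared a     */
  bool equal = bitmap_equal_p (a, b) && bitmap_empty_p (b);
  sbitmap_free (a);
  sbitmap_free (b);
  return equal;
}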