Come up with {,UN}LIKELY macros.
gcc/c/ChangeLog:

	* c-parser.cc (c_parser_conditional_expression): Use {,UN}LIKELY
	macros.
	(c_parser_binary_expression): Likewise.

gcc/cp/ChangeLog:

	* cp-gimplify.cc (cp_genericize_r): Use {,UN}LIKELY macros.
	* parser.cc (cp_finalize_omp_declare_simd): Likewise.
	(cp_finalize_oacc_routine): Likewise.

gcc/ChangeLog:

	* system.h (LIKELY): Define.
	(UNLIKELY): Likewise.
	* domwalk.cc (sort_bbs_postorder): Use {,UN}LIKELY macros.
	* dse.cc (set_position_unneeded): Likewise.
	(set_all_positions_unneeded): Likewise.
	(any_positions_needed_p): Likewise.
	(all_positions_needed_p): Likewise.
	* expmed.cc (flip_storage_order): Likewise.
	* genmatch.cc (dt_simplify::gen_1): Likewise.
	* ggc-common.cc (gt_pch_save): Likewise.
	* print-rtl.cc: Likewise.
	* rtl-iter.h (T>::array_type::~array_type): Likewise.
	(T>::next): Likewise.
	* rtl-ssa/internals.inl: Likewise.
	* rtl-ssa/member-fns.inl: Likewise.
	* rtlanal.cc (T>::add_subrtxes_to_queue): Likewise.
	(rtx_properties::try_to_add_dest): Likewise.
	* rtlanal.h (growing_rtx_properties::repeat): Likewise.
	(vec_rtx_properties_base::~vec_rtx_properties_base): Likewise.
	* simplify-rtx.cc (simplify_replace_fn_rtx): Likewise.
	* sort.cc (likely): Likewise.
	(mergesort): Likewise.
	* wide-int.h (wi::eq_p): Likewise.
	(wi::ltu_p): Likewise.
	(wi::cmpu): Likewise.
	(wi::bit_and): Likewise.
	(wi::bit_and_not): Likewise.
	(wi::bit_or): Likewise.
	(wi::bit_or_not): Likewise.
	(wi::bit_xor): Likewise.
	(wi::add): Likewise.
	(wi::sub): Likewise.
parent 27239e13b1
commit 22d9c8802a

18 changed files with 64 additions and 63 deletions
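For orientation before the diff: the patch's mechanical change is replacing bare `__builtin_expect (COND, 1)` / `__builtin_expect (COND, 0)` calls with the new `LIKELY`/`UNLIKELY` wrappers. A minimal sketch of the pattern follows; the macro definitions are copied from the gcc/system.h hunk below, while `parse_token` and its condition are purely illustrative, not from the patch:

```cpp
/* From the gcc/system.h hunk in this patch; system.h also defines
   __builtin_expect (a, b) away to (a) on non-GCC hosts, so the macros
   are safe to use unconditionally.  */
#define LIKELY(x) (__builtin_expect ((x), 1))
#define UNLIKELY(x) (__builtin_expect ((x), 0))

/* Hypothetical caller: hint that the error path is cold so the compiler
   keeps the hot path on the fall-through.  Before this patch the
   condition would be spelled __builtin_expect (p == nullptr, 0).  */
int
parse_token (const char *p)
{
  if (UNLIKELY (p == nullptr))
    return -1;       /* cold error path */
  return *p != '\0'; /* hot fall-through */
}
```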
gcc/c/c-parser.cc

@@ -7669,7 +7669,7 @@ c_parser_conditional_expression (c_parser *parser, struct c_expr *after,
   c_inhibit_evaluation_warnings -= cond.value == truthvalue_true_node;
   location_t loc1 = make_location (exp1.get_start (), exp1.src_range);
   location_t loc2 = make_location (exp2.get_start (), exp2.src_range);
-  if (__builtin_expect (omp_atomic_lhs != NULL, 0)
+  if (UNLIKELY (omp_atomic_lhs != NULL)
       && (TREE_CODE (cond.value) == GT_EXPR
           || TREE_CODE (cond.value) == LT_EXPR
           || TREE_CODE (cond.value) == EQ_EXPR)
@@ -7865,7 +7865,7 @@ c_parser_binary_expression (c_parser *parser, struct c_expr *after,
       stack[sp].expr \
         = convert_lvalue_to_rvalue (stack[sp].loc, \
                                     stack[sp].expr, true, true); \
-    if (__builtin_expect (omp_atomic_lhs != NULL_TREE, 0) && sp == 1 \
+    if (UNLIKELY (omp_atomic_lhs != NULL_TREE) && sp == 1 \
        && ((c_parser_next_token_is (parser, CPP_SEMICOLON) \
        && ((1 << stack[sp].prec) \
            & ((1 << PREC_BITOR) | (1 << PREC_BITXOR) \
gcc/cp/cp-gimplify.cc

@@ -1178,7 +1178,7 @@ cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
   hash_set<tree> *p_set = wtd->p_set;
 
   /* If in an OpenMP context, note var uses.  */
-  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
+  if (UNLIKELY (wtd->omp_ctx != NULL)
       && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
@@ -1242,7 +1242,7 @@ cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
       if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
-         if (__builtin_expect (wtd->omp_ctx != NULL, 0)
+         if (UNLIKELY (wtd->omp_ctx != NULL)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
@@ -1369,7 +1369,7 @@ cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
       break;
 
     case BIND_EXPR:
-      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
+      if (UNLIKELY (wtd->omp_ctx != NULL))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
gcc/cp/parser.cc

@@ -1454,7 +1454,7 @@ cp_ensure_no_omp_declare_simd (cp_parser *parser)
 static inline void
 cp_finalize_omp_declare_simd (cp_parser *parser, tree fndecl)
 {
-  if (__builtin_expect (parser->omp_declare_simd != NULL, 0))
+  if (UNLIKELY (parser->omp_declare_simd != NULL))
     {
       if (fndecl == error_mark_node)
        {
@@ -22660,7 +22660,7 @@ cp_parser_init_declarator (cp_parser* parser,
       bool ok = (cp_lexer_next_token_is (parser->lexer, CPP_SEMICOLON)
                 || cp_lexer_next_token_is (parser->lexer, CPP_COMMA));
       cp_lexer_rollback_tokens (parser->lexer);
-      if (__builtin_expect (!ok, 0))
+      if (UNLIKELY (!ok))
        /* Not an init-declarator.  */
        return error_mark_node;
     }
@@ -47196,7 +47196,7 @@ cp_parser_late_parsing_oacc_routine (cp_parser *parser, tree attrs)
 static void
 cp_finalize_oacc_routine (cp_parser *parser, tree fndecl, bool is_defn)
 {
-  if (__builtin_expect (parser->oacc_routine != NULL, 0))
+  if (UNLIKELY (parser->oacc_routine != NULL))
     {
       /* Keep going if we're in error reporting mode.  */
       if (parser->oacc_routine->error_seen
gcc/domwalk.cc

@@ -144,13 +144,13 @@ cmp_bb_postorder (const void *a, const void *b, void *data)
 static void
 sort_bbs_postorder (basic_block *bbs, int n, int *bb_postorder)
 {
-  if (__builtin_expect (n == 2, true))
+  if (LIKELY (n == 2))
     {
       basic_block bb0 = bbs[0], bb1 = bbs[1];
       if (bb_postorder[bb0->index] < bb_postorder[bb1->index])
        bbs[0] = bb1, bbs[1] = bb0;
     }
-  else if (__builtin_expect (n == 3, true))
+  else if (LIKELY (n == 3))
     {
       basic_block bb0 = bbs[0], bb1 = bbs[1], bb2 = bbs[2];
       if (bb_postorder[bb0->index] < bb_postorder[bb1->index])
gcc/dse.cc

@@ -1249,7 +1249,7 @@ clear_rhs_from_active_local_stores (void)
 static inline void
 set_position_unneeded (store_info *s_info, int pos)
 {
-  if (__builtin_expect (s_info->is_large, false))
+  if (UNLIKELY (s_info->is_large))
     {
       if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
        s_info->positions_needed.large.count++;
@@ -1264,7 +1264,7 @@ set_position_unneeded (store_info *s_info, int pos)
 static inline void
 set_all_positions_unneeded (store_info *s_info)
 {
-  if (__builtin_expect (s_info->is_large, false))
+  if (UNLIKELY (s_info->is_large))
     {
       HOST_WIDE_INT width;
       if (s_info->width.is_constant (&width))
@@ -1287,7 +1287,7 @@ set_all_positions_unneeded (store_info *s_info)
 static inline bool
 any_positions_needed_p (store_info *s_info)
 {
-  if (__builtin_expect (s_info->is_large, false))
+  if (UNLIKELY (s_info->is_large))
     {
       HOST_WIDE_INT width;
       if (s_info->width.is_constant (&width))
@@ -1328,7 +1328,7 @@ all_positions_needed_p (store_info *s_info, poly_int64 start,
       || !width.is_constant (&const_width))
     return false;
 
-  if (__builtin_expect (s_info->is_large, false))
+  if (UNLIKELY (s_info->is_large))
     {
       for (HOST_WIDE_INT i = const_start; i < const_start + const_width; ++i)
        if (bitmap_bit_p (s_info->positions_needed.large.bmap, i))
gcc/expmed.cc

@@ -403,13 +403,13 @@ flip_storage_order (machine_mode mode, rtx x)
       return gen_rtx_CONCAT (mode, real, imag);
     }
 
-  if (__builtin_expect (reverse_storage_order_supported < 0, 0))
+  if (UNLIKELY (reverse_storage_order_supported < 0))
     check_reverse_storage_order_support ();
 
   if (!is_a <scalar_int_mode> (mode, &int_mode))
     {
       if (FLOAT_MODE_P (mode)
-         && __builtin_expect (reverse_float_storage_order_supported < 0, 0))
+         && UNLIKELY (reverse_float_storage_order_supported < 0))
        check_reverse_float_storage_order_support ();
 
       if (!int_mode_for_size (GET_MODE_PRECISION (mode), 0).exists (&int_mode)
gcc/genmatch.cc

@@ -3358,9 +3358,9 @@ dt_simplify::gen_1 (FILE *f, int indent, bool gimple, operand *result)
     }
 
   if (s->kind == simplify::SIMPLIFY)
-    fprintf_indent (f, indent, "if (__builtin_expect (!dbg_cnt (match), 0)) goto %s;\n", fail_label);
+    fprintf_indent (f, indent, "if (UNLIKELY (!dbg_cnt (match))) goto %s;\n", fail_label);
 
-  fprintf_indent (f, indent, "if (__builtin_expect (dump_file && (dump_flags & TDF_FOLDING), 0)) "
+  fprintf_indent (f, indent, "if (UNLIKELY (dump_file && (dump_flags & TDF_FOLDING))) "
                  "fprintf (dump_file, \"%s ",
                  s->kind == simplify::SIMPLIFY
                  ? "Applying pattern" : "Matching expression");
gcc/ggc-common.cc

@@ -592,7 +592,7 @@ gt_pch_save (FILE *f)
         temporarily defined and then restoring previous state.  */
       int get_vbits = 0;
       size_t valid_size = state.ptrs[i]->size;
-      if (__builtin_expect (RUNNING_ON_VALGRIND, 0))
+      if (UNLIKELY (RUNNING_ON_VALGRIND))
        {
          if (vbits.length () < valid_size)
            vbits.safe_grow (valid_size, true);
@@ -644,7 +644,7 @@ gt_pch_save (FILE *f)
       if (state.ptrs[i]->note_ptr_fn != gt_pch_p_S)
        memcpy (state.ptrs[i]->obj, this_object, state.ptrs[i]->size);
 #if defined ENABLE_VALGRIND_ANNOTATIONS && defined VALGRIND_GET_VBITS
-      if (__builtin_expect (get_vbits == 1, 0))
+      if (UNLIKELY (get_vbits == 1))
        {
          (void) VALGRIND_SET_VBITS (state.ptrs[i]->obj, vbits.address (),
                                     valid_size);
gcc/print-rtl.cc

@@ -941,7 +941,7 @@ rtx_writer::print_rtx (const_rtx in_rtx)
     {
 #ifndef GENERATOR_FILE
     case MEM:
-      if (__builtin_expect (final_insns_dump_p, false))
+      if (UNLIKELY (final_insns_dump_p))
        fprintf (m_outfile, " [");
       else
        fprintf (m_outfile, " [" HOST_WIDE_INT_PRINT_DEC,
gcc/rtl-iter.h

@@ -114,7 +114,7 @@ inline generic_subrtx_iterator <T>::array_type::array_type () : heap (0) {}
 template <typename T>
 inline generic_subrtx_iterator <T>::array_type::~array_type ()
 {
-  if (__builtin_expect (heap != 0, false))
+  if (UNLIKELY (heap != 0))
     free_array (*this);
 }
 
@@ -172,7 +172,7 @@ generic_subrtx_iterator <T>::next ()
     {
       /* Add the subrtxes of M_CURRENT.  */
       rtx_type x = T::get_rtx (m_current);
-      if (__builtin_expect (x != 0, true))
+      if (LIKELY (x != 0))
        {
          enum rtx_code code = GET_CODE (x);
          ssize_t count = m_bounds[code].count;
@@ -180,12 +180,12 @@ generic_subrtx_iterator <T>::next ()
            {
              /* Handle the simple case of a single "e" block that is known
                 to fit into the current array.  */
-             if (__builtin_expect (m_end + count <= LOCAL_ELEMS + 1, true))
+             if (LIKELY (m_end + count <= LOCAL_ELEMS + 1))
                {
                  /* Set M_CURRENT to the first subrtx and queue the rest.  */
                  ssize_t start = m_bounds[code].start;
                  rtunion_type *src = &x->u.fld[start];
-                 if (__builtin_expect (count > 2, false))
+                 if (UNLIKELY (count > 2))
                    m_base[m_end++] = T::get_value (src[2].rt_rtx);
                  if (count > 1)
                    m_base[m_end++] = T::get_value (src[1].rt_rtx);
gcc/rtl-ssa/internals.inl

@@ -305,7 +305,7 @@ inline clobber_info::clobber_info (insn_info *insn, unsigned int regno)
 inline void
 clobber_info::update_group (clobber_group *group)
 {
-  if (__builtin_expect (m_group != group, 0))
+  if (UNLIKELY (m_group != group))
     m_group = group;
 }
 
gcc/rtl-ssa/member-fns.inl

@@ -484,7 +484,7 @@ insn_info::operator< (const insn_info &other) const
   if (this == &other)
     return false;
 
-  if (__builtin_expect (m_point != other.m_point, 1))
+  if (LIKELY (m_point != other.m_point))
     return m_point < other.m_point;
 
   return slow_compare_with (other) < 0;
@@ -514,7 +514,7 @@ insn_info::compare_with (const insn_info *other) const
   if (this == other)
     return 0;
 
-  if (__builtin_expect (m_point != other->m_point, 1))
+  if (LIKELY (m_point != other->m_point))
     // Assume that points remain in [0, INT_MAX].
     return m_point - other->m_point;
 
gcc/rtlanal.cc

@@ -131,7 +131,7 @@ generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
   enum rtx_code code = GET_CODE (x);
   const char *format = GET_RTX_FORMAT (code);
   size_t orig_end = end;
-  if (__builtin_expect (INSN_P (x), false))
+  if (UNLIKELY (INSN_P (x)))
     {
       /* Put the pattern at the top of the queue, since that's what
         we're likely to want most.  It also allows for the SEQUENCE
@@ -140,7 +140,7 @@ generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
        if (format[i] == 'e')
          {
            value_type subx = T::get_value (x->u.fld[i].rt_rtx);
-           if (__builtin_expect (end < LOCAL_ELEMS, true))
+           if (LIKELY (end < LOCAL_ELEMS))
              base[end++] = subx;
            else
              base = add_single_to_queue (array, base, end++, subx);
@@ -151,7 +151,7 @@ generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
       if (format[i] == 'e')
        {
          value_type subx = T::get_value (x->u.fld[i].rt_rtx);
-         if (__builtin_expect (end < LOCAL_ELEMS, true))
+         if (LIKELY (end < LOCAL_ELEMS))
            base[end++] = subx;
          else
           base = add_single_to_queue (array, base, end++, subx);
@@ -160,7 +160,7 @@ generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
       {
         unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
         rtx *vec = x->u.fld[i].rt_rtvec->elem;
-        if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
+        if (LIKELY (end + length <= LOCAL_ELEMS))
          for (unsigned int j = 0; j < length; j++)
            base[end++] = T::get_value (vec[j]);
        else
@@ -2114,7 +2114,7 @@ rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags)
 {
   /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
      each of whose first operand is a register.  */
-  if (__builtin_expect (GET_CODE (x) == PARALLEL, 0))
+  if (UNLIKELY (GET_CODE (x) == PARALLEL))
     {
       for (int i = XVECLEN (x, 0) - 1; i >= 0; --i)
        if (rtx dest = XEXP (XVECEXP (x, 0, i), 0))
@@ -2159,7 +2159,7 @@ rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags)
       return;
     }
 
-  if (__builtin_expect (REG_P (x), 1))
+  if (LIKELY (REG_P (x)))
     {
       /* We want to keep sp alive everywhere - by making all
         writes to sp also use sp.  */
gcc/rtlanal.h

@@ -247,7 +247,7 @@ growing_rtx_properties<Base>::repeat (AddFn add)
       /* This retries if the storage happened to be exactly the right size,
         but that's expected to be a rare case and so isn't worth
         optimizing for.  */
-      if (__builtin_expect (this->ref_iter != this->ref_end, 1))
+      if (LIKELY (this->ref_iter != this->ref_end))
        break;
       this->grow (count);
     }
@@ -313,7 +313,7 @@ inline vec_rtx_properties_base::vec_rtx_properties_base ()
 
 inline vec_rtx_properties_base::~vec_rtx_properties_base ()
 {
-  if (__builtin_expect (ref_begin != m_storage, 0))
+  if (UNLIKELY (ref_begin != m_storage))
     free (ref_begin);
 }
 
gcc/simplify-rtx.cc

@@ -414,7 +414,7 @@ simplify_replace_fn_rtx (rtx x, const_rtx old_rtx,
   rtvec vec, newvec;
   int i, j;
 
-  if (__builtin_expect (fn != NULL, 0))
+  if (UNLIKELY (fn != NULL))
     {
       newx = fn (x, old_rtx, data);
       if (newx)
gcc/sort.cc

@@ -37,8 +37,6 @@ along with GCC; see the file COPYING3.  If not see
 
 #include "system.h"
 
-#define likely(cond) __builtin_expect ((cond), 1)
-
 #ifdef __GNUC__
 #define noinline __attribute__ ((__noinline__))
 #else
@@ -86,15 +84,15 @@ do { \
   memcpy (&t0, e0 + OFFSET, sizeof (TYPE)); \
   memcpy (&t1, e1 + OFFSET, sizeof (TYPE)); \
   char *out = c->out + OFFSET; \
-  if (likely (c->n == 3)) \
+  if (LIKELY (c->n == 3)) \
     memmove (out + 2*STRIDE, e2 + OFFSET, sizeof (TYPE));\
   memcpy (out, &t0, sizeof (TYPE)); out += STRIDE; \
   memcpy (out, &t1, sizeof (TYPE)); \
 } while (0)
 
-  if (likely (c->size == sizeof (size_t)))
+  if (LIKELY (c->size == sizeof (size_t)))
     REORDER_23 (size_t, sizeof (size_t), 0);
-  else if (likely (c->size == sizeof (int)))
+  else if (LIKELY (c->size == sizeof (int)))
     REORDER_23 (int, sizeof (int), 0);
   else
     {
@@ -119,7 +117,7 @@ do { \
   memcpy (&t2, e2 + OFFSET, sizeof (TYPE)); \
   memcpy (&t3, e3 + OFFSET, sizeof (TYPE)); \
   char *out = c->out + OFFSET; \
-  if (likely (c->n == 5)) \
+  if (LIKELY (c->n == 5)) \
     memmove (out + 4*STRIDE, e4 + OFFSET, sizeof (TYPE));\
   memcpy (out, &t0, sizeof (TYPE)); out += STRIDE; \
   memcpy (out, &t1, sizeof (TYPE)); out += STRIDE; \
@@ -127,9 +125,9 @@ do { \
   memcpy (out, &t3, sizeof (TYPE)); \
 } while (0)
 
-  if (likely (c->size == sizeof (size_t)))
+  if (LIKELY (c->size == sizeof (size_t)))
     REORDER_45 (size_t, sizeof (size_t), 0);
-  else if (likely(c->size == sizeof (int)))
+  else if (LIKELY (c->size == sizeof (int)))
     REORDER_45 (int, sizeof (int), 0);
   else
     {
@@ -168,7 +166,7 @@ do { \
 
   char *e0 = in, *e1 = e0 + c->size, *e2 = e1 + c->size;
   CMP (e0, e1);
-  if (likely (c->n == 3))
+  if (LIKELY (c->n == 3))
    {
      CMP (e1, e2);
      CMP (e0, e1);
@@ -176,13 +174,13 @@ do { \
   if (c->n <= 3)
     return reorder23 (c, e0, e1, e2);
   char *e3 = e2 + c->size, *e4 = e3 + c->size;
-  if (likely (c->n == 5))
+  if (LIKELY (c->n == 5))
    {
      CMP (e3, e4);
      CMP (e2, e4);
    }
   CMP (e2, e3);
-  if (likely (c->n == 5))
+  if (LIKELY (c->n == 5))
    {
      CMP (e0, e3);
      CMP (e1, e4);
@@ -200,7 +198,7 @@ template<typename sort_ctx>
 static void
 mergesort (char *in, sort_ctx *c, size_t n, char *out, char *tmp)
 {
-  if (likely (n <= c->nlim))
+  if (LIKELY (n <= c->nlim))
    {
      c->out = out;
      c->n = n;
@@ -225,12 +223,12 @@ do { \
   l += ~mr & SIZE; \
 } while (r != end)
 
-  if (likely (c->cmp(r, l + (r - out) - c->size) < 0))
+  if (LIKELY (c->cmp (r, l + (r - out) - c->size) < 0))
    {
      char *end = out + n * c->size;
-     if (sizeof (size_t) == 8 && likely (c->size == 8))
+     if (sizeof (size_t) == 8 && LIKELY (c->size == 8))
       MERGE_ELTSIZE (8);
-     else if (likely (c->size == 4))
+     else if (LIKELY (c->size == 4))
       MERGE_ELTSIZE (4);
     else
       MERGE_ELTSIZE (c->size);
gcc/system.h

@@ -736,6 +736,9 @@ extern int vsnprintf (char *, size_t, const char *, va_list);
 #define __builtin_expect(a, b) (a)
 #endif
 
+#define LIKELY(x) (__builtin_expect ((x), 1))
+#define UNLIKELY(x) (__builtin_expect ((x), 0))
+
 /* Some of the headers included by <memory> can use "abort" within a
    namespace, e.g. "_VSTD::abort();", which fails after we use the
    preprocessor to redefine "abort" as "fancy_abort" below.  */
@@ -783,7 +786,7 @@ extern void fancy_abort (const char *, int, const char *)
   ((void)(!(EXPR) ? fancy_abort (__FILE__, __LINE__, __FUNCTION__), 0 : 0))
 #elif (GCC_VERSION >= 4005)
 #define gcc_assert(EXPR) \
-  ((void)(__builtin_expect (!(EXPR), 0) ? __builtin_unreachable (), 0 : 0))
+  ((void)(UNLIKELY (!(EXPR)) ? __builtin_unreachable (), 0 : 0))
 #else
 /* Include EXPR, so that unused variable warnings do not occur.  */
 #define gcc_assert(EXPR) ((void)(0 && (EXPR)))
gcc/wide-int.h

@@ -1867,7 +1867,7 @@ wi::eq_p (const T1 &x, const T2 &y)
       while (++i != xi.len);
       return true;
     }
-  if (__builtin_expect (yi.len == 1, true))
+  if (LIKELY (yi.len == 1))
    {
      /* XI is only equal to YI if it too has a single HWI.  */
      if (xi.len != 1)
@@ -1943,7 +1943,7 @@ wi::ltu_p (const T1 &x, const T2 &y)
   /* Optimize the case of two HWIs.  The HWIs are implicitly sign-extended
      for precisions greater than HOST_BITS_WIDE_INT, but sign-extending both
      values does not change the result.  */
-  if (__builtin_expect (xi.len + yi.len == 2, true))
+  if (LIKELY (xi.len + yi.len == 2))
    {
      unsigned HOST_WIDE_INT xl = xi.to_uhwi ();
      unsigned HOST_WIDE_INT yl = yi.to_uhwi ();
@@ -2114,7 +2114,7 @@ wi::cmpu (const T1 &x, const T2 &y)
   /* Optimize the case of two HWIs.  The HWIs are implicitly sign-extended
      for precisions greater than HOST_BITS_WIDE_INT, but sign-extending both
      values does not change the result.  */
-  if (__builtin_expect (xi.len + yi.len == 2, true))
+  if (LIKELY (xi.len + yi.len == 2))
    {
      unsigned HOST_WIDE_INT xl = xi.to_uhwi ();
      unsigned HOST_WIDE_INT yl = yi.to_uhwi ();
@@ -2321,7 +2321,7 @@ wi::bit_and (const T1 &x, const T2 &y)
   WIDE_INT_REF_FOR (T1) xi (x, precision);
   WIDE_INT_REF_FOR (T2) yi (y, precision);
   bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
-  if (__builtin_expect (xi.len + yi.len == 2, true))
+  if (LIKELY (xi.len + yi.len == 2))
    {
      val[0] = xi.ulow () & yi.ulow ();
      result.set_len (1, is_sign_extended);
@@ -2342,7 +2342,7 @@ wi::bit_and_not (const T1 &x, const T2 &y)
   WIDE_INT_REF_FOR (T1) xi (x, precision);
   WIDE_INT_REF_FOR (T2) yi (y, precision);
   bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
-  if (__builtin_expect (xi.len + yi.len == 2, true))
+  if (LIKELY (xi.len + yi.len == 2))
    {
      val[0] = xi.ulow () & ~yi.ulow ();
      result.set_len (1, is_sign_extended);
@@ -2363,7 +2363,7 @@ wi::bit_or (const T1 &x, const T2 &y)
   WIDE_INT_REF_FOR (T1) xi (x, precision);
   WIDE_INT_REF_FOR (T2) yi (y, precision);
   bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
-  if (__builtin_expect (xi.len + yi.len == 2, true))
+  if (LIKELY (xi.len + yi.len == 2))
    {
      val[0] = xi.ulow () | yi.ulow ();
      result.set_len (1, is_sign_extended);
@@ -2384,7 +2384,7 @@ wi::bit_or_not (const T1 &x, const T2 &y)
   WIDE_INT_REF_FOR (T1) xi (x, precision);
   WIDE_INT_REF_FOR (T2) yi (y, precision);
   bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
-  if (__builtin_expect (xi.len + yi.len == 2, true))
+  if (LIKELY (xi.len + yi.len == 2))
    {
      val[0] = xi.ulow () | ~yi.ulow ();
      result.set_len (1, is_sign_extended);
@@ -2405,7 +2405,7 @@ wi::bit_xor (const T1 &x, const T2 &y)
   WIDE_INT_REF_FOR (T1) xi (x, precision);
   WIDE_INT_REF_FOR (T2) yi (y, precision);
   bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
-  if (__builtin_expect (xi.len + yi.len == 2, true))
+  if (LIKELY (xi.len + yi.len == 2))
    {
      val[0] = xi.ulow () ^ yi.ulow ();
      result.set_len (1, is_sign_extended);
@@ -2441,7 +2441,7 @@ wi::add (const T1 &x, const T2 &y)
      HOST_BITS_PER_WIDE_INT are relatively rare and there's not much
      point handling them inline.  */
   else if (STATIC_CONSTANT_P (precision > HOST_BITS_PER_WIDE_INT)
-          && __builtin_expect (xi.len + yi.len == 2, true))
+          && LIKELY (xi.len + yi.len == 2))
    {
      unsigned HOST_WIDE_INT xl = xi.ulow ();
      unsigned HOST_WIDE_INT yl = yi.ulow ();
@@ -2527,7 +2527,7 @@ wi::sub (const T1 &x, const T2 &y)
      HOST_BITS_PER_WIDE_INT are relatively rare and there's not much
      point handling them inline.  */
   else if (STATIC_CONSTANT_P (precision > HOST_BITS_PER_WIDE_INT)
-          && __builtin_expect (xi.len + yi.len == 2, true))
+          && LIKELY (xi.len + yi.len == 2))
    {
      unsigned HOST_WIDE_INT xl = xi.ulow ();
      unsigned HOST_WIDE_INT yl = yi.ulow ();