tailc: Handle musttail noreturn calls [PR119483]

The following (first) testcase is accepted by clang (if clang::musttail)
and rejected by gcc, because we discover the call is noreturn and then bail
out because we don't want noreturn tailcalls.
The general reason not to support noreturn tail calls is for cases like
abort where we want nicer backtrace, but if user asks explicitly to
musttail a call which either is explicitly noreturn or is implicitly
determined to be noreturn, I don't see a reason why we couldn't do that.
Both for tail calls and tail recursions.

An alternative would be to keep rejecting musttail to explicit noreturn,
but not actually implicitly mark anything as noreturn if it has any musttail
calls.  But it is unclear how we could do that; such marking is, I think, done
typically before IPA and e.g. for LTO we won't know whether some other TU
could have musttail calls to it.  And keeping around both explicit and
implicit noreturn bits would be ugly.  Well, I guess we could differentiate
between presence of noreturn/_Noreturn attributes and just ECF_NORETURN
without those, but then tailc would still need to support it, just error out
if it was explicit.

2025-03-28  Jakub Jelinek  <jakub@redhat.com>

	PR tree-optimization/119483
	* tree-tailcall.cc (find_tail_calls): Handle noreturn musttail
	calls.
	(eliminate_tail_call): Likewise.
	(tree_optimize_tail_calls_1): If cfun->has_musttail and
	diag_musttail, handle also basic blocks with no successors
	with noreturn musttail calls.
	* calls.cc (can_implement_as_sibling_call_p): Allow ECF_NORETURN
	calls if they are musttail calls.

	* c-c++-common/pr119483-1.c: New test.
	* c-c++-common/pr119483-2.c: New test.
This commit is contained in:
Jakub Jelinek 2025-03-28 10:49:40 +01:00 committed by Jakub Jelinek
parent c95f5a0c07
commit e176456cfe
4 changed files with 99 additions and 20 deletions

View file

@@ -2568,7 +2568,7 @@ can_implement_as_sibling_call_p (tree exp,
maybe_complain_about_tail_call (exp, _("callee returns twice"));
return false;
}
if (flags & ECF_NORETURN)
if ((flags & ECF_NORETURN) && !CALL_EXPR_MUST_TAIL_CALL (exp))
{
maybe_complain_about_tail_call (exp, _("callee does not return"));
return false;

View file

@@ -0,0 +1,29 @@
/* PR tree-optimization/119483 */
/* { dg-do compile { target musttail } } */
/* { dg-options "-O2 -fdump-tree-optimized" } */
/* { dg-final { scan-tree-dump-times "bar\[.a-z0-9]* \\\(\[^\n\r]*\\\); \\\[tail call\\\] \\\[must tail call\\\]" 1 "optimized" } } */
/* { dg-final { scan-tree-dump-times "baz \\\(\[^\n\r]*\\\); \\\[tail call\\\] \\\[must tail call\\\]" 1 "optimized" } } */
[[gnu::noreturn]] extern void foo (void);
/* Helper whose only path to exit is a call to the noreturn function foo,
   so GCC implicitly determines bar itself to be noreturn.  The musttail
   call to bar in qux must nevertheless be kept as a tail call (see the
   scan-tree-dump directives above).  */
[[gnu::noinline]] static int
bar (int x)
{
(void) x;
foo ();
return 0;
}
/* Ordinary returning callee, opaque to IPA analysis (noipa); used as the
   second musttail call in qux.  */
[[gnu::noipa]] int
baz (int x)
{
return x + 42;
}
/* Both returns are musttail: one to bar (implicitly noreturn, the case
   PR119483 is about) and one to the ordinary baz.  Both must be emitted
   as tail calls.  */
int
qux (int x)
{
if (x == 1)
[[gnu::musttail]] return bar (1);
[[gnu::musttail]] return baz (x);
}

View file

@@ -0,0 +1,12 @@
/* PR tree-optimization/119483 */
/* { dg-do compile { target musttail } } */
/* { dg-options "-O2" } */
/* Tail recursion inside an explicitly noreturn function: the musttail
   recursive call must now be accepted; only the usual "noreturn has a
   return statement" warning is expected (dg-warning below).  */
[[noreturn]] int
foo (int x)
{
if (x > 10)
[[gnu::musttail]] return foo (x - 1); /* { dg-warning "function declared 'noreturn' has a 'return' statement" } */
for (;;)
;
}

View file

@@ -484,7 +484,8 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
size_t idx;
tree var;
if (!single_succ_p (bb))
if (!single_succ_p (bb)
&& (EDGE_COUNT (bb->succs) || !cfun->has_musttail || !diag_musttail))
{
/* If there is an abnormal edge assume it's the only extra one.
Tolerate that case so that we can give better error messages
@@ -605,7 +606,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
/* If the call might throw an exception that wouldn't propagate out of
cfun, we can't transform to a tail or sibling call (82081). */
if ((stmt_could_throw_p (cfun, stmt)
&& !stmt_can_throw_external (cfun, stmt)) || !single_succ_p (bb))
&& !stmt_can_throw_external (cfun, stmt)) || EDGE_COUNT (bb->succs) > 1)
{
if (stmt == last_stmt)
maybe_error_musttail (call,
@@ -760,10 +761,12 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
a = NULL_TREE;
auto_bitmap to_move_defs;
auto_vec<gimple *> to_move_stmts;
bool is_noreturn
= EDGE_COUNT (bb->succs) == 0 && gimple_call_noreturn_p (call);
abb = bb;
agsi = gsi;
while (1)
while (!is_noreturn)
{
tree tmp_a = NULL_TREE;
tree tmp_m = NULL_TREE;
@@ -844,7 +847,22 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
}
/* See if this is a tail call we can handle. */
ret_var = gimple_return_retval (as_a <greturn *> (stmt));
if (is_noreturn)
{
tree rettype = TREE_TYPE (TREE_TYPE (current_function_decl));
tree calltype = TREE_TYPE (gimple_call_fntype (call));
if (!VOID_TYPE_P (rettype)
&& !useless_type_conversion_p (rettype, calltype))
{
maybe_error_musttail (call,
_("call and return value are different"),
diag_musttail);
return;
}
ret_var = NULL_TREE;
}
else
ret_var = gimple_return_retval (as_a <greturn *> (stmt));
/* We may proceed if there either is no return value, or the return value
is identical to the call's return or if the return decl is an empty type
@@ -1153,24 +1171,32 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
gsi_prev (&gsi2);
}
/* Number of executions of function has reduced by the tailcall. */
e = single_succ_edge (gsi_bb (t->call_gsi));
if (gimple_call_noreturn_p (as_a <gcall *> (stmt)))
{
e = make_edge (gsi_bb (t->call_gsi), first, EDGE_FALLTHRU);
e->probability = profile_probability::always ();
}
else
{
/* Number of executions of function has reduced by the tailcall. */
e = single_succ_edge (gsi_bb (t->call_gsi));
profile_count count = e->count ();
profile_count count = e->count ();
/* When profile is inconsistent and the recursion edge is more frequent
than number of executions of functions, scale it down, so we do not end
up with 0 executions of entry block. */
if (count >= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (7, 8);
decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), count);
decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), count);
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
decrease_profile (e->dest, count);
/* When profile is inconsistent and the recursion edge is more frequent
than number of executions of functions, scale it down, so we do not
end up with 0 executions of entry block. */
if (count >= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (7, 8);
decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), count);
decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), count);
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
decrease_profile (e->dest, count);
/* Replace the call by a jump to the start of function. */
e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
first);
/* Replace the call by a jump to the start of function. */
e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
first);
}
gcc_assert (e);
PENDING_STMT (e) = NULL;
@@ -1295,6 +1321,18 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
find_tail_calls (e->src, &tailcalls, only_musttail, opt_tailcalls,
diag_musttail);
}
if (cfun->has_musttail && diag_musttail)
{
basic_block bb;
FOR_EACH_BB_FN (bb, cfun)
if (EDGE_COUNT (bb->succs) == 0)
if (gimple *c = last_nondebug_stmt (bb))
if (is_gimple_call (c)
&& gimple_call_must_tail_p (as_a <gcall *> (c))
&& gimple_call_noreturn_p (as_a <gcall *> (c)))
find_tail_calls (bb, &tailcalls, only_musttail, opt_tailcalls,
diag_musttail);
}
if (live_vars)
{