poly_int: load_register_parameters

This patch makes load_register_parameters cope with polynomial sizes.
The requirement here is that any register parameters with non-constant
sizes must either have a specific mode (e.g. a variable-length vector
mode) or be represented with a PARALLEL.  This is in practice
already a requirement for parameters passed in vector registers,
since the default behaviour of splitting parameters into words doesn't
make sense for them.
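
As background for the ordered_p/maybe_lt checks added below: a poly_int size is a
small polynomial in a runtime indeterminate (for SVE, the number of extra vector
chunks), so a size and a constant need not compare the same way for every runtime
value.  The stand-alone sketch below models this with a single indeterminate; it is
an illustration only -- the names poly_size, maybe_lt_const and ordered_wrt_const
are invented here, while GCC's real interface is the far more general poly-int.h
(maybe_lt, known_eq, ordered_p).

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* A size of the form c0 + c1 * x, where x is a runtime value known only
       to be a non-negative integer and c1 >= 0 (an assumption of this model).  */
    struct poly_size { int64_t c0, c1; };

    /* Might A be less than the constant B for some runtime x?
       The minimum of c0 + c1 * x over x >= 0 is c0.  */
    static bool maybe_lt_const (struct poly_size a, int64_t b)
    {
      return a.c0 < b;
    }

    /* Do A and the constant B compare the same way for every runtime x?
       Sizes that straddle B (smaller at x == 0, larger for large x) are
       not ordered; the patched code asserts this cannot happen.  */
    static bool ordered_wrt_const (struct poly_size a, int64_t b)
    {
      bool always_ge = a.c0 >= b;
      bool always_le = a.c1 == 0 && a.c0 <= b;
      return always_ge || always_le;
    }

    int main (void)
    {
      struct poly_size vec = { 16, 16 };  /* e.g. 16 + 16x bytes, SVE-like */
      struct poly_size odd = { 4, 16 };   /* 4 + 16x straddles 8 */

      printf ("%d %d\n", maybe_lt_const (vec, 8), ordered_wrt_const (vec, 8));
      /* prints "0 1": never smaller than an 8-byte word, so safely ordered */
      printf ("%d %d\n", maybe_lt_const (odd, 8), ordered_wrt_const (odd, 8));
      /* prints "1 0": unordered with respect to the word size */
      return 0;
    }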

2018-01-03  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* calls.c (load_register_parameters): Cope with polynomial
	mode sizes.  Require a constant size for BLKmode parameters
	that aren't described by a PARALLEL.  If BLOCK_REG_PADDING
	forces a parameter to be padded at the lsb end in order to
	fill a complete number of words, require the parameter size
	to be ordered wrt UNITS_PER_WORD.

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r256161

gcc/ChangeLog

@@ -1,3 +1,14 @@
+2018-01-03  Richard Sandiford  <richard.sandiford@linaro.org>
+	    Alan Hayward  <alan.hayward@arm.com>
+	    David Sherwood  <david.sherwood@arm.com>
+
+	* calls.c (load_register_parameters): Cope with polynomial
+	mode sizes.  Require a constant size for BLKmode parameters
+	that aren't described by a PARALLEL.  If BLOCK_REG_PADDING
+	forces a parameter to be padded at the lsb end in order to
+	fill a complete number of words, require the parameter size
+	to be ordered wrt UNITS_PER_WORD.
+
 2018-01-03  Richard Sandiford  <richard.sandiford@linaro.org>
 	    Alan Hayward  <alan.hayward@arm.com>
 	    David Sherwood  <david.sherwood@arm.com>
gcc/calls.c

@@ -2709,7 +2709,8 @@ load_register_parameters (struct arg_data *args, int num_actuals,
     {
       int partial = args[i].partial;
       int nregs;
-      int size = 0;
+      poly_int64 size = 0;
+      HOST_WIDE_INT const_size = 0;
       rtx_insn *before_arg = get_last_insn ();
       /* Set non-negative if we must move a word at a time, even if
	 just one word (e.g, partial == 4 && mode == DFmode).  Set
@@ -2725,8 +2726,12 @@ load_register_parameters (struct arg_data *args, int num_actuals,
	}
       else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
	{
-	  size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
-	  nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
+	  /* Variable-sized parameters should be described by a
+	     PARALLEL instead.  */
+	  const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
+	  gcc_assert (const_size >= 0);
+	  nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
+	  size = const_size;
	}
       else
	size = GET_MODE_SIZE (args[i].mode);
@@ -2748,21 +2753,27 @@ load_register_parameters (struct arg_data *args, int num_actuals,
	  /* Handle case where we have a value that needs shifting
	     up to the msb.  eg. a QImode value and we're padding
	     upward on a BYTES_BIG_ENDIAN machine.  */
-	  if (size < UNITS_PER_WORD
-	      && (args[i].locate.where_pad
-		  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
+	  if (args[i].locate.where_pad
+	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
	    {
-	      rtx x;
-	      int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
-
-	      /* Assigning REG here rather than a temp makes CALL_FUSAGE
-		 report the whole reg as used.  Strictly speaking, the
-		 call only uses SIZE bytes at the msb end, but it doesn't
-		 seem worth generating rtl to say that.  */
-	      reg = gen_rtx_REG (word_mode, REGNO (reg));
-	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
-	      if (x != reg)
-		emit_move_insn (reg, x);
+	      gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
+	      if (maybe_lt (size, UNITS_PER_WORD))
+		{
+		  rtx x;
+		  poly_int64 shift
+		    = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
+
+		  /* Assigning REG here rather than a temp makes
+		     CALL_FUSAGE report the whole reg as used.
+		     Strictly speaking, the call only uses SIZE
+		     bytes at the msb end, but it doesn't seem worth
+		     generating rtl to say that.  */
+		  reg = gen_rtx_REG (word_mode, REGNO (reg));
+		  x = expand_shift (LSHIFT_EXPR, word_mode,
+				    reg, shift, reg, 1);
+		  if (x != reg)
+		    emit_move_insn (reg, x);
+		}
	    }
 #endif
	}
@@ -2777,17 +2788,20 @@ load_register_parameters (struct arg_data *args, int num_actuals,

	  else if (partial == 0 || args[i].pass_on_stack)
	    {
+	      /* SIZE and CONST_SIZE are 0 for partial arguments and
+		 the size of a BLKmode type otherwise.  */
+	      gcc_checking_assert (known_eq (size, const_size));
	      rtx mem = validize_mem (copy_rtx (args[i].value));

	      /* Check for overlap with already clobbered argument area,
		 providing that this has non-zero size.  */
	      if (is_sibcall
-		  && size != 0
+		  && const_size != 0
		  && (mem_might_overlap_already_clobbered_arg_p
-		      (XEXP (args[i].value, 0), size)))
+		      (XEXP (args[i].value, 0), const_size)))
		*sibcall_failure = 1;

-	      if (size % UNITS_PER_WORD == 0
+	      if (const_size % UNITS_PER_WORD == 0
		  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	      else
@@ -2797,7 +2811,7 @@ load_register_parameters (struct arg_data *args, int num_actuals,
					     args[i].mode);
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
		  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
-		  unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
+		  unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
		  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
					     word_mode, word_mode, false,
					     NULL);
@@ -2809,7 +2823,7 @@ load_register_parameters (struct arg_data *args, int num_actuals,
		}

	      /* Handle a BLKmode that needs shifting.  */
-	      if (nregs == 1 && size < UNITS_PER_WORD
+	      if (nregs == 1 && const_size < UNITS_PER_WORD
 #ifdef BLOCK_REG_PADDING
		  && args[i].locate.where_pad == PAD_DOWNWARD
 #else
@@ -2818,7 +2832,7 @@ load_register_parameters (struct arg_data *args, int num_actuals,
		  )
		{
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
-		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
+		  int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
		  enum tree_code dir = (BYTES_BIG_ENDIAN
					? RSHIFT_EXPR : LSHIFT_EXPR);
		  rtx x;
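
For concreteness, the BLKmode bookkeeping in the later hunks can be checked by
hand.  Below is a minimal sketch, assuming a 64-bit target (UNITS_PER_WORD == 8)
and a 3-byte argument; the arithmetic mirrors the nregs and shift expressions in
the diff above, but this is stand-alone illustration code, not GCC code.

    #include <stdio.h>

    /* Assumed target parameters: a 64-bit target with 8-bit bytes.  */
    #define UNITS_PER_WORD 8
    #define BITS_PER_UNIT 8

    int main (void)
    {
      int const_size = 3;  /* a 3-byte BLKmode argument */

      /* Words needed, rounding up, as in the BLKmode branch above.  */
      int nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

      /* Bits to shift when the value has to sit at one end of its word,
         as in the padding code above.  */
      int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;

      printf ("nregs = %d, shift = %d\n", nregs, shift);  /* nregs = 1, shift = 40 */
      return 0;
    }

With a constant size this is plain integer arithmetic, which is why the patch only
needs poly_int handling on the paths where the size can still be non-constant.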