lower-bitint: Encode address space qualifiers in VIEW_CONVERT_EXPRs [PR113736]

As discussed in the PR, e.g. build_fold_addr_expr needs TYPE_ADDR_SPACE
on the outermost reference rather than just on the base, so the
following patch makes sure to propagate the address space from
the accessed var to the MEM_REFs and/or VIEW_CONVERT_EXPRs used to
access it.
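
For reference, a rough sketch of why the qualifier has to be on the outermost
reference (the helper name below is made up for illustration, and its body only
approximates gcc/tree.cc): build_fold_addr_expr takes the pointer type from the
type of the reference it is handed, and build_pointer_type selects the pointer
mode from that type's address space:

/* Hypothetical helper approximating what build_pointer_type does: the
   pointed-to type's address space picks the pointer mode, so a reference
   whose type is in the generic address space yields a generic pointer even
   when its base is __seg_gs/__seg_fs qualified.  */
static tree
sketch_pointer_type (tree to_type)
{
  addr_space_t as = TYPE_ADDR_SPACE (to_type);
  machine_mode pmode = targetm.addr_space.pointer_mode (as);
  return build_pointer_type_for_mode (to_type, pmode, false);
}

So the limb accesses themselves, not just the base decl, need to carry the
address space, which the hunks below do by qualifying the limb type.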

2024-02-06  Jakub Jelinek  <jakub@redhat.com>

	PR tree-optimization/113736
	* gimple-lower-bitint.cc (bitint_large_huge::limb_access): Use
	var's address space for MEM_REF or VIEW_CONVERT_EXPRs.

	* gcc.dg/bitint-86.c: New test.

--- a/gcc/gimple-lower-bitint.cc
+++ b/gcc/gimple-lower-bitint.cc
@@ -601,12 +601,17 @@ bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
 {
   tree atype = (tree_fits_uhwi_p (idx)
                 ? limb_access_type (type, idx) : m_limb_type);
+  tree ltype = m_limb_type;
+  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
+  if (as != TYPE_ADDR_SPACE (ltype))
+    ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
+                                  | ENCODE_QUAL_ADDR_SPACE (as));
   tree ret;
   if (DECL_P (var) && tree_fits_uhwi_p (idx))
     {
       tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
       unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
-      ret = build2 (MEM_REF, m_limb_type,
+      ret = build2 (MEM_REF, ltype,
                     build_fold_addr_expr (var),
                     build_int_cst (ptype, off));
       TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
@@ -615,7 +620,7 @@ bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
   else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
     {
       ret
-        = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
+        = build2 (MEM_REF, ltype, TREE_OPERAND (var, 0),
                   size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
                               build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
                                              tree_to_uhwi (idx)
@@ -633,10 +638,10 @@ bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
         {
           unsigned HOST_WIDE_INT nelts
             = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
-          tree atype = build_array_type_nelts (m_limb_type, nelts);
+          tree atype = build_array_type_nelts (ltype, nelts);
           var = build1 (VIEW_CONVERT_EXPR, atype, var);
         }
-      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
+      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
     }
   if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
     {

--- /dev/null
+++ b/gcc/testsuite/gcc.dg/bitint-86.c
@@ -0,0 +1,40 @@
+/* PR tree-optimization/113736 */
+/* { dg-do compile { target bitint } } */
+/* { dg-options "-O2 -std=gnu23 -w" } */
+
+#if __BITINT_MAXWIDTH__ >= 710
+struct S { _BitInt(710) a; };
+struct T { struct S b[4]; };
+#ifdef __x86_64__
+#define SEG __seg_gs
+#elif defined __i386__
+#define SEG __seg_fs
+#else
+#define SEG
+#endif
+
+void
+foo (SEG struct T *p)
+{
+  struct S s;
+  p->b[0] = s;
+}
+
+void
+bar (SEG struct T *p, _BitInt(710) x, int y, double z)
+{
+  p->b[0].a = x + 42;
+  p->b[1].a = x << y;
+  p->b[2].a = x >> y;
+  p->b[3].a = z;
+}
+
+int
+baz (SEG struct T *p, _BitInt(710) x, _BitInt(710) y)
+{
+  return __builtin_add_overflow (x, y, &p->b[1].a);
+}
+
+#else
+int i;
+#endif