[Patch ARM] Unified assembler in ARM state.

gcc/ChangeLog:

2015-11-06  Ramana Radhakrishnan  <ramana.radhakrishnan@arm.com>

	* config/arm/arm-ldmstm.ml: Rewrite to generate unified asm templates.
	* config/arm/arm.c (arm_asm_trampoline_template): Make unified asm safe.
	(arm_output_multireg_pop): Likewise.
	(output_move_double):  Likewise.
	(output_move_quad):  Likewise.
	(output_return_instruction): Likewise.
	(arm_print_operand): Remove support for %( and %. print modifiers.
	(arm_output_shift): Make unified asm.
	(arm_declare_function_name): Likewise.
	* config/arm/arm.h (TARGET_UNIFIED_ASM): Delete.
	(ASM_APP_OFF): Adjust.
	(ASM_OUTPUT_REG_PUSH): Undo special casing for TARGET_ARM.
	(ASM_OUTPUT_REG_POP): Likewise.
	* config/arm/arm.md: Adjust uses of %., %( and %).
	* config/arm/sync.md: Likewise.
	* config/arm/thumb2.md: Likewise.
	* config/arm/ldmstm.md: Regenerate.
	* config/arm/arm.opt (masm-unified-syntax): Do not special case Thumb.
	* doc/invoke.texi (masm-unified-syntax): Update documentation.

gcc/testsuite/ChangeLog:

2015-11-06  Ramana Radhakrishnan  <ramana.radhakrishnan@arm.com>

	* gcc.target/arm/combine-movs.c: Adjust for unified asm.
	* gcc.target/arm/interrupt-1.c: Likewise.
	* gcc.target/arm/interrupt-2.c: Likewise.
	* gcc.target/arm/unaligned-memcpy-4.c: Likewise.

From-SVN: r229875
commit bae4ce0ff3
parent ee015909ee
Author: Ramana Radhakrishnan  2015-11-06 18:43:15 +00:00
Committed by: Ramana Radhakrishnan
15 changed files with 269 additions and 285 deletions
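
The whole patch is a mechanical move from "divided" to unified (UAL) ARM assembly syntax for compiler-generated code. As a hedged illustration only (none of this is in the commit itself; the condition "eq" is an arbitrary example), the small C table below pairs a few old divided-syntax spellings with the unified spellings the rewritten templates now produce, which is what the removal of the %(, %) and %. print modifiers in arm_print_operand is about:

/* Illustrative only: divided vs. unified spellings of the same
   instructions.  In divided syntax the condition code sits between the
   base mnemonic and the size/"s" suffix; unified syntax puts the suffix
   first and the condition last, so the templates can hard-code the
   suffix and rely on plain %? for the condition.  */
static const char *const divided_vs_unified[][2] = {
  /* divided (old output)        unified (new output) */
  { "addeqs  r0, r1, r2",        "addseq  r0, r1, r2"   },  /* add%.     -> adds%?   */
  { "ldreqh  r0, [r1]",          "ldrheq  r0, [r1]"     },  /* ldr%(h%)  -> ldrh%?   */
  { "ldmeqia r1, {r2, r3}",      "ldmiaeq r1, {r2, r3}" },  /* ldm%(ia%) -> ldmia%?  */
};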

gcc/ChangeLog

@ -1,3 +1,25 @@
2015-11-06 Ramana Radhakrishnan <ramana.radhakrishnan@arm.com>
* config/arm/arm-ldmstm.ml: Rewrite to generate unified asm templates.
* config/arm/arm.c (arm_asm_trampoline_template): Make unified asm safe.
(arm_output_multireg_pop): Likewise.
(output_move_double): Likewise.
(output_move_quad): Likewise.
(output_return_instruction): Likewise.
(arm_print_operand): Remove support for %( and %. print modifiers.
(arm_output_shift): Make unified asm.
(arm_declare_function_name): Likewise.
* config/arm/arm.h (TARGET_UNIFIED_ASM): Delete.
(ASM_APP_OFF): Adjust.
(ASM_OUTPUT_REG_PUSH): Undo special casing for TARGET_ARM.
(ASM_OUTPUT_REG_POP): Likewise.
* config/arm/arm.md: Adjust uses of %., %(, %)
* config/arm/sync.md: Likewise.
* config/arm/thumb2.md: Likewise.
* config/arm/ldmstm.md: Regenerate.
* config/arm/arm.opt (masm-unified-syntax): Do not special case Thumb.
* doc/invoke.texi (masm-unified-syntax): Update documentation.
2015-11-06 David Malcolm <dmalcolm@redhat.com>
* input.c (dump_line_table_statistics): Dump stats on adhoc table.

gcc/config/arm/arm-ldmstm.ml

@ -33,9 +33,20 @@ type amode = IA | IB | DA | DB
type optype = IN | OUT | INOUT
let rec string_of_addrmode addrmode =
let rec string_of_addrmode addrmode thumb update =
if thumb || update
then
match addrmode with
IA -> "ia" | IB -> "ib" | DA -> "da" | DB -> "db"
IA -> "ia"
| IB -> "ib"
| DA -> "da"
| DB -> "db"
else
match addrmode with
IA -> ""
| IB -> "ib"
| DA -> "da"
| DB -> "db"
let rec initial_offset addrmode nregs =
match addrmode with
@ -160,7 +171,7 @@ let target addrmode thumb =
| _, _ -> raise (InvalidAddrMode "ERROR: Invalid Addressing mode for Thumb1.")
let write_pattern_1 name ls addrmode nregs write_set_fn update thumb =
let astr = string_of_addrmode addrmode in
let astr = string_of_addrmode addrmode thumb update in
Printf.printf "(define_insn \"*%s%s%d_%s%s\"\n"
(if thumb then "thumb_" else "") name nregs astr
(if update then "_update" else "");
@ -180,8 +191,10 @@ let write_pattern_1 name ls addrmode nregs write_set_fn update thumb =
Printf.printf ")]\n \"%s && XVECLEN (operands[0], 0) == %d\"\n"
(target addrmode thumb)
(if update then nregs + 1 else nregs);
Printf.printf " \"%s%%(%s%%)\\t%%%d%s, {"
name astr (nregs + 1) (if update then "!" else "");
if thumb then
Printf.printf " \"%s%s\\t%%%d%s, {" name astr (nregs + 1) (if update then "!" else "")
else
Printf.printf " \"%s%s%%?\\t%%%d%s, {" name astr (nregs + 1) (if update then "!" else "");
for n = 1 to nregs; do
Printf.printf "%%%d%s" n (if n < nregs then ", " else "")
done;
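
The generator change above reduces to one rule: Thumb forms and writeback ("_update") forms keep an explicit ia/ib/da/db suffix, while a plain ARM-state "ia" collapses to the unified default of no suffix at all, which is also why the regenerated ldmstm.md patterns later in this commit are named "*ldm4_", "*stm2_" and so on rather than "*ldm4_ia". A minimal C sketch of that rule (the function name is mine, not the generator's):

#include <string.h>

/* Sketch of the suffix selection string_of_addrmode now performs.  */
static const char *
multi_suffix (int thumb, int update, const char *addrmode)
{
  if (!thumb && !update && strcmp (addrmode, "ia") == 0)
    return "";          /* unified default: "ldm r0, {...}", not "ldmia r0, {...}" */
  return addrmode;      /* "ia", "ib", "da", "db" kept explicit */
}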

gcc/config/arm/arm.c

@ -913,7 +913,7 @@ int arm_regs_in_sequence[] =
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
};
#define ARM_LSL_NAME (TARGET_UNIFIED_ASM ? "lsl" : "asl")
#define ARM_LSL_NAME "lsl"
#define streq(string1, string2) (strcmp (string1, string2) == 0)
#define THUMB2_WORK_REGS (0xff & ~( (1 << THUMB_HARD_FRAME_POINTER_REGNUM) \
@ -3566,10 +3566,7 @@ arm_warn_func_return (tree decl)
static void
arm_asm_trampoline_template (FILE *f)
{
if (TARGET_UNIFIED_ASM)
fprintf (f, "\t.syntax unified\n");
else
fprintf (f, "\t.syntax divided\n");
fprintf (f, "\t.syntax unified\n");
if (TARGET_ARM)
{
@ -17637,10 +17634,8 @@ arm_output_multireg_pop (rtx *operands, bool return_pc, rtx cond, bool reverse,
}
conditional = reverse ? "%?%D0" : "%?%d0";
if ((regno_base == SP_REGNUM) && TARGET_THUMB)
if ((regno_base == SP_REGNUM) && update)
{
/* Output pop (not stmfd) because it has a shorter encoding. */
gcc_assert (update);
sprintf (pattern, "pop%s\t{", conditional);
}
else
@ -17648,11 +17643,14 @@ arm_output_multireg_pop (rtx *operands, bool return_pc, rtx cond, bool reverse,
/* Output ldmfd when the base register is SP, otherwise output ldmia.
It's just a convention, their semantics are identical. */
if (regno_base == SP_REGNUM)
sprintf (pattern, "ldm%sfd\t", conditional);
else if (TARGET_UNIFIED_ASM)
sprintf (pattern, "ldmia%s\t", conditional);
/* update is never true here, hence there is no need to handle
pop here. */
sprintf (pattern, "ldmfd%s", conditional);
if (update)
sprintf (pattern, "ldmia%s\t", conditional);
else
sprintf (pattern, "ldm%sia\t", conditional);
sprintf (pattern, "ldm%s\t", conditional);
strcat (pattern, reg_names[regno_base]);
if (update)
@ -17992,25 +17990,25 @@ output_move_double (rtx *operands, bool emit, int *count)
{
if (TARGET_LDRD
&& !(fix_cm3_ldrd && reg0 == REGNO(XEXP (operands[1], 0))))
output_asm_insn ("ldr%(d%)\t%0, [%m1]", operands);
output_asm_insn ("ldrd%?\t%0, [%m1]", operands);
else
output_asm_insn ("ldm%(ia%)\t%m1, %M0", operands);
output_asm_insn ("ldmia%?\t%m1, %M0", operands);
}
break;
case PRE_INC:
gcc_assert (TARGET_LDRD);
if (emit)
output_asm_insn ("ldr%(d%)\t%0, [%m1, #8]!", operands);
output_asm_insn ("ldrd%?\t%0, [%m1, #8]!", operands);
break;
case PRE_DEC:
if (emit)
{
if (TARGET_LDRD)
output_asm_insn ("ldr%(d%)\t%0, [%m1, #-8]!", operands);
output_asm_insn ("ldrd%?\t%0, [%m1, #-8]!", operands);
else
output_asm_insn ("ldm%(db%)\t%m1!, %M0", operands);
output_asm_insn ("ldmdb%?\t%m1!, %M0", operands);
}
break;
@ -18018,16 +18016,16 @@ output_move_double (rtx *operands, bool emit, int *count)
if (emit)
{
if (TARGET_LDRD)
output_asm_insn ("ldr%(d%)\t%0, [%m1], #8", operands);
output_asm_insn ("ldrd%?\t%0, [%m1], #8", operands);
else
output_asm_insn ("ldm%(ia%)\t%m1!, %M0", operands);
output_asm_insn ("ldmia%?\t%m1!, %M0", operands);
}
break;
case POST_DEC:
gcc_assert (TARGET_LDRD);
if (emit)
output_asm_insn ("ldr%(d%)\t%0, [%m1], #-8", operands);
output_asm_insn ("ldrd%?\t%0, [%m1], #-8", operands);
break;
case PRE_MODIFY:
@ -18048,7 +18046,7 @@ output_move_double (rtx *operands, bool emit, int *count)
if (emit)
{
output_asm_insn ("add%?\t%1, %1, %2", otherops);
output_asm_insn ("ldr%(d%)\t%0, [%1] @split", otherops);
output_asm_insn ("ldrd%?\t%0, [%1] @split", otherops);
}
if (count)
*count = 2;
@ -18064,7 +18062,7 @@ output_move_double (rtx *operands, bool emit, int *count)
&& INTVAL (otherops[2]) < 256))
{
if (emit)
output_asm_insn ("ldr%(d%)\t%0, [%1, %2]!", otherops);
output_asm_insn ("ldrd%?\t%0, [%1, %2]!", otherops);
}
else
{
@ -18090,7 +18088,7 @@ output_move_double (rtx *operands, bool emit, int *count)
&& INTVAL (otherops[2]) < 256))
{
if (emit)
output_asm_insn ("ldr%(d%)\t%0, [%1], %2", otherops);
output_asm_insn ("ldrd%?\t%0, [%1], %2", otherops);
}
else
{
@ -18119,9 +18117,9 @@ output_move_double (rtx *operands, bool emit, int *count)
if (emit)
{
if (TARGET_LDRD)
output_asm_insn ("ldr%(d%)\t%0, [%1]", operands);
output_asm_insn ("ldrd%?\t%0, [%1]", operands);
else
output_asm_insn ("ldm%(ia%)\t%1, %M0", operands);
output_asm_insn ("ldmia%?\t%1, %M0", operands);
}
if (count)
@ -18145,19 +18143,19 @@ output_move_double (rtx *operands, bool emit, int *count)
{
case -8:
if (emit)
output_asm_insn ("ldm%(db%)\t%1, %M0", otherops);
output_asm_insn ("ldmdb%?\t%1, %M0", otherops);
return "";
case -4:
if (TARGET_THUMB2)
break;
if (emit)
output_asm_insn ("ldm%(da%)\t%1, %M0", otherops);
output_asm_insn ("ldmda%?\t%1, %M0", otherops);
return "";
case 4:
if (TARGET_THUMB2)
break;
if (emit)
output_asm_insn ("ldm%(ib%)\t%1, %M0", otherops);
output_asm_insn ("ldmib%?\t%1, %M0", otherops);
return "";
}
}
@ -18185,7 +18183,7 @@ output_move_double (rtx *operands, bool emit, int *count)
if (emit)
{
output_asm_insn ("add%?\t%0, %1, %2", otherops);
output_asm_insn ("ldr%(d%)\t%0, [%1]", operands);
output_asm_insn ("ldrd%?\t%0, [%1]", operands);
}
if (count)
*count = 2;
@ -18194,7 +18192,7 @@ output_move_double (rtx *operands, bool emit, int *count)
{
otherops[0] = operands[0];
if (emit)
output_asm_insn ("ldr%(d%)\t%0, [%1, %2]", otherops);
output_asm_insn ("ldrd%?\t%0, [%1, %2]", otherops);
}
return "";
}
@ -18225,9 +18223,9 @@ output_move_double (rtx *operands, bool emit, int *count)
*count = 2;
if (TARGET_LDRD)
return "ldr%(d%)\t%0, [%1]";
return "ldrd%?\t%0, [%1]";
return "ldm%(ia%)\t%1, %M0";
return "ldmia%?\t%1, %M0";
}
else
{
@ -18270,25 +18268,25 @@ output_move_double (rtx *operands, bool emit, int *count)
if (emit)
{
if (TARGET_LDRD)
output_asm_insn ("str%(d%)\t%1, [%m0]", operands);
output_asm_insn ("strd%?\t%1, [%m0]", operands);
else
output_asm_insn ("stm%(ia%)\t%m0, %M1", operands);
output_asm_insn ("stm%?\t%m0, %M1", operands);
}
break;
case PRE_INC:
gcc_assert (TARGET_LDRD);
if (emit)
output_asm_insn ("str%(d%)\t%1, [%m0, #8]!", operands);
output_asm_insn ("strd%?\t%1, [%m0, #8]!", operands);
break;
case PRE_DEC:
if (emit)
{
if (TARGET_LDRD)
output_asm_insn ("str%(d%)\t%1, [%m0, #-8]!", operands);
output_asm_insn ("strd%?\t%1, [%m0, #-8]!", operands);
else
output_asm_insn ("stm%(db%)\t%m0!, %M1", operands);
output_asm_insn ("stmdb%?\t%m0!, %M1", operands);
}
break;
@ -18296,16 +18294,16 @@ output_move_double (rtx *operands, bool emit, int *count)
if (emit)
{
if (TARGET_LDRD)
output_asm_insn ("str%(d%)\t%1, [%m0], #8", operands);
output_asm_insn ("strd%?\t%1, [%m0], #8", operands);
else
output_asm_insn ("stm%(ia%)\t%m0!, %M1", operands);
output_asm_insn ("stm%?\t%m0!, %M1", operands);
}
break;
case POST_DEC:
gcc_assert (TARGET_LDRD);
if (emit)
output_asm_insn ("str%(d%)\t%1, [%m0], #-8", operands);
output_asm_insn ("strd%?\t%1, [%m0], #-8", operands);
break;
case PRE_MODIFY:
@ -18345,12 +18343,12 @@ output_move_double (rtx *operands, bool emit, int *count)
else if (GET_CODE (XEXP (operands[0], 0)) == PRE_MODIFY)
{
if (emit)
output_asm_insn ("str%(d%)\t%0, [%1, %2]!", otherops);
output_asm_insn ("strd%?\t%0, [%1, %2]!", otherops);
}
else
{
if (emit)
output_asm_insn ("str%(d%)\t%0, [%1], %2", otherops);
output_asm_insn ("strd%?\t%0, [%1], %2", otherops);
}
break;
@ -18362,21 +18360,21 @@ output_move_double (rtx *operands, bool emit, int *count)
{
case -8:
if (emit)
output_asm_insn ("stm%(db%)\t%m0, %M1", operands);
output_asm_insn ("stmdb%?\t%m0, %M1", operands);
return "";
case -4:
if (TARGET_THUMB2)
break;
if (emit)
output_asm_insn ("stm%(da%)\t%m0, %M1", operands);
output_asm_insn ("stmda%?\t%m0, %M1", operands);
return "";
case 4:
if (TARGET_THUMB2)
break;
if (emit)
output_asm_insn ("stm%(ib%)\t%m0, %M1", operands);
output_asm_insn ("stmib%?\t%m0, %M1", operands);
return "";
}
}
@ -18390,7 +18388,7 @@ output_move_double (rtx *operands, bool emit, int *count)
otherops[0] = operands[1];
otherops[1] = XEXP (XEXP (operands[0], 0), 0);
if (emit)
output_asm_insn ("str%(d%)\t%0, [%1, %2]", otherops);
output_asm_insn ("strd%?\t%0, [%1, %2]", otherops);
return "";
}
/* Fall through */
@ -18426,13 +18424,13 @@ output_move_quad (rtx *operands)
switch (GET_CODE (XEXP (operands[1], 0)))
{
case REG:
output_asm_insn ("ldm%(ia%)\t%m1, %M0", operands);
output_asm_insn ("ldmia%?\t%m1, %M0", operands);
break;
case LABEL_REF:
case CONST:
output_asm_insn ("adr%?\t%0, %1", operands);
output_asm_insn ("ldm%(ia%)\t%0, %M0", operands);
output_asm_insn ("ldmia%?\t%0, %M0", operands);
break;
default:
@ -18476,7 +18474,7 @@ output_move_quad (rtx *operands)
switch (GET_CODE (XEXP (operands[0], 0)))
{
case REG:
output_asm_insn ("stm%(ia%)\t%m0, %M1", operands);
output_asm_insn ("stm%?\t%m0, %M1", operands);
break;
default:
@ -19510,10 +19508,7 @@ output_return_instruction (rtx operand, bool really_return, bool reverse,
gcc_assert (stack_adjust == 0 || stack_adjust == 4);
if (stack_adjust && arm_arch5 && TARGET_ARM)
if (TARGET_UNIFIED_ASM)
sprintf (instr, "ldmib%s\t%%|sp, {", conditional);
else
sprintf (instr, "ldm%sib\t%%|sp, {", conditional);
else
{
/* If we can't use ldmib (SA110 bug),
@ -19521,17 +19516,11 @@ output_return_instruction (rtx operand, bool really_return, bool reverse,
if (stack_adjust)
live_regs_mask |= 1 << 3;
if (TARGET_UNIFIED_ASM)
sprintf (instr, "ldmfd%s\t%%|sp, {", conditional);
else
sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
sprintf (instr, "ldmfd%s\t%%|sp, {", conditional);
}
}
else
if (TARGET_UNIFIED_ASM)
sprintf (instr, "pop%s\t{", conditional);
else
sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
p = instr + strlen (instr);
@ -21804,37 +21793,17 @@ arm_print_operand (FILE *stream, rtx x, int code)
arm_print_condition (stream);
return;
case '(':
/* Nothing in unified syntax, otherwise the current condition code. */
if (!TARGET_UNIFIED_ASM)
arm_print_condition (stream);
break;
case ')':
/* The current condition code in unified syntax, otherwise nothing. */
if (TARGET_UNIFIED_ASM)
arm_print_condition (stream);
break;
case '.':
/* The current condition code for a condition code setting instruction.
Preceded by 's' in unified syntax, otherwise followed by 's'. */
if (TARGET_UNIFIED_ASM)
{
fputc('s', stream);
arm_print_condition (stream);
}
else
{
arm_print_condition (stream);
fputc('s', stream);
}
fputc('s', stream);
arm_print_condition (stream);
return;
case '!':
/* If the instruction is conditionally executed then print
the current condition code, otherwise print 's'. */
gcc_assert (TARGET_THUMB2 && TARGET_UNIFIED_ASM);
gcc_assert (TARGET_THUMB2);
if (current_insn_predicate)
arm_print_condition (stream);
else
@ -27243,20 +27212,16 @@ arm_output_shift(rtx * operands, int set_flags)
char c;
c = flag_chars[set_flags];
if (TARGET_UNIFIED_ASM)
shift = shift_op(operands[3], &val);
if (shift)
{
shift = shift_op(operands[3], &val);
if (shift)
{
if (val != -1)
operands[2] = GEN_INT(val);
sprintf (pattern, "%s%%%c\t%%0, %%1, %%2", shift, c);
}
else
sprintf (pattern, "mov%%%c\t%%0, %%1", c);
if (val != -1)
operands[2] = GEN_INT(val);
sprintf (pattern, "%s%%%c\t%%0, %%1, %%2", shift, c);
}
else
sprintf (pattern, "mov%%%c\t%%0, %%1%%S3", c);
sprintf (pattern, "mov%%%c\t%%0, %%1", c);
output_asm_insn (pattern, operands);
return "";
}
@ -30005,10 +29970,8 @@ arm_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name),
void
arm_declare_function_name (FILE *stream, const char *name, tree decl)
{
if (TARGET_UNIFIED_ASM)
fprintf (stream, "\t.syntax unified\n");
else
fprintf (stream, "\t.syntax divided\n");
fprintf (stream, "\t.syntax unified\n");
if (TARGET_THUMB)
{
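
A minimal sketch (mine, not the patch) of the mnemonic selection the reworked arm_output_multireg_pop performs, as read from the hunk above:

/* "pop" for SP-relative loads with writeback (shorter encoding, now used
   in ARM state too), "ldmfd" by convention when the base is SP without
   writeback, otherwise a plain unified ldm/ldmia.  */
static const char *
pop_mnemonic (int base_is_sp, int update)
{
  if (base_is_sp && update)
    return "pop";
  if (base_is_sp)
    return "ldmfd";             /* same semantics as ldm; kept by convention */
  return update ? "ldmia" : "ldm";
}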

gcc/config/arm/arm.h

@ -237,10 +237,6 @@ extern void (*arm_lang_output_object_attributes_hook)(void);
&& (arm_disable_literal_pool \
|| (!optimize_size && !current_tune->prefer_constant_pool)))
/* We could use unified syntax for arm mode, but for now we just use it
for thumb mode. */
#define TARGET_UNIFIED_ASM (TARGET_THUMB)
/* Nonzero if this chip provides the DMB instruction. */
#define TARGET_HAVE_DMB (arm_arch6m || arm_arch7)
@ -2023,8 +2019,7 @@ extern int making_const_table;
"\t.syntax divided\n")
#undef ASM_APP_OFF
#define ASM_APP_OFF (TARGET_ARM ? "\t.arm\n\t.syntax divided\n" : \
"\t.thumb\n\t.syntax unified\n")
#define ASM_APP_OFF "\t.syntax unified\n"
/* Output a push or a pop instruction (only used when profiling).
We can't push STATIC_CHAIN_REGNUM (r12) directly with Thumb-1. We know
@ -2035,10 +2030,7 @@ extern int making_const_table;
#define ASM_OUTPUT_REG_PUSH(STREAM, REGNO) \
do \
{ \
if (TARGET_ARM) \
asm_fprintf (STREAM,"\tstmfd\t%r!,{%r}\n", \
STACK_POINTER_REGNUM, REGNO); \
else if (TARGET_THUMB1 \
if (TARGET_THUMB1 \
&& (REGNO) == STATIC_CHAIN_REGNUM) \
{ \
asm_fprintf (STREAM, "\tpush\t{r7}\n"); \
@ -2054,11 +2046,8 @@ extern int making_const_table;
#define ASM_OUTPUT_REG_POP(STREAM, REGNO) \
do \
{ \
if (TARGET_ARM) \
asm_fprintf (STREAM, "\tldmfd\t%r!,{%r}\n", \
STACK_POINTER_REGNUM, REGNO); \
else if (TARGET_THUMB1 \
&& (REGNO) == STATIC_CHAIN_REGNUM) \
if (TARGET_THUMB1 \
&& (REGNO) == STATIC_CHAIN_REGNUM) \
{ \
asm_fprintf (STREAM, "\tpop\t{r7}\n"); \
asm_fprintf (STREAM, "\tmov\t%r, r7\n", REGNO);\

gcc/config/arm/arm.md

@ -622,9 +622,9 @@
(plus:SI (match_dup 1) (match_dup 2)))]
"TARGET_ARM"
"@
add%.\\t%0, %1, %2
sub%.\\t%0, %1, #%n2
add%.\\t%0, %1, %2"
adds%?\\t%0, %1, %2
subs%?\\t%0, %1, #%n2
adds%?\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "alus_imm,alus_imm,alus_sreg")]
)
@ -672,8 +672,8 @@
(match_operand:SI 3 "arm_addimm_operand" "I,L")))]
"TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
"@
add%.\\t%0, %1, %3
sub%.\\t%0, %1, #%n3"
adds%?\\t%0, %1, %3
subs%?\\t%0, %1, #%n3"
[(set_attr "conds" "set")
(set_attr "type" "alus_sreg")]
)
@ -729,9 +729,9 @@
(plus:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
add%.\\t%0, %1, %2
sub%.\\t%0, %1, #%n2
add%.\\t%0, %1, %2"
adds%?\\t%0, %1, %2
subs%?\\t%0, %1, #%n2
adds%?\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "alus_imm,alus_imm,alus_sreg")]
)
@ -746,9 +746,9 @@
(plus:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
add%.\\t%0, %1, %2
add%.\\t%0, %1, %2
sub%.\\t%0, %1, #%n2"
adds%?\\t%0, %1, %2
adds%?\\t%0, %1, %2
subs%?\\t%0, %1, #%n2"
[(set_attr "conds" "set")
(set_attr "type" "alus_imm,alus_imm,alus_sreg")]
)
@ -856,7 +856,7 @@
(LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT"
"adc%.\\t%0, %1, %2"
"adcs%?\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "adcs_reg")]
)
@ -1239,9 +1239,9 @@
(minus:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
sub%.\\t%0, %1, %2
sub%.\\t%0, %1, %2
rsb%.\\t%0, %2, %1"
subs%?\\t%0, %1, %2
subs%?\\t%0, %1, %2
rsbs%?\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
)
@ -1254,9 +1254,9 @@
(minus:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
sub%.\\t%0, %1, %2
sub%.\\t%0, %1, %2
rsb%.\\t%0, %2, %1"
subs%?\\t%0, %1, %2
subs%?\\t%0, %1, %2
rsbs%?\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
)
@ -1335,7 +1335,7 @@
(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
(mult:SI (match_dup 2) (match_dup 1)))]
"TARGET_ARM && !arm_arch6"
"mul%.\\t%0, %2, %1"
"muls%?\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "type" "muls")]
)
@ -1349,7 +1349,7 @@
(set (match_operand:SI 0 "s_register_operand" "=r")
(mult:SI (match_dup 2) (match_dup 1)))]
"TARGET_ARM && arm_arch6 && optimize_size"
"mul%.\\t%0, %2, %1"
"muls%?\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "type" "muls")]
)
@ -1362,7 +1362,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=&r,&r"))]
"TARGET_ARM && !arm_arch6"
"mul%.\\t%0, %2, %1"
"muls%?\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "type" "muls")]
)
@ -1375,7 +1375,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_ARM && arm_arch6 && optimize_size"
"mul%.\\t%0, %2, %1"
"muls%?\\t%0, %2, %1"
[(set_attr "conds" "set")
(set_attr "type" "muls")]
)
@ -1419,7 +1419,7 @@
(plus:SI (mult:SI (match_dup 2) (match_dup 1))
(match_dup 3)))]
"TARGET_ARM && arm_arch6"
"mla%.\\t%0, %2, %1, %3"
"mlas%?\\t%0, %2, %1, %3"
[(set_attr "conds" "set")
(set_attr "type" "mlas")]
)
@ -1436,7 +1436,7 @@
(plus:SI (mult:SI (match_dup 2) (match_dup 1))
(match_dup 3)))]
"TARGET_ARM && arm_arch6 && optimize_size"
"mla%.\\t%0, %2, %1, %3"
"mlas%?\\t%0, %2, %1, %3"
[(set_attr "conds" "set")
(set_attr "type" "mlas")]
)
@ -1451,7 +1451,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
"TARGET_ARM && !arm_arch6"
"mla%.\\t%0, %2, %1, %3"
"mlas%?\\t%0, %2, %1, %3"
[(set_attr "conds" "set")
(set_attr "type" "mlas")]
)
@ -1466,7 +1466,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_ARM && arm_arch6 && optimize_size"
"mla%.\\t%0, %2, %1, %3"
"mlas%?\\t%0, %2, %1, %3"
[(set_attr "conds" "set")
(set_attr "type" "mlas")]
)
@ -2195,9 +2195,9 @@
(and:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
and%.\\t%0, %1, %2
bic%.\\t%0, %1, #%B2
and%.\\t%0, %1, %2"
ands%?\\t%0, %1, %2
bics%?\\t%0, %1, #%B2
ands%?\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "logics_imm,logics_imm,logics_reg")]
)
@ -2212,7 +2212,7 @@
"TARGET_32BIT"
"@
tst%?\\t%0, %1
bic%.\\t%2, %0, #%B1
bics%?\\t%2, %0, #%B1
tst%?\\t%0, %1"
[(set_attr "conds" "set")
(set_attr "type" "logics_imm,logics_imm,logics_reg")]
@ -2796,7 +2796,7 @@
(const_int 0)))
(clobber (match_scratch:SI 4 "=r"))]
"TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
"bic%.%?\\t%4, %3, %1%S0"
"bics%?\\t%4, %3, %1%S0"
[(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")
(set_attr "conds" "set")
@ -2822,7 +2822,7 @@
(match_dup 2)]))
(match_dup 3)))])]
"TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
"bic%.%?\\t%4, %3, %1%S0"
"bics%?\\t%4, %3, %1%S0"
[(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")
(set_attr "conds" "set")
@ -2841,7 +2841,7 @@
(set (match_operand:SI 0 "s_register_operand" "=r")
(and:SI (not:SI (match_dup 2)) (match_dup 1)))]
"TARGET_32BIT"
"bic%.\\t%0, %1, %2"
"bics\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "logics_shift_reg")]
)
@ -2854,7 +2854,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_32BIT"
"bic%.\\t%0, %1, %2"
"bics\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "logics_shift_reg")]
)
@ -3023,7 +3023,7 @@
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(ior:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"orr%.\\t%0, %1, %2"
"orrs%?\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "logics_imm,logics_reg")]
)
@ -3035,7 +3035,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r,r"))]
"TARGET_32BIT"
"orr%.\\t%0, %1, %2"
"orrs%?\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "logics_imm,logics_reg")]
)
@ -3182,7 +3182,7 @@
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(xor:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"eor%.\\t%0, %1, %2"
"eors%?\\t%0, %1, %2"
[(set_attr "conds" "set")
(set_attr "type" "logics_imm,logics_reg")]
)
@ -4066,7 +4066,7 @@
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
"TARGET_32BIT"
"mvn%.\\t%0, %1%S3"
"mvns%?\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
(set_attr "arch" "32,a")
@ -4081,7 +4081,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r,r"))]
"TARGET_32BIT"
"mvn%.\\t%0, %1%S3"
"mvns%?\\t%0, %1%S3"
[(set_attr "conds" "set")
(set_attr "shift" "1")
(set_attr "arch" "32,a")
@ -4271,7 +4271,7 @@
(unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,Uh")]
UNSPEC_UNALIGNED_LOAD)))]
"unaligned_access && TARGET_32BIT"
"ldr%(sh%)\t%0, %1\t@ unaligned"
"ldrsh%?\t%0, %1\t@ unaligned"
[(set_attr "arch" "t2,any")
(set_attr "length" "2,4")
(set_attr "predicable" "yes")
@ -4284,7 +4284,7 @@
(unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
UNSPEC_UNALIGNED_LOAD)))]
"unaligned_access && TARGET_32BIT"
"ldr%(h%)\t%0, %1\t@ unaligned"
"ldrh%?\t%0, %1\t@ unaligned"
[(set_attr "arch" "t2,any")
(set_attr "length" "2,4")
(set_attr "predicable" "yes")
@ -4308,7 +4308,7 @@
(unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
UNSPEC_UNALIGNED_STORE))]
"unaligned_access && TARGET_32BIT"
"str%(h%)\t%1, %0\t@ unaligned"
"strh%?\t%1, %0\t@ unaligned"
[(set_attr "arch" "t2,any")
(set_attr "length" "2,4")
(set_attr "predicable" "yes")
@ -4818,7 +4818,7 @@
(set (match_operand:SI 0 "s_register_operand" "=r")
(not:SI (match_dup 1)))]
"TARGET_32BIT"
"mvn%.\\t%0, %1"
"mvns%?\\t%0, %1"
[(set_attr "conds" "set")
(set_attr "type" "mvn_reg")]
)
@ -4829,7 +4829,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_32BIT"
"mvn%.\\t%0, %1"
"mvns%?\\t%0, %1"
[(set_attr "conds" "set")
(set_attr "type" "mvn_reg")]
)
@ -5060,7 +5060,7 @@
"TARGET_ARM && arm_arch4 && !arm_arch6"
"@
#
ldr%(h%)\\t%0, %1"
ldrh%?\\t%0, %1"
[(set_attr "type" "alu_shift_reg,load_byte")
(set_attr "predicable" "yes")]
)
@ -5071,7 +5071,7 @@
"TARGET_ARM && arm_arch6"
"@
uxth%?\\t%0, %1
ldr%(h%)\\t%0, %1"
ldrh%?\\t%0, %1"
[(set_attr "predicable" "yes")
(set_attr "type" "extend,load_byte")]
)
@ -5130,7 +5130,7 @@
"TARGET_ARM && !arm_arch6"
"@
#
ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift_reg,load_byte")
(set_attr "predicable" "yes")]
@ -5141,8 +5141,8 @@
(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
"TARGET_ARM && arm_arch6"
"@
uxtb%(%)\\t%0, %1
ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
uxtb%?\\t%0, %1
ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "type" "extend,load_byte")
(set_attr "predicable" "yes")]
)
@ -5302,7 +5302,7 @@
"TARGET_ARM && arm_arch4 && !arm_arch6"
"@
#
ldr%(sh%)\\t%0, %1"
ldrsh%?\\t%0, %1"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift_reg,load_byte")
(set_attr "predicable" "yes")]
@ -5315,7 +5315,7 @@
"TARGET_32BIT && arm_arch6"
"@
sxth%?\\t%0, %1
ldr%(sh%)\\t%0, %1"
ldrsh%?\\t%0, %1"
[(set_attr "type" "extend,load_byte")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")]
@ -5358,7 +5358,7 @@
[(set (match_operand:HI 0 "s_register_operand" "=r")
(sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
"TARGET_ARM && arm_arch4"
"ldr%(sb%)\\t%0, %1"
"ldrsb%?\\t%0, %1"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")]
)
@ -5397,7 +5397,7 @@
"TARGET_ARM && arm_arch4 && !arm_arch6"
"@
#
ldr%(sb%)\\t%0, %1"
ldrsb%?\\t%0, %1"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift_reg,load_byte")
(set_attr "predicable" "yes")]
@ -5410,7 +5410,7 @@
"TARGET_ARM && arm_arch6"
"@
sxtb%?\\t%0, %1
ldr%(sb%)\\t%0, %1"
ldrsb%?\\t%0, %1"
[(set_attr "type" "extend,load_byte")
(set_attr "predicable" "yes")]
)
@ -6065,7 +6065,7 @@
"TARGET_32BIT"
"@
cmp%?\\t%0, #0
sub%.\\t%0, %1, #0"
subs%?\\t%0, %1, #0"
[(set_attr "conds" "set")
(set_attr "type" "alus_imm,alus_imm")]
)
@ -6441,8 +6441,8 @@
mov%?\\t%0, %1\\t%@ movhi
mvn%?\\t%0, #%B1\\t%@ movhi
movw%?\\t%0, %L1\\t%@ movhi
str%(h%)\\t%1, %0\\t%@ movhi
ldr%(h%)\\t%0, %1\\t%@ movhi"
strh%?\\t%1, %0\\t%@ movhi
ldrh%?\\t%0, %1\\t%@ movhi"
[(set_attr "predicable" "yes")
(set_attr "pool_range" "*,*,*,*,256")
(set_attr "neg_pool_range" "*,*,*,*,244")
@ -6584,10 +6584,10 @@
mov%?\\t%0, %1
mov%?\\t%0, %1
mvn%?\\t%0, #%B1
ldr%(b%)\\t%0, %1
str%(b%)\\t%1, %0
ldr%(b%)\\t%0, %1
str%(b%)\\t%1, %0"
ldrb%?\\t%0, %1
strb%?\\t%1, %0
ldrb%?\\t%0, %1
strb%?\\t%1, %0"
[(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load1,store1,load1,store1")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "yes,yes,yes,no,no,no,no,no,no")
@ -6627,9 +6627,9 @@
switch (which_alternative)
{
case 0: /* ARM register from memory */
return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
case 1: /* memory from ARM register */
return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
return \"strh%?\\t%1, %0\\t%@ __fp16\";
case 2: /* ARM register from ARM register */
return \"mov%?\\t%0, %1\\t%@ __fp16\";
case 3: /* ARM register from constant */
@ -7141,7 +7141,7 @@
(const_int 0)))
(clobber (match_scratch:SI 1 "=r"))]
"TARGET_32BIT"
"orr%.\\t%1, %Q0, %R0"
"orrs%?\\t%1, %Q0, %R0"
[(set_attr "conds" "set")
(set_attr "type" "logics_reg")]
)
@ -8385,13 +8385,7 @@
(define_insn "nop"
[(const_int 0)]
"TARGET_EITHER"
"*
if (TARGET_UNIFIED_ASM)
return \"nop\";
if (TARGET_ARM)
return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
return \"mov\\tr8, r8\";
"
"nop"
[(set (attr "length")
(if_then_else (eq_attr "is_thumb" "yes")
(const_int 2)
@ -8479,7 +8473,7 @@
(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
(match_dup 2)]))]
"TARGET_32BIT"
"%i1%.\\t%0, %2, %4%S3"
"%i1s%?\\t%0, %2, %4%S3"
[(set_attr "conds" "set")
(set_attr "shift" "4")
(set_attr "arch" "32,a")
@ -8496,7 +8490,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r,r"))]
"TARGET_32BIT"
"%i1%.\\t%0, %2, %4%S3"
"%i1s%?\\t%0, %2, %4%S3"
[(set_attr "conds" "set")
(set_attr "shift" "4")
(set_attr "arch" "32,a")
@ -8527,7 +8521,7 @@
(minus:SI (match_dup 1)
(match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
"TARGET_32BIT"
"sub%.\\t%0, %1, %3%S2"
"subs%?\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
(set_attr "arch" "32,a,a")
@ -8543,7 +8537,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r,r,r"))]
"TARGET_32BIT"
"sub%.\\t%0, %1, %3%S2"
"subs%?\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
(set_attr "arch" "32,a,a")
@ -10247,7 +10241,7 @@
if (val1 == 4 || val2 == 4)
/* Other val must be 8, since we know they are adjacent and neither
is zero. */
output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
{
ldm[0] = ops[0] = operands[4];
@ -10255,9 +10249,9 @@
ops[2] = GEN_INT (val1);
output_add_immediate (ops);
if (val1 < val2)
output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
else
output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
}
else
{
@ -10274,16 +10268,16 @@
else if (val1 != 0)
{
if (val1 < val2)
output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
else
output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
}
else
{
if (val1 < val2)
output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
else
output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
}
output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
return \"\";
@ -10619,9 +10613,7 @@
int i;
char pattern[100];
if (TARGET_ARM)
strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
else if (TARGET_THUMB2)
if (TARGET_32BIT)
strcpy (pattern, \"push%?\\t{%1\");
else
strcpy (pattern, \"push\\t{%1\");
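
For the push_multi pattern just above, the practical effect is that every 32-bit target now shares the predicable unified "push", where ARM state previously spelled the prologue store as "stmfd sp!, {...}"; only Thumb-1 keeps the bare non-predicable form. A one-line sketch (mine) of that selection:

/* Template prefix chosen by the rewritten push_multi output code.  */
static const char *
push_multi_prefix (int target_32bit)
{
  return target_32bit ? "push%?\t{%1" : "push\t{%1";
}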

gcc/config/arm/arm.opt

@ -280,4 +280,4 @@ Assume loading data from flash is slower than fetching instructions.
masm-syntax-unified
Target Report Var(inline_asm_unified) Init(0) Save
Assume unified syntax for Thumb inline assembly code.
Assume unified syntax for inline assembly code.

gcc/config/arm/ldmstm.md

@ -21,7 +21,7 @@
see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
<http://www.gnu.org/licenses/>. */
(define_insn "*ldm4_ia"
(define_insn "*ldm4_"
[(match_parallel 0 "load_multiple_operation"
[(set (match_operand:SI 1 "arm_hard_general_register_operand" "")
(mem:SI (match_operand:SI 5 "s_register_operand" "rk")))
@ -35,7 +35,7 @@
(mem:SI (plus:SI (match_dup 5)
(const_int 12))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"ldm%(ia%)\t%5, {%1, %2, %3, %4}"
"ldm%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -54,7 +54,7 @@
(mem:SI (plus:SI (match_dup 5)
(const_int 12))))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 4"
"ldm%(ia%)\t%5, {%1, %2, %3, %4}"
"ldmia\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "load4")])
(define_insn "*ldm4_ia_update"
@ -73,7 +73,7 @@
(mem:SI (plus:SI (match_dup 5)
(const_int 12))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
"ldm%(ia%)\t%5!, {%1, %2, %3, %4}"
"ldmia%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -94,10 +94,10 @@
(mem:SI (plus:SI (match_dup 5)
(const_int 12))))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
"ldm%(ia%)\t%5!, {%1, %2, %3, %4}"
"ldmia\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "load4")])
(define_insn "*stm4_ia"
(define_insn "*stm4_"
[(match_parallel 0 "store_multiple_operation"
[(set (mem:SI (match_operand:SI 5 "s_register_operand" "rk"))
(match_operand:SI 1 "arm_hard_general_register_operand" ""))
@ -108,7 +108,7 @@
(set (mem:SI (plus:SI (match_dup 5) (const_int 12)))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"stm%(ia%)\t%5, {%1, %2, %3, %4}"
"stm%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -126,7 +126,7 @@
(set (mem:SI (plus:SI (match_dup 5) (const_int 12)))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
"stm%(ia%)\t%5!, {%1, %2, %3, %4}"
"stmia%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -144,7 +144,7 @@
(set (mem:SI (plus:SI (match_dup 5) (const_int 12)))
(match_operand:SI 4 "low_register_operand" ""))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
"stm%(ia%)\t%5!, {%1, %2, %3, %4}"
"stmia\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "store4")])
(define_insn "*ldm4_ib"
@ -162,7 +162,7 @@
(mem:SI (plus:SI (match_dup 5)
(const_int 16))))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"ldm%(ib%)\t%5, {%1, %2, %3, %4}"
"ldmib%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")])
@ -183,7 +183,7 @@
(mem:SI (plus:SI (match_dup 5)
(const_int 16))))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 5"
"ldm%(ib%)\t%5!, {%1, %2, %3, %4}"
"ldmib%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")])
@ -198,7 +198,7 @@
(set (mem:SI (plus:SI (match_dup 5) (const_int 16)))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"stm%(ib%)\t%5, {%1, %2, %3, %4}"
"stmib%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")])
@ -215,7 +215,7 @@
(set (mem:SI (plus:SI (match_dup 5) (const_int 16)))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 5"
"stm%(ib%)\t%5!, {%1, %2, %3, %4}"
"stmib%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")])
@ -233,7 +233,7 @@
(set (match_operand:SI 4 "arm_hard_general_register_operand" "")
(mem:SI (match_dup 5)))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"ldm%(da%)\t%5, {%1, %2, %3, %4}"
"ldmda%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")])
@ -253,7 +253,7 @@
(set (match_operand:SI 4 "arm_hard_general_register_operand" "")
(mem:SI (match_dup 5)))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 5"
"ldm%(da%)\t%5!, {%1, %2, %3, %4}"
"ldmda%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")])
@ -268,7 +268,7 @@
(set (mem:SI (match_dup 5))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"stm%(da%)\t%5, {%1, %2, %3, %4}"
"stmda%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")])
@ -285,7 +285,7 @@
(set (mem:SI (match_dup 5))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 5"
"stm%(da%)\t%5!, {%1, %2, %3, %4}"
"stmda%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")])
@ -304,7 +304,7 @@
(mem:SI (plus:SI (match_dup 5)
(const_int -4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"ldm%(db%)\t%5, {%1, %2, %3, %4}"
"ldmdb%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -326,7 +326,7 @@
(mem:SI (plus:SI (match_dup 5)
(const_int -4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
"ldm%(db%)\t%5!, {%1, %2, %3, %4}"
"ldmdb%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "load4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -342,7 +342,7 @@
(set (mem:SI (plus:SI (match_dup 5) (const_int -4)))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"stm%(db%)\t%5, {%1, %2, %3, %4}"
"stmdb%?\t%5, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -360,7 +360,7 @@
(set (mem:SI (plus:SI (match_dup 5) (const_int -4)))
(match_operand:SI 4 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
"stm%(db%)\t%5!, {%1, %2, %3, %4}"
"stmdb%?\t%5!, {%1, %2, %3, %4}"
[(set_attr "type" "store4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -472,7 +472,7 @@
FAIL;
})
(define_insn "*ldm3_ia"
(define_insn "*ldm3_"
[(match_parallel 0 "load_multiple_operation"
[(set (match_operand:SI 1 "arm_hard_general_register_operand" "")
(mem:SI (match_operand:SI 4 "s_register_operand" "rk")))
@ -483,7 +483,7 @@
(mem:SI (plus:SI (match_dup 4)
(const_int 8))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"ldm%(ia%)\t%4, {%1, %2, %3}"
"ldm%?\t%4, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -499,7 +499,7 @@
(mem:SI (plus:SI (match_dup 4)
(const_int 8))))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 3"
"ldm%(ia%)\t%4, {%1, %2, %3}"
"ldmia\t%4, {%1, %2, %3}"
[(set_attr "type" "load3")])
(define_insn "*ldm3_ia_update"
@ -515,7 +515,7 @@
(mem:SI (plus:SI (match_dup 4)
(const_int 8))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"ldm%(ia%)\t%4!, {%1, %2, %3}"
"ldmia%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -533,10 +533,10 @@
(mem:SI (plus:SI (match_dup 4)
(const_int 8))))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 4"
"ldm%(ia%)\t%4!, {%1, %2, %3}"
"ldmia\t%4!, {%1, %2, %3}"
[(set_attr "type" "load3")])
(define_insn "*stm3_ia"
(define_insn "*stm3_"
[(match_parallel 0 "store_multiple_operation"
[(set (mem:SI (match_operand:SI 4 "s_register_operand" "rk"))
(match_operand:SI 1 "arm_hard_general_register_operand" ""))
@ -545,7 +545,7 @@
(set (mem:SI (plus:SI (match_dup 4) (const_int 8)))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"stm%(ia%)\t%4, {%1, %2, %3}"
"stm%?\t%4, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -561,7 +561,7 @@
(set (mem:SI (plus:SI (match_dup 4) (const_int 8)))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"stm%(ia%)\t%4!, {%1, %2, %3}"
"stmia%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -577,7 +577,7 @@
(set (mem:SI (plus:SI (match_dup 4) (const_int 8)))
(match_operand:SI 3 "low_register_operand" ""))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 4"
"stm%(ia%)\t%4!, {%1, %2, %3}"
"stmia\t%4!, {%1, %2, %3}"
[(set_attr "type" "store3")])
(define_insn "*ldm3_ib"
@ -592,7 +592,7 @@
(mem:SI (plus:SI (match_dup 4)
(const_int 12))))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"ldm%(ib%)\t%4, {%1, %2, %3}"
"ldmib%?\t%4, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")])
@ -610,7 +610,7 @@
(mem:SI (plus:SI (match_dup 4)
(const_int 12))))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"ldm%(ib%)\t%4!, {%1, %2, %3}"
"ldmib%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")])
@ -623,7 +623,7 @@
(set (mem:SI (plus:SI (match_dup 4) (const_int 12)))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"stm%(ib%)\t%4, {%1, %2, %3}"
"stmib%?\t%4, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")])
@ -638,7 +638,7 @@
(set (mem:SI (plus:SI (match_dup 4) (const_int 12)))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"stm%(ib%)\t%4!, {%1, %2, %3}"
"stmib%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")])
@ -653,7 +653,7 @@
(set (match_operand:SI 3 "arm_hard_general_register_operand" "")
(mem:SI (match_dup 4)))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"ldm%(da%)\t%4, {%1, %2, %3}"
"ldmda%?\t%4, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")])
@ -670,7 +670,7 @@
(set (match_operand:SI 3 "arm_hard_general_register_operand" "")
(mem:SI (match_dup 4)))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"ldm%(da%)\t%4!, {%1, %2, %3}"
"ldmda%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")])
@ -683,7 +683,7 @@
(set (mem:SI (match_dup 4))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"stm%(da%)\t%4, {%1, %2, %3}"
"stmda%?\t%4, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")])
@ -698,7 +698,7 @@
(set (mem:SI (match_dup 4))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 4"
"stm%(da%)\t%4!, {%1, %2, %3}"
"stmda%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")])
@ -714,7 +714,7 @@
(mem:SI (plus:SI (match_dup 4)
(const_int -4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"ldm%(db%)\t%4, {%1, %2, %3}"
"ldmdb%?\t%4, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -733,7 +733,7 @@
(mem:SI (plus:SI (match_dup 4)
(const_int -4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"ldm%(db%)\t%4!, {%1, %2, %3}"
"ldmdb%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "load3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -747,7 +747,7 @@
(set (mem:SI (plus:SI (match_dup 4) (const_int -4)))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"stm%(db%)\t%4, {%1, %2, %3}"
"stmdb%?\t%4, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -763,7 +763,7 @@
(set (mem:SI (plus:SI (match_dup 4) (const_int -4)))
(match_operand:SI 3 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
"stm%(db%)\t%4!, {%1, %2, %3}"
"stmdb%?\t%4!, {%1, %2, %3}"
[(set_attr "type" "store3")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -861,7 +861,7 @@
FAIL;
})
(define_insn "*ldm2_ia"
(define_insn "*ldm2_"
[(match_parallel 0 "load_multiple_operation"
[(set (match_operand:SI 1 "arm_hard_general_register_operand" "")
(mem:SI (match_operand:SI 3 "s_register_operand" "rk")))
@ -869,7 +869,7 @@
(mem:SI (plus:SI (match_dup 3)
(const_int 4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
"ldm%(ia%)\t%3, {%1, %2}"
"ldm%?\t%3, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -882,7 +882,7 @@
(mem:SI (plus:SI (match_dup 3)
(const_int 4))))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 2"
"ldm%(ia%)\t%3, {%1, %2}"
"ldmia\t%3, {%1, %2}"
[(set_attr "type" "load2")])
(define_insn "*ldm2_ia_update"
@ -895,7 +895,7 @@
(mem:SI (plus:SI (match_dup 3)
(const_int 4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"ldm%(ia%)\t%3!, {%1, %2}"
"ldmia%?\t%3!, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -910,17 +910,17 @@
(mem:SI (plus:SI (match_dup 3)
(const_int 4))))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 3"
"ldm%(ia%)\t%3!, {%1, %2}"
"ldmia\t%3!, {%1, %2}"
[(set_attr "type" "load2")])
(define_insn "*stm2_ia"
(define_insn "*stm2_"
[(match_parallel 0 "store_multiple_operation"
[(set (mem:SI (match_operand:SI 3 "s_register_operand" "rk"))
(match_operand:SI 1 "arm_hard_general_register_operand" ""))
(set (mem:SI (plus:SI (match_dup 3) (const_int 4)))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
"stm%(ia%)\t%3, {%1, %2}"
"stm%?\t%3, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -934,7 +934,7 @@
(set (mem:SI (plus:SI (match_dup 3) (const_int 4)))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"stm%(ia%)\t%3!, {%1, %2}"
"stmia%?\t%3!, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -948,7 +948,7 @@
(set (mem:SI (plus:SI (match_dup 3) (const_int 4)))
(match_operand:SI 2 "low_register_operand" ""))])]
"TARGET_THUMB1 && XVECLEN (operands[0], 0) == 3"
"stm%(ia%)\t%3!, {%1, %2}"
"stmia\t%3!, {%1, %2}"
[(set_attr "type" "store2")])
(define_insn "*ldm2_ib"
@ -960,7 +960,7 @@
(mem:SI (plus:SI (match_dup 3)
(const_int 8))))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 2"
"ldm%(ib%)\t%3, {%1, %2}"
"ldmib%?\t%3, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")])
@ -975,7 +975,7 @@
(mem:SI (plus:SI (match_dup 3)
(const_int 8))))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"ldm%(ib%)\t%3!, {%1, %2}"
"ldmib%?\t%3!, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")])
@ -986,7 +986,7 @@
(set (mem:SI (plus:SI (match_dup 3) (const_int 8)))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 2"
"stm%(ib%)\t%3, {%1, %2}"
"stmib%?\t%3, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")])
@ -999,7 +999,7 @@
(set (mem:SI (plus:SI (match_dup 3) (const_int 8)))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"stm%(ib%)\t%3!, {%1, %2}"
"stmib%?\t%3!, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")])
@ -1011,7 +1011,7 @@
(set (match_operand:SI 2 "arm_hard_general_register_operand" "")
(mem:SI (match_dup 3)))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 2"
"ldm%(da%)\t%3, {%1, %2}"
"ldmda%?\t%3, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")])
@ -1025,7 +1025,7 @@
(set (match_operand:SI 2 "arm_hard_general_register_operand" "")
(mem:SI (match_dup 3)))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"ldm%(da%)\t%3!, {%1, %2}"
"ldmda%?\t%3!, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")])
@ -1036,7 +1036,7 @@
(set (mem:SI (match_dup 3))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 2"
"stm%(da%)\t%3, {%1, %2}"
"stmda%?\t%3, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")])
@ -1049,7 +1049,7 @@
(set (mem:SI (match_dup 3))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_ARM && XVECLEN (operands[0], 0) == 3"
"stm%(da%)\t%3!, {%1, %2}"
"stmda%?\t%3!, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")])
@ -1062,7 +1062,7 @@
(mem:SI (plus:SI (match_dup 3)
(const_int -4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
"ldm%(db%)\t%3, {%1, %2}"
"ldmdb%?\t%3, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -1078,7 +1078,7 @@
(mem:SI (plus:SI (match_dup 3)
(const_int -4))))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"ldm%(db%)\t%3!, {%1, %2}"
"ldmdb%?\t%3!, {%1, %2}"
[(set_attr "type" "load2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -1090,7 +1090,7 @@
(set (mem:SI (plus:SI (match_dup 3) (const_int -4)))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
"stm%(db%)\t%3, {%1, %2}"
"stmdb%?\t%3, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])
@ -1104,7 +1104,7 @@
(set (mem:SI (plus:SI (match_dup 3) (const_int -4)))
(match_operand:SI 2 "arm_hard_general_register_operand" ""))])]
"TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
"stm%(db%)\t%3!, {%1, %2}"
"stmdb%?\t%3!, {%1, %2}"
[(set_attr "type" "store2")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")])

gcc/config/arm/sync.md

@ -72,7 +72,7 @@
{
enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
return \"ldr%(<sync_sfx>%)\\t%0, %1\";
return \"ldr<sync_sfx>%?\\t%0, %1\";
else
return \"lda<sync_sfx>%?\\t%0, %1\";
}
@ -89,7 +89,7 @@
{
enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_acquire (model))
return \"str%(<sync_sfx>%)\t%1, %0\";
return \"str<sync_sfx>%?\t%1, %0\";
else
return \"stl<sync_sfx>%?\t%1, %0\";
}

gcc/config/arm/thumb2.md

@ -330,8 +330,8 @@
mov%?\\t%0, %1\\t%@ movhi
mov%?\\t%0, %1\\t%@ movhi
movw%?\\t%0, %L1\\t%@ movhi
str%(h%)\\t%1, %0\\t%@ movhi
ldr%(h%)\\t%0, %1\\t%@ movhi"
strh%?\\t%1, %0\\t%@ movhi
ldrh%?\\t%0, %1\\t%@ movhi"
[(set_attr "type" "mov_reg,mov_imm,mov_imm,mov_imm,store1,load1")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "yes,no,yes,no,no,no")
@ -1028,7 +1028,7 @@
"TARGET_THUMB2 && arm_arch6"
"@
sxtb%?\\t%0, %1
ldr%(sb%)\\t%0, %1"
ldrsb%?\\t%0, %1"
[(set_attr "type" "extend,load_byte")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")
@ -1042,7 +1042,7 @@
"TARGET_THUMB2 && arm_arch6"
"@
uxth%?\\t%0, %1
ldr%(h%)\\t%0, %1"
ldrh%?\\t%0, %1"
[(set_attr "type" "extend,load_byte")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")
@ -1055,8 +1055,8 @@
(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
"TARGET_THUMB2 && arm_arch6"
"@
uxtb%(%)\\t%0, %1
ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
uxtb%?\\t%0, %1
ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
[(set_attr "type" "extend,load_byte")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "no")

gcc/doc/invoke.texi

@ -13757,10 +13757,9 @@ off by default.
@item -masm-syntax-unified
@opindex masm-syntax-unified
Assume inline assembler is using unified asm syntax. The default is
currently off which implies divided syntax. Currently this option is
available only for Thumb1 and has no effect on ARM state and Thumb2.
However, this may change in future releases of GCC. Divided syntax
should be considered deprecated.
currently off which implies divided syntax. This option has no impact
on Thumb2. However, this may change in future releases of GCC.
Divided syntax should be considered deprecated.
@item -mrestrict-it
@opindex mrestrict-it
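
A usage note, not part of the documentation change: as I read the option, -masm-syntax-unified keeps ".syntax unified" in effect around inline assembler, so hand-written UAL spellings assemble in ARM state as well as Thumb. A hypothetical example (the function name is mine; without the option the statement would need the divided spelling "movs %0, %0, lsr #1" in ARM state):

/* Build with e.g. -marm -masm-syntax-unified.  */
static inline unsigned int
halve_setting_flags (unsigned int x)
{
  __asm__ ("lsrs %0, %0, #1" : "+r" (x) : : "cc");   /* UAL spelling */
  return x;
}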

gcc/testsuite/ChangeLog

@ -1,3 +1,10 @@
2015-11-06 Ramana Radhakrishnan <ramana.radhakrishnan@arm.com>
* gcc.target/arm/combine-movs.c: Adjust for unified asm.
* gcc.target/arm/interrupt-1.c: Likewise.
* gcc.target/arm/interrupt-2.c: Likewise.
* gcc.target/arm/unaligned-memcpy-4.c: Likewise.
2015-11-06 Thomas Schwinge <thomas@codesourcery.com>
* gfortran.dg/goacc/combined_loop.f90: XFAIL.

gcc/testsuite/gcc.target/arm/combine-movs.c

@ -9,5 +9,4 @@ void foo (unsigned long r[], unsigned int d)
r[i] = 0;
}
/* { dg-final { scan-assembler "lsrs\tr\[0-9\]" { target arm_thumb2 } } } */
/* { dg-final { scan-assembler "movs\tr\[0-9\]" { target { ! arm_thumb2 } } } } */
/* { dg-final { scan-assembler "lsrs\tr\[0-9\]" } } */

gcc/testsuite/gcc.target/arm/interrupt-1.c

@ -13,5 +13,5 @@ void foo ()
bar (0);
}
/* { dg-final { scan-assembler "stmfd\tsp!, {r0, r1, r2, r3, r4, fp, ip, lr}" } } */
/* { dg-final { scan-assembler "ldmfd\tsp!, {r0, r1, r2, r3, r4, fp, ip, pc}\\^" } } */
/* { dg-final { scan-assembler "push\t{r0, r1, r2, r3, r4, fp, ip, lr}" } } */
/* { dg-final { scan-assembler "pop\t{r0, r1, r2, r3, r4, fp, ip, pc}\\^" } } */

gcc/testsuite/gcc.target/arm/interrupt-2.c

@ -15,5 +15,5 @@ void test()
foo = 0;
}
/* { dg-final { scan-assembler "stmfd\tsp!, {r0, r1, r2, r3, r4, r5, ip, lr}" } } */
/* { dg-final { scan-assembler "ldmfd\tsp!, {r0, r1, r2, r3, r4, r5, ip, pc}\\^" } } */
/* { dg-final { scan-assembler "push\t{r0, r1, r2, r3, r4, r5, ip, lr}" } } */
/* { dg-final { scan-assembler "pop\t{r0, r1, r2, r3, r4, r5, ip, pc}\\^" } } */

gcc/testsuite/gcc.target/arm/unaligned-memcpy-4.c

@ -14,7 +14,7 @@ void aligned_both (void)
/* We know both src and dest to be aligned: expect multiword loads/stores. */
/* { dg-final { scan-assembler-times "ldmia" 1 { target { ! { arm_prefer_ldrd_strd } } } } } */
/* { dg-final { scan-assembler-times "ldm" 1 { target { ! { arm_prefer_ldrd_strd } } } } } */
/* { dg-final { scan-assembler-times "stmia" 1 { target { ! { arm_prefer_ldrd_strd } } } } } */
/* { dg-final { scan-assembler "ldrd" { target { arm_prefer_ldrd_strd } } } } */
/* { dg-final { scan-assembler-times "ldm" 0 { target { arm_prefer_ldrd_strd } } } } */