avr.h (INIT_TARGET_OPTABS): Remove most of it; it was the same as the default library function names.

* config/avr/avr.h (INIT_TARGET_OPTABS): Remove most of it; it was
	the same as the default library function names.
	* config/avr/avr.md: Document special characters after '%'.
	(mulqi3, mulhi3, mulsi3): Call libgcc.S functions ourselves,
	knowing which of the call-used registers are really clobbered.
	(divmodqi4, udivmodqi4, divmodhi4, udivmodhi4, divmodsi4, udivmodsi4):
	New.  Both quotient and remainder from one libgcc.S call.
	* config/avr/libgcc.S: Optimize mul/divmod for the new insns above,
	clobber as few registers as possible.
	* config/avr/t-avr (LIB1ASMFUNCS): Adjust for the above changes.

From-SVN: r39155
Author: Marek Michalkiewicz, 2001-01-20 17:49:01 +01:00 (committed by Marek Michalkiewicz)
parent b845f89747
commit 1d26ac96cc
5 changed files with 507 additions and 337 deletions
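
To make the divmod change above concrete, here is a small, purely illustrative C fragment (not part of the commit). With the new divmodqi4/divmodhi4/divmodsi4 patterns, the quotient and remainder of the same operands can come from a single libgcc.S call instead of separate __div*3 and __mod*3 calls; the exact code generated of course depends on compiler version and options.

/* Illustrative only: on AVR, int is 16 bits, so both operations below
   are HImode divisions.  With the divmodhi4 pattern added in this
   commit, GCC can obtain the quotient and the remainder from one
   __divmodhi4 call.  */
int
quot_and_rem (int num, int den, int *rem)
{
  *rem = num % den;     /* remainder */
  return num / den;     /* quotient of the same operands */
}
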

gcc/ChangeLog

@@ -1,3 +1,16 @@
2001-01-20 Marek Michalkiewicz <marekm@linux.org.pl>
* config/avr/avr.h (INIT_TARGET_OPTABS): Remove most of it; it was
the same as the default library function names.
* config/avr/avr.md: Document special characters after '%'.
(mulqi3, mulhi3, mulsi3): Call libgcc.S functions ourselves,
knowing which of the call-used registers are really clobbered.
(divmodqi4, udivmodqi4, divmodhi4, udivmodhi4, divmodsi4, udivmodsi4):
New. Both quotient and remainder from one libgcc.S call.
* config/avr/libgcc.S: Optimize mul/divmod for the new insns above,
clobber as few registers as possible.
* config/avr/t-avr (LIB1ASMFUNCS): Adjust for the above changes.
2001-01-20 Neil Booth <neil@daikokuya.demon.co.uk>
* cppmacro.c (funlike_invocation_p): Don't move back up to the

gcc/config/avr/avr.h

@@ -3228,50 +3228,6 @@ valid_machine_decl_attribute (DECL, ATTRIBUTES, IDENTIFIER, ARGS)
#define INIT_TARGET_OPTABS \
{ \
smul_optab->handlers[(int) QImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__mulqi3"); \
\
sdiv_optab->handlers[(int) QImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__divqi3"); \
\
smod_optab->handlers[(int) QImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__modqi3"); \
\
udiv_optab->handlers[(int) QImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__udivqi3"); \
\
umod_optab->handlers[(int) QImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__umodqi3"); \
\
smul_optab->handlers[(int) HImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__mulhi3"); \
\
sdiv_optab->handlers[(int) HImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__divhi3"); \
\
smod_optab->handlers[(int) HImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__modhi3"); \
\
udiv_optab->handlers[(int) HImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__udivhi3"); \
\
umod_optab->handlers[(int) HImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__umodhi3"); \
\
smul_optab->handlers[(int) SImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__mulsi3"); \
\
sdiv_optab->handlers[(int) SImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__divsi3"); \
\
smod_optab->handlers[(int) SImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__modsi3"); \
\
udiv_optab->handlers[(int) SImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__udivsi3"); \
\
umod_optab->handlers[(int) SImode].libfunc \
= gen_rtx (SYMBOL_REF, Pmode, "__umodsi3"); \
avr_init_once (); \
}
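
All of the libfunc names removed here are exactly the defaults GCC installs for these optabs, so no overrides are needed. Judging from the hunk header (50 lines shrinking to 6), what presumably remains is little more than the avr_init_once call; a sketch of the assumed result, not the verbatim post-patch text:

/* Assumed remainder of the macro after this change.  */
#define INIT_TARGET_OPTABS \
{ \
  avr_init_once (); \
}
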

gcc/config/avr/avr.md

@@ -21,6 +21,16 @@
;; the Free Software Foundation, 59 Temple Place - Suite 330,
;; Boston, MA 02111-1307, USA.
;; Special characters after '%':
;; A No effect (add 0).
;; B Add 1 to REG number, MEM address or CONST_INT.
;; C Add 2.
;; D Add 3.
;; j Branch condition.
;; k Reverse branch condition.
;; o Displacement for (mem (plus (reg) (const_int))) operands.
;; ~ Output 'r' if not AVR_MEGA.
;; UNSPEC usage:
;; 0 Length of a string, see "strlenhi".
;; 1 Read from a word address in program memory, see "casesi".
@@ -634,7 +644,20 @@
;******************************************************************************
; mul
(define_insn "mulqi3"
(define_expand "mulqi3"
[(set (match_operand:QI 0 "register_operand" "")
(mult:QI (match_operand:QI 1 "register_operand" "")
(match_operand:QI 2 "register_operand" "")))]
""
"{
if (!AVR_ENHANCED)
{
emit_insn (gen_mulqi3_call (operands[0], operands[1], operands[2]));
DONE;
}
}")
(define_insn "*mulqi3_enh"
[(set (match_operand:QI 0 "register_operand" "=r")
(mult:QI (match_operand:QI 1 "register_operand" "r")
(match_operand:QI 2 "register_operand" "r")))]
@@ -645,6 +668,25 @@
[(set_attr "length" "3")
(set_attr "cc" "clobber")])
(define_expand "mulqi3_call"
[(set (reg:QI 24) (match_operand:QI 1 "register_operand" ""))
(set (reg:QI 22) (match_operand:QI 2 "register_operand" ""))
(parallel [(set (reg:QI 24) (mult:QI (reg:QI 24) (reg:QI 22)))
(clobber (reg:QI 22))])
(set (match_operand:QI 0 "register_operand" "") (reg:QI 24))]
""
"")
(define_insn "*mulqi3_call"
[(set (reg:QI 24) (mult:QI (reg:QI 24) (reg:QI 22)))
(clobber (reg:QI 22))]
"!AVR_ENHANCED"
"%~call __mulqi3"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
(define_insn "mulqihi3"
[(set (match_operand:HI 0 "register_operand" "=r")
(mult:HI (sign_extend:HI (match_operand:QI 1 "register_operand" "d"))
@@ -667,7 +709,21 @@
[(set_attr "length" "3")
(set_attr "cc" "clobber")])
(define_insn "mulhi3"
(define_expand "mulhi3"
[(set (match_operand:HI 0 "register_operand" "")
(mult:HI (match_operand:HI 1 "register_operand" "")
(match_operand:HI 2 "register_operand" "")))]
""
"
{
if (!AVR_ENHANCED)
{
emit_insn (gen_mulhi3_call (operands[0], operands[1], operands[2]));
DONE;
}
}")
(define_insn "*mulhi3_enh"
[(set (match_operand:HI 0 "register_operand" "=&r")
(mult:HI (match_operand:HI 1 "register_operand" "r")
(match_operand:HI 2 "register_operand" "r")))]
@@ -682,6 +738,200 @@
[(set_attr "length" "7")
(set_attr "cc" "clobber")])
(define_expand "mulhi3_call"
[(set (reg:HI 24) (match_operand:HI 1 "register_operand" ""))
(set (reg:HI 22) (match_operand:HI 2 "register_operand" ""))
(parallel [(set (reg:HI 24) (mult:HI (reg:HI 24) (reg:HI 22)))
(clobber (reg:HI 22))
(clobber (reg:QI 21))])
(set (match_operand:HI 0 "register_operand" "") (reg:HI 24))]
""
"")
(define_insn "*mulhi3_call"
[(set (reg:HI 24) (mult:HI (reg:HI 24) (reg:HI 22)))
(clobber (reg:HI 22))
(clobber (reg:QI 21))]
"!AVR_ENHANCED"
"%~call __mulhi3"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
;; Operand 2 (reg:SI 18) not clobbered on the enhanced core.
;; All call-used registers clobbered otherwise - normal library call.
(define_expand "mulsi3"
[(set (reg:SI 22) (match_operand:SI 1 "register_operand" ""))
(set (reg:SI 18) (match_operand:SI 2 "register_operand" ""))
(parallel [(set (reg:SI 22) (mult:SI (reg:SI 22) (reg:SI 18)))
(clobber (reg:HI 26))
(clobber (reg:HI 30))])
(set (match_operand:SI 0 "register_operand" "") (reg:SI 22))]
"AVR_ENHANCED"
"")
(define_insn "*mulsi3_call"
[(set (reg:SI 22) (mult:SI (reg:SI 22) (reg:SI 18)))
(clobber (reg:HI 26))
(clobber (reg:HI 30))]
"AVR_ENHANCED"
"%~call __mulsi3"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
; / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / %
; divmod
;; Generate libgcc.S calls ourselves, because:
;; - we know exactly which registers are clobbered (for QI and HI
;; modes, some of the call-used registers are preserved)
;; - we get both the quotient and the remainder at no extra cost
(define_expand "divmodqi4"
[(set (reg:QI 24) (match_operand:QI 1 "register_operand" ""))
(set (reg:QI 22) (match_operand:QI 2 "register_operand" ""))
(parallel [(set (reg:QI 24) (div:QI (reg:QI 24) (reg:QI 22)))
(set (reg:QI 25) (mod:QI (reg:QI 24) (reg:QI 22)))
(clobber (reg:QI 22))
(clobber (reg:QI 23))])
(set (match_operand:QI 0 "register_operand" "") (reg:QI 24))
(set (match_operand:QI 3 "register_operand" "") (reg:QI 25))]
""
"")
(define_insn "*divmodqi4_call"
[(set (reg:QI 24) (div:QI (reg:QI 24) (reg:QI 22)))
(set (reg:QI 25) (mod:QI (reg:QI 24) (reg:QI 22)))
(clobber (reg:QI 22))
(clobber (reg:QI 23))]
""
"%~call __divmodqi4"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
(define_expand "udivmodqi4"
[(set (reg:QI 24) (match_operand:QI 1 "register_operand" ""))
(set (reg:QI 22) (match_operand:QI 2 "register_operand" ""))
(parallel [(set (reg:QI 24) (udiv:QI (reg:QI 24) (reg:QI 22)))
(set (reg:QI 25) (umod:QI (reg:QI 24) (reg:QI 22)))
(clobber (reg:QI 23))])
(set (match_operand:QI 0 "register_operand" "") (reg:QI 24))
(set (match_operand:QI 3 "register_operand" "") (reg:QI 25))]
""
"")
(define_insn "*udivmodqi4_call"
[(set (reg:QI 24) (udiv:QI (reg:QI 24) (reg:QI 22)))
(set (reg:QI 25) (umod:QI (reg:QI 24) (reg:QI 22)))
(clobber (reg:QI 23))]
""
"%~call __udivmodqi4"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
(define_expand "divmodhi4"
[(set (reg:HI 24) (match_operand:HI 1 "register_operand" ""))
(set (reg:HI 22) (match_operand:HI 2 "register_operand" ""))
(parallel [(set (reg:HI 22) (div:HI (reg:HI 24) (reg:HI 22)))
(set (reg:HI 24) (mod:HI (reg:HI 24) (reg:HI 22)))
(clobber (reg:HI 26))
(clobber (reg:QI 21))])
(set (match_operand:HI 0 "register_operand" "") (reg:HI 22))
(set (match_operand:HI 3 "register_operand" "") (reg:HI 24))]
""
"")
(define_insn "*divmodhi4_call"
[(set (reg:HI 22) (div:HI (reg:HI 24) (reg:HI 22)))
(set (reg:HI 24) (mod:HI (reg:HI 24) (reg:HI 22)))
(clobber (reg:HI 26))
(clobber (reg:QI 21))]
""
"%~call __divmodhi4"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
(define_expand "udivmodhi4"
[(set (reg:HI 24) (match_operand:HI 1 "register_operand" ""))
(set (reg:HI 22) (match_operand:HI 2 "register_operand" ""))
(parallel [(set (reg:HI 22) (udiv:HI (reg:HI 24) (reg:HI 22)))
(set (reg:HI 24) (umod:HI (reg:HI 24) (reg:HI 22)))
(clobber (reg:HI 26))
(clobber (reg:QI 21))])
(set (match_operand:HI 0 "register_operand" "") (reg:HI 22))
(set (match_operand:HI 3 "register_operand" "") (reg:HI 24))]
""
"")
(define_insn "*udivmodhi4_call"
[(set (reg:HI 22) (udiv:HI (reg:HI 24) (reg:HI 22)))
(set (reg:HI 24) (umod:HI (reg:HI 24) (reg:HI 22)))
(clobber (reg:HI 26))
(clobber (reg:QI 21))]
""
"%~call __udivmodhi4"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
(define_expand "divmodsi4"
[(set (reg:SI 22) (match_operand:SI 1 "register_operand" ""))
(set (reg:SI 18) (match_operand:SI 2 "register_operand" ""))
(parallel [(set (reg:SI 18) (div:SI (reg:SI 22) (reg:SI 18)))
(set (reg:SI 22) (mod:SI (reg:SI 22) (reg:SI 18)))
(clobber (reg:HI 26))
(clobber (reg:HI 30))])
(set (match_operand:SI 0 "register_operand" "") (reg:SI 18))
(set (match_operand:SI 3 "register_operand" "") (reg:SI 22))]
""
"")
(define_insn "*divmodsi4_call"
[(set (reg:SI 18) (div:SI (reg:SI 22) (reg:SI 18)))
(set (reg:SI 22) (mod:SI (reg:SI 22) (reg:SI 18)))
(clobber (reg:HI 26))
(clobber (reg:HI 30))]
""
"%~call __divmodsi4"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
(define_expand "udivmodsi4"
[(set (reg:SI 22) (match_operand:SI 1 "register_operand" ""))
(set (reg:SI 18) (match_operand:SI 2 "register_operand" ""))
(parallel [(set (reg:SI 18) (udiv:SI (reg:SI 22) (reg:SI 18)))
(set (reg:SI 22) (umod:SI (reg:SI 22) (reg:SI 18)))
(clobber (reg:HI 26))
(clobber (reg:HI 30))])
(set (match_operand:SI 0 "register_operand" "") (reg:SI 18))
(set (match_operand:SI 3 "register_operand" "") (reg:SI 22))]
""
"")
(define_insn "*udivmodsi4_call"
[(set (reg:SI 18) (udiv:SI (reg:SI 22) (reg:SI 18)))
(set (reg:SI 22) (umod:SI (reg:SI 22) (reg:SI 18)))
(clobber (reg:HI 26))
(clobber (reg:HI 30))]
""
"%~call __udivmodsi4"
[(set (attr "length") (if_then_else (eq_attr "mcu_mega" "no")
(const_int 1)
(const_int 2)))
(set_attr "cc" "clobber")])
;&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
; and
@@ -1794,9 +2044,7 @@
AS2 (mov, r31, %B0) CR_TAB
\"icall\");
}
else if (!AVR_MEGA)
return AS1(rcall,%c0);
return AS1(call,%c0);
return AS1(%~call,%c0);
}"
[(set_attr "cc" "clobber,clobber,clobber")
(set_attr_alternative "length"
@@ -1829,9 +2077,7 @@
AS2 (mov, r31, %B1) CR_TAB
\"icall\");
}
else if (!AVR_MEGA)
return AS1(rcall,%c1);
return AS1(call,%c1);
return AS1(%~call,%c1);
}"
[(set_attr "cc" "clobber,clobber,clobber")
(set_attr_alternative "length"

gcc/config/avr/libgcc.S

@@ -32,8 +32,29 @@ Boston, MA 02111-1307, USA. */
#define __SP_H__ 0x3e
#define __SP_L__ 0x3d
/* Most of the functions here are called directly from avr.md
patterns, instead of using the standard libcall mechanisms.
This can make better code because GCC knows exactly which
of the call-used registers (not all of them) are clobbered. */
.section .text.libgcc, "ax", @progbits
.macro mov_l r_dest, r_src
#if defined (__AVR_ENHANCED__)
movw \r_dest, \r_src
#else
mov \r_dest, \r_src
#endif
.endm
.macro mov_h r_dest, r_src
#if defined (__AVR_ENHANCED__)
; empty
#else
mov \r_dest, \r_src
#endif
.endm
/* Note: mulqi3, mulhi3 are open-coded on the enhanced core. */
#if !defined (__AVR_ENHANCED__)
/*******************************************************
@@ -67,6 +88,29 @@ __mulqi3_exit:
.endfunc
#endif /* defined (L_mulqi3) */
#if defined (L_mulqihi3)
.global __mulqihi3
.func __mulqihi3
__mulqihi3:
clr r25
sbrc r24, 7
dec r25
clr r23
sbrc r22, 7
dec r22
rjmp __mulhi3
.endfunc
#endif /* defined (L_mulqihi3) */
#if defined (L_umulqihi3)
.global __umulqihi3
.func __umulqihi3
__umulqihi3:
clr r25
clr r23
rjmp __mulhi3
.endfunc
#endif /* defined (L_umulqihi3) */
/*******************************************************
Multiplication 16 x 16
@@ -76,7 +120,7 @@ __mulqi3_exit:
#define r_arg1H r25 /* multiplier High */
#define r_arg2L r22 /* multiplicand Low */
#define r_arg2H r23 /* multiplicand High */
#define r_resL r20 /* result Low */
#define r_resL __tmp_reg__ /* result Low */
#define r_resH r21 /* result High */
.global __mulhi3
@@ -116,6 +160,38 @@ __mulhi3_exit:
#endif /* defined (L_mulhi3) */
#endif /* !defined (__AVR_ENHANCED__) */
#if defined (L_mulhisi3)
.global __mulhisi3
.func __mulhisi3
__mulhisi3:
mov_l r18, r24
mov_h r19, r25
clr r24
sbrc r23, 7
dec r24
mov r25, r24
clr r20
sbrc r19, 7
dec r20
mov r21, r20
rjmp __mulsi3
.endfunc
#endif /* defined (L_mulhisi3) */
#if defined (L_umulhisi3)
.global __umulhisi3
.func __umulhisi3
__umulhisi3:
mov_l r18, r24
mov_h r19, r25
clr r24
clr r25
clr r20
clr r21
rjmp __mulsi3
.endfunc
#endif /* defined (L_umulhisi3) */
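
The new __mulhisi3/__umulhisi3 helpers sign- or zero-extend their 16-bit arguments into the __mulsi3 argument registers and tail-jump there. A hypothetical C operation they implement (whether the compiler actually emits a call to them depends on the avr.md patterns available):

long
widening_mul (int a, int b)
{
  /* 16 x 16 -> 32 multiply; __mulhisi3 performs exactly this by
     sign-extending both operands and reusing __mulsi3 (the unsigned
     variant __umulhisi3 zero-extends instead).  */
  return (long) a * b;
}
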
#if defined (L_mulsi3)
/*******************************************************
Multiplication 32 x 32
@@ -227,81 +303,54 @@ __mulsi3_exit:
/*******************************************************
Division 8 / 8 => (result + remainder)
*******************************************************/
#define r_rem r26 /* remainder */
#define r_arg1 r24 /* dividend */
#define r_rem r25 /* remainder */
#define r_arg1 r24 /* dividend, quotient */
#define r_arg2 r22 /* divisor */
#define r_cnt r27 /* loop count */
#define r_cnt r23 /* loop count */
#if defined (L_umodqi3)
.global __umodqi3
.func __umodqi3
__umodqi3:
clt
rcall __udivqi3
mov r24,r_rem
#if defined (L_udivmodqi4)
.global __udivmodqi4
.func __udivmodqi4
__udivmodqi4:
sub r_rem,r_rem ; clear remainder and carry
ldi r_cnt,9 ; init loop counter
rjmp __udivmodqi4_ep ; jump to entry point
__udivmodqi4_loop:
rol r_rem ; shift dividend into remainder
cp r_rem,r_arg2 ; compare remainder & divisor
brcs __udivmodqi4_ep ; remainder <= divisor
sub r_rem,r_arg2 ; restore remainder
__udivmodqi4_ep:
rol r_arg1 ; shift dividend (with CARRY)
dec r_cnt ; decrement loop counter
brne __udivmodqi4_loop
com r_arg1 ; complement result
; because C flag was complemented in loop
ret
.endfunc
#endif /* defined (L_umodqi3) */
#if defined (L_udivqi3)
.endfunc
#endif /* defined (L_udivmodqi4) */
.global __udivqi3
.func __udivqi3
__udivqi3:
clr __tmp_reg__
rjmp __divqi_raw
.endfunc
#endif /* defined (L_udivqi3) */
#if defined (L_modqi3)
.global __modqi3
.func __modqi3
__modqi3:
rcall __divqi3
mov r24,r_rem
ret
.endfunc
#endif /* defined (L_modqi3) */
#if defined (L_divqi3)
.global __divqi3
.func __divqi3
__divqi3:
bst r_arg1,7 ; store sign of divident
#if defined (L_divmodqi4)
.global __divmodqi4
.func __divmodqi4
__divmodqi4:
bst r_arg1,7 ; store sign of dividend
mov __tmp_reg__,r_arg1
eor __tmp_reg__,r_arg2; r0.7 is sign of result
sbrc r_arg1,7
neg r_arg1 ; divident negative : negate
neg r_arg1 ; dividend negative : negate
sbrc r_arg2,7
neg r_arg2 ; divisor negative : negate
.global __divqi_raw
__divqi_raw:
sub r_rem,r_rem ; clear remainder and carry
ldi r_cnt,9 ; init loop counter
rjmp __divqi3_ep ; jump to entry point
__divqi3_loop:
rol r_rem ; shift dividend into remainder
cp r_rem,r_arg2 ; compare remainder & divisor
brcs __divqi3_ep ; remainder <= divisor
sub r_rem,r_arg2 ; restore remainder
__divqi3_ep:
rol r_arg1 ; shift dividend (with CARRY)
dec r_cnt ; decrement loop counter
brne __divqi3_loop ; loop
com r_arg1 ; complement result
; because C flag was complemented in loop
brtc __divqi3_1
neg r_arg2 ; divisor negative : negate
rcall __udivmodqi4 ; do the unsigned div/mod
brtc __divmodqi4_1
neg r_rem ; correct remainder sign
__divqi3_1:
__divmodqi4_1:
sbrc __tmp_reg__,7
neg r_arg1 ; correct result sign
__divqi3_exit:
ret ; result already in r24 (r_arg1)
.endfunc
#endif /* defined (L_divqi3) */
__divmodqi4_exit:
ret
.endfunc
#endif /* defined (L_divmodqi4) */
#undef r_rem
#undef r_arg1
@@ -314,113 +363,80 @@ __divqi3_exit:
*******************************************************/
#define r_remL r26 /* remainder Low */
#define r_remH r27 /* remainder High */
/* return: remainder */
#define r_arg1L r24 /* dividend Low */
#define r_arg1H r25 /* dividend High */
/* return: quotient */
#define r_arg2L r22 /* divisor Low */
#define r_arg2H r23 /* divisor High */
#define r_cnt r21 /* loop count */
#if defined (L_umodhi3)
.global __umodhi3
.func __umodhi3
__umodhi3:
clt
rcall __udivhi3
.global __umodhi3_ret
__umodhi3_ret:
#if defined (__AVR_ENHANCED__)
movw r24, r_remL
#else
mov r24,r_remL
mov r25,r_remH
#endif
ret
.endfunc
#endif /* defined (L_umodhi3) */
#if defined (L_udivhi3)
.global __udivhi3
.func __udivhi3
__udivhi3:
clr __tmp_reg__
rjmp __divhi_raw
.endfunc
#endif /* defined (L_udivhi3) */
#if defined (L_modhi3)
.global __modhi3
.func __modhi3
__modhi3:
.global _div
_div:
rcall __divhi3
#if defined (__AVR_ENHANCED__)
movw r22, r24
#else
mov r22,r24 ; needed for div () function
mov r23,r25
#endif
rjmp __umodhi3_ret
.endfunc
#endif /* defined (L_modhi3) */
#if defined (L_divhi3)
.global __divhi3
.func __divhi3
__divhi3:
bst r_arg1H,7 ; store sign of divident
mov __tmp_reg__,r_arg1H
eor __tmp_reg__,r_arg2H ; r0.7 is sign of result
brtc __divhi3_skip1
com r_arg1H
neg r_arg1L ; divident negative : negate
sbci r_arg1H,0xff
__divhi3_skip1:
tst r_arg2H
brpl __divhi3_skip2
com r_arg2H
neg r_arg2L ; divisor negative : negate
sbci r_arg2H,0xff
__divhi3_skip2:
.global __divhi_raw
__divhi_raw:
#if defined (L_udivmodhi4)
.global __udivmodhi4
.func __udivmodhi4
__udivmodhi4:
sub r_remL,r_remL
sub r_remH,r_remH ; clear remainder and carry
sub r_remH,r_remH ; clear remainder and carry
ldi r_cnt,17 ; init loop counter
rjmp __divhi3_ep ; jump to entry point
__divhi3_loop:
rjmp __udivmodhi4_ep ; jump to entry point
__udivmodhi4_loop:
rol r_remL ; shift dividend into remainder
rol r_remH
cp r_remL,r_arg2L ; compare remainder & divisor
cpc r_remH,r_arg2H
brcs __divhi3_ep ; remainder < divisor
brcs __udivmodhi4_ep ; remainder < divisor
sub r_remL,r_arg2L ; restore remainder
sbc r_remH,r_arg2H
__divhi3_ep:
__udivmodhi4_ep:
rol r_arg1L ; shift dividend (with CARRY)
rol r_arg1H
dec r_cnt ; decrement loop counter
brne __divhi3_loop ; loop
brtc __divhi3_1
com r_remH
neg r_remL ; correct remainder sign
sbci r_remH,0xff
__divhi3_1:
tst __tmp_reg__
brpl __divhi3_exit
adiw r_arg1L,1 ; correct result sign
ret
__divhi3_exit:
brne __udivmodhi4_loop
com r_arg1L
com r_arg1H
; div/mod results to return registers, as for the div() function
mov_l r_arg2L, r_arg1L ; quotient
mov_h r_arg2H, r_arg1H
mov_l r_arg1L, r_remL ; remainder
mov_h r_arg1H, r_remH
ret
.endfunc
#endif /* defined (L_divhi3) */
.endfunc
#endif /* defined (L_udivmodhi4) */
#if defined (L_divmodhi4)
.global __divmodhi4
.func __divmodhi4
__divmodhi4:
.global _div
_div:
bst r_arg1H,7 ; store sign of dividend
mov __tmp_reg__,r_arg1H
eor __tmp_reg__,r_arg2H ; r0.7 is sign of result
rcall __divmodhi4_neg1 ; dividend negative : negate
sbrc r_arg2H,7
rcall __divmodhi4_neg2 ; divisor negative : negate
rcall __udivmodhi4 ; do the unsigned div/mod
rcall __divmodhi4_neg1 ; correct remainder sign
tst __tmp_reg__
brpl __divmodhi4_exit
__divmodhi4_neg2:
com r_arg2H
neg r_arg2L ; correct divisor/result sign
sbci r_arg2H,0xff
__divmodhi4_exit:
ret
__divmodhi4_neg1:
brtc __divmodhi4_exit
com r_arg1H
neg r_arg1L ; correct dividend/remainder sign
sbci r_arg1H,0xff
ret
.endfunc
#endif /* defined (L_divmodhi4) */
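
The _div entry point defined at the start of __divmodhi4, together with the "div/mod results to return registers, as for the div() function" comment in __udivmodhi4, suggests the C library's div() can share this code. A hypothetical use; whether a given libc's div() really resolves to _div is an assumption here:

#include <stdlib.h>

div_t
split (int num, int den)
{
  /* div() returns the quotient and remainder of a 16-bit division;
     the register convention set up by __udivmodhi4 above matches
     what such an implementation needs.  */
  return div (num, den);
}
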
#undef r_remH
#undef r_remL
@@ -439,12 +455,14 @@ __divhi3_exit:
#define r_remHL r30
#define r_remH r27
#define r_remL r26 /* remainder Low */
/* return: remainder */
#define r_arg1HH r25 /* dividend High */
#define r_arg1HL r24
#define r_arg1H r23
#define r_arg1L r22 /* dividend Low */
/* return: quotient */
#define r_arg2HH r21 /* divisor High */
#define r_arg2HL r20
#define r_arg2H r19
@@ -452,100 +470,18 @@ __divhi3_exit:
#define r_cnt __zero_reg__ /* loop count (0 after the loop!) */
#if defined (L_umodsi3)
.global __umodsi3
.func __umodsi3
__umodsi3:
clt
rcall __udivsi3
.global __umodsi3_ret
__umodsi3_ret:
#if defined (__AVR_ENHANCED__)
movw r24, r_remHL
movw r22, r_remL
#else
mov r25,r_remHH
mov r24,r_remHL
mov r23,r_remH
mov r22,r_remL
#endif
ret
.endfunc
#endif /* defined (L_umodsi3) */
#if defined (L_udivsi3)
.global __udivsi3
.func __udivsi3
__udivsi3:
clr __tmp_reg__
rjmp __divsi_raw
.endfunc
#endif /* defined (L_udivsi3) */
#if defined (L_modsi3)
.global __modsi3
.func __modsi3
__modsi3:
.global _ldiv
_ldiv:
rcall __divsi3
#if defined (__AVR_ENHANCED__)
movw r18, r22
movw r20, r24
#else
mov r18,r22 /* Needed for ldiv */
mov r19,r23
mov r20,r24
mov r21,r25
#endif
rjmp __umodsi3_ret
.endfunc
#endif /* defined (L_modsi3) */
#if defined (L_divsi3)
.global __divsi3
.func __divsi3
__divsi3:
bst r_arg1HH,7 ; store sign of divident
mov __tmp_reg__,r_arg1HH
eor __tmp_reg__,r_arg2HH ; r0.7 is sign of result
brtc __divsi3_skip1
com r_arg1HH
com r_arg1HL
com r_arg1H
neg r_arg1L ; divident negative : negate
sbci r_arg1H, 0xff
sbci r_arg1HL,0xff
sbci r_arg1HH,0xff
__divsi3_skip1:
tst r_arg2HH
brpl __divsi3_skip2
com r_arg2HH
com r_arg2HL
com r_arg2H
neg r_arg2L ; divisor negative : negate
sbci r_arg2H, 0xff
sbci r_arg2HL,0xff
sbci r_arg2HH,0xff
__divsi3_skip2:
.global __divsi_raw
__divsi_raw:
#if defined (L_udivmodsi4)
.global __udivmodsi4
.func __udivmodsi4
__udivmodsi4:
ldi r_remL, 33 ; init loop counter
mov r_cnt, r_remL
sub r_remL,r_remL
sub r_remH,r_remH
#if defined (__AVR_ENHANCED__)
movw r_remHL, r_remL
#else
sub r_remHL,r_remHL
sub r_remHH,r_remHH ; clear remainder and carry
#endif
rjmp __divsi3_ep ; jump to entry point
__divsi3_loop:
sub r_remH,r_remH ; clear remainder and carry
mov_l r_remHL, r_remL
mov_h r_remHH, r_remH
rjmp __udivmodsi4_ep ; jump to entry point
__udivmodsi4_loop:
rol r_remL ; shift dividend into remainder
rol r_remH
rol r_remHL
@@ -554,43 +490,72 @@ __divsi3_loop:
cpc r_remH,r_arg2H
cpc r_remHL,r_arg2HL
cpc r_remHH,r_arg2HH
brcs __divsi3_ep ; remainder <= divisor
brcs __udivmodsi4_ep ; remainder <= divisor
sub r_remL,r_arg2L ; restore remainder
sbc r_remH,r_arg2H
sbc r_remHL,r_arg2HL
sbc r_remHH,r_arg2HH
__divsi3_ep:
__udivmodsi4_ep:
rol r_arg1L ; shift dividend (with CARRY)
rol r_arg1H
rol r_arg1HL
rol r_arg1HH
dec r_cnt ; decrement loop counter
brne __divsi3_loop ; loop
brne __udivmodsi4_loop
; __zero_reg__ now restored (r_cnt == 0)
brtc __divsi3_1
com r_remHH
com r_remHL
com r_remH
neg r_remL ; correct remainder sign
sbci r_remH, 0xff
sbci r_remHL,0xff
sbci r_remHH,0xff
__divsi3_1:
rol __tmp_reg__
brcc __divsi3_exit
adc r_arg1L,__zero_reg__; correct result sign
adc r_arg1H,__zero_reg__
adc r_arg1HL,__zero_reg__
adc r_arg1HH,__zero_reg__
ret
__divsi3_exit:
com r_arg1L
com r_arg1H
com r_arg1HL
com r_arg1HH
; div/mod results to return registers, as for the ldiv() function
mov_l r_arg2L, r_arg1L ; quotient
mov_h r_arg2H, r_arg1H
mov_l r_arg2HL, r_arg1HL
mov_h r_arg2HH, r_arg1HH
mov_l r_arg1L, r_remL ; remainder
mov_h r_arg1H, r_remH
mov_l r_arg1HL, r_remHL
mov_h r_arg1HH, r_remHH
ret
.endfunc
#endif /* defined (L_divsi3) */
.endfunc
#endif /* defined (L_udivmodsi4) */
#if defined (L_divmodsi4)
.global __divmodsi4
.func __divmodsi4
__divmodsi4:
bst r_arg1HH,7 ; store sign of dividend
mov __tmp_reg__,r_arg1HH
eor __tmp_reg__,r_arg2HH ; r0.7 is sign of result
rcall __divmodsi4_neg1 ; dividend negative : negate
sbrc r_arg2HH,7
rcall __divmodsi4_neg2 ; divisor negative : negate
rcall __udivmodsi4 ; do the unsigned div/mod
rcall __divmodsi4_neg1 ; correct remainder sign
rol __tmp_reg__
brcc __divmodsi4_exit
__divmodsi4_neg2:
com r_arg2HH
com r_arg2HL
com r_arg2H
neg r_arg2L ; correct divisor/quotient sign
sbci r_arg2H,0xff
sbci r_arg2HL,0xff
sbci r_arg2HH,0xff
__divmodsi4_exit:
ret
__divmodsi4_neg1:
brtc __divmodsi4_exit
com r_arg1HH
com r_arg1HL
com r_arg1H
neg r_arg1L ; correct dividend/remainder sign
sbci r_arg1H, 0xff
sbci r_arg1HL,0xff
sbci r_arg1HH,0xff
ret
.endfunc
#endif /* defined (L_divmodsi4) */
/**********************************
* This is a prologue subroutine
@@ -664,12 +629,8 @@ __epilogue_restores__:
out __SP_H__,r29
out __SREG__,__tmp_reg__
out __SP_L__,r28
#if defined (__AVR_ENHANCED__)
movw r28, r26
#else
mov r28,r26
mov r29,r27
#endif
mov_l r28, r26
mov_h r29, r27
ret
.endfunc
#endif /* defined (L_epilogue) */
@@ -708,7 +669,7 @@ __tablejump2__:
lpm
push r0
ret
.endfunc
#endif
.endfunc
#endif /* defined (L_tablejump) */

gcc/config/avr/t-avr

@@ -8,18 +8,12 @@ LIB1ASMFUNCS = \
_mulqi3 \
_mulhi3 \
_mulsi3 \
_umodqi3 \
_udivqi3 \
_modqi3 \
_divqi3 \
_umodhi3 \
_udivhi3 \
_modhi3 \
_divhi3 \
_umodsi3 \
_udivsi3 \
_modsi3 \
_divsi3 \
_udivmodqi4 \
_divmodqi4 \
_udivmodhi4 \
_divmodhi4 \
_udivmodsi4 \
_divmodsi4 \
_prologue \
_epilogue \
_exit \