ports/devel/avr-gcc/files/patch-16bitassign
Joerg Wunsch 96fe03484f

Bring in some patches from the head of GCC's CVS back to the latest
released version, 3.4.3.  This mainly adds support for new AVR devices
that appeared on the market recently, and fixes a bug related to the
order of assignments for volatile uint16_t * objects (on the
assumption that they might point to I/O space, where the order of the
two 8-bit operations can be important).
2005-03-10 21:14:44 +00:00
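
For illustration, a minimal C sketch of the situation the fix addresses
(the register name TIMER16 and the address 0x84 are hypothetical, used
here only as a stand-in for a 16-bit I/O register that the hardware
accesses byte-at-a-time through a temporary latch register, which is why
the access order matters):

    #include <stdint.h>

    /* Hypothetical 16-bit I/O register, for illustration only. */
    #define TIMER16 (*(volatile uint16_t *)0x84)

    uint16_t
    read_timer (void)
    {
      /* With the fix, the low byte is read before the high byte. */
      return TIMER16;
    }

    void
    write_timer (uint16_t val)
    {
      /* With the fix, the high byte is written before the low byte. */
      TIMER16 = val;
    }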

===================================================================
RCS file: /cvsroot/gcc/cvsroot/gcc/gcc/gcc/config/avr/avr.c,v
retrieving revision 1.129
retrieving revision 1.130
diff -u -r1.129 -r1.130
--- gcc/config/avr/avr.c 2005/02/09 14:43:28 1.129
+++ gcc/config/avr/avr.c 2005/03/06 21:50:34 1.130
@@ -1120,6 +1120,16 @@
print_operand (file, XEXP (addr, 1), 0);
}
+ else if (code == 'p' || code == 'r')
+ {
+ if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
+ fatal_insn ("bad address, not post_inc or pre_dec:", addr);
+
+ if (code == 'p')
+ print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
+ else
+ print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
+ }
else if (GET_CODE (addr) == PLUS)
{
print_operand_address (file, XEXP (addr,0));
@@ -1835,6 +1845,9 @@
rtx base = XEXP (src, 0);
int reg_dest = true_regnum (dest);
int reg_base = true_regnum (base);
+ /* "volatile" forces reading low byte first, even if less efficient,
+ for correct operation with 16-bit I/O registers. */
+ int mem_volatile_p = MEM_VOLATILE_P (src);
int tmp;
if (!l)
@@ -1928,6 +1941,25 @@
if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
fatal_insn ("incorrect insn:", insn);
+ if (mem_volatile_p)
+ {
+ if (REGNO (XEXP (base, 0)) == REG_X)
+ {
+ *l = 4;
+ return (AS2 (sbiw,r26,2) CR_TAB
+ AS2 (ld,%A0,X+) CR_TAB
+ AS2 (ld,%B0,X) CR_TAB
+ AS2 (sbiw,r26,1));
+ }
+ else
+ {
+ *l = 3;
+ return (AS2 (sbiw,%r1,2) CR_TAB
+ AS2 (ld,%A0,%p1) CR_TAB
+ AS2 (ldd,%B0,%p1+1));
+ }
+ }
+
*l = 2;
return (AS2 (ld,%B0,%1) CR_TAB
AS2 (ld,%A0,%1));
@@ -2508,7 +2540,11 @@
rtx base = XEXP (dest, 0);
int reg_base = true_regnum (base);
int reg_src = true_regnum (src);
+ /* "volatile" forces writing high byte first, even if less efficient,
+ for correct operation with 16-bit I/O registers. */
+ int mem_volatile_p = MEM_VOLATILE_P (dest);
int tmp;
+
if (!l)
l = &tmp;
if (CONSTANT_ADDRESS_P (base))
@@ -2528,33 +2564,33 @@
{
if (reg_src == REG_X)
{
- /* "st X+,r26" is undefined */
- if (reg_unused_after (insn, src))
+ /* "st X+,r26" and "st -X,r26" are undefined. */
+ if (!mem_volatile_p && reg_unused_after (insn, src))
return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
AS2 (st,X,r26) CR_TAB
AS2 (adiw,r26,1) CR_TAB
AS2 (st,X,__tmp_reg__));
else
return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
- AS2 (st,X,r26) CR_TAB
AS2 (adiw,r26,1) CR_TAB
AS2 (st,X,__tmp_reg__) CR_TAB
- AS2 (sbiw,r26,1));
+ AS2 (sbiw,r26,1) CR_TAB
+ AS2 (st,X,r26));
}
else
{
- if (reg_unused_after (insn, base))
+ if (!mem_volatile_p && reg_unused_after (insn, base))
return *l=2, (AS2 (st,X+,%A1) CR_TAB
AS2 (st,X,%B1));
else
- return *l=3, (AS2 (st ,X+,%A1) CR_TAB
- AS2 (st ,X,%B1) CR_TAB
- AS2 (sbiw,r26,1));
+ return *l=3, (AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (st,-X,%A1));
}
}
else
- return *l=2, (AS2 (st ,%0,%A1) CR_TAB
- AS2 (std,%0+1,%B1));
+ return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
+ AS2 (st,%0,%A1));
}
else if (GET_CODE (base) == PLUS)
{
@@ -2567,14 +2603,14 @@
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
- AS2 (std,Y+62,%A1) CR_TAB
AS2 (std,Y+63,%B1) CR_TAB
+ AS2 (std,Y+62,%A1) CR_TAB
AS2 (sbiw,r28,%o0-62));
return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
AS2 (sbci,r29,hi8(-%o0)) CR_TAB
- AS2 (st,Y,%A1) CR_TAB
AS2 (std,Y+1,%B1) CR_TAB
+ AS2 (st,Y,%A1) CR_TAB
AS2 (subi,r28,lo8(%o0)) CR_TAB
AS2 (sbci,r29,hi8(%o0)));
}
@@ -2582,31 +2618,53 @@
{
/* (X + d) = R */
if (reg_src == REG_X)
- {
+ {
*l = 7;
return (AS2 (mov,__tmp_reg__,r26) CR_TAB
AS2 (mov,__zero_reg__,r27) CR_TAB
- AS2 (adiw,r26,%o0) CR_TAB
- AS2 (st,X+,__tmp_reg__) CR_TAB
+ AS2 (adiw,r26,%o0+1) CR_TAB
AS2 (st,X,__zero_reg__) CR_TAB
+ AS2 (st,-X,__tmp_reg__) CR_TAB
AS1 (clr,__zero_reg__) CR_TAB
- AS2 (sbiw,r26,%o0+1));
+ AS2 (sbiw,r26,%o0));
}
*l = 4;
- return (AS2 (adiw,r26,%o0) CR_TAB
- AS2 (st,X+,%A1) CR_TAB
- AS2 (st,X,%B1) CR_TAB
- AS2 (sbiw,r26,%o0+1));
+ return (AS2 (adiw,r26,%o0+1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (st,-X,%A1) CR_TAB
+ AS2 (sbiw,r26,%o0));
}
- return *l=2, (AS2 (std,%A0,%A1) CR_TAB
- AS2 (std,%B0,%B1));
+ return *l=2, (AS2 (std,%B0,%B1) CR_TAB
+ AS2 (std,%A0,%A1));
}
else if (GET_CODE (base) == PRE_DEC) /* (--R) */
return *l=2, (AS2 (st,%0,%B1) CR_TAB
AS2 (st,%0,%A1));
else if (GET_CODE (base) == POST_INC) /* (R++) */
- return *l=2, (AS2 (st,%0,%A1) CR_TAB
- AS2 (st,%0,%B1));
+ {
+ if (mem_volatile_p)
+ {
+ if (REGNO (XEXP (base, 0)) == REG_X)
+ {
+ *l = 4;
+ return (AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (st,-X,%A1) CR_TAB
+ AS2 (adiw,r26,2));
+ }
+ else
+ {
+ *l = 3;
+ return (AS2 (std,%p0+1,%B1) CR_TAB
+ AS2 (st,%p0,%A1) CR_TAB
+ AS2 (adiw,%r0,2));
+ }
+ }
+
+ *l = 2;
+ return (AS2 (st,%0,%A1) CR_TAB
+ AS2 (st,%0,%B1));
+ }
fatal_insn ("unknown move insn:",insn);
return "";
}
===================================================================
RCS file: /cvsroot/gcc/cvsroot/gcc/gcc/gcc/config/avr/avr.md,v
retrieving revision 1.49
retrieving revision 1.50
diff -u -r1.49 -r1.50
--- gcc/config/avr/avr.md 2005/01/27 18:22:25 1.49
+++ gcc/config/avr/avr.md 2005/03/06 21:50:36 1.50
@@ -30,6 +30,8 @@
;; j Branch condition.
;; k Reverse branch condition.
;; o Displacement for (mem (plus (reg) (const_int))) operands.
+;; p POST_INC or PRE_DEC address as a pointer (X, Y, Z)
+;; r POST_INC or PRE_DEC address as a register (r26, r28, r30)
;; ~ Output 'r' if not AVR_MEGA.
;; UNSPEC usage:
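
Illustration (not part of the patch): for a memory operand whose address
is a POST_INC of the X pointer, i.e. (post_inc (reg:HI 26)), the two new
codes print as follows in templates like those added to avr.c above
(using r24 as the destination register purely as an example):

    "ld %A0,%p1"    ->   "ld r24,X"
    "sbiw %r1,2"    ->   "sbiw r26,2"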