1 |
diff -Naurp gcc/config/avr/avr.c gcc/config/avr/avr.c |
2 |
--- gcc/config/avr/avr.c 2011-01-19 13:03:59.000000000 -0600 |
3 |
+++ gcc/config/avr/avr.c 2011-01-19 13:11:23.000000000 -0600 |
4 |
@@ -232,8 +232,8 @@ avr_override_options (void) |
5 |
avr_current_arch = &avr_arch_types[avr_current_device->arch]; |
6 |
avr_extra_arch_macro = avr_current_device->macro; |
7 |
|
8 |
- tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO); |
9 |
- zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO); |
10 |
+ tmp_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? TMP_REGNO_AVRTINY10 : TMP_REGNO); |
11 |
+ zero_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? ZERO_REGNO_AVRTINY10 : ZERO_REGNO); |
12 |
|
13 |
init_machine_status = avr_init_machine_status; |
14 |
} |
15 |
@@ -1641,7 +1641,7 @@ avr_simplify_comparison_p (enum machine_ |
16 |
int |
17 |
function_arg_regno_p(int r) |
18 |
{ |
19 |
- return (r >= 8 && r <= 25); |
20 |
+ return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25); |
21 |
} |
22 |
|
23 |
/* Initializing the variable cum for the state at the beginning |
24 |
@@ -1651,7 +1651,11 @@ void |
25 |
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname, |
26 |
tree fndecl ATTRIBUTE_UNUSED) |
27 |
{ |
28 |
+ if (AVR_TINY) |
29 |
+ cum->nregs = 6; |
30 |
+ else |
31 |
cum->nregs = 18; |
32 |
+ |
33 |
cum->regno = FIRST_CUM_REG; |
34 |
if (!libname && fntype) |
35 |
{ |
36 |
@@ -1675,9 +1679,8 @@ avr_num_arg_regs (enum machine_mode mode |
37 |
else |
38 |
size = GET_MODE_SIZE (mode); |
39 |
|
40 |
- /* Align all function arguments to start in even-numbered registers. |
41 |
+ /* if not AVR_TINY, Align all function arguments to start in even-numbered registers. |
42 |
Odd-sized arguments leave holes above them. */ |
43 |
- |
44 |
return (size + 1) & ~1; |
45 |
} |
46 |
|
47 |
@@ -2009,10 +2012,20 @@ out_movqi_r_mr (rtx insn, rtx op[], int |
48 |
fatal_insn ("incorrect insn:",insn); |
49 |
|
50 |
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))) |
51 |
- return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB |
52 |
+ return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-63))) CR_TAB |
53 |
+ AS2 (sbci,r29,hi8(-(%o1-63))) CR_TAB |
54 |
+ AS2 (subi,r28,lo8(-63)) CR_TAB |
55 |
+ AS2 (sbci,r29,hi8(-63)) CR_TAB |
56 |
+ AS2 (ld,%0,Y) CR_TAB |
57 |
+ AS2 (subi,r28,lo8(63)) CR_TAB |
58 |
+ AS2 (sbci,r29,hi8(63)) CR_TAB |
59 |
+ AS2 (subi,r28,lo8(%o1-63)) CR_TAB |
60 |
+ AS2 (sbci,r29,hi8(%o1-63))) |
61 |
+ : (AS2 (adiw,r28,%o1-63) CR_TAB |
62 |
AS2 (ldd,%0,Y+63) CR_TAB |
63 |
AS2 (sbiw,r28,%o1-63)); |
64 |
|
65 |
+ |
66 |
return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB |
67 |
AS2 (sbci,r29,hi8(-%o1)) CR_TAB |
68 |
AS2 (ld,%0,Y) CR_TAB |
69 |
@@ -2025,15 +2038,38 @@ out_movqi_r_mr (rtx insn, rtx op[], int |
70 |
it but I have this situation with extremal optimizing options. */ |
71 |
if (reg_overlap_mentioned_p (dest, XEXP (x,0)) |
72 |
|| reg_unused_after (insn, XEXP (x,0))) |
73 |
- return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB |
74 |
+ return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB |
75 |
+ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB |
76 |
+ AS2 (ld,%0,X)) |
77 |
+ : (AS2 (adiw,r26,%o1) CR_TAB |
78 |
AS2 (ld,%0,X)); |
79 |
|
80 |
- return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB |
81 |
+ return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB |
82 |
+ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB |
83 |
+ AS2 (ld,%0,X) CR_TAB |
84 |
+ AS2 (subi,r26,lo8(%o1)) CR_TAB |
85 |
+ AS2 (sbci,r27,hi8(%o1))) |
86 |
+ : (AS2 (adiw,r26,%o1) CR_TAB |
87 |
AS2 (ld,%0,X) CR_TAB |
88 |
AS2 (sbiw,r26,%o1)); |
89 |
} |
90 |
+ |
91 |
*l = 1; |
92 |
- return AS2 (ldd,%0,%1); |
93 |
+ op[2] = XEXP(x, 0); |
94 |
+ if(REGNO(op[2]) == REG_Y) |
95 |
+ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
96 |
+ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB |
97 |
+ AS2 (ld,%0,Y) CR_TAB |
98 |
+ AS2 (subi,%A2,lo8(%o1)) CR_TAB |
99 |
+ AS2 (sbci,%B2,hi8(%o1))) |
100 |
+ : AS2 (ldd,%0,%1); |
101 |
+ if(REGNO(op[2]) == REG_Z) |
102 |
+ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
103 |
+ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB |
104 |
+ AS2 (ld,%0,Z) CR_TAB |
105 |
+ AS2 (subi,%A2,lo8(%o1)) CR_TAB |
106 |
+ AS2 (sbci,%B2,hi8(%o1))) |
107 |
+ : AS2 (ldd,%0,%1); |
108 |
} |
109 |
*l = 1; |
110 |
return AS2 (ld,%0,%1); |
111 |
@@ -2073,14 +2109,34 @@ out_movhi_r_mr (rtx insn, rtx op[], int |
112 |
AS2 (ld,%B0,X)); |
113 |
} |
114 |
*l = 3; |
115 |
- return (AS2 (ld,%A0,X+) CR_TAB |
116 |
+ return AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB |
117 |
+ AS2 (ld,%B0,X) CR_TAB |
118 |
+ AS2 (subi,r26,lo8(1)) CR_TAB |
119 |
+ AS2 (sbci,r27,hi8(1))) |
120 |
+ : (AS2 (ld,%A0,X+) CR_TAB |
121 |
AS2 (ld,%B0,X) CR_TAB |
122 |
AS2 (sbiw,r26,1)); |
123 |
} |
124 |
else /* (R) */ |
125 |
{ |
126 |
*l = 2; |
127 |
- return (AS2 (ld,%A0,%1) CR_TAB |
128 |
+ if(reg_base == REG_Y) |
129 |
+ return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB |
130 |
+ AS2 (subi,r28,lo8((-1))) CR_TAB |
131 |
+ AS2 (sbci,r29,hi8((-1))) CR_TAB |
132 |
+ AS2 (ld,%B0,%1) CR_TAB |
133 |
+ AS2 (subi,r28,lo8(1)) CR_TAB |
134 |
+ AS2 (sbci,r29,hi8(1))) |
135 |
+ : (AS2 (ld,%A0,%1) CR_TAB |
136 |
+ AS2 (ldd,%B0,%1+1)); |
137 |
+ if(reg_base == REG_Z) |
138 |
+ return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB |
139 |
+ AS2 (subi,r30,lo8((-1))) CR_TAB |
140 |
+ AS2 (sbci,r31,hi8((-1))) CR_TAB |
141 |
+ AS2 (ld,%B0,%1) CR_TAB |
142 |
+ AS2 (subi,r30,lo8(1)) CR_TAB |
143 |
+ AS2 (sbci,r31,hi8(1))) |
144 |
+ : (AS2 (ld,%A0,%1) CR_TAB |
145 |
AS2 (ldd,%B0,%1+1)); |
146 |
} |
147 |
} |
148 |
@@ -2095,12 +2151,30 @@ out_movhi_r_mr (rtx insn, rtx op[], int |
149 |
fatal_insn ("incorrect insn:",insn); |
150 |
|
151 |
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))) |
152 |
- return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB |
153 |
+ return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-62))) CR_TAB |
154 |
+ AS2 (sbci,r29,hi8(-(%o1-62))) CR_TAB |
155 |
+ AS2 (subi,r28,lo8(-62)) CR_TAB |
156 |
+ AS2 (sbci,r29,hi8(-62)) CR_TAB |
157 |
+ AS2 (ld,%A0,Y+) CR_TAB |
158 |
+ AS2 (ld,%B0,Y) CR_TAB |
159 |
+ AS2 (subi,r28,lo8(63)) CR_TAB |
160 |
+ AS2 (sbci,r29,hi8(63)) CR_TAB |
161 |
+ AS2 (subi,r28,lo8(%o1-62)) CR_TAB |
162 |
+ AS2 (sbci,r29,hi8(%o1-62))) |
163 |
+ : (AS2 (adiw,r28,%o1-62) CR_TAB |
164 |
AS2 (ldd,%A0,Y+62) CR_TAB |
165 |
AS2 (ldd,%B0,Y+63) CR_TAB |
166 |
AS2 (sbiw,r28,%o1-62)); |
167 |
|
168 |
- return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB |
169 |
+ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB |
170 |
+ AS2 (sbci,r29,hi8(-%o1)) CR_TAB |
171 |
+ AS2 (ld,%A0,Y+) CR_TAB |
172 |
+ AS2 (ld,%B0,Y) CR_TAB |
173 |
+ AS2 (subi,r28,lo8(1)) CR_TAB |
174 |
+ AS2 (sbci,r29,hi8(1)) CR_TAB |
175 |
+ AS2 (subi,r28,lo8(%o1)) CR_TAB |
176 |
+ AS2 (sbci,r29,hi8(%o1))) |
177 |
+ : (AS2 (subi,r28,lo8(-%o1)) CR_TAB |
178 |
AS2 (sbci,r29,hi8(-%o1)) CR_TAB |
179 |
AS2 (ld,%A0,Y) CR_TAB |
180 |
AS2 (ldd,%B0,Y+1) CR_TAB |
181 |
@@ -2115,12 +2189,23 @@ out_movhi_r_mr (rtx insn, rtx op[], int |
182 |
|
183 |
*l = 4; |
184 |
if (reg_base == reg_dest) |
185 |
- return (AS2 (adiw,r26,%o1) CR_TAB |
186 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1)) CR_TAB |
187 |
+ AS2 (sbci,r27,hi8(-%o1)) CR_TAB |
188 |
+ AS2 (ld,__tmp_reg__,X+) CR_TAB |
189 |
+ AS2 (ld,%B0,X) CR_TAB |
190 |
+ AS2 (mov,%A0,__tmp_reg__)) |
191 |
+ : (AS2 (adiw,r26,%o1) CR_TAB |
192 |
AS2 (ld,__tmp_reg__,X+) CR_TAB |
193 |
AS2 (ld,%B0,X) CR_TAB |
194 |
AS2 (mov,%A0,__tmp_reg__)); |
195 |
|
196 |
- return (AS2 (adiw,r26,%o1) CR_TAB |
197 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1)) CR_TAB |
198 |
+ AS2 (sbci,r27,hi8(-%o1)) CR_TAB |
199 |
+ AS2 (ld,%A0,X+) CR_TAB |
200 |
+ AS2 (ld,%B0,X) CR_TAB |
201 |
+ AS2 (subi,r26,lo8(%o1+1)) CR_TAB |
202 |
+ AS2 (sbci,r27,hi8(%o1+1))) |
203 |
+ : (AS2 (adiw,r26,%o1) CR_TAB |
204 |
AS2 (ld,%A0,X+) CR_TAB |
205 |
AS2 (ld,%B0,X) CR_TAB |
206 |
AS2 (sbiw,r26,%o1+1)); |
207 |
@@ -2129,14 +2214,54 @@ out_movhi_r_mr (rtx insn, rtx op[], int |
208 |
if (reg_base == reg_dest) |
209 |
{ |
210 |
*l = 3; |
211 |
- return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB |
212 |
+ op[2] = XEXP(base, 0); |
213 |
+ |
214 |
+ if(REGNO(op[2]) == REG_Y) |
215 |
+ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
216 |
+ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB |
217 |
+ AS2 (ld,__tmp_reg__,Y+) CR_TAB |
218 |
+ AS2 (ld,%B0,Y) CR_TAB |
219 |
+ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB |
220 |
+ AS2 (subi,%B2,hi8(%o1+1)) CR_TAB |
221 |
+ AS2 (mov,%A0,__tmp_reg__)) |
222 |
+ : (AS2 (ldd,__tmp_reg__,%A1) CR_TAB |
223 |
+ AS2 (ldd,%B0,%B1) CR_TAB |
224 |
+ AS2 (mov,%A0,__tmp_reg__)); |
225 |
+ if(REGNO(op[2]) == REG_Z) |
226 |
+ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
227 |
+ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB |
228 |
+ AS2 (ld,__tmp_reg__,Z+) CR_TAB |
229 |
+ AS2 (ld,%B0,Z) CR_TAB |
230 |
+ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB |
231 |
+ AS2 (subi,%B2,hi8(%o1+1)) CR_TAB |
232 |
+ AS2 (mov,%A0,__tmp_reg__)) |
233 |
+ : (AS2 (ldd,__tmp_reg__,%A1) CR_TAB |
234 |
AS2 (ldd,%B0,%B1) CR_TAB |
235 |
AS2 (mov,%A0,__tmp_reg__)); |
236 |
} |
237 |
- |
238 |
*l = 2; |
239 |
- return (AS2 (ldd,%A0,%A1) CR_TAB |
240 |
+ |
241 |
+ op[2] = XEXP(base, 0); |
242 |
+ |
243 |
+ if(REGNO(op[2]) == REG_Y) |
244 |
+ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
245 |
+ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB |
246 |
+ AS2 (ld,%A0,Y+) CR_TAB |
247 |
+ AS2 (ld,%B0,Y) CR_TAB |
248 |
+ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB |
249 |
+ AS2 (subi,%B2,hi8(%o1+1))) |
250 |
+ : (AS2 (ldd,%A0,%A1) CR_TAB |
251 |
+ AS2 (ldd,%B0,%B1)); |
252 |
+ if(REGNO(op[2]) == REG_Z) |
253 |
+ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
254 |
+ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB |
255 |
+ AS2 (ld,%A0,Z+) CR_TAB |
256 |
+ AS2 (ld,%B0,Z) CR_TAB |
257 |
+ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB |
258 |
+ AS2 (subi,%B2,hi8(%o1+1))) |
259 |
+ : (AS2 (ldd,%A0,%A1) CR_TAB |
260 |
AS2 (ldd,%B0,%B1)); |
261 |
+ |
262 |
} |
263 |
else if (GET_CODE (base) == PRE_DEC) /* (--R) */ |
264 |
{ |
265 |
@@ -2148,7 +2273,13 @@ out_movhi_r_mr (rtx insn, rtx op[], int |
266 |
if (REGNO (XEXP (base, 0)) == REG_X) |
267 |
{ |
268 |
*l = 4; |
269 |
- return (AS2 (sbiw,r26,2) CR_TAB |
270 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(2)) CR_TAB |
271 |
+ AS2 (sbci,r27,hi8(2)) CR_TAB |
272 |
+ AS2 (ld,%A0,X+) CR_TAB |
273 |
+ AS2 (ld,%B0,X) CR_TAB |
274 |
+ AS2 (subi,r26,lo8(1)) CR_TAB |
275 |
+ AS2 (sbci,r27,hi8(1))) |
276 |
+ : (AS2 (sbiw,r26,2) CR_TAB |
277 |
AS2 (ld,%A0,X+) CR_TAB |
278 |
AS2 (ld,%B0,X) CR_TAB |
279 |
AS2 (sbiw,r26,1)); |
280 |
@@ -2156,7 +2287,16 @@ out_movhi_r_mr (rtx insn, rtx op[], int |
281 |
else |
282 |
{ |
283 |
*l = 3; |
284 |
- return (AS2 (sbiw,%r1,2) CR_TAB |
285 |
+ //FIXME:check the code once again for AVR_TINY |
286 |
+ return AVR_TINY ? (AS2 (subi,%A1,lo8(3)) CR_TAB |
287 |
+ AS2 (sbci,%B1,hi8(3)) CR_TAB |
288 |
+ AS2 (ld,%A0,%p1) CR_TAB |
289 |
+ AS2 (subi,%A1,lo8(-1)) CR_TAB |
290 |
+ AS2 (sbci,%B1,hi8(-1)) CR_TAB |
291 |
+ AS2 (ld,%B0,%p1) CR_TAB |
292 |
+ AS2 (subi,%A1,lo8(1)) CR_TAB |
293 |
+ AS2 (sbci,%B1,hi8(1))) |
294 |
+ : (AS2 (sbiw,%r1,2) CR_TAB |
295 |
AS2 (ld,%A0,%p1) CR_TAB |
296 |
AS2 (ldd,%B0,%p1+1)); |
297 |
} |
298 |
@@ -2212,13 +2352,23 @@ out_movsi_r_mr (rtx insn, rtx op[], int |
299 |
{ |
300 |
if (reg_dest == REG_X) |
301 |
/* "ld r26,-X" is undefined */ |
302 |
- return *l=7, (AS2 (adiw,r26,3) CR_TAB |
303 |
+ return *l=7, AVR_TINY ? (AS2 (subi,r26,lo8(-3)) CR_TAB |
304 |
+ AS2 (sbci,r27,hi8(-3)) CR_TAB |
305 |
+ AS2 (ld,r29,X) CR_TAB |
306 |
+ AS2 (ld,r28,-X) CR_TAB |
307 |
+ AS2 (ld,__tmp_reg__,-X) CR_TAB |
308 |
+ AS2 (subi,r26,lo8(1)) CR_TAB |
309 |
+ AS2 (sbci,r27,hi8(1)) CR_TAB |
310 |
+ AS2 (ld,r26,X) CR_TAB |
311 |
+ AS2 (mov,r27,__tmp_reg__)) |
312 |
+ : (AS2 (adiw,r26,3) CR_TAB |
313 |
AS2 (ld,r29,X) CR_TAB |
314 |
AS2 (ld,r28,-X) CR_TAB |
315 |
AS2 (ld,__tmp_reg__,-X) CR_TAB |
316 |
AS2 (sbiw,r26,1) CR_TAB |
317 |
AS2 (ld,r26,X) CR_TAB |
318 |
AS2 (mov,r27,__tmp_reg__)); |
319 |
+ |
320 |
else if (reg_dest == REG_X - 2) |
321 |
return *l=5, (AS2 (ld,%A0,X+) CR_TAB |
322 |
AS2 (ld,%B0,X+) CR_TAB |
323 |
@@ -2231,7 +2381,13 @@ out_movsi_r_mr (rtx insn, rtx op[], int |
324 |
AS2 (ld,%C0,X+) CR_TAB |
325 |
AS2 (ld,%D0,X)); |
326 |
else |
327 |
- return *l=5, (AS2 (ld,%A0,X+) CR_TAB |
328 |
+ return *l=5, AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB |
329 |
+ AS2 (ld,%B0,X+) CR_TAB |
330 |
+ AS2 (ld,%C0,X+) CR_TAB |
331 |
+ AS2 (ld,%D0,X) CR_TAB |
332 |
+ AS2 (subi,r26,lo8(3)) CR_TAB |
333 |
+ AS2 (sbci,r27,hi8(3))) |
334 |
+ : (AS2 (ld,%A0,X+) CR_TAB |
335 |
AS2 (ld,%B0,X+) CR_TAB |
336 |
AS2 (ld,%C0,X+) CR_TAB |
337 |
AS2 (ld,%D0,X) CR_TAB |
338 |
@@ -2240,22 +2396,97 @@ out_movsi_r_mr (rtx insn, rtx op[], int |
339 |
else |
340 |
{ |
341 |
if (reg_dest == reg_base) |
342 |
- return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB |
343 |
+ { |
344 |
+ if(reg_base == REG_Y) |
345 |
+ return *l=5, AVR_TINY ? (AS2 (subi,r28,lo8(-3)) CR_TAB |
346 |
+ AS2 (sbci,r29,hi8(-3)) CR_TAB |
347 |
+ AS2 (ld,%D0,Y) CR_TAB |
348 |
+ AS2 (ld,%C0,-Y) CR_TAB |
349 |
+ AS2 (subi,r28,lo8(1)) CR_TAB |
350 |
+ AS2 (sbci,r29,hi8(1)) CR_TAB |
351 |
+ AS2 (ld,__tmp_reg__,%1) CR_TAB |
352 |
+ AS2 (subi,r28,lo8(1)) CR_TAB |
353 |
+ AS2 (sbci,r29,hi8(1)) CR_TAB |
354 |
+ AS2 (ld,%A0,%1) CR_TAB |
355 |
+ AS2 (mov,%B0,__tmp_reg__)) |
356 |
+ : (AS2 (ldd,%D0,%1+3) CR_TAB |
357 |
+ AS2 (ldd,%C0,%1+2) CR_TAB |
358 |
+ AS2 (ldd,__tmp_reg__,%1+1) CR_TAB |
359 |
+ AS2 (ld,%A0,%1) CR_TAB |
360 |
+ AS2 (mov,%B0,__tmp_reg__)); |
361 |
+ if(reg_base == REG_Z) |
362 |
+ return *l=5, AVR_TINY ? (AS2 (subi,r30,lo8(-3)) CR_TAB |
363 |
+ AS2 (sbci,r31,hi8(-3)) CR_TAB |
364 |
+ AS2 (ld,%D0,Z) CR_TAB |
365 |
+ AS2 (ld,%C0,-Z) CR_TAB |
366 |
+ AS2 (subi,r30,lo8(1)) CR_TAB |
367 |
+ AS2 (sbci,r31,hi8(1)) CR_TAB |
368 |
+ AS2 (ld,__tmp_reg__,%1) CR_TAB |
369 |
+ AS2 (subi,r30,lo8(1)) CR_TAB |
370 |
+ AS2 (sbci,r31,hi8(1)) CR_TAB |
371 |
+ AS2 (ld,%A0,%1) CR_TAB |
372 |
+ AS2 (mov,%B0,__tmp_reg__)) |
373 |
+ : (AS2 (ldd,%D0,%1+3) CR_TAB |
374 |
AS2 (ldd,%C0,%1+2) CR_TAB |
375 |
AS2 (ldd,__tmp_reg__,%1+1) CR_TAB |
376 |
AS2 (ld,%A0,%1) CR_TAB |
377 |
AS2 (mov,%B0,__tmp_reg__)); |
378 |
+ } |
379 |
+ |
380 |
else if (reg_base == reg_dest + 2) |
381 |
- return *l=5, (AS2 (ld ,%A0,%1) CR_TAB |
382 |
+ { |
383 |
+ if(reg_base == REG_Y) |
384 |
+ return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Y+) CR_TAB |
385 |
+ AS2 (ld,%B0,Y+) CR_TAB |
386 |
+ AS2 (ld,__tmp_reg__,Y+) CR_TAB |
387 |
+ AS2 (ld,%D0,Y) CR_TAB |
388 |
+ AS2 (subi,r28,lo8(3)) CR_TAB |
389 |
+ AS2 (sbci,r29,hi8(3)) CR_TAB |
390 |
+ AS2 (mov,%C0,__tmp_reg__)) |
391 |
+ : (AS2 (ld ,%A0,%1) CR_TAB |
392 |
+ AS2 (ldd,%B0,%1+1) CR_TAB |
393 |
+ AS2 (ldd,__tmp_reg__,%1+2) CR_TAB |
394 |
+ AS2 (ldd,%D0,%1+3) CR_TAB |
395 |
+ AS2 (mov,%C0,__tmp_reg__)); |
396 |
+ if(reg_base == REG_Z) |
397 |
+ return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Z+) CR_TAB |
398 |
+ AS2 (ld,%B0,Z+) CR_TAB |
399 |
+ AS2 (ld,__tmp_reg__,Z+) CR_TAB |
400 |
+ AS2 (ld,%D0,Z) CR_TAB |
401 |
+ AS2 (subi,r30,lo8(3)) CR_TAB |
402 |
+ AS2 (sbci,r31,hi8(3)) CR_TAB |
403 |
+ AS2 (mov,%C0,__tmp_reg__)) |
404 |
+ : (AS2 (ld ,%A0,%1) CR_TAB |
405 |
AS2 (ldd,%B0,%1+1) CR_TAB |
406 |
AS2 (ldd,__tmp_reg__,%1+2) CR_TAB |
407 |
AS2 (ldd,%D0,%1+3) CR_TAB |
408 |
AS2 (mov,%C0,__tmp_reg__)); |
409 |
+ } |
410 |
else |
411 |
- return *l=4, (AS2 (ld ,%A0,%1) CR_TAB |
412 |
+ { |
413 |
+ if(reg_base == REG_Y) |
414 |
+ return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Y+) CR_TAB |
415 |
+ AS2 (ld,%B0,Y+) CR_TAB |
416 |
+ AS2 (ld,%C0,Y+) CR_TAB |
417 |
+ AS2 (ld,%D0,Y) CR_TAB |
418 |
+ AS2 (subi,r28,lo8(3)) CR_TAB |
419 |
+ AS2 (sbci,r29,hi8(3))) |
420 |
+ : (AS2 (ld ,%A0,%1) CR_TAB |
421 |
AS2 (ldd,%B0,%1+1) CR_TAB |
422 |
AS2 (ldd,%C0,%1+2) CR_TAB |
423 |
AS2 (ldd,%D0,%1+3)); |
424 |
+ if(reg_base == REG_Z) |
425 |
+ return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Z+) CR_TAB |
426 |
+ AS2 (ld,%B0,Z+) CR_TAB |
427 |
+ AS2 (ld,%C0,Z+) CR_TAB |
428 |
+ AS2 (ld,%D0,Z) CR_TAB |
429 |
+ AS2 (subi,r30,lo8(3)) CR_TAB |
430 |
+ AS2 (sbci,r31,hi8(3))) |
431 |
+ : (AS2 (ld ,%A0,%1) CR_TAB |
432 |
+ AS2 (ldd,%B0,%1+1) CR_TAB |
433 |
+ AS2 (ldd,%C0,%1+2) CR_TAB |
434 |
+ AS2 (ldd,%D0,%1+3)); |
435 |
+ } |
436 |
} |
437 |
} |
438 |
else if (GET_CODE (base) == PLUS) /* (R + i) */ |
439 |
@@ -2268,14 +2499,36 @@ out_movsi_r_mr (rtx insn, rtx op[], int |
440 |
fatal_insn ("incorrect insn:",insn); |
441 |
|
442 |
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))) |
443 |
- return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB |
444 |
+ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-60))) CR_TAB |
445 |
+ AS2 (sbci,r29,hi8(-(%o1-60))) CR_TAB |
446 |
+ AS2 (subi,r28,lo8(-60)) CR_TAB |
447 |
+ AS2 (sbci,r29,hi8(-60)) CR_TAB |
448 |
+ AS2 (ld,%A0,Y+) CR_TAB |
449 |
+ AS2 (ld,%B0,Y+) CR_TAB |
450 |
+ AS2 (ld,%C0,Y+) CR_TAB |
451 |
+ AS2 (ld,%D0,Y) CR_TAB |
452 |
+ AS2 (subi,r28,lo8(63)) CR_TAB |
453 |
+ AS2 (sbci,r29,hi8(63)) CR_TAB |
454 |
+ AS2 (subi,r28,lo8(%o1-60)) CR_TAB |
455 |
+ AS2 (sbci,r29,hi8(%o1-60))) |
456 |
+ : (AS2 (adiw,r28,%o1-60) CR_TAB |
457 |
AS2 (ldd,%A0,Y+60) CR_TAB |
458 |
AS2 (ldd,%B0,Y+61) CR_TAB |
459 |
AS2 (ldd,%C0,Y+62) CR_TAB |
460 |
AS2 (ldd,%D0,Y+63) CR_TAB |
461 |
AS2 (sbiw,r28,%o1-60)); |
462 |
|
463 |
- return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB |
464 |
+ return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB |
465 |
+ AS2 (sbci,r29,hi8(-%o1)) CR_TAB |
466 |
+ AS2 (ld,%A0,Y+) CR_TAB |
467 |
+ AS2 (ld,%B0,Y+) CR_TAB |
468 |
+ AS2 (ld,%C0,Y+) CR_TAB |
469 |
+ AS2 (ld,%D0,Y) CR_TAB |
470 |
+ AS2 (subi,r28,lo8(3)) CR_TAB |
471 |
+ AS2 (sbci,r29,hi8(3)) CR_TAB |
472 |
+ AS2 (subi,r28,lo8(%o1)) CR_TAB |
473 |
+ AS2 (sbci,r29,hi8(%o1))) |
474 |
+ : (AS2 (subi,r28,lo8(-%o1)) CR_TAB |
475 |
AS2 (sbci,r29,hi8(-%o1)) CR_TAB |
476 |
AS2 (ld,%A0,Y) CR_TAB |
477 |
AS2 (ldd,%B0,Y+1) CR_TAB |
478 |
@@ -2293,7 +2546,16 @@ out_movsi_r_mr (rtx insn, rtx op[], int |
479 |
{ |
480 |
*l = 7; |
481 |
/* "ld r26,-X" is undefined */ |
482 |
- return (AS2 (adiw,r26,%o1+3) CR_TAB |
483 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1+3))) CR_TAB |
484 |
+ AS2 (sbci,r27,hi8(-(%o1+3))) CR_TAB |
485 |
+ AS2 (ld,r29,X) CR_TAB |
486 |
+ AS2 (ld,r28,-X) CR_TAB |
487 |
+ AS2 (ld,__tmp_reg__,-X) CR_TAB |
488 |
+ AS2 (subi,r26,lo8(1)) CR_TAB |
489 |
+ AS2 (sbci,r27,hi8(1)) CR_TAB |
490 |
+ AS2 (ld,r26,X) CR_TAB |
491 |
+ AS2 (mov,r27,__tmp_reg__)) |
492 |
+ : (AS2 (adiw,r26,%o1+3) CR_TAB |
493 |
AS2 (ld,r29,X) CR_TAB |
494 |
AS2 (ld,r28,-X) CR_TAB |
495 |
AS2 (ld,__tmp_reg__,-X) CR_TAB |
496 |
@@ -2303,14 +2565,29 @@ out_movsi_r_mr (rtx insn, rtx op[], int |
497 |
} |
498 |
*l = 6; |
499 |
if (reg_dest == REG_X - 2) |
500 |
- return (AS2 (adiw,r26,%o1) CR_TAB |
501 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB |
502 |
+ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB |
503 |
+ AS2 (ld,r24,X+) CR_TAB |
504 |
+ AS2 (ld,r25,X+) CR_TAB |
505 |
+ AS2 (ld,__tmp_reg__,X+) CR_TAB |
506 |
+ AS2 (ld,r27,X) CR_TAB |
507 |
+ AS2 (mov,r26,__tmp_reg__)) |
508 |
+ : (AS2 (adiw,r26,%o1) CR_TAB |
509 |
AS2 (ld,r24,X+) CR_TAB |
510 |
AS2 (ld,r25,X+) CR_TAB |
511 |
AS2 (ld,__tmp_reg__,X+) CR_TAB |
512 |
AS2 (ld,r27,X) CR_TAB |
513 |
AS2 (mov,r26,__tmp_reg__)); |
514 |
|
515 |
- return (AS2 (adiw,r26,%o1) CR_TAB |
516 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB |
517 |
+ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB |
518 |
+ AS2 (ld,%A0,X+) CR_TAB |
519 |
+ AS2 (ld,%B0,X+) CR_TAB |
520 |
+ AS2 (ld,%C0,X+) CR_TAB |
521 |
+ AS2 (ld,%D0,X) CR_TAB |
522 |
+ AS2 (subi,r26,lo8(%o1+3)) CR_TAB |
523 |
+ AS2 (sbci,r27,hi8(%o1+3))) |
524 |
+ : (AS2 (adiw,r26,%o1) CR_TAB |
525 |
AS2 (ld,%A0,X+) CR_TAB |
526 |
AS2 (ld,%B0,X+) CR_TAB |
527 |
AS2 (ld,%C0,X+) CR_TAB |
528 |
@@ -2318,18 +2595,99 @@ out_movsi_r_mr (rtx insn, rtx op[], int |
529 |
AS2 (sbiw,r26,%o1+3)); |
530 |
} |
531 |
if (reg_dest == reg_base) |
532 |
- return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB |
533 |
+ { |
534 |
+ op[2] = XEXP(base, 0); |
535 |
+ |
536 |
+ if(REGNO(op[2]) == REG_Y) |
537 |
+ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB |
538 |
+ AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB |
539 |
+ AS2 (ld,%D0,-Y) CR_TAB |
540 |
+ AS2 (ld,%C0,-Y) CR_TAB |
541 |
+ AS2 (ld,__tmp_reg__,-Y) CR_TAB |
542 |
+ AS2 (ld,%A0,-Y) CR_TAB |
543 |
+ AS2 (subi,%A2,lo8(%o1)) CR_TAB |
544 |
+ AS2 (sbci,%B2,hi8(%o1)) CR_TAB |
545 |
+ AS2 (mov,%B0,__tmp_reg__)) |
546 |
+ : (AS2 (ldd,%D0,%D1) CR_TAB |
547 |
+ AS2 (ldd,%C0,%C1) CR_TAB |
548 |
+ AS2 (ldd,__tmp_reg__,%B1) CR_TAB |
549 |
+ AS2 (ldd,%A0,%A1) CR_TAB |
550 |
+ AS2 (mov,%B0,__tmp_reg__)); |
551 |
+ if(REGNO(op[2]) == REG_Z) |
552 |
+ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB |
553 |
+ AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB |
554 |
+ AS2 (ld,%D0,-Z) CR_TAB |
555 |
+ AS2 (ld,%C0,-Z) CR_TAB |
556 |
+ AS2 (ld,__tmp_reg__,-Z) CR_TAB |
557 |
+ AS2 (ld,%A0,-Z) CR_TAB |
558 |
+ AS2 (subi,%A2,lo8(%o1)) CR_TAB |
559 |
+ AS2 (sbci,%B2,hi8(%o1)) CR_TAB |
560 |
+ AS2 (mov,%B0,__tmp_reg__)) |
561 |
+ : (AS2 (ldd,%D0,%D1) CR_TAB |
562 |
AS2 (ldd,%C0,%C1) CR_TAB |
563 |
AS2 (ldd,__tmp_reg__,%B1) CR_TAB |
564 |
AS2 (ldd,%A0,%A1) CR_TAB |
565 |
AS2 (mov,%B0,__tmp_reg__)); |
566 |
+ } |
567 |
else if (reg_dest == reg_base - 2) |
568 |
- return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB |
569 |
+ { |
570 |
+ op[2] = XEXP(base, 0); |
571 |
+ |
572 |
+ if(REGNO(op[2]) == REG_Y) |
573 |
+ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
574 |
+ AS2 (subi,%B2,hi8(-(%o1))) CR_TAB |
575 |
+ AS2 (ld,%A0,Y+) CR_TAB |
576 |
+ AS2 (ld,%B0,Y+) CR_TAB |
577 |
+ AS2 (ld,__tmp_reg__,Y+) CR_TAB |
578 |
+ AS2 (ld,%D0,Y) CR_TAB |
579 |
+ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB |
580 |
+ AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB |
581 |
+ AS2 (mov,%C0,__tmp_reg__)) |
582 |
+ : (AS2 (ldd,%A0,%A1) CR_TAB |
583 |
AS2 (ldd,%B0,%B1) CR_TAB |
584 |
AS2 (ldd,__tmp_reg__,%C1) CR_TAB |
585 |
AS2 (ldd,%D0,%D1) CR_TAB |
586 |
AS2 (mov,%C0,__tmp_reg__)); |
587 |
- return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB |
588 |
+ if(REGNO(op[2]) == REG_Z) |
589 |
+ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
590 |
+ AS2 (subi,%B2,hi8(-(%o1))) CR_TAB |
591 |
+ AS2 (ld,%A0,Z+) CR_TAB |
592 |
+ AS2 (ld,%B0,Z+) CR_TAB |
593 |
+ AS2 (ld,__tmp_reg__,Z+) CR_TAB |
594 |
+ AS2 (ld,%D0,Z) CR_TAB |
595 |
+ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB |
596 |
+ AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB |
597 |
+ AS2 (mov,%C0,__tmp_reg__)) |
598 |
+ : (AS2 (ldd,%A0,%A1) CR_TAB |
599 |
+ AS2 (ldd,%B0,%B1) CR_TAB |
600 |
+ AS2 (ldd,__tmp_reg__,%C1) CR_TAB |
601 |
+ AS2 (ldd,%D0,%D1) CR_TAB |
602 |
+ AS2 (mov,%C0,__tmp_reg__)); |
603 |
+ } |
604 |
+ op[2] = XEXP(base, 0); |
605 |
+ if(REGNO(op[2]) == REG_Y) |
606 |
+ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
607 |
+ AS2 (subi,%B2,hi8(-(%o1))) CR_TAB |
608 |
+ AS2 (ld,%A0,Y+) CR_TAB |
609 |
+ AS2 (ld,%B0,Y+) CR_TAB |
610 |
+ AS2 (ld,%C0,Y+) CR_TAB |
611 |
+ AS2 (ld,%D0,Y) CR_TAB |
612 |
+ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB |
613 |
+ AS2 (sbci,%B2,hi8(%o1+3))) |
614 |
+ : (AS2 (ldd,%A0,%A1) CR_TAB |
615 |
+ AS2 (ldd,%B0,%B1) CR_TAB |
616 |
+ AS2 (ldd,%C0,%C1) CR_TAB |
617 |
+ AS2 (ldd,%D0,%D1)); |
618 |
+ if(REGNO(op[2]) == REG_Z) |
619 |
+ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB |
620 |
+ AS2 (subi,%B2,hi8(-(%o1))) CR_TAB |
621 |
+ AS2 (ld,%A0,Z+) CR_TAB |
622 |
+ AS2 (ld,%B0,Z+) CR_TAB |
623 |
+ AS2 (ld,%C0,Z+) CR_TAB |
624 |
+ AS2 (ld,%D0,Z) CR_TAB |
625 |
+ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB |
626 |
+ AS2 (sbci,%B2,hi8(%o1+3))) |
627 |
+ : (AS2 (ldd,%A0,%A1) CR_TAB |
628 |
AS2 (ldd,%B0,%B1) CR_TAB |
629 |
AS2 (ldd,%C0,%C1) CR_TAB |
630 |
AS2 (ldd,%D0,%D1)); |
631 |
@@ -2380,14 +2738,30 @@ out_movsi_mr_r (rtx insn, rtx op[], int |
632 |
{ |
633 |
/* "st X+,r26" is undefined */ |
634 |
if (reg_unused_after (insn, base)) |
635 |
- return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB |
636 |
+ return *l=6, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB |
637 |
+ AS2 (st,X,r26) CR_TAB |
638 |
+ AS2 (subi,r26,lo8(-1)) CR_TAB |
639 |
+ AS2 (sbci,r27,hi8(-1)) CR_TAB |
640 |
+ AS2 (st,X+,__tmp_reg__) CR_TAB |
641 |
+ AS2 (st,X+,r28) CR_TAB |
642 |
+ AS2 (st,X,r29)) |
643 |
+ : (AS2 (mov,__tmp_reg__,r27) CR_TAB |
644 |
AS2 (st,X,r26) CR_TAB |
645 |
AS2 (adiw,r26,1) CR_TAB |
646 |
AS2 (st,X+,__tmp_reg__) CR_TAB |
647 |
AS2 (st,X+,r28) CR_TAB |
648 |
AS2 (st,X,r29)); |
649 |
else |
650 |
- return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB |
651 |
+ return *l=7, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB |
652 |
+ AS2 (st,X,r26) CR_TAB |
653 |
+ AS2 (subi,r26,lo8(-1)) CR_TAB |
654 |
+ AS2 (sbci,r27,hi8(-1)) CR_TAB |
655 |
+ AS2 (st,X+,__tmp_reg__) CR_TAB |
656 |
+ AS2 (st,X+,r28) CR_TAB |
657 |
+ AS2 (st,X,r29) CR_TAB |
658 |
+ AS2 (subi,r26,lo8(3)) CR_TAB |
659 |
+ AS2 (sbci,r27,hi8(3))) |
660 |
+ : (AS2 (mov,__tmp_reg__,r27) CR_TAB |
661 |
AS2 (st,X,r26) CR_TAB |
662 |
AS2 (adiw,r26,1) CR_TAB |
663 |
AS2 (st,X+,__tmp_reg__) CR_TAB |
664 |
@@ -2406,7 +2780,16 @@ out_movsi_mr_r (rtx insn, rtx op[], int |
665 |
AS2 (st,%0,__tmp_reg__) CR_TAB |
666 |
AS1 (clr,__zero_reg__)); |
667 |
else |
668 |
- return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB |
669 |
+ return *l=8, AVR_TINY ? (AS2 (mov,__zero_reg__,%C1) CR_TAB |
670 |
+ AS2 (mov,__tmp_reg__,%D1) CR_TAB |
671 |
+ AS2 (st,%0+,%A1) CR_TAB |
672 |
+ AS2 (st,%0+,%B1) CR_TAB |
673 |
+ AS2 (st,%0+,__zero_reg__) CR_TAB |
674 |
+ AS2 (st,%0,__tmp_reg__) CR_TAB |
675 |
+ AS1 (clr,__zero_reg__) CR_TAB |
676 |
+ AS2 (subi,r26,lo8(3)) CR_TAB |
677 |
+ AS2 (sbci,r27,hi8(3))) |
678 |
+ : (AS2 (mov,__zero_reg__,%C1) CR_TAB |
679 |
AS2 (mov,__tmp_reg__,%D1) CR_TAB |
680 |
AS2 (st,%0+,%A1) CR_TAB |
681 |
AS2 (st,%0+,%B1) CR_TAB |
682 |
@@ -2415,18 +2798,44 @@ out_movsi_mr_r (rtx insn, rtx op[], int |
683 |
AS1 (clr,__zero_reg__) CR_TAB |
684 |
AS2 (sbiw,r26,3)); |
685 |
} |
686 |
- return *l=5, (AS2 (st,%0+,%A1) CR_TAB |
687 |
+ return *l=5, AVR_TINY ? (AS2 (st,%0+,%A1) CR_TAB |
688 |
+ AS2 (st,%0+,%B1) CR_TAB |
689 |
+ AS2 (st,%0+,%C1) CR_TAB |
690 |
+ AS2 (st,%0,%D1) CR_TAB |
691 |
+ AS2 (subi,r26,lo8(3)) CR_TAB |
692 |
+ AS2 (sbci,r27,hi8(3))) |
693 |
+ : (AS2 (st,%0+,%A1) CR_TAB |
694 |
AS2 (st,%0+,%B1) CR_TAB |
695 |
AS2 (st,%0+,%C1) CR_TAB |
696 |
AS2 (st,%0,%D1) CR_TAB |
697 |
AS2 (sbiw,r26,3)); |
698 |
} |
699 |
else |
700 |
- return *l=4, (AS2 (st,%0,%A1) CR_TAB |
701 |
+ { |
702 |
+ if(reg_base == REG_Y) |
703 |
+ return *l=4, AVR_TINY ? (AS2 (st,Y+,%A1) CR_TAB |
704 |
+ AS2 (st,Y+,%B1) CR_TAB |
705 |
+ AS2 (st,Y+,%C1) CR_TAB |
706 |
+ AS2 (st,Y,%D1) CR_TAB |
707 |
+ AS2 (subi,r28,lo8(3)) CR_TAB |
708 |
+ AS2 (sbci,r29,lo8(3))) |
709 |
+ : (AS2 (st,%0,%A1) CR_TAB |
710 |
+ AS2 (std,%0+1,%B1) CR_TAB |
711 |
+ AS2 (std,%0+2,%C1) CR_TAB |
712 |
+ AS2 (std,%0+3,%D1)); |
713 |
+ if(reg_base == REG_Z) |
714 |
+ return *l=4, AVR_TINY ? (AS2 (st,Z+,%A1) CR_TAB |
715 |
+ AS2 (st,Z+,%B1) CR_TAB |
716 |
+ AS2 (st,Z+,%C1) CR_TAB |
717 |
+ AS2 (st,Z,%D1) CR_TAB |
718 |
+ AS2 (subi,r30,lo8(3)) CR_TAB |
719 |
+ AS2 (sbci,r31,lo8(3))) |
720 |
+ : (AS2 (st,%0,%A1) CR_TAB |
721 |
AS2 (std,%0+1,%B1) CR_TAB |
722 |
AS2 (std,%0+2,%C1) CR_TAB |
723 |
AS2 (std,%0+3,%D1)); |
724 |
} |
725 |
+ } |
726 |
else if (GET_CODE (base) == PLUS) /* (R + i) */ |
727 |
{ |
728 |
int disp = INTVAL (XEXP (base, 1)); |
729 |
@@ -2437,14 +2846,35 @@ out_movsi_mr_r (rtx insn, rtx op[], int |
730 |
fatal_insn ("incorrect insn:",insn); |
731 |
|
732 |
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))) |
733 |
- return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB |
734 |
+ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-60))) CR_TAB |
735 |
+ AS2 (sbci,r29,hi8(-(%o0-60))) CR_TAB |
736 |
+ AS2 (subi,r28,lo8(-60)) CR_TAB |
737 |
+ AS2 (sbci,r29,lo8(-60)) CR_TAB |
738 |
+ AS2 (st,Y+,%A1) CR_TAB |
739 |
+ AS2 (st,Y+,%B1) CR_TAB |
740 |
+ AS2 (st,Y+,%C1) CR_TAB |
741 |
+ AS2 (st,Y,%D1) CR_TAB |
742 |
+ AS2 (subi,r28,lo8(63)) CR_TAB |
743 |
+ AS2 (sbci,r29,lo8(63)) CR_TAB |
744 |
+ AS2 (subi,r28,lo8(%o0-60)) CR_TAB |
745 |
+ AS2 (sbci,r29,hi8(%o0-60))) |
746 |
+ : (AS2 (adiw,r28,%o0-60) CR_TAB |
747 |
AS2 (std,Y+60,%A1) CR_TAB |
748 |
AS2 (std,Y+61,%B1) CR_TAB |
749 |
AS2 (std,Y+62,%C1) CR_TAB |
750 |
AS2 (std,Y+63,%D1) CR_TAB |
751 |
AS2 (sbiw,r28,%o0-60)); |
752 |
- |
753 |
- return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
754 |
+ return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
755 |
+ AS2 (sbci,r29,hi8(-%o0)) CR_TAB |
756 |
+ AS2 (st,Y+,%A1) CR_TAB |
757 |
+ AS2 (st,Y+,%B1) CR_TAB |
758 |
+ AS2 (st,Y+,%C1) CR_TAB |
759 |
+ AS2 (st,Y,%D1) CR_TAB |
760 |
+ AS2 (subi,r28,lo8(3)) CR_TAB |
761 |
+ AS2 (sbci,r29,lo8(3)) CR_TAB |
762 |
+ AS2 (subi,r28,lo8(%o0)) CR_TAB |
763 |
+ AS2 (sbci,r29,hi8(%o0))) |
764 |
+ : (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
765 |
AS2 (sbci,r29,hi8(-%o0)) CR_TAB |
766 |
AS2 (st,Y,%A1) CR_TAB |
767 |
AS2 (std,Y+1,%B1) CR_TAB |
768 |
@@ -2459,7 +2889,18 @@ out_movsi_mr_r (rtx insn, rtx op[], int |
769 |
if (reg_src == REG_X) |
770 |
{ |
771 |
*l = 9; |
772 |
- return (AS2 (mov,__tmp_reg__,r26) CR_TAB |
773 |
+ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB |
774 |
+ AS2 (mov,__zero_reg__,r27) CR_TAB |
775 |
+ AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
776 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
777 |
+ AS2 (st,X+,__tmp_reg__) CR_TAB |
778 |
+ AS2 (st,X+,__zero_reg__) CR_TAB |
779 |
+ AS2 (st,X+,r28) CR_TAB |
780 |
+ AS2 (st,X,r29) CR_TAB |
781 |
+ AS1 (clr,__zero_reg__) CR_TAB |
782 |
+ AS2 (subi,r26,lo8(%o0+3)) CR_TAB |
783 |
+ AS2 (sbci,r27,hi8(%o0+3))) |
784 |
+ : (AS2 (mov,__tmp_reg__,r26) CR_TAB |
785 |
AS2 (mov,__zero_reg__,r27) CR_TAB |
786 |
AS2 (adiw,r26,%o0) CR_TAB |
787 |
AS2 (st,X+,__tmp_reg__) CR_TAB |
788 |
@@ -2472,7 +2913,18 @@ out_movsi_mr_r (rtx insn, rtx op[], int |
789 |
else if (reg_src == REG_X - 2) |
790 |
{ |
791 |
*l = 9; |
792 |
- return (AS2 (mov,__tmp_reg__,r26) CR_TAB |
793 |
+ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB |
794 |
+ AS2 (mov,__zero_reg__,r27) CR_TAB |
795 |
+ AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
796 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
797 |
+ AS2 (st,X+,r24) CR_TAB |
798 |
+ AS2 (st,X+,r25) CR_TAB |
799 |
+ AS2 (st,X+,__tmp_reg__) CR_TAB |
800 |
+ AS2 (st,X,__zero_reg__) CR_TAB |
801 |
+ AS1 (clr,__zero_reg__) CR_TAB |
802 |
+ AS2 (subi,r26,lo8(%o0+3)) CR_TAB |
803 |
+ AS2 (sbci,r27,hi8(%o0+3))) |
804 |
+ : (AS2 (mov,__tmp_reg__,r26) CR_TAB |
805 |
AS2 (mov,__zero_reg__,r27) CR_TAB |
806 |
AS2 (adiw,r26,%o0) CR_TAB |
807 |
AS2 (st,X+,r24) CR_TAB |
808 |
@@ -2483,14 +2935,46 @@ out_movsi_mr_r (rtx insn, rtx op[], int |
809 |
AS2 (sbiw,r26,%o0+3)); |
810 |
} |
811 |
*l = 6; |
812 |
- return (AS2 (adiw,r26,%o0) CR_TAB |
813 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
814 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
815 |
+ AS2 (st,X+,%A1) CR_TAB |
816 |
+ AS2 (st,X+,%B1) CR_TAB |
817 |
+ AS2 (st,X+,%C1) CR_TAB |
818 |
+ AS2 (st,X,%D1) CR_TAB |
819 |
+ AS2 (subi,r26,lo8(%o0+3)) CR_TAB |
820 |
+ AS2 (sbci,r27,hi8(%o0+3))) |
821 |
+ : (AS2 (adiw,r26,%o0) CR_TAB |
822 |
AS2 (st,X+,%A1) CR_TAB |
823 |
AS2 (st,X+,%B1) CR_TAB |
824 |
AS2 (st,X+,%C1) CR_TAB |
825 |
AS2 (st,X,%D1) CR_TAB |
826 |
AS2 (sbiw,r26,%o0+3)); |
827 |
} |
828 |
- return *l=4, (AS2 (std,%A0,%A1) CR_TAB |
829 |
+ op[2] = XEXP(base, 0); |
830 |
+ if(REGNO(op[2]) == REG_Y) |
831 |
+ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB |
832 |
+ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB |
833 |
+ AS2 (st,Y+,%A1) CR_TAB |
834 |
+ AS2 (st,Y+,%B1) CR_TAB |
835 |
+ AS2 (st,Y+,%C1) CR_TAB |
836 |
+ AS2 (st,Y,%D1) CR_TAB |
837 |
+ AS2 (subi,%A2,lo8(%o0+3)) CR_TAB |
838 |
+ AS2 (sbci,%B2,hi8(%o0+3))) |
839 |
+ : (AS2 (std,%A0,%A1) CR_TAB |
840 |
+ AS2 (std,%B0,%B1) CR_TAB |
841 |
+ AS2 (std,%C0,%C1) CR_TAB |
842 |
+ AS2 (std,%D0,%D1)); |
843 |
+ |
844 |
+ if(REGNO(op[2]) == REG_Z) |
845 |
+ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB |
846 |
+ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB |
847 |
+ AS2 (st,Z+,%A1) CR_TAB |
848 |
+ AS2 (st,Z+,%B1) CR_TAB |
849 |
+ AS2 (st,Z+,%C1) CR_TAB |
850 |
+ AS2 (st,Z,%D1) CR_TAB |
851 |
+ AS2 (subi,%A2,lo8(%o0+3)) CR_TAB |
852 |
+ AS2 (sbci,%B2,hi8(%o0+3))) |
853 |
+ : (AS2 (std,%A0,%A1) CR_TAB |
854 |
AS2 (std,%B0,%B1) CR_TAB |
855 |
AS2 (std,%C0,%C1) CR_TAB |
856 |
AS2 (std,%D0,%D1)); |
857 |
@@ -2707,7 +3191,16 @@ out_movqi_mr_r (rtx insn, rtx op[], int |
858 |
fatal_insn ("incorrect insn:",insn); |
859 |
|
860 |
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))) |
861 |
- return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB |
862 |
+ return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-63))) CR_TAB |
863 |
+ AS2 (sbci,r29,hi8(-(%o0-63))) CR_TAB |
864 |
+ AS2 (subi,r28,lo8(-63)) CR_TAB |
865 |
+ AS2 (sbci,r29,hi8(-63)) CR_TAB |
866 |
+ AS2 (st,Y,%1) CR_TAB |
867 |
+ AS2 (subi,r28,lo8(63)) CR_TAB |
868 |
+ AS2 (sbci,r29,hi8(63)) CR_TAB |
869 |
+ AS2 (subi,r28,lo8(%o0-63)) CR_TAB |
870 |
+ AS2 (sbci,r29,hi8(%o0-63))) |
871 |
+ : (AS2 (adiw,r28,%o0-63) CR_TAB |
872 |
AS2 (std,Y+63,%1) CR_TAB |
873 |
AS2 (sbiw,r28,%o0-63)); |
874 |
|
875 |
@@ -2722,11 +3215,21 @@ out_movqi_mr_r (rtx insn, rtx op[], int |
876 |
if (reg_overlap_mentioned_p (src, XEXP (x, 0))) |
877 |
{ |
878 |
if (reg_unused_after (insn, XEXP (x,0))) |
879 |
- return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB |
880 |
+ return *l = 3, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB |
881 |
+ AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
882 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
883 |
+ AS2 (st,X,__tmp_reg__)) |
884 |
+ : (AS2 (mov,__tmp_reg__,%1) CR_TAB |
885 |
AS2 (adiw,r26,%o0) CR_TAB |
886 |
AS2 (st,X,__tmp_reg__)); |
887 |
|
888 |
- return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB |
889 |
+ return *l = 4, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB |
890 |
+ AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
891 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
892 |
+ AS2 (st,X,__tmp_reg__) CR_TAB |
893 |
+ AS2 (subi,r26,lo8(%o0)) CR_TAB |
894 |
+ AS2 (sbci,r27,hi8(%o0))) |
895 |
+ : (AS2 (mov,__tmp_reg__,%1) CR_TAB |
896 |
AS2 (adiw,r26,%o0) CR_TAB |
897 |
AS2 (st,X,__tmp_reg__) CR_TAB |
898 |
AS2 (sbiw,r26,%o0)); |
899 |
@@ -2734,16 +3237,38 @@ out_movqi_mr_r (rtx insn, rtx op[], int |
900 |
else |
901 |
{ |
902 |
if (reg_unused_after (insn, XEXP (x,0))) |
903 |
- return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB |
904 |
+ return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
905 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
906 |
+ AS2 (st,X,%1)) |
907 |
+ : (AS2 (adiw,r26,%o0) CR_TAB |
908 |
AS2 (st,X,%1)); |
909 |
|
910 |
- return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB |
911 |
+ return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
912 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
913 |
+ AS2 (st,X,%1) CR_TAB |
914 |
+ AS2 (subi,r26,lo8(%o0)) CR_TAB |
915 |
+ AS2 (sbci,r27,hi8(%o0))) |
916 |
+ : (AS2 (adiw,r26,%o0) CR_TAB |
917 |
AS2 (st,X,%1) CR_TAB |
918 |
AS2 (sbiw,r26,%o0)); |
919 |
} |
920 |
} |
921 |
*l = 1; |
922 |
- return AS2 (std,%0,%1); |
923 |
+ op[2] = XEXP(x, 0); |
924 |
+ if(REGNO(op[2]) == REG_Y) |
925 |
+ return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB |
926 |
+ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB |
927 |
+ AS2 (st,Y,%1) CR_TAB |
928 |
+ AS2 (subi,%A2,lo8(%o0)) CR_TAB |
929 |
+ AS2 (sbci,%B2,hi8(%o0))) |
930 |
+ : AS2 (std,%0,%1); |
931 |
+ if(REGNO(op[2]) == REG_Z) |
932 |
+ return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB |
933 |
+ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB |
934 |
+ AS2 (st,Z,%1) CR_TAB |
935 |
+ AS2 (subi,%A2,lo8(%o0)) CR_TAB |
936 |
+ AS2 (sbci,%B2,hi8(%o0))) |
937 |
+ : AS2 (std,%0,%1); |
938 |
} |
939 |
*l = 1; |
940 |
return AS2 (st,%0,%1); |
941 |
@@ -2792,20 +3317,39 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
942 |
{ |
943 |
/* "st X+,r26" and "st -X,r26" are undefined. */ |
944 |
if (!mem_volatile_p && reg_unused_after (insn, src)) |
945 |
- return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB |
946 |
+ return *l=4, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB |
947 |
+ AS2 (st,X,r26) CR_TAB |
948 |
+ AS2 (subi,r26,lo8(-1)) CR_TAB |
949 |
+ AS2 (sbci,r27,hi8(-1)) CR_TAB |
950 |
+ AS2 (st,X,__tmp_reg__)) |
951 |
+ : (AS2 (mov,__tmp_reg__,r27) CR_TAB |
952 |
AS2 (st,X,r26) CR_TAB |
953 |
AS2 (adiw,r26,1) CR_TAB |
954 |
AS2 (st,X,__tmp_reg__)); |
955 |
else |
956 |
{ |
957 |
if (!AVR_XMEGA) |
958 |
- return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB |
959 |
+ return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB |
960 |
+ AS2 (subi,r26,lo8(-1)) CR_TAB |
961 |
+ AS2 (sbci,r27,hi8(-1)) CR_TAB |
962 |
+ AS2 (st,X,__tmp_reg__) CR_TAB |
963 |
+ AS2 (subi,r26,lo8(1)) CR_TAB |
964 |
+ AS2 (sbci,r27,hi8(1)) CR_TAB |
965 |
+ AS2 (st,X,r26)) |
966 |
+ : (AS2 (mov,__tmp_reg__,r27) CR_TAB |
967 |
AS2 (adiw,r26,1) CR_TAB |
968 |
AS2 (st,X,__tmp_reg__) CR_TAB |
969 |
AS2 (sbiw,r26,1) CR_TAB |
970 |
AS2 (st,X,r26)); |
971 |
else |
972 |
- return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB |
973 |
+ return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB |
974 |
+ AS2 (st,X,r26) CR_TAB |
975 |
+ AS2 (subi,r26,lo8(-1)) CR_TAB |
976 |
+ AS2 (sbci,r27,hi8(-1)) CR_TAB |
977 |
+ AS2 (st,X,__tmp_reg__) CR_TAB |
978 |
+ AS2 (subi,r26,lo8(1)) CR_TAB |
979 |
+ AS2 (sbci,r27,hi8(1))) |
980 |
+ : (AS2 (mov,__tmp_reg__,r27) CR_TAB |
981 |
AS2 (st,X,r26) CR_TAB |
982 |
AS2 (adiw,r26,1) CR_TAB |
983 |
AS2 (st,X,__tmp_reg__) CR_TAB |
984 |
@@ -2820,11 +3364,19 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
985 |
else |
986 |
{ |
987 |
if (!AVR_XMEGA) |
988 |
- return *l=3, (AS2 (adiw,r26,1) CR_TAB |
989 |
+ return *l=3, AVR_TINY ? (AS2 (subi,r26,lo8(-1)) CR_TAB |
990 |
+ AS2 (sbci,r27,hi8(-1)) CR_TAB |
991 |
+ AS2 (st,X,%B1) CR_TAB |
992 |
+ AS2 (st,-X,%A1)) |
993 |
+ : (AS2 (adiw,r26,1) CR_TAB |
994 |
AS2 (st,X,%B1) CR_TAB |
995 |
AS2 (st,-X,%A1)); |
996 |
else |
997 |
- return *l=3, (AS2 (st,X+,%A1) CR_TAB |
998 |
+ return *l=3, AVR_TINY ? (AS2 (st,X+,%A1) CR_TAB |
999 |
+ AS2 (st,X,%B1) CR_TAB |
1000 |
+ AS2 (subi,r26,lo8(1)) CR_TAB |
1001 |
+ AS2 (sbci,r27,hi8(1))) |
1002 |
+ : (AS2 (st,X+,%A1) CR_TAB |
1003 |
AS2 (st,X,%B1) CR_TAB |
1004 |
AS2 (sbiw,r26,1)); |
1005 |
} |
1006 |
@@ -2833,13 +3385,41 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1007 |
else |
1008 |
{ |
1009 |
if (!AVR_XMEGA) |
1010 |
- return *l=2, (AS2 (std,%0+1,%B1) CR_TAB |
1011 |
+ { |
1012 |
+ if(reg_base == REG_Y) |
1013 |
+ return *l=2, AVR_TINY ? (AS2 (subi,r28,lo8(-1)) CR_TAB |
1014 |
+ AS2 (sbci,r29,hi8(-1)) CR_TAB |
1015 |
+ AS2 (st,Y,%B1) CR_TAB |
1016 |
+ AS2 (st,-Y,%A1)) |
1017 |
+ : (AS2 (std,%0+1,%B1) CR_TAB |
1018 |
+ AS2 (st,%0,%A1)); |
1019 |
+ if(reg_base == REG_Z) |
1020 |
+ return *l=2, AVR_TINY ? (AS2 (subi,r30,lo8(-1)) CR_TAB |
1021 |
+ AS2 (sbci,r31,hi8(-1)) CR_TAB |
1022 |
+ AS2 (st,Z,%B1) CR_TAB |
1023 |
+ AS2 (st,-Z,%A1)) |
1024 |
+ : (AS2 (std,%0+1,%B1) CR_TAB |
1025 |
AS2 (st,%0,%A1)); |
1026 |
+ } |
1027 |
else |
1028 |
- return *l=2, (AS2 (st,%0,%A1) CR_TAB |
1029 |
+ { |
1030 |
+ if(reg_base == REG_Y) |
1031 |
+ return *l=2, AVR_TINY ? (AS2 (st,Y+,%A1) CR_TAB |
1032 |
+ AS2 (st,Y,%B1) CR_TAB |
1033 |
+ AS2 (subi,r28,lo8(1)) CR_TAB |
1034 |
+ AS2 (sbci,r29,hi8(1))) |
1035 |
+ : (AS2 (st,%0,%A1) CR_TAB |
1036 |
+ AS2 (std,%0+1,%B1)); |
1037 |
+ if(reg_base == REG_Z) |
1038 |
+ return *l=2, AVR_TINY ? (AS2 (st,Z+,%A1) CR_TAB |
1039 |
+ AS2 (st,Z,%B1) CR_TAB |
1040 |
+ AS2 (subi,r30,lo8(1)) CR_TAB |
1041 |
+ AS2 (sbci,r31,hi8(1))) |
1042 |
+ : (AS2 (st,%0,%A1) CR_TAB |
1043 |
AS2 (std,%0+1,%B1)); |
1044 |
} |
1045 |
} |
1046 |
+ } |
1047 |
else if (GET_CODE (base) == PLUS) |
1048 |
{ |
1049 |
int disp = INTVAL (XEXP (base, 1)); |
1050 |
@@ -2852,12 +3432,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1051 |
if (!AVR_XMEGA) |
1052 |
{ |
1053 |
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))) |
1054 |
- return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB |
1055 |
+ return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB |
1056 |
+ AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB |
1057 |
+ AS2 (subi,r28,lo8(-63)) CR_TAB |
1058 |
+ AS2 (sbci,r29,hi8(-63)) CR_TAB |
1059 |
+ AS2 (st,Y,%B1) CR_TAB |
1060 |
+ AS2 (st,-Y,%A1) CR_TAB |
1061 |
+ AS2 (subi,r28,lo8(62)) CR_TAB |
1062 |
+ AS2 (sbci,r29,hi8(62)) CR_TAB |
1063 |
+ AS2 (subi,r28,lo8(%o0-62)) CR_TAB |
1064 |
+ AS2 (sbci,r29,hi8(%o0-62))) |
1065 |
+ : (AS2 (adiw,r28,%o0-62) CR_TAB |
1066 |
AS2 (std,Y+63,%B1) CR_TAB |
1067 |
AS2 (std,Y+62,%A1) CR_TAB |
1068 |
AS2 (sbiw,r28,%o0-62)); |
1069 |
|
1070 |
- return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
1071 |
+ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
1072 |
+ AS2 (sbci,r29,hi8(-%o0)) CR_TAB |
1073 |
+ AS2 (subi,r28,lo8(-1)) CR_TAB |
1074 |
+ AS2 (sbci,r29,hi8(-1)) CR_TAB |
1075 |
+ AS2 (st,Y,%B1) CR_TAB |
1076 |
+ AS2 (st,-Y,%A1) CR_TAB |
1077 |
+ AS2 (subi,r28,lo8(%o0)) CR_TAB |
1078 |
+ AS2 (sbci,r29,hi8(%o0))) |
1079 |
+ : (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
1080 |
AS2 (sbci,r29,hi8(-%o0)) CR_TAB |
1081 |
AS2 (std,Y+1,%B1) CR_TAB |
1082 |
AS2 (st,Y,%A1) CR_TAB |
1083 |
@@ -2867,12 +3465,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1084 |
else |
1085 |
{ |
1086 |
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))) |
1087 |
- return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB |
1088 |
+ return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB |
1089 |
+ AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB |
1090 |
+ AS2 (subi,r28,lo8(-62)) CR_TAB |
1091 |
+ AS2 (sbci,r29,hi8(-62)) CR_TAB |
1092 |
+ AS2 (st,Y+,%A1) CR_TAB |
1093 |
+ AS2 (st,Y,%B1) CR_TAB |
1094 |
+ AS2 (subi,r28,lo8(63)) CR_TAB |
1095 |
+ AS2 (sbci,r29,hi8(63)) CR_TAB |
1096 |
+ AS2 (subi,r28,lo8(%o0-62)) CR_TAB |
1097 |
+ AS2 (sbci,r29,hi8(%o0-62))) |
1098 |
+ : (AS2 (adiw,r28,%o0-62) CR_TAB |
1099 |
AS2 (std,Y+62,%A1) CR_TAB |
1100 |
AS2 (std,Y+63,%B1) CR_TAB |
1101 |
AS2 (sbiw,r28,%o0-62)); |
1102 |
|
1103 |
- return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
1104 |
+ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
1105 |
+ AS2 (sbci,r29,hi8(-%o0)) CR_TAB |
1106 |
+ AS2 (st,Y+,%A1) CR_TAB |
1107 |
+ AS2 (st,Y,%B1) CR_TAB |
1108 |
+ AS2 (subi,r28,lo8(1)) CR_TAB |
1109 |
+ AS2 (sbci,r29,hi8(1)) CR_TAB |
1110 |
+ AS2 (subi,r28,lo8(%o0)) CR_TAB |
1111 |
+ AS2 (sbci,r29,hi8(%o0))) |
1112 |
+ : (AS2 (subi,r28,lo8(-%o0)) CR_TAB |
1113 |
AS2 (sbci,r29,hi8(-%o0)) CR_TAB |
1114 |
AS2 (st,Y,%A1) CR_TAB |
1115 |
AS2 (std,Y+1,%B1) CR_TAB |
1116 |
@@ -2888,7 +3504,16 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1117 |
if (!AVR_XMEGA) |
1118 |
{ |
1119 |
*l = 7; |
1120 |
- return (AS2 (mov,__tmp_reg__,r26) CR_TAB |
1121 |
+ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB |
1122 |
+ AS2 (mov,__zero_reg__,r27) CR_TAB |
1123 |
+ AS2 (subi,r26,lo8(-(%o0+1))) CR_TAB |
1124 |
+ AS2 (sbci,r27,hi8(-(%o0+1))) CR_TAB |
1125 |
+ AS2 (st,X,__zero_reg__) CR_TAB |
1126 |
+ AS2 (st,-X,__tmp_reg__) CR_TAB |
1127 |
+ AS1 (clr,__zero_reg__) CR_TAB |
1128 |
+ AS2 (subi,r26,lo8(%o0)) CR_TAB |
1129 |
+ AS2 (sbci,r27,hi8(%o0))) |
1130 |
+ : (AS2 (mov,__tmp_reg__,r26) CR_TAB |
1131 |
AS2 (mov,__zero_reg__,r27) CR_TAB |
1132 |
AS2 (adiw,r26,%o0+1) CR_TAB |
1133 |
AS2 (st,X,__zero_reg__) CR_TAB |
1134 |
@@ -2899,19 +3524,35 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1135 |
else |
1136 |
{ |
1137 |
*l = 7; |
1138 |
- return (AS2 (mov,__tmp_reg__,r26) CR_TAB |
1139 |
+ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB |
1140 |
AS2 (mov,__zero_reg__,r27) CR_TAB |
1141 |
- AS2 (adiw,r26,%o0) CR_TAB |
1142 |
+ AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
1143 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
1144 |
AS2 (st,X+,__tmp_reg__) CR_TAB |
1145 |
AS2 (st,X,__zero_reg__) CR_TAB |
1146 |
AS1 (clr,__zero_reg__) CR_TAB |
1147 |
- AS2 (sbiw,r26,%o0+1)); |
1148 |
+ AS2 (subi,r26,lo8(%o0+1)) CR_TAB |
1149 |
+ AS2 (sbci,r27,hi8(%o0+1))) |
1150 |
+ : (AS2 (mov,__tmp_reg__,r26) CR_TAB |
1151 |
+ AS2 (mov,__zero_reg__,r27) CR_TAB |
1152 |
+ AS2 (adiw,r26,%o0+1) CR_TAB |
1153 |
+ AS2 (st,X+,__tmp_reg__) CR_TAB |
1154 |
+ AS2 (st,X,__zero_reg__) CR_TAB |
1155 |
+ AS1 (clr,__zero_reg__) CR_TAB |
1156 |
+ AS2 (sbiw,r26,%o0)); |
1157 |
+ |
1158 |
} |
1159 |
} |
1160 |
if (!AVR_XMEGA) |
1161 |
{ |
1162 |
*l = 4; |
1163 |
- return (AS2 (adiw,r26,%o0+1) CR_TAB |
1164 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0+1))) CR_TAB |
1165 |
+ AS2 (sbci,r27,hi8(-(%o0+1))) CR_TAB |
1166 |
+ AS2 (st,X,%B1) CR_TAB |
1167 |
+ AS2 (st,-X,%A1) CR_TAB |
1168 |
+ AS2 (subi,r26,lo8(%o0)) CR_TAB |
1169 |
+ AS2 (sbci,r27,hi8(%o0))) |
1170 |
+ : (AS2 (adiw,r26,%o0+1) CR_TAB |
1171 |
AS2 (st,X,%B1) CR_TAB |
1172 |
AS2 (st,-X,%A1) CR_TAB |
1173 |
AS2 (sbiw,r26,%o0)); |
1174 |
@@ -2919,7 +3560,13 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1175 |
else |
1176 |
{ |
1177 |
*l = 4; |
1178 |
- return (AS2 (adiw,r26,%o0) CR_TAB |
1179 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB |
1180 |
+ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB |
1181 |
+ AS2 (st,X+,%A1) CR_TAB |
1182 |
+ AS2 (st,X,%B1) CR_TAB |
1183 |
+ AS2 (subi,r26,lo8(%o0)) CR_TAB |
1184 |
+ AS2 (sbci,r27,hi8(%o0))) |
1185 |
+ : (AS2 (adiw,r26,%o0) CR_TAB |
1186 |
AS2 (st,X+,%A1) CR_TAB |
1187 |
AS2 (st,X,%B1) CR_TAB |
1188 |
AS2 (sbiw,r26,%o0+1)); |
1189 |
@@ -2927,11 +3574,49 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1190 |
} |
1191 |
|
1192 |
if (!AVR_XMEGA) |
1193 |
- return *l=2, (AS2 (std,%B0,%B1) CR_TAB |
1194 |
+ { |
1195 |
+ op[2] = XEXP(base, 0); |
1196 |
+ if(REGNO(op[2]) == REG_Y) |
1197 |
+ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+2))) CR_TAB |
1198 |
+ AS2 (sbci,%B2,hi8(-(%o0+2))) CR_TAB |
1199 |
+ AS2 (st,-Y,%B1) CR_TAB |
1200 |
+ AS2 (st,-Y,%A1) CR_TAB |
1201 |
+ AS2 (subi,%A2,lo8(%o0)) CR_TAB |
1202 |
+ AS2 (sbci,%B2,hi8(%o0))) |
1203 |
+ : (AS2 (std,%B0,%B1) CR_TAB |
1204 |
AS2 (std,%A0,%A1)); |
1205 |
+ if(REGNO(op[2]) == REG_Z) |
1206 |
+ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+1))) CR_TAB |
1207 |
+ AS2 (sbci,%B2,hi8(-(%o0+1))) CR_TAB |
1208 |
+ AS2 (st,-Z,%B1) CR_TAB |
1209 |
+ AS2 (st,-Z,%A1) CR_TAB |
1210 |
+ AS2 (subi,%A2,lo8(%o0)) CR_TAB |
1211 |
+ AS2 (sbci,%B2,hi8(%o0))) |
1212 |
+ : (AS2 (std,%B0,%B1) CR_TAB |
1213 |
+ AS2 (std,%A0,%A1)); |
1214 |
+ } |
1215 |
else |
1216 |
- return *l=2, (AS2 (std,%A0,%A1) CR_TAB |
1217 |
+ { |
1218 |
+ op[2] = XEXP(base, 0); |
1219 |
+ if(REGNO(op[2]) == REG_Y) |
1220 |
+ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB |
1221 |
+ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB |
1222 |
+ AS2 (st,Y+,%A1) CR_TAB |
1223 |
+ AS2 (st,Y,%B1) CR_TAB |
1224 |
+ AS2 (subi,%A2,lo8(%o0+1)) CR_TAB |
1225 |
+ AS2 (sbci,%B2,hi8(%o0+1))) |
1226 |
+ : (AS2 (std,%A0,%A1) CR_TAB |
1227 |
AS2 (std,%B0,%B1)); |
1228 |
+ if(REGNO(op[2]) == REG_Z) |
1229 |
+ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB |
1230 |
+ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB |
1231 |
+ AS2 (st,Z+,%A1) CR_TAB |
1232 |
+ AS2 (st,Z,%B1) CR_TAB |
1233 |
+ AS2 (subi,%A2,lo8(%o0+1)) CR_TAB |
1234 |
+ AS2 (sbci,%B2,hi8(%o0+1))) |
1235 |
+ : (AS2 (std,%A0,%A1) CR_TAB |
1236 |
+ AS2 (std,%B0,%B1)); |
1237 |
+ } |
1238 |
} |
1239 |
else if (GET_CODE (base) == PRE_DEC) /* (--R) */ |
1240 |
{ |
1241 |
@@ -2951,15 +3636,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int |
1242 |
if (REGNO (XEXP (base, 0)) == REG_X) |
1243 |
{ |
1244 |
*l = 4; |
1245 |
- return (AS2 (adiw,r26,1) CR_TAB |
1246 |
+ return AVR_TINY ? (AS2 (subi,r26,lo8(-1)) CR_TAB |
1247 |
+ AS2 (sbci,r27,hi8(-1)) CR_TAB |
1248 |
+ AS2 (st,X,%B1) CR_TAB |
1249 |
+ AS2 (st,-X,%A1) CR_TAB |
1250 |
+ AS2 (subi,r26,lo8(-2)) CR_TAB |
1251 |
+ AS2 (sbci,r27,hi8(-2))) |
1252 |
+ : (AS2 (adiw,r26,1) CR_TAB |
1253 |
AS2 (st,X,%B1) CR_TAB |
1254 |
AS2 (st,-X,%A1) CR_TAB |
1255 |
AS2 (adiw,r26,2)); |
1256 |
} |
1257 |
else |
1258 |
{ |
1259 |
+ //FIXME:check the code once again for AVR_TINY |
1260 |
*l = 3; |
1261 |
- return (AS2 (std,%p0+1,%B1) CR_TAB |
1262 |
+ return AVR_TINY ? (AS2 (subi,%A0,lo8(-1)) CR_TAB |
1263 |
+ AS2 (sbci,%B0,hi8(-1)) CR_TAB |
1264 |
+ AS2 (st,%p0,%B1) CR_TAB |
1265 |
+ AS2 (subi,%A0,lo8(1)) CR_TAB |
1266 |
+ AS2 (sbci,%B0,hi8(1)) CR_TAB |
1267 |
+ AS2 (st,%p0,%A1) CR_TAB |
1268 |
+ AS2 (subi,%A0,lo8(-3)) CR_TAB |
1269 |
+ AS2 (sbci,%B0,hi8(-3))) |
1270 |
+ : (AS2 (std,%p0+1,%B1) CR_TAB |
1271 |
AS2 (st,%p0,%A1) CR_TAB |
1272 |
AS2 (adiw,%r0,2)); |
1273 |
} |
1274 |
@@ -3049,7 +3749,9 @@ out_tsthi (rtx insn, rtx op, int *l) |
1275 |
if (test_hard_reg_class (ADDW_REGS, op)) |
1276 |
{ |
1277 |
if (l) *l = 1; |
1278 |
- return AS2 (sbiw,%0,0); |
1279 |
+ return AVR_TINY ? (AS2 (subi,%A0,lo8(0)) CR_TAB |
1280 |
+ AS2 (sbci,%B0,hi8(0))) |
1281 |
+ : AS2 (sbiw,%0,0); |
1282 |
} |
1283 |
if (l) *l = 2; |
1284 |
return (AS2 (cp,%A0,__zero_reg__) CR_TAB |
1285 |
@@ -3070,7 +3772,11 @@ out_tstsi (rtx insn, rtx op, int *l) |
1286 |
if (test_hard_reg_class (ADDW_REGS, op)) |
1287 |
{ |
1288 |
if (l) *l = 3; |
1289 |
- return (AS2 (sbiw,%A0,0) CR_TAB |
1290 |
+ return AVR_TINY ? (AS2 (subi,%A0,lo8(-(-0))) CR_TAB |
1291 |
+ AS2 (sbci,%B0,hi8(-(-0))) CR_TAB |
1292 |
+ AS2 (cpc,%C0,__zero_reg__) CR_TAB |
1293 |
+ AS2 (cpc,%D0,__zero_reg__)) |
1294 |
+ : (AS2 (sbiw,%A0,0) CR_TAB |
1295 |
AS2 (cpc,%C0,__zero_reg__) CR_TAB |
1296 |
AS2 (cpc,%D0,__zero_reg__)); |
1297 |
} |
1298 |
@@ -5392,10 +6098,12 @@ avr_file_start (void) |
1299 |
/* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/ |
1300 |
fputs ("__SREG__ = 0x3f\n" |
1301 |
"__SP_H__ = 0x3e\n" |
1302 |
- "__SP_L__ = 0x3d\n" |
1303 |
- "__CCP__ = 0x34\n", asm_out_file); |
1304 |
+ "__SP_L__ = 0x3d\n", asm_out_file); |
1305 |
+ |
1306 |
+ AVR_TINY ? fputs ("__CCP__ = 0x3c\n", asm_out_file) : fputs ("__CCP__ = 0x34\n", asm_out_file); |
1307 |
|
1308 |
- fputs ("__tmp_reg__ = 0\n" |
1309 |
+ AVR_TINY ? fputs ("__tmp_reg__ = 16\n" |
1310 |
+ "__zero_reg__ = 17\n", asm_out_file) : fputs ("__tmp_reg__ = 0\n" |
1311 |
"__zero_reg__ = 1\n", asm_out_file); |
1312 |
|
1313 |
/* FIXME: output these only if there is anything in the .data / .bss |
1314 |
diff -Naurp gcc/config/avr/avr-c.c gcc/config/avr/avr-c.c |
1315 |
--- gcc/config/avr/avr-c.c 2011-01-19 13:03:59.000000000 -0600 |
1316 |
+++ gcc/config/avr/avr-c.c 2011-01-19 13:11:23.000000000 -0600 |
1317 |
@@ -94,5 +94,9 @@ avr_cpu_cpp_builtins (struct cpp_reader |
1318 |
cpp_define (pfile, "__AVR_HAVE_RAMPD__"); |
1319 |
} |
1320 |
|
1321 |
+ if (avr_current_arch->avrtiny) |
1322 |
+ { |
1323 |
+ cpp_define (pfile, "__AVR_TINY__"); |
1324 |
+ } |
1325 |
} |
1326 |
|
1327 |
diff -Naurp gcc/config/avr/avr-devices.c gcc/config/avr/avr-devices.c |
1328 |
--- gcc/config/avr/avr-devices.c 2011-01-19 13:03:59.000000000 -0600 |
1329 |
+++ gcc/config/avr/avr-devices.c 2011-01-19 13:11:23.000000000 -0600 |
1330 |
@@ -26,24 +26,25 @@ |
1331 |
/* List of all known AVR MCU architectyres. */ |
1332 |
|
1333 |
const struct base_arch_s avr_arch_types[] = { |
1334 |
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, NULL, "avr2" }, /* unknown device specified */ |
1335 |
- { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=1", "avr1" }, |
1336 |
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=2", "avr2" }, |
1337 |
- { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=25", "avr25" }, |
1338 |
- { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=3", "avr3" }, |
1339 |
- { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=31", "avr31" }, |
1340 |
- { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=35", "avr35" }, |
1341 |
- { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=4", "avr4" }, |
1342 |
- { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=5", "avr5" }, |
1343 |
- { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, "__AVR_ARCH__=51", "avr51" }, |
1344 |
- { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0x0060, "__AVR_ARCH__=6", "avr6" }, |
1345 |
- { 0, 1, 0, 1, 0, 0, 0, 1, 0, 0x2000, "__AVR_ARCH__=101", "avrxmega1" }, |
1346 |
- { 0, 1, 1, 1, 0, 0, 0, 1, 0, 0x2000, "__AVR_ARCH__=102", "avrxmega2" }, |
1347 |
- { 0, 1, 1, 1, 0, 0, 0, 1, 1, 0x2000, "__AVR_ARCH__=103", "avrxmega3" }, |
1348 |
- { 0, 1, 1, 1, 1, 1, 0, 1, 0, 0x2000, "__AVR_ARCH__=104", "avrxmega4" }, |
1349 |
- { 0, 1, 1, 1, 1, 1, 0, 1, 1, 0x2000, "__AVR_ARCH__=105", "avrxmega5" }, |
1350 |
- { 0, 1, 1, 1, 1, 1, 1, 1, 0, 0x2000, "__AVR_ARCH__=106", "avrxmega6" }, |
1351 |
- { 0, 1, 1, 1, 1, 1, 1, 1, 1, 0x2000, "__AVR_ARCH__=107", "avrxmega7" } |
1352 |
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, NULL, "avr2" }, /* unknown device specified */ |
1353 |
+ { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=1", "avr1" }, |
1354 |
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=2", "avr2" }, |
1355 |
+ { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=25", "avr25" }, |
1356 |
+ { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=3", "avr3" }, |
1357 |
+ { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=31", "avr31" }, |
1358 |
+ { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=35", "avr35" }, |
1359 |
+ { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=4", "avr4" }, |
1360 |
+ { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=5", "avr5" }, |
1361 |
+ { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=51", "avr51" }, |
1362 |
+ { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, "__AVR_ARCH__=6", "avr6" }, |
1363 |
+ { 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=101", "avrxmega1" }, |
1364 |
+ { 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=102", "avrxmega2" }, |
1365 |
+ { 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0x2000, "__AVR_ARCH__=103", "avrxmega3" }, |
1366 |
+ { 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=104", "avrxmega4" }, |
1367 |
+ { 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0x2000, "__AVR_ARCH__=105", "avrxmega5" }, |
1368 |
+ { 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0x2000, "__AVR_ARCH__=106", "avrxmega6" }, |
1369 |
+ { 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0x2000, "__AVR_ARCH__=107", "avrxmega7" }, |
1370 |
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0x0040, "__AVR_ARCH__=201", "avrtiny10" } |
1371 |
}; |
1372 |
|
1373 |
/* List of all known AVR MCU types - if updated, it has to be kept |
1374 |
@@ -229,6 +230,14 @@ const struct mcu_type_s avr_mcu_types[] |
1375 |
{ "avrxmega7", ARCH_AVRXMEGA7, NULL, 0, 0x2000, "x128a1" }, |
1376 |
{ "atxmega128a1", ARCH_AVRXMEGA7, "__AVR_ATxmega128A1__", 0, 0x2000, "x128a1" }, |
1377 |
{ "atxmega128a1u", ARCH_AVRXMEGA7, "__AVR_ATxmega128A1U__", 0, 0x2000, "x128a1u" }, |
1378 |
+ /* tiny10 family */ |
1379 |
+ { "avrtiny10", ARCH_AVRTINY10, NULL, 0, 0x0040, "tn10" }, |
1380 |
+ { "attiny4", ARCH_AVRTINY10, "__AVR_ATtiny4__", 0, 0x0040, "tn4" }, |
1381 |
+ { "attiny5", ARCH_AVRTINY10, "__AVR_ATtiny5__", 0, 0x0040, "tn5" }, |
1382 |
+ { "attiny9", ARCH_AVRTINY10, "__AVR_ATtiny9__", 0, 0x0040, "tn9" }, |
1383 |
+ { "attiny10", ARCH_AVRTINY10, "__AVR_ATtiny10__", 0, 0x0040, "tn10" }, |
1384 |
+ { "attiny20", ARCH_AVRTINY10, "__AVR_ATtiny20__", 0, 0x0040, "tn20" }, |
1385 |
+ { "attiny40", ARCH_AVRTINY10, "__AVR_ATtiny40__", 0, 0x0040, "tn40" }, |
1386 |
/* Assembler only. */ |
1387 |
{ "avr1", ARCH_AVR1, NULL, 0, 0x0060, "s1200" }, |
1388 |
{ "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__", 0, 0x0060, "s1200" }, |
1389 |
diff -Naurp gcc/config/avr/avr.h gcc/config/avr/avr.h |
1390 |
--- gcc/config/avr/avr.h 2011-01-19 13:03:59.000000000 -0600 |
1391 |
+++ gcc/config/avr/avr.h 2011-01-19 13:11:23.000000000 -0600 |
1392 |
@@ -51,6 +51,9 @@ struct base_arch_s { |
1393 |
/* Core have RAMPX, RAMPY and RAMPD registers. */ |
1394 |
int have_rampx_y_d; |
1395 |
|
1396 |
+ /* Core is in avrtiny10 family. */ |
1397 |
+ int avrtiny; |
1398 |
+ |
1399 |
/* Default start of data section address for architecture. */ |
1400 |
int default_data_section_start; |
1401 |
|
1402 |
@@ -82,7 +85,8 @@ enum avr_arch |
1403 |
ARCH_AVRXMEGA4, |
1404 |
ARCH_AVRXMEGA5, |
1405 |
ARCH_AVRXMEGA6, |
1406 |
- ARCH_AVRXMEGA7 |
1407 |
+ ARCH_AVRXMEGA7, |
1408 |
+ ARCH_AVRTINY10 |
1409 |
}; |
1410 |
|
1411 |
struct mcu_type_s { |
1412 |
@@ -126,6 +130,7 @@ extern GTY(()) section *progmem_section; |
1413 |
#define AVR_HAVE_EIJMP_EICALL (avr_current_arch->have_eijmp_eicall) |
1414 |
#define AVR_HAVE_8BIT_SP (avr_current_device->short_sp || TARGET_TINY_STACK) |
1415 |
#define AVR_XMEGA (avr_current_arch->xmega) |
1416 |
+#define AVR_TINY (avr_current_arch->avrtiny) |
1417 |
#define AVR_HAVE_RAMPX_Y_D (avr_current_arch->have_rampx_y_d) |
1418 |
|
1419 |
#define AVR_2_BYTE_PC (!AVR_HAVE_EIJMP_EICALL) |
1420 |
@@ -249,7 +254,6 @@ extern GTY(()) section *progmem_section; |
1421 |
|
1422 |
#define ORDER_REGS_FOR_LOCAL_ALLOC order_regs_for_local_alloc () |
1423 |
|
1424 |
- |
1425 |
#define HARD_REGNO_NREGS(REGNO, MODE) ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD) |
1426 |
|
1427 |
#define HARD_REGNO_MODE_OK(REGNO, MODE) avr_hard_regno_mode_ok(REGNO, MODE) |
1428 |
@@ -313,6 +317,41 @@ enum reg_class { |
1429 |
{0xffffffff,0x00000003} /* ALL_REGS */ \ |
1430 |
} |
1431 |
|
1432 |
+/* Zero or more C statements that may conditionally modify five variables |
1433 |
+ fixed_regs, call_used_regs, global_regs, reg_names, and reg_class_contents, |
1434 |
+ to take into account any dependence of these register sets on target flags. |
1435 |
+ The first three of these are of type char [] (interpreted as Boolean |
1436 |
+ vectors). global_regs is a const char *[], and reg_class_contents is a |
1437 |
+ HARD_REG_SET. Before the macro is called, fixed_regs, call_used_regs, |
1438 |
+ reg_class_contents, and reg_names have been initialized from |
1439 |
+ FIXED_REGISTERS, CALL_USED_REGISTERS, REG_CLASS_CONTENTS, and |
1440 |
+ REGISTER_NAMES, respectively. global_regs has been cleared, and any |
1441 |
+ ‘-ffixed-reg’, ‘-fcall-used-reg’ and ‘-fcall-saved-reg’ command options |
1442 |
+ have been applied. |
1443 |
+ |
1444 |
+ You need not define this macro if it has no work to do. |
1445 |
+ |
1446 |
+ If the usage of an entire class of registers depends on the target flags, |
1447 |
+ you may indicate this to GCC by using this macro to modify fixed_regs and |
1448 |
+ call_used_regs to 1 for each of the registers in the classes which should |
1449 |
+ not be used by GCC. Also define the macro REG_CLASS_FROM_LETTER / |
1450 |
+ REG_CLASS_FROM_CONSTRAINT to return NO_REGS if it is called with a letter |
1451 |
+ for a class that shouldn’t be used. (However, if this class is not included |
1452 |
+ in GENERAL_REGS and all of the insn patterns whose constraints permit this |
1453 |
+ class are controlled by target switches, then GCC will automatically avoid |
1454 |
+ using these registers when the target switches are opposed to them.) */ |
1455 |
+ |
1456 |
+#define CONDITIONAL_REGISTER_USAGE \ |
1457 |
+ if (AVR_TINY) { \ |
1458 |
+ int i; \ |
1459 |
+ for (i = 0; i <= 17; i++) { \ |
1460 |
+ fixed_regs[i] = 1; \ |
1461 |
+ call_used_regs[i] = 1; \ |
1462 |
+ } \ |
1463 |
+ CLEAR_HARD_REG_SET(reg_class_contents[(int)ADDW_REGS]); \ |
1464 |
+ CLEAR_HARD_REG_SET(reg_class_contents[(int)NO_LD_REGS]); \ |
1465 |
+ } |
1466 |
+ |
1467 |
#define REGNO_REG_CLASS(R) avr_regno_reg_class(R) |
1468 |
|
1469 |
/* The following macro defines cover classes for Integrated Register |
1470 |
diff -Naurp gcc/config/avr/avr.md gcc/config/avr/avr.md |
1471 |
--- gcc/config/avr/avr.md 2011-01-19 13:03:59.000000000 -0600 |
1472 |
+++ gcc/config/avr/avr.md 2011-01-19 13:11:23.000000000 -0600 |
1473 |
@@ -186,6 +186,9 @@ |
1474 |
DONE; |
1475 |
}) |
1476 |
|
1477 |
+(define_constants |
1478 |
+ [(TMP_REGNO_AVRTINY10 16) ; temporary register r16 |
1479 |
+ (ZERO_REGNO_AVRTINY10 17)]) ; zero register r17 |
1480 |
|
1481 |
(define_insn "*push<ALLQ:mode>" |
1482 |
[(set (mem:ALLQ (post_dec (reg:HI REG_SP))) |
1483 |
@@ -479,7 +482,7 @@ |
1484 |
rtx addr1 = copy_to_mode_reg (Pmode, XEXP (operands[1], 0)); |
1485 |
|
1486 |
/* Create rtx for tmp register - we use this as scratch. */ |
1487 |
- rtx tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO); |
1488 |
+ rtx tmp_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? TMP_REGNO_AVRTINY10 : TMP_REGNO); |
1489 |
|
1490 |
if (GET_CODE (operands[2]) != CONST_INT) |
1491 |
FAIL; |
1492 |
@@ -2900,7 +2903,7 @@ |
1493 |
UNSPEC_INDEX_JMP)) |
1494 |
(use (label_ref (match_operand 1 "" ""))) |
1495 |
(clobber (match_dup 0))] |
1496 |
- "AVR_HAVE_JMP_CALL && !AVR_HAVE_EIJMP_EICALL" |
1497 |
+ "(AVR_HAVE_JMP_CALL && !AVR_HAVE_EIJMP_EICALL)" |
1498 |
"lsl r30 |
1499 |
rol r31 |
1500 |
lpm |
1501 |
diff -Naurp gcc/config/avr/libgcc-fixed.S gcc/config/avr/libgcc-fixed.S |
1502 |
--- gcc/config/avr/libgcc-fixed.S 2011-01-18 17:58:12.000000000 -0600 |
1503 |
+++ gcc/config/avr/libgcc-fixed.S 2011-01-19 13:11:23.000000000 -0600 |
1504 |
@@ -29,13 +29,17 @@ Boston, MA 02110-1301, USA. */ |
1505 |
|
1506 |
/* Fixed point library routines for avr. */ |
1507 |
|
1508 |
+#if defined (__AVR_TINY__) |
1509 |
+#define __zero_reg__ r17 |
1510 |
+#define __tmp_reg__ r16 |
1511 |
+#else |
1512 |
#define __zero_reg__ r1 |
1513 |
#define __tmp_reg__ r0 |
1514 |
+#endif |
1515 |
#define __SREG__ 0x3f |
1516 |
#define __SP_H__ 0x3e |
1517 |
#define __SP_L__ 0x3d |
1518 |
#define __RAMPZ__ 0x3B |
1519 |
- |
1520 |
/* Conversions to float. */ |
1521 |
#if defined (L_fractqqsf) |
1522 |
.global __fractqqsf |
1523 |
@@ -281,15 +285,15 @@ __muluqq3_exit: |
1524 |
.func __mulhq3 |
1525 |
__mulhq3: |
1526 |
fmuls r_arg1H, r_arg2H |
1527 |
- movw r_resL, r0 |
1528 |
+ movw r_resL, __tmp_reg__ |
1529 |
fmulsu r_arg2H, r_arg1L |
1530 |
clr r_arg1L |
1531 |
sbc r_resH, r_arg1L |
1532 |
- add r_resL, r1 |
1533 |
+ add r_resL, __zero_reg__ |
1534 |
adc r_resH, r_arg1L |
1535 |
fmulsu r_arg1H, r_arg2L |
1536 |
sbc r_resH, r_arg1L |
1537 |
- add r_resL, r1 |
1538 |
+ add r_resL, __zero_reg__ |
1539 |
adc r_resH, r_arg1L |
1540 |
clr __zero_reg__ |
1541 |
ret |
1542 |
@@ -301,13 +305,13 @@ __mulhq3: |
1543 |
.func __muluhq3 |
1544 |
__muluhq3: |
1545 |
mul r_arg1H, r_arg2H |
1546 |
- movw r_resL, r0 |
1547 |
+ movw r_resL, __tmp_reg__ |
1548 |
mul r_arg1H, r_arg2L |
1549 |
- add r_resL, r1 |
1550 |
+ add r_resL, __zero_reg__ |
1551 |
clr __zero_reg__ |
1552 |
adc r_resH, __zero_reg__ |
1553 |
mul r_arg1L, r_arg2H |
1554 |
- add r_resL, r1 |
1555 |
+ add r_resL, __zero_reg__ |
1556 |
clr __zero_reg__ |
1557 |
adc r_resH, __zero_reg__ |
1558 |
ret |
1559 |
@@ -401,15 +405,15 @@ __muluhq3_skip: |
1560 |
.func __mulha3 |
1561 |
__mulha3: |
1562 |
mul r_arg1L, r_arg2L |
1563 |
- mov r_resL, r1 |
1564 |
+ mov r_resL, __zero_reg__ |
1565 |
muls r_arg1H, r_arg2H |
1566 |
- mov r_resH, r0 |
1567 |
+ mov r_resH, __tmp_reg__ |
1568 |
mulsu r_arg1H, r_arg2L |
1569 |
- add r_resL, r0 |
1570 |
- adc r_resH, r1 |
1571 |
+ add r_resL, __tmp_reg__ |
1572 |
+ adc r_resH, __zero_reg__ |
1573 |
mulsu r_arg2H, r_arg1L |
1574 |
- add r_resL, r0 |
1575 |
- adc r_resH, r1 |
1576 |
+ add r_resL, __tmp_reg__ |
1577 |
+ adc r_resH, __zero_reg__ |
1578 |
clr __zero_reg__ |
1579 |
ret |
1580 |
.endfunc |
1581 |
@@ -420,15 +424,15 @@ __mulha3: |
1582 |
.func __muluha3 |
1583 |
__muluha3: |
1584 |
mul r_arg1L, r_arg2L |
1585 |
- mov r_resL, r1 |
1586 |
+ mov r_resL, __zero_reg__ |
1587 |
mul r_arg1H, r_arg2H |
1588 |
- mov r_resH, r0 |
1589 |
+ mov r_resH, __tmp_reg__ |
1590 |
mul r_arg1H, r_arg2L |
1591 |
- add r_resL, r0 |
1592 |
- adc r_resH, r1 |
1593 |
+ add r_resL, __tmp_reg__ |
1594 |
+ adc r_resH, __zero_reg__ |
1595 |
mul r_arg1L, r_arg2H |
1596 |
- add r_resL, r0 |
1597 |
- adc r_resH, r1 |
1598 |
+ add r_resL, __tmp_reg__ |
1599 |
+ adc r_resH, __zero_reg__ |
1600 |
clr __zero_reg__ |
1601 |
ret |
1602 |
.endfunc |
1603 |
@@ -442,8 +446,8 @@ __muluha3: |
1604 |
#define r_arg2H r23 /* multiplicand High */ |
1605 |
#define r_resL r18 /* result Low */ |
1606 |
#define r_resH r19 /* result High */ |
1607 |
-#define r_scratchL r0 /* scratch Low */ |
1608 |
-#define r_scratchH r1 |
1609 |
+#define r_scratchL __tmp_reg__ /* scratch Low */ |
1610 |
+#define r_scratchH __zero_reg__ |
1611 |
|
1612 |
#if defined (L_mulha3) |
1613 |
.global __mulha3 |
1614 |
@@ -480,8 +484,8 @@ __mulha3_exit: |
1615 |
__muluha3: |
1616 |
clr r_resL ; clear result |
1617 |
clr r_resH |
1618 |
- mov_l r0, r_arg1L ; save multiplicand |
1619 |
- mov_h r1, r_arg1H |
1620 |
+ mov_l __tmp_reg__, r_arg1L ; save multiplicand |
1621 |
+ mov_h __zero_reg__, r_arg1H |
1622 |
__muluha3_loop1: |
1623 |
sbrs r_arg2H,0 |
1624 |
rjmp __muluha3_skip1 |
1625 |
@@ -490,7 +494,12 @@ __muluha3_loop1: |
1626 |
__muluha3_skip1: |
1627 |
lsl r_arg1L ; shift multiplicand |
1628 |
rol r_arg1H |
1629 |
+#if defined (__AVR_TINY__) |
1630 |
+ subi r_arg1L, lo8(0) |
1631 |
+ sbci r_arg1H, hi8(0) |
1632 |
+#else |
1633 |
sbiw r_arg1L,0 |
1634 |
+#endif |
1635 |
breq __muluha3_loop1_done ; exit multiplicand = 0 |
1636 |
lsr r_arg2H |
1637 |
brne __muluha3_loop1 ; exit multiplier = 0 |
1638 |
@@ -500,7 +509,12 @@ __muluha3_loop1_done: |
1639 |
__muluha3_loop2: |
1640 |
lsr r_arg1H ; shift multiplicand |
1641 |
ror r_arg1L |
1642 |
+#if defined (__AVR_TINY__) |
1643 |
+ subi r_arg1L, lo8(0) |
1644 |
+ sbci r_arg1H, hi8(0) |
1645 |
+#else |
1646 |
sbiw r_arg1L,0 |
1647 |
+#endif |
1648 |
breq __muluha3_exit ; exit if multiplicand = 0 |
1649 |
sbrs r_arg2L,7 |
1650 |
rjmp __muluha3_skip2 |
1651 |
@@ -556,53 +570,53 @@ __mulsa3: |
1652 |
clr r_resHL |
1653 |
clr r_resHH |
1654 |
mul r_arg1H, r_arg2L |
1655 |
- mov r_resL, r1 |
1656 |
+ mov r_resL, __zero_reg__ |
1657 |
mul r_arg1L, r_arg2H |
1658 |
- add r_resL, r1 |
1659 |
+ add r_resL, __zero_reg__ |
1660 |
adc r_resH, r_clr |
1661 |
mul r_arg1L, r_arg2HL |
1662 |
- add r_resL, r0 |
1663 |
- adc r_resH, r1 |
1664 |
+ add r_resL, __tmp_reg__ |
1665 |
+ adc r_resH, __zero_reg__ |
1666 |
adc r_resHL, r_clr |
1667 |
mul r_arg1H, r_arg2H |
1668 |
- add r_resL, r0 |
1669 |
- adc r_resH, r1 |
1670 |
+ add r_resL, __tmp_reg__ |
1671 |
+ adc r_resH, __zero_reg__ |
1672 |
adc r_resHL, r_clr |
1673 |
mul r_arg1HL, r_arg2L |
1674 |
- add r_resL, r0 |
1675 |
- adc r_resH, r1 |
1676 |
+ add r_resL, __tmp_reg__ |
1677 |
+ adc r_resH, __zero_reg__ |
1678 |
adc r_resHL, r_clr |
1679 |
mulsu r_arg2HH, r_arg1L |
1680 |
sbc r_resHH, r_clr |
1681 |
- add r_resH, r0 |
1682 |
- adc r_resHL, r1 |
1683 |
+ add r_resH, __tmp_reg__ |
1684 |
+ adc r_resHL, __zero_reg__ |
1685 |
adc r_resHH, r_clr |
1686 |
mul r_arg1H, r_arg2HL |
1687 |
- add r_resH, r0 |
1688 |
- adc r_resHL, r1 |
1689 |
+ add r_resH, __tmp_reg__ |
1690 |
+ adc r_resHL, __zero_reg__ |
1691 |
adc r_resHH, r_clr |
1692 |
mul r_arg1HL, r_arg2H |
1693 |
- add r_resH, r0 |
1694 |
- adc r_resHL, r1 |
1695 |
+ add r_resH, __tmp_reg__ |
1696 |
+ adc r_resHL, __zero_reg__ |
1697 |
adc r_resHH, r_clr |
1698 |
mulsu r_arg1HH, r_arg2L |
1699 |
sbc r_resHH, r_clr |
1700 |
- add r_resH, r0 |
1701 |
- adc r_resHL, r1 |
1702 |
+ add r_resH, __tmp_reg__ |
1703 |
+ adc r_resHL, __zero_reg__ |
1704 |
adc r_resHH, r_clr |
1705 |
mulsu r_arg2HH, r_arg1H |
1706 |
- add r_resHL, r0 |
1707 |
- adc r_resHH, r1 |
1708 |
+ add r_resHL, __tmp_reg__ |
1709 |
+ adc r_resHH, __zero_reg__ |
1710 |
mul r_arg1HL, r_arg2HL |
1711 |
- add r_resHL, r0 |
1712 |
- adc r_resHH, r1 |
1713 |
+ add r_resHL, __tmp_reg__ |
1714 |
+ adc r_resHH, __zero_reg__ |
1715 |
mulsu r_arg1HH, r_arg2H |
1716 |
- add r_resHL, r0 |
1717 |
- adc r_resHH, r1 |
1718 |
+ add r_resHL, __tmp_reg__ |
1719 |
+ adc r_resHH, __zero_reg__ |
1720 |
mulsu r_arg2HH, r_arg1HL |
1721 |
- add r_resHH, r0 |
1722 |
+ add r_resHH, __tmp_reg__ |
1723 |
mulsu r_arg1HH, r_arg2HL |
1724 |
- add r_resHH, r0 |
1725 |
+ add r_resHH, __tmp_reg__ |
1726 |
clr __zero_reg__ |
1727 |
ret |
1728 |
.endfunc |
1729 |
@@ -617,51 +631,51 @@ __mulusa3: |
1730 |
clr r_resHL |
1731 |
clr r_resHH |
1732 |
mul r_arg1H, r_arg2L |
1733 |
- mov r_resL, r1 |
1734 |
+ mov r_resL, __zero_reg__ |
1735 |
mul r_arg1L, r_arg2H |
1736 |
- add r_resL, r1 |
1737 |
+ add r_resL, __zero_reg__ |
1738 |
adc r_resH, r_clr |
1739 |
mul r_arg1L, r_arg2HL |
1740 |
- add r_resL, r0 |
1741 |
- adc r_resH, r1 |
1742 |
+ add r_resL, __tmp_reg__ |
1743 |
+ adc r_resH, __zero_reg__ |
1744 |
adc r_resHL, r_clr |
1745 |
mul r_arg1H, r_arg2H |
1746 |
- add r_resL, r0 |
1747 |
- adc r_resH, r1 |
1748 |
+ add r_resL, __tmp_reg__ |
1749 |
+ adc r_resH, __zero_reg__ |
1750 |
adc r_resHL, r_clr |
1751 |
mul r_arg1HL, r_arg2L |
1752 |
- add r_resL, r0 |
1753 |
- adc r_resH, r1 |
1754 |
+ add r_resL, __tmp_reg__ |
1755 |
+ adc r_resH, __zero_reg__ |
1756 |
adc r_resHL, r_clr |
1757 |
mul r_arg1L, r_arg2HH |
1758 |
- add r_resH, r0 |
1759 |
- adc r_resHL, r1 |
1760 |
+ add r_resH, __tmp_reg__ |
1761 |
+ adc r_resHL, __zero_reg__ |
1762 |
adc r_resHH, r_clr |
1763 |
mul r_arg1H, r_arg2HL |
1764 |
- add r_resH, r0 |
1765 |
- adc r_resHL, r1 |
1766 |
+ add r_resH, __tmp_reg__ |
1767 |
+ adc r_resHL, __zero_reg__ |
1768 |
adc r_resHH, r_clr |
1769 |
mul r_arg1HL, r_arg2H |
1770 |
- add r_resH, r0 |
1771 |
- adc r_resHL, r1 |
1772 |
+ add r_resH, __tmp_reg__ |
1773 |
+ adc r_resHL, __zero_reg__ |
1774 |
adc r_resHH, r_clr |
1775 |
mul r_arg1HH, r_arg2L |
1776 |
- add r_resH, r0 |
1777 |
- adc r_resHL, r1 |
1778 |
+ add r_resH, __tmp_reg__ |
1779 |
+ adc r_resHL, __zero_reg__ |
1780 |
adc r_resHH, r_clr |
1781 |
mul r_arg1H, r_arg2HH |
1782 |
- add r_resHL, r0 |
1783 |
- adc r_resHH, r1 |
1784 |
+ add r_resHL, __tmp_reg__ |
1785 |
+ adc r_resHH, __zero_reg__ |
1786 |
mul r_arg1HL, r_arg2HL |
1787 |
- add r_resHL, r0 |
1788 |
- adc r_resHH, r1 |
1789 |
+ add r_resHL, __tmp_reg__ |
1790 |
+ adc r_resHH, __zero_reg__ |
1791 |
mul r_arg1HH, r_arg2H |
1792 |
- add r_resHL, r0 |
1793 |
- adc r_resHH, r1 |
1794 |
+ add r_resHL, __tmp_reg__ |
1795 |
+ adc r_resHH, __zero_reg__ |
1796 |
mul r_arg1HL, r_arg2HH |
1797 |
- add r_resHH, r0 |
1798 |
+ add r_resHH, __tmp_reg__ |
1799 |
mul r_arg1HH, r_arg2HL |
1800 |
- add r_resHH, r0 |
1801 |
+ add r_resHH, __tmp_reg__ |
1802 |
clr __zero_reg__ |
1803 |
ret |
1804 |
.endfunc |
1805 |
@@ -680,13 +694,20 @@ __mulusa3: |
1806 |
#define r_arg2HL r26 |
1807 |
#define r_arg2HH r27 /* multiplicand High */ |
1808 |
|
1809 |
+#if defined (__AVR_TINY__) |
1810 |
+#define r_resL r28 /* result Low */ |
1811 |
+#define r_resH r29 |
1812 |
+#define r_resHL r30 |
1813 |
+#define r_resHH r31 /* result High */ |
1814 |
+#else |
1815 |
#define r_resL r14 /* result Low */ |
1816 |
#define r_resH r15 |
1817 |
#define r_resHL r16 |
1818 |
#define r_resHH r17 /* result High */ |
1819 |
+#endif |
1820 |
|
1821 |
-#define r_scratchL r0 /* scratch Low */ |
1822 |
-#define r_scratchH r1 |
1823 |
+#define r_scratchL __tmp_reg__ /* scratch Low */ |
1824 |
+#define r_scratchH __zero_reg__ |
1825 |
#define r_scratchHL r22 |
1826 |
#define r_scratchHH r23 /* scratch High */ |
1827 |
|
1828 |
@@ -758,7 +779,12 @@ __mulusa3_skip1: |
1829 |
rol r_arg1HH |
1830 |
lsr r_arg2HH |
1831 |
ror r_arg2HL |
1832 |
+#if defined (__AVR_TINY__) |
1833 |
+ subi r_arg2HL, lo8(0) |
1834 |
+ sbci r_arg2HH, hi8(0) |
1835 |
+#else |
1836 |
sbiw r_arg2HL,0 |
1837 |
+#endif |
1838 |
brne __mulusa3_loop1 ; exit multiplier = 0 |
1839 |
__mulusa3_loop1_done: |
1840 |
mov_l r_arg1L, r_scratchL ; restore multiplicand |
1841 |
@@ -779,7 +805,12 @@ __mulusa3_loop2: |
1842 |
__mulusa3_skip2: |
1843 |
lsl r_arg2L |
1844 |
rol r_arg2H |
1845 |
+#if defined (__AVR_TINY__) |
1846 |
+ subi r_arg2L, lo8(0) |
1847 |
+ sbci r_arg2H, hi8(0) |
1848 |
+#else |
1849 |
sbiw r_arg2L,0 |
1850 |
+#endif |
1851 |
brne __mulusa3_loop2 ; exit if multiplier = 0 |
1852 |
__mulusa3_exit: |
1853 |
clr __zero_reg__ ; got clobbered |
1854 |
@@ -791,9 +822,7 @@ __mulusa3_exit: |
1855 |
#undef r_scratchH |
1856 |
#undef r_scratchHL |
1857 |
#undef r_scratchHH |
1858 |
- |
1859 |
#endif |
1860 |
- |
1861 |
#undef r_arg1L |
1862 |
#undef r_arg1H |
1863 |
#undef r_arg1HL |
1864 |
@@ -821,8 +850,8 @@ __mulusa3_exit: |
1865 |
.global __divqq3 |
1866 |
.func __divqq3 |
1867 |
__divqq3: |
1868 |
- mov r0, r_divd |
1869 |
- eor r0, r_div |
1870 |
+ mov __tmp_reg__, r_divd |
1871 |
+ eor __tmp_reg__, r_div |
1872 |
sbrc r_div, 7 |
1873 |
neg r_div |
1874 |
sbrc r_divd, 7 |
1875 |
@@ -831,7 +860,7 @@ __divqq3: |
1876 |
breq __divqq3_minus1 ; if equal return -1 |
1877 |
rcall __udivuqq3 |
1878 |
lsr r_quo |
1879 |
- sbrc r0, 7 ; negate result if needed |
1880 |
+ sbrc __tmp_reg__, 7 ; negate result if needed |
1881 |
neg r_quo |
1882 |
ret |
1883 |
__divqq3_minus1: |
1884 |
@@ -886,8 +915,8 @@ __udivuqq3_cont: |
1885 |
.global __divhq3 |
1886 |
.func __divhq3 |
1887 |
__divhq3: |
1888 |
- mov r0, r_divdH |
1889 |
- eor r0, r_divH |
1890 |
+ mov __tmp_reg__, r_divdH |
1891 |
+ eor __tmp_reg__, r_divH |
1892 |
sbrs r_divH, 7 |
1893 |
rjmp __divhq3_divpos |
1894 |
com r_divH |
1895 |
@@ -906,7 +935,7 @@ __divhq3_divdpos: |
1896 |
rcall __udivuhq3 |
1897 |
lsr r_quoH |
1898 |
ror r_quoL |
1899 |
- sbrs r0, 7 ; negate result if needed |
1900 |
+ sbrs __tmp_reg__, 7 ; negate result if needed |
1901 |
ret |
1902 |
com r_quoH |
1903 |
neg r_quoL |
1904 |
@@ -958,8 +987,8 @@ __udivuhq3_cont: |
1905 |
.global __divha3 |
1906 |
.func __divha3 |
1907 |
__divha3: |
1908 |
- mov r0, r_divdH |
1909 |
- eor r0, r_divH |
1910 |
+ mov __tmp_reg__, r_divdH |
1911 |
+ eor __tmp_reg__, r_divH |
1912 |
sbrs r_divH, 7 |
1913 |
rjmp __divha3_divpos |
1914 |
com r_divH |
1915 |
@@ -973,7 +1002,7 @@ __divha3_divpos: |
1916 |
sbci r_divdH,-1 |
1917 |
__divha3_divdpos: |
1918 |
rcall __udivuha3 |
1919 |
- sbrs r0, 7 ; negate result if needed |
1920 |
+ sbrs __tmp_reg__, 7 ; negate result if needed |
1921 |
ret |
1922 |
com r_quoH |
1923 |
neg r_quoL |
1924 |
@@ -1027,8 +1056,8 @@ __udivuha3: |
1925 |
.global __divsa3 |
1926 |
.func __divsa3 |
1927 |
__divsa3: |
1928 |
- mov r0, r27 |
1929 |
- eor r0, r_divHH |
1930 |
+ mov __tmp_reg__, r27 |
1931 |
+ eor __tmp_reg__, r_divHH |
1932 |
sbrs r_divHH, 7 |
1933 |
rjmp __divsa3_divpos |
1934 |
com r_divHH |
1935 |
@@ -1050,7 +1079,7 @@ __divsa3_divpos: |
1936 |
sbci r_arg1HH,-1 |
1937 |
__divsa3_arg1pos: |
1938 |
rcall __udivusa3 |
1939 |
- sbrs r0, 7 ; negate result if needed |
1940 |
+ sbrs __tmp_reg__, 7 ; negate result if needed |
1941 |
ret |
1942 |
com r_quoHH |
1943 |
com r_quoHL |
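The same constraint shows up wherever the original code used "sbiw Rd,0" purely for its flag effects, i.e. to test a 16-bit register pair for zero: the multiply loops above and the ones in libgcc.S below replace it with an immediate subtract of zero chained across both halves. The sbci must name the high register of the pair, because sbci leaves Z set only when its own result is zero and Z was already set, which is what accumulates the test over both bytes. A short sketch with placeholder names (rL/rH stand for any upper register pair, the label is likewise illustrative):

    ; test the 16-bit value in rH:rL for zero without SBIW
    subi rL, lo8(0)      ; Z <- (rL == 0), register value unchanged
    sbci rH, hi8(0)      ; Z <- Z and (rH == 0), value unchanged
    breq value_is_zero   ; branch taken only if the whole pair was zero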
1944 |
diff -Naurp gcc/config/avr/libgcc.S gcc/config/avr/libgcc.S |
1945 |
--- gcc/config/avr/libgcc.S 2011-01-19 13:03:59.000000000 -0600 |
1946 |
+++ gcc/config/avr/libgcc.S 2011-01-19 13:11:23.000000000 -0600 |
1947 |
@@ -22,8 +22,13 @@ a copy of the GCC Runtime Library Except |
1948 |
see the files COPYING3 and COPYING.RUNTIME respectively. If not, see |
1949 |
<http://www.gnu.org/licenses/>. */ |
1950 |
|
1951 |
+#if defined (__AVR_TINY__) |
1952 |
+#define __zero_reg__ r17 |
1953 |
+#define __tmp_reg__ r16 |
1954 |
+#else |
1955 |
#define __zero_reg__ r1 |
1956 |
#define __tmp_reg__ r0 |
1957 |
+#endif |
1958 |
#define __SREG__ 0x3f |
1959 |
#define __SP_H__ 0x3e |
1960 |
#define __SP_L__ 0x3d |
1961 |
@@ -140,7 +145,12 @@ __mulhi3_skip1: |
1962 |
|
1963 |
lsr r_arg1H ; gets LSB of multiplier |
1964 |
ror r_arg1L |
1965 |
+#if defined (__AVR_TINY__) |
1966 |
+ subi r_arg1L, lo8(0) |
1967 |
+ sbci r_arg1H, hi8(0) |
1968 |
+#else |
1969 |
sbiw r_arg1L,0 |
1970 |
+#endif |
1971 |
brne __mulhi3_loop ; exit if multiplier = 0 |
1972 |
__mulhi3_exit: |
1973 |
mov r_arg1H,r_resH ; result to return register |
1974 |
@@ -304,7 +314,12 @@ __mulsi3_skip1: |
1975 |
ror r_arg1H |
1976 |
ror r_arg1L |
1977 |
brne __mulsi3_loop |
1978 |
+#if defined (__AVR_TINY__) |
1979 |
+ subi r_arg1HL, lo8(0) |
1980 |
+ sbci r_arg1HH, hi8(0) |
1981 |
+#else |
1982 |
sbiw r_arg1HL,0 |
1983 |
+#endif |
1984 |
cpc r_arg1H,r_arg1L |
1985 |
brne __mulsi3_loop ; exit if multiplier = 0 |
1986 |
__mulsi3_exit: |
1987 |
@@ -610,6 +625,7 @@ __divmodsi4_neg1: |
1988 |
/********************************** |
1989 |
* This is a prologue subroutine |
1990 |
**********************************/ |
1991 |
+#if !defined (__AVR_TINY__) |
1992 |
#if defined (L_prologue) |
1993 |
|
1994 |
.global __prologue_saves__ |
1995 |
@@ -663,7 +679,6 @@ __prologue_saves__: |
1996 |
* This is an epilogue subroutine |
1997 |
*/ |
1998 |
#if defined (L_epilogue) |
1999 |
- |
2000 |
.global __epilogue_restores__ |
2001 |
.func __epilogue_restores__ |
2002 |
__epilogue_restores__: |
2003 |
@@ -704,6 +719,7 @@ __epilogue_restores__: |
2004 |
ret |
2005 |
.endfunc |
2006 |
#endif /* defined (L_epilogue) */ |
2007 |
+#endif /* !defined (__AVR_TINY__) */ |
2008 |
|
2009 |
#ifdef L_exit |
2010 |
.section .fini9,"ax",@progbits |
2011 |
@@ -730,6 +746,7 @@ _cleanup: |
2012 |
.endfunc |
2013 |
#endif /* defined (L_cleanup) */ |
2014 |
|
2015 |
+#if !defined(__AVR_TINY__) |
2016 |
#ifdef L_tablejump |
2017 |
.global __tablejump2__ |
2018 |
.func __tablejump2__ |
2019 |
@@ -762,7 +779,9 @@ __tablejump__: |
2020 |
#endif |
2021 |
.endfunc |
2022 |
#endif /* defined (L_tablejump) */ |
2023 |
+#endif |
2024 |
|
2025 |
+#if !defined(__AVR_TINY__) |
2026 |
#ifdef L_copy_data |
2027 |
.section .init4,"ax",@progbits |
2028 |
.global __do_copy_data |
2029 |
@@ -824,6 +843,7 @@ __do_copy_data: |
2030 |
brne .L__do_copy_data_loop |
2031 |
#endif /* !defined(__AVR_HAVE_ELPMX__) && !defined(__AVR_HAVE_ELPM__) */ |
2032 |
#endif /* L_copy_data */ |
2033 |
+#endif |
2034 |
|
2035 |
/* __do_clear_bss is only necessary if there is anything in .bss section. */ |
2036 |
|
2037 |
@@ -864,7 +884,12 @@ __do_global_ctors: |
2038 |
ldi r20, hh8(__ctors_end) |
2039 |
rjmp .L__do_global_ctors_start |
2040 |
.L__do_global_ctors_loop: |
2041 |
+#if defined (__AVR_TINY__) |
2042 |
+ subi r28, lo8(2) |
2043 |
+ sbci r29, hi8(2) |
2044 |
+#else |
2045 |
sbiw r28, 2 |
2046 |
+#endif |
2047 |
sbc r20, __zero_reg__ |
2048 |
mov_h r31, r29 |
2049 |
mov_l r30, r28 |
2050 |
@@ -882,7 +907,12 @@ __do_global_ctors: |
2051 |
ldi r29, hi8(__ctors_end) |
2052 |
rjmp .L__do_global_ctors_start |
2053 |
.L__do_global_ctors_loop: |
2054 |
+#if defined (__AVR_TINY__) |
2055 |
+ subi r28, lo8(2) |
2056 |
+ sbci r29, hi8(2) |
2057 |
+#else |
2058 |
sbiw r28, 2 |
2059 |
+#endif |
2060 |
mov_h r31, r29 |
2061 |
mov_l r30, r28 |
2062 |
XCALL __tablejump__ |
2063 |
@@ -905,7 +935,12 @@ __do_global_dtors: |
2064 |
ldi r20, hh8(__dtors_start) |
2065 |
rjmp .L__do_global_dtors_start |
2066 |
.L__do_global_dtors_loop: |
2067 |
+#if defined (__AVR_TINY__) |
2068 |
+ subi r28, lo8(2) |
2069 |
+ sbci r29, hi8(2) |
2070 |
+#else |
2071 |
sbiw r28, 2 |
2072 |
+#endif |
2073 |
sbc r20, __zero_reg__ |
2074 |
mov_h r31, r29 |
2075 |
mov_l r30, r28 |
2076 |
@@ -926,7 +961,12 @@ __do_global_dtors: |
2077 |
mov_h r31, r29 |
2078 |
mov_l r30, r28 |
2079 |
XCALL __tablejump__ |
2080 |
+#if defined (__AVR_TINY__) |
2081 |
+ subi r28, lo8(-2) |
2082 |
+ sbci r29, hi8(-2) |
2083 |
+#else |
2084 |
adiw r28, 2 |
2085 |
+#endif |
2086 |
.L__do_global_dtors_start: |
2087 |
cpi r28, lo8(__dtors_end) |
2088 |
cpc r29, r17 |
2089 |
@@ -934,6 +974,7 @@ __do_global_dtors: |
2090 |
#endif /* defined(__AVR_HAVE_RAMPZ__) */ |
2091 |
#endif /* L_dtors */ |
2092 |
|
2093 |
+#if !defined (__AVR_TINY__) |
2094 |
#ifdef L_tablejump_elpm |
2095 |
.global __tablejump_elpm__ |
2096 |
.func __tablejump_elpm__ |
2097 |
@@ -963,5 +1004,6 @@ __tablejump_elpm__: |
2098 |
#endif /* defined (__AVR_HAVE_ELPM__) */ |
2099 |
.endfunc |
2100 |
#endif /* defined (L_tablejump_elpm) */ |
2101 |
+#endif /* !defined (__AVR_TINY__) */ |
2102 |
|
2103 |
#include "libgcc-fixed.S" |
2104 |
diff -Naurp gcc/config/avr/t-avr gcc/config/avr/t-avr |
2105 |
--- gcc/config/avr/t-avr 2011-01-19 13:03:59.000000000 -0600 |
2106 |
+++ gcc/config/avr/t-avr 2011-01-19 13:11:23.000000000 -0600 |
2107 |
@@ -107,8 +107,8 @@ fp-bit.c: $(srcdir)/config/fp-bit.c $(sr |
2108 |
|
2109 |
FPBIT = fp-bit.c |
2110 |
|
2111 |
-MULTILIB_OPTIONS = mmcu=avr2/mmcu=avr25/mmcu=avr3/mmcu=avr31/mmcu=avr35/mmcu=avr4/mmcu=avr5/mmcu=avr51/mmcu=avr6/mmcu=avrxmega2/mmcu=avrxmega4/mmcu=avrxmega5/mmcu=avrxmega6/mmcu=avrxmega7 |
2112 |
-MULTILIB_DIRNAMES = avr2 avr25 avr3 avr31 avr35 avr4 avr5 avr51 avr6 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 |
2113 |
+MULTILIB_OPTIONS = mmcu=avr2/mmcu=avr25/mmcu=avr3/mmcu=avr31/mmcu=avr35/mmcu=avr4/mmcu=avr5/mmcu=avr51/mmcu=avr6/mmcu=avrxmega2/mmcu=avrxmega4/mmcu=avrxmega5/mmcu=avrxmega6/mmcu=avrxmega7/mmcu=avrtiny10 |
2114 |
+MULTILIB_DIRNAMES = avr2 avr25 avr3 avr31 avr35 avr4 avr5 avr51 avr6 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 avrtiny10 |
2115 |
|
2116 |
# The many avr2 matches are not listed here - this is the default. |
2117 |
MULTILIB_MATCHES = \ |
2118 |
@@ -242,7 +242,13 @@ MULTILIB_MATCHES = \ |
2119 |
mmcu?avrxmega6=mmcu?atxmega256a3b \ |
2120 |
mmcu?avrxmega6=mmcu?atxmega256d3 \ |
2121 |
mmcu?avrxmega7=mmcu?atxmega128a1 \ |
2122 |
- mmcu?avrxmega7=mmcu?atxmega128a1u |
2123 |
+ mmcu?avrxmega7=mmcu?atxmega128a1u \ |
2124 |
+ mmcu?avrtiny10=mmcu?attiny4 \ |
2125 |
+ mmcu?avrtiny10=mmcu?attiny5 \ |
2126 |
+ mmcu?avrtiny10=mmcu?attiny9 \ |
2127 |
+ mmcu?avrtiny10=mmcu?attiny10 \ |
2128 |
+ mmcu?avrtiny10=mmcu?attiny20 \ |
2129 |
+ mmcu?avrtiny10=mmcu?attiny40 |
2130 |
|
2131 |
MULTILIB_EXCEPTIONS = |
2132 |
|