Diffstat (limited to 'toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99384.patch')
-rw-r--r-- | toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99384.patch | 1202 |
1 file changed, 0 insertions, 1202 deletions
diff --git a/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99384.patch b/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99384.patch
deleted file mode 100644
index 89c04a8949..0000000000
--- a/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99384.patch
+++ /dev/null
@@ -1,1202 +0,0 @@
1 | 2010-08-18 Marcus Shawcroft <marcus.shawcroft@arm.com> | ||
2 | * config/arm/arm-protos.h (arm_expand_sync): New. | ||
3 | (arm_output_memory_barrier, arm_output_sync_insn): New. | ||
4 | (arm_sync_loop_insns): New. | ||
5 | * config/arm/arm.c (FL_ARCH7): New. | ||
6 | (FL_FOR_ARCH7): Include FL_ARCH7. | ||
7 | (arm_arch7): New. | ||
8 | (arm_print_operand): Support %C markup. | ||
9 | (arm_legitimize_sync_memory): New. | ||
10 | (arm_emit, arm_insn_count, arm_count, arm_output_asm_insn): New. | ||
11 | (arm_process_output_memory_barrier, arm_output_memory_barrier): New. | ||
12 | (arm_ldrex_suffix, arm_output_ldrex, arm_output_strex): New. | ||
13 | (arm_output_op2, arm_output_op3, arm_output_sync_loop): New. | ||
14 | (arm_get_sync_operand, FETCH_SYNC_OPERAND): New. | ||
15 | (arm_process_output_sync_insn, arm_output_sync_insn): New. | ||
16 | (arm_sync_loop_insns, arm_call_generator, arm_expand_sync): New. | ||
17 | * config/arm/arm.h (struct arm_sync_generator): New. | ||
18 | (TARGET_HAVE_DMB, TARGET_HAVE_DMB_MCR): New. | ||
19 | (TARGET_HAVE_MEMORY_BARRIER): New. | ||
20 | (TARGET_HAVE_LDREX, TARGET_HAVE_LDREXBHD): New. | ||
21 | * config/arm/arm.md: Include sync.md. | ||
22 | (UNSPEC_MEMORY_BARRIER): New. | ||
23 | (VUNSPEC_SYNC_COMPARE_AND_SWAP, VUNSPEC_SYNC_LOCK): New. | ||
24 | (VUNSPEC_SYNC_OP): New. | ||
25 | (VUNSPEC_SYNC_NEW_OP, VUNSPEC_SYNC_OLD_OP): New. | ||
26 | (sync_result, sync_memory, sync_required_value): New attributes. | ||
27 | (sync_new_value, sync_t1, sync_t2): Likewise. | ||
28 | (sync_release_barrier, sync_op): Likewise. | ||
29 | (length): Add logic to length attribute definition to call | ||
30 | arm_sync_loop_insns when appropriate. | ||
31 | * config/arm/sync.md: New file. | ||
32 | |||
33 | 2010-09-09 Andrew Stubbs <ams@codesourcery.com> | ||
34 | |||
35 | Backport from mainline: | ||
36 | |||
37 | 2010-08-25 Tejas Belagod <tejas.belagod@arm.com> | ||
38 | * config/arm/iterators.md (VU, SE, V_widen_l): New. | ||
39 | (V_unpack, US): New. | ||
40 | |||
41 | === modified file 'gcc/config/arm/arm-protos.h' | ||
42 | --- old/gcc/config/arm/arm-protos.h 2010-08-24 13:15:54 +0000 | ||
43 | +++ new/gcc/config/arm/arm-protos.h 2010-09-09 15:03:00 +0000 | ||
44 | @@ -148,6 +148,11 @@ | ||
45 | extern void arm_set_return_address (rtx, rtx); | ||
46 | extern int arm_eliminable_register (rtx); | ||
47 | extern const char *arm_output_shift(rtx *, int); | ||
48 | +extern void arm_expand_sync (enum machine_mode, struct arm_sync_generator *, | ||
49 | + rtx, rtx, rtx, rtx); | ||
50 | +extern const char *arm_output_memory_barrier (rtx *); | ||
51 | +extern const char *arm_output_sync_insn (rtx, rtx *); | ||
52 | +extern unsigned int arm_sync_loop_insns (rtx , rtx *); | ||
53 | |||
54 | extern bool arm_output_addr_const_extra (FILE *, rtx); | ||
55 | |||
56 | |||
57 | === modified file 'gcc/config/arm/arm.c' | ||
58 | --- old/gcc/config/arm/arm.c 2010-09-01 13:29:58 +0000 | ||
59 | +++ new/gcc/config/arm/arm.c 2010-09-09 15:03:00 +0000 | ||
60 | @@ -605,6 +605,7 @@ | ||
61 | #define FL_NEON (1 << 20) /* Neon instructions. */ | ||
62 | #define FL_ARCH7EM (1 << 21) /* Instructions present in the ARMv7E-M | ||
63 | architecture. */ | ||
64 | +#define FL_ARCH7 (1 << 22) /* Architecture 7. */ | ||
65 | |||
66 | #define FL_IWMMXT (1 << 29) /* XScale v2 or "Intel Wireless MMX technology". */ | ||
67 | |||
68 | @@ -625,7 +626,7 @@ | ||
69 | #define FL_FOR_ARCH6ZK FL_FOR_ARCH6K | ||
70 | #define FL_FOR_ARCH6T2 (FL_FOR_ARCH6 | FL_THUMB2) | ||
71 | #define FL_FOR_ARCH6M (FL_FOR_ARCH6 & ~FL_NOTM) | ||
72 | -#define FL_FOR_ARCH7 (FL_FOR_ARCH6T2 &~ FL_NOTM) | ||
73 | +#define FL_FOR_ARCH7 ((FL_FOR_ARCH6T2 & ~FL_NOTM) | FL_ARCH7) | ||
74 | #define FL_FOR_ARCH7A (FL_FOR_ARCH7 | FL_NOTM | FL_ARCH6K) | ||
75 | #define FL_FOR_ARCH7R (FL_FOR_ARCH7A | FL_DIV) | ||
76 | #define FL_FOR_ARCH7M (FL_FOR_ARCH7 | FL_DIV) | ||
77 | @@ -663,6 +664,9 @@ | ||
78 | /* Nonzero if this chip supports the ARM 6K extensions. */ | ||
79 | int arm_arch6k = 0; | ||
80 | |||
81 | +/* Nonzero if this chip supports the ARM 7 extensions. */ | ||
82 | +int arm_arch7 = 0; | ||
83 | + | ||
84 | /* Nonzero if instructions not present in the 'M' profile can be used. */ | ||
85 | int arm_arch_notm = 0; | ||
86 | |||
87 | @@ -1634,6 +1638,7 @@ | ||
88 | arm_arch6 = (insn_flags & FL_ARCH6) != 0; | ||
89 | arm_arch6k = (insn_flags & FL_ARCH6K) != 0; | ||
90 | arm_arch_notm = (insn_flags & FL_NOTM) != 0; | ||
91 | + arm_arch7 = (insn_flags & FL_ARCH7) != 0; | ||
92 | arm_arch7em = (insn_flags & FL_ARCH7EM) != 0; | ||
93 | arm_arch_thumb2 = (insn_flags & FL_THUMB2) != 0; | ||
94 | arm_arch_xscale = (insn_flags & FL_XSCALE) != 0; | ||
95 | @@ -16561,6 +16566,17 @@ | ||
96 | } | ||
97 | return; | ||
98 | |||
99 | + case 'C': | ||
100 | + { | ||
101 | + rtx addr; | ||
102 | + | ||
103 | + gcc_assert (GET_CODE (x) == MEM); | ||
104 | + addr = XEXP (x, 0); | ||
105 | + gcc_assert (GET_CODE (addr) == REG); | ||
106 | + asm_fprintf (stream, "[%r]", REGNO (addr)); | ||
107 | + } | ||
108 | + return; | ||
109 | + | ||
110 | /* Translate an S register number into a D register number and element index. */ | ||
111 | case 'y': | ||
112 | { | ||
113 | @@ -22763,4 +22779,372 @@ | ||
114 | is_packed); | ||
115 | } | ||
116 | |||
117 | +/* Legitimize a memory reference for a sync primitive implemented using | ||
118 | + ldrex / strex. We currently force the form of the reference to be | ||
119 | + indirect without offset. We do not yet support the indirect offset | ||
120 | + addressing supported by some ARM targets for these | ||
121 | + instructions. */ | ||
122 | +static rtx | ||
123 | +arm_legitimize_sync_memory (rtx memory) | ||
124 | +{ | ||
125 | + rtx addr = force_reg (Pmode, XEXP (memory, 0)); | ||
126 | + rtx legitimate_memory = gen_rtx_MEM (GET_MODE (memory), addr); | ||
127 | + | ||
128 | + set_mem_alias_set (legitimate_memory, ALIAS_SET_MEMORY_BARRIER); | ||
129 | + MEM_VOLATILE_P (legitimate_memory) = MEM_VOLATILE_P (memory); | ||
130 | + return legitimate_memory; | ||
131 | +} | ||
132 | + | ||
133 | +/* An instruction emitter. */ | ||
134 | +typedef void (* emit_f) (int label, const char *, rtx *); | ||
135 | + | ||
136 | +/* An instruction emitter that emits via the conventional | ||
137 | + output_asm_insn. */ | ||
138 | +static void | ||
139 | +arm_emit (int label ATTRIBUTE_UNUSED, const char *pattern, rtx *operands) | ||
140 | +{ | ||
141 | + output_asm_insn (pattern, operands); | ||
142 | +} | ||
143 | + | ||
144 | +/* Count the number of emitted synchronization instructions. */ | ||
145 | +static unsigned arm_insn_count; | ||
146 | + | ||
147 | +/* An emitter that counts emitted instructions but does not actually | ||
148 | + emit instructions into the instruction stream. */ | ||
149 | +static void | ||
150 | +arm_count (int label, | ||
151 | + const char *pattern ATTRIBUTE_UNUSED, | ||
152 | + rtx *operands ATTRIBUTE_UNUSED) | ||
153 | +{ | ||
154 | + if (! label) | ||
155 | + ++ arm_insn_count; | ||
156 | +} | ||
157 | + | ||
158 | +/* Construct a pattern using conventional output formatting and feed | ||
159 | + it to output_asm_insn. Provides a mechanism to construct the | ||
160 | + output pattern on the fly. Note the hard limit on the pattern | ||
161 | + buffer size. */ | ||
162 | +static void | ||
163 | +arm_output_asm_insn (emit_f emit, int label, rtx *operands, | ||
164 | + const char *pattern, ...) | ||
165 | +{ | ||
166 | + va_list ap; | ||
167 | + char buffer[256]; | ||
168 | + | ||
169 | + va_start (ap, pattern); | ||
170 | + vsprintf (buffer, pattern, ap); | ||
171 | + va_end (ap); | ||
172 | + emit (label, buffer, operands); | ||
173 | +} | ||
174 | + | ||
175 | +/* Emit the memory barrier instruction, if any, provided by this | ||
176 | + target to a specified emitter. */ | ||
177 | +static void | ||
178 | +arm_process_output_memory_barrier (emit_f emit, rtx *operands) | ||
179 | +{ | ||
180 | + if (TARGET_HAVE_DMB) | ||
181 | + { | ||
182 | /* Note that we issue a system-level barrier. We should consider | ||
183 | issuing an inner shareability zone barrier here instead, i.e. | ||
184 | + "DMB ISH". */ | ||
185 | + emit (0, "dmb\tsy", operands); | ||
186 | + return; | ||
187 | + } | ||
188 | + | ||
189 | + if (TARGET_HAVE_DMB_MCR) | ||
190 | + { | ||
191 | + emit (0, "mcr\tp15, 0, r0, c7, c10, 5", operands); | ||
192 | + return; | ||
193 | + } | ||
194 | + | ||
195 | + gcc_unreachable (); | ||
196 | +} | ||
197 | + | ||
198 | +/* Emit the memory barrier instruction, if any, provided by this | ||
199 | + target. */ | ||
200 | +const char * | ||
201 | +arm_output_memory_barrier (rtx *operands) | ||
202 | +{ | ||
203 | + arm_process_output_memory_barrier (arm_emit, operands); | ||
204 | + return ""; | ||
205 | +} | ||
206 | + | ||
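For context only (not part of the patch): the barrier helpers above back GCC's __sync_synchronize builtin, emitting "dmb sy" on ARMv7 and the CP15 c7/c10/5 write on ARMv6K. A minimal, hypothetical C sketch that exercises this path, assuming an ARMv7 target and hypothetical producer/consumer names:

/* Illustrative flag handshake relying on the full barrier emitted
   by the memory_barrier pattern (dmb sy on ARMv7).  */
static int payload;
static volatile int ready;

void producer (int value)
{
  payload = value;
  __sync_synchronize ();   /* expands via the memory_barrier pattern */
  ready = 1;
}

int consumer (void)
{
  while (!ready)
    ;                      /* spin until the flag is observed */
  __sync_synchronize ();
  return payload;
}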
207 | +/* Helper to figure out the instruction suffix required on ldrex/strex | ||
208 | + for operations on an object of the specified mode. */ | ||
209 | +static const char * | ||
210 | +arm_ldrex_suffix (enum machine_mode mode) | ||
211 | +{ | ||
212 | + switch (mode) | ||
213 | + { | ||
214 | + case QImode: return "b"; | ||
215 | + case HImode: return "h"; | ||
216 | + case SImode: return ""; | ||
217 | + case DImode: return "d"; | ||
218 | + default: | ||
219 | + gcc_unreachable (); | ||
220 | + } | ||
221 | + return ""; | ||
222 | +} | ||
223 | + | ||
224 | +/* Emit an ldrex{b,h,d, } instruction appropriate for the specified | ||
225 | + mode. */ | ||
226 | +static void | ||
227 | +arm_output_ldrex (emit_f emit, | ||
228 | + enum machine_mode mode, | ||
229 | + rtx target, | ||
230 | + rtx memory) | ||
231 | +{ | ||
232 | + const char *suffix = arm_ldrex_suffix (mode); | ||
233 | + rtx operands[2]; | ||
234 | + | ||
235 | + operands[0] = target; | ||
236 | + operands[1] = memory; | ||
237 | + arm_output_asm_insn (emit, 0, operands, "ldrex%s\t%%0, %%C1", suffix); | ||
238 | +} | ||
239 | + | ||
240 | +/* Emit a strex{b,h,d, } instruction appropriate for the specified | ||
241 | + mode. */ | ||
242 | +static void | ||
243 | +arm_output_strex (emit_f emit, | ||
244 | + enum machine_mode mode, | ||
245 | + const char *cc, | ||
246 | + rtx result, | ||
247 | + rtx value, | ||
248 | + rtx memory) | ||
249 | +{ | ||
250 | + const char *suffix = arm_ldrex_suffix (mode); | ||
251 | + rtx operands[3]; | ||
252 | + | ||
253 | + operands[0] = result; | ||
254 | + operands[1] = value; | ||
255 | + operands[2] = memory; | ||
256 | + arm_output_asm_insn (emit, 0, operands, "strex%s%s\t%%0, %%1, %%C2", suffix, | ||
257 | + cc); | ||
258 | +} | ||
259 | + | ||
260 | +/* Helper to emit a two operand instruction. */ | ||
261 | +static void | ||
262 | +arm_output_op2 (emit_f emit, const char *mnemonic, rtx d, rtx s) | ||
263 | +{ | ||
264 | + rtx operands[2]; | ||
265 | + | ||
266 | + operands[0] = d; | ||
267 | + operands[1] = s; | ||
268 | + arm_output_asm_insn (emit, 0, operands, "%s\t%%0, %%1", mnemonic); | ||
269 | +} | ||
270 | + | ||
271 | +/* Helper to emit a three operand instruction. */ | ||
272 | +static void | ||
273 | +arm_output_op3 (emit_f emit, const char *mnemonic, rtx d, rtx a, rtx b) | ||
274 | +{ | ||
275 | + rtx operands[3]; | ||
276 | + | ||
277 | + operands[0] = d; | ||
278 | + operands[1] = a; | ||
279 | + operands[2] = b; | ||
280 | + arm_output_asm_insn (emit, 0, operands, "%s\t%%0, %%1, %%2", mnemonic); | ||
281 | +} | ||
282 | + | ||
283 | +/* Emit a load store exclusive synchronization loop. | ||
284 | + | ||
285 | + do | ||
286 | + old_value = [mem] | ||
287 | + if old_value != required_value | ||
288 | + break; | ||
289 | + t1 = sync_op (old_value, new_value) | ||
290 | + [mem] = t1, t2 = [0|1] | ||
291 | + while ! t2 | ||
292 | + | ||
293 | + Note: | ||
294 | + t1 == t2 is not permitted | ||
295 | + t1 == old_value is permitted | ||
296 | + | ||
297 | + required_value: | ||
298 | + | ||
299 | + RTX register or const_int representing the required old_value for | ||
300 | + the modify to continue; if NULL, no comparison is performed. */ | ||
301 | +static void | ||
302 | +arm_output_sync_loop (emit_f emit, | ||
303 | + enum machine_mode mode, | ||
304 | + rtx old_value, | ||
305 | + rtx memory, | ||
306 | + rtx required_value, | ||
307 | + rtx new_value, | ||
308 | + rtx t1, | ||
309 | + rtx t2, | ||
310 | + enum attr_sync_op sync_op, | ||
311 | + int early_barrier_required) | ||
312 | +{ | ||
313 | + rtx operands[1]; | ||
314 | + | ||
315 | + gcc_assert (t1 != t2); | ||
316 | + | ||
317 | + if (early_barrier_required) | ||
318 | + arm_process_output_memory_barrier (emit, NULL); | ||
319 | + | ||
320 | + arm_output_asm_insn (emit, 1, operands, "%sLSYT%%=:", LOCAL_LABEL_PREFIX); | ||
321 | + | ||
322 | + arm_output_ldrex (emit, mode, old_value, memory); | ||
323 | + | ||
324 | + if (required_value) | ||
325 | + { | ||
326 | + rtx operands[2]; | ||
327 | + | ||
328 | + operands[0] = old_value; | ||
329 | + operands[1] = required_value; | ||
330 | + arm_output_asm_insn (emit, 0, operands, "cmp\t%%0, %%1"); | ||
331 | + arm_output_asm_insn (emit, 0, operands, "bne\t%sLSYB%%=", LOCAL_LABEL_PREFIX); | ||
332 | + } | ||
333 | + | ||
334 | + switch (sync_op) | ||
335 | + { | ||
336 | + case SYNC_OP_ADD: | ||
337 | + arm_output_op3 (emit, "add", t1, old_value, new_value); | ||
338 | + break; | ||
339 | + | ||
340 | + case SYNC_OP_SUB: | ||
341 | + arm_output_op3 (emit, "sub", t1, old_value, new_value); | ||
342 | + break; | ||
343 | + | ||
344 | + case SYNC_OP_IOR: | ||
345 | + arm_output_op3 (emit, "orr", t1, old_value, new_value); | ||
346 | + break; | ||
347 | + | ||
348 | + case SYNC_OP_XOR: | ||
349 | + arm_output_op3 (emit, "eor", t1, old_value, new_value); | ||
350 | + break; | ||
351 | + | ||
352 | + case SYNC_OP_AND: | ||
353 | + arm_output_op3 (emit,"and", t1, old_value, new_value); | ||
354 | + break; | ||
355 | + | ||
356 | + case SYNC_OP_NAND: | ||
357 | + arm_output_op3 (emit, "and", t1, old_value, new_value); | ||
358 | + arm_output_op2 (emit, "mvn", t1, t1); | ||
359 | + break; | ||
360 | + | ||
361 | + case SYNC_OP_NONE: | ||
362 | + t1 = new_value; | ||
363 | + break; | ||
364 | + } | ||
365 | + | ||
366 | + arm_output_strex (emit, mode, "", t2, t1, memory); | ||
367 | + operands[0] = t2; | ||
368 | + arm_output_asm_insn (emit, 0, operands, "teq\t%%0, #0"); | ||
369 | + arm_output_asm_insn (emit, 0, operands, "bne\t%sLSYT%%=", LOCAL_LABEL_PREFIX); | ||
370 | + | ||
371 | + arm_process_output_memory_barrier (emit, NULL); | ||
372 | + arm_output_asm_insn (emit, 1, operands, "%sLSYB%%=:", LOCAL_LABEL_PREFIX); | ||
373 | +} | ||
374 | + | ||
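For context only (not part of the patch): arm_output_sync_loop above emits the ldrex/strex retry loop that backs GCC's __sync_* builtins. A hedged C sketch of source that expands into such a loop (the SYNC_OP_ADD case), assuming an ARMv6-or-later target; the function names are illustrative:

/* Each builtin below is expanded by GCC into one of the opaque
   ldrex/strex loops emitted by arm_output_sync_loop.  */
int fetch_then_add (int *counter, int delta)
{
  /* Returns the old value; maps to the sync_old_addsi pattern.  */
  return __sync_fetch_and_add (counter, delta);
}

int add_then_fetch (int *counter, int delta)
{
  /* Returns the new value; maps to the sync_new_addsi pattern.  */
  return __sync_add_and_fetch (counter, delta);
}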
375 | +static rtx | ||
376 | +arm_get_sync_operand (rtx *operands, int index, rtx default_value) | ||
377 | +{ | ||
378 | + if (index > 0) | ||
379 | + default_value = operands[index - 1]; | ||
380 | + | ||
381 | + return default_value; | ||
382 | +} | ||
383 | + | ||
384 | +#define FETCH_SYNC_OPERAND(NAME, DEFAULT) \ | ||
385 | + arm_get_sync_operand (operands, (int) get_attr_sync_##NAME (insn), DEFAULT); | ||
386 | + | ||
387 | +/* Extract the operands for a synchronization instruction from the | ||
388 | instruction's attributes and emit the instruction. */ | ||
389 | +static void | ||
390 | +arm_process_output_sync_insn (emit_f emit, rtx insn, rtx *operands) | ||
391 | +{ | ||
392 | + rtx result, memory, required_value, new_value, t1, t2; | ||
393 | + int early_barrier; | ||
394 | + enum machine_mode mode; | ||
395 | + enum attr_sync_op sync_op; | ||
396 | + | ||
397 | + result = FETCH_SYNC_OPERAND(result, 0); | ||
398 | + memory = FETCH_SYNC_OPERAND(memory, 0); | ||
399 | + required_value = FETCH_SYNC_OPERAND(required_value, 0); | ||
400 | + new_value = FETCH_SYNC_OPERAND(new_value, 0); | ||
401 | + t1 = FETCH_SYNC_OPERAND(t1, 0); | ||
402 | + t2 = FETCH_SYNC_OPERAND(t2, 0); | ||
403 | + early_barrier = | ||
404 | + get_attr_sync_release_barrier (insn) == SYNC_RELEASE_BARRIER_YES; | ||
405 | + sync_op = get_attr_sync_op (insn); | ||
406 | + mode = GET_MODE (memory); | ||
407 | + | ||
408 | + arm_output_sync_loop (emit, mode, result, memory, required_value, | ||
409 | + new_value, t1, t2, sync_op, early_barrier); | ||
410 | +} | ||
411 | + | ||
412 | +/* Emit a synchronization instruction loop. */ | ||
413 | +const char * | ||
414 | +arm_output_sync_insn (rtx insn, rtx *operands) | ||
415 | +{ | ||
416 | + arm_process_output_sync_insn (arm_emit, insn, operands); | ||
417 | + return ""; | ||
418 | +} | ||
419 | + | ||
420 | +/* Count the number of machine instructions that will be emitted for a | ||
421 | synchronization instruction. Note that the emitter used does not | ||
422 | emit instructions; it just counts them, being careful not | ||
423 | + to count labels. */ | ||
424 | +unsigned int | ||
425 | +arm_sync_loop_insns (rtx insn, rtx *operands) | ||
426 | +{ | ||
427 | + arm_insn_count = 0; | ||
428 | + arm_process_output_sync_insn (arm_count, insn, operands); | ||
429 | + return arm_insn_count; | ||
430 | +} | ||
431 | + | ||
432 | +/* Helper to call a target sync instruction generator, dealing with | ||
433 | + the variation in operands required by the different generators. */ | ||
434 | +static rtx | ||
435 | +arm_call_generator (struct arm_sync_generator *generator, rtx old_value, | ||
436 | + rtx memory, rtx required_value, rtx new_value) | ||
437 | +{ | ||
438 | + switch (generator->op) | ||
439 | + { | ||
440 | + case arm_sync_generator_omn: | ||
441 | + gcc_assert (! required_value); | ||
442 | + return generator->u.omn (old_value, memory, new_value); | ||
443 | + | ||
444 | + case arm_sync_generator_omrn: | ||
445 | + gcc_assert (required_value); | ||
446 | + return generator->u.omrn (old_value, memory, required_value, new_value); | ||
447 | + } | ||
448 | + | ||
449 | + return NULL; | ||
450 | +} | ||
451 | + | ||
452 | +/* Expand a synchronization loop. The synchronization loop is expanded | ||
453 | + as an opaque block of instructions in order to ensure that we do | ||
454 | + not subsequently get extraneous memory accesses inserted within the | ||
455 | + critical region. The exclusive access property of ldrex/strex is | ||
456 | only guaranteed if there are no intervening memory accesses. */ | ||
457 | +void | ||
458 | +arm_expand_sync (enum machine_mode mode, | ||
459 | + struct arm_sync_generator *generator, | ||
460 | + rtx target, rtx memory, rtx required_value, rtx new_value) | ||
461 | +{ | ||
462 | + if (target == NULL) | ||
463 | + target = gen_reg_rtx (mode); | ||
464 | + | ||
465 | + memory = arm_legitimize_sync_memory (memory); | ||
466 | + if (mode != SImode) | ||
467 | + { | ||
468 | + rtx load_temp = gen_reg_rtx (SImode); | ||
469 | + | ||
470 | + if (required_value) | ||
471 | + required_value = convert_modes (SImode, mode, required_value, true); | ||
472 | + | ||
473 | + new_value = convert_modes (SImode, mode, new_value, true); | ||
474 | + emit_insn (arm_call_generator (generator, load_temp, memory, | ||
475 | + required_value, new_value)); | ||
476 | + emit_move_insn (target, gen_lowpart (mode, load_temp)); | ||
477 | + } | ||
478 | + else | ||
479 | + { | ||
480 | + emit_insn (arm_call_generator (generator, target, memory, required_value, | ||
481 | + new_value)); | ||
482 | + } | ||
483 | +} | ||
484 | + | ||
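For context only (not part of the patch): the mode != SImode branch of arm_expand_sync above performs the operation in an SImode temporary and takes the low part, which is what makes the narrow QImode/HImode builtins usable. A hedged C sketch, assuming a target with ldrexh/strexh (ARMv6K or ARMv7 per TARGET_HAVE_LDREXBHD); cas16 is a hypothetical name:

#include <stdint.h>

/* A 16-bit compare-and-swap; GCC expands this through arm_expand_sync
   with mode == HImode, using an SImode load_temp and gen_lowpart to
   produce the narrow result.  */
uint16_t cas16 (uint16_t *slot, uint16_t expected, uint16_t desired)
{
  return __sync_val_compare_and_swap (slot, expected, desired);
}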
485 | #include "gt-arm.h" | ||
486 | |||
487 | === modified file 'gcc/config/arm/arm.h' | ||
488 | --- old/gcc/config/arm/arm.h 2010-09-01 13:29:58 +0000 | ||
489 | +++ new/gcc/config/arm/arm.h 2010-09-09 15:03:00 +0000 | ||
490 | @@ -128,6 +128,24 @@ | ||
491 | /* The processor for which instructions should be scheduled. */ | ||
492 | extern enum processor_type arm_tune; | ||
493 | |||
494 | +enum arm_sync_generator_tag | ||
495 | + { | ||
496 | + arm_sync_generator_omn, | ||
497 | + arm_sync_generator_omrn | ||
498 | + }; | ||
499 | + | ||
500 | +/* Wrapper to pass around a polymorphic pointer to a sync instruction | ||
501 | generator and a tag identifying its variant. */ | ||
502 | +struct arm_sync_generator | ||
503 | +{ | ||
504 | + enum arm_sync_generator_tag op; | ||
505 | + union | ||
506 | + { | ||
507 | + rtx (* omn) (rtx, rtx, rtx); | ||
508 | + rtx (* omrn) (rtx, rtx, rtx, rtx); | ||
509 | + } u; | ||
510 | +}; | ||
511 | + | ||
512 | typedef enum arm_cond_code | ||
513 | { | ||
514 | ARM_EQ = 0, ARM_NE, ARM_CS, ARM_CC, ARM_MI, ARM_PL, ARM_VS, ARM_VC, | ||
515 | @@ -272,6 +290,20 @@ | ||
516 | for Thumb-2. */ | ||
517 | #define TARGET_UNIFIED_ASM TARGET_THUMB2 | ||
518 | |||
519 | +/* Nonzero if this chip provides the DMB instruction. */ | ||
520 | +#define TARGET_HAVE_DMB (arm_arch7) | ||
521 | + | ||
522 | +/* Nonzero if this chip implements a memory barrier via CP15. */ | ||
523 | +#define TARGET_HAVE_DMB_MCR (arm_arch6k && ! TARGET_HAVE_DMB) | ||
524 | + | ||
525 | +/* Nonzero if this chip implements a memory barrier instruction. */ | ||
526 | +#define TARGET_HAVE_MEMORY_BARRIER (TARGET_HAVE_DMB || TARGET_HAVE_DMB_MCR) | ||
527 | + | ||
528 | +/* Nonzero if this chip supports ldrex and strex. */ | ||
529 | +#define TARGET_HAVE_LDREX ((arm_arch6 && TARGET_ARM) || arm_arch7) | ||
530 | + | ||
531 | +/* Nonzero if this chip supports ldrex{bhd} and strex{bhd}. */ | ||
532 | +#define TARGET_HAVE_LDREXBHD ((arm_arch6k && TARGET_ARM) || arm_arch7) | ||
533 | |||
534 | /* True iff the full BPABI is being used. If TARGET_BPABI is true, | ||
535 | then TARGET_AAPCS_BASED must be true -- but the converse does not | ||
536 | @@ -405,6 +437,12 @@ | ||
537 | /* Nonzero if this chip supports the ARM Architecture 6 extensions. */ | ||
538 | extern int arm_arch6; | ||
539 | |||
540 | +/* Nonzero if this chip supports the ARM Architecture 6k extensions. */ | ||
541 | +extern int arm_arch6k; | ||
542 | + | ||
543 | +/* Nonzero if this chip supports the ARM Architecture 7 extensions. */ | ||
544 | +extern int arm_arch7; | ||
545 | + | ||
546 | /* Nonzero if instructions not present in the 'M' profile can be used. */ | ||
547 | extern int arm_arch_notm; | ||
548 | |||
549 | |||
550 | === modified file 'gcc/config/arm/arm.md' | ||
551 | --- old/gcc/config/arm/arm.md 2010-09-09 14:11:34 +0000 | ||
552 | +++ new/gcc/config/arm/arm.md 2010-09-09 15:03:00 +0000 | ||
553 | @@ -103,6 +103,7 @@ | ||
554 | (UNSPEC_RBIT 26) ; rbit operation. | ||
555 | (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from | ||
556 | ; another symbolic address. | ||
557 | + (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier. | ||
558 | ] | ||
559 | ) | ||
560 | |||
561 | @@ -139,6 +140,11 @@ | ||
562 | (VUNSPEC_ALIGN32 16) ; Used to force 32-byte alignment. | ||
563 | (VUNSPEC_EH_RETURN 20); Use to override the return address for exception | ||
564 | ; handling. | ||
565 | + (VUNSPEC_SYNC_COMPARE_AND_SWAP 21) ; Represent an atomic compare swap. | ||
566 | + (VUNSPEC_SYNC_LOCK 22) ; Represent a sync_lock_test_and_set. | ||
567 | + (VUNSPEC_SYNC_OP 23) ; Represent a sync_<op> | ||
568 | + (VUNSPEC_SYNC_NEW_OP 24) ; Represent a sync_new_<op> | ||
569 | + (VUNSPEC_SYNC_OLD_OP 25) ; Represent a sync_old_<op> | ||
570 | ] | ||
571 | ) | ||
572 | |||
573 | @@ -163,8 +169,21 @@ | ||
574 | (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp" | ||
575 | (const (symbol_ref "arm_fpu_attr"))) | ||
576 | |||
577 | +(define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none")) | ||
578 | +(define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none")) | ||
579 | +(define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none")) | ||
580 | +(define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none")) | ||
581 | +(define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none")) | ||
582 | +(define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none")) | ||
583 | +(define_attr "sync_release_barrier" "yes,no" (const_string "yes")) | ||
584 | +(define_attr "sync_op" "none,add,sub,ior,xor,and,nand" | ||
585 | + (const_string "none")) | ||
586 | + | ||
587 | ; LENGTH of an instruction (in bytes) | ||
588 | -(define_attr "length" "" (const_int 4)) | ||
589 | +(define_attr "length" "" | ||
590 | + (cond [(not (eq_attr "sync_memory" "none")) | ||
591 | + (symbol_ref "arm_sync_loop_insns (insn, operands) * 4") | ||
592 | + ] (const_int 4))) | ||
593 | |||
594 | ; POOL_RANGE is how far away from a constant pool entry that this insn | ||
595 | ; can be placed. If the distance is zero, then this insn will never | ||
596 | @@ -11530,4 +11549,5 @@ | ||
597 | (include "thumb2.md") | ||
598 | ;; Neon patterns | ||
599 | (include "neon.md") | ||
600 | - | ||
601 | +;; Synchronization Primitives | ||
602 | +(include "sync.md") | ||
603 | |||
604 | === added file 'gcc/config/arm/sync.md' | ||
605 | --- old/gcc/config/arm/sync.md 1970-01-01 00:00:00 +0000 | ||
606 | +++ new/gcc/config/arm/sync.md 2010-09-09 15:03:00 +0000 | ||
607 | @@ -0,0 +1,594 @@ | ||
608 | +;; Machine description for ARM processor synchronization primitives. | ||
609 | +;; Copyright (C) 2010 Free Software Foundation, Inc. | ||
610 | +;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com) | ||
611 | +;; | ||
612 | +;; This file is part of GCC. | ||
613 | +;; | ||
614 | +;; GCC is free software; you can redistribute it and/or modify it | ||
615 | +;; under the terms of the GNU General Public License as published by | ||
616 | +;; the Free Software Foundation; either version 3, or (at your option) | ||
617 | +;; any later version. | ||
618 | +;; | ||
619 | +;; GCC is distributed in the hope that it will be useful, but | ||
620 | +;; WITHOUT ANY WARRANTY; without even the implied warranty of | ||
621 | +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
622 | +;; General Public License for more details. | ||
623 | +;; | ||
624 | +;; You should have received a copy of the GNU General Public License | ||
625 | +;; along with GCC; see the file COPYING3. If not see | ||
626 | +;; <http://www.gnu.org/licenses/>. */ | ||
627 | + | ||
628 | +;; ARMv6 introduced the ldrex and strex instructions. These instructions | ||
629 | +;; access SI-width data. In order to implement synchronization | ||
630 | +;; primitives for the narrower QI and HI modes we insert appropriate | ||
631 | +;; AND/OR sequences into the synchronization loop to mask out the | ||
632 | +;; relevant component of an SI access. | ||
633 | + | ||
634 | +(define_expand "memory_barrier" | ||
635 | + [(set (match_dup 0) | ||
636 | + (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))] | ||
637 | + "TARGET_HAVE_MEMORY_BARRIER" | ||
638 | +{ | ||
639 | + operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode)); | ||
640 | + MEM_VOLATILE_P (operands[0]) = 1; | ||
641 | +}) | ||
642 | + | ||
643 | +(define_expand "sync_compare_and_swapsi" | ||
644 | + [(set (match_operand:SI 0 "s_register_operand") | ||
645 | + (unspec_volatile:SI [(match_operand:SI 1 "memory_operand") | ||
646 | + (match_operand:SI 2 "s_register_operand") | ||
647 | + (match_operand:SI 3 "s_register_operand")] | ||
648 | + VUNSPEC_SYNC_COMPARE_AND_SWAP))] | ||
649 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
650 | + { | ||
651 | + struct arm_sync_generator generator; | ||
652 | + generator.op = arm_sync_generator_omrn; | ||
653 | + generator.u.omrn = gen_arm_sync_compare_and_swapsi; | ||
654 | + arm_expand_sync (SImode, &generator, operands[0], operands[1], operands[2], | ||
655 | + operands[3]); | ||
656 | + DONE; | ||
657 | + }) | ||
658 | + | ||
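For context only (not part of the patch): the sync_compare_and_swapsi expander above is the hook reached by GCC's __sync compare-and-swap builtins on SImode data. A hedged usage sketch, assuming an ARMv6-or-later target; the increment helper is hypothetical:

/* A lock-free increment built on the SImode compare-and-swap expanded
   by this pattern.  */
int increment (int *counter)
{
  int old, desired;
  do
    {
      old = *counter;
      desired = old + 1;
    }
  while (!__sync_bool_compare_and_swap (counter, old, desired));
  return desired;
}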
659 | +(define_mode_iterator NARROW [QI HI]) | ||
660 | + | ||
661 | +(define_expand "sync_compare_and_swap<mode>" | ||
662 | + [(set (match_operand:NARROW 0 "s_register_operand") | ||
663 | + (unspec_volatile:NARROW [(match_operand:NARROW 1 "memory_operand") | ||
664 | + (match_operand:NARROW 2 "s_register_operand") | ||
665 | + (match_operand:NARROW 3 "s_register_operand")] | ||
666 | + VUNSPEC_SYNC_COMPARE_AND_SWAP))] | ||
667 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
668 | + { | ||
669 | + struct arm_sync_generator generator; | ||
670 | + generator.op = arm_sync_generator_omrn; | ||
671 | + generator.u.omrn = gen_arm_sync_compare_and_swap<mode>; | ||
672 | + arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], | ||
673 | + operands[2], operands[3]); | ||
674 | + DONE; | ||
675 | + }) | ||
676 | + | ||
677 | +(define_expand "sync_lock_test_and_setsi" | ||
678 | + [(match_operand:SI 0 "s_register_operand") | ||
679 | + (match_operand:SI 1 "memory_operand") | ||
680 | + (match_operand:SI 2 "s_register_operand")] | ||
681 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
682 | + { | ||
683 | + struct arm_sync_generator generator; | ||
684 | + generator.op = arm_sync_generator_omn; | ||
685 | + generator.u.omn = gen_arm_sync_lock_test_and_setsi; | ||
686 | + arm_expand_sync (SImode, &generator, operands[0], operands[1], NULL, | ||
687 | + operands[2]); | ||
688 | + DONE; | ||
689 | + }) | ||
690 | + | ||
691 | +(define_expand "sync_lock_test_and_set<mode>" | ||
692 | + [(match_operand:NARROW 0 "s_register_operand") | ||
693 | + (match_operand:NARROW 1 "memory_operand") | ||
694 | + (match_operand:NARROW 2 "s_register_operand")] | ||
695 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
696 | + { | ||
697 | + struct arm_sync_generator generator; | ||
698 | + generator.op = arm_sync_generator_omn; | ||
699 | + generator.u.omn = gen_arm_sync_lock_test_and_set<mode>; | ||
700 | + arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], NULL, | ||
701 | + operands[2]); | ||
702 | + DONE; | ||
703 | + }) | ||
704 | + | ||
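For context only (not part of the patch): sync_lock_test_and_set is GCC's acquire-style exchange (the corresponding insn patterns below set sync_release_barrier to "no", skipping the leading barrier) and pairs with __sync_lock_release. A hedged spinlock sketch, assuming an ARMv6-or-later target; the lock functions are illustrative names:

/* A test-and-set spinlock using the builtins that the
   sync_lock_test_and_set* patterns implement.  */
void spin_lock (volatile int *lock)
{
  while (__sync_lock_test_and_set (lock, 1))
    ;   /* spin while the old value was already 1 (lock held) */
}

void spin_unlock (volatile int *lock)
{
  __sync_lock_release (lock);   /* stores 0 with release semantics */
}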
705 | +(define_code_iterator syncop [plus minus ior xor and]) | ||
706 | + | ||
707 | +(define_code_attr sync_optab [(ior "ior") | ||
708 | + (xor "xor") | ||
709 | + (and "and") | ||
710 | + (plus "add") | ||
711 | + (minus "sub")]) | ||
712 | + | ||
713 | +(define_expand "sync_<sync_optab>si" | ||
714 | + [(match_operand:SI 0 "memory_operand") | ||
715 | + (match_operand:SI 1 "s_register_operand") | ||
716 | + (syncop:SI (match_dup 0) (match_dup 1))] | ||
717 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
718 | + { | ||
719 | + struct arm_sync_generator generator; | ||
720 | + generator.op = arm_sync_generator_omn; | ||
721 | + generator.u.omn = gen_arm_sync_new_<sync_optab>si; | ||
722 | + arm_expand_sync (SImode, &generator, NULL, operands[0], NULL, operands[1]); | ||
723 | + DONE; | ||
724 | + }) | ||
725 | + | ||
726 | +(define_expand "sync_nandsi" | ||
727 | + [(match_operand:SI 0 "memory_operand") | ||
728 | + (match_operand:SI 1 "s_register_operand") | ||
729 | + (not:SI (and:SI (match_dup 0) (match_dup 1)))] | ||
730 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
731 | + { | ||
732 | + struct arm_sync_generator generator; | ||
733 | + generator.op = arm_sync_generator_omn; | ||
734 | + generator.u.omn = gen_arm_sync_new_nandsi; | ||
735 | + arm_expand_sync (SImode, &generator, NULL, operands[0], NULL, operands[1]); | ||
736 | + DONE; | ||
737 | + }) | ||
738 | + | ||
739 | +(define_expand "sync_<sync_optab><mode>" | ||
740 | + [(match_operand:NARROW 0 "memory_operand") | ||
741 | + (match_operand:NARROW 1 "s_register_operand") | ||
742 | + (syncop:NARROW (match_dup 0) (match_dup 1))] | ||
743 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
744 | + { | ||
745 | + struct arm_sync_generator generator; | ||
746 | + generator.op = arm_sync_generator_omn; | ||
747 | + generator.u.omn = gen_arm_sync_new_<sync_optab><mode>; | ||
748 | + arm_expand_sync (<MODE>mode, &generator, NULL, operands[0], NULL, | ||
749 | + operands[1]); | ||
750 | + DONE; | ||
751 | + }) | ||
752 | + | ||
753 | +(define_expand "sync_nand<mode>" | ||
754 | + [(match_operand:NARROW 0 "memory_operand") | ||
755 | + (match_operand:NARROW 1 "s_register_operand") | ||
756 | + (not:NARROW (and:NARROW (match_dup 0) (match_dup 1)))] | ||
757 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
758 | + { | ||
759 | + struct arm_sync_generator generator; | ||
760 | + generator.op = arm_sync_generator_omn; | ||
761 | + generator.u.omn = gen_arm_sync_new_nand<mode>; | ||
762 | + arm_expand_sync (<MODE>mode, &generator, NULL, operands[0], NULL, | ||
763 | + operands[1]); | ||
764 | + DONE; | ||
765 | + }) | ||
766 | + | ||
767 | +(define_expand "sync_new_<sync_optab>si" | ||
768 | + [(match_operand:SI 0 "s_register_operand") | ||
769 | + (match_operand:SI 1 "memory_operand") | ||
770 | + (match_operand:SI 2 "s_register_operand") | ||
771 | + (syncop:SI (match_dup 1) (match_dup 2))] | ||
772 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
773 | + { | ||
774 | + struct arm_sync_generator generator; | ||
775 | + generator.op = arm_sync_generator_omn; | ||
776 | + generator.u.omn = gen_arm_sync_new_<sync_optab>si; | ||
777 | + arm_expand_sync (SImode, &generator, operands[0], operands[1], NULL, | ||
778 | + operands[2]); | ||
779 | + DONE; | ||
780 | + }) | ||
781 | + | ||
782 | +(define_expand "sync_new_nandsi" | ||
783 | + [(match_operand:SI 0 "s_register_operand") | ||
784 | + (match_operand:SI 1 "memory_operand") | ||
785 | + (match_operand:SI 2 "s_register_operand") | ||
786 | + (not:SI (and:SI (match_dup 1) (match_dup 2)))] | ||
787 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
788 | + { | ||
789 | + struct arm_sync_generator generator; | ||
790 | + generator.op = arm_sync_generator_omn; | ||
791 | + generator.u.omn = gen_arm_sync_new_nandsi; | ||
792 | + arm_expand_sync (SImode, &generator, operands[0], operands[1], NULL, | ||
793 | + operands[2]); | ||
794 | + DONE; | ||
795 | + }) | ||
796 | + | ||
797 | +(define_expand "sync_new_<sync_optab><mode>" | ||
798 | + [(match_operand:NARROW 0 "s_register_operand") | ||
799 | + (match_operand:NARROW 1 "memory_operand") | ||
800 | + (match_operand:NARROW 2 "s_register_operand") | ||
801 | + (syncop:NARROW (match_dup 1) (match_dup 2))] | ||
802 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
803 | + { | ||
804 | + struct arm_sync_generator generator; | ||
805 | + generator.op = arm_sync_generator_omn; | ||
806 | + generator.u.omn = gen_arm_sync_new_<sync_optab><mode>; | ||
807 | + arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], | ||
808 | + NULL, operands[2]); | ||
809 | + DONE; | ||
810 | + }) | ||
811 | + | ||
812 | +(define_expand "sync_new_nand<mode>" | ||
813 | + [(match_operand:NARROW 0 "s_register_operand") | ||
814 | + (match_operand:NARROW 1 "memory_operand") | ||
815 | + (match_operand:NARROW 2 "s_register_operand") | ||
816 | + (not:NARROW (and:NARROW (match_dup 1) (match_dup 2)))] | ||
817 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
818 | + { | ||
819 | + struct arm_sync_generator generator; | ||
820 | + generator.op = arm_sync_generator_omn; | ||
821 | + generator.u.omn = gen_arm_sync_new_nand<mode>; | ||
822 | + arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], | ||
823 | + NULL, operands[2]); | ||
824 | + DONE; | ||
825 | + }); | ||
826 | + | ||
827 | +(define_expand "sync_old_<sync_optab>si" | ||
828 | + [(match_operand:SI 0 "s_register_operand") | ||
829 | + (match_operand:SI 1 "memory_operand") | ||
830 | + (match_operand:SI 2 "s_register_operand") | ||
831 | + (syncop:SI (match_dup 1) (match_dup 2))] | ||
832 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
833 | + { | ||
834 | + struct arm_sync_generator generator; | ||
835 | + generator.op = arm_sync_generator_omn; | ||
836 | + generator.u.omn = gen_arm_sync_old_<sync_optab>si; | ||
837 | + arm_expand_sync (SImode, &generator, operands[0], operands[1], NULL, | ||
838 | + operands[2]); | ||
839 | + DONE; | ||
840 | + }) | ||
841 | + | ||
842 | +(define_expand "sync_old_nandsi" | ||
843 | + [(match_operand:SI 0 "s_register_operand") | ||
844 | + (match_operand:SI 1 "memory_operand") | ||
845 | + (match_operand:SI 2 "s_register_operand") | ||
846 | + (not:SI (and:SI (match_dup 1) (match_dup 2)))] | ||
847 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
848 | + { | ||
849 | + struct arm_sync_generator generator; | ||
850 | + generator.op = arm_sync_generator_omn; | ||
851 | + generator.u.omn = gen_arm_sync_old_nandsi; | ||
852 | + arm_expand_sync (SImode, &generator, operands[0], operands[1], NULL, | ||
853 | + operands[2]); | ||
854 | + DONE; | ||
855 | + }) | ||
856 | + | ||
857 | +(define_expand "sync_old_<sync_optab><mode>" | ||
858 | + [(match_operand:NARROW 0 "s_register_operand") | ||
859 | + (match_operand:NARROW 1 "memory_operand") | ||
860 | + (match_operand:NARROW 2 "s_register_operand") | ||
861 | + (syncop:NARROW (match_dup 1) (match_dup 2))] | ||
862 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
863 | + { | ||
864 | + struct arm_sync_generator generator; | ||
865 | + generator.op = arm_sync_generator_omn; | ||
866 | + generator.u.omn = gen_arm_sync_old_<sync_optab><mode>; | ||
867 | + arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], | ||
868 | + NULL, operands[2]); | ||
869 | + DONE; | ||
870 | + }) | ||
871 | + | ||
872 | +(define_expand "sync_old_nand<mode>" | ||
873 | + [(match_operand:NARROW 0 "s_register_operand") | ||
874 | + (match_operand:NARROW 1 "memory_operand") | ||
875 | + (match_operand:NARROW 2 "s_register_operand") | ||
876 | + (not:NARROW (and:NARROW (match_dup 1) (match_dup 2)))] | ||
877 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
878 | + { | ||
879 | + struct arm_sync_generator generator; | ||
880 | + generator.op = arm_sync_generator_omn; | ||
881 | + generator.u.omn = gen_arm_sync_old_nand<mode>; | ||
882 | + arm_expand_sync (<MODE>mode, &generator, operands[0], operands[1], | ||
883 | + NULL, operands[2]); | ||
884 | + DONE; | ||
885 | + }) | ||
886 | + | ||
887 | +(define_insn "arm_sync_compare_and_swapsi" | ||
888 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
889 | + (unspec_volatile:SI | ||
890 | + [(match_operand:SI 1 "memory_operand" "+m") | ||
891 | + (match_operand:SI 2 "s_register_operand" "r") | ||
892 | + (match_operand:SI 3 "s_register_operand" "r")] | ||
893 | + VUNSPEC_SYNC_COMPARE_AND_SWAP)) | ||
894 | + (set (match_dup 1) (unspec_volatile:SI [(match_dup 2)] | ||
895 | + VUNSPEC_SYNC_COMPARE_AND_SWAP)) | ||
896 | + (clobber:SI (match_scratch:SI 4 "=&r")) | ||
897 | + (set (reg:CC CC_REGNUM) (unspec_volatile:CC [(match_dup 1)] | ||
898 | + VUNSPEC_SYNC_COMPARE_AND_SWAP)) | ||
899 | + ] | ||
900 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
901 | + { | ||
902 | + return arm_output_sync_insn (insn, operands); | ||
903 | + } | ||
904 | + [(set_attr "sync_result" "0") | ||
905 | + (set_attr "sync_memory" "1") | ||
906 | + (set_attr "sync_required_value" "2") | ||
907 | + (set_attr "sync_new_value" "3") | ||
908 | + (set_attr "sync_t1" "0") | ||
909 | + (set_attr "sync_t2" "4") | ||
910 | + (set_attr "conds" "nocond") | ||
911 | + (set_attr "predicable" "no")]) | ||
912 | + | ||
913 | +(define_insn "arm_sync_compare_and_swap<mode>" | ||
914 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
915 | + (zero_extend:SI | ||
916 | + (unspec_volatile:NARROW | ||
917 | + [(match_operand:NARROW 1 "memory_operand" "+m") | ||
918 | + (match_operand:SI 2 "s_register_operand" "r") | ||
919 | + (match_operand:SI 3 "s_register_operand" "r")] | ||
920 | + VUNSPEC_SYNC_COMPARE_AND_SWAP))) | ||
921 | + (set (match_dup 1) (unspec_volatile:NARROW [(match_dup 2)] | ||
922 | + VUNSPEC_SYNC_COMPARE_AND_SWAP)) | ||
923 | + (clobber:SI (match_scratch:SI 4 "=&r")) | ||
924 | + (set (reg:CC CC_REGNUM) (unspec_volatile:CC [(match_dup 1)] | ||
925 | + VUNSPEC_SYNC_COMPARE_AND_SWAP)) | ||
926 | + ] | ||
927 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
928 | + { | ||
929 | + return arm_output_sync_insn (insn, operands); | ||
930 | + } | ||
931 | + [(set_attr "sync_result" "0") | ||
932 | + (set_attr "sync_memory" "1") | ||
933 | + (set_attr "sync_required_value" "2") | ||
934 | + (set_attr "sync_new_value" "3") | ||
935 | + (set_attr "sync_t1" "0") | ||
936 | + (set_attr "sync_t2" "4") | ||
937 | + (set_attr "conds" "nocond") | ||
938 | + (set_attr "predicable" "no")]) | ||
939 | + | ||
940 | +(define_insn "arm_sync_lock_test_and_setsi" | ||
941 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
942 | + (match_operand:SI 1 "memory_operand" "+m")) | ||
943 | + (set (match_dup 1) | ||
944 | + (unspec_volatile:SI [(match_operand:SI 2 "s_register_operand" "r")] | ||
945 | + VUNSPEC_SYNC_LOCK)) | ||
946 | + (clobber (reg:CC CC_REGNUM)) | ||
947 | + (clobber (match_scratch:SI 3 "=&r"))] | ||
948 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
949 | + { | ||
950 | + return arm_output_sync_insn (insn, operands); | ||
951 | + } | ||
952 | + [(set_attr "sync_release_barrier" "no") | ||
953 | + (set_attr "sync_result" "0") | ||
954 | + (set_attr "sync_memory" "1") | ||
955 | + (set_attr "sync_new_value" "2") | ||
956 | + (set_attr "sync_t1" "0") | ||
957 | + (set_attr "sync_t2" "3") | ||
958 | + (set_attr "conds" "nocond") | ||
959 | + (set_attr "predicable" "no")]) | ||
960 | + | ||
961 | +(define_insn "arm_sync_lock_test_and_set<mode>" | ||
962 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
963 | + (zero_extend:SI (match_operand:NARROW 1 "memory_operand" "+m"))) | ||
964 | + (set (match_dup 1) | ||
965 | + (unspec_volatile:NARROW [(match_operand:SI 2 "s_register_operand" "r")] | ||
966 | + VUNSPEC_SYNC_LOCK)) | ||
967 | + (clobber (reg:CC CC_REGNUM)) | ||
968 | + (clobber (match_scratch:SI 3 "=&r"))] | ||
969 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
970 | + { | ||
971 | + return arm_output_sync_insn (insn, operands); | ||
972 | + } | ||
973 | + [(set_attr "sync_release_barrier" "no") | ||
974 | + (set_attr "sync_result" "0") | ||
975 | + (set_attr "sync_memory" "1") | ||
976 | + (set_attr "sync_new_value" "2") | ||
977 | + (set_attr "sync_t1" "0") | ||
978 | + (set_attr "sync_t2" "3") | ||
979 | + (set_attr "conds" "nocond") | ||
980 | + (set_attr "predicable" "no")]) | ||
981 | + | ||
982 | +(define_insn "arm_sync_new_<sync_optab>si" | ||
983 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
984 | + (unspec_volatile:SI [(syncop:SI | ||
985 | + (match_operand:SI 1 "memory_operand" "+m") | ||
986 | + (match_operand:SI 2 "s_register_operand" "r")) | ||
987 | + ] | ||
988 | + VUNSPEC_SYNC_NEW_OP)) | ||
989 | + (set (match_dup 1) | ||
990 | + (unspec_volatile:SI [(match_dup 1) (match_dup 2)] | ||
991 | + VUNSPEC_SYNC_NEW_OP)) | ||
992 | + (clobber (reg:CC CC_REGNUM)) | ||
993 | + (clobber (match_scratch:SI 3 "=&r"))] | ||
994 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
995 | + { | ||
996 | + return arm_output_sync_insn (insn, operands); | ||
997 | + } | ||
998 | + [(set_attr "sync_result" "0") | ||
999 | + (set_attr "sync_memory" "1") | ||
1000 | + (set_attr "sync_new_value" "2") | ||
1001 | + (set_attr "sync_t1" "0") | ||
1002 | + (set_attr "sync_t2" "3") | ||
1003 | + (set_attr "sync_op" "<sync_optab>") | ||
1004 | + (set_attr "conds" "nocond") | ||
1005 | + (set_attr "predicable" "no")]) | ||
1006 | + | ||
1007 | +(define_insn "arm_sync_new_nandsi" | ||
1008 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
1009 | + (unspec_volatile:SI [(not:SI (and:SI | ||
1010 | + (match_operand:SI 1 "memory_operand" "+m") | ||
1011 | + (match_operand:SI 2 "s_register_operand" "r"))) | ||
1012 | + ] | ||
1013 | + VUNSPEC_SYNC_NEW_OP)) | ||
1014 | + (set (match_dup 1) | ||
1015 | + (unspec_volatile:SI [(match_dup 1) (match_dup 2)] | ||
1016 | + VUNSPEC_SYNC_NEW_OP)) | ||
1017 | + (clobber (reg:CC CC_REGNUM)) | ||
1018 | + (clobber (match_scratch:SI 3 "=&r"))] | ||
1019 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
1020 | + { | ||
1021 | + return arm_output_sync_insn (insn, operands); | ||
1022 | + } | ||
1023 | + [(set_attr "sync_result" "0") | ||
1024 | + (set_attr "sync_memory" "1") | ||
1025 | + (set_attr "sync_new_value" "2") | ||
1026 | + (set_attr "sync_t1" "0") | ||
1027 | + (set_attr "sync_t2" "3") | ||
1028 | + (set_attr "sync_op" "nand") | ||
1029 | + (set_attr "conds" "nocond") | ||
1030 | + (set_attr "predicable" "no")]) | ||
1031 | + | ||
1032 | +(define_insn "arm_sync_new_<sync_optab><mode>" | ||
1033 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
1034 | + (unspec_volatile:SI [(syncop:SI | ||
1035 | + (zero_extend:SI | ||
1036 | + (match_operand:NARROW 1 "memory_operand" "+m")) | ||
1037 | + (match_operand:SI 2 "s_register_operand" "r")) | ||
1038 | + ] | ||
1039 | + VUNSPEC_SYNC_NEW_OP)) | ||
1040 | + (set (match_dup 1) | ||
1041 | + (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] | ||
1042 | + VUNSPEC_SYNC_NEW_OP)) | ||
1043 | + (clobber (reg:CC CC_REGNUM)) | ||
1044 | + (clobber (match_scratch:SI 3 "=&r"))] | ||
1045 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
1046 | + { | ||
1047 | + return arm_output_sync_insn (insn, operands); | ||
1048 | + } | ||
1049 | + [(set_attr "sync_result" "0") | ||
1050 | + (set_attr "sync_memory" "1") | ||
1051 | + (set_attr "sync_new_value" "2") | ||
1052 | + (set_attr "sync_t1" "0") | ||
1053 | + (set_attr "sync_t2" "3") | ||
1054 | + (set_attr "sync_op" "<sync_optab>") | ||
1055 | + (set_attr "conds" "nocond") | ||
1056 | + (set_attr "predicable" "no")]) | ||
1057 | + | ||
1058 | +(define_insn "arm_sync_new_nand<mode>" | ||
1059 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
1060 | + (unspec_volatile:SI | ||
1061 | + [(not:SI | ||
1062 | + (and:SI | ||
1063 | + (zero_extend:SI | ||
1064 | + (match_operand:NARROW 1 "memory_operand" "+m")) | ||
1065 | + (match_operand:SI 2 "s_register_operand" "r"))) | ||
1066 | + ] VUNSPEC_SYNC_NEW_OP)) | ||
1067 | + (set (match_dup 1) | ||
1068 | + (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] | ||
1069 | + VUNSPEC_SYNC_NEW_OP)) | ||
1070 | + (clobber (reg:CC CC_REGNUM)) | ||
1071 | + (clobber (match_scratch:SI 3 "=&r"))] | ||
1072 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
1073 | + { | ||
1074 | + return arm_output_sync_insn (insn, operands); | ||
1075 | + } | ||
1076 | + [(set_attr "sync_result" "0") | ||
1077 | + (set_attr "sync_memory" "1") | ||
1078 | + (set_attr "sync_new_value" "2") | ||
1079 | + (set_attr "sync_t1" "0") | ||
1080 | + (set_attr "sync_t2" "3") | ||
1081 | + (set_attr "sync_op" "nand") | ||
1082 | + (set_attr "conds" "nocond") | ||
1083 | + (set_attr "predicable" "no")]) | ||
1084 | + | ||
1085 | +(define_insn "arm_sync_old_<sync_optab>si" | ||
1086 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
1087 | + (unspec_volatile:SI [(syncop:SI | ||
1088 | + (match_operand:SI 1 "memory_operand" "+m") | ||
1089 | + (match_operand:SI 2 "s_register_operand" "r")) | ||
1090 | + ] | ||
1091 | + VUNSPEC_SYNC_OLD_OP)) | ||
1092 | + (set (match_dup 1) | ||
1093 | + (unspec_volatile:SI [(match_dup 1) (match_dup 2)] | ||
1094 | + VUNSPEC_SYNC_OLD_OP)) | ||
1095 | + (clobber (reg:CC CC_REGNUM)) | ||
1096 | + (clobber (match_scratch:SI 3 "=&r")) | ||
1097 | + (clobber (match_scratch:SI 4 "=&r"))] | ||
1098 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
1099 | + { | ||
1100 | + return arm_output_sync_insn (insn, operands); | ||
1101 | + } | ||
1102 | + [(set_attr "sync_result" "0") | ||
1103 | + (set_attr "sync_memory" "1") | ||
1104 | + (set_attr "sync_new_value" "2") | ||
1105 | + (set_attr "sync_t1" "3") | ||
1106 | + (set_attr "sync_t2" "4") | ||
1107 | + (set_attr "sync_op" "<sync_optab>") | ||
1108 | + (set_attr "conds" "nocond") | ||
1109 | + (set_attr "predicable" "no")]) | ||
1110 | + | ||
1111 | +(define_insn "arm_sync_old_nandsi" | ||
1112 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
1113 | + (unspec_volatile:SI [(not:SI (and:SI | ||
1114 | + (match_operand:SI 1 "memory_operand" "+m") | ||
1115 | + (match_operand:SI 2 "s_register_operand" "r"))) | ||
1116 | + ] | ||
1117 | + VUNSPEC_SYNC_OLD_OP)) | ||
1118 | + (set (match_dup 1) | ||
1119 | + (unspec_volatile:SI [(match_dup 1) (match_dup 2)] | ||
1120 | + VUNSPEC_SYNC_OLD_OP)) | ||
1121 | + (clobber (reg:CC CC_REGNUM)) | ||
1122 | + (clobber (match_scratch:SI 3 "=&r")) | ||
1123 | + (clobber (match_scratch:SI 4 "=&r"))] | ||
1124 | + "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER" | ||
1125 | + { | ||
1126 | + return arm_output_sync_insn (insn, operands); | ||
1127 | + } | ||
1128 | + [(set_attr "sync_result" "0") | ||
1129 | + (set_attr "sync_memory" "1") | ||
1130 | + (set_attr "sync_new_value" "2") | ||
1131 | + (set_attr "sync_t1" "3") | ||
1132 | + (set_attr "sync_t2" "4") | ||
1133 | + (set_attr "sync_op" "nand") | ||
1134 | + (set_attr "conds" "nocond") | ||
1135 | + (set_attr "predicable" "no")]) | ||
1136 | + | ||
1137 | +(define_insn "arm_sync_old_<sync_optab><mode>" | ||
1138 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
1139 | + (unspec_volatile:SI [(syncop:SI | ||
1140 | + (zero_extend:SI | ||
1141 | + (match_operand:NARROW 1 "memory_operand" "+m")) | ||
1142 | + (match_operand:SI 2 "s_register_operand" "r")) | ||
1143 | + ] | ||
1144 | + VUNSPEC_SYNC_OLD_OP)) | ||
1145 | + (set (match_dup 1) | ||
1146 | + (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] | ||
1147 | + VUNSPEC_SYNC_OLD_OP)) | ||
1148 | + (clobber (reg:CC CC_REGNUM)) | ||
1149 | + (clobber (match_scratch:SI 3 "=&r")) | ||
1150 | + (clobber (match_scratch:SI 4 "=&r"))] | ||
1151 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
1152 | + { | ||
1153 | + return arm_output_sync_insn (insn, operands); | ||
1154 | + } | ||
1155 | + [(set_attr "sync_result" "0") | ||
1156 | + (set_attr "sync_memory" "1") | ||
1157 | + (set_attr "sync_new_value" "2") | ||
1158 | + (set_attr "sync_t1" "3") | ||
1159 | + (set_attr "sync_t2" "4") | ||
1160 | + (set_attr "sync_op" "<sync_optab>") | ||
1161 | + (set_attr "conds" "nocond") | ||
1162 | + (set_attr "predicable" "no")]) | ||
1163 | + | ||
1164 | +(define_insn "arm_sync_old_nand<mode>" | ||
1165 | + [(set (match_operand:SI 0 "s_register_operand" "=&r") | ||
1166 | + (unspec_volatile:SI [(not:SI (and:SI | ||
1167 | + (zero_extend:SI | ||
1168 | + (match_operand:NARROW 1 "memory_operand" "+m")) | ||
1169 | + (match_operand:SI 2 "s_register_operand" "r"))) | ||
1170 | + ] | ||
1171 | + VUNSPEC_SYNC_OLD_OP)) | ||
1172 | + (set (match_dup 1) | ||
1173 | + (unspec_volatile:NARROW [(match_dup 1) (match_dup 2)] | ||
1174 | + VUNSPEC_SYNC_OLD_OP)) | ||
1175 | + (clobber (reg:CC CC_REGNUM)) | ||
1176 | + (clobber (match_scratch:SI 3 "=&r")) | ||
1177 | + (clobber (match_scratch:SI 4 "=&r"))] | ||
1178 | + "TARGET_HAVE_LDREXBHD && TARGET_HAVE_MEMORY_BARRIER" | ||
1179 | + { | ||
1180 | + return arm_output_sync_insn (insn, operands); | ||
1181 | + } | ||
1182 | + [(set_attr "sync_result" "0") | ||
1183 | + (set_attr "sync_memory" "1") | ||
1184 | + (set_attr "sync_new_value" "2") | ||
1185 | + (set_attr "sync_t1" "3") | ||
1186 | + (set_attr "sync_t2" "4") | ||
1187 | + (set_attr "sync_op" "nand") | ||
1188 | + (set_attr "conds" "nocond") | ||
1189 | + (set_attr "predicable" "no")]) | ||
1190 | + | ||
1191 | +(define_insn "*memory_barrier" | ||
1192 | + [(set (match_operand:BLK 0 "" "") | ||
1193 | + (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))] | ||
1194 | + "TARGET_HAVE_MEMORY_BARRIER" | ||
1195 | + { | ||
1196 | + return arm_output_memory_barrier (operands); | ||
1197 | + } | ||
1198 | + [(set_attr "length" "4") | ||
1199 | + (set_attr "conds" "unconditional") | ||
1200 | + (set_attr "predicable" "no")]) | ||
1201 | + | ||
1202 | |||