Diffstat (limited to 'meta/recipes-devtools/gcc/gcc-4.6.0/gcc-4_6-branch-backports/0215-config-i386-i386.md-movdi_internal_rex64-TYPE_SSEMOV.patch')
-rw-r--r-- meta/recipes-devtools/gcc/gcc-4.6.0/gcc-4_6-branch-backports/0215-config-i386-i386.md-movdi_internal_rex64-TYPE_SSEMOV.patch | 244
1 file changed, 244 insertions(+), 0 deletions(-)
diff --git a/meta/recipes-devtools/gcc/gcc-4.6.0/gcc-4_6-branch-backports/0215-config-i386-i386.md-movdi_internal_rex64-TYPE_SSEMOV.patch b/meta/recipes-devtools/gcc/gcc-4.6.0/gcc-4_6-branch-backports/0215-config-i386-i386.md-movdi_internal_rex64-TYPE_SSEMOV.patch
new file mode 100644
index 0000000000..813b549ed3
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-4.6.0/gcc-4_6-branch-backports/0215-config-i386-i386.md-movdi_internal_rex64-TYPE_SSEMOV.patch
@@ -0,0 +1,244 @@
+From 29c9d7b889311e84c52f371bdbc8d37f1c82b47f Mon Sep 17 00:00:00 2001
+From: uros <uros@138bc75d-0d04-0410-961f-82ee72b054a4>
+Date: Wed, 4 May 2011 17:01:43 +0000
+Subject: [PATCH] * config/i386/i386.md (*movdi_internal_rex64) <TYPE_SSEMOV>:
+ Use %v prefix in insn mnemonic to handle TARGET_AVX.
+ (*movdi_internal): Use "maybe_vex" instead of "vex" in "prefix"
+ attribute calculation.
+ (*movdf_internal): Output AVX mnemonics. Add "prefix" attribute.
+ * config/i386/sse.md (*sse2_storeq_rex64): Do not emit %v prefix
+ for mov{q} mnemonic.
+ (*vec_extractv2di_1_rex64_avx): Ditto.
+
+git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-4_6-branch@173382 138bc75d-0d04-0410-961f-82ee72b054a4
+
+index c50002e..a90e310 100644
+--- a/gcc/config/i386/i386.md
++++ b/gcc/config/i386/i386.md
+@@ -1974,21 +1974,15 @@
+ return "movdq2q\t{%1, %0|%0, %1}";
+
+ case TYPE_SSEMOV:
+- if (TARGET_AVX)
+- {
+- if (get_attr_mode (insn) == MODE_TI)
+- return "vmovdqa\t{%1, %0|%0, %1}";
+- else
+- return "vmovq\t{%1, %0|%0, %1}";
+- }
+-
+ if (get_attr_mode (insn) == MODE_TI)
+- return "movdqa\t{%1, %0|%0, %1}";
+- /* FALLTHRU */
++ return "%vmovdqa\t{%1, %0|%0, %1}";
++ /* Handle broken assemblers that require movd instead of movq. */
++ if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
++ return "%vmovd\t{%1, %0|%0, %1}";
++ return "%vmovq\t{%1, %0|%0, %1}";
+
+ case TYPE_MMXMOV:
+- /* Moves from and into integer register is done using movd
+- opcode with REX prefix. */
++ /* Handle broken assemblers that require movd instead of movq. */
+ if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
+ return "movd\t{%1, %0|%0, %1}";
+ return "movq\t{%1, %0|%0, %1}";
+@@ -2108,7 +2102,7 @@
+ [(set_attr "type" "*,*,mmx,mmxmov,mmxmov,sselog1,ssemov,ssemov,ssemov,sselog1,ssemov,ssemov,ssemov")
+ (set (attr "prefix")
+ (if_then_else (eq_attr "alternative" "5,6,7,8")
+- (const_string "vex")
++ (const_string "maybe_vex")
+ (const_string "orig")))
+ (set_attr "mode" "DI,DI,DI,DI,DI,TI,DI,TI,DI,V4SF,V2SF,V4SF,V2SF")])
+
+@@ -2997,7 +2991,8 @@
+
+ case 11:
+ case 12:
+- return "%vmovd\t{%1, %0|%0, %1}";
++ /* Handle broken assemblers that require movd instead of movq. */
++ return "%vmovd\t{%1, %0|%0, %1}";
+
+ default:
+ gcc_unreachable();
+@@ -3106,17 +3101,17 @@
+ switch (get_attr_mode (insn))
+ {
+ case MODE_V4SF:
+- return "xorps\t%0, %0";
++ return "%vxorps\t%0, %d0";
+ case MODE_V2DF:
+ if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
+- return "xorps\t%0, %0";
++ return "%vxorps\t%0, %d0";
+ else
+- return "xorpd\t%0, %0";
++ return "%vxorpd\t%0, %d0";
+ case MODE_TI:
+ if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
+- return "xorps\t%0, %0";
++ return "%vxorps\t%0, %d0";
+ else
+- return "pxor\t%0, %0";
++ return "%vpxor\t%0, %d0";
+ default:
+ gcc_unreachable ();
+ }
+@@ -3126,34 +3121,62 @@
+ switch (get_attr_mode (insn))
+ {
+ case MODE_V4SF:
+- return "movaps\t{%1, %0|%0, %1}";
++ return "%vmovaps\t{%1, %0|%0, %1}";
+ case MODE_V2DF:
+ if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
+- return "movaps\t{%1, %0|%0, %1}";
++ return "%vmovaps\t{%1, %0|%0, %1}";
+ else
+- return "movapd\t{%1, %0|%0, %1}";
++ return "%vmovapd\t{%1, %0|%0, %1}";
+ case MODE_TI:
+ if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
+- return "movaps\t{%1, %0|%0, %1}";
++ return "%vmovaps\t{%1, %0|%0, %1}";
+ else
+- return "movdqa\t{%1, %0|%0, %1}";
++ return "%vmovdqa\t{%1, %0|%0, %1}";
+ case MODE_DI:
+- return "movq\t{%1, %0|%0, %1}";
++ return "%vmovq\t{%1, %0|%0, %1}";
+ case MODE_DF:
+- return "movsd\t{%1, %0|%0, %1}";
++ if (TARGET_AVX)
++ {
++ if (REG_P (operands[0]) && REG_P (operands[1]))
++ return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
++ else
++ return "vmovsd\t{%1, %0|%0, %1}";
++ }
++ else
++ return "movsd\t{%1, %0|%0, %1}";
+ case MODE_V1DF:
+- return "movlpd\t{%1, %0|%0, %1}";
++ if (TARGET_AVX)
++ {
++ if (REG_P (operands[0]))
++ return "vmovlpd\t{%1, %0, %0|%0, %0, %1}";
++ else
++ return "vmovlpd\t{%1, %0|%0, %1}";
++ }
++ else
++ return "movlpd\t{%1, %0|%0, %1}";
+ case MODE_V2SF:
+- return "movlps\t{%1, %0|%0, %1}";
++ if (TARGET_AVX)
++ {
++ if (REG_P (operands[0]))
++ return "vmovlps\t{%1, %0, %0|%0, %0, %1}";
++ else
++ return "vmovlps\t{%1, %0|%0, %1}";
++ }
++ else
++ return "movlps\t{%1, %0|%0, %1}";
+ default:
+ gcc_unreachable ();
+ }
+
+ default:
+- gcc_unreachable();
++ gcc_unreachable ();
+ }
+ }
+ [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
++ (set (attr "prefix")
++ (if_then_else (eq_attr "alternative" "0,1,2,3,4")
++ (const_string "orig")
++ (const_string "maybe_vex")))
+ (set (attr "prefix_data16")
+ (if_then_else (eq_attr "mode" "V1DF")
+ (const_string "1")
+@@ -3441,12 +3464,13 @@
+
+ case 9: case 10: case 14: case 15:
+ return "movd\t{%1, %0|%0, %1}";
+- case 12: case 13:
+- return "%vmovd\t{%1, %0|%0, %1}";
+
+ case 11:
+ return "movq\t{%1, %0|%0, %1}";
+
++ case 12: case 13:
++ return "%vmovd\t{%1, %0|%0, %1}";
++
+ default:
+ gcc_unreachable ();
+ }
+diff --git a/gcc/config/i386/mmx.md b/gcc/config/i386/mmx.md
+index 6a254b6..ca37622 100644
+--- a/gcc/config/i386/mmx.md
++++ b/gcc/config/i386/mmx.md
+@@ -63,6 +63,7 @@
+ DONE;
+})
+
++;; movd instead of movq is required to handle broken assemblers.
+ (define_insn "*mov<mode>_internal_rex64"
+ [(set (match_operand:MMXMODEI8 0 "nonimmediate_operand"
+ "=rm,r,!?y,!y,!?y,m ,!y ,*Y2,x,x ,m,r ,Yi")
+@@ -196,6 +197,7 @@
+ (const_string "orig")))
+ (set_attr "mode" "DI,DI,DI,DI,DI,DI,DI,DI,V4SF,V4SF,V2SF,V2SF,DI,DI")])
+
++;; movd instead of movq is required to handle broken assemblers.
+ (define_insn "*movv2sf_internal_rex64"
+ [(set (match_operand:V2SF 0 "nonimmediate_operand"
+ "=rm,r,!?y,!y,!?y,m ,!y ,*Y2,x,x,x,m,r ,Yi")
+diff --git a/gcc/config/i386/sse.md b/gcc/config/i386/sse.md
+index 6db9b77..db252c8 100644
+--- a/gcc/config/i386/sse.md
++++ b/gcc/config/i386/sse.md
+@@ -7434,9 +7434,8 @@
+ "@
+ #
+ #
+- %vmov{q}\t{%1, %0|%0, %1}"
++ mov{q}\t{%1, %0|%0, %1}"
+ [(set_attr "type" "*,*,imov")
+- (set_attr "prefix" "*,*,maybe_vex")
+ (set_attr "mode" "*,*,DI")])
+
+ (define_insn "*sse2_storeq"
+@@ -7472,11 +7471,11 @@
+ vmovhps\t{%1, %0|%0, %1}
+ vpsrldq\t{$8, %1, %0|%0, %1, 8}
+ vmovq\t{%H1, %0|%0, %H1}
+- vmov{q}\t{%H1, %0|%0, %H1}"
++ mov{q}\t{%H1, %0|%0, %H1}"
+ [(set_attr "type" "ssemov,sseishft1,ssemov,imov")
+ (set_attr "length_immediate" "*,1,*,*")
+ (set_attr "memory" "*,none,*,*")
+- (set_attr "prefix" "vex")
++ (set_attr "prefix" "vex,vex,vex,orig")
+ (set_attr "mode" "V2SF,TI,TI,DI")])
+
+ (define_insn "*vec_extractv2di_1_rex64"
+@@ -7754,6 +7753,7 @@
+ (const_string "vex")))
+ (set_attr "mode" "TI,TI,TI,TI,TI,V2SF")])
+
++;; movd instead of movq is required to handle broken assemblers.
+ (define_insn "*vec_concatv2di_rex64_sse4_1"
+ [(set (match_operand:V2DI 0 "register_operand" "=x ,x ,Yi,!x,x,x,x")
+ (vec_concat:V2DI
+@@ -7774,6 +7774,7 @@
+ (set_attr "length_immediate" "1,*,*,*,*,*,*")
+ (set_attr "mode" "TI,TI,TI,TI,TI,V4SF,V2SF")])
+
++;; movd instead of movq is required to handle broken assemblers.
+ (define_insn "*vec_concatv2di_rex64_sse"
+ [(set (match_operand:V2DI 0 "register_operand" "=Y2 ,Yi,!Y2,Y2,x,x")
+ (vec_concat:V2DI
+--
+1.7.0.4
+
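
The hunks above repeatedly replace a hard-coded AVX/SSE mnemonic pair with a single "%v"-prefixed template (for example "%vmovdqa") and switch the "prefix" attribute from "vex" to "maybe_vex". In the i386 backend the "%v" marker is resolved when the instruction is printed: it becomes a leading "v" (the AVX form) when TARGET_AVX is enabled and is dropped otherwise, and "maybe_vex" likewise requests VEX encoding only under AVX rather than forcing it. The stand-alone C sketch below only illustrates that expansion behaviour; the helper name expand_v_prefix and the target_avx flag are assumptions made for the example and are not GCC's real interface.

#include <stdio.h>
#include <string.h>

/* Illustrative stand-in for GCC's TARGET_AVX test; not the real macro.  */
static int target_avx = 1;

/* Expand a leading "%v" in an insn template: keep the "v" when AVX output
   is wanted, drop the whole "%v" marker for plain SSE output.  */
static void
expand_v_prefix (const char *templ, char *out, size_t outlen)
{
  if (strncmp (templ, "%v", 2) == 0)
    snprintf (out, outlen, "%s%s", target_avx ? "v" : "", templ + 2);
  else
    snprintf (out, outlen, "%s", templ);
}

int
main (void)
{
  char buf[128];

  target_avx = 1;
  expand_v_prefix ("%vmovdqa\t{%1, %0|%0, %1}", buf, sizeof buf);
  printf ("AVX: %s\n", buf);    /* vmovdqa ... */

  target_avx = 0;
  expand_v_prefix ("%vmovdqa\t{%1, %0|%0, %1}", buf, sizeof buf);
  printf ("SSE: %s\n", buf);    /* movdqa ... */

  return 0;
}

Compiled with any C compiler, the sketch prints the AVX form (vmovdqa ...) and the plain SSE form (movdqa ...) from the same template string, which is the duplication the patch removes from *movdi_internal_rex64.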