// Code generated from _gen/MIPS64.rules using 'go generate'; DO NOT EDIT.

package ssa

import "cmd/compile/internal/types"

func rewriteValueMIPS64(v *Value) bool {
switch v.Op {
9 case OpAbs:
10 v.Op = OpMIPS64ABSD
11 return true
12 case OpAdd16:
13 v.Op = OpMIPS64ADDV
14 return true
15 case OpAdd32:
16 v.Op = OpMIPS64ADDV
17 return true
18 case OpAdd32F:
19 v.Op = OpMIPS64ADDF
20 return true
21 case OpAdd64:
22 v.Op = OpMIPS64ADDV
23 return true
24 case OpAdd64F:
25 v.Op = OpMIPS64ADDD
26 return true
27 case OpAdd8:
28 v.Op = OpMIPS64ADDV
29 return true
30 case OpAddPtr:
31 v.Op = OpMIPS64ADDV
32 return true
33 case OpAddr:
34 return rewriteValueMIPS64_OpAddr(v)
35 case OpAnd16:
36 v.Op = OpMIPS64AND
37 return true
38 case OpAnd32:
39 v.Op = OpMIPS64AND
40 return true
41 case OpAnd64:
42 v.Op = OpMIPS64AND
43 return true
44 case OpAnd8:
45 v.Op = OpMIPS64AND
46 return true
47 case OpAndB:
48 v.Op = OpMIPS64AND
49 return true
50 case OpAtomicAdd32:
51 v.Op = OpMIPS64LoweredAtomicAdd32
52 return true
53 case OpAtomicAdd64:
54 v.Op = OpMIPS64LoweredAtomicAdd64
55 return true
56 case OpAtomicAnd32:
57 v.Op = OpMIPS64LoweredAtomicAnd32
58 return true
59 case OpAtomicAnd8:
60 return rewriteValueMIPS64_OpAtomicAnd8(v)
61 case OpAtomicCompareAndSwap32:
62 return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
63 case OpAtomicCompareAndSwap64:
64 v.Op = OpMIPS64LoweredAtomicCas64
65 return true
66 case OpAtomicExchange32:
67 v.Op = OpMIPS64LoweredAtomicExchange32
68 return true
69 case OpAtomicExchange64:
70 v.Op = OpMIPS64LoweredAtomicExchange64
71 return true
72 case OpAtomicLoad32:
73 v.Op = OpMIPS64LoweredAtomicLoad32
74 return true
75 case OpAtomicLoad64:
76 v.Op = OpMIPS64LoweredAtomicLoad64
77 return true
78 case OpAtomicLoad8:
79 v.Op = OpMIPS64LoweredAtomicLoad8
80 return true
81 case OpAtomicLoadPtr:
82 v.Op = OpMIPS64LoweredAtomicLoad64
83 return true
84 case OpAtomicOr32:
85 v.Op = OpMIPS64LoweredAtomicOr32
86 return true
87 case OpAtomicOr8:
88 return rewriteValueMIPS64_OpAtomicOr8(v)
89 case OpAtomicStore32:
90 v.Op = OpMIPS64LoweredAtomicStore32
91 return true
92 case OpAtomicStore64:
93 v.Op = OpMIPS64LoweredAtomicStore64
94 return true
95 case OpAtomicStore8:
96 v.Op = OpMIPS64LoweredAtomicStore8
97 return true
98 case OpAtomicStorePtrNoWB:
99 v.Op = OpMIPS64LoweredAtomicStore64
100 return true
101 case OpAvg64u:
102 return rewriteValueMIPS64_OpAvg64u(v)
103 case OpClosureCall:
104 v.Op = OpMIPS64CALLclosure
105 return true
106 case OpCom16:
107 return rewriteValueMIPS64_OpCom16(v)
108 case OpCom32:
109 return rewriteValueMIPS64_OpCom32(v)
110 case OpCom64:
111 return rewriteValueMIPS64_OpCom64(v)
112 case OpCom8:
113 return rewriteValueMIPS64_OpCom8(v)
114 case OpConst16:
115 return rewriteValueMIPS64_OpConst16(v)
116 case OpConst32:
117 return rewriteValueMIPS64_OpConst32(v)
118 case OpConst32F:
119 return rewriteValueMIPS64_OpConst32F(v)
120 case OpConst64:
121 return rewriteValueMIPS64_OpConst64(v)
122 case OpConst64F:
123 return rewriteValueMIPS64_OpConst64F(v)
124 case OpConst8:
125 return rewriteValueMIPS64_OpConst8(v)
126 case OpConstBool:
127 return rewriteValueMIPS64_OpConstBool(v)
128 case OpConstNil:
129 return rewriteValueMIPS64_OpConstNil(v)
130 case OpCvt32Fto32:
131 v.Op = OpMIPS64TRUNCFW
132 return true
133 case OpCvt32Fto64:
134 v.Op = OpMIPS64TRUNCFV
135 return true
136 case OpCvt32Fto64F:
137 v.Op = OpMIPS64MOVFD
138 return true
139 case OpCvt32to32F:
140 v.Op = OpMIPS64MOVWF
141 return true
142 case OpCvt32to64F:
143 v.Op = OpMIPS64MOVWD
144 return true
145 case OpCvt64Fto32:
146 v.Op = OpMIPS64TRUNCDW
147 return true
148 case OpCvt64Fto32F:
149 v.Op = OpMIPS64MOVDF
150 return true
151 case OpCvt64Fto64:
152 v.Op = OpMIPS64TRUNCDV
153 return true
154 case OpCvt64to32F:
155 v.Op = OpMIPS64MOVVF
156 return true
157 case OpCvt64to64F:
158 v.Op = OpMIPS64MOVVD
159 return true
160 case OpCvtBoolToUint8:
161 v.Op = OpCopy
162 return true
163 case OpDiv16:
164 return rewriteValueMIPS64_OpDiv16(v)
165 case OpDiv16u:
166 return rewriteValueMIPS64_OpDiv16u(v)
167 case OpDiv32:
168 return rewriteValueMIPS64_OpDiv32(v)
169 case OpDiv32F:
170 v.Op = OpMIPS64DIVF
171 return true
172 case OpDiv32u:
173 return rewriteValueMIPS64_OpDiv32u(v)
174 case OpDiv64:
175 return rewriteValueMIPS64_OpDiv64(v)
176 case OpDiv64F:
177 v.Op = OpMIPS64DIVD
178 return true
179 case OpDiv64u:
180 return rewriteValueMIPS64_OpDiv64u(v)
181 case OpDiv8:
182 return rewriteValueMIPS64_OpDiv8(v)
183 case OpDiv8u:
184 return rewriteValueMIPS64_OpDiv8u(v)
185 case OpEq16:
186 return rewriteValueMIPS64_OpEq16(v)
187 case OpEq32:
188 return rewriteValueMIPS64_OpEq32(v)
189 case OpEq32F:
190 return rewriteValueMIPS64_OpEq32F(v)
191 case OpEq64:
192 return rewriteValueMIPS64_OpEq64(v)
193 case OpEq64F:
194 return rewriteValueMIPS64_OpEq64F(v)
195 case OpEq8:
196 return rewriteValueMIPS64_OpEq8(v)
197 case OpEqB:
198 return rewriteValueMIPS64_OpEqB(v)
199 case OpEqPtr:
200 return rewriteValueMIPS64_OpEqPtr(v)
201 case OpGetCallerPC:
202 v.Op = OpMIPS64LoweredGetCallerPC
203 return true
204 case OpGetCallerSP:
205 v.Op = OpMIPS64LoweredGetCallerSP
206 return true
207 case OpGetClosurePtr:
208 v.Op = OpMIPS64LoweredGetClosurePtr
209 return true
210 case OpHmul32:
211 return rewriteValueMIPS64_OpHmul32(v)
212 case OpHmul32u:
213 return rewriteValueMIPS64_OpHmul32u(v)
214 case OpHmul64:
215 return rewriteValueMIPS64_OpHmul64(v)
216 case OpHmul64u:
217 return rewriteValueMIPS64_OpHmul64u(v)
218 case OpInterCall:
219 v.Op = OpMIPS64CALLinter
220 return true
221 case OpIsInBounds:
222 return rewriteValueMIPS64_OpIsInBounds(v)
223 case OpIsNonNil:
224 return rewriteValueMIPS64_OpIsNonNil(v)
225 case OpIsSliceInBounds:
226 return rewriteValueMIPS64_OpIsSliceInBounds(v)
227 case OpLeq16:
228 return rewriteValueMIPS64_OpLeq16(v)
229 case OpLeq16U:
230 return rewriteValueMIPS64_OpLeq16U(v)
231 case OpLeq32:
232 return rewriteValueMIPS64_OpLeq32(v)
233 case OpLeq32F:
234 return rewriteValueMIPS64_OpLeq32F(v)
235 case OpLeq32U:
236 return rewriteValueMIPS64_OpLeq32U(v)
237 case OpLeq64:
238 return rewriteValueMIPS64_OpLeq64(v)
239 case OpLeq64F:
240 return rewriteValueMIPS64_OpLeq64F(v)
241 case OpLeq64U:
242 return rewriteValueMIPS64_OpLeq64U(v)
243 case OpLeq8:
244 return rewriteValueMIPS64_OpLeq8(v)
245 case OpLeq8U:
246 return rewriteValueMIPS64_OpLeq8U(v)
247 case OpLess16:
248 return rewriteValueMIPS64_OpLess16(v)
249 case OpLess16U:
250 return rewriteValueMIPS64_OpLess16U(v)
251 case OpLess32:
252 return rewriteValueMIPS64_OpLess32(v)
253 case OpLess32F:
254 return rewriteValueMIPS64_OpLess32F(v)
255 case OpLess32U:
256 return rewriteValueMIPS64_OpLess32U(v)
257 case OpLess64:
258 return rewriteValueMIPS64_OpLess64(v)
259 case OpLess64F:
260 return rewriteValueMIPS64_OpLess64F(v)
261 case OpLess64U:
262 return rewriteValueMIPS64_OpLess64U(v)
263 case OpLess8:
264 return rewriteValueMIPS64_OpLess8(v)
265 case OpLess8U:
266 return rewriteValueMIPS64_OpLess8U(v)
267 case OpLoad:
268 return rewriteValueMIPS64_OpLoad(v)
269 case OpLocalAddr:
270 return rewriteValueMIPS64_OpLocalAddr(v)
271 case OpLsh16x16:
272 return rewriteValueMIPS64_OpLsh16x16(v)
273 case OpLsh16x32:
274 return rewriteValueMIPS64_OpLsh16x32(v)
275 case OpLsh16x64:
276 return rewriteValueMIPS64_OpLsh16x64(v)
277 case OpLsh16x8:
278 return rewriteValueMIPS64_OpLsh16x8(v)
279 case OpLsh32x16:
280 return rewriteValueMIPS64_OpLsh32x16(v)
281 case OpLsh32x32:
282 return rewriteValueMIPS64_OpLsh32x32(v)
283 case OpLsh32x64:
284 return rewriteValueMIPS64_OpLsh32x64(v)
285 case OpLsh32x8:
286 return rewriteValueMIPS64_OpLsh32x8(v)
287 case OpLsh64x16:
288 return rewriteValueMIPS64_OpLsh64x16(v)
289 case OpLsh64x32:
290 return rewriteValueMIPS64_OpLsh64x32(v)
291 case OpLsh64x64:
292 return rewriteValueMIPS64_OpLsh64x64(v)
293 case OpLsh64x8:
294 return rewriteValueMIPS64_OpLsh64x8(v)
295 case OpLsh8x16:
296 return rewriteValueMIPS64_OpLsh8x16(v)
297 case OpLsh8x32:
298 return rewriteValueMIPS64_OpLsh8x32(v)
299 case OpLsh8x64:
300 return rewriteValueMIPS64_OpLsh8x64(v)
301 case OpLsh8x8:
302 return rewriteValueMIPS64_OpLsh8x8(v)
303 case OpMIPS64ADDV:
304 return rewriteValueMIPS64_OpMIPS64ADDV(v)
305 case OpMIPS64ADDVconst:
306 return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
307 case OpMIPS64AND:
308 return rewriteValueMIPS64_OpMIPS64AND(v)
309 case OpMIPS64ANDconst:
310 return rewriteValueMIPS64_OpMIPS64ANDconst(v)
311 case OpMIPS64LoweredAtomicAdd32:
312 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
313 case OpMIPS64LoweredAtomicAdd64:
314 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
315 case OpMIPS64LoweredAtomicStore32:
316 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
317 case OpMIPS64LoweredAtomicStore64:
318 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
319 case OpMIPS64MOVBUload:
320 return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
321 case OpMIPS64MOVBUreg:
322 return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
323 case OpMIPS64MOVBload:
324 return rewriteValueMIPS64_OpMIPS64MOVBload(v)
325 case OpMIPS64MOVBreg:
326 return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
327 case OpMIPS64MOVBstore:
328 return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
329 case OpMIPS64MOVBstorezero:
330 return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
331 case OpMIPS64MOVDload:
332 return rewriteValueMIPS64_OpMIPS64MOVDload(v)
333 case OpMIPS64MOVDstore:
334 return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
335 case OpMIPS64MOVFload:
336 return rewriteValueMIPS64_OpMIPS64MOVFload(v)
337 case OpMIPS64MOVFstore:
338 return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
339 case OpMIPS64MOVHUload:
340 return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
341 case OpMIPS64MOVHUreg:
342 return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
343 case OpMIPS64MOVHload:
344 return rewriteValueMIPS64_OpMIPS64MOVHload(v)
345 case OpMIPS64MOVHreg:
346 return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
347 case OpMIPS64MOVHstore:
348 return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
349 case OpMIPS64MOVHstorezero:
350 return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
351 case OpMIPS64MOVVload:
352 return rewriteValueMIPS64_OpMIPS64MOVVload(v)
353 case OpMIPS64MOVVnop:
354 return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
355 case OpMIPS64MOVVreg:
356 return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
357 case OpMIPS64MOVVstore:
358 return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
359 case OpMIPS64MOVVstorezero:
360 return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
361 case OpMIPS64MOVWUload:
362 return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
363 case OpMIPS64MOVWUreg:
364 return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
365 case OpMIPS64MOVWload:
366 return rewriteValueMIPS64_OpMIPS64MOVWload(v)
367 case OpMIPS64MOVWreg:
368 return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
369 case OpMIPS64MOVWstore:
370 return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
371 case OpMIPS64MOVWstorezero:
372 return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
373 case OpMIPS64NEGV:
374 return rewriteValueMIPS64_OpMIPS64NEGV(v)
375 case OpMIPS64NOR:
376 return rewriteValueMIPS64_OpMIPS64NOR(v)
377 case OpMIPS64NORconst:
378 return rewriteValueMIPS64_OpMIPS64NORconst(v)
379 case OpMIPS64OR:
380 return rewriteValueMIPS64_OpMIPS64OR(v)
381 case OpMIPS64ORconst:
382 return rewriteValueMIPS64_OpMIPS64ORconst(v)
383 case OpMIPS64SGT:
384 return rewriteValueMIPS64_OpMIPS64SGT(v)
385 case OpMIPS64SGTU:
386 return rewriteValueMIPS64_OpMIPS64SGTU(v)
387 case OpMIPS64SGTUconst:
388 return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
389 case OpMIPS64SGTconst:
390 return rewriteValueMIPS64_OpMIPS64SGTconst(v)
391 case OpMIPS64SLLV:
392 return rewriteValueMIPS64_OpMIPS64SLLV(v)
393 case OpMIPS64SLLVconst:
394 return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
395 case OpMIPS64SRAV:
396 return rewriteValueMIPS64_OpMIPS64SRAV(v)
397 case OpMIPS64SRAVconst:
398 return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
399 case OpMIPS64SRLV:
400 return rewriteValueMIPS64_OpMIPS64SRLV(v)
401 case OpMIPS64SRLVconst:
402 return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
403 case OpMIPS64SUBV:
404 return rewriteValueMIPS64_OpMIPS64SUBV(v)
405 case OpMIPS64SUBVconst:
406 return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
407 case OpMIPS64XOR:
408 return rewriteValueMIPS64_OpMIPS64XOR(v)
409 case OpMIPS64XORconst:
410 return rewriteValueMIPS64_OpMIPS64XORconst(v)
411 case OpMod16:
412 return rewriteValueMIPS64_OpMod16(v)
413 case OpMod16u:
414 return rewriteValueMIPS64_OpMod16u(v)
415 case OpMod32:
416 return rewriteValueMIPS64_OpMod32(v)
417 case OpMod32u:
418 return rewriteValueMIPS64_OpMod32u(v)
419 case OpMod64:
420 return rewriteValueMIPS64_OpMod64(v)
421 case OpMod64u:
422 return rewriteValueMIPS64_OpMod64u(v)
423 case OpMod8:
424 return rewriteValueMIPS64_OpMod8(v)
425 case OpMod8u:
426 return rewriteValueMIPS64_OpMod8u(v)
427 case OpMove:
428 return rewriteValueMIPS64_OpMove(v)
429 case OpMul16:
430 return rewriteValueMIPS64_OpMul16(v)
431 case OpMul32:
432 return rewriteValueMIPS64_OpMul32(v)
433 case OpMul32F:
434 v.Op = OpMIPS64MULF
435 return true
436 case OpMul64:
437 return rewriteValueMIPS64_OpMul64(v)
438 case OpMul64F:
439 v.Op = OpMIPS64MULD
440 return true
441 case OpMul64uhilo:
442 v.Op = OpMIPS64MULVU
443 return true
444 case OpMul8:
445 return rewriteValueMIPS64_OpMul8(v)
446 case OpNeg16:
447 v.Op = OpMIPS64NEGV
448 return true
449 case OpNeg32:
450 v.Op = OpMIPS64NEGV
451 return true
452 case OpNeg32F:
453 v.Op = OpMIPS64NEGF
454 return true
455 case OpNeg64:
456 v.Op = OpMIPS64NEGV
457 return true
458 case OpNeg64F:
459 v.Op = OpMIPS64NEGD
460 return true
461 case OpNeg8:
462 v.Op = OpMIPS64NEGV
463 return true
464 case OpNeq16:
465 return rewriteValueMIPS64_OpNeq16(v)
466 case OpNeq32:
467 return rewriteValueMIPS64_OpNeq32(v)
468 case OpNeq32F:
469 return rewriteValueMIPS64_OpNeq32F(v)
470 case OpNeq64:
471 return rewriteValueMIPS64_OpNeq64(v)
472 case OpNeq64F:
473 return rewriteValueMIPS64_OpNeq64F(v)
474 case OpNeq8:
475 return rewriteValueMIPS64_OpNeq8(v)
476 case OpNeqB:
477 v.Op = OpMIPS64XOR
478 return true
479 case OpNeqPtr:
480 return rewriteValueMIPS64_OpNeqPtr(v)
481 case OpNilCheck:
482 v.Op = OpMIPS64LoweredNilCheck
483 return true
484 case OpNot:
485 return rewriteValueMIPS64_OpNot(v)
486 case OpOffPtr:
487 return rewriteValueMIPS64_OpOffPtr(v)
488 case OpOr16:
489 v.Op = OpMIPS64OR
490 return true
491 case OpOr32:
492 v.Op = OpMIPS64OR
493 return true
494 case OpOr64:
495 v.Op = OpMIPS64OR
496 return true
497 case OpOr8:
498 v.Op = OpMIPS64OR
499 return true
500 case OpOrB:
501 v.Op = OpMIPS64OR
502 return true
503 case OpPanicBounds:
504 return rewriteValueMIPS64_OpPanicBounds(v)
505 case OpPubBarrier:
506 v.Op = OpMIPS64LoweredPubBarrier
507 return true
508 case OpRotateLeft16:
509 return rewriteValueMIPS64_OpRotateLeft16(v)
510 case OpRotateLeft32:
511 return rewriteValueMIPS64_OpRotateLeft32(v)
512 case OpRotateLeft64:
513 return rewriteValueMIPS64_OpRotateLeft64(v)
514 case OpRotateLeft8:
515 return rewriteValueMIPS64_OpRotateLeft8(v)
516 case OpRound32F:
517 v.Op = OpCopy
518 return true
519 case OpRound64F:
520 v.Op = OpCopy
521 return true
522 case OpRsh16Ux16:
523 return rewriteValueMIPS64_OpRsh16Ux16(v)
524 case OpRsh16Ux32:
525 return rewriteValueMIPS64_OpRsh16Ux32(v)
526 case OpRsh16Ux64:
527 return rewriteValueMIPS64_OpRsh16Ux64(v)
528 case OpRsh16Ux8:
529 return rewriteValueMIPS64_OpRsh16Ux8(v)
530 case OpRsh16x16:
531 return rewriteValueMIPS64_OpRsh16x16(v)
532 case OpRsh16x32:
533 return rewriteValueMIPS64_OpRsh16x32(v)
534 case OpRsh16x64:
535 return rewriteValueMIPS64_OpRsh16x64(v)
536 case OpRsh16x8:
537 return rewriteValueMIPS64_OpRsh16x8(v)
538 case OpRsh32Ux16:
539 return rewriteValueMIPS64_OpRsh32Ux16(v)
540 case OpRsh32Ux32:
541 return rewriteValueMIPS64_OpRsh32Ux32(v)
542 case OpRsh32Ux64:
543 return rewriteValueMIPS64_OpRsh32Ux64(v)
544 case OpRsh32Ux8:
545 return rewriteValueMIPS64_OpRsh32Ux8(v)
546 case OpRsh32x16:
547 return rewriteValueMIPS64_OpRsh32x16(v)
548 case OpRsh32x32:
549 return rewriteValueMIPS64_OpRsh32x32(v)
550 case OpRsh32x64:
551 return rewriteValueMIPS64_OpRsh32x64(v)
552 case OpRsh32x8:
553 return rewriteValueMIPS64_OpRsh32x8(v)
554 case OpRsh64Ux16:
555 return rewriteValueMIPS64_OpRsh64Ux16(v)
556 case OpRsh64Ux32:
557 return rewriteValueMIPS64_OpRsh64Ux32(v)
558 case OpRsh64Ux64:
559 return rewriteValueMIPS64_OpRsh64Ux64(v)
560 case OpRsh64Ux8:
561 return rewriteValueMIPS64_OpRsh64Ux8(v)
562 case OpRsh64x16:
563 return rewriteValueMIPS64_OpRsh64x16(v)
564 case OpRsh64x32:
565 return rewriteValueMIPS64_OpRsh64x32(v)
566 case OpRsh64x64:
567 return rewriteValueMIPS64_OpRsh64x64(v)
568 case OpRsh64x8:
569 return rewriteValueMIPS64_OpRsh64x8(v)
570 case OpRsh8Ux16:
571 return rewriteValueMIPS64_OpRsh8Ux16(v)
572 case OpRsh8Ux32:
573 return rewriteValueMIPS64_OpRsh8Ux32(v)
574 case OpRsh8Ux64:
575 return rewriteValueMIPS64_OpRsh8Ux64(v)
576 case OpRsh8Ux8:
577 return rewriteValueMIPS64_OpRsh8Ux8(v)
578 case OpRsh8x16:
579 return rewriteValueMIPS64_OpRsh8x16(v)
580 case OpRsh8x32:
581 return rewriteValueMIPS64_OpRsh8x32(v)
582 case OpRsh8x64:
583 return rewriteValueMIPS64_OpRsh8x64(v)
584 case OpRsh8x8:
585 return rewriteValueMIPS64_OpRsh8x8(v)
586 case OpSelect0:
587 return rewriteValueMIPS64_OpSelect0(v)
588 case OpSelect1:
589 return rewriteValueMIPS64_OpSelect1(v)
590 case OpSignExt16to32:
591 v.Op = OpMIPS64MOVHreg
592 return true
593 case OpSignExt16to64:
594 v.Op = OpMIPS64MOVHreg
595 return true
596 case OpSignExt32to64:
597 v.Op = OpMIPS64MOVWreg
598 return true
599 case OpSignExt8to16:
600 v.Op = OpMIPS64MOVBreg
601 return true
602 case OpSignExt8to32:
603 v.Op = OpMIPS64MOVBreg
604 return true
605 case OpSignExt8to64:
606 v.Op = OpMIPS64MOVBreg
607 return true
608 case OpSlicemask:
609 return rewriteValueMIPS64_OpSlicemask(v)
610 case OpSqrt:
611 v.Op = OpMIPS64SQRTD
612 return true
613 case OpSqrt32:
614 v.Op = OpMIPS64SQRTF
615 return true
616 case OpStaticCall:
617 v.Op = OpMIPS64CALLstatic
618 return true
619 case OpStore:
620 return rewriteValueMIPS64_OpStore(v)
621 case OpSub16:
622 v.Op = OpMIPS64SUBV
623 return true
624 case OpSub32:
625 v.Op = OpMIPS64SUBV
626 return true
627 case OpSub32F:
628 v.Op = OpMIPS64SUBF
629 return true
630 case OpSub64:
631 v.Op = OpMIPS64SUBV
632 return true
633 case OpSub64F:
634 v.Op = OpMIPS64SUBD
635 return true
636 case OpSub8:
637 v.Op = OpMIPS64SUBV
638 return true
639 case OpSubPtr:
640 v.Op = OpMIPS64SUBV
641 return true
642 case OpTailCall:
643 v.Op = OpMIPS64CALLtail
644 return true
645 case OpTrunc16to8:
646 v.Op = OpCopy
647 return true
648 case OpTrunc32to16:
649 v.Op = OpCopy
650 return true
651 case OpTrunc32to8:
652 v.Op = OpCopy
653 return true
654 case OpTrunc64to16:
655 v.Op = OpCopy
656 return true
657 case OpTrunc64to32:
658 v.Op = OpCopy
659 return true
660 case OpTrunc64to8:
661 v.Op = OpCopy
662 return true
663 case OpWB:
664 v.Op = OpMIPS64LoweredWB
665 return true
666 case OpXor16:
667 v.Op = OpMIPS64XOR
668 return true
669 case OpXor32:
670 v.Op = OpMIPS64XOR
671 return true
672 case OpXor64:
673 v.Op = OpMIPS64XOR
674 return true
675 case OpXor8:
676 v.Op = OpMIPS64XOR
677 return true
678 case OpZero:
679 return rewriteValueMIPS64_OpZero(v)
680 case OpZeroExt16to32:
681 v.Op = OpMIPS64MOVHUreg
682 return true
683 case OpZeroExt16to64:
684 v.Op = OpMIPS64MOVHUreg
685 return true
686 case OpZeroExt32to64:
687 v.Op = OpMIPS64MOVWUreg
688 return true
689 case OpZeroExt8to16:
690 v.Op = OpMIPS64MOVBUreg
691 return true
692 case OpZeroExt8to32:
693 v.Op = OpMIPS64MOVBUreg
694 return true
695 case OpZeroExt8to64:
696 v.Op = OpMIPS64MOVBUreg
697 return true
698 }
699 return false
700 }
701 func rewriteValueMIPS64_OpAddr(v *Value) bool {
702 v_0 := v.Args[0]
// match: (Addr {sym} base)
// result: (MOVVaddr {sym} base)
705 for {
706 sym := auxToSym(v.Aux)
707 base := v_0
708 v.reset(OpMIPS64MOVVaddr)
709 v.Aux = symToAux(sym)
710 v.AddArg(base)
711 return true
712 }
713 }
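// rewriteValueMIPS64_OpAtomicAnd8 lowers a byte-wide atomic AND, which has no
// direct mips64 op, to LoweredAtomicAnd32 on the aligned 32-bit word holding
// the byte: the pointer is rounded down with AND ^3, the operand is
// zero-extended and shifted into the byte's lane, and every other lane is
// filled with 1 bits so the AND leaves it unchanged. Little- and big-endian
// targets differ only in how the lane offset is derived from the pointer's
// low bits (the big-endian rule XORs them with 3).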
714 func rewriteValueMIPS64_OpAtomicAnd8(v *Value) bool {
715 v_2 := v.Args[2]
716 v_1 := v.Args[1]
717 v_0 := v.Args[0]
718 b := v.Block
719 config := b.Func.Config
720 typ := &b.Func.Config.Types
// match: (AtomicAnd8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) (NORconst <typ.UInt64> [0] (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))))) mem)
724 for {
725 ptr := v_0
726 val := v_1
727 mem := v_2
728 if !(!config.BigEndian) {
729 break
730 }
731 v.reset(OpMIPS64LoweredAtomicAnd32)
732 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
733 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
734 v1.AuxInt = int64ToAuxInt(^3)
735 v0.AddArg2(v1, ptr)
736 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
737 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
738 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
739 v4.AddArg(val)
740 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
741 v5.AuxInt = int64ToAuxInt(3)
742 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
743 v6.AuxInt = int64ToAuxInt(3)
744 v6.AddArg(ptr)
745 v5.AddArg(v6)
746 v3.AddArg2(v4, v5)
747 v7 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
748 v7.AuxInt = int64ToAuxInt(0)
749 v8 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
750 v9 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
751 v9.AuxInt = int64ToAuxInt(0xff)
752 v8.AddArg2(v9, v5)
753 v7.AddArg(v8)
754 v2.AddArg2(v3, v7)
755 v.AddArg3(v0, v2, mem)
756 return true
757 }
// match: (AtomicAnd8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) (NORconst <typ.UInt64> [0] (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))))) mem)
761 for {
762 ptr := v_0
763 val := v_1
764 mem := v_2
765 if !(config.BigEndian) {
766 break
767 }
768 v.reset(OpMIPS64LoweredAtomicAnd32)
769 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
770 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
771 v1.AuxInt = int64ToAuxInt(^3)
772 v0.AddArg2(v1, ptr)
773 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
774 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
775 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
776 v4.AddArg(val)
777 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
778 v5.AuxInt = int64ToAuxInt(3)
779 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
780 v6.AuxInt = int64ToAuxInt(3)
781 v7 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
782 v7.AuxInt = int64ToAuxInt(3)
783 v7.AddArg(ptr)
784 v6.AddArg(v7)
785 v5.AddArg(v6)
786 v3.AddArg2(v4, v5)
787 v8 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
788 v8.AuxInt = int64ToAuxInt(0)
789 v9 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
790 v10 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
791 v10.AuxInt = int64ToAuxInt(0xff)
792 v9.AddArg2(v10, v5)
793 v8.AddArg(v9)
794 v2.AddArg2(v3, v8)
795 v.AddArg3(v0, v2, mem)
796 return true
797 }
798 return false
799 }
800 func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
801 v_3 := v.Args[3]
802 v_2 := v.Args[2]
803 v_1 := v.Args[1]
804 v_0 := v.Args[0]
805 b := v.Block
806 typ := &b.Func.Config.Types
// match: (AtomicCompareAndSwap32 ptr old new mem)
// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
809 for {
810 ptr := v_0
811 old := v_1
812 new := v_2
813 mem := v_3
814 v.reset(OpMIPS64LoweredAtomicCas32)
815 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
816 v0.AddArg(old)
817 v.AddArg4(ptr, v0, new, mem)
818 return true
819 }
820 }
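// rewriteValueMIPS64_OpAtomicOr8 follows the same byte-in-word scheme as
// AtomicAnd8: it becomes LoweredAtomicOr32 on the containing aligned word with
// the zero-extended value shifted into the byte's lane. No all-ones mask is
// needed, since ORing zero bits into the neighboring lanes leaves them alone.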
821 func rewriteValueMIPS64_OpAtomicOr8(v *Value) bool {
822 v_2 := v.Args[2]
823 v_1 := v.Args[1]
824 v_0 := v.Args[0]
825 b := v.Block
826 config := b.Func.Config
827 typ := &b.Func.Config.Types
// match: (AtomicOr8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
831 for {
832 ptr := v_0
833 val := v_1
834 mem := v_2
835 if !(!config.BigEndian) {
836 break
837 }
838 v.reset(OpMIPS64LoweredAtomicOr32)
839 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
840 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
841 v1.AuxInt = int64ToAuxInt(^3)
842 v0.AddArg2(v1, ptr)
843 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
844 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
845 v3.AddArg(val)
846 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
847 v4.AuxInt = int64ToAuxInt(3)
848 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
849 v5.AuxInt = int64ToAuxInt(3)
850 v5.AddArg(ptr)
851 v4.AddArg(v5)
852 v2.AddArg2(v3, v4)
853 v.AddArg3(v0, v2, mem)
854 return true
855 }
// match: (AtomicOr8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) mem)
859 for {
860 ptr := v_0
861 val := v_1
862 mem := v_2
863 if !(config.BigEndian) {
864 break
865 }
866 v.reset(OpMIPS64LoweredAtomicOr32)
867 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
868 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
869 v1.AuxInt = int64ToAuxInt(^3)
870 v0.AddArg2(v1, ptr)
871 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
872 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
873 v3.AddArg(val)
874 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
875 v4.AuxInt = int64ToAuxInt(3)
876 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
877 v5.AuxInt = int64ToAuxInt(3)
878 v6 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
879 v6.AuxInt = int64ToAuxInt(3)
880 v6.AddArg(ptr)
881 v5.AddArg(v6)
882 v4.AddArg(v5)
883 v2.AddArg2(v3, v4)
884 v.AddArg3(v0, v2, mem)
885 return true
886 }
887 return false
888 }
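// rewriteValueMIPS64_OpAvg64u lowers the unsigned 64-bit average as
// (x-y)>>1 + y instead of (x+y)>>1, so no intermediate sum can overflow; the
// form is exact for x >= y, which holds where the generic Avg64u op is used
// (the unsigned division-by-constant rewrites).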
889 func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
890 v_1 := v.Args[1]
891 v_0 := v.Args[0]
892 b := v.Block
// match: (Avg64u <t> x y)
// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
895 for {
896 t := v.Type
897 x := v_0
898 y := v_1
899 v.reset(OpMIPS64ADDV)
900 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
901 v0.AuxInt = int64ToAuxInt(1)
902 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
903 v1.AddArg2(x, y)
904 v0.AddArg(v1)
905 v.AddArg2(v0, y)
906 return true
907 }
908 }
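// The Com* (bitwise complement) ops all lower to NOR with a zero constant:
// mips64 has no separate NOT instruction, and ^x == NOR(0, x).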
909 func rewriteValueMIPS64_OpCom16(v *Value) bool {
910 v_0 := v.Args[0]
911 b := v.Block
912 typ := &b.Func.Config.Types
// match: (Com16 x)
// result: (NOR (MOVVconst [0]) x)
915 for {
916 x := v_0
917 v.reset(OpMIPS64NOR)
918 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
919 v0.AuxInt = int64ToAuxInt(0)
920 v.AddArg2(v0, x)
921 return true
922 }
923 }
924 func rewriteValueMIPS64_OpCom32(v *Value) bool {
925 v_0 := v.Args[0]
926 b := v.Block
927 typ := &b.Func.Config.Types
// match: (Com32 x)
// result: (NOR (MOVVconst [0]) x)
930 for {
931 x := v_0
932 v.reset(OpMIPS64NOR)
933 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
934 v0.AuxInt = int64ToAuxInt(0)
935 v.AddArg2(v0, x)
936 return true
937 }
938 }
939 func rewriteValueMIPS64_OpCom64(v *Value) bool {
940 v_0 := v.Args[0]
941 b := v.Block
942 typ := &b.Func.Config.Types
// match: (Com64 x)
// result: (NOR (MOVVconst [0]) x)
945 for {
946 x := v_0
947 v.reset(OpMIPS64NOR)
948 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
949 v0.AuxInt = int64ToAuxInt(0)
950 v.AddArg2(v0, x)
951 return true
952 }
953 }
954 func rewriteValueMIPS64_OpCom8(v *Value) bool {
955 v_0 := v.Args[0]
956 b := v.Block
957 typ := &b.Func.Config.Types
// match: (Com8 x)
// result: (NOR (MOVVconst [0]) x)
960 for {
961 x := v_0
962 v.reset(OpMIPS64NOR)
963 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
964 v0.AuxInt = int64ToAuxInt(0)
965 v.AddArg2(v0, x)
966 return true
967 }
968 }
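// Integer constants of every width become a single MOVVconst whose auxint is
// the value converted to int64 (sign-extending the narrow types); float
// constants become MOVFconst/MOVDconst carrying the value as a float64.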
969 func rewriteValueMIPS64_OpConst16(v *Value) bool {
// match: (Const16 [val])
// result: (MOVVconst [int64(val)])
972 for {
973 val := auxIntToInt16(v.AuxInt)
974 v.reset(OpMIPS64MOVVconst)
975 v.AuxInt = int64ToAuxInt(int64(val))
976 return true
977 }
978 }
979 func rewriteValueMIPS64_OpConst32(v *Value) bool {
// match: (Const32 [val])
// result: (MOVVconst [int64(val)])
982 for {
983 val := auxIntToInt32(v.AuxInt)
984 v.reset(OpMIPS64MOVVconst)
985 v.AuxInt = int64ToAuxInt(int64(val))
986 return true
987 }
988 }
989 func rewriteValueMIPS64_OpConst32F(v *Value) bool {
// match: (Const32F [val])
// result: (MOVFconst [float64(val)])
992 for {
993 val := auxIntToFloat32(v.AuxInt)
994 v.reset(OpMIPS64MOVFconst)
995 v.AuxInt = float64ToAuxInt(float64(val))
996 return true
997 }
998 }
999 func rewriteValueMIPS64_OpConst64(v *Value) bool {
// match: (Const64 [val])
// result: (MOVVconst [int64(val)])
1002 for {
1003 val := auxIntToInt64(v.AuxInt)
1004 v.reset(OpMIPS64MOVVconst)
1005 v.AuxInt = int64ToAuxInt(int64(val))
1006 return true
1007 }
1008 }
1009 func rewriteValueMIPS64_OpConst64F(v *Value) bool {
// match: (Const64F [val])
// result: (MOVDconst [float64(val)])
1012 for {
1013 val := auxIntToFloat64(v.AuxInt)
1014 v.reset(OpMIPS64MOVDconst)
1015 v.AuxInt = float64ToAuxInt(float64(val))
1016 return true
1017 }
1018 }
1019 func rewriteValueMIPS64_OpConst8(v *Value) bool {
// match: (Const8 [val])
// result: (MOVVconst [int64(val)])
1022 for {
1023 val := auxIntToInt8(v.AuxInt)
1024 v.reset(OpMIPS64MOVVconst)
1025 v.AuxInt = int64ToAuxInt(int64(val))
1026 return true
1027 }
1028 }
1029 func rewriteValueMIPS64_OpConstBool(v *Value) bool {
// match: (ConstBool [t])
// result: (MOVVconst [int64(b2i(t))])
1032 for {
1033 t := auxIntToBool(v.AuxInt)
1034 v.reset(OpMIPS64MOVVconst)
1035 v.AuxInt = int64ToAuxInt(int64(b2i(t)))
1036 return true
1037 }
1038 }
1039 func rewriteValueMIPS64_OpConstNil(v *Value) bool {
// match: (ConstNil)
// result: (MOVVconst [0])
1042 for {
1043 v.reset(OpMIPS64MOVVconst)
1044 v.AuxInt = int64ToAuxInt(0)
1045 return true
1046 }
1047 }
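// Integer division is lowered through the two-result DIVV/DIVVU ops: operands
// narrower than 64 bits are sign- or zero-extended first, and Select1 picks
// the quotient out of the result tuple.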
1048 func rewriteValueMIPS64_OpDiv16(v *Value) bool {
1049 v_1 := v.Args[1]
1050 v_0 := v.Args[0]
1051 b := v.Block
1052 typ := &b.Func.Config.Types
// match: (Div16 x y)
// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
1055 for {
1056 x := v_0
1057 y := v_1
1058 v.reset(OpSelect1)
1059 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1060 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1061 v1.AddArg(x)
1062 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1063 v2.AddArg(y)
1064 v0.AddArg2(v1, v2)
1065 v.AddArg(v0)
1066 return true
1067 }
1068 }
1069 func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
1070 v_1 := v.Args[1]
1071 v_0 := v.Args[0]
1072 b := v.Block
1073 typ := &b.Func.Config.Types
// match: (Div16u x y)
// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
1076 for {
1077 x := v_0
1078 y := v_1
1079 v.reset(OpSelect1)
1080 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1081 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1082 v1.AddArg(x)
1083 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1084 v2.AddArg(y)
1085 v0.AddArg2(v1, v2)
1086 v.AddArg(v0)
1087 return true
1088 }
1089 }
1090 func rewriteValueMIPS64_OpDiv32(v *Value) bool {
1091 v_1 := v.Args[1]
1092 v_0 := v.Args[0]
1093 b := v.Block
1094 typ := &b.Func.Config.Types
// match: (Div32 x y)
// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
1097 for {
1098 x := v_0
1099 y := v_1
1100 v.reset(OpSelect1)
1101 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1102 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1103 v1.AddArg(x)
1104 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1105 v2.AddArg(y)
1106 v0.AddArg2(v1, v2)
1107 v.AddArg(v0)
1108 return true
1109 }
1110 }
1111 func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
1112 v_1 := v.Args[1]
1113 v_0 := v.Args[0]
1114 b := v.Block
1115 typ := &b.Func.Config.Types
// match: (Div32u x y)
// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
1118 for {
1119 x := v_0
1120 y := v_1
1121 v.reset(OpSelect1)
1122 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1123 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1124 v1.AddArg(x)
1125 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1126 v2.AddArg(y)
1127 v0.AddArg2(v1, v2)
1128 v.AddArg(v0)
1129 return true
1130 }
1131 }
1132 func rewriteValueMIPS64_OpDiv64(v *Value) bool {
1133 v_1 := v.Args[1]
1134 v_0 := v.Args[0]
1135 b := v.Block
1136 typ := &b.Func.Config.Types
// match: (Div64 x y)
// result: (Select1 (DIVV x y))
1139 for {
1140 x := v_0
1141 y := v_1
1142 v.reset(OpSelect1)
1143 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1144 v0.AddArg2(x, y)
1145 v.AddArg(v0)
1146 return true
1147 }
1148 }
1149 func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
1150 v_1 := v.Args[1]
1151 v_0 := v.Args[0]
1152 b := v.Block
1153 typ := &b.Func.Config.Types
// match: (Div64u x y)
// result: (Select1 (DIVVU x y))
1156 for {
1157 x := v_0
1158 y := v_1
1159 v.reset(OpSelect1)
1160 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1161 v0.AddArg2(x, y)
1162 v.AddArg(v0)
1163 return true
1164 }
1165 }
1166 func rewriteValueMIPS64_OpDiv8(v *Value) bool {
1167 v_1 := v.Args[1]
1168 v_0 := v.Args[0]
1169 b := v.Block
1170 typ := &b.Func.Config.Types
// match: (Div8 x y)
// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
1173 for {
1174 x := v_0
1175 y := v_1
1176 v.reset(OpSelect1)
1177 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1178 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1179 v1.AddArg(x)
1180 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1181 v2.AddArg(y)
1182 v0.AddArg2(v1, v2)
1183 v.AddArg(v0)
1184 return true
1185 }
1186 }
1187 func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
1188 v_1 := v.Args[1]
1189 v_0 := v.Args[0]
1190 b := v.Block
1191 typ := &b.Func.Config.Types
// match: (Div8u x y)
// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
1194 for {
1195 x := v_0
1196 y := v_1
1197 v.reset(OpSelect1)
1198 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1199 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1200 v1.AddArg(x)
1201 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1202 v2.AddArg(y)
1203 v0.AddArg2(v1, v2)
1204 v.AddArg(v0)
1205 return true
1206 }
1207 }
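// Equality lowers to SGTU 1 (XOR x y): the XOR is zero exactly when the
// operands are equal, and 1 >u z holds only for z == 0. Narrow integer
// operands are zero-extended first so high bits cannot leak into the XOR;
// float equality uses a CMPEQ flag compare read back with FPFlagTrue.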
1208 func rewriteValueMIPS64_OpEq16(v *Value) bool {
1209 v_1 := v.Args[1]
1210 v_0 := v.Args[0]
1211 b := v.Block
1212 typ := &b.Func.Config.Types
// match: (Eq16 x y)
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
1215 for {
1216 x := v_0
1217 y := v_1
1218 v.reset(OpMIPS64SGTU)
1219 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1220 v0.AuxInt = int64ToAuxInt(1)
1221 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1222 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1223 v2.AddArg(x)
1224 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1225 v3.AddArg(y)
1226 v1.AddArg2(v2, v3)
1227 v.AddArg2(v0, v1)
1228 return true
1229 }
1230 }
1231 func rewriteValueMIPS64_OpEq32(v *Value) bool {
1232 v_1 := v.Args[1]
1233 v_0 := v.Args[0]
1234 b := v.Block
1235 typ := &b.Func.Config.Types
// match: (Eq32 x y)
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
1238 for {
1239 x := v_0
1240 y := v_1
1241 v.reset(OpMIPS64SGTU)
1242 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1243 v0.AuxInt = int64ToAuxInt(1)
1244 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1245 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1246 v2.AddArg(x)
1247 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1248 v3.AddArg(y)
1249 v1.AddArg2(v2, v3)
1250 v.AddArg2(v0, v1)
1251 return true
1252 }
1253 }
1254 func rewriteValueMIPS64_OpEq32F(v *Value) bool {
1255 v_1 := v.Args[1]
1256 v_0 := v.Args[0]
1257 b := v.Block
// match: (Eq32F x y)
// result: (FPFlagTrue (CMPEQF x y))
1260 for {
1261 x := v_0
1262 y := v_1
1263 v.reset(OpMIPS64FPFlagTrue)
1264 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
1265 v0.AddArg2(x, y)
1266 v.AddArg(v0)
1267 return true
1268 }
1269 }
1270 func rewriteValueMIPS64_OpEq64(v *Value) bool {
1271 v_1 := v.Args[1]
1272 v_0 := v.Args[0]
1273 b := v.Block
1274 typ := &b.Func.Config.Types
// match: (Eq64 x y)
// result: (SGTU (MOVVconst [1]) (XOR x y))
1277 for {
1278 x := v_0
1279 y := v_1
1280 v.reset(OpMIPS64SGTU)
1281 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1282 v0.AuxInt = int64ToAuxInt(1)
1283 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1284 v1.AddArg2(x, y)
1285 v.AddArg2(v0, v1)
1286 return true
1287 }
1288 }
1289 func rewriteValueMIPS64_OpEq64F(v *Value) bool {
1290 v_1 := v.Args[1]
1291 v_0 := v.Args[0]
1292 b := v.Block
// match: (Eq64F x y)
// result: (FPFlagTrue (CMPEQD x y))
1295 for {
1296 x := v_0
1297 y := v_1
1298 v.reset(OpMIPS64FPFlagTrue)
1299 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
1300 v0.AddArg2(x, y)
1301 v.AddArg(v0)
1302 return true
1303 }
1304 }
1305 func rewriteValueMIPS64_OpEq8(v *Value) bool {
1306 v_1 := v.Args[1]
1307 v_0 := v.Args[0]
1308 b := v.Block
1309 typ := &b.Func.Config.Types
// match: (Eq8 x y)
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
1312 for {
1313 x := v_0
1314 y := v_1
1315 v.reset(OpMIPS64SGTU)
1316 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1317 v0.AuxInt = int64ToAuxInt(1)
1318 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1319 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1320 v2.AddArg(x)
1321 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1322 v3.AddArg(y)
1323 v1.AddArg2(v2, v3)
1324 v.AddArg2(v0, v1)
1325 return true
1326 }
1327 }
1328 func rewriteValueMIPS64_OpEqB(v *Value) bool {
1329 v_1 := v.Args[1]
1330 v_0 := v.Args[0]
1331 b := v.Block
1332 typ := &b.Func.Config.Types
// match: (EqB x y)
// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
1335 for {
1336 x := v_0
1337 y := v_1
1338 v.reset(OpMIPS64XOR)
1339 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1340 v0.AuxInt = int64ToAuxInt(1)
1341 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
1342 v1.AddArg2(x, y)
1343 v.AddArg2(v0, v1)
1344 return true
1345 }
1346 }
1347 func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
1348 v_1 := v.Args[1]
1349 v_0 := v.Args[0]
1350 b := v.Block
1351 typ := &b.Func.Config.Types
// match: (EqPtr x y)
// result: (SGTU (MOVVconst [1]) (XOR x y))
1354 for {
1355 x := v_0
1356 y := v_1
1357 v.reset(OpMIPS64SGTU)
1358 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1359 v0.AuxInt = int64ToAuxInt(1)
1360 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1361 v1.AddArg2(x, y)
1362 v.AddArg2(v0, v1)
1363 return true
1364 }
1365 }
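// Hmul32 (the high 32 bits of a 32x32 product) sign-extends both operands,
// multiplies with MULV, and arithmetic-shifts the low 64-bit half of the
// result tuple right by 32; the full 32x32 product always fits in that low
// half. Hmul64 simply selects the high half of the MULV/MULVU tuple.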
1366 func rewriteValueMIPS64_OpHmul32(v *Value) bool {
1367 v_1 := v.Args[1]
1368 v_0 := v.Args[0]
1369 b := v.Block
1370 typ := &b.Func.Config.Types
// match: (Hmul32 x y)
// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
1373 for {
1374 x := v_0
1375 y := v_1
1376 v.reset(OpMIPS64SRAVconst)
1377 v.AuxInt = int64ToAuxInt(32)
1378 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
1379 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1380 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1381 v2.AddArg(x)
1382 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1383 v3.AddArg(y)
1384 v1.AddArg2(v2, v3)
1385 v0.AddArg(v1)
1386 v.AddArg(v0)
1387 return true
1388 }
1389 }
1390 func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
1391 v_1 := v.Args[1]
1392 v_0 := v.Args[0]
1393 b := v.Block
1394 typ := &b.Func.Config.Types
// match: (Hmul32u x y)
// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
1397 for {
1398 x := v_0
1399 y := v_1
1400 v.reset(OpMIPS64SRLVconst)
1401 v.AuxInt = int64ToAuxInt(32)
1402 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
1403 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1404 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1405 v2.AddArg(x)
1406 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1407 v3.AddArg(y)
1408 v1.AddArg2(v2, v3)
1409 v0.AddArg(v1)
1410 v.AddArg(v0)
1411 return true
1412 }
1413 }
1414 func rewriteValueMIPS64_OpHmul64(v *Value) bool {
1415 v_1 := v.Args[1]
1416 v_0 := v.Args[0]
1417 b := v.Block
1418 typ := &b.Func.Config.Types
// match: (Hmul64 x y)
// result: (Select0 (MULV x y))
1421 for {
1422 x := v_0
1423 y := v_1
1424 v.reset(OpSelect0)
1425 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1426 v0.AddArg2(x, y)
1427 v.AddArg(v0)
1428 return true
1429 }
1430 }
1431 func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
1432 v_1 := v.Args[1]
1433 v_0 := v.Args[0]
1434 b := v.Block
1435 typ := &b.Func.Config.Types
// match: (Hmul64u x y)
// result: (Select0 (MULVU x y))
1438 for {
1439 x := v_0
1440 y := v_1
1441 v.reset(OpSelect0)
1442 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1443 v0.AddArg2(x, y)
1444 v.AddArg(v0)
1445 return true
1446 }
1447 }
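// IsInBounds (0 <= idx < len) reduces to the single unsigned compare
// SGTU len idx: a negative idx becomes a huge unsigned value and fails the
// same test.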
1448 func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
1449 v_1 := v.Args[1]
1450 v_0 := v.Args[0]
// match: (IsInBounds idx len)
// result: (SGTU len idx)
1453 for {
1454 idx := v_0
1455 len := v_1
1456 v.reset(OpMIPS64SGTU)
1457 v.AddArg2(len, idx)
1458 return true
1459 }
1460 }
1461 func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
1462 v_0 := v.Args[0]
1463 b := v.Block
1464 typ := &b.Func.Config.Types
// match: (IsNonNil ptr)
// result: (SGTU ptr (MOVVconst [0]))
1467 for {
1468 ptr := v_0
1469 v.reset(OpMIPS64SGTU)
1470 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1471 v0.AuxInt = int64ToAuxInt(0)
1472 v.AddArg2(ptr, v0)
1473 return true
1474 }
1475 }
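// IsSliceInBounds (0 <= idx <= len) is the negation of idx >u len, computed
// as XOR 1 (SGTU idx len).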
1476 func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
1477 v_1 := v.Args[1]
1478 v_0 := v.Args[0]
1479 b := v.Block
1480 typ := &b.Func.Config.Types
// match: (IsSliceInBounds idx len)
// result: (XOR (MOVVconst [1]) (SGTU idx len))
1483 for {
1484 idx := v_0
1485 len := v_1
1486 v.reset(OpMIPS64XOR)
1487 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1488 v0.AuxInt = int64ToAuxInt(1)
1489 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1490 v1.AddArg2(idx, len)
1491 v.AddArg2(v0, v1)
1492 return true
1493 }
1494 }
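// The Leq* ops have no direct instruction: x <= y is computed as the negation
// of x > y, i.e. XOR 1 (SGT/SGTU x y), with narrow operands sign- or
// zero-extended first. Floating-point Leq compares with CMPGE and the
// operands swapped, then reads the flag via FPFlagTrue.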
1495 func rewriteValueMIPS64_OpLeq16(v *Value) bool {
1496 v_1 := v.Args[1]
1497 v_0 := v.Args[0]
1498 b := v.Block
1499 typ := &b.Func.Config.Types
// match: (Leq16 x y)
// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
1502 for {
1503 x := v_0
1504 y := v_1
1505 v.reset(OpMIPS64XOR)
1506 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1507 v0.AuxInt = int64ToAuxInt(1)
1508 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1509 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1510 v2.AddArg(x)
1511 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1512 v3.AddArg(y)
1513 v1.AddArg2(v2, v3)
1514 v.AddArg2(v0, v1)
1515 return true
1516 }
1517 }
1518 func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
1519 v_1 := v.Args[1]
1520 v_0 := v.Args[0]
1521 b := v.Block
1522 typ := &b.Func.Config.Types
// match: (Leq16U x y)
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
1525 for {
1526 x := v_0
1527 y := v_1
1528 v.reset(OpMIPS64XOR)
1529 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1530 v0.AuxInt = int64ToAuxInt(1)
1531 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1532 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1533 v2.AddArg(x)
1534 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1535 v3.AddArg(y)
1536 v1.AddArg2(v2, v3)
1537 v.AddArg2(v0, v1)
1538 return true
1539 }
1540 }
1541 func rewriteValueMIPS64_OpLeq32(v *Value) bool {
1542 v_1 := v.Args[1]
1543 v_0 := v.Args[0]
1544 b := v.Block
1545 typ := &b.Func.Config.Types
// match: (Leq32 x y)
// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
1548 for {
1549 x := v_0
1550 y := v_1
1551 v.reset(OpMIPS64XOR)
1552 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1553 v0.AuxInt = int64ToAuxInt(1)
1554 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1555 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1556 v2.AddArg(x)
1557 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1558 v3.AddArg(y)
1559 v1.AddArg2(v2, v3)
1560 v.AddArg2(v0, v1)
1561 return true
1562 }
1563 }
1564 func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
1565 v_1 := v.Args[1]
1566 v_0 := v.Args[0]
1567 b := v.Block
// match: (Leq32F x y)
// result: (FPFlagTrue (CMPGEF y x))
1570 for {
1571 x := v_0
1572 y := v_1
1573 v.reset(OpMIPS64FPFlagTrue)
1574 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
1575 v0.AddArg2(y, x)
1576 v.AddArg(v0)
1577 return true
1578 }
1579 }
1580 func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
1581 v_1 := v.Args[1]
1582 v_0 := v.Args[0]
1583 b := v.Block
1584 typ := &b.Func.Config.Types
// match: (Leq32U x y)
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
1587 for {
1588 x := v_0
1589 y := v_1
1590 v.reset(OpMIPS64XOR)
1591 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1592 v0.AuxInt = int64ToAuxInt(1)
1593 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1594 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1595 v2.AddArg(x)
1596 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1597 v3.AddArg(y)
1598 v1.AddArg2(v2, v3)
1599 v.AddArg2(v0, v1)
1600 return true
1601 }
1602 }
1603 func rewriteValueMIPS64_OpLeq64(v *Value) bool {
1604 v_1 := v.Args[1]
1605 v_0 := v.Args[0]
1606 b := v.Block
1607 typ := &b.Func.Config.Types
// match: (Leq64 x y)
// result: (XOR (MOVVconst [1]) (SGT x y))
1610 for {
1611 x := v_0
1612 y := v_1
1613 v.reset(OpMIPS64XOR)
1614 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1615 v0.AuxInt = int64ToAuxInt(1)
1616 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1617 v1.AddArg2(x, y)
1618 v.AddArg2(v0, v1)
1619 return true
1620 }
1621 }
1622 func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
1623 v_1 := v.Args[1]
1624 v_0 := v.Args[0]
1625 b := v.Block
// match: (Leq64F x y)
// result: (FPFlagTrue (CMPGED y x))
1628 for {
1629 x := v_0
1630 y := v_1
1631 v.reset(OpMIPS64FPFlagTrue)
1632 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
1633 v0.AddArg2(y, x)
1634 v.AddArg(v0)
1635 return true
1636 }
1637 }
1638 func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
1639 v_1 := v.Args[1]
1640 v_0 := v.Args[0]
1641 b := v.Block
1642 typ := &b.Func.Config.Types
// match: (Leq64U x y)
// result: (XOR (MOVVconst [1]) (SGTU x y))
1645 for {
1646 x := v_0
1647 y := v_1
1648 v.reset(OpMIPS64XOR)
1649 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1650 v0.AuxInt = int64ToAuxInt(1)
1651 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1652 v1.AddArg2(x, y)
1653 v.AddArg2(v0, v1)
1654 return true
1655 }
1656 }
1657 func rewriteValueMIPS64_OpLeq8(v *Value) bool {
1658 v_1 := v.Args[1]
1659 v_0 := v.Args[0]
1660 b := v.Block
1661 typ := &b.Func.Config.Types
// match: (Leq8 x y)
// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
1664 for {
1665 x := v_0
1666 y := v_1
1667 v.reset(OpMIPS64XOR)
1668 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1669 v0.AuxInt = int64ToAuxInt(1)
1670 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1671 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1672 v2.AddArg(x)
1673 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1674 v3.AddArg(y)
1675 v1.AddArg2(v2, v3)
1676 v.AddArg2(v0, v1)
1677 return true
1678 }
1679 }
1680 func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
1681 v_1 := v.Args[1]
1682 v_0 := v.Args[0]
1683 b := v.Block
1684 typ := &b.Func.Config.Types
// match: (Leq8U x y)
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
1687 for {
1688 x := v_0
1689 y := v_1
1690 v.reset(OpMIPS64XOR)
1691 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1692 v0.AuxInt = int64ToAuxInt(1)
1693 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1694 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1695 v2.AddArg(x)
1696 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1697 v3.AddArg(y)
1698 v1.AddArg2(v2, v3)
1699 v.AddArg2(v0, v1)
1700 return true
1701 }
1702 }
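// The Less* ops just swap the operands of SGT/SGTU: x < y is y > x.
// Floating-point Less likewise becomes a CMPGT flag compare with the operands
// swapped.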
1703 func rewriteValueMIPS64_OpLess16(v *Value) bool {
1704 v_1 := v.Args[1]
1705 v_0 := v.Args[0]
1706 b := v.Block
1707 typ := &b.Func.Config.Types
// match: (Less16 x y)
// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
1710 for {
1711 x := v_0
1712 y := v_1
1713 v.reset(OpMIPS64SGT)
1714 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1715 v0.AddArg(y)
1716 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1717 v1.AddArg(x)
1718 v.AddArg2(v0, v1)
1719 return true
1720 }
1721 }
1722 func rewriteValueMIPS64_OpLess16U(v *Value) bool {
1723 v_1 := v.Args[1]
1724 v_0 := v.Args[0]
1725 b := v.Block
1726 typ := &b.Func.Config.Types
// match: (Less16U x y)
// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
1729 for {
1730 x := v_0
1731 y := v_1
1732 v.reset(OpMIPS64SGTU)
1733 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1734 v0.AddArg(y)
1735 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1736 v1.AddArg(x)
1737 v.AddArg2(v0, v1)
1738 return true
1739 }
1740 }
1741 func rewriteValueMIPS64_OpLess32(v *Value) bool {
1742 v_1 := v.Args[1]
1743 v_0 := v.Args[0]
1744 b := v.Block
1745 typ := &b.Func.Config.Types
// match: (Less32 x y)
// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
1748 for {
1749 x := v_0
1750 y := v_1
1751 v.reset(OpMIPS64SGT)
1752 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1753 v0.AddArg(y)
1754 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1755 v1.AddArg(x)
1756 v.AddArg2(v0, v1)
1757 return true
1758 }
1759 }
1760 func rewriteValueMIPS64_OpLess32F(v *Value) bool {
1761 v_1 := v.Args[1]
1762 v_0 := v.Args[0]
1763 b := v.Block
// match: (Less32F x y)
// result: (FPFlagTrue (CMPGTF y x))
1766 for {
1767 x := v_0
1768 y := v_1
1769 v.reset(OpMIPS64FPFlagTrue)
1770 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
1771 v0.AddArg2(y, x)
1772 v.AddArg(v0)
1773 return true
1774 }
1775 }
1776 func rewriteValueMIPS64_OpLess32U(v *Value) bool {
1777 v_1 := v.Args[1]
1778 v_0 := v.Args[0]
1779 b := v.Block
1780 typ := &b.Func.Config.Types
// match: (Less32U x y)
// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
1783 for {
1784 x := v_0
1785 y := v_1
1786 v.reset(OpMIPS64SGTU)
1787 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1788 v0.AddArg(y)
1789 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1790 v1.AddArg(x)
1791 v.AddArg2(v0, v1)
1792 return true
1793 }
1794 }
1795 func rewriteValueMIPS64_OpLess64(v *Value) bool {
1796 v_1 := v.Args[1]
1797 v_0 := v.Args[0]
// match: (Less64 x y)
// result: (SGT y x)
1800 for {
1801 x := v_0
1802 y := v_1
1803 v.reset(OpMIPS64SGT)
1804 v.AddArg2(y, x)
1805 return true
1806 }
1807 }
1808 func rewriteValueMIPS64_OpLess64F(v *Value) bool {
1809 v_1 := v.Args[1]
1810 v_0 := v.Args[0]
1811 b := v.Block
// match: (Less64F x y)
// result: (FPFlagTrue (CMPGTD y x))
1814 for {
1815 x := v_0
1816 y := v_1
1817 v.reset(OpMIPS64FPFlagTrue)
1818 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
1819 v0.AddArg2(y, x)
1820 v.AddArg(v0)
1821 return true
1822 }
1823 }
1824 func rewriteValueMIPS64_OpLess64U(v *Value) bool {
1825 v_1 := v.Args[1]
1826 v_0 := v.Args[0]
// match: (Less64U x y)
// result: (SGTU y x)
1829 for {
1830 x := v_0
1831 y := v_1
1832 v.reset(OpMIPS64SGTU)
1833 v.AddArg2(y, x)
1834 return true
1835 }
1836 }
1837 func rewriteValueMIPS64_OpLess8(v *Value) bool {
1838 v_1 := v.Args[1]
1839 v_0 := v.Args[0]
1840 b := v.Block
1841 typ := &b.Func.Config.Types
// match: (Less8 x y)
// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
1844 for {
1845 x := v_0
1846 y := v_1
1847 v.reset(OpMIPS64SGT)
1848 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1849 v0.AddArg(y)
1850 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1851 v1.AddArg(x)
1852 v.AddArg2(v0, v1)
1853 return true
1854 }
1855 }
1856 func rewriteValueMIPS64_OpLess8U(v *Value) bool {
1857 v_1 := v.Args[1]
1858 v_0 := v.Args[0]
1859 b := v.Block
1860 typ := &b.Func.Config.Types
// match: (Less8U x y)
// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
1863 for {
1864 x := v_0
1865 y := v_1
1866 v.reset(OpMIPS64SGTU)
1867 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1868 v0.AddArg(y)
1869 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1870 v1.AddArg(x)
1871 v.AddArg2(v0, v1)
1872 return true
1873 }
1874 }
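// Load dispatches purely on the loaded type: width and signedness select among
// the MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWU loads, 64-bit integers and pointers use
// MOVVload, and floats use MOVFload/MOVDload.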
1875 func rewriteValueMIPS64_OpLoad(v *Value) bool {
1876 v_1 := v.Args[1]
1877 v_0 := v.Args[0]
// match: (Load <t> ptr mem)
// cond: t.IsBoolean()
// result: (MOVBUload ptr mem)
1881 for {
1882 t := v.Type
1883 ptr := v_0
1884 mem := v_1
1885 if !(t.IsBoolean()) {
1886 break
1887 }
1888 v.reset(OpMIPS64MOVBUload)
1889 v.AddArg2(ptr, mem)
1890 return true
1891 }
// match: (Load <t> ptr mem)
// cond: is8BitInt(t) && t.IsSigned()
// result: (MOVBload ptr mem)
1895 for {
1896 t := v.Type
1897 ptr := v_0
1898 mem := v_1
1899 if !(is8BitInt(t) && t.IsSigned()) {
1900 break
1901 }
1902 v.reset(OpMIPS64MOVBload)
1903 v.AddArg2(ptr, mem)
1904 return true
1905 }
// match: (Load <t> ptr mem)
// cond: is8BitInt(t) && !t.IsSigned()
// result: (MOVBUload ptr mem)
1909 for {
1910 t := v.Type
1911 ptr := v_0
1912 mem := v_1
1913 if !(is8BitInt(t) && !t.IsSigned()) {
1914 break
1915 }
1916 v.reset(OpMIPS64MOVBUload)
1917 v.AddArg2(ptr, mem)
1918 return true
1919 }
// match: (Load <t> ptr mem)
// cond: is16BitInt(t) && t.IsSigned()
// result: (MOVHload ptr mem)
1923 for {
1924 t := v.Type
1925 ptr := v_0
1926 mem := v_1
1927 if !(is16BitInt(t) && t.IsSigned()) {
1928 break
1929 }
1930 v.reset(OpMIPS64MOVHload)
1931 v.AddArg2(ptr, mem)
1932 return true
1933 }
// match: (Load <t> ptr mem)
// cond: is16BitInt(t) && !t.IsSigned()
// result: (MOVHUload ptr mem)
1937 for {
1938 t := v.Type
1939 ptr := v_0
1940 mem := v_1
1941 if !(is16BitInt(t) && !t.IsSigned()) {
1942 break
1943 }
1944 v.reset(OpMIPS64MOVHUload)
1945 v.AddArg2(ptr, mem)
1946 return true
1947 }
// match: (Load <t> ptr mem)
// cond: is32BitInt(t) && t.IsSigned()
// result: (MOVWload ptr mem)
1951 for {
1952 t := v.Type
1953 ptr := v_0
1954 mem := v_1
1955 if !(is32BitInt(t) && t.IsSigned()) {
1956 break
1957 }
1958 v.reset(OpMIPS64MOVWload)
1959 v.AddArg2(ptr, mem)
1960 return true
1961 }
// match: (Load <t> ptr mem)
// cond: is32BitInt(t) && !t.IsSigned()
// result: (MOVWUload ptr mem)
1965 for {
1966 t := v.Type
1967 ptr := v_0
1968 mem := v_1
1969 if !(is32BitInt(t) && !t.IsSigned()) {
1970 break
1971 }
1972 v.reset(OpMIPS64MOVWUload)
1973 v.AddArg2(ptr, mem)
1974 return true
1975 }
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) || isPtr(t)
// result: (MOVVload ptr mem)
1979 for {
1980 t := v.Type
1981 ptr := v_0
1982 mem := v_1
1983 if !(is64BitInt(t) || isPtr(t)) {
1984 break
1985 }
1986 v.reset(OpMIPS64MOVVload)
1987 v.AddArg2(ptr, mem)
1988 return true
1989 }
// match: (Load <t> ptr mem)
// cond: is32BitFloat(t)
// result: (MOVFload ptr mem)
1993 for {
1994 t := v.Type
1995 ptr := v_0
1996 mem := v_1
1997 if !(is32BitFloat(t)) {
1998 break
1999 }
2000 v.reset(OpMIPS64MOVFload)
2001 v.AddArg2(ptr, mem)
2002 return true
2003 }
// match: (Load <t> ptr mem)
// cond: is64BitFloat(t)
// result: (MOVDload ptr mem)
2007 for {
2008 t := v.Type
2009 ptr := v_0
2010 mem := v_1
2011 if !(is64BitFloat(t)) {
2012 break
2013 }
2014 v.reset(OpMIPS64MOVDload)
2015 v.AddArg2(ptr, mem)
2016 return true
2017 }
2018 return false
2019 }
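// LocalAddr of a slot whose element type contains pointers threads the frame
// address through SPanchored so the address stays ordered with the memory
// state it was taken under; pointer-free slots get a plain MOVVaddr off the
// base.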
2020 func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
2021 v_1 := v.Args[1]
2022 v_0 := v.Args[0]
2023 b := v.Block
2024 typ := &b.Func.Config.Types
// match: (LocalAddr <t> {sym} base mem)
// cond: t.Elem().HasPointers()
// result: (MOVVaddr {sym} (SPanchored base mem))
2028 for {
2029 t := v.Type
2030 sym := auxToSym(v.Aux)
2031 base := v_0
2032 mem := v_1
2033 if !(t.Elem().HasPointers()) {
2034 break
2035 }
2036 v.reset(OpMIPS64MOVVaddr)
2037 v.Aux = symToAux(sym)
2038 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
2039 v0.AddArg2(base, mem)
2040 v.AddArg(v0)
2041 return true
2042 }
// match: (LocalAddr <t> {sym} base _)
// cond: !t.Elem().HasPointers()
// result: (MOVVaddr {sym} base)
2046 for {
2047 t := v.Type
2048 sym := auxToSym(v.Aux)
2049 base := v_0
2050 if !(!t.Elem().HasPointers()) {
2051 break
2052 }
2053 v.reset(OpMIPS64MOVVaddr)
2054 v.Aux = symToAux(sym)
2055 v.AddArg(base)
2056 return true
2057 }
2058 return false
2059 }
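// The Lsh* lowerings must produce 0 for shift counts >= 64 (Go semantics),
// while the hardware SLLV only looks at the low six bits of the count. The
// shifted value is therefore ANDed with NEGV(SGTU 64 y): all ones when y < 64,
// zero otherwise.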
2060 func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
2061 v_1 := v.Args[1]
2062 v_0 := v.Args[0]
2063 b := v.Block
2064 typ := &b.Func.Config.Types
// match: (Lsh16x16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2067 for {
2068 t := v.Type
2069 x := v_0
2070 y := v_1
2071 v.reset(OpMIPS64AND)
2072 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2073 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2074 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2075 v2.AuxInt = int64ToAuxInt(64)
2076 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2077 v3.AddArg(y)
2078 v1.AddArg2(v2, v3)
2079 v0.AddArg(v1)
2080 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2081 v4.AddArg2(x, v3)
2082 v.AddArg2(v0, v4)
2083 return true
2084 }
2085 }
2086 func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
2087 v_1 := v.Args[1]
2088 v_0 := v.Args[0]
2089 b := v.Block
2090 typ := &b.Func.Config.Types
// match: (Lsh16x32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2093 for {
2094 t := v.Type
2095 x := v_0
2096 y := v_1
2097 v.reset(OpMIPS64AND)
2098 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2099 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2100 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2101 v2.AuxInt = int64ToAuxInt(64)
2102 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2103 v3.AddArg(y)
2104 v1.AddArg2(v2, v3)
2105 v0.AddArg(v1)
2106 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2107 v4.AddArg2(x, v3)
2108 v.AddArg2(v0, v4)
2109 return true
2110 }
2111 }
2112 func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
2113 v_1 := v.Args[1]
2114 v_0 := v.Args[0]
2115 b := v.Block
2116 typ := &b.Func.Config.Types
// match: (Lsh16x64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2119 for {
2120 t := v.Type
2121 x := v_0
2122 y := v_1
2123 v.reset(OpMIPS64AND)
2124 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2125 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2126 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2127 v2.AuxInt = int64ToAuxInt(64)
2128 v1.AddArg2(v2, y)
2129 v0.AddArg(v1)
2130 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2131 v3.AddArg2(x, y)
2132 v.AddArg2(v0, v3)
2133 return true
2134 }
2135 }
2136 func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
2137 v_1 := v.Args[1]
2138 v_0 := v.Args[0]
2139 b := v.Block
2140 typ := &b.Func.Config.Types
// match: (Lsh16x8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2143 for {
2144 t := v.Type
2145 x := v_0
2146 y := v_1
2147 v.reset(OpMIPS64AND)
2148 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2149 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2150 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2151 v2.AuxInt = int64ToAuxInt(64)
2152 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2153 v3.AddArg(y)
2154 v1.AddArg2(v2, v3)
2155 v0.AddArg(v1)
2156 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2157 v4.AddArg2(x, v3)
2158 v.AddArg2(v0, v4)
2159 return true
2160 }
2161 }
2162 func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
2163 v_1 := v.Args[1]
2164 v_0 := v.Args[0]
2165 b := v.Block
2166 typ := &b.Func.Config.Types
// match: (Lsh32x16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2169 for {
2170 t := v.Type
2171 x := v_0
2172 y := v_1
2173 v.reset(OpMIPS64AND)
2174 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2175 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2176 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2177 v2.AuxInt = int64ToAuxInt(64)
2178 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2179 v3.AddArg(y)
2180 v1.AddArg2(v2, v3)
2181 v0.AddArg(v1)
2182 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2183 v4.AddArg2(x, v3)
2184 v.AddArg2(v0, v4)
2185 return true
2186 }
2187 }
2188 func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
2189 v_1 := v.Args[1]
2190 v_0 := v.Args[0]
2191 b := v.Block
2192 typ := &b.Func.Config.Types
// match: (Lsh32x32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2195 for {
2196 t := v.Type
2197 x := v_0
2198 y := v_1
2199 v.reset(OpMIPS64AND)
2200 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2201 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2202 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2203 v2.AuxInt = int64ToAuxInt(64)
2204 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2205 v3.AddArg(y)
2206 v1.AddArg2(v2, v3)
2207 v0.AddArg(v1)
2208 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2209 v4.AddArg2(x, v3)
2210 v.AddArg2(v0, v4)
2211 return true
2212 }
2213 }
2214 func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
2215 v_1 := v.Args[1]
2216 v_0 := v.Args[0]
2217 b := v.Block
2218 typ := &b.Func.Config.Types
// match: (Lsh32x64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2221 for {
2222 t := v.Type
2223 x := v_0
2224 y := v_1
2225 v.reset(OpMIPS64AND)
2226 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2227 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2228 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2229 v2.AuxInt = int64ToAuxInt(64)
2230 v1.AddArg2(v2, y)
2231 v0.AddArg(v1)
2232 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2233 v3.AddArg2(x, y)
2234 v.AddArg2(v0, v3)
2235 return true
2236 }
2237 }
2238 func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
2239 v_1 := v.Args[1]
2240 v_0 := v.Args[0]
2241 b := v.Block
2242 typ := &b.Func.Config.Types
// match: (Lsh32x8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2245 for {
2246 t := v.Type
2247 x := v_0
2248 y := v_1
2249 v.reset(OpMIPS64AND)
2250 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2251 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2252 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2253 v2.AuxInt = int64ToAuxInt(64)
2254 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2255 v3.AddArg(y)
2256 v1.AddArg2(v2, v3)
2257 v0.AddArg(v1)
2258 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2259 v4.AddArg2(x, v3)
2260 v.AddArg2(v0, v4)
2261 return true
2262 }
2263 }
2264 func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
2265 v_1 := v.Args[1]
2266 v_0 := v.Args[0]
2267 b := v.Block
2268 typ := &b.Func.Config.Types
// match: (Lsh64x16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2271 for {
2272 t := v.Type
2273 x := v_0
2274 y := v_1
2275 v.reset(OpMIPS64AND)
2276 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2277 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2278 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2279 v2.AuxInt = int64ToAuxInt(64)
2280 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2281 v3.AddArg(y)
2282 v1.AddArg2(v2, v3)
2283 v0.AddArg(v1)
2284 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2285 v4.AddArg2(x, v3)
2286 v.AddArg2(v0, v4)
2287 return true
2288 }
2289 }
2290 func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
2291 v_1 := v.Args[1]
2292 v_0 := v.Args[0]
2293 b := v.Block
2294 typ := &b.Func.Config.Types
2295
2296
2297 for {
2298 t := v.Type
2299 x := v_0
2300 y := v_1
2301 v.reset(OpMIPS64AND)
2302 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2303 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2304 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2305 v2.AuxInt = int64ToAuxInt(64)
2306 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2307 v3.AddArg(y)
2308 v1.AddArg2(v2, v3)
2309 v0.AddArg(v1)
2310 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2311 v4.AddArg2(x, v3)
2312 v.AddArg2(v0, v4)
2313 return true
2314 }
2315 }
2316 func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
2317 v_1 := v.Args[1]
2318 v_0 := v.Args[0]
2319 b := v.Block
2320 typ := &b.Func.Config.Types
2321
2322
2323 for {
2324 t := v.Type
2325 x := v_0
2326 y := v_1
2327 v.reset(OpMIPS64AND)
2328 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2329 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2330 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2331 v2.AuxInt = int64ToAuxInt(64)
2332 v1.AddArg2(v2, y)
2333 v0.AddArg(v1)
2334 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2335 v3.AddArg2(x, y)
2336 v.AddArg2(v0, v3)
2337 return true
2338 }
2339 }
2340 func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
2341 v_1 := v.Args[1]
2342 v_0 := v.Args[0]
2343 b := v.Block
2344 typ := &b.Func.Config.Types
2345
2346
2347 for {
2348 t := v.Type
2349 x := v_0
2350 y := v_1
2351 v.reset(OpMIPS64AND)
2352 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2353 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2354 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2355 v2.AuxInt = int64ToAuxInt(64)
2356 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2357 v3.AddArg(y)
2358 v1.AddArg2(v2, v3)
2359 v0.AddArg(v1)
2360 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2361 v4.AddArg2(x, v3)
2362 v.AddArg2(v0, v4)
2363 return true
2364 }
2365 }
2366 func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
2367 v_1 := v.Args[1]
2368 v_0 := v.Args[0]
2369 b := v.Block
2370 typ := &b.Func.Config.Types
2371
2372
2373 for {
2374 t := v.Type
2375 x := v_0
2376 y := v_1
2377 v.reset(OpMIPS64AND)
2378 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2379 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2380 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2381 v2.AuxInt = int64ToAuxInt(64)
2382 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2383 v3.AddArg(y)
2384 v1.AddArg2(v2, v3)
2385 v0.AddArg(v1)
2386 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2387 v4.AddArg2(x, v3)
2388 v.AddArg2(v0, v4)
2389 return true
2390 }
2391 }
2392 func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
2393 v_1 := v.Args[1]
2394 v_0 := v.Args[0]
2395 b := v.Block
2396 typ := &b.Func.Config.Types
2397
2398
2399 for {
2400 t := v.Type
2401 x := v_0
2402 y := v_1
2403 v.reset(OpMIPS64AND)
2404 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2405 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2406 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2407 v2.AuxInt = int64ToAuxInt(64)
2408 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2409 v3.AddArg(y)
2410 v1.AddArg2(v2, v3)
2411 v0.AddArg(v1)
2412 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2413 v4.AddArg2(x, v3)
2414 v.AddArg2(v0, v4)
2415 return true
2416 }
2417 }
2418 func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
2419 v_1 := v.Args[1]
2420 v_0 := v.Args[0]
2421 b := v.Block
2422 typ := &b.Func.Config.Types
2423
2424
2425 for {
2426 t := v.Type
2427 x := v_0
2428 y := v_1
2429 v.reset(OpMIPS64AND)
2430 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2431 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2432 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2433 v2.AuxInt = int64ToAuxInt(64)
2434 v1.AddArg2(v2, y)
2435 v0.AddArg(v1)
2436 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2437 v3.AddArg2(x, y)
2438 v.AddArg2(v0, v3)
2439 return true
2440 }
2441 }
2442 func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
2443 v_1 := v.Args[1]
2444 v_0 := v.Args[0]
2445 b := v.Block
2446 typ := &b.Func.Config.Types
2447
2448
2449 for {
2450 t := v.Type
2451 x := v_0
2452 y := v_1
2453 v.reset(OpMIPS64AND)
2454 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2455 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2456 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2457 v2.AuxInt = int64ToAuxInt(64)
2458 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2459 v3.AddArg(y)
2460 v1.AddArg2(v2, v3)
2461 v0.AddArg(v1)
2462 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2463 v4.AddArg2(x, v3)
2464 v.AddArg2(v0, v4)
2465 return true
2466 }
2467 }
2468 func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
2469 v_1 := v.Args[1]
2470 v_0 := v.Args[0]
2471
2472
2473
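// match: (ADDV x (MOVVconst <t> [c]))
// cond: is32Bit(c) && !t.IsPtr()
// result: (ADDVconst [c] x)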
2474 for {
2475 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2476 x := v_0
2477 if v_1.Op != OpMIPS64MOVVconst {
2478 continue
2479 }
2480 t := v_1.Type
2481 c := auxIntToInt64(v_1.AuxInt)
2482 if !(is32Bit(c) && !t.IsPtr()) {
2483 continue
2484 }
2485 v.reset(OpMIPS64ADDVconst)
2486 v.AuxInt = int64ToAuxInt(c)
2487 v.AddArg(x)
2488 return true
2489 }
2490 break
2491 }
2492
2493
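// match: (ADDV x (NEGV y))
// result: (SUBV x y)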
2494 for {
2495 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2496 x := v_0
2497 if v_1.Op != OpMIPS64NEGV {
2498 continue
2499 }
2500 y := v_1.Args[0]
2501 v.reset(OpMIPS64SUBV)
2502 v.AddArg2(x, y)
2503 return true
2504 }
2505 break
2506 }
2507 return false
2508 }
2509 func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
2510 v_0 := v.Args[0]
2511
2512
2513
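// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
// cond: is32Bit(off1+int64(off2))
// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)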
2514 for {
2515 off1 := auxIntToInt64(v.AuxInt)
2516 if v_0.Op != OpMIPS64MOVVaddr {
2517 break
2518 }
2519 off2 := auxIntToInt32(v_0.AuxInt)
2520 sym := auxToSym(v_0.Aux)
2521 ptr := v_0.Args[0]
2522 if !(is32Bit(off1 + int64(off2))) {
2523 break
2524 }
2525 v.reset(OpMIPS64MOVVaddr)
2526 v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
2527 v.Aux = symToAux(sym)
2528 v.AddArg(ptr)
2529 return true
2530 }
2531
2532
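// match: (ADDVconst [0] x)
// result: x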
2533 for {
2534 if auxIntToInt64(v.AuxInt) != 0 {
2535 break
2536 }
2537 x := v_0
2538 v.copyOf(x)
2539 return true
2540 }
2541
2542
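// match: (ADDVconst [c] (MOVVconst [d]))
// result: (MOVVconst [c+d])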
2543 for {
2544 c := auxIntToInt64(v.AuxInt)
2545 if v_0.Op != OpMIPS64MOVVconst {
2546 break
2547 }
2548 d := auxIntToInt64(v_0.AuxInt)
2549 v.reset(OpMIPS64MOVVconst)
2550 v.AuxInt = int64ToAuxInt(c + d)
2551 return true
2552 }
2553
2554
2555
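// match: (ADDVconst [c] (ADDVconst [d] x))
// cond: is32Bit(c+d)
// result: (ADDVconst [c+d] x)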
2556 for {
2557 c := auxIntToInt64(v.AuxInt)
2558 if v_0.Op != OpMIPS64ADDVconst {
2559 break
2560 }
2561 d := auxIntToInt64(v_0.AuxInt)
2562 x := v_0.Args[0]
2563 if !(is32Bit(c + d)) {
2564 break
2565 }
2566 v.reset(OpMIPS64ADDVconst)
2567 v.AuxInt = int64ToAuxInt(c + d)
2568 v.AddArg(x)
2569 return true
2570 }
2571
2572
2573
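// match: (ADDVconst [c] (SUBVconst [d] x))
// cond: is32Bit(c-d)
// result: (ADDVconst [c-d] x)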
2574 for {
2575 c := auxIntToInt64(v.AuxInt)
2576 if v_0.Op != OpMIPS64SUBVconst {
2577 break
2578 }
2579 d := auxIntToInt64(v_0.AuxInt)
2580 x := v_0.Args[0]
2581 if !(is32Bit(c - d)) {
2582 break
2583 }
2584 v.reset(OpMIPS64ADDVconst)
2585 v.AuxInt = int64ToAuxInt(c - d)
2586 v.AddArg(x)
2587 return true
2588 }
2589 return false
2590 }
2591 func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
2592 v_1 := v.Args[1]
2593 v_0 := v.Args[0]
2594
2595
2596
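// match: (AND x (MOVVconst [c]))
// cond: is32Bit(c)
// result: (ANDconst [c] x)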
2597 for {
2598 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2599 x := v_0
2600 if v_1.Op != OpMIPS64MOVVconst {
2601 continue
2602 }
2603 c := auxIntToInt64(v_1.AuxInt)
2604 if !(is32Bit(c)) {
2605 continue
2606 }
2607 v.reset(OpMIPS64ANDconst)
2608 v.AuxInt = int64ToAuxInt(c)
2609 v.AddArg(x)
2610 return true
2611 }
2612 break
2613 }
2614
2615
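// match: (AND x x)
// result: x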
2616 for {
2617 x := v_0
2618 if x != v_1 {
2619 break
2620 }
2621 v.copyOf(x)
2622 return true
2623 }
2624 return false
2625 }
2626 func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
2627 v_0 := v.Args[0]
2628
2629
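// match: (ANDconst [0] _)
// result: (MOVVconst [0])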
2630 for {
2631 if auxIntToInt64(v.AuxInt) != 0 {
2632 break
2633 }
2634 v.reset(OpMIPS64MOVVconst)
2635 v.AuxInt = int64ToAuxInt(0)
2636 return true
2637 }
2638
2639
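// match: (ANDconst [-1] x)
// result: x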
2640 for {
2641 if auxIntToInt64(v.AuxInt) != -1 {
2642 break
2643 }
2644 x := v_0
2645 v.copyOf(x)
2646 return true
2647 }
2648
2649
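// match: (ANDconst [c] (MOVVconst [d]))
// result: (MOVVconst [c&d])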
2650 for {
2651 c := auxIntToInt64(v.AuxInt)
2652 if v_0.Op != OpMIPS64MOVVconst {
2653 break
2654 }
2655 d := auxIntToInt64(v_0.AuxInt)
2656 v.reset(OpMIPS64MOVVconst)
2657 v.AuxInt = int64ToAuxInt(c & d)
2658 return true
2659 }
2660
2661
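// match: (ANDconst [c] (ANDconst [d] x))
// result: (ANDconst [c&d] x)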
2662 for {
2663 c := auxIntToInt64(v.AuxInt)
2664 if v_0.Op != OpMIPS64ANDconst {
2665 break
2666 }
2667 d := auxIntToInt64(v_0.AuxInt)
2668 x := v_0.Args[0]
2669 v.reset(OpMIPS64ANDconst)
2670 v.AuxInt = int64ToAuxInt(c & d)
2671 v.AddArg(x)
2672 return true
2673 }
2674 return false
2675 }
2676 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
2677 v_2 := v.Args[2]
2678 v_1 := v.Args[1]
2679 v_0 := v.Args[0]
2680
2681
2682
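// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
// cond: is32Bit(c)
// result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)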
2683 for {
2684 ptr := v_0
2685 if v_1.Op != OpMIPS64MOVVconst {
2686 break
2687 }
2688 c := auxIntToInt64(v_1.AuxInt)
2689 mem := v_2
2690 if !(is32Bit(c)) {
2691 break
2692 }
2693 v.reset(OpMIPS64LoweredAtomicAddconst32)
2694 v.AuxInt = int32ToAuxInt(int32(c))
2695 v.AddArg2(ptr, mem)
2696 return true
2697 }
2698 return false
2699 }
2700 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
2701 v_2 := v.Args[2]
2702 v_1 := v.Args[1]
2703 v_0 := v.Args[0]
2704
2705
2706
2707 for {
2708 ptr := v_0
2709 if v_1.Op != OpMIPS64MOVVconst {
2710 break
2711 }
2712 c := auxIntToInt64(v_1.AuxInt)
2713 mem := v_2
2714 if !(is32Bit(c)) {
2715 break
2716 }
2717 v.reset(OpMIPS64LoweredAtomicAddconst64)
2718 v.AuxInt = int64ToAuxInt(c)
2719 v.AddArg2(ptr, mem)
2720 return true
2721 }
2722 return false
2723 }
2724 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
2725 v_2 := v.Args[2]
2726 v_1 := v.Args[1]
2727 v_0 := v.Args[0]
2728
2729
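// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
// result: (LoweredAtomicStorezero32 ptr mem)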
2730 for {
2731 ptr := v_0
2732 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2733 break
2734 }
2735 mem := v_2
2736 v.reset(OpMIPS64LoweredAtomicStorezero32)
2737 v.AddArg2(ptr, mem)
2738 return true
2739 }
2740 return false
2741 }
2742 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
2743 v_2 := v.Args[2]
2744 v_1 := v.Args[1]
2745 v_0 := v.Args[0]
2746
2747
2748 for {
2749 ptr := v_0
2750 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2751 break
2752 }
2753 mem := v_2
2754 v.reset(OpMIPS64LoweredAtomicStorezero64)
2755 v.AddArg2(ptr, mem)
2756 return true
2757 }
2758 return false
2759 }
2760 func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
2761 v_1 := v.Args[1]
2762 v_0 := v.Args[0]
2763 b := v.Block
2764 config := b.Func.Config
2765
2766
2767
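// Fold the ADDVconst offset into the load while the sum still fits in 32 bits;
// folding off SB is skipped in shared builds, presumably because SB-relative
// addressing is avoided under PIC.
// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)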
2768 for {
2769 off1 := auxIntToInt32(v.AuxInt)
2770 sym := auxToSym(v.Aux)
2771 if v_0.Op != OpMIPS64ADDVconst {
2772 break
2773 }
2774 off2 := auxIntToInt64(v_0.AuxInt)
2775 ptr := v_0.Args[0]
2776 mem := v_1
2777 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2778 break
2779 }
2780 v.reset(OpMIPS64MOVBUload)
2781 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2782 v.Aux = symToAux(sym)
2783 v.AddArg2(ptr, mem)
2784 return true
2785 }
2786
2787
2788
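// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
// cond: canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)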
2789 for {
2790 off1 := auxIntToInt32(v.AuxInt)
2791 sym1 := auxToSym(v.Aux)
2792 if v_0.Op != OpMIPS64MOVVaddr {
2793 break
2794 }
2795 off2 := auxIntToInt32(v_0.AuxInt)
2796 sym2 := auxToSym(v_0.Aux)
2797 ptr := v_0.Args[0]
2798 mem := v_1
2799 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2800 break
2801 }
2802 v.reset(OpMIPS64MOVBUload)
2803 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2804 v.Aux = symToAux(mergeSym(sym1, sym2))
2805 v.AddArg2(ptr, mem)
2806 return true
2807 }
2808
2809
2810
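// Constant-fold loads from read-only symbols.
// match: (MOVBUload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(read8(sym, int64(off)))])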
2811 for {
2812 off := auxIntToInt32(v.AuxInt)
2813 sym := auxToSym(v.Aux)
2814 if v_0.Op != OpSB || !(symIsRO(sym)) {
2815 break
2816 }
2817 v.reset(OpMIPS64MOVVconst)
2818 v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
2819 return true
2820 }
2821 return false
2822 }
2823 func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
2824 v_0 := v.Args[0]
2825
2826
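// match: (MOVBUreg x:(MOVBUload _ _))
// result: (MOVVreg x)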
2827 for {
2828 x := v_0
2829 if x.Op != OpMIPS64MOVBUload {
2830 break
2831 }
2832 v.reset(OpMIPS64MOVVreg)
2833 v.AddArg(x)
2834 return true
2835 }
2836
2837
2838 for {
2839 x := v_0
2840 if x.Op != OpMIPS64MOVBUreg {
2841 break
2842 }
2843 v.reset(OpMIPS64MOVVreg)
2844 v.AddArg(x)
2845 return true
2846 }
2847
2848
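// match: (MOVBUreg (MOVVconst [c]))
// result: (MOVVconst [int64(uint8(c))])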
2849 for {
2850 if v_0.Op != OpMIPS64MOVVconst {
2851 break
2852 }
2853 c := auxIntToInt64(v_0.AuxInt)
2854 v.reset(OpMIPS64MOVVconst)
2855 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
2856 return true
2857 }
2858 return false
2859 }
2860 func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
2861 v_1 := v.Args[1]
2862 v_0 := v.Args[0]
2863 b := v.Block
2864 config := b.Func.Config
2865
2866
2867
2868 for {
2869 off1 := auxIntToInt32(v.AuxInt)
2870 sym := auxToSym(v.Aux)
2871 if v_0.Op != OpMIPS64ADDVconst {
2872 break
2873 }
2874 off2 := auxIntToInt64(v_0.AuxInt)
2875 ptr := v_0.Args[0]
2876 mem := v_1
2877 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2878 break
2879 }
2880 v.reset(OpMIPS64MOVBload)
2881 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2882 v.Aux = symToAux(sym)
2883 v.AddArg2(ptr, mem)
2884 return true
2885 }
2886
2887
2888
2889 for {
2890 off1 := auxIntToInt32(v.AuxInt)
2891 sym1 := auxToSym(v.Aux)
2892 if v_0.Op != OpMIPS64MOVVaddr {
2893 break
2894 }
2895 off2 := auxIntToInt32(v_0.AuxInt)
2896 sym2 := auxToSym(v_0.Aux)
2897 ptr := v_0.Args[0]
2898 mem := v_1
2899 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2900 break
2901 }
2902 v.reset(OpMIPS64MOVBload)
2903 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2904 v.Aux = symToAux(mergeSym(sym1, sym2))
2905 v.AddArg2(ptr, mem)
2906 return true
2907 }
2908
2909
2910
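// match: (MOVBload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(int8(read8(sym, int64(off))))])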
2911 for {
2912 off := auxIntToInt32(v.AuxInt)
2913 sym := auxToSym(v.Aux)
2914 if v_0.Op != OpSB || !(symIsRO(sym)) {
2915 break
2916 }
2917 v.reset(OpMIPS64MOVVconst)
2918 v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
2919 return true
2920 }
2921 return false
2922 }
2923 func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
2924 v_0 := v.Args[0]
2925
2926
2927 for {
2928 x := v_0
2929 if x.Op != OpMIPS64MOVBload {
2930 break
2931 }
2932 v.reset(OpMIPS64MOVVreg)
2933 v.AddArg(x)
2934 return true
2935 }
2936
2937
2938 for {
2939 x := v_0
2940 if x.Op != OpMIPS64MOVBreg {
2941 break
2942 }
2943 v.reset(OpMIPS64MOVVreg)
2944 v.AddArg(x)
2945 return true
2946 }
2947
2948
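// match: (MOVBreg (MOVVconst [c]))
// result: (MOVVconst [int64(int8(c))])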
2949 for {
2950 if v_0.Op != OpMIPS64MOVVconst {
2951 break
2952 }
2953 c := auxIntToInt64(v_0.AuxInt)
2954 v.reset(OpMIPS64MOVVconst)
2955 v.AuxInt = int64ToAuxInt(int64(int8(c)))
2956 return true
2957 }
2958 return false
2959 }
2960 func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
2961 v_2 := v.Args[2]
2962 v_1 := v.Args[1]
2963 v_0 := v.Args[0]
2964 b := v.Block
2965 config := b.Func.Config
2966
2967
2968
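// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)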
2969 for {
2970 off1 := auxIntToInt32(v.AuxInt)
2971 sym := auxToSym(v.Aux)
2972 if v_0.Op != OpMIPS64ADDVconst {
2973 break
2974 }
2975 off2 := auxIntToInt64(v_0.AuxInt)
2976 ptr := v_0.Args[0]
2977 val := v_1
2978 mem := v_2
2979 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2980 break
2981 }
2982 v.reset(OpMIPS64MOVBstore)
2983 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2984 v.Aux = symToAux(sym)
2985 v.AddArg3(ptr, val, mem)
2986 return true
2987 }
2988
2989
2990
2991 for {
2992 off1 := auxIntToInt32(v.AuxInt)
2993 sym1 := auxToSym(v.Aux)
2994 if v_0.Op != OpMIPS64MOVVaddr {
2995 break
2996 }
2997 off2 := auxIntToInt32(v_0.AuxInt)
2998 sym2 := auxToSym(v_0.Aux)
2999 ptr := v_0.Args[0]
3000 val := v_1
3001 mem := v_2
3002 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3003 break
3004 }
3005 v.reset(OpMIPS64MOVBstore)
3006 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3007 v.Aux = symToAux(mergeSym(sym1, sym2))
3008 v.AddArg3(ptr, val, mem)
3009 return true
3010 }
3011
3012
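// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
// result: (MOVBstorezero [off] {sym} ptr mem)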
3013 for {
3014 off := auxIntToInt32(v.AuxInt)
3015 sym := auxToSym(v.Aux)
3016 ptr := v_0
3017 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3018 break
3019 }
3020 mem := v_2
3021 v.reset(OpMIPS64MOVBstorezero)
3022 v.AuxInt = int32ToAuxInt(off)
3023 v.Aux = symToAux(sym)
3024 v.AddArg2(ptr, mem)
3025 return true
3026 }
3027
3028
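// A byte store only reads the low 8 bits, so sign/zero extensions of the
// stored value are dropped here and in the rules that follow.
// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)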
3029 for {
3030 off := auxIntToInt32(v.AuxInt)
3031 sym := auxToSym(v.Aux)
3032 ptr := v_0
3033 if v_1.Op != OpMIPS64MOVBreg {
3034 break
3035 }
3036 x := v_1.Args[0]
3037 mem := v_2
3038 v.reset(OpMIPS64MOVBstore)
3039 v.AuxInt = int32ToAuxInt(off)
3040 v.Aux = symToAux(sym)
3041 v.AddArg3(ptr, x, mem)
3042 return true
3043 }
3044
3045
3046 for {
3047 off := auxIntToInt32(v.AuxInt)
3048 sym := auxToSym(v.Aux)
3049 ptr := v_0
3050 if v_1.Op != OpMIPS64MOVBUreg {
3051 break
3052 }
3053 x := v_1.Args[0]
3054 mem := v_2
3055 v.reset(OpMIPS64MOVBstore)
3056 v.AuxInt = int32ToAuxInt(off)
3057 v.Aux = symToAux(sym)
3058 v.AddArg3(ptr, x, mem)
3059 return true
3060 }
3061
3062
3063 for {
3064 off := auxIntToInt32(v.AuxInt)
3065 sym := auxToSym(v.Aux)
3066 ptr := v_0
3067 if v_1.Op != OpMIPS64MOVHreg {
3068 break
3069 }
3070 x := v_1.Args[0]
3071 mem := v_2
3072 v.reset(OpMIPS64MOVBstore)
3073 v.AuxInt = int32ToAuxInt(off)
3074 v.Aux = symToAux(sym)
3075 v.AddArg3(ptr, x, mem)
3076 return true
3077 }
3078
3079
3080 for {
3081 off := auxIntToInt32(v.AuxInt)
3082 sym := auxToSym(v.Aux)
3083 ptr := v_0
3084 if v_1.Op != OpMIPS64MOVHUreg {
3085 break
3086 }
3087 x := v_1.Args[0]
3088 mem := v_2
3089 v.reset(OpMIPS64MOVBstore)
3090 v.AuxInt = int32ToAuxInt(off)
3091 v.Aux = symToAux(sym)
3092 v.AddArg3(ptr, x, mem)
3093 return true
3094 }
3095
3096
3097 for {
3098 off := auxIntToInt32(v.AuxInt)
3099 sym := auxToSym(v.Aux)
3100 ptr := v_0
3101 if v_1.Op != OpMIPS64MOVWreg {
3102 break
3103 }
3104 x := v_1.Args[0]
3105 mem := v_2
3106 v.reset(OpMIPS64MOVBstore)
3107 v.AuxInt = int32ToAuxInt(off)
3108 v.Aux = symToAux(sym)
3109 v.AddArg3(ptr, x, mem)
3110 return true
3111 }
3112
3113
3114 for {
3115 off := auxIntToInt32(v.AuxInt)
3116 sym := auxToSym(v.Aux)
3117 ptr := v_0
3118 if v_1.Op != OpMIPS64MOVWUreg {
3119 break
3120 }
3121 x := v_1.Args[0]
3122 mem := v_2
3123 v.reset(OpMIPS64MOVBstore)
3124 v.AuxInt = int32ToAuxInt(off)
3125 v.Aux = symToAux(sym)
3126 v.AddArg3(ptr, x, mem)
3127 return true
3128 }
3129 return false
3130 }
3131 func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
3132 v_1 := v.Args[1]
3133 v_0 := v.Args[0]
3134 b := v.Block
3135 config := b.Func.Config
3136
3137
3138
3139 for {
3140 off1 := auxIntToInt32(v.AuxInt)
3141 sym := auxToSym(v.Aux)
3142 if v_0.Op != OpMIPS64ADDVconst {
3143 break
3144 }
3145 off2 := auxIntToInt64(v_0.AuxInt)
3146 ptr := v_0.Args[0]
3147 mem := v_1
3148 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3149 break
3150 }
3151 v.reset(OpMIPS64MOVBstorezero)
3152 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3153 v.Aux = symToAux(sym)
3154 v.AddArg2(ptr, mem)
3155 return true
3156 }
3157
3158
3159
3160 for {
3161 off1 := auxIntToInt32(v.AuxInt)
3162 sym1 := auxToSym(v.Aux)
3163 if v_0.Op != OpMIPS64MOVVaddr {
3164 break
3165 }
3166 off2 := auxIntToInt32(v_0.AuxInt)
3167 sym2 := auxToSym(v_0.Aux)
3168 ptr := v_0.Args[0]
3169 mem := v_1
3170 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3171 break
3172 }
3173 v.reset(OpMIPS64MOVBstorezero)
3174 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3175 v.Aux = symToAux(mergeSym(sym1, sym2))
3176 v.AddArg2(ptr, mem)
3177 return true
3178 }
3179 return false
3180 }
3181 func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
3182 v_1 := v.Args[1]
3183 v_0 := v.Args[0]
3184 b := v.Block
3185 config := b.Func.Config
3186
3187
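// A float64 load of a value that was just stored from an integer register can
// take the value straight from that register with a GP-to-FP move.
// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
// result: (MOVVgpfp val)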
3188 for {
3189 off := auxIntToInt32(v.AuxInt)
3190 sym := auxToSym(v.Aux)
3191 ptr := v_0
3192 if v_1.Op != OpMIPS64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3193 break
3194 }
3195 val := v_1.Args[1]
3196 if ptr != v_1.Args[0] {
3197 break
3198 }
3199 v.reset(OpMIPS64MOVVgpfp)
3200 v.AddArg(val)
3201 return true
3202 }
3203
3204
3205
3206 for {
3207 off1 := auxIntToInt32(v.AuxInt)
3208 sym := auxToSym(v.Aux)
3209 if v_0.Op != OpMIPS64ADDVconst {
3210 break
3211 }
3212 off2 := auxIntToInt64(v_0.AuxInt)
3213 ptr := v_0.Args[0]
3214 mem := v_1
3215 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3216 break
3217 }
3218 v.reset(OpMIPS64MOVDload)
3219 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3220 v.Aux = symToAux(sym)
3221 v.AddArg2(ptr, mem)
3222 return true
3223 }
3224
3225
3226
3227 for {
3228 off1 := auxIntToInt32(v.AuxInt)
3229 sym1 := auxToSym(v.Aux)
3230 if v_0.Op != OpMIPS64MOVVaddr {
3231 break
3232 }
3233 off2 := auxIntToInt32(v_0.AuxInt)
3234 sym2 := auxToSym(v_0.Aux)
3235 ptr := v_0.Args[0]
3236 mem := v_1
3237 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3238 break
3239 }
3240 v.reset(OpMIPS64MOVDload)
3241 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3242 v.Aux = symToAux(mergeSym(sym1, sym2))
3243 v.AddArg2(ptr, mem)
3244 return true
3245 }
3246 return false
3247 }
3248 func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
3249 v_2 := v.Args[2]
3250 v_1 := v.Args[1]
3251 v_0 := v.Args[0]
3252 b := v.Block
3253 config := b.Func.Config
3254
3255
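// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
// result: (MOVVstore [off] {sym} ptr val mem)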
3256 for {
3257 off := auxIntToInt32(v.AuxInt)
3258 sym := auxToSym(v.Aux)
3259 ptr := v_0
3260 if v_1.Op != OpMIPS64MOVVgpfp {
3261 break
3262 }
3263 val := v_1.Args[0]
3264 mem := v_2
3265 v.reset(OpMIPS64MOVVstore)
3266 v.AuxInt = int32ToAuxInt(off)
3267 v.Aux = symToAux(sym)
3268 v.AddArg3(ptr, val, mem)
3269 return true
3270 }
3271
3272
3273
3274 for {
3275 off1 := auxIntToInt32(v.AuxInt)
3276 sym := auxToSym(v.Aux)
3277 if v_0.Op != OpMIPS64ADDVconst {
3278 break
3279 }
3280 off2 := auxIntToInt64(v_0.AuxInt)
3281 ptr := v_0.Args[0]
3282 val := v_1
3283 mem := v_2
3284 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3285 break
3286 }
3287 v.reset(OpMIPS64MOVDstore)
3288 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3289 v.Aux = symToAux(sym)
3290 v.AddArg3(ptr, val, mem)
3291 return true
3292 }
3293
3294
3295
3296 for {
3297 off1 := auxIntToInt32(v.AuxInt)
3298 sym1 := auxToSym(v.Aux)
3299 if v_0.Op != OpMIPS64MOVVaddr {
3300 break
3301 }
3302 off2 := auxIntToInt32(v_0.AuxInt)
3303 sym2 := auxToSym(v_0.Aux)
3304 ptr := v_0.Args[0]
3305 val := v_1
3306 mem := v_2
3307 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3308 break
3309 }
3310 v.reset(OpMIPS64MOVDstore)
3311 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3312 v.Aux = symToAux(mergeSym(sym1, sym2))
3313 v.AddArg3(ptr, val, mem)
3314 return true
3315 }
3316 return false
3317 }
3318 func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
3319 v_1 := v.Args[1]
3320 v_0 := v.Args[0]
3321 b := v.Block
3322 config := b.Func.Config
3323
3324
3325 for {
3326 off := auxIntToInt32(v.AuxInt)
3327 sym := auxToSym(v.Aux)
3328 ptr := v_0
3329 if v_1.Op != OpMIPS64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3330 break
3331 }
3332 val := v_1.Args[1]
3333 if ptr != v_1.Args[0] {
3334 break
3335 }
3336 v.reset(OpMIPS64MOVWgpfp)
3337 v.AddArg(val)
3338 return true
3339 }
3340
3341
3342
3343 for {
3344 off1 := auxIntToInt32(v.AuxInt)
3345 sym := auxToSym(v.Aux)
3346 if v_0.Op != OpMIPS64ADDVconst {
3347 break
3348 }
3349 off2 := auxIntToInt64(v_0.AuxInt)
3350 ptr := v_0.Args[0]
3351 mem := v_1
3352 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3353 break
3354 }
3355 v.reset(OpMIPS64MOVFload)
3356 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3357 v.Aux = symToAux(sym)
3358 v.AddArg2(ptr, mem)
3359 return true
3360 }
3361
3362
3363
3364 for {
3365 off1 := auxIntToInt32(v.AuxInt)
3366 sym1 := auxToSym(v.Aux)
3367 if v_0.Op != OpMIPS64MOVVaddr {
3368 break
3369 }
3370 off2 := auxIntToInt32(v_0.AuxInt)
3371 sym2 := auxToSym(v_0.Aux)
3372 ptr := v_0.Args[0]
3373 mem := v_1
3374 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3375 break
3376 }
3377 v.reset(OpMIPS64MOVFload)
3378 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3379 v.Aux = symToAux(mergeSym(sym1, sym2))
3380 v.AddArg2(ptr, mem)
3381 return true
3382 }
3383 return false
3384 }
3385 func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
3386 v_2 := v.Args[2]
3387 v_1 := v.Args[1]
3388 v_0 := v.Args[0]
3389 b := v.Block
3390 config := b.Func.Config
3391
3392
3393 for {
3394 off := auxIntToInt32(v.AuxInt)
3395 sym := auxToSym(v.Aux)
3396 ptr := v_0
3397 if v_1.Op != OpMIPS64MOVWgpfp {
3398 break
3399 }
3400 val := v_1.Args[0]
3401 mem := v_2
3402 v.reset(OpMIPS64MOVWstore)
3403 v.AuxInt = int32ToAuxInt(off)
3404 v.Aux = symToAux(sym)
3405 v.AddArg3(ptr, val, mem)
3406 return true
3407 }
3408
3409
3410
3411 for {
3412 off1 := auxIntToInt32(v.AuxInt)
3413 sym := auxToSym(v.Aux)
3414 if v_0.Op != OpMIPS64ADDVconst {
3415 break
3416 }
3417 off2 := auxIntToInt64(v_0.AuxInt)
3418 ptr := v_0.Args[0]
3419 val := v_1
3420 mem := v_2
3421 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3422 break
3423 }
3424 v.reset(OpMIPS64MOVFstore)
3425 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3426 v.Aux = symToAux(sym)
3427 v.AddArg3(ptr, val, mem)
3428 return true
3429 }
3430
3431
3432
3433 for {
3434 off1 := auxIntToInt32(v.AuxInt)
3435 sym1 := auxToSym(v.Aux)
3436 if v_0.Op != OpMIPS64MOVVaddr {
3437 break
3438 }
3439 off2 := auxIntToInt32(v_0.AuxInt)
3440 sym2 := auxToSym(v_0.Aux)
3441 ptr := v_0.Args[0]
3442 val := v_1
3443 mem := v_2
3444 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3445 break
3446 }
3447 v.reset(OpMIPS64MOVFstore)
3448 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3449 v.Aux = symToAux(mergeSym(sym1, sym2))
3450 v.AddArg3(ptr, val, mem)
3451 return true
3452 }
3453 return false
3454 }
3455 func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
3456 v_1 := v.Args[1]
3457 v_0 := v.Args[0]
3458 b := v.Block
3459 config := b.Func.Config
3460
3461
3462
3463 for {
3464 off1 := auxIntToInt32(v.AuxInt)
3465 sym := auxToSym(v.Aux)
3466 if v_0.Op != OpMIPS64ADDVconst {
3467 break
3468 }
3469 off2 := auxIntToInt64(v_0.AuxInt)
3470 ptr := v_0.Args[0]
3471 mem := v_1
3472 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3473 break
3474 }
3475 v.reset(OpMIPS64MOVHUload)
3476 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3477 v.Aux = symToAux(sym)
3478 v.AddArg2(ptr, mem)
3479 return true
3480 }
3481
3482
3483
3484 for {
3485 off1 := auxIntToInt32(v.AuxInt)
3486 sym1 := auxToSym(v.Aux)
3487 if v_0.Op != OpMIPS64MOVVaddr {
3488 break
3489 }
3490 off2 := auxIntToInt32(v_0.AuxInt)
3491 sym2 := auxToSym(v_0.Aux)
3492 ptr := v_0.Args[0]
3493 mem := v_1
3494 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3495 break
3496 }
3497 v.reset(OpMIPS64MOVHUload)
3498 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3499 v.Aux = symToAux(mergeSym(sym1, sym2))
3500 v.AddArg2(ptr, mem)
3501 return true
3502 }
3503
3504
3505
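// match: (MOVHUload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])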
3506 for {
3507 off := auxIntToInt32(v.AuxInt)
3508 sym := auxToSym(v.Aux)
3509 if v_0.Op != OpSB || !(symIsRO(sym)) {
3510 break
3511 }
3512 v.reset(OpMIPS64MOVVconst)
3513 v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3514 return true
3515 }
3516 return false
3517 }
3518 func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
3519 v_0 := v.Args[0]
3520
3521
3522 for {
3523 x := v_0
3524 if x.Op != OpMIPS64MOVBUload {
3525 break
3526 }
3527 v.reset(OpMIPS64MOVVreg)
3528 v.AddArg(x)
3529 return true
3530 }
3531
3532
3533 for {
3534 x := v_0
3535 if x.Op != OpMIPS64MOVHUload {
3536 break
3537 }
3538 v.reset(OpMIPS64MOVVreg)
3539 v.AddArg(x)
3540 return true
3541 }
3542
3543
3544 for {
3545 x := v_0
3546 if x.Op != OpMIPS64MOVBUreg {
3547 break
3548 }
3549 v.reset(OpMIPS64MOVVreg)
3550 v.AddArg(x)
3551 return true
3552 }
3553
3554
3555 for {
3556 x := v_0
3557 if x.Op != OpMIPS64MOVHUreg {
3558 break
3559 }
3560 v.reset(OpMIPS64MOVVreg)
3561 v.AddArg(x)
3562 return true
3563 }
3564
3565
3566 for {
3567 if v_0.Op != OpMIPS64MOVVconst {
3568 break
3569 }
3570 c := auxIntToInt64(v_0.AuxInt)
3571 v.reset(OpMIPS64MOVVconst)
3572 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
3573 return true
3574 }
3575 return false
3576 }
3577 func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
3578 v_1 := v.Args[1]
3579 v_0 := v.Args[0]
3580 b := v.Block
3581 config := b.Func.Config
3582
3583
3584
3585 for {
3586 off1 := auxIntToInt32(v.AuxInt)
3587 sym := auxToSym(v.Aux)
3588 if v_0.Op != OpMIPS64ADDVconst {
3589 break
3590 }
3591 off2 := auxIntToInt64(v_0.AuxInt)
3592 ptr := v_0.Args[0]
3593 mem := v_1
3594 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3595 break
3596 }
3597 v.reset(OpMIPS64MOVHload)
3598 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3599 v.Aux = symToAux(sym)
3600 v.AddArg2(ptr, mem)
3601 return true
3602 }
3603
3604
3605
3606 for {
3607 off1 := auxIntToInt32(v.AuxInt)
3608 sym1 := auxToSym(v.Aux)
3609 if v_0.Op != OpMIPS64MOVVaddr {
3610 break
3611 }
3612 off2 := auxIntToInt32(v_0.AuxInt)
3613 sym2 := auxToSym(v_0.Aux)
3614 ptr := v_0.Args[0]
3615 mem := v_1
3616 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3617 break
3618 }
3619 v.reset(OpMIPS64MOVHload)
3620 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3621 v.Aux = symToAux(mergeSym(sym1, sym2))
3622 v.AddArg2(ptr, mem)
3623 return true
3624 }
3625
3626
3627
3628 for {
3629 off := auxIntToInt32(v.AuxInt)
3630 sym := auxToSym(v.Aux)
3631 if v_0.Op != OpSB || !(symIsRO(sym)) {
3632 break
3633 }
3634 v.reset(OpMIPS64MOVVconst)
3635 v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
3636 return true
3637 }
3638 return false
3639 }
3640 func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
3641 v_0 := v.Args[0]
3642
3643
3644 for {
3645 x := v_0
3646 if x.Op != OpMIPS64MOVBload {
3647 break
3648 }
3649 v.reset(OpMIPS64MOVVreg)
3650 v.AddArg(x)
3651 return true
3652 }
3653
3654
3655 for {
3656 x := v_0
3657 if x.Op != OpMIPS64MOVBUload {
3658 break
3659 }
3660 v.reset(OpMIPS64MOVVreg)
3661 v.AddArg(x)
3662 return true
3663 }
3664
3665
3666 for {
3667 x := v_0
3668 if x.Op != OpMIPS64MOVHload {
3669 break
3670 }
3671 v.reset(OpMIPS64MOVVreg)
3672 v.AddArg(x)
3673 return true
3674 }
3675
3676
3677 for {
3678 x := v_0
3679 if x.Op != OpMIPS64MOVBreg {
3680 break
3681 }
3682 v.reset(OpMIPS64MOVVreg)
3683 v.AddArg(x)
3684 return true
3685 }
3686
3687
3688 for {
3689 x := v_0
3690 if x.Op != OpMIPS64MOVBUreg {
3691 break
3692 }
3693 v.reset(OpMIPS64MOVVreg)
3694 v.AddArg(x)
3695 return true
3696 }
3697
3698
3699 for {
3700 x := v_0
3701 if x.Op != OpMIPS64MOVHreg {
3702 break
3703 }
3704 v.reset(OpMIPS64MOVVreg)
3705 v.AddArg(x)
3706 return true
3707 }
3708
3709
3710 for {
3711 if v_0.Op != OpMIPS64MOVVconst {
3712 break
3713 }
3714 c := auxIntToInt64(v_0.AuxInt)
3715 v.reset(OpMIPS64MOVVconst)
3716 v.AuxInt = int64ToAuxInt(int64(int16(c)))
3717 return true
3718 }
3719 return false
3720 }
3721 func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
3722 v_2 := v.Args[2]
3723 v_1 := v.Args[1]
3724 v_0 := v.Args[0]
3725 b := v.Block
3726 config := b.Func.Config
3727
3728
3729
3730 for {
3731 off1 := auxIntToInt32(v.AuxInt)
3732 sym := auxToSym(v.Aux)
3733 if v_0.Op != OpMIPS64ADDVconst {
3734 break
3735 }
3736 off2 := auxIntToInt64(v_0.AuxInt)
3737 ptr := v_0.Args[0]
3738 val := v_1
3739 mem := v_2
3740 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3741 break
3742 }
3743 v.reset(OpMIPS64MOVHstore)
3744 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3745 v.Aux = symToAux(sym)
3746 v.AddArg3(ptr, val, mem)
3747 return true
3748 }
3749
3750
3751
3752 for {
3753 off1 := auxIntToInt32(v.AuxInt)
3754 sym1 := auxToSym(v.Aux)
3755 if v_0.Op != OpMIPS64MOVVaddr {
3756 break
3757 }
3758 off2 := auxIntToInt32(v_0.AuxInt)
3759 sym2 := auxToSym(v_0.Aux)
3760 ptr := v_0.Args[0]
3761 val := v_1
3762 mem := v_2
3763 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3764 break
3765 }
3766 v.reset(OpMIPS64MOVHstore)
3767 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3768 v.Aux = symToAux(mergeSym(sym1, sym2))
3769 v.AddArg3(ptr, val, mem)
3770 return true
3771 }
3772
3773
3774 for {
3775 off := auxIntToInt32(v.AuxInt)
3776 sym := auxToSym(v.Aux)
3777 ptr := v_0
3778 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3779 break
3780 }
3781 mem := v_2
3782 v.reset(OpMIPS64MOVHstorezero)
3783 v.AuxInt = int32ToAuxInt(off)
3784 v.Aux = symToAux(sym)
3785 v.AddArg2(ptr, mem)
3786 return true
3787 }
3788
3789
3790 for {
3791 off := auxIntToInt32(v.AuxInt)
3792 sym := auxToSym(v.Aux)
3793 ptr := v_0
3794 if v_1.Op != OpMIPS64MOVHreg {
3795 break
3796 }
3797 x := v_1.Args[0]
3798 mem := v_2
3799 v.reset(OpMIPS64MOVHstore)
3800 v.AuxInt = int32ToAuxInt(off)
3801 v.Aux = symToAux(sym)
3802 v.AddArg3(ptr, x, mem)
3803 return true
3804 }
3805
3806
3807 for {
3808 off := auxIntToInt32(v.AuxInt)
3809 sym := auxToSym(v.Aux)
3810 ptr := v_0
3811 if v_1.Op != OpMIPS64MOVHUreg {
3812 break
3813 }
3814 x := v_1.Args[0]
3815 mem := v_2
3816 v.reset(OpMIPS64MOVHstore)
3817 v.AuxInt = int32ToAuxInt(off)
3818 v.Aux = symToAux(sym)
3819 v.AddArg3(ptr, x, mem)
3820 return true
3821 }
3822
3823
3824 for {
3825 off := auxIntToInt32(v.AuxInt)
3826 sym := auxToSym(v.Aux)
3827 ptr := v_0
3828 if v_1.Op != OpMIPS64MOVWreg {
3829 break
3830 }
3831 x := v_1.Args[0]
3832 mem := v_2
3833 v.reset(OpMIPS64MOVHstore)
3834 v.AuxInt = int32ToAuxInt(off)
3835 v.Aux = symToAux(sym)
3836 v.AddArg3(ptr, x, mem)
3837 return true
3838 }
3839
3840
3841 for {
3842 off := auxIntToInt32(v.AuxInt)
3843 sym := auxToSym(v.Aux)
3844 ptr := v_0
3845 if v_1.Op != OpMIPS64MOVWUreg {
3846 break
3847 }
3848 x := v_1.Args[0]
3849 mem := v_2
3850 v.reset(OpMIPS64MOVHstore)
3851 v.AuxInt = int32ToAuxInt(off)
3852 v.Aux = symToAux(sym)
3853 v.AddArg3(ptr, x, mem)
3854 return true
3855 }
3856 return false
3857 }
3858 func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
3859 v_1 := v.Args[1]
3860 v_0 := v.Args[0]
3861 b := v.Block
3862 config := b.Func.Config
3863
3864
3865
3866 for {
3867 off1 := auxIntToInt32(v.AuxInt)
3868 sym := auxToSym(v.Aux)
3869 if v_0.Op != OpMIPS64ADDVconst {
3870 break
3871 }
3872 off2 := auxIntToInt64(v_0.AuxInt)
3873 ptr := v_0.Args[0]
3874 mem := v_1
3875 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3876 break
3877 }
3878 v.reset(OpMIPS64MOVHstorezero)
3879 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3880 v.Aux = symToAux(sym)
3881 v.AddArg2(ptr, mem)
3882 return true
3883 }
3884
3885
3886
3887 for {
3888 off1 := auxIntToInt32(v.AuxInt)
3889 sym1 := auxToSym(v.Aux)
3890 if v_0.Op != OpMIPS64MOVVaddr {
3891 break
3892 }
3893 off2 := auxIntToInt32(v_0.AuxInt)
3894 sym2 := auxToSym(v_0.Aux)
3895 ptr := v_0.Args[0]
3896 mem := v_1
3897 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3898 break
3899 }
3900 v.reset(OpMIPS64MOVHstorezero)
3901 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3902 v.Aux = symToAux(mergeSym(sym1, sym2))
3903 v.AddArg2(ptr, mem)
3904 return true
3905 }
3906 return false
3907 }
3908 func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
3909 v_1 := v.Args[1]
3910 v_0 := v.Args[0]
3911 b := v.Block
3912 config := b.Func.Config
3913
3914
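// The reverse of the MOVDload rule above: reload an FP-stored value into an
// integer register with an FP-to-GP move instead of going through memory.
// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
// result: (MOVVfpgp val)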
3915 for {
3916 off := auxIntToInt32(v.AuxInt)
3917 sym := auxToSym(v.Aux)
3918 ptr := v_0
3919 if v_1.Op != OpMIPS64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3920 break
3921 }
3922 val := v_1.Args[1]
3923 if ptr != v_1.Args[0] {
3924 break
3925 }
3926 v.reset(OpMIPS64MOVVfpgp)
3927 v.AddArg(val)
3928 return true
3929 }
3930
3931
3932
3933 for {
3934 off1 := auxIntToInt32(v.AuxInt)
3935 sym := auxToSym(v.Aux)
3936 if v_0.Op != OpMIPS64ADDVconst {
3937 break
3938 }
3939 off2 := auxIntToInt64(v_0.AuxInt)
3940 ptr := v_0.Args[0]
3941 mem := v_1
3942 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3943 break
3944 }
3945 v.reset(OpMIPS64MOVVload)
3946 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3947 v.Aux = symToAux(sym)
3948 v.AddArg2(ptr, mem)
3949 return true
3950 }
3951
3952
3953
3954 for {
3955 off1 := auxIntToInt32(v.AuxInt)
3956 sym1 := auxToSym(v.Aux)
3957 if v_0.Op != OpMIPS64MOVVaddr {
3958 break
3959 }
3960 off2 := auxIntToInt32(v_0.AuxInt)
3961 sym2 := auxToSym(v_0.Aux)
3962 ptr := v_0.Args[0]
3963 mem := v_1
3964 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3965 break
3966 }
3967 v.reset(OpMIPS64MOVVload)
3968 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3969 v.Aux = symToAux(mergeSym(sym1, sym2))
3970 v.AddArg2(ptr, mem)
3971 return true
3972 }
3973
3974
3975
3976 for {
3977 off := auxIntToInt32(v.AuxInt)
3978 sym := auxToSym(v.Aux)
3979 if v_0.Op != OpSB || !(symIsRO(sym)) {
3980 break
3981 }
3982 v.reset(OpMIPS64MOVVconst)
3983 v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3984 return true
3985 }
3986 return false
3987 }
3988 func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
3989 v_0 := v.Args[0]
3990
3991
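// match: (MOVVnop (MOVVconst [c]))
// result: (MOVVconst [c])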
3992 for {
3993 if v_0.Op != OpMIPS64MOVVconst {
3994 break
3995 }
3996 c := auxIntToInt64(v_0.AuxInt)
3997 v.reset(OpMIPS64MOVVconst)
3998 v.AuxInt = int64ToAuxInt(c)
3999 return true
4000 }
4001 return false
4002 }
4003 func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
4004 v_0 := v.Args[0]
4005
4006
4007
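// match: (MOVVreg x)
// cond: x.Uses == 1
// result: (MOVVnop x)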
4008 for {
4009 x := v_0
4010 if !(x.Uses == 1) {
4011 break
4012 }
4013 v.reset(OpMIPS64MOVVnop)
4014 v.AddArg(x)
4015 return true
4016 }
4017
4018
4019 for {
4020 if v_0.Op != OpMIPS64MOVVconst {
4021 break
4022 }
4023 c := auxIntToInt64(v_0.AuxInt)
4024 v.reset(OpMIPS64MOVVconst)
4025 v.AuxInt = int64ToAuxInt(c)
4026 return true
4027 }
4028 return false
4029 }
4030 func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
4031 v_2 := v.Args[2]
4032 v_1 := v.Args[1]
4033 v_0 := v.Args[0]
4034 b := v.Block
4035 config := b.Func.Config
4036
4037
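// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
// result: (MOVDstore [off] {sym} ptr val mem)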
4038 for {
4039 off := auxIntToInt32(v.AuxInt)
4040 sym := auxToSym(v.Aux)
4041 ptr := v_0
4042 if v_1.Op != OpMIPS64MOVVfpgp {
4043 break
4044 }
4045 val := v_1.Args[0]
4046 mem := v_2
4047 v.reset(OpMIPS64MOVDstore)
4048 v.AuxInt = int32ToAuxInt(off)
4049 v.Aux = symToAux(sym)
4050 v.AddArg3(ptr, val, mem)
4051 return true
4052 }
4053
4054
4055
4056 for {
4057 off1 := auxIntToInt32(v.AuxInt)
4058 sym := auxToSym(v.Aux)
4059 if v_0.Op != OpMIPS64ADDVconst {
4060 break
4061 }
4062 off2 := auxIntToInt64(v_0.AuxInt)
4063 ptr := v_0.Args[0]
4064 val := v_1
4065 mem := v_2
4066 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4067 break
4068 }
4069 v.reset(OpMIPS64MOVVstore)
4070 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4071 v.Aux = symToAux(sym)
4072 v.AddArg3(ptr, val, mem)
4073 return true
4074 }
4075
4076
4077
4078 for {
4079 off1 := auxIntToInt32(v.AuxInt)
4080 sym1 := auxToSym(v.Aux)
4081 if v_0.Op != OpMIPS64MOVVaddr {
4082 break
4083 }
4084 off2 := auxIntToInt32(v_0.AuxInt)
4085 sym2 := auxToSym(v_0.Aux)
4086 ptr := v_0.Args[0]
4087 val := v_1
4088 mem := v_2
4089 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4090 break
4091 }
4092 v.reset(OpMIPS64MOVVstore)
4093 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4094 v.Aux = symToAux(mergeSym(sym1, sym2))
4095 v.AddArg3(ptr, val, mem)
4096 return true
4097 }
4098
4099
4100 for {
4101 off := auxIntToInt32(v.AuxInt)
4102 sym := auxToSym(v.Aux)
4103 ptr := v_0
4104 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4105 break
4106 }
4107 mem := v_2
4108 v.reset(OpMIPS64MOVVstorezero)
4109 v.AuxInt = int32ToAuxInt(off)
4110 v.Aux = symToAux(sym)
4111 v.AddArg2(ptr, mem)
4112 return true
4113 }
4114 return false
4115 }
4116 func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
4117 v_1 := v.Args[1]
4118 v_0 := v.Args[0]
4119 b := v.Block
4120 config := b.Func.Config
4121
4122
4123
4124 for {
4125 off1 := auxIntToInt32(v.AuxInt)
4126 sym := auxToSym(v.Aux)
4127 if v_0.Op != OpMIPS64ADDVconst {
4128 break
4129 }
4130 off2 := auxIntToInt64(v_0.AuxInt)
4131 ptr := v_0.Args[0]
4132 mem := v_1
4133 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4134 break
4135 }
4136 v.reset(OpMIPS64MOVVstorezero)
4137 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4138 v.Aux = symToAux(sym)
4139 v.AddArg2(ptr, mem)
4140 return true
4141 }
4142
4143
4144
4145 for {
4146 off1 := auxIntToInt32(v.AuxInt)
4147 sym1 := auxToSym(v.Aux)
4148 if v_0.Op != OpMIPS64MOVVaddr {
4149 break
4150 }
4151 off2 := auxIntToInt32(v_0.AuxInt)
4152 sym2 := auxToSym(v_0.Aux)
4153 ptr := v_0.Args[0]
4154 mem := v_1
4155 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4156 break
4157 }
4158 v.reset(OpMIPS64MOVVstorezero)
4159 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4160 v.Aux = symToAux(mergeSym(sym1, sym2))
4161 v.AddArg2(ptr, mem)
4162 return true
4163 }
4164 return false
4165 }
4166 func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
4167 v_1 := v.Args[1]
4168 v_0 := v.Args[0]
4169 b := v.Block
4170 config := b.Func.Config
4171 typ := &b.Func.Config.Types
4172
4173
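// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))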
4174 for {
4175 off := auxIntToInt32(v.AuxInt)
4176 sym := auxToSym(v.Aux)
4177 ptr := v_0
4178 if v_1.Op != OpMIPS64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4179 break
4180 }
4181 val := v_1.Args[1]
4182 if ptr != v_1.Args[0] {
4183 break
4184 }
4185 v.reset(OpZeroExt32to64)
4186 v0 := b.NewValue0(v_1.Pos, OpMIPS64MOVWfpgp, typ.Float32)
4187 v0.AddArg(val)
4188 v.AddArg(v0)
4189 return true
4190 }
4191
4192
4193
4194 for {
4195 off1 := auxIntToInt32(v.AuxInt)
4196 sym := auxToSym(v.Aux)
4197 if v_0.Op != OpMIPS64ADDVconst {
4198 break
4199 }
4200 off2 := auxIntToInt64(v_0.AuxInt)
4201 ptr := v_0.Args[0]
4202 mem := v_1
4203 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4204 break
4205 }
4206 v.reset(OpMIPS64MOVWUload)
4207 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4208 v.Aux = symToAux(sym)
4209 v.AddArg2(ptr, mem)
4210 return true
4211 }
4212
4213
4214
4215 for {
4216 off1 := auxIntToInt32(v.AuxInt)
4217 sym1 := auxToSym(v.Aux)
4218 if v_0.Op != OpMIPS64MOVVaddr {
4219 break
4220 }
4221 off2 := auxIntToInt32(v_0.AuxInt)
4222 sym2 := auxToSym(v_0.Aux)
4223 ptr := v_0.Args[0]
4224 mem := v_1
4225 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4226 break
4227 }
4228 v.reset(OpMIPS64MOVWUload)
4229 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4230 v.Aux = symToAux(mergeSym(sym1, sym2))
4231 v.AddArg2(ptr, mem)
4232 return true
4233 }
4234
4235
4236
4237 for {
4238 off := auxIntToInt32(v.AuxInt)
4239 sym := auxToSym(v.Aux)
4240 if v_0.Op != OpSB || !(symIsRO(sym)) {
4241 break
4242 }
4243 v.reset(OpMIPS64MOVVconst)
4244 v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
4245 return true
4246 }
4247 return false
4248 }
4249 func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
4250 v_0 := v.Args[0]
4251
4252
4253 for {
4254 x := v_0
4255 if x.Op != OpMIPS64MOVBUload {
4256 break
4257 }
4258 v.reset(OpMIPS64MOVVreg)
4259 v.AddArg(x)
4260 return true
4261 }
4262
4263
4264 for {
4265 x := v_0
4266 if x.Op != OpMIPS64MOVHUload {
4267 break
4268 }
4269 v.reset(OpMIPS64MOVVreg)
4270 v.AddArg(x)
4271 return true
4272 }
4273
4274
4275 for {
4276 x := v_0
4277 if x.Op != OpMIPS64MOVWUload {
4278 break
4279 }
4280 v.reset(OpMIPS64MOVVreg)
4281 v.AddArg(x)
4282 return true
4283 }
4284
4285
4286 for {
4287 x := v_0
4288 if x.Op != OpMIPS64MOVBUreg {
4289 break
4290 }
4291 v.reset(OpMIPS64MOVVreg)
4292 v.AddArg(x)
4293 return true
4294 }
4295
4296
4297 for {
4298 x := v_0
4299 if x.Op != OpMIPS64MOVHUreg {
4300 break
4301 }
4302 v.reset(OpMIPS64MOVVreg)
4303 v.AddArg(x)
4304 return true
4305 }
4306
4307
4308 for {
4309 x := v_0
4310 if x.Op != OpMIPS64MOVWUreg {
4311 break
4312 }
4313 v.reset(OpMIPS64MOVVreg)
4314 v.AddArg(x)
4315 return true
4316 }
4317
4318
4319 for {
4320 if v_0.Op != OpMIPS64MOVVconst {
4321 break
4322 }
4323 c := auxIntToInt64(v_0.AuxInt)
4324 v.reset(OpMIPS64MOVVconst)
4325 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
4326 return true
4327 }
4328 return false
4329 }
4330 func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
4331 v_1 := v.Args[1]
4332 v_0 := v.Args[0]
4333 b := v.Block
4334 config := b.Func.Config
4335
4336
4337
4338 for {
4339 off1 := auxIntToInt32(v.AuxInt)
4340 sym := auxToSym(v.Aux)
4341 if v_0.Op != OpMIPS64ADDVconst {
4342 break
4343 }
4344 off2 := auxIntToInt64(v_0.AuxInt)
4345 ptr := v_0.Args[0]
4346 mem := v_1
4347 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4348 break
4349 }
4350 v.reset(OpMIPS64MOVWload)
4351 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4352 v.Aux = symToAux(sym)
4353 v.AddArg2(ptr, mem)
4354 return true
4355 }
4356
4357
4358
4359 for {
4360 off1 := auxIntToInt32(v.AuxInt)
4361 sym1 := auxToSym(v.Aux)
4362 if v_0.Op != OpMIPS64MOVVaddr {
4363 break
4364 }
4365 off2 := auxIntToInt32(v_0.AuxInt)
4366 sym2 := auxToSym(v_0.Aux)
4367 ptr := v_0.Args[0]
4368 mem := v_1
4369 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4370 break
4371 }
4372 v.reset(OpMIPS64MOVWload)
4373 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4374 v.Aux = symToAux(mergeSym(sym1, sym2))
4375 v.AddArg2(ptr, mem)
4376 return true
4377 }
4378
4379
4380
4381 for {
4382 off := auxIntToInt32(v.AuxInt)
4383 sym := auxToSym(v.Aux)
4384 if v_0.Op != OpSB || !(symIsRO(sym)) {
4385 break
4386 }
4387 v.reset(OpMIPS64MOVVconst)
4388 v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
4389 return true
4390 }
4391 return false
4392 }
4393 func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
4394 v_0 := v.Args[0]
4395
4396
4397 for {
4398 x := v_0
4399 if x.Op != OpMIPS64MOVBload {
4400 break
4401 }
4402 v.reset(OpMIPS64MOVVreg)
4403 v.AddArg(x)
4404 return true
4405 }
4406
4407
4408 for {
4409 x := v_0
4410 if x.Op != OpMIPS64MOVBUload {
4411 break
4412 }
4413 v.reset(OpMIPS64MOVVreg)
4414 v.AddArg(x)
4415 return true
4416 }
4417
4418
4419 for {
4420 x := v_0
4421 if x.Op != OpMIPS64MOVHload {
4422 break
4423 }
4424 v.reset(OpMIPS64MOVVreg)
4425 v.AddArg(x)
4426 return true
4427 }
4428
4429
4430 for {
4431 x := v_0
4432 if x.Op != OpMIPS64MOVHUload {
4433 break
4434 }
4435 v.reset(OpMIPS64MOVVreg)
4436 v.AddArg(x)
4437 return true
4438 }
4439
4440
4441 for {
4442 x := v_0
4443 if x.Op != OpMIPS64MOVWload {
4444 break
4445 }
4446 v.reset(OpMIPS64MOVVreg)
4447 v.AddArg(x)
4448 return true
4449 }
4450
4451
4452 for {
4453 x := v_0
4454 if x.Op != OpMIPS64MOVBreg {
4455 break
4456 }
4457 v.reset(OpMIPS64MOVVreg)
4458 v.AddArg(x)
4459 return true
4460 }
4461
4462
4463 for {
4464 x := v_0
4465 if x.Op != OpMIPS64MOVBUreg {
4466 break
4467 }
4468 v.reset(OpMIPS64MOVVreg)
4469 v.AddArg(x)
4470 return true
4471 }
4472
4473
4474 for {
4475 x := v_0
4476 if x.Op != OpMIPS64MOVHreg {
4477 break
4478 }
4479 v.reset(OpMIPS64MOVVreg)
4480 v.AddArg(x)
4481 return true
4482 }
4483
4484
4485 for {
4486 x := v_0
4487 if x.Op != OpMIPS64MOVWreg {
4488 break
4489 }
4490 v.reset(OpMIPS64MOVVreg)
4491 v.AddArg(x)
4492 return true
4493 }
4494
4495
4496 for {
4497 if v_0.Op != OpMIPS64MOVVconst {
4498 break
4499 }
4500 c := auxIntToInt64(v_0.AuxInt)
4501 v.reset(OpMIPS64MOVVconst)
4502 v.AuxInt = int64ToAuxInt(int64(int32(c)))
4503 return true
4504 }
4505 return false
4506 }
4507 func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
4508 v_2 := v.Args[2]
4509 v_1 := v.Args[1]
4510 v_0 := v.Args[0]
4511 b := v.Block
4512 config := b.Func.Config
4513
4514
4515 for {
4516 off := auxIntToInt32(v.AuxInt)
4517 sym := auxToSym(v.Aux)
4518 ptr := v_0
4519 if v_1.Op != OpMIPS64MOVWfpgp {
4520 break
4521 }
4522 val := v_1.Args[0]
4523 mem := v_2
4524 v.reset(OpMIPS64MOVFstore)
4525 v.AuxInt = int32ToAuxInt(off)
4526 v.Aux = symToAux(sym)
4527 v.AddArg3(ptr, val, mem)
4528 return true
4529 }
4530
4531
4532
4533 for {
4534 off1 := auxIntToInt32(v.AuxInt)
4535 sym := auxToSym(v.Aux)
4536 if v_0.Op != OpMIPS64ADDVconst {
4537 break
4538 }
4539 off2 := auxIntToInt64(v_0.AuxInt)
4540 ptr := v_0.Args[0]
4541 val := v_1
4542 mem := v_2
4543 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4544 break
4545 }
4546 v.reset(OpMIPS64MOVWstore)
4547 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4548 v.Aux = symToAux(sym)
4549 v.AddArg3(ptr, val, mem)
4550 return true
4551 }
4552
4553
4554
4555 for {
4556 off1 := auxIntToInt32(v.AuxInt)
4557 sym1 := auxToSym(v.Aux)
4558 if v_0.Op != OpMIPS64MOVVaddr {
4559 break
4560 }
4561 off2 := auxIntToInt32(v_0.AuxInt)
4562 sym2 := auxToSym(v_0.Aux)
4563 ptr := v_0.Args[0]
4564 val := v_1
4565 mem := v_2
4566 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4567 break
4568 }
4569 v.reset(OpMIPS64MOVWstore)
4570 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4571 v.Aux = symToAux(mergeSym(sym1, sym2))
4572 v.AddArg3(ptr, val, mem)
4573 return true
4574 }
4575
4576
4577 for {
4578 off := auxIntToInt32(v.AuxInt)
4579 sym := auxToSym(v.Aux)
4580 ptr := v_0
4581 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4582 break
4583 }
4584 mem := v_2
4585 v.reset(OpMIPS64MOVWstorezero)
4586 v.AuxInt = int32ToAuxInt(off)
4587 v.Aux = symToAux(sym)
4588 v.AddArg2(ptr, mem)
4589 return true
4590 }
4591
4592
4593 for {
4594 off := auxIntToInt32(v.AuxInt)
4595 sym := auxToSym(v.Aux)
4596 ptr := v_0
4597 if v_1.Op != OpMIPS64MOVWreg {
4598 break
4599 }
4600 x := v_1.Args[0]
4601 mem := v_2
4602 v.reset(OpMIPS64MOVWstore)
4603 v.AuxInt = int32ToAuxInt(off)
4604 v.Aux = symToAux(sym)
4605 v.AddArg3(ptr, x, mem)
4606 return true
4607 }
4608
4609
4610 for {
4611 off := auxIntToInt32(v.AuxInt)
4612 sym := auxToSym(v.Aux)
4613 ptr := v_0
4614 if v_1.Op != OpMIPS64MOVWUreg {
4615 break
4616 }
4617 x := v_1.Args[0]
4618 mem := v_2
4619 v.reset(OpMIPS64MOVWstore)
4620 v.AuxInt = int32ToAuxInt(off)
4621 v.Aux = symToAux(sym)
4622 v.AddArg3(ptr, x, mem)
4623 return true
4624 }
4625 return false
4626 }
4627 func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
4628 v_1 := v.Args[1]
4629 v_0 := v.Args[0]
4630 b := v.Block
4631 config := b.Func.Config
4632
4633
4634
4635 for {
4636 off1 := auxIntToInt32(v.AuxInt)
4637 sym := auxToSym(v.Aux)
4638 if v_0.Op != OpMIPS64ADDVconst {
4639 break
4640 }
4641 off2 := auxIntToInt64(v_0.AuxInt)
4642 ptr := v_0.Args[0]
4643 mem := v_1
4644 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4645 break
4646 }
4647 v.reset(OpMIPS64MOVWstorezero)
4648 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4649 v.Aux = symToAux(sym)
4650 v.AddArg2(ptr, mem)
4651 return true
4652 }
4653
4654
4655
4656 for {
4657 off1 := auxIntToInt32(v.AuxInt)
4658 sym1 := auxToSym(v.Aux)
4659 if v_0.Op != OpMIPS64MOVVaddr {
4660 break
4661 }
4662 off2 := auxIntToInt32(v_0.AuxInt)
4663 sym2 := auxToSym(v_0.Aux)
4664 ptr := v_0.Args[0]
4665 mem := v_1
4666 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4667 break
4668 }
4669 v.reset(OpMIPS64MOVWstorezero)
4670 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4671 v.Aux = symToAux(mergeSym(sym1, sym2))
4672 v.AddArg2(ptr, mem)
4673 return true
4674 }
4675 return false
4676 }
4677 func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
4678 v_0 := v.Args[0]
4679
4680
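// match: (NEGV (SUBV x y))
// result: (SUBV y x)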
4681 for {
4682 if v_0.Op != OpMIPS64SUBV {
4683 break
4684 }
4685 y := v_0.Args[1]
4686 x := v_0.Args[0]
4687 v.reset(OpMIPS64SUBV)
4688 v.AddArg2(y, x)
4689 return true
4690 }
4691
4692
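// match: (NEGV (NEGV x))
// result: x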
4693 for {
4694 if v_0.Op != OpMIPS64NEGV {
4695 break
4696 }
4697 x := v_0.Args[0]
4698 v.copyOf(x)
4699 return true
4700 }
4701
4702
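// match: (NEGV (MOVVconst [c]))
// result: (MOVVconst [-c])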
4703 for {
4704 if v_0.Op != OpMIPS64MOVVconst {
4705 break
4706 }
4707 c := auxIntToInt64(v_0.AuxInt)
4708 v.reset(OpMIPS64MOVVconst)
4709 v.AuxInt = int64ToAuxInt(-c)
4710 return true
4711 }
4712 return false
4713 }
4714 func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
4715 v_1 := v.Args[1]
4716 v_0 := v.Args[0]
4717
4718
4719
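// match: (NOR x (MOVVconst [c]))
// cond: is32Bit(c)
// result: (NORconst [c] x)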
4720 for {
4721 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4722 x := v_0
4723 if v_1.Op != OpMIPS64MOVVconst {
4724 continue
4725 }
4726 c := auxIntToInt64(v_1.AuxInt)
4727 if !(is32Bit(c)) {
4728 continue
4729 }
4730 v.reset(OpMIPS64NORconst)
4731 v.AuxInt = int64ToAuxInt(c)
4732 v.AddArg(x)
4733 return true
4734 }
4735 break
4736 }
4737 return false
4738 }
4739 func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
4740 v_0 := v.Args[0]
4741
4742
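// match: (NORconst [c] (MOVVconst [d]))
// result: (MOVVconst [^(c|d)])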
4743 for {
4744 c := auxIntToInt64(v.AuxInt)
4745 if v_0.Op != OpMIPS64MOVVconst {
4746 break
4747 }
4748 d := auxIntToInt64(v_0.AuxInt)
4749 v.reset(OpMIPS64MOVVconst)
4750 v.AuxInt = int64ToAuxInt(^(c | d))
4751 return true
4752 }
4753 return false
4754 }
4755 func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
4756 v_1 := v.Args[1]
4757 v_0 := v.Args[0]
4758
4759
4760
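// match: (OR x (MOVVconst [c]))
// cond: is32Bit(c)
// result: (ORconst [c] x)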
4761 for {
4762 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4763 x := v_0
4764 if v_1.Op != OpMIPS64MOVVconst {
4765 continue
4766 }
4767 c := auxIntToInt64(v_1.AuxInt)
4768 if !(is32Bit(c)) {
4769 continue
4770 }
4771 v.reset(OpMIPS64ORconst)
4772 v.AuxInt = int64ToAuxInt(c)
4773 v.AddArg(x)
4774 return true
4775 }
4776 break
4777 }
4778
4779
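// match: (OR x x)
// result: x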
4780 for {
4781 x := v_0
4782 if x != v_1 {
4783 break
4784 }
4785 v.copyOf(x)
4786 return true
4787 }
4788 return false
4789 }
4790 func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
4791 v_0 := v.Args[0]
4792
4793
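// match: (ORconst [0] x)
// result: x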
4794 for {
4795 if auxIntToInt64(v.AuxInt) != 0 {
4796 break
4797 }
4798 x := v_0
4799 v.copyOf(x)
4800 return true
4801 }
4802
4803
4804 for {
4805 if auxIntToInt64(v.AuxInt) != -1 {
4806 break
4807 }
4808 v.reset(OpMIPS64MOVVconst)
4809 v.AuxInt = int64ToAuxInt(-1)
4810 return true
4811 }
4812
4813
4814 for {
4815 c := auxIntToInt64(v.AuxInt)
4816 if v_0.Op != OpMIPS64MOVVconst {
4817 break
4818 }
4819 d := auxIntToInt64(v_0.AuxInt)
4820 v.reset(OpMIPS64MOVVconst)
4821 v.AuxInt = int64ToAuxInt(c | d)
4822 return true
4823 }
4824
4825
4826
4827 for {
4828 c := auxIntToInt64(v.AuxInt)
4829 if v_0.Op != OpMIPS64ORconst {
4830 break
4831 }
4832 d := auxIntToInt64(v_0.AuxInt)
4833 x := v_0.Args[0]
4834 if !(is32Bit(c | d)) {
4835 break
4836 }
4837 v.reset(OpMIPS64ORconst)
4838 v.AuxInt = int64ToAuxInt(c | d)
4839 v.AddArg(x)
4840 return true
4841 }
4842 return false
4843 }
4844 func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
4845 v_1 := v.Args[1]
4846 v_0 := v.Args[0]
4847
4848
4849
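// match: (SGT (MOVVconst [c]) x)
// cond: is32Bit(c)
// result: (SGTconst [c] x)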
4850 for {
4851 if v_0.Op != OpMIPS64MOVVconst {
4852 break
4853 }
4854 c := auxIntToInt64(v_0.AuxInt)
4855 x := v_1
4856 if !(is32Bit(c)) {
4857 break
4858 }
4859 v.reset(OpMIPS64SGTconst)
4860 v.AuxInt = int64ToAuxInt(c)
4861 v.AddArg(x)
4862 return true
4863 }
4864
4865
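// match: (SGT x x)
// result: (MOVVconst [0])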
4866 for {
4867 x := v_0
4868 if x != v_1 {
4869 break
4870 }
4871 v.reset(OpMIPS64MOVVconst)
4872 v.AuxInt = int64ToAuxInt(0)
4873 return true
4874 }
4875 return false
4876 }
4877 func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
4878 v_1 := v.Args[1]
4879 v_0 := v.Args[0]
4880
4881
4882
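// match: (SGTU (MOVVconst [c]) x)
// cond: is32Bit(c)
// result: (SGTUconst [c] x)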
4883 for {
4884 if v_0.Op != OpMIPS64MOVVconst {
4885 break
4886 }
4887 c := auxIntToInt64(v_0.AuxInt)
4888 x := v_1
4889 if !(is32Bit(c)) {
4890 break
4891 }
4892 v.reset(OpMIPS64SGTUconst)
4893 v.AuxInt = int64ToAuxInt(c)
4894 v.AddArg(x)
4895 return true
4896 }
4897
4898
4899 for {
4900 x := v_0
4901 if x != v_1 {
4902 break
4903 }
4904 v.reset(OpMIPS64MOVVconst)
4905 v.AuxInt = int64ToAuxInt(0)
4906 return true
4907 }
4908 return false
4909 }
4910 func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
4911 v_0 := v.Args[0]
4912
4913
4914
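// match: (SGTUconst [c] (MOVVconst [d]))
// cond: uint64(c) > uint64(d)
// result: (MOVVconst [1])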
4915 for {
4916 c := auxIntToInt64(v.AuxInt)
4917 if v_0.Op != OpMIPS64MOVVconst {
4918 break
4919 }
4920 d := auxIntToInt64(v_0.AuxInt)
4921 if !(uint64(c) > uint64(d)) {
4922 break
4923 }
4924 v.reset(OpMIPS64MOVVconst)
4925 v.AuxInt = int64ToAuxInt(1)
4926 return true
4927 }
4928
4929
4930
4931 for {
4932 c := auxIntToInt64(v.AuxInt)
4933 if v_0.Op != OpMIPS64MOVVconst {
4934 break
4935 }
4936 d := auxIntToInt64(v_0.AuxInt)
4937 if !(uint64(c) <= uint64(d)) {
4938 break
4939 }
4940 v.reset(OpMIPS64MOVVconst)
4941 v.AuxInt = int64ToAuxInt(0)
4942 return true
4943 }
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
4947 for {
4948 c := auxIntToInt64(v.AuxInt)
4949 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
4950 break
4951 }
4952 v.reset(OpMIPS64MOVVconst)
4953 v.AuxInt = int64ToAuxInt(1)
4954 return true
4955 }
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
4959 for {
4960 c := auxIntToInt64(v.AuxInt)
4961 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
4962 break
4963 }
4964 v.reset(OpMIPS64MOVVconst)
4965 v.AuxInt = int64ToAuxInt(1)
4966 return true
4967 }
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
4971 for {
4972 c := auxIntToInt64(v.AuxInt)
4973 if v_0.Op != OpMIPS64ANDconst {
4974 break
4975 }
4976 m := auxIntToInt64(v_0.AuxInt)
4977 if !(uint64(m) < uint64(c)) {
4978 break
4979 }
4980 v.reset(OpMIPS64MOVVconst)
4981 v.AuxInt = int64ToAuxInt(1)
4982 return true
4983 }
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
4987 for {
4988 c := auxIntToInt64(v.AuxInt)
4989 if v_0.Op != OpMIPS64SRLVconst {
4990 break
4991 }
4992 d := auxIntToInt64(v_0.AuxInt)
4993 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
4994 break
4995 }
4996 v.reset(OpMIPS64MOVVconst)
4997 v.AuxInt = int64ToAuxInt(1)
4998 return true
4999 }
5000 return false
5001 }
5002 func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
5003 v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c > d
	// result: (MOVVconst [1])
5007 for {
5008 c := auxIntToInt64(v.AuxInt)
5009 if v_0.Op != OpMIPS64MOVVconst {
5010 break
5011 }
5012 d := auxIntToInt64(v_0.AuxInt)
5013 if !(c > d) {
5014 break
5015 }
5016 v.reset(OpMIPS64MOVVconst)
5017 v.AuxInt = int64ToAuxInt(1)
5018 return true
5019 }
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c <= d
	// result: (MOVVconst [0])
5023 for {
5024 c := auxIntToInt64(v.AuxInt)
5025 if v_0.Op != OpMIPS64MOVVconst {
5026 break
5027 }
5028 d := auxIntToInt64(v_0.AuxInt)
5029 if !(c <= d) {
5030 break
5031 }
5032 v.reset(OpMIPS64MOVVconst)
5033 v.AuxInt = int64ToAuxInt(0)
5034 return true
5035 }
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
5039 for {
5040 c := auxIntToInt64(v.AuxInt)
5041 if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
5042 break
5043 }
5044 v.reset(OpMIPS64MOVVconst)
5045 v.AuxInt = int64ToAuxInt(1)
5046 return true
5047 }
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
5051 for {
5052 c := auxIntToInt64(v.AuxInt)
5053 if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
5054 break
5055 }
5056 v.reset(OpMIPS64MOVVconst)
5057 v.AuxInt = int64ToAuxInt(0)
5058 return true
5059 }
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
5063 for {
5064 c := auxIntToInt64(v.AuxInt)
5065 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
5066 break
5067 }
5068 v.reset(OpMIPS64MOVVconst)
5069 v.AuxInt = int64ToAuxInt(1)
5070 return true
5071 }
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
5075 for {
5076 c := auxIntToInt64(v.AuxInt)
5077 if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
5078 break
5079 }
5080 v.reset(OpMIPS64MOVVconst)
5081 v.AuxInt = int64ToAuxInt(0)
5082 return true
5083 }
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
5087 for {
5088 c := auxIntToInt64(v.AuxInt)
5089 if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
5090 break
5091 }
5092 v.reset(OpMIPS64MOVVconst)
5093 v.AuxInt = int64ToAuxInt(1)
5094 return true
5095 }
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
5099 for {
5100 c := auxIntToInt64(v.AuxInt)
5101 if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
5102 break
5103 }
5104 v.reset(OpMIPS64MOVVconst)
5105 v.AuxInt = int64ToAuxInt(0)
5106 return true
5107 }
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
5111 for {
5112 c := auxIntToInt64(v.AuxInt)
5113 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
5114 break
5115 }
5116 v.reset(OpMIPS64MOVVconst)
5117 v.AuxInt = int64ToAuxInt(1)
5118 return true
5119 }
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
5123 for {
5124 c := auxIntToInt64(v.AuxInt)
5125 if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
5126 break
5127 }
5128 v.reset(OpMIPS64MOVVconst)
5129 v.AuxInt = int64ToAuxInt(0)
5130 return true
5131 }
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
5135 for {
5136 c := auxIntToInt64(v.AuxInt)
5137 if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
5138 break
5139 }
5140 v.reset(OpMIPS64MOVVconst)
5141 v.AuxInt = int64ToAuxInt(0)
5142 return true
5143 }
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
5147 for {
5148 c := auxIntToInt64(v.AuxInt)
5149 if v_0.Op != OpMIPS64ANDconst {
5150 break
5151 }
5152 m := auxIntToInt64(v_0.AuxInt)
5153 if !(0 <= m && m < c) {
5154 break
5155 }
5156 v.reset(OpMIPS64MOVVconst)
5157 v.AuxInt = int64ToAuxInt(1)
5158 return true
5159 }
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
5163 for {
5164 c := auxIntToInt64(v.AuxInt)
5165 if v_0.Op != OpMIPS64SRLVconst {
5166 break
5167 }
5168 d := auxIntToInt64(v_0.AuxInt)
5169 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
5170 break
5171 }
5172 v.reset(OpMIPS64MOVVconst)
5173 v.AuxInt = int64ToAuxInt(1)
5174 return true
5175 }
5176 return false
5177 }
5178 func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
5179 v_1 := v.Args[1]
5180 v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVVconst [0])
5184 for {
5185 if v_1.Op != OpMIPS64MOVVconst {
5186 break
5187 }
5188 c := auxIntToInt64(v_1.AuxInt)
5189 if !(uint64(c) >= 64) {
5190 break
5191 }
5192 v.reset(OpMIPS64MOVVconst)
5193 v.AuxInt = int64ToAuxInt(0)
5194 return true
5195 }
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
5198 for {
5199 x := v_0
5200 if v_1.Op != OpMIPS64MOVVconst {
5201 break
5202 }
5203 c := auxIntToInt64(v_1.AuxInt)
5204 v.reset(OpMIPS64SLLVconst)
5205 v.AuxInt = int64ToAuxInt(c)
5206 v.AddArg(x)
5207 return true
5208 }
5209 return false
5210 }
5211 func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
5212 v_0 := v.Args[0]
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
5215 for {
5216 c := auxIntToInt64(v.AuxInt)
5217 if v_0.Op != OpMIPS64MOVVconst {
5218 break
5219 }
5220 d := auxIntToInt64(v_0.AuxInt)
5221 v.reset(OpMIPS64MOVVconst)
5222 v.AuxInt = int64ToAuxInt(d << uint64(c))
5223 return true
5224 }
5225 return false
5226 }
5227 func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
5228 v_1 := v.Args[1]
5229 v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c) >= 64
	// result: (SRAVconst x [63])
5233 for {
5234 x := v_0
5235 if v_1.Op != OpMIPS64MOVVconst {
5236 break
5237 }
5238 c := auxIntToInt64(v_1.AuxInt)
5239 if !(uint64(c) >= 64) {
5240 break
5241 }
5242 v.reset(OpMIPS64SRAVconst)
5243 v.AuxInt = int64ToAuxInt(63)
5244 v.AddArg(x)
5245 return true
5246 }
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
5249 for {
5250 x := v_0
5251 if v_1.Op != OpMIPS64MOVVconst {
5252 break
5253 }
5254 c := auxIntToInt64(v_1.AuxInt)
5255 v.reset(OpMIPS64SRAVconst)
5256 v.AuxInt = int64ToAuxInt(c)
5257 v.AddArg(x)
5258 return true
5259 }
5260 return false
5261 }
5262 func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
5263 v_0 := v.Args[0]
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
5266 for {
5267 c := auxIntToInt64(v.AuxInt)
5268 if v_0.Op != OpMIPS64MOVVconst {
5269 break
5270 }
5271 d := auxIntToInt64(v_0.AuxInt)
5272 v.reset(OpMIPS64MOVVconst)
5273 v.AuxInt = int64ToAuxInt(d >> uint64(c))
5274 return true
5275 }
5276 return false
5277 }
5278 func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
5279 v_1 := v.Args[1]
5280 v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVVconst [0])
5284 for {
5285 if v_1.Op != OpMIPS64MOVVconst {
5286 break
5287 }
5288 c := auxIntToInt64(v_1.AuxInt)
5289 if !(uint64(c) >= 64) {
5290 break
5291 }
5292 v.reset(OpMIPS64MOVVconst)
5293 v.AuxInt = int64ToAuxInt(0)
5294 return true
5295 }
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
5298 for {
5299 x := v_0
5300 if v_1.Op != OpMIPS64MOVVconst {
5301 break
5302 }
5303 c := auxIntToInt64(v_1.AuxInt)
5304 v.reset(OpMIPS64SRLVconst)
5305 v.AuxInt = int64ToAuxInt(c)
5306 v.AddArg(x)
5307 return true
5308 }
5309 return false
5310 }
5311 func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
5312 v_0 := v.Args[0]
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
5315 for {
5316 c := auxIntToInt64(v.AuxInt)
5317 if v_0.Op != OpMIPS64MOVVconst {
5318 break
5319 }
5320 d := auxIntToInt64(v_0.AuxInt)
5321 v.reset(OpMIPS64MOVVconst)
5322 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
5323 return true
5324 }
5325 return false
5326 }
5327 func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
5328 v_1 := v.Args[1]
5329 v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
5333 for {
5334 x := v_0
5335 if v_1.Op != OpMIPS64MOVVconst {
5336 break
5337 }
5338 c := auxIntToInt64(v_1.AuxInt)
5339 if !(is32Bit(c)) {
5340 break
5341 }
5342 v.reset(OpMIPS64SUBVconst)
5343 v.AuxInt = int64ToAuxInt(c)
5344 v.AddArg(x)
5345 return true
5346 }
	// match: (SUBV x (NEGV y))
	// result: (ADDV x y)
5349 for {
5350 x := v_0
5351 if v_1.Op != OpMIPS64NEGV {
5352 break
5353 }
5354 y := v_1.Args[0]
5355 v.reset(OpMIPS64ADDV)
5356 v.AddArg2(x, y)
5357 return true
5358 }
	// match: (SUBV x x)
	// result: (MOVVconst [0])
5361 for {
5362 x := v_0
5363 if x != v_1 {
5364 break
5365 }
5366 v.reset(OpMIPS64MOVVconst)
5367 v.AuxInt = int64ToAuxInt(0)
5368 return true
5369 }
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
5372 for {
5373 if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
5374 break
5375 }
5376 x := v_1
5377 v.reset(OpMIPS64NEGV)
5378 v.AddArg(x)
5379 return true
5380 }
5381 return false
5382 }
5383 func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
5384 v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
5387 for {
5388 if auxIntToInt64(v.AuxInt) != 0 {
5389 break
5390 }
5391 x := v_0
5392 v.copyOf(x)
5393 return true
5394 }
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
5397 for {
5398 c := auxIntToInt64(v.AuxInt)
5399 if v_0.Op != OpMIPS64MOVVconst {
5400 break
5401 }
5402 d := auxIntToInt64(v_0.AuxInt)
5403 v.reset(OpMIPS64MOVVconst)
5404 v.AuxInt = int64ToAuxInt(d - c)
5405 return true
5406 }
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
5410 for {
5411 c := auxIntToInt64(v.AuxInt)
5412 if v_0.Op != OpMIPS64SUBVconst {
5413 break
5414 }
5415 d := auxIntToInt64(v_0.AuxInt)
5416 x := v_0.Args[0]
5417 if !(is32Bit(-c - d)) {
5418 break
5419 }
5420 v.reset(OpMIPS64ADDVconst)
5421 v.AuxInt = int64ToAuxInt(-c - d)
5422 v.AddArg(x)
5423 return true
5424 }
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
5428 for {
5429 c := auxIntToInt64(v.AuxInt)
5430 if v_0.Op != OpMIPS64ADDVconst {
5431 break
5432 }
5433 d := auxIntToInt64(v_0.AuxInt)
5434 x := v_0.Args[0]
5435 if !(is32Bit(-c + d)) {
5436 break
5437 }
5438 v.reset(OpMIPS64ADDVconst)
5439 v.AuxInt = int64ToAuxInt(-c + d)
5440 v.AddArg(x)
5441 return true
5442 }
5443 return false
5444 }
5445 func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
5446 v_1 := v.Args[1]
5447 v_0 := v.Args[0]
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
5451 for {
5452 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5453 x := v_0
5454 if v_1.Op != OpMIPS64MOVVconst {
5455 continue
5456 }
5457 c := auxIntToInt64(v_1.AuxInt)
5458 if !(is32Bit(c)) {
5459 continue
5460 }
5461 v.reset(OpMIPS64XORconst)
5462 v.AuxInt = int64ToAuxInt(c)
5463 v.AddArg(x)
5464 return true
5465 }
5466 break
5467 }
	// match: (XOR x x)
	// result: (MOVVconst [0])
5470 for {
5471 x := v_0
5472 if x != v_1 {
5473 break
5474 }
5475 v.reset(OpMIPS64MOVVconst)
5476 v.AuxInt = int64ToAuxInt(0)
5477 return true
5478 }
5479 return false
5480 }
5481 func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
5482 v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
5485 for {
5486 if auxIntToInt64(v.AuxInt) != 0 {
5487 break
5488 }
5489 x := v_0
5490 v.copyOf(x)
5491 return true
5492 }
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
5495 for {
5496 if auxIntToInt64(v.AuxInt) != -1 {
5497 break
5498 }
5499 x := v_0
5500 v.reset(OpMIPS64NORconst)
5501 v.AuxInt = int64ToAuxInt(0)
5502 v.AddArg(x)
5503 return true
5504 }
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
5507 for {
5508 c := auxIntToInt64(v.AuxInt)
5509 if v_0.Op != OpMIPS64MOVVconst {
5510 break
5511 }
5512 d := auxIntToInt64(v_0.AuxInt)
5513 v.reset(OpMIPS64MOVVconst)
5514 v.AuxInt = int64ToAuxInt(c ^ d)
5515 return true
5516 }
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
5520 for {
5521 c := auxIntToInt64(v.AuxInt)
5522 if v_0.Op != OpMIPS64XORconst {
5523 break
5524 }
5525 d := auxIntToInt64(v_0.AuxInt)
5526 x := v_0.Args[0]
5527 if !(is32Bit(c ^ d)) {
5528 break
5529 }
5530 v.reset(OpMIPS64XORconst)
5531 v.AuxInt = int64ToAuxInt(c ^ d)
5532 v.AddArg(x)
5533 return true
5534 }
5535 return false
5536 }
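// The OpMod* helpers below all lower the generic modulo ops the same way:
// widen both operands to 64 bits, divide with DIVV (signed) or DIVVU
// (unsigned), and take Select0 of the resulting tuple, which carries the
// remainder half (the quotient half is what the Div* lowerings elsewhere in
// this file take with Select1). A sketch of the shape, reconstructed from the
// code below rather than quoted from the rules file:
//
//	Mod16 x y => Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y))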
5537 func rewriteValueMIPS64_OpMod16(v *Value) bool {
5538 v_1 := v.Args[1]
5539 v_0 := v.Args[0]
5540 b := v.Block
5541 typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
5544 for {
5545 x := v_0
5546 y := v_1
5547 v.reset(OpSelect0)
5548 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5549 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5550 v1.AddArg(x)
5551 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5552 v2.AddArg(y)
5553 v0.AddArg2(v1, v2)
5554 v.AddArg(v0)
5555 return true
5556 }
5557 }
5558 func rewriteValueMIPS64_OpMod16u(v *Value) bool {
5559 v_1 := v.Args[1]
5560 v_0 := v.Args[0]
5561 b := v.Block
5562 typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
5565 for {
5566 x := v_0
5567 y := v_1
5568 v.reset(OpSelect0)
5569 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5570 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5571 v1.AddArg(x)
5572 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5573 v2.AddArg(y)
5574 v0.AddArg2(v1, v2)
5575 v.AddArg(v0)
5576 return true
5577 }
5578 }
5579 func rewriteValueMIPS64_OpMod32(v *Value) bool {
5580 v_1 := v.Args[1]
5581 v_0 := v.Args[0]
5582 b := v.Block
5583 typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
5586 for {
5587 x := v_0
5588 y := v_1
5589 v.reset(OpSelect0)
5590 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5591 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5592 v1.AddArg(x)
5593 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5594 v2.AddArg(y)
5595 v0.AddArg2(v1, v2)
5596 v.AddArg(v0)
5597 return true
5598 }
5599 }
5600 func rewriteValueMIPS64_OpMod32u(v *Value) bool {
5601 v_1 := v.Args[1]
5602 v_0 := v.Args[0]
5603 b := v.Block
5604 typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
5607 for {
5608 x := v_0
5609 y := v_1
5610 v.reset(OpSelect0)
5611 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5612 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5613 v1.AddArg(x)
5614 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5615 v2.AddArg(y)
5616 v0.AddArg2(v1, v2)
5617 v.AddArg(v0)
5618 return true
5619 }
5620 }
5621 func rewriteValueMIPS64_OpMod64(v *Value) bool {
5622 v_1 := v.Args[1]
5623 v_0 := v.Args[0]
5624 b := v.Block
5625 typ := &b.Func.Config.Types
	// match: (Mod64 x y)
	// result: (Select0 (DIVV x y))
5628 for {
5629 x := v_0
5630 y := v_1
5631 v.reset(OpSelect0)
5632 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5633 v0.AddArg2(x, y)
5634 v.AddArg(v0)
5635 return true
5636 }
5637 }
5638 func rewriteValueMIPS64_OpMod64u(v *Value) bool {
5639 v_1 := v.Args[1]
5640 v_0 := v.Args[0]
5641 b := v.Block
5642 typ := &b.Func.Config.Types
	// match: (Mod64u x y)
	// result: (Select0 (DIVVU x y))
5645 for {
5646 x := v_0
5647 y := v_1
5648 v.reset(OpSelect0)
5649 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5650 v0.AddArg2(x, y)
5651 v.AddArg(v0)
5652 return true
5653 }
5654 }
5655 func rewriteValueMIPS64_OpMod8(v *Value) bool {
5656 v_1 := v.Args[1]
5657 v_0 := v.Args[0]
5658 b := v.Block
5659 typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
5662 for {
5663 x := v_0
5664 y := v_1
5665 v.reset(OpSelect0)
5666 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5667 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5668 v1.AddArg(x)
5669 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5670 v2.AddArg(y)
5671 v0.AddArg2(v1, v2)
5672 v.AddArg(v0)
5673 return true
5674 }
5675 }
5676 func rewriteValueMIPS64_OpMod8u(v *Value) bool {
5677 v_1 := v.Args[1]
5678 v_0 := v.Args[0]
5679 b := v.Block
5680 typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
5683 for {
5684 x := v_0
5685 y := v_1
5686 v.reset(OpSelect0)
5687 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5688 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5689 v1.AddArg(x)
5690 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5691 v2.AddArg(y)
5692 v0.AddArg2(v1, v2)
5693 v.AddArg(v0)
5694 return true
5695 }
5696 }
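// OpMove is lowered in tiers chosen by the byte count in AuxInt and the
// alignment of the element type in Aux: small fixed sizes become explicit
// load/store sequences sized to the available alignment, suitably aligned
// copies of 24 up to 8*128 bytes are dispatched to DUFFCOPY, and everything
// else falls back to the LoweredMove loop. The DUFFCOPY AuxInt of
// 16*(128 - s/8) is an offset into the duffcopy routine; the factor of 16
// appears to be the number of code bytes duffcopy spends per 8-byte word
// (an inference from the formula, not restated from the runtime source).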
5697 func rewriteValueMIPS64_OpMove(v *Value) bool {
5698 v_2 := v.Args[2]
5699 v_1 := v.Args[1]
5700 v_0 := v.Args[0]
5701 b := v.Block
5702 config := b.Func.Config
5703 typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
5706 for {
5707 if auxIntToInt64(v.AuxInt) != 0 {
5708 break
5709 }
5710 mem := v_2
5711 v.copyOf(mem)
5712 return true
5713 }
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
5716 for {
5717 if auxIntToInt64(v.AuxInt) != 1 {
5718 break
5719 }
5720 dst := v_0
5721 src := v_1
5722 mem := v_2
5723 v.reset(OpMIPS64MOVBstore)
5724 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5725 v0.AddArg2(src, mem)
5726 v.AddArg3(dst, v0, mem)
5727 return true
5728 }
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
5732 for {
5733 if auxIntToInt64(v.AuxInt) != 2 {
5734 break
5735 }
5736 t := auxToType(v.Aux)
5737 dst := v_0
5738 src := v_1
5739 mem := v_2
5740 if !(t.Alignment()%2 == 0) {
5741 break
5742 }
5743 v.reset(OpMIPS64MOVHstore)
5744 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5745 v0.AddArg2(src, mem)
5746 v.AddArg3(dst, v0, mem)
5747 return true
5748 }
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
5751 for {
5752 if auxIntToInt64(v.AuxInt) != 2 {
5753 break
5754 }
5755 dst := v_0
5756 src := v_1
5757 mem := v_2
5758 v.reset(OpMIPS64MOVBstore)
5759 v.AuxInt = int32ToAuxInt(1)
5760 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5761 v0.AuxInt = int32ToAuxInt(1)
5762 v0.AddArg2(src, mem)
5763 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5764 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5765 v2.AddArg2(src, mem)
5766 v1.AddArg3(dst, v2, mem)
5767 v.AddArg3(dst, v0, v1)
5768 return true
5769 }
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
5773 for {
5774 if auxIntToInt64(v.AuxInt) != 4 {
5775 break
5776 }
5777 t := auxToType(v.Aux)
5778 dst := v_0
5779 src := v_1
5780 mem := v_2
5781 if !(t.Alignment()%4 == 0) {
5782 break
5783 }
5784 v.reset(OpMIPS64MOVWstore)
5785 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5786 v0.AddArg2(src, mem)
5787 v.AddArg3(dst, v0, mem)
5788 return true
5789 }
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
5793 for {
5794 if auxIntToInt64(v.AuxInt) != 4 {
5795 break
5796 }
5797 t := auxToType(v.Aux)
5798 dst := v_0
5799 src := v_1
5800 mem := v_2
5801 if !(t.Alignment()%2 == 0) {
5802 break
5803 }
5804 v.reset(OpMIPS64MOVHstore)
5805 v.AuxInt = int32ToAuxInt(2)
5806 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5807 v0.AuxInt = int32ToAuxInt(2)
5808 v0.AddArg2(src, mem)
5809 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5810 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5811 v2.AddArg2(src, mem)
5812 v1.AddArg3(dst, v2, mem)
5813 v.AddArg3(dst, v0, v1)
5814 return true
5815 }
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
5818 for {
5819 if auxIntToInt64(v.AuxInt) != 4 {
5820 break
5821 }
5822 dst := v_0
5823 src := v_1
5824 mem := v_2
5825 v.reset(OpMIPS64MOVBstore)
5826 v.AuxInt = int32ToAuxInt(3)
5827 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5828 v0.AuxInt = int32ToAuxInt(3)
5829 v0.AddArg2(src, mem)
5830 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5831 v1.AuxInt = int32ToAuxInt(2)
5832 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5833 v2.AuxInt = int32ToAuxInt(2)
5834 v2.AddArg2(src, mem)
5835 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5836 v3.AuxInt = int32ToAuxInt(1)
5837 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5838 v4.AuxInt = int32ToAuxInt(1)
5839 v4.AddArg2(src, mem)
5840 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5841 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5842 v6.AddArg2(src, mem)
5843 v5.AddArg3(dst, v6, mem)
5844 v3.AddArg3(dst, v4, v5)
5845 v1.AddArg3(dst, v2, v3)
5846 v.AddArg3(dst, v0, v1)
5847 return true
5848 }
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
5852 for {
5853 if auxIntToInt64(v.AuxInt) != 8 {
5854 break
5855 }
5856 t := auxToType(v.Aux)
5857 dst := v_0
5858 src := v_1
5859 mem := v_2
5860 if !(t.Alignment()%8 == 0) {
5861 break
5862 }
5863 v.reset(OpMIPS64MOVVstore)
5864 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5865 v0.AddArg2(src, mem)
5866 v.AddArg3(dst, v0, mem)
5867 return true
5868 }
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
5872 for {
5873 if auxIntToInt64(v.AuxInt) != 8 {
5874 break
5875 }
5876 t := auxToType(v.Aux)
5877 dst := v_0
5878 src := v_1
5879 mem := v_2
5880 if !(t.Alignment()%4 == 0) {
5881 break
5882 }
5883 v.reset(OpMIPS64MOVWstore)
5884 v.AuxInt = int32ToAuxInt(4)
5885 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5886 v0.AuxInt = int32ToAuxInt(4)
5887 v0.AddArg2(src, mem)
5888 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5889 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5890 v2.AddArg2(src, mem)
5891 v1.AddArg3(dst, v2, mem)
5892 v.AddArg3(dst, v0, v1)
5893 return true
5894 }
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
5898 for {
5899 if auxIntToInt64(v.AuxInt) != 8 {
5900 break
5901 }
5902 t := auxToType(v.Aux)
5903 dst := v_0
5904 src := v_1
5905 mem := v_2
5906 if !(t.Alignment()%2 == 0) {
5907 break
5908 }
5909 v.reset(OpMIPS64MOVHstore)
5910 v.AuxInt = int32ToAuxInt(6)
5911 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5912 v0.AuxInt = int32ToAuxInt(6)
5913 v0.AddArg2(src, mem)
5914 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5915 v1.AuxInt = int32ToAuxInt(4)
5916 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5917 v2.AuxInt = int32ToAuxInt(4)
5918 v2.AddArg2(src, mem)
5919 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5920 v3.AuxInt = int32ToAuxInt(2)
5921 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5922 v4.AuxInt = int32ToAuxInt(2)
5923 v4.AddArg2(src, mem)
5924 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5925 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5926 v6.AddArg2(src, mem)
5927 v5.AddArg3(dst, v6, mem)
5928 v3.AddArg3(dst, v4, v5)
5929 v1.AddArg3(dst, v2, v3)
5930 v.AddArg3(dst, v0, v1)
5931 return true
5932 }
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
5935 for {
5936 if auxIntToInt64(v.AuxInt) != 3 {
5937 break
5938 }
5939 dst := v_0
5940 src := v_1
5941 mem := v_2
5942 v.reset(OpMIPS64MOVBstore)
5943 v.AuxInt = int32ToAuxInt(2)
5944 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5945 v0.AuxInt = int32ToAuxInt(2)
5946 v0.AddArg2(src, mem)
5947 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5948 v1.AuxInt = int32ToAuxInt(1)
5949 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5950 v2.AuxInt = int32ToAuxInt(1)
5951 v2.AddArg2(src, mem)
5952 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5953 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5954 v4.AddArg2(src, mem)
5955 v3.AddArg3(dst, v4, mem)
5956 v1.AddArg3(dst, v2, v3)
5957 v.AddArg3(dst, v0, v1)
5958 return true
5959 }
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
5963 for {
5964 if auxIntToInt64(v.AuxInt) != 6 {
5965 break
5966 }
5967 t := auxToType(v.Aux)
5968 dst := v_0
5969 src := v_1
5970 mem := v_2
5971 if !(t.Alignment()%2 == 0) {
5972 break
5973 }
5974 v.reset(OpMIPS64MOVHstore)
5975 v.AuxInt = int32ToAuxInt(4)
5976 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5977 v0.AuxInt = int32ToAuxInt(4)
5978 v0.AddArg2(src, mem)
5979 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5980 v1.AuxInt = int32ToAuxInt(2)
5981 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5982 v2.AuxInt = int32ToAuxInt(2)
5983 v2.AddArg2(src, mem)
5984 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5985 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5986 v4.AddArg2(src, mem)
5987 v3.AddArg3(dst, v4, mem)
5988 v1.AddArg3(dst, v2, v3)
5989 v.AddArg3(dst, v0, v1)
5990 return true
5991 }
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
5995 for {
5996 if auxIntToInt64(v.AuxInt) != 12 {
5997 break
5998 }
5999 t := auxToType(v.Aux)
6000 dst := v_0
6001 src := v_1
6002 mem := v_2
6003 if !(t.Alignment()%4 == 0) {
6004 break
6005 }
6006 v.reset(OpMIPS64MOVWstore)
6007 v.AuxInt = int32ToAuxInt(8)
6008 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
6009 v0.AuxInt = int32ToAuxInt(8)
6010 v0.AddArg2(src, mem)
6011 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
6012 v1.AuxInt = int32ToAuxInt(4)
6013 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
6014 v2.AuxInt = int32ToAuxInt(4)
6015 v2.AddArg2(src, mem)
6016 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
6017 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
6018 v4.AddArg2(src, mem)
6019 v3.AddArg3(dst, v4, mem)
6020 v1.AddArg3(dst, v2, v3)
6021 v.AddArg3(dst, v0, v1)
6022 return true
6023 }
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
6027 for {
6028 if auxIntToInt64(v.AuxInt) != 16 {
6029 break
6030 }
6031 t := auxToType(v.Aux)
6032 dst := v_0
6033 src := v_1
6034 mem := v_2
6035 if !(t.Alignment()%8 == 0) {
6036 break
6037 }
6038 v.reset(OpMIPS64MOVVstore)
6039 v.AuxInt = int32ToAuxInt(8)
6040 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6041 v0.AuxInt = int32ToAuxInt(8)
6042 v0.AddArg2(src, mem)
6043 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6044 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6045 v2.AddArg2(src, mem)
6046 v1.AddArg3(dst, v2, mem)
6047 v.AddArg3(dst, v0, v1)
6048 return true
6049 }
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
6053 for {
6054 if auxIntToInt64(v.AuxInt) != 24 {
6055 break
6056 }
6057 t := auxToType(v.Aux)
6058 dst := v_0
6059 src := v_1
6060 mem := v_2
6061 if !(t.Alignment()%8 == 0) {
6062 break
6063 }
6064 v.reset(OpMIPS64MOVVstore)
6065 v.AuxInt = int32ToAuxInt(16)
6066 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6067 v0.AuxInt = int32ToAuxInt(16)
6068 v0.AddArg2(src, mem)
6069 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6070 v1.AuxInt = int32ToAuxInt(8)
6071 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6072 v2.AuxInt = int32ToAuxInt(8)
6073 v2.AddArg2(src, mem)
6074 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6075 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6076 v4.AddArg2(src, mem)
6077 v3.AddArg3(dst, v4, mem)
6078 v1.AddArg3(dst, v2, v3)
6079 v.AddArg3(dst, v0, v1)
6080 return true
6081 }
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
6085 for {
6086 s := auxIntToInt64(v.AuxInt)
6087 t := auxToType(v.Aux)
6088 dst := v_0
6089 src := v_1
6090 mem := v_2
6091 if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
6092 break
6093 }
6094 v.reset(OpMIPS64DUFFCOPY)
6095 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
6096 v.AddArg3(dst, src, mem)
6097 return true
6098 }
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
	// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)
6102 for {
6103 s := auxIntToInt64(v.AuxInt)
6104 t := auxToType(v.Aux)
6105 dst := v_0
6106 src := v_1
6107 mem := v_2
6108 if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
6109 break
6110 }
6111 v.reset(OpMIPS64LoweredMove)
6112 v.AuxInt = int64ToAuxInt(t.Alignment())
6113 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
6114 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
6115 v0.AddArg(src)
6116 v.AddArg4(dst, src, v0, mem)
6117 return true
6118 }
6119 return false
6120 }
6121 func rewriteValueMIPS64_OpMul16(v *Value) bool {
6122 v_1 := v.Args[1]
6123 v_0 := v.Args[0]
6124 b := v.Block
6125 typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (Select1 (MULVU x y))
6128 for {
6129 x := v_0
6130 y := v_1
6131 v.reset(OpSelect1)
6132 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6133 v0.AddArg2(x, y)
6134 v.AddArg(v0)
6135 return true
6136 }
6137 }
6138 func rewriteValueMIPS64_OpMul32(v *Value) bool {
6139 v_1 := v.Args[1]
6140 v_0 := v.Args[0]
6141 b := v.Block
6142 typ := &b.Func.Config.Types
	// match: (Mul32 x y)
	// result: (Select1 (MULVU x y))
6145 for {
6146 x := v_0
6147 y := v_1
6148 v.reset(OpSelect1)
6149 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6150 v0.AddArg2(x, y)
6151 v.AddArg(v0)
6152 return true
6153 }
6154 }
6155 func rewriteValueMIPS64_OpMul64(v *Value) bool {
6156 v_1 := v.Args[1]
6157 v_0 := v.Args[0]
6158 b := v.Block
6159 typ := &b.Func.Config.Types
	// match: (Mul64 x y)
	// result: (Select1 (MULVU x y))
6162 for {
6163 x := v_0
6164 y := v_1
6165 v.reset(OpSelect1)
6166 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6167 v0.AddArg2(x, y)
6168 v.AddArg(v0)
6169 return true
6170 }
6171 }
6172 func rewriteValueMIPS64_OpMul8(v *Value) bool {
6173 v_1 := v.Args[1]
6174 v_0 := v.Args[0]
6175 b := v.Block
6176 typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (Select1 (MULVU x y))
6179 for {
6180 x := v_0
6181 y := v_1
6182 v.reset(OpSelect1)
6183 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6184 v0.AddArg2(x, y)
6185 v.AddArg(v0)
6186 return true
6187 }
6188 }
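// The Neq* helpers below have no dedicated compare instruction to lean on:
// the operands are XORed (after zero-extension for the narrow widths) and the
// result is tested against zero with SGTU, which already produces the 0/1
// boolean the generic op expects. The floating-point variants instead negate
// the equality flag of CMPEQF/CMPEQD via FPFlagFalse.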
6189 func rewriteValueMIPS64_OpNeq16(v *Value) bool {
6190 v_1 := v.Args[1]
6191 v_0 := v.Args[0]
6192 b := v.Block
6193 typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
6196 for {
6197 x := v_0
6198 y := v_1
6199 v.reset(OpMIPS64SGTU)
6200 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6201 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6202 v1.AddArg(x)
6203 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6204 v2.AddArg(y)
6205 v0.AddArg2(v1, v2)
6206 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6207 v3.AuxInt = int64ToAuxInt(0)
6208 v.AddArg2(v0, v3)
6209 return true
6210 }
6211 }
6212 func rewriteValueMIPS64_OpNeq32(v *Value) bool {
6213 v_1 := v.Args[1]
6214 v_0 := v.Args[0]
6215 b := v.Block
6216 typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
6219 for {
6220 x := v_0
6221 y := v_1
6222 v.reset(OpMIPS64SGTU)
6223 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6224 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6225 v1.AddArg(x)
6226 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6227 v2.AddArg(y)
6228 v0.AddArg2(v1, v2)
6229 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6230 v3.AuxInt = int64ToAuxInt(0)
6231 v.AddArg2(v0, v3)
6232 return true
6233 }
6234 }
6235 func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
6236 v_1 := v.Args[1]
6237 v_0 := v.Args[0]
6238 b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
6241 for {
6242 x := v_0
6243 y := v_1
6244 v.reset(OpMIPS64FPFlagFalse)
6245 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
6246 v0.AddArg2(x, y)
6247 v.AddArg(v0)
6248 return true
6249 }
6250 }
6251 func rewriteValueMIPS64_OpNeq64(v *Value) bool {
6252 v_1 := v.Args[1]
6253 v_0 := v.Args[0]
6254 b := v.Block
6255 typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
6258 for {
6259 x := v_0
6260 y := v_1
6261 v.reset(OpMIPS64SGTU)
6262 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6263 v0.AddArg2(x, y)
6264 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6265 v1.AuxInt = int64ToAuxInt(0)
6266 v.AddArg2(v0, v1)
6267 return true
6268 }
6269 }
6270 func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
6271 v_1 := v.Args[1]
6272 v_0 := v.Args[0]
6273 b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
6276 for {
6277 x := v_0
6278 y := v_1
6279 v.reset(OpMIPS64FPFlagFalse)
6280 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
6281 v0.AddArg2(x, y)
6282 v.AddArg(v0)
6283 return true
6284 }
6285 }
6286 func rewriteValueMIPS64_OpNeq8(v *Value) bool {
6287 v_1 := v.Args[1]
6288 v_0 := v.Args[0]
6289 b := v.Block
6290 typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
6293 for {
6294 x := v_0
6295 y := v_1
6296 v.reset(OpMIPS64SGTU)
6297 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6298 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6299 v1.AddArg(x)
6300 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6301 v2.AddArg(y)
6302 v0.AddArg2(v1, v2)
6303 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6304 v3.AuxInt = int64ToAuxInt(0)
6305 v.AddArg2(v0, v3)
6306 return true
6307 }
6308 }
6309 func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
6310 v_1 := v.Args[1]
6311 v_0 := v.Args[0]
6312 b := v.Block
6313 typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
6316 for {
6317 x := v_0
6318 y := v_1
6319 v.reset(OpMIPS64SGTU)
6320 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6321 v0.AddArg2(x, y)
6322 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6323 v1.AuxInt = int64ToAuxInt(0)
6324 v.AddArg2(v0, v1)
6325 return true
6326 }
6327 }
6328 func rewriteValueMIPS64_OpNot(v *Value) bool {
6329 v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
6332 for {
6333 x := v_0
6334 v.reset(OpMIPS64XORconst)
6335 v.AuxInt = int64ToAuxInt(1)
6336 v.AddArg(x)
6337 return true
6338 }
6339 }
6340 func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
6341 v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVVaddr [int32(off)] ptr)
6345 for {
6346 off := auxIntToInt64(v.AuxInt)
6347 ptr := v_0
6348 if ptr.Op != OpSP || !(is32Bit(off)) {
6349 break
6350 }
6351 v.reset(OpMIPS64MOVVaddr)
6352 v.AuxInt = int32ToAuxInt(int32(off))
6353 v.AddArg(ptr)
6354 return true
6355 }
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
6358 for {
6359 off := auxIntToInt64(v.AuxInt)
6360 ptr := v_0
6361 v.reset(OpMIPS64ADDVconst)
6362 v.AuxInt = int64ToAuxInt(off)
6363 v.AddArg(ptr)
6364 return true
6365 }
6366 }
6367 func rewriteValueMIPS64_OpPanicBounds(v *Value) bool {
6368 v_2 := v.Args[2]
6369 v_1 := v.Args[1]
6370 v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
6374 for {
6375 kind := auxIntToInt64(v.AuxInt)
6376 x := v_0
6377 y := v_1
6378 mem := v_2
6379 if !(boundsABI(kind) == 0) {
6380 break
6381 }
6382 v.reset(OpMIPS64LoweredPanicBoundsA)
6383 v.AuxInt = int64ToAuxInt(kind)
6384 v.AddArg3(x, y, mem)
6385 return true
6386 }
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
6390 for {
6391 kind := auxIntToInt64(v.AuxInt)
6392 x := v_0
6393 y := v_1
6394 mem := v_2
6395 if !(boundsABI(kind) == 1) {
6396 break
6397 }
6398 v.reset(OpMIPS64LoweredPanicBoundsB)
6399 v.AuxInt = int64ToAuxInt(kind)
6400 v.AddArg3(x, y, mem)
6401 return true
6402 }
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
6406 for {
6407 kind := auxIntToInt64(v.AuxInt)
6408 x := v_0
6409 y := v_1
6410 mem := v_2
6411 if !(boundsABI(kind) == 2) {
6412 break
6413 }
6414 v.reset(OpMIPS64LoweredPanicBoundsC)
6415 v.AuxInt = int64ToAuxInt(kind)
6416 v.AddArg3(x, y, mem)
6417 return true
6418 }
6419 return false
6420 }
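// MIPS64 has no rotate instruction, so the RotateLeft* helpers below only
// fire when the rotate count is a MOVVconst, and they rebuild the rotate as
// an OR of a left shift by c&(width-1) and an unsigned right shift by
// -c&(width-1).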
6421 func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
6422 v_1 := v.Args[1]
6423 v_0 := v.Args[0]
6424 b := v.Block
6425 typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
6428 for {
6429 t := v.Type
6430 x := v_0
6431 if v_1.Op != OpMIPS64MOVVconst {
6432 break
6433 }
6434 c := auxIntToInt64(v_1.AuxInt)
6435 v.reset(OpOr16)
6436 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6437 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6438 v1.AuxInt = int64ToAuxInt(c & 15)
6439 v0.AddArg2(x, v1)
6440 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6441 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6442 v3.AuxInt = int64ToAuxInt(-c & 15)
6443 v2.AddArg2(x, v3)
6444 v.AddArg2(v0, v2)
6445 return true
6446 }
6447 return false
6448 }
6449 func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
6450 v_1 := v.Args[1]
6451 v_0 := v.Args[0]
6452 b := v.Block
6453 typ := &b.Func.Config.Types
	// match: (RotateLeft32 <t> x (MOVVconst [c]))
	// result: (Or32 (Lsh32x64 <t> x (MOVVconst [c&31])) (Rsh32Ux64 <t> x (MOVVconst [-c&31])))
6456 for {
6457 t := v.Type
6458 x := v_0
6459 if v_1.Op != OpMIPS64MOVVconst {
6460 break
6461 }
6462 c := auxIntToInt64(v_1.AuxInt)
6463 v.reset(OpOr32)
6464 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6465 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6466 v1.AuxInt = int64ToAuxInt(c & 31)
6467 v0.AddArg2(x, v1)
6468 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6469 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6470 v3.AuxInt = int64ToAuxInt(-c & 31)
6471 v2.AddArg2(x, v3)
6472 v.AddArg2(v0, v2)
6473 return true
6474 }
6475 return false
6476 }
6477 func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
6478 v_1 := v.Args[1]
6479 v_0 := v.Args[0]
6480 b := v.Block
6481 typ := &b.Func.Config.Types
	// match: (RotateLeft64 <t> x (MOVVconst [c]))
	// result: (Or64 (Lsh64x64 <t> x (MOVVconst [c&63])) (Rsh64Ux64 <t> x (MOVVconst [-c&63])))
6484 for {
6485 t := v.Type
6486 x := v_0
6487 if v_1.Op != OpMIPS64MOVVconst {
6488 break
6489 }
6490 c := auxIntToInt64(v_1.AuxInt)
6491 v.reset(OpOr64)
6492 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6493 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6494 v1.AuxInt = int64ToAuxInt(c & 63)
6495 v0.AddArg2(x, v1)
6496 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6497 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6498 v3.AuxInt = int64ToAuxInt(-c & 63)
6499 v2.AddArg2(x, v3)
6500 v.AddArg2(v0, v2)
6501 return true
6502 }
6503 return false
6504 }
6505 func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
6506 v_1 := v.Args[1]
6507 v_0 := v.Args[0]
6508 b := v.Block
6509 typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
6512 for {
6513 t := v.Type
6514 x := v_0
6515 if v_1.Op != OpMIPS64MOVVconst {
6516 break
6517 }
6518 c := auxIntToInt64(v_1.AuxInt)
6519 v.reset(OpOr8)
6520 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6521 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6522 v1.AuxInt = int64ToAuxInt(c & 7)
6523 v0.AddArg2(x, v1)
6524 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6525 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6526 v3.AuxInt = int64ToAuxInt(-c & 7)
6527 v2.AddArg2(x, v3)
6528 v.AddArg2(v0, v2)
6529 return true
6530 }
6531 return false
6532 }
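// The Rsh*Ux* and Rsh*x* helpers below implement Go's shift semantics for
// counts that may equal or exceed the operand width. Unsigned right shifts
// AND the SRLV result with NEGV(SGTU 64 count): the mask is all ones while
// the count is in range and zero otherwise, so oversized shifts yield 0.
// Signed right shifts OR the count with NEGV(SGTU count 63), turning any
// oversized count into all ones, which the hardware shift is expected to
// treat like 63 (the same >=64 -> 63 clamping the SRAV rewrite above applies
// to constant counts), so the result is filled with the sign bit as required.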
6533 func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
6534 v_1 := v.Args[1]
6535 v_0 := v.Args[0]
6536 b := v.Block
6537 typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
6540 for {
6541 t := v.Type
6542 x := v_0
6543 y := v_1
6544 v.reset(OpMIPS64AND)
6545 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6546 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6547 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6548 v2.AuxInt = int64ToAuxInt(64)
6549 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6550 v3.AddArg(y)
6551 v1.AddArg2(v2, v3)
6552 v0.AddArg(v1)
6553 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6554 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6555 v5.AddArg(x)
6556 v4.AddArg2(v5, v3)
6557 v.AddArg2(v0, v4)
6558 return true
6559 }
6560 }
6561 func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
6562 v_1 := v.Args[1]
6563 v_0 := v.Args[0]
6564 b := v.Block
6565 typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
6568 for {
6569 t := v.Type
6570 x := v_0
6571 y := v_1
6572 v.reset(OpMIPS64AND)
6573 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6574 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6575 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6576 v2.AuxInt = int64ToAuxInt(64)
6577 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6578 v3.AddArg(y)
6579 v1.AddArg2(v2, v3)
6580 v0.AddArg(v1)
6581 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6582 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6583 v5.AddArg(x)
6584 v4.AddArg2(v5, v3)
6585 v.AddArg2(v0, v4)
6586 return true
6587 }
6588 }
6589 func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
6590 v_1 := v.Args[1]
6591 v_0 := v.Args[0]
6592 b := v.Block
6593 typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
6596 for {
6597 t := v.Type
6598 x := v_0
6599 y := v_1
6600 v.reset(OpMIPS64AND)
6601 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6602 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6603 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6604 v2.AuxInt = int64ToAuxInt(64)
6605 v1.AddArg2(v2, y)
6606 v0.AddArg(v1)
6607 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6608 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6609 v4.AddArg(x)
6610 v3.AddArg2(v4, y)
6611 v.AddArg2(v0, v3)
6612 return true
6613 }
6614 }
6615 func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
6616 v_1 := v.Args[1]
6617 v_0 := v.Args[0]
6618 b := v.Block
6619 typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
6622 for {
6623 t := v.Type
6624 x := v_0
6625 y := v_1
6626 v.reset(OpMIPS64AND)
6627 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6628 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6629 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6630 v2.AuxInt = int64ToAuxInt(64)
6631 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6632 v3.AddArg(y)
6633 v1.AddArg2(v2, v3)
6634 v0.AddArg(v1)
6635 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6636 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6637 v5.AddArg(x)
6638 v4.AddArg2(v5, v3)
6639 v.AddArg2(v0, v4)
6640 return true
6641 }
6642 }
6643 func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
6644 v_1 := v.Args[1]
6645 v_0 := v.Args[0]
6646 b := v.Block
6647 typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
6650 for {
6651 t := v.Type
6652 x := v_0
6653 y := v_1
6654 v.reset(OpMIPS64SRAV)
6655 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6656 v0.AddArg(x)
6657 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6658 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6659 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6660 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6661 v4.AddArg(y)
6662 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6663 v5.AuxInt = int64ToAuxInt(63)
6664 v3.AddArg2(v4, v5)
6665 v2.AddArg(v3)
6666 v1.AddArg2(v2, v4)
6667 v.AddArg2(v0, v1)
6668 return true
6669 }
6670 }
6671 func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
6672 v_1 := v.Args[1]
6673 v_0 := v.Args[0]
6674 b := v.Block
6675 typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
6678 for {
6679 t := v.Type
6680 x := v_0
6681 y := v_1
6682 v.reset(OpMIPS64SRAV)
6683 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6684 v0.AddArg(x)
6685 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6686 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6687 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6688 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6689 v4.AddArg(y)
6690 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6691 v5.AuxInt = int64ToAuxInt(63)
6692 v3.AddArg2(v4, v5)
6693 v2.AddArg(v3)
6694 v1.AddArg2(v2, v4)
6695 v.AddArg2(v0, v1)
6696 return true
6697 }
6698 }
6699 func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
6700 v_1 := v.Args[1]
6701 v_0 := v.Args[0]
6702 b := v.Block
6703 typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
6706 for {
6707 t := v.Type
6708 x := v_0
6709 y := v_1
6710 v.reset(OpMIPS64SRAV)
6711 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6712 v0.AddArg(x)
6713 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6714 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6715 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6716 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6717 v4.AuxInt = int64ToAuxInt(63)
6718 v3.AddArg2(y, v4)
6719 v2.AddArg(v3)
6720 v1.AddArg2(v2, y)
6721 v.AddArg2(v0, v1)
6722 return true
6723 }
6724 }
6725 func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
6726 v_1 := v.Args[1]
6727 v_0 := v.Args[0]
6728 b := v.Block
6729 typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
6732 for {
6733 t := v.Type
6734 x := v_0
6735 y := v_1
6736 v.reset(OpMIPS64SRAV)
6737 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6738 v0.AddArg(x)
6739 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6740 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6741 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6742 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6743 v4.AddArg(y)
6744 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6745 v5.AuxInt = int64ToAuxInt(63)
6746 v3.AddArg2(v4, v5)
6747 v2.AddArg(v3)
6748 v1.AddArg2(v2, v4)
6749 v.AddArg2(v0, v1)
6750 return true
6751 }
6752 }
6753 func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
6754 v_1 := v.Args[1]
6755 v_0 := v.Args[0]
6756 b := v.Block
6757 typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
6760 for {
6761 t := v.Type
6762 x := v_0
6763 y := v_1
6764 v.reset(OpMIPS64AND)
6765 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6766 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6767 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6768 v2.AuxInt = int64ToAuxInt(64)
6769 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6770 v3.AddArg(y)
6771 v1.AddArg2(v2, v3)
6772 v0.AddArg(v1)
6773 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6774 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6775 v5.AddArg(x)
6776 v4.AddArg2(v5, v3)
6777 v.AddArg2(v0, v4)
6778 return true
6779 }
6780 }
6781 func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
6782 v_1 := v.Args[1]
6783 v_0 := v.Args[0]
6784 b := v.Block
6785 typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
6788 for {
6789 t := v.Type
6790 x := v_0
6791 y := v_1
6792 v.reset(OpMIPS64AND)
6793 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6794 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6795 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6796 v2.AuxInt = int64ToAuxInt(64)
6797 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6798 v3.AddArg(y)
6799 v1.AddArg2(v2, v3)
6800 v0.AddArg(v1)
6801 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6802 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6803 v5.AddArg(x)
6804 v4.AddArg2(v5, v3)
6805 v.AddArg2(v0, v4)
6806 return true
6807 }
6808 }
6809 func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
6810 v_1 := v.Args[1]
6811 v_0 := v.Args[0]
6812 b := v.Block
6813 typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
6816 for {
6817 t := v.Type
6818 x := v_0
6819 y := v_1
6820 v.reset(OpMIPS64AND)
6821 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6822 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6823 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6824 v2.AuxInt = int64ToAuxInt(64)
6825 v1.AddArg2(v2, y)
6826 v0.AddArg(v1)
6827 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6828 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6829 v4.AddArg(x)
6830 v3.AddArg2(v4, y)
6831 v.AddArg2(v0, v3)
6832 return true
6833 }
6834 }
6835 func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
6836 v_1 := v.Args[1]
6837 v_0 := v.Args[0]
6838 b := v.Block
6839 typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
6842 for {
6843 t := v.Type
6844 x := v_0
6845 y := v_1
6846 v.reset(OpMIPS64AND)
6847 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6848 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6849 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6850 v2.AuxInt = int64ToAuxInt(64)
6851 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6852 v3.AddArg(y)
6853 v1.AddArg2(v2, v3)
6854 v0.AddArg(v1)
6855 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6856 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6857 v5.AddArg(x)
6858 v4.AddArg2(v5, v3)
6859 v.AddArg2(v0, v4)
6860 return true
6861 }
6862 }
6863 func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
6864 v_1 := v.Args[1]
6865 v_0 := v.Args[0]
6866 b := v.Block
6867 typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
6870 for {
6871 t := v.Type
6872 x := v_0
6873 y := v_1
6874 v.reset(OpMIPS64SRAV)
6875 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6876 v0.AddArg(x)
6877 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6878 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6879 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6880 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6881 v4.AddArg(y)
6882 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6883 v5.AuxInt = int64ToAuxInt(63)
6884 v3.AddArg2(v4, v5)
6885 v2.AddArg(v3)
6886 v1.AddArg2(v2, v4)
6887 v.AddArg2(v0, v1)
6888 return true
6889 }
6890 }
6891 func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
6892 v_1 := v.Args[1]
6893 v_0 := v.Args[0]
6894 b := v.Block
6895 typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
6898 for {
6899 t := v.Type
6900 x := v_0
6901 y := v_1
6902 v.reset(OpMIPS64SRAV)
6903 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6904 v0.AddArg(x)
6905 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6906 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6907 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6908 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6909 v4.AddArg(y)
6910 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6911 v5.AuxInt = int64ToAuxInt(63)
6912 v3.AddArg2(v4, v5)
6913 v2.AddArg(v3)
6914 v1.AddArg2(v2, v4)
6915 v.AddArg2(v0, v1)
6916 return true
6917 }
6918 }
6919 func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
6920 v_1 := v.Args[1]
6921 v_0 := v.Args[0]
6922 b := v.Block
6923 typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
6926 for {
6927 t := v.Type
6928 x := v_0
6929 y := v_1
6930 v.reset(OpMIPS64SRAV)
6931 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6932 v0.AddArg(x)
6933 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6934 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6935 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6936 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6937 v4.AuxInt = int64ToAuxInt(63)
6938 v3.AddArg2(y, v4)
6939 v2.AddArg(v3)
6940 v1.AddArg2(v2, y)
6941 v.AddArg2(v0, v1)
6942 return true
6943 }
6944 }
6945 func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
6946 v_1 := v.Args[1]
6947 v_0 := v.Args[0]
6948 b := v.Block
6949 typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
6952 for {
6953 t := v.Type
6954 x := v_0
6955 y := v_1
6956 v.reset(OpMIPS64SRAV)
6957 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6958 v0.AddArg(x)
6959 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6960 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6961 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6962 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6963 v4.AddArg(y)
6964 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6965 v5.AuxInt = int64ToAuxInt(63)
6966 v3.AddArg2(v4, v5)
6967 v2.AddArg(v3)
6968 v1.AddArg2(v2, v4)
6969 v.AddArg2(v0, v1)
6970 return true
6971 }
6972 }
6973 func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
6974 v_1 := v.Args[1]
6975 v_0 := v.Args[0]
6976 b := v.Block
6977 typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
6980 for {
6981 t := v.Type
6982 x := v_0
6983 y := v_1
6984 v.reset(OpMIPS64AND)
6985 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6986 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6987 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6988 v2.AuxInt = int64ToAuxInt(64)
6989 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6990 v3.AddArg(y)
6991 v1.AddArg2(v2, v3)
6992 v0.AddArg(v1)
6993 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6994 v4.AddArg2(x, v3)
6995 v.AddArg2(v0, v4)
6996 return true
6997 }
6998 }
6999 func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
7000 v_1 := v.Args[1]
7001 v_0 := v.Args[0]
7002 b := v.Block
7003 typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
7006 for {
7007 t := v.Type
7008 x := v_0
7009 y := v_1
7010 v.reset(OpMIPS64AND)
7011 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7012 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7013 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7014 v2.AuxInt = int64ToAuxInt(64)
7015 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7016 v3.AddArg(y)
7017 v1.AddArg2(v2, v3)
7018 v0.AddArg(v1)
7019 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7020 v4.AddArg2(x, v3)
7021 v.AddArg2(v0, v4)
7022 return true
7023 }
7024 }
7025 func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
7026 v_1 := v.Args[1]
7027 v_0 := v.Args[0]
7028 b := v.Block
7029 typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
7032 for {
7033 t := v.Type
7034 x := v_0
7035 y := v_1
7036 v.reset(OpMIPS64AND)
7037 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7038 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7039 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7040 v2.AuxInt = int64ToAuxInt(64)
7041 v1.AddArg2(v2, y)
7042 v0.AddArg(v1)
7043 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7044 v3.AddArg2(x, y)
7045 v.AddArg2(v0, v3)
7046 return true
7047 }
7048 }
7049 func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
7050 v_1 := v.Args[1]
7051 v_0 := v.Args[0]
7052 b := v.Block
7053 typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
7056 for {
7057 t := v.Type
7058 x := v_0
7059 y := v_1
7060 v.reset(OpMIPS64AND)
7061 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7062 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7063 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7064 v2.AuxInt = int64ToAuxInt(64)
7065 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7066 v3.AddArg(y)
7067 v1.AddArg2(v2, v3)
7068 v0.AddArg(v1)
7069 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7070 v4.AddArg2(x, v3)
7071 v.AddArg2(v0, v4)
7072 return true
7073 }
7074 }
7075 func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
7076 v_1 := v.Args[1]
7077 v_0 := v.Args[0]
7078 b := v.Block
7079 typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
7082 for {
7083 t := v.Type
7084 x := v_0
7085 y := v_1
7086 v.reset(OpMIPS64SRAV)
7087 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7088 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7089 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7090 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7091 v3.AddArg(y)
7092 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7093 v4.AuxInt = int64ToAuxInt(63)
7094 v2.AddArg2(v3, v4)
7095 v1.AddArg(v2)
7096 v0.AddArg2(v1, v3)
7097 v.AddArg2(x, v0)
7098 return true
7099 }
7100 }
7101 func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
7102 v_1 := v.Args[1]
7103 v_0 := v.Args[0]
7104 b := v.Block
7105 typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
7108 for {
7109 t := v.Type
7110 x := v_0
7111 y := v_1
7112 v.reset(OpMIPS64SRAV)
7113 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7114 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7115 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7116 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7117 v3.AddArg(y)
7118 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7119 v4.AuxInt = int64ToAuxInt(63)
7120 v2.AddArg2(v3, v4)
7121 v1.AddArg(v2)
7122 v0.AddArg2(v1, v3)
7123 v.AddArg2(x, v0)
7124 return true
7125 }
7126 }
7127 func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
7128 v_1 := v.Args[1]
7129 v_0 := v.Args[0]
7130 b := v.Block
7131 typ := &b.Func.Config.Types
	// match: (Rsh64x64 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
7134 for {
7135 t := v.Type
7136 x := v_0
7137 y := v_1
7138 v.reset(OpMIPS64SRAV)
7139 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7140 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7141 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7142 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7143 v3.AuxInt = int64ToAuxInt(63)
7144 v2.AddArg2(y, v3)
7145 v1.AddArg(v2)
7146 v0.AddArg2(v1, y)
7147 v.AddArg2(x, v0)
7148 return true
7149 }
7150 }
7151 func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
7152 v_1 := v.Args[1]
7153 v_0 := v.Args[0]
7154 b := v.Block
7155 typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
7158 for {
7159 t := v.Type
7160 x := v_0
7161 y := v_1
7162 v.reset(OpMIPS64SRAV)
7163 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7164 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7165 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7166 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7167 v3.AddArg(y)
7168 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7169 v4.AuxInt = int64ToAuxInt(63)
7170 v2.AddArg2(v3, v4)
7171 v1.AddArg(v2)
7172 v0.AddArg2(v1, v3)
7173 v.AddArg2(x, v0)
7174 return true
7175 }
7176 }
7177 func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
7178 v_1 := v.Args[1]
7179 v_0 := v.Args[0]
7180 b := v.Block
7181 typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
7184 for {
7185 t := v.Type
7186 x := v_0
7187 y := v_1
7188 v.reset(OpMIPS64AND)
7189 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7190 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7191 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7192 v2.AuxInt = int64ToAuxInt(64)
7193 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7194 v3.AddArg(y)
7195 v1.AddArg2(v2, v3)
7196 v0.AddArg(v1)
7197 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7198 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7199 v5.AddArg(x)
7200 v4.AddArg2(v5, v3)
7201 v.AddArg2(v0, v4)
7202 return true
7203 }
7204 }
7205 func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
7206 v_1 := v.Args[1]
7207 v_0 := v.Args[0]
7208 b := v.Block
7209 typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
7212 for {
7213 t := v.Type
7214 x := v_0
7215 y := v_1
7216 v.reset(OpMIPS64AND)
7217 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7218 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7219 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7220 v2.AuxInt = int64ToAuxInt(64)
7221 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7222 v3.AddArg(y)
7223 v1.AddArg2(v2, v3)
7224 v0.AddArg(v1)
7225 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7226 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7227 v5.AddArg(x)
7228 v4.AddArg2(v5, v3)
7229 v.AddArg2(v0, v4)
7230 return true
7231 }
7232 }
7233 func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
7234 v_1 := v.Args[1]
7235 v_0 := v.Args[0]
7236 b := v.Block
7237 typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
7240 for {
7241 t := v.Type
7242 x := v_0
7243 y := v_1
7244 v.reset(OpMIPS64AND)
7245 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7246 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7247 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7248 v2.AuxInt = int64ToAuxInt(64)
7249 v1.AddArg2(v2, y)
7250 v0.AddArg(v1)
7251 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7252 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7253 v4.AddArg(x)
7254 v3.AddArg2(v4, y)
7255 v.AddArg2(v0, v3)
7256 return true
7257 }
7258 }
7259 func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
7260 v_1 := v.Args[1]
7261 v_0 := v.Args[0]
7262 b := v.Block
7263 typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
7266 for {
7267 t := v.Type
7268 x := v_0
7269 y := v_1
7270 v.reset(OpMIPS64AND)
7271 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7272 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7273 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7274 v2.AuxInt = int64ToAuxInt(64)
7275 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7276 v3.AddArg(y)
7277 v1.AddArg2(v2, v3)
7278 v0.AddArg(v1)
7279 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7280 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7281 v5.AddArg(x)
7282 v4.AddArg2(v5, v3)
7283 v.AddArg2(v0, v4)
7284 return true
7285 }
7286 }
7287 func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
7288 v_1 := v.Args[1]
7289 v_0 := v.Args[0]
7290 b := v.Block
7291 typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
7294 for {
7295 t := v.Type
7296 x := v_0
7297 y := v_1
7298 v.reset(OpMIPS64SRAV)
7299 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7300 v0.AddArg(x)
7301 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7302 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7303 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7304 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7305 v4.AddArg(y)
7306 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7307 v5.AuxInt = int64ToAuxInt(63)
7308 v3.AddArg2(v4, v5)
7309 v2.AddArg(v3)
7310 v1.AddArg2(v2, v4)
7311 v.AddArg2(v0, v1)
7312 return true
7313 }
7314 }
7315 func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
7316 v_1 := v.Args[1]
7317 v_0 := v.Args[0]
7318 b := v.Block
7319 typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
7322 for {
7323 t := v.Type
7324 x := v_0
7325 y := v_1
7326 v.reset(OpMIPS64SRAV)
7327 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7328 v0.AddArg(x)
7329 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7330 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7331 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7332 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7333 v4.AddArg(y)
7334 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7335 v5.AuxInt = int64ToAuxInt(63)
7336 v3.AddArg2(v4, v5)
7337 v2.AddArg(v3)
7338 v1.AddArg2(v2, v4)
7339 v.AddArg2(v0, v1)
7340 return true
7341 }
7342 }
7343 func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
7344 v_1 := v.Args[1]
7345 v_0 := v.Args[0]
7346 b := v.Block
7347 typ := &b.Func.Config.Types
7348
7349
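	// match: (Rsh8x64 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))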
7350 for {
7351 t := v.Type
7352 x := v_0
7353 y := v_1
7354 v.reset(OpMIPS64SRAV)
7355 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7356 v0.AddArg(x)
7357 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7358 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7359 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7360 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7361 v4.AuxInt = int64ToAuxInt(63)
7362 v3.AddArg2(y, v4)
7363 v2.AddArg(v3)
7364 v1.AddArg2(v2, y)
7365 v.AddArg2(v0, v1)
7366 return true
7367 }
7368 }
7369 func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
7370 v_1 := v.Args[1]
7371 v_0 := v.Args[0]
7372 b := v.Block
7373 typ := &b.Func.Config.Types
7374
7375
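	// match: (Rsh8x8 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))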
7376 for {
7377 t := v.Type
7378 x := v_0
7379 y := v_1
7380 v.reset(OpMIPS64SRAV)
7381 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7382 v0.AddArg(x)
7383 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7384 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7385 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7386 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7387 v4.AddArg(y)
7388 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7389 v5.AuxInt = int64ToAuxInt(63)
7390 v3.AddArg2(v4, v5)
7391 v2.AddArg(v3)
7392 v1.AddArg2(v2, v4)
7393 v.AddArg2(v0, v1)
7394 return true
7395 }
7396 }
7397 func rewriteValueMIPS64_OpSelect0(v *Value) bool {
7398 v_0 := v.Args[0]
7399 b := v.Block
7400 typ := &b.Func.Config.Types
7401
7402
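	// match: (Select0 (Mul64uover x y))
	// result: (Select1 <typ.UInt64> (MULVU x y))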
7403 for {
7404 if v_0.Op != OpMul64uover {
7405 break
7406 }
7407 y := v_0.Args[1]
7408 x := v_0.Args[0]
7409 v.reset(OpSelect1)
7410 v.Type = typ.UInt64
7411 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7412 v0.AddArg2(x, y)
7413 v.AddArg(v0)
7414 return true
7415 }
7416
7417
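	// match: (Select0 <t> (Add64carry x y c))
	// result: (ADDV (ADDV <t> x y) c)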
7418 for {
7419 t := v.Type
7420 if v_0.Op != OpAdd64carry {
7421 break
7422 }
7423 c := v_0.Args[2]
7424 x := v_0.Args[0]
7425 y := v_0.Args[1]
7426 v.reset(OpMIPS64ADDV)
7427 v0 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7428 v0.AddArg2(x, y)
7429 v.AddArg2(v0, c)
7430 return true
7431 }
7432
7433
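	// match: (Select0 <t> (Sub64borrow x y c))
	// result: (SUBV (SUBV <t> x y) c)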
7434 for {
7435 t := v.Type
7436 if v_0.Op != OpSub64borrow {
7437 break
7438 }
7439 c := v_0.Args[2]
7440 x := v_0.Args[0]
7441 y := v_0.Args[1]
7442 v.reset(OpMIPS64SUBV)
7443 v0 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7444 v0.AddArg2(x, y)
7445 v.AddArg2(v0, c)
7446 return true
7447 }
7448
7449
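	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// result: (MOVVconst [0])
	// Select0 of DIVVU is the remainder; x % 1 is always 0.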
7450 for {
7451 if v_0.Op != OpMIPS64DIVVU {
7452 break
7453 }
7454 _ = v_0.Args[1]
7455 v_0_1 := v_0.Args[1]
7456 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7457 break
7458 }
7459 v.reset(OpMIPS64MOVVconst)
7460 v.AuxInt = int64ToAuxInt(0)
7461 return true
7462 }
7463
7464
7465
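	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)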
7466 for {
7467 if v_0.Op != OpMIPS64DIVVU {
7468 break
7469 }
7470 _ = v_0.Args[1]
7471 x := v_0.Args[0]
7472 v_0_1 := v_0.Args[1]
7473 if v_0_1.Op != OpMIPS64MOVVconst {
7474 break
7475 }
7476 c := auxIntToInt64(v_0_1.AuxInt)
7477 if !(isPowerOfTwo(c)) {
7478 break
7479 }
7480 v.reset(OpMIPS64ANDconst)
7481 v.AuxInt = int64ToAuxInt(c - 1)
7482 v.AddArg(x)
7483 return true
7484 }
7485
7486
7487
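	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c%d])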
7488 for {
7489 if v_0.Op != OpMIPS64DIVV {
7490 break
7491 }
7492 _ = v_0.Args[1]
7493 v_0_0 := v_0.Args[0]
7494 if v_0_0.Op != OpMIPS64MOVVconst {
7495 break
7496 }
7497 c := auxIntToInt64(v_0_0.AuxInt)
7498 v_0_1 := v_0.Args[1]
7499 if v_0_1.Op != OpMIPS64MOVVconst {
7500 break
7501 }
7502 d := auxIntToInt64(v_0_1.AuxInt)
7503 if !(d != 0) {
7504 break
7505 }
7506 v.reset(OpMIPS64MOVVconst)
7507 v.AuxInt = int64ToAuxInt(c % d)
7508 return true
7509 }
7510
7511
7512
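	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])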
7513 for {
7514 if v_0.Op != OpMIPS64DIVVU {
7515 break
7516 }
7517 _ = v_0.Args[1]
7518 v_0_0 := v_0.Args[0]
7519 if v_0_0.Op != OpMIPS64MOVVconst {
7520 break
7521 }
7522 c := auxIntToInt64(v_0_0.AuxInt)
7523 v_0_1 := v_0.Args[1]
7524 if v_0_1.Op != OpMIPS64MOVVconst {
7525 break
7526 }
7527 d := auxIntToInt64(v_0_1.AuxInt)
7528 if !(d != 0) {
7529 break
7530 }
7531 v.reset(OpMIPS64MOVVconst)
7532 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
7533 return true
7534 }
7535 return false
7536 }
7537 func rewriteValueMIPS64_OpSelect1(v *Value) bool {
7538 v_0 := v.Args[0]
7539 b := v.Block
7540 typ := &b.Func.Config.Types
7541
7542
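	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0]))
	// The overflow flag is set iff the high half of the unsigned product is nonzero.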
7543 for {
7544 if v_0.Op != OpMul64uover {
7545 break
7546 }
7547 y := v_0.Args[1]
7548 x := v_0.Args[0]
7549 v.reset(OpMIPS64SGTU)
7550 v.Type = typ.Bool
7551 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
7552 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7553 v1.AddArg2(x, y)
7554 v0.AddArg(v1)
7555 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7556 v2.AuxInt = int64ToAuxInt(0)
7557 v.AddArg2(v0, v2)
7558 return true
7559 }
7560
7561
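	// match: (Select1 <t> (Add64carry x y c))
	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
	// The carry-out is set if either addition wraps around.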
7562 for {
7563 t := v.Type
7564 if v_0.Op != OpAdd64carry {
7565 break
7566 }
7567 c := v_0.Args[2]
7568 x := v_0.Args[0]
7569 y := v_0.Args[1]
7570 v.reset(OpMIPS64OR)
7571 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7572 s := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7573 s.AddArg2(x, y)
7574 v0.AddArg2(x, s)
7575 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7576 v3 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7577 v3.AddArg2(s, c)
7578 v2.AddArg2(s, v3)
7579 v.AddArg2(v0, v2)
7580 return true
7581 }
7582
7583
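	// match: (Select1 <t> (Sub64borrow x y c))
	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))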
7584 for {
7585 t := v.Type
7586 if v_0.Op != OpSub64borrow {
7587 break
7588 }
7589 c := v_0.Args[2]
7590 x := v_0.Args[0]
7591 y := v_0.Args[1]
7592 v.reset(OpMIPS64OR)
7593 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7594 s := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7595 s.AddArg2(x, y)
7596 v0.AddArg2(s, x)
7597 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7598 v3 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7599 v3.AddArg2(s, c)
7600 v2.AddArg2(v3, s)
7601 v.AddArg2(v0, v2)
7602 return true
7603 }
7604
7605
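	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// result: (NEGV x)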
7606 for {
7607 if v_0.Op != OpMIPS64MULVU {
7608 break
7609 }
7610 _ = v_0.Args[1]
7611 v_0_0 := v_0.Args[0]
7612 v_0_1 := v_0.Args[1]
7613 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7614 x := v_0_0
7615 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
7616 continue
7617 }
7618 v.reset(OpMIPS64NEGV)
7619 v.AddArg(x)
7620 return true
7621 }
7622 break
7623 }
7624
7625
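	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// result: (MOVVconst [0])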
7626 for {
7627 if v_0.Op != OpMIPS64MULVU {
7628 break
7629 }
7630 _ = v_0.Args[1]
7631 v_0_0 := v_0.Args[0]
7632 v_0_1 := v_0.Args[1]
7633 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7634 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7635 continue
7636 }
7637 v.reset(OpMIPS64MOVVconst)
7638 v.AuxInt = int64ToAuxInt(0)
7639 return true
7640 }
7641 break
7642 }
7643
7644
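	// match: (Select1 (MULVU x (MOVVconst [1])))
	// result: x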
7645 for {
7646 if v_0.Op != OpMIPS64MULVU {
7647 break
7648 }
7649 _ = v_0.Args[1]
7650 v_0_0 := v_0.Args[0]
7651 v_0_1 := v_0.Args[1]
7652 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7653 x := v_0_0
7654 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7655 continue
7656 }
7657 v.copyOf(x)
7658 return true
7659 }
7660 break
7661 }
7662
7663
7664
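	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log64(c)] x)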
7665 for {
7666 if v_0.Op != OpMIPS64MULVU {
7667 break
7668 }
7669 _ = v_0.Args[1]
7670 v_0_0 := v_0.Args[0]
7671 v_0_1 := v_0.Args[1]
7672 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7673 x := v_0_0
7674 if v_0_1.Op != OpMIPS64MOVVconst {
7675 continue
7676 }
7677 c := auxIntToInt64(v_0_1.AuxInt)
7678 if !(isPowerOfTwo(c)) {
7679 continue
7680 }
7681 v.reset(OpMIPS64SLLVconst)
7682 v.AuxInt = int64ToAuxInt(log64(c))
7683 v.AddArg(x)
7684 return true
7685 }
7686 break
7687 }
7688
7689
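	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// result: x
	// Select1 of DIVVU is the quotient.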
7690 for {
7691 if v_0.Op != OpMIPS64DIVVU {
7692 break
7693 }
7694 _ = v_0.Args[1]
7695 x := v_0.Args[0]
7696 v_0_1 := v_0.Args[1]
7697 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7698 break
7699 }
7700 v.copyOf(x)
7701 return true
7702 }
7703
7704
7705
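	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log64(c)] x)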
7706 for {
7707 if v_0.Op != OpMIPS64DIVVU {
7708 break
7709 }
7710 _ = v_0.Args[1]
7711 x := v_0.Args[0]
7712 v_0_1 := v_0.Args[1]
7713 if v_0_1.Op != OpMIPS64MOVVconst {
7714 break
7715 }
7716 c := auxIntToInt64(v_0_1.AuxInt)
7717 if !(isPowerOfTwo(c)) {
7718 break
7719 }
7720 v.reset(OpMIPS64SRLVconst)
7721 v.AuxInt = int64ToAuxInt(log64(c))
7722 v.AddArg(x)
7723 return true
7724 }
7725
7726
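	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// result: (MOVVconst [c*d])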
7727 for {
7728 if v_0.Op != OpMIPS64MULVU {
7729 break
7730 }
7731 _ = v_0.Args[1]
7732 v_0_0 := v_0.Args[0]
7733 v_0_1 := v_0.Args[1]
7734 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7735 if v_0_0.Op != OpMIPS64MOVVconst {
7736 continue
7737 }
7738 c := auxIntToInt64(v_0_0.AuxInt)
7739 if v_0_1.Op != OpMIPS64MOVVconst {
7740 continue
7741 }
7742 d := auxIntToInt64(v_0_1.AuxInt)
7743 v.reset(OpMIPS64MOVVconst)
7744 v.AuxInt = int64ToAuxInt(c * d)
7745 return true
7746 }
7747 break
7748 }
7749
7750
7751
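	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c/d])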
7752 for {
7753 if v_0.Op != OpMIPS64DIVV {
7754 break
7755 }
7756 _ = v_0.Args[1]
7757 v_0_0 := v_0.Args[0]
7758 if v_0_0.Op != OpMIPS64MOVVconst {
7759 break
7760 }
7761 c := auxIntToInt64(v_0_0.AuxInt)
7762 v_0_1 := v_0.Args[1]
7763 if v_0_1.Op != OpMIPS64MOVVconst {
7764 break
7765 }
7766 d := auxIntToInt64(v_0_1.AuxInt)
7767 if !(d != 0) {
7768 break
7769 }
7770 v.reset(OpMIPS64MOVVconst)
7771 v.AuxInt = int64ToAuxInt(c / d)
7772 return true
7773 }
7774
7775
7776
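	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])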
7777 for {
7778 if v_0.Op != OpMIPS64DIVVU {
7779 break
7780 }
7781 _ = v_0.Args[1]
7782 v_0_0 := v_0.Args[0]
7783 if v_0_0.Op != OpMIPS64MOVVconst {
7784 break
7785 }
7786 c := auxIntToInt64(v_0_0.AuxInt)
7787 v_0_1 := v_0.Args[1]
7788 if v_0_1.Op != OpMIPS64MOVVconst {
7789 break
7790 }
7791 d := auxIntToInt64(v_0_1.AuxInt)
7792 if !(d != 0) {
7793 break
7794 }
7795 v.reset(OpMIPS64MOVVconst)
7796 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
7797 return true
7798 }
7799 return false
7800 }
7801 func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
7802 v_0 := v.Args[0]
7803 b := v.Block
7804
7805
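	// match: (Slicemask <t> x)
	// result: (SRAVconst (NEGV <t> x) [63])
	// Produces all ones when x is positive and zero when x is zero.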
7806 for {
7807 t := v.Type
7808 x := v_0
7809 v.reset(OpMIPS64SRAVconst)
7810 v.AuxInt = int64ToAuxInt(63)
7811 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7812 v0.AddArg(x)
7813 v.AddArg(v0)
7814 return true
7815 }
7816 }
7817 func rewriteValueMIPS64_OpStore(v *Value) bool {
7818 v_2 := v.Args[2]
7819 v_1 := v.Args[1]
7820 v_0 := v.Args[0]
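	// Store is lowered to the MIPS64 store of the matching width: MOVBstore,
	// MOVHstore, MOVWstore and MOVVstore for 1, 2, 4 and 8-byte integer stores,
	// and MOVFstore and MOVDstore for 4 and 8-byte floating-point stores.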
7821
7822
7823
7824 for {
7825 t := auxToType(v.Aux)
7826 ptr := v_0
7827 val := v_1
7828 mem := v_2
7829 if !(t.Size() == 1) {
7830 break
7831 }
7832 v.reset(OpMIPS64MOVBstore)
7833 v.AddArg3(ptr, val, mem)
7834 return true
7835 }
7836
7837
7838
7839 for {
7840 t := auxToType(v.Aux)
7841 ptr := v_0
7842 val := v_1
7843 mem := v_2
7844 if !(t.Size() == 2) {
7845 break
7846 }
7847 v.reset(OpMIPS64MOVHstore)
7848 v.AddArg3(ptr, val, mem)
7849 return true
7850 }
7851
7852
7853
7854 for {
7855 t := auxToType(v.Aux)
7856 ptr := v_0
7857 val := v_1
7858 mem := v_2
7859 if !(t.Size() == 4 && !t.IsFloat()) {
7860 break
7861 }
7862 v.reset(OpMIPS64MOVWstore)
7863 v.AddArg3(ptr, val, mem)
7864 return true
7865 }
7866
7867
7868
7869 for {
7870 t := auxToType(v.Aux)
7871 ptr := v_0
7872 val := v_1
7873 mem := v_2
7874 if !(t.Size() == 8 && !t.IsFloat()) {
7875 break
7876 }
7877 v.reset(OpMIPS64MOVVstore)
7878 v.AddArg3(ptr, val, mem)
7879 return true
7880 }
7881
7882
7883
7884 for {
7885 t := auxToType(v.Aux)
7886 ptr := v_0
7887 val := v_1
7888 mem := v_2
7889 if !(t.Size() == 4 && t.IsFloat()) {
7890 break
7891 }
7892 v.reset(OpMIPS64MOVFstore)
7893 v.AddArg3(ptr, val, mem)
7894 return true
7895 }
7896
7897
7898
7899 for {
7900 t := auxToType(v.Aux)
7901 ptr := v_0
7902 val := v_1
7903 mem := v_2
7904 if !(t.Size() == 8 && t.IsFloat()) {
7905 break
7906 }
7907 v.reset(OpMIPS64MOVDstore)
7908 v.AddArg3(ptr, val, mem)
7909 return true
7910 }
7911 return false
7912 }
7913 func rewriteValueMIPS64_OpZero(v *Value) bool {
7914 v_1 := v.Args[1]
7915 v_0 := v.Args[0]
7916 b := v.Block
7917 config := b.Func.Config
7918 typ := &b.Func.Config.Types
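	// Zero of a small region is expanded into explicit zero stores chosen by
	// size and alignment (MOVBstore/MOVHstore/MOVWstore/MOVVstore of a zero
	// constant). Regions that are a multiple of 8 bytes, 8-byte aligned and at
	// most 8*128 bytes use DUFFZERO; larger or insufficiently aligned regions
	// fall back to LoweredZero.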
7919
7920
7921 for {
7922 if auxIntToInt64(v.AuxInt) != 0 {
7923 break
7924 }
7925 mem := v_1
7926 v.copyOf(mem)
7927 return true
7928 }
7929
7930
7931 for {
7932 if auxIntToInt64(v.AuxInt) != 1 {
7933 break
7934 }
7935 ptr := v_0
7936 mem := v_1
7937 v.reset(OpMIPS64MOVBstore)
7938 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7939 v0.AuxInt = int64ToAuxInt(0)
7940 v.AddArg3(ptr, v0, mem)
7941 return true
7942 }
7943
7944
7945
7946 for {
7947 if auxIntToInt64(v.AuxInt) != 2 {
7948 break
7949 }
7950 t := auxToType(v.Aux)
7951 ptr := v_0
7952 mem := v_1
7953 if !(t.Alignment()%2 == 0) {
7954 break
7955 }
7956 v.reset(OpMIPS64MOVHstore)
7957 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7958 v0.AuxInt = int64ToAuxInt(0)
7959 v.AddArg3(ptr, v0, mem)
7960 return true
7961 }
7962
7963
7964 for {
7965 if auxIntToInt64(v.AuxInt) != 2 {
7966 break
7967 }
7968 ptr := v_0
7969 mem := v_1
7970 v.reset(OpMIPS64MOVBstore)
7971 v.AuxInt = int32ToAuxInt(1)
7972 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7973 v0.AuxInt = int64ToAuxInt(0)
7974 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7975 v1.AuxInt = int32ToAuxInt(0)
7976 v1.AddArg3(ptr, v0, mem)
7977 v.AddArg3(ptr, v0, v1)
7978 return true
7979 }
7980
7981
7982
7983 for {
7984 if auxIntToInt64(v.AuxInt) != 4 {
7985 break
7986 }
7987 t := auxToType(v.Aux)
7988 ptr := v_0
7989 mem := v_1
7990 if !(t.Alignment()%4 == 0) {
7991 break
7992 }
7993 v.reset(OpMIPS64MOVWstore)
7994 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7995 v0.AuxInt = int64ToAuxInt(0)
7996 v.AddArg3(ptr, v0, mem)
7997 return true
7998 }
7999
8000
8001
8002 for {
8003 if auxIntToInt64(v.AuxInt) != 4 {
8004 break
8005 }
8006 t := auxToType(v.Aux)
8007 ptr := v_0
8008 mem := v_1
8009 if !(t.Alignment()%2 == 0) {
8010 break
8011 }
8012 v.reset(OpMIPS64MOVHstore)
8013 v.AuxInt = int32ToAuxInt(2)
8014 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8015 v0.AuxInt = int64ToAuxInt(0)
8016 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8017 v1.AuxInt = int32ToAuxInt(0)
8018 v1.AddArg3(ptr, v0, mem)
8019 v.AddArg3(ptr, v0, v1)
8020 return true
8021 }
8022
8023
8024 for {
8025 if auxIntToInt64(v.AuxInt) != 4 {
8026 break
8027 }
8028 ptr := v_0
8029 mem := v_1
8030 v.reset(OpMIPS64MOVBstore)
8031 v.AuxInt = int32ToAuxInt(3)
8032 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8033 v0.AuxInt = int64ToAuxInt(0)
8034 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8035 v1.AuxInt = int32ToAuxInt(2)
8036 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8037 v2.AuxInt = int32ToAuxInt(1)
8038 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8039 v3.AuxInt = int32ToAuxInt(0)
8040 v3.AddArg3(ptr, v0, mem)
8041 v2.AddArg3(ptr, v0, v3)
8042 v1.AddArg3(ptr, v0, v2)
8043 v.AddArg3(ptr, v0, v1)
8044 return true
8045 }
8046
8047
8048
8049 for {
8050 if auxIntToInt64(v.AuxInt) != 8 {
8051 break
8052 }
8053 t := auxToType(v.Aux)
8054 ptr := v_0
8055 mem := v_1
8056 if !(t.Alignment()%8 == 0) {
8057 break
8058 }
8059 v.reset(OpMIPS64MOVVstore)
8060 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8061 v0.AuxInt = int64ToAuxInt(0)
8062 v.AddArg3(ptr, v0, mem)
8063 return true
8064 }
8065
8066
8067
8068 for {
8069 if auxIntToInt64(v.AuxInt) != 8 {
8070 break
8071 }
8072 t := auxToType(v.Aux)
8073 ptr := v_0
8074 mem := v_1
8075 if !(t.Alignment()%4 == 0) {
8076 break
8077 }
8078 v.reset(OpMIPS64MOVWstore)
8079 v.AuxInt = int32ToAuxInt(4)
8080 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8081 v0.AuxInt = int64ToAuxInt(0)
8082 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8083 v1.AuxInt = int32ToAuxInt(0)
8084 v1.AddArg3(ptr, v0, mem)
8085 v.AddArg3(ptr, v0, v1)
8086 return true
8087 }
8088
8089
8090
8091 for {
8092 if auxIntToInt64(v.AuxInt) != 8 {
8093 break
8094 }
8095 t := auxToType(v.Aux)
8096 ptr := v_0
8097 mem := v_1
8098 if !(t.Alignment()%2 == 0) {
8099 break
8100 }
8101 v.reset(OpMIPS64MOVHstore)
8102 v.AuxInt = int32ToAuxInt(6)
8103 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8104 v0.AuxInt = int64ToAuxInt(0)
8105 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8106 v1.AuxInt = int32ToAuxInt(4)
8107 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8108 v2.AuxInt = int32ToAuxInt(2)
8109 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8110 v3.AuxInt = int32ToAuxInt(0)
8111 v3.AddArg3(ptr, v0, mem)
8112 v2.AddArg3(ptr, v0, v3)
8113 v1.AddArg3(ptr, v0, v2)
8114 v.AddArg3(ptr, v0, v1)
8115 return true
8116 }
8117
8118
8119 for {
8120 if auxIntToInt64(v.AuxInt) != 3 {
8121 break
8122 }
8123 ptr := v_0
8124 mem := v_1
8125 v.reset(OpMIPS64MOVBstore)
8126 v.AuxInt = int32ToAuxInt(2)
8127 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8128 v0.AuxInt = int64ToAuxInt(0)
8129 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8130 v1.AuxInt = int32ToAuxInt(1)
8131 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8132 v2.AuxInt = int32ToAuxInt(0)
8133 v2.AddArg3(ptr, v0, mem)
8134 v1.AddArg3(ptr, v0, v2)
8135 v.AddArg3(ptr, v0, v1)
8136 return true
8137 }
8138
8139
8140
8141 for {
8142 if auxIntToInt64(v.AuxInt) != 6 {
8143 break
8144 }
8145 t := auxToType(v.Aux)
8146 ptr := v_0
8147 mem := v_1
8148 if !(t.Alignment()%2 == 0) {
8149 break
8150 }
8151 v.reset(OpMIPS64MOVHstore)
8152 v.AuxInt = int32ToAuxInt(4)
8153 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8154 v0.AuxInt = int64ToAuxInt(0)
8155 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8156 v1.AuxInt = int32ToAuxInt(2)
8157 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8158 v2.AuxInt = int32ToAuxInt(0)
8159 v2.AddArg3(ptr, v0, mem)
8160 v1.AddArg3(ptr, v0, v2)
8161 v.AddArg3(ptr, v0, v1)
8162 return true
8163 }
8164
8165
8166
8167 for {
8168 if auxIntToInt64(v.AuxInt) != 12 {
8169 break
8170 }
8171 t := auxToType(v.Aux)
8172 ptr := v_0
8173 mem := v_1
8174 if !(t.Alignment()%4 == 0) {
8175 break
8176 }
8177 v.reset(OpMIPS64MOVWstore)
8178 v.AuxInt = int32ToAuxInt(8)
8179 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8180 v0.AuxInt = int64ToAuxInt(0)
8181 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8182 v1.AuxInt = int32ToAuxInt(4)
8183 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8184 v2.AuxInt = int32ToAuxInt(0)
8185 v2.AddArg3(ptr, v0, mem)
8186 v1.AddArg3(ptr, v0, v2)
8187 v.AddArg3(ptr, v0, v1)
8188 return true
8189 }
8190
8191
8192
8193 for {
8194 if auxIntToInt64(v.AuxInt) != 16 {
8195 break
8196 }
8197 t := auxToType(v.Aux)
8198 ptr := v_0
8199 mem := v_1
8200 if !(t.Alignment()%8 == 0) {
8201 break
8202 }
8203 v.reset(OpMIPS64MOVVstore)
8204 v.AuxInt = int32ToAuxInt(8)
8205 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8206 v0.AuxInt = int64ToAuxInt(0)
8207 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8208 v1.AuxInt = int32ToAuxInt(0)
8209 v1.AddArg3(ptr, v0, mem)
8210 v.AddArg3(ptr, v0, v1)
8211 return true
8212 }
8213
8214
8215
8216 for {
8217 if auxIntToInt64(v.AuxInt) != 24 {
8218 break
8219 }
8220 t := auxToType(v.Aux)
8221 ptr := v_0
8222 mem := v_1
8223 if !(t.Alignment()%8 == 0) {
8224 break
8225 }
8226 v.reset(OpMIPS64MOVVstore)
8227 v.AuxInt = int32ToAuxInt(16)
8228 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8229 v0.AuxInt = int64ToAuxInt(0)
8230 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8231 v1.AuxInt = int32ToAuxInt(8)
8232 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8233 v2.AuxInt = int32ToAuxInt(0)
8234 v2.AddArg3(ptr, v0, mem)
8235 v1.AddArg3(ptr, v0, v2)
8236 v.AddArg3(ptr, v0, v1)
8237 return true
8238 }
8239
8240
8241
8242 for {
8243 s := auxIntToInt64(v.AuxInt)
8244 t := auxToType(v.Aux)
8245 ptr := v_0
8246 mem := v_1
8247 if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0) {
8248 break
8249 }
8250 v.reset(OpMIPS64DUFFZERO)
8251 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
8252 v.AddArg2(ptr, mem)
8253 return true
8254 }
8255
8256
8257
8258 for {
8259 s := auxIntToInt64(v.AuxInt)
8260 t := auxToType(v.Aux)
8261 ptr := v_0
8262 mem := v_1
8263 if !(s > 8*128 || t.Alignment()%8 != 0) {
8264 break
8265 }
8266 v.reset(OpMIPS64LoweredZero)
8267 v.AuxInt = int64ToAuxInt(t.Alignment())
8268 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
8269 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8270 v0.AddArg(ptr)
8271 v.AddArg3(ptr, v0, mem)
8272 return true
8273 }
8274 return false
8275 }
8276 func rewriteBlockMIPS64(b *Block) bool {
8277 switch b.Kind {
8278 case BlockMIPS64EQ:
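		// EQ: floating-point flag controls become FPF/FPT blocks, (XORconst [1] cmp)
		// controls invert into NE blocks, comparisons against zero reduce to
		// EQ/NE/GEZ/LEZ blocks on the compared operand, and constant controls are
		// resolved statically via BlockFirst (swapping successors when the branch
		// is not taken).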
8279
8280
8281 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8282 v_0 := b.Controls[0]
8283 cmp := v_0.Args[0]
8284 b.resetWithControl(BlockMIPS64FPF, cmp)
8285 return true
8286 }
8287
8288
8289 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8290 v_0 := b.Controls[0]
8291 cmp := v_0.Args[0]
8292 b.resetWithControl(BlockMIPS64FPT, cmp)
8293 return true
8294 }
8295
8296
8297 for b.Controls[0].Op == OpMIPS64XORconst {
8298 v_0 := b.Controls[0]
8299 if auxIntToInt64(v_0.AuxInt) != 1 {
8300 break
8301 }
8302 cmp := v_0.Args[0]
8303 if cmp.Op != OpMIPS64SGT {
8304 break
8305 }
8306 b.resetWithControl(BlockMIPS64NE, cmp)
8307 return true
8308 }
8309
8310
8311 for b.Controls[0].Op == OpMIPS64XORconst {
8312 v_0 := b.Controls[0]
8313 if auxIntToInt64(v_0.AuxInt) != 1 {
8314 break
8315 }
8316 cmp := v_0.Args[0]
8317 if cmp.Op != OpMIPS64SGTU {
8318 break
8319 }
8320 b.resetWithControl(BlockMIPS64NE, cmp)
8321 return true
8322 }
8323
8324
8325 for b.Controls[0].Op == OpMIPS64XORconst {
8326 v_0 := b.Controls[0]
8327 if auxIntToInt64(v_0.AuxInt) != 1 {
8328 break
8329 }
8330 cmp := v_0.Args[0]
8331 if cmp.Op != OpMIPS64SGTconst {
8332 break
8333 }
8334 b.resetWithControl(BlockMIPS64NE, cmp)
8335 return true
8336 }
8337
8338
8339 for b.Controls[0].Op == OpMIPS64XORconst {
8340 v_0 := b.Controls[0]
8341 if auxIntToInt64(v_0.AuxInt) != 1 {
8342 break
8343 }
8344 cmp := v_0.Args[0]
8345 if cmp.Op != OpMIPS64SGTUconst {
8346 break
8347 }
8348 b.resetWithControl(BlockMIPS64NE, cmp)
8349 return true
8350 }
8351
8352
8353 for b.Controls[0].Op == OpMIPS64SGTUconst {
8354 v_0 := b.Controls[0]
8355 if auxIntToInt64(v_0.AuxInt) != 1 {
8356 break
8357 }
8358 x := v_0.Args[0]
8359 b.resetWithControl(BlockMIPS64NE, x)
8360 return true
8361 }
8362
8363
8364 for b.Controls[0].Op == OpMIPS64SGTU {
8365 v_0 := b.Controls[0]
8366 _ = v_0.Args[1]
8367 x := v_0.Args[0]
8368 v_0_1 := v_0.Args[1]
8369 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8370 break
8371 }
8372 b.resetWithControl(BlockMIPS64EQ, x)
8373 return true
8374 }
8375
8376
8377 for b.Controls[0].Op == OpMIPS64SGTconst {
8378 v_0 := b.Controls[0]
8379 if auxIntToInt64(v_0.AuxInt) != 0 {
8380 break
8381 }
8382 x := v_0.Args[0]
8383 b.resetWithControl(BlockMIPS64GEZ, x)
8384 return true
8385 }
8386
8387
8388 for b.Controls[0].Op == OpMIPS64SGT {
8389 v_0 := b.Controls[0]
8390 _ = v_0.Args[1]
8391 x := v_0.Args[0]
8392 v_0_1 := v_0.Args[1]
8393 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8394 break
8395 }
8396 b.resetWithControl(BlockMIPS64LEZ, x)
8397 return true
8398 }
8399
8400
8401 for b.Controls[0].Op == OpMIPS64MOVVconst {
8402 v_0 := b.Controls[0]
8403 if auxIntToInt64(v_0.AuxInt) != 0 {
8404 break
8405 }
8406 b.Reset(BlockFirst)
8407 return true
8408 }
8409
8410
8411
8412 for b.Controls[0].Op == OpMIPS64MOVVconst {
8413 v_0 := b.Controls[0]
8414 c := auxIntToInt64(v_0.AuxInt)
8415 if !(c != 0) {
8416 break
8417 }
8418 b.Reset(BlockFirst)
8419 b.swapSuccessors()
8420 return true
8421 }
8422 case BlockMIPS64GEZ:
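		// GEZ with a constant control is resolved statically: the branch is taken
		// when c >= 0, and the successors are swapped when c < 0.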
8423
8424
8425
8426 for b.Controls[0].Op == OpMIPS64MOVVconst {
8427 v_0 := b.Controls[0]
8428 c := auxIntToInt64(v_0.AuxInt)
8429 if !(c >= 0) {
8430 break
8431 }
8432 b.Reset(BlockFirst)
8433 return true
8434 }
8435
8436
8437
8438 for b.Controls[0].Op == OpMIPS64MOVVconst {
8439 v_0 := b.Controls[0]
8440 c := auxIntToInt64(v_0.AuxInt)
8441 if !(c < 0) {
8442 break
8443 }
8444 b.Reset(BlockFirst)
8445 b.swapSuccessors()
8446 return true
8447 }
8448 case BlockMIPS64GTZ:
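		// GTZ with a constant control is resolved statically: the branch is taken
		// when c > 0, and the successors are swapped when c <= 0.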
8449
8450
8451
8452 for b.Controls[0].Op == OpMIPS64MOVVconst {
8453 v_0 := b.Controls[0]
8454 c := auxIntToInt64(v_0.AuxInt)
8455 if !(c > 0) {
8456 break
8457 }
8458 b.Reset(BlockFirst)
8459 return true
8460 }
8461
8462
8463
8464 for b.Controls[0].Op == OpMIPS64MOVVconst {
8465 v_0 := b.Controls[0]
8466 c := auxIntToInt64(v_0.AuxInt)
8467 if !(c <= 0) {
8468 break
8469 }
8470 b.Reset(BlockFirst)
8471 b.swapSuccessors()
8472 return true
8473 }
8474 case BlockIf:
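		// A generic If block is lowered to an NE block on its boolean control value.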
8475
8476
8477 for {
8478 cond := b.Controls[0]
8479 b.resetWithControl(BlockMIPS64NE, cond)
8480 return true
8481 }
8482 case BlockMIPS64LEZ:
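		// LEZ with a constant control is resolved statically: the branch is taken
		// when c <= 0, and the successors are swapped when c > 0.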
8483
8484
8485
8486 for b.Controls[0].Op == OpMIPS64MOVVconst {
8487 v_0 := b.Controls[0]
8488 c := auxIntToInt64(v_0.AuxInt)
8489 if !(c <= 0) {
8490 break
8491 }
8492 b.Reset(BlockFirst)
8493 return true
8494 }
8495
8496
8497
8498 for b.Controls[0].Op == OpMIPS64MOVVconst {
8499 v_0 := b.Controls[0]
8500 c := auxIntToInt64(v_0.AuxInt)
8501 if !(c > 0) {
8502 break
8503 }
8504 b.Reset(BlockFirst)
8505 b.swapSuccessors()
8506 return true
8507 }
8508 case BlockMIPS64LTZ:
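		// LTZ with a constant control is resolved statically: the branch is taken
		// when c < 0, and the successors are swapped when c >= 0.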
8509
8510
8511
8512 for b.Controls[0].Op == OpMIPS64MOVVconst {
8513 v_0 := b.Controls[0]
8514 c := auxIntToInt64(v_0.AuxInt)
8515 if !(c < 0) {
8516 break
8517 }
8518 b.Reset(BlockFirst)
8519 return true
8520 }
8521
8522
8523
8524 for b.Controls[0].Op == OpMIPS64MOVVconst {
8525 v_0 := b.Controls[0]
8526 c := auxIntToInt64(v_0.AuxInt)
8527 if !(c >= 0) {
8528 break
8529 }
8530 b.Reset(BlockFirst)
8531 b.swapSuccessors()
8532 return true
8533 }
8534 case BlockMIPS64NE:
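		// NE: floating-point flag controls become FPT/FPF blocks, (XORconst [1] cmp)
		// controls invert into EQ blocks, comparisons against zero reduce to
		// NE/EQ/LTZ/GTZ blocks on the compared operand, and constant controls are
		// resolved statically via BlockFirst.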
8535
8536
8537 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8538 v_0 := b.Controls[0]
8539 cmp := v_0.Args[0]
8540 b.resetWithControl(BlockMIPS64FPT, cmp)
8541 return true
8542 }
8543
8544
8545 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8546 v_0 := b.Controls[0]
8547 cmp := v_0.Args[0]
8548 b.resetWithControl(BlockMIPS64FPF, cmp)
8549 return true
8550 }
8551
8552
8553 for b.Controls[0].Op == OpMIPS64XORconst {
8554 v_0 := b.Controls[0]
8555 if auxIntToInt64(v_0.AuxInt) != 1 {
8556 break
8557 }
8558 cmp := v_0.Args[0]
8559 if cmp.Op != OpMIPS64SGT {
8560 break
8561 }
8562 b.resetWithControl(BlockMIPS64EQ, cmp)
8563 return true
8564 }
8565
8566
8567 for b.Controls[0].Op == OpMIPS64XORconst {
8568 v_0 := b.Controls[0]
8569 if auxIntToInt64(v_0.AuxInt) != 1 {
8570 break
8571 }
8572 cmp := v_0.Args[0]
8573 if cmp.Op != OpMIPS64SGTU {
8574 break
8575 }
8576 b.resetWithControl(BlockMIPS64EQ, cmp)
8577 return true
8578 }
8579
8580
8581 for b.Controls[0].Op == OpMIPS64XORconst {
8582 v_0 := b.Controls[0]
8583 if auxIntToInt64(v_0.AuxInt) != 1 {
8584 break
8585 }
8586 cmp := v_0.Args[0]
8587 if cmp.Op != OpMIPS64SGTconst {
8588 break
8589 }
8590 b.resetWithControl(BlockMIPS64EQ, cmp)
8591 return true
8592 }
8593
8594
8595 for b.Controls[0].Op == OpMIPS64XORconst {
8596 v_0 := b.Controls[0]
8597 if auxIntToInt64(v_0.AuxInt) != 1 {
8598 break
8599 }
8600 cmp := v_0.Args[0]
8601 if cmp.Op != OpMIPS64SGTUconst {
8602 break
8603 }
8604 b.resetWithControl(BlockMIPS64EQ, cmp)
8605 return true
8606 }
8607
8608
8609 for b.Controls[0].Op == OpMIPS64SGTUconst {
8610 v_0 := b.Controls[0]
8611 if auxIntToInt64(v_0.AuxInt) != 1 {
8612 break
8613 }
8614 x := v_0.Args[0]
8615 b.resetWithControl(BlockMIPS64EQ, x)
8616 return true
8617 }
8618
8619
8620 for b.Controls[0].Op == OpMIPS64SGTU {
8621 v_0 := b.Controls[0]
8622 _ = v_0.Args[1]
8623 x := v_0.Args[0]
8624 v_0_1 := v_0.Args[1]
8625 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8626 break
8627 }
8628 b.resetWithControl(BlockMIPS64NE, x)
8629 return true
8630 }
8631
8632
8633 for b.Controls[0].Op == OpMIPS64SGTconst {
8634 v_0 := b.Controls[0]
8635 if auxIntToInt64(v_0.AuxInt) != 0 {
8636 break
8637 }
8638 x := v_0.Args[0]
8639 b.resetWithControl(BlockMIPS64LTZ, x)
8640 return true
8641 }
8642
8643
8644 for b.Controls[0].Op == OpMIPS64SGT {
8645 v_0 := b.Controls[0]
8646 _ = v_0.Args[1]
8647 x := v_0.Args[0]
8648 v_0_1 := v_0.Args[1]
8649 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8650 break
8651 }
8652 b.resetWithControl(BlockMIPS64GTZ, x)
8653 return true
8654 }
8655
8656
8657 for b.Controls[0].Op == OpMIPS64MOVVconst {
8658 v_0 := b.Controls[0]
8659 if auxIntToInt64(v_0.AuxInt) != 0 {
8660 break
8661 }
8662 b.Reset(BlockFirst)
8663 b.swapSuccessors()
8664 return true
8665 }
8666
8667
8668
8669 for b.Controls[0].Op == OpMIPS64MOVVconst {
8670 v_0 := b.Controls[0]
8671 c := auxIntToInt64(v_0.AuxInt)
8672 if !(c != 0) {
8673 break
8674 }
8675 b.Reset(BlockFirst)
8676 return true
8677 }
8678 }
8679 return false
8680 }
8681