// Code generated from _gen/MIPS64.rules using 'go generate'; DO NOT EDIT.

3 package ssa
4
5 import "cmd/compile/internal/types"
6
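// rewriteValueMIPS64 rewrites a generic SSA value into MIPS64-specific ops.
// Simple cases retarget v.Op in place; the rest delegate to the per-op
// helpers below. It reports whether a rewrite was applied.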
7 func rewriteValueMIPS64(v *Value) bool {
8 switch v.Op {
9 case OpAbs:
10 v.Op = OpMIPS64ABSD
11 return true
12 case OpAdd16:
13 v.Op = OpMIPS64ADDV
14 return true
15 case OpAdd32:
16 v.Op = OpMIPS64ADDV
17 return true
18 case OpAdd32F:
19 v.Op = OpMIPS64ADDF
20 return true
21 case OpAdd64:
22 v.Op = OpMIPS64ADDV
23 return true
24 case OpAdd64F:
25 v.Op = OpMIPS64ADDD
26 return true
27 case OpAdd8:
28 v.Op = OpMIPS64ADDV
29 return true
30 case OpAddPtr:
31 v.Op = OpMIPS64ADDV
32 return true
33 case OpAddr:
34 return rewriteValueMIPS64_OpAddr(v)
35 case OpAnd16:
36 v.Op = OpMIPS64AND
37 return true
38 case OpAnd32:
39 v.Op = OpMIPS64AND
40 return true
41 case OpAnd64:
42 v.Op = OpMIPS64AND
43 return true
44 case OpAnd8:
45 v.Op = OpMIPS64AND
46 return true
47 case OpAndB:
48 v.Op = OpMIPS64AND
49 return true
50 case OpAtomicAdd32:
51 v.Op = OpMIPS64LoweredAtomicAdd32
52 return true
53 case OpAtomicAdd64:
54 v.Op = OpMIPS64LoweredAtomicAdd64
55 return true
56 case OpAtomicAnd32:
57 v.Op = OpMIPS64LoweredAtomicAnd32
58 return true
59 case OpAtomicAnd8:
60 return rewriteValueMIPS64_OpAtomicAnd8(v)
61 case OpAtomicCompareAndSwap32:
62 return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
63 case OpAtomicCompareAndSwap64:
64 v.Op = OpMIPS64LoweredAtomicCas64
65 return true
66 case OpAtomicExchange32:
67 v.Op = OpMIPS64LoweredAtomicExchange32
68 return true
69 case OpAtomicExchange64:
70 v.Op = OpMIPS64LoweredAtomicExchange64
71 return true
72 case OpAtomicLoad32:
73 v.Op = OpMIPS64LoweredAtomicLoad32
74 return true
75 case OpAtomicLoad64:
76 v.Op = OpMIPS64LoweredAtomicLoad64
77 return true
78 case OpAtomicLoad8:
79 v.Op = OpMIPS64LoweredAtomicLoad8
80 return true
81 case OpAtomicLoadPtr:
82 v.Op = OpMIPS64LoweredAtomicLoad64
83 return true
84 case OpAtomicOr32:
85 v.Op = OpMIPS64LoweredAtomicOr32
86 return true
87 case OpAtomicOr8:
88 return rewriteValueMIPS64_OpAtomicOr8(v)
89 case OpAtomicStore32:
90 v.Op = OpMIPS64LoweredAtomicStore32
91 return true
92 case OpAtomicStore64:
93 v.Op = OpMIPS64LoweredAtomicStore64
94 return true
95 case OpAtomicStore8:
96 v.Op = OpMIPS64LoweredAtomicStore8
97 return true
98 case OpAtomicStorePtrNoWB:
99 v.Op = OpMIPS64LoweredAtomicStore64
100 return true
101 case OpAvg64u:
102 return rewriteValueMIPS64_OpAvg64u(v)
103 case OpClosureCall:
104 v.Op = OpMIPS64CALLclosure
105 return true
106 case OpCom16:
107 return rewriteValueMIPS64_OpCom16(v)
108 case OpCom32:
109 return rewriteValueMIPS64_OpCom32(v)
110 case OpCom64:
111 return rewriteValueMIPS64_OpCom64(v)
112 case OpCom8:
113 return rewriteValueMIPS64_OpCom8(v)
114 case OpConst16:
115 return rewriteValueMIPS64_OpConst16(v)
116 case OpConst32:
117 return rewriteValueMIPS64_OpConst32(v)
118 case OpConst32F:
119 return rewriteValueMIPS64_OpConst32F(v)
120 case OpConst64:
121 return rewriteValueMIPS64_OpConst64(v)
122 case OpConst64F:
123 return rewriteValueMIPS64_OpConst64F(v)
124 case OpConst8:
125 return rewriteValueMIPS64_OpConst8(v)
126 case OpConstBool:
127 return rewriteValueMIPS64_OpConstBool(v)
128 case OpConstNil:
129 return rewriteValueMIPS64_OpConstNil(v)
130 case OpCvt32Fto32:
131 v.Op = OpMIPS64TRUNCFW
132 return true
133 case OpCvt32Fto64:
134 v.Op = OpMIPS64TRUNCFV
135 return true
136 case OpCvt32Fto64F:
137 v.Op = OpMIPS64MOVFD
138 return true
139 case OpCvt32to32F:
140 v.Op = OpMIPS64MOVWF
141 return true
142 case OpCvt32to64F:
143 v.Op = OpMIPS64MOVWD
144 return true
145 case OpCvt64Fto32:
146 v.Op = OpMIPS64TRUNCDW
147 return true
148 case OpCvt64Fto32F:
149 v.Op = OpMIPS64MOVDF
150 return true
151 case OpCvt64Fto64:
152 v.Op = OpMIPS64TRUNCDV
153 return true
154 case OpCvt64to32F:
155 v.Op = OpMIPS64MOVVF
156 return true
157 case OpCvt64to64F:
158 v.Op = OpMIPS64MOVVD
159 return true
160 case OpCvtBoolToUint8:
161 v.Op = OpCopy
162 return true
163 case OpDiv16:
164 return rewriteValueMIPS64_OpDiv16(v)
165 case OpDiv16u:
166 return rewriteValueMIPS64_OpDiv16u(v)
167 case OpDiv32:
168 return rewriteValueMIPS64_OpDiv32(v)
169 case OpDiv32F:
170 v.Op = OpMIPS64DIVF
171 return true
172 case OpDiv32u:
173 return rewriteValueMIPS64_OpDiv32u(v)
174 case OpDiv64:
175 return rewriteValueMIPS64_OpDiv64(v)
176 case OpDiv64F:
177 v.Op = OpMIPS64DIVD
178 return true
179 case OpDiv64u:
180 return rewriteValueMIPS64_OpDiv64u(v)
181 case OpDiv8:
182 return rewriteValueMIPS64_OpDiv8(v)
183 case OpDiv8u:
184 return rewriteValueMIPS64_OpDiv8u(v)
185 case OpEq16:
186 return rewriteValueMIPS64_OpEq16(v)
187 case OpEq32:
188 return rewriteValueMIPS64_OpEq32(v)
189 case OpEq32F:
190 return rewriteValueMIPS64_OpEq32F(v)
191 case OpEq64:
192 return rewriteValueMIPS64_OpEq64(v)
193 case OpEq64F:
194 return rewriteValueMIPS64_OpEq64F(v)
195 case OpEq8:
196 return rewriteValueMIPS64_OpEq8(v)
197 case OpEqB:
198 return rewriteValueMIPS64_OpEqB(v)
199 case OpEqPtr:
200 return rewriteValueMIPS64_OpEqPtr(v)
201 case OpGetCallerPC:
202 v.Op = OpMIPS64LoweredGetCallerPC
203 return true
204 case OpGetCallerSP:
205 v.Op = OpMIPS64LoweredGetCallerSP
206 return true
207 case OpGetClosurePtr:
208 v.Op = OpMIPS64LoweredGetClosurePtr
209 return true
210 case OpHmul32:
211 return rewriteValueMIPS64_OpHmul32(v)
212 case OpHmul32u:
213 return rewriteValueMIPS64_OpHmul32u(v)
214 case OpHmul64:
215 return rewriteValueMIPS64_OpHmul64(v)
216 case OpHmul64u:
217 return rewriteValueMIPS64_OpHmul64u(v)
218 case OpInterCall:
219 v.Op = OpMIPS64CALLinter
220 return true
221 case OpIsInBounds:
222 return rewriteValueMIPS64_OpIsInBounds(v)
223 case OpIsNonNil:
224 return rewriteValueMIPS64_OpIsNonNil(v)
225 case OpIsSliceInBounds:
226 return rewriteValueMIPS64_OpIsSliceInBounds(v)
227 case OpLeq16:
228 return rewriteValueMIPS64_OpLeq16(v)
229 case OpLeq16U:
230 return rewriteValueMIPS64_OpLeq16U(v)
231 case OpLeq32:
232 return rewriteValueMIPS64_OpLeq32(v)
233 case OpLeq32F:
234 return rewriteValueMIPS64_OpLeq32F(v)
235 case OpLeq32U:
236 return rewriteValueMIPS64_OpLeq32U(v)
237 case OpLeq64:
238 return rewriteValueMIPS64_OpLeq64(v)
239 case OpLeq64F:
240 return rewriteValueMIPS64_OpLeq64F(v)
241 case OpLeq64U:
242 return rewriteValueMIPS64_OpLeq64U(v)
243 case OpLeq8:
244 return rewriteValueMIPS64_OpLeq8(v)
245 case OpLeq8U:
246 return rewriteValueMIPS64_OpLeq8U(v)
247 case OpLess16:
248 return rewriteValueMIPS64_OpLess16(v)
249 case OpLess16U:
250 return rewriteValueMIPS64_OpLess16U(v)
251 case OpLess32:
252 return rewriteValueMIPS64_OpLess32(v)
253 case OpLess32F:
254 return rewriteValueMIPS64_OpLess32F(v)
255 case OpLess32U:
256 return rewriteValueMIPS64_OpLess32U(v)
257 case OpLess64:
258 return rewriteValueMIPS64_OpLess64(v)
259 case OpLess64F:
260 return rewriteValueMIPS64_OpLess64F(v)
261 case OpLess64U:
262 return rewriteValueMIPS64_OpLess64U(v)
263 case OpLess8:
264 return rewriteValueMIPS64_OpLess8(v)
265 case OpLess8U:
266 return rewriteValueMIPS64_OpLess8U(v)
267 case OpLoad:
268 return rewriteValueMIPS64_OpLoad(v)
269 case OpLocalAddr:
270 return rewriteValueMIPS64_OpLocalAddr(v)
271 case OpLsh16x16:
272 return rewriteValueMIPS64_OpLsh16x16(v)
273 case OpLsh16x32:
274 return rewriteValueMIPS64_OpLsh16x32(v)
275 case OpLsh16x64:
276 return rewriteValueMIPS64_OpLsh16x64(v)
277 case OpLsh16x8:
278 return rewriteValueMIPS64_OpLsh16x8(v)
279 case OpLsh32x16:
280 return rewriteValueMIPS64_OpLsh32x16(v)
281 case OpLsh32x32:
282 return rewriteValueMIPS64_OpLsh32x32(v)
283 case OpLsh32x64:
284 return rewriteValueMIPS64_OpLsh32x64(v)
285 case OpLsh32x8:
286 return rewriteValueMIPS64_OpLsh32x8(v)
287 case OpLsh64x16:
288 return rewriteValueMIPS64_OpLsh64x16(v)
289 case OpLsh64x32:
290 return rewriteValueMIPS64_OpLsh64x32(v)
291 case OpLsh64x64:
292 return rewriteValueMIPS64_OpLsh64x64(v)
293 case OpLsh64x8:
294 return rewriteValueMIPS64_OpLsh64x8(v)
295 case OpLsh8x16:
296 return rewriteValueMIPS64_OpLsh8x16(v)
297 case OpLsh8x32:
298 return rewriteValueMIPS64_OpLsh8x32(v)
299 case OpLsh8x64:
300 return rewriteValueMIPS64_OpLsh8x64(v)
301 case OpLsh8x8:
302 return rewriteValueMIPS64_OpLsh8x8(v)
303 case OpMIPS64ADDV:
304 return rewriteValueMIPS64_OpMIPS64ADDV(v)
305 case OpMIPS64ADDVconst:
306 return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
307 case OpMIPS64AND:
308 return rewriteValueMIPS64_OpMIPS64AND(v)
309 case OpMIPS64ANDconst:
310 return rewriteValueMIPS64_OpMIPS64ANDconst(v)
311 case OpMIPS64LoweredAtomicAdd32:
312 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
313 case OpMIPS64LoweredAtomicAdd64:
314 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
315 case OpMIPS64LoweredAtomicStore32:
316 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
317 case OpMIPS64LoweredAtomicStore64:
318 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
319 case OpMIPS64MOVBUload:
320 return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
321 case OpMIPS64MOVBUreg:
322 return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
323 case OpMIPS64MOVBload:
324 return rewriteValueMIPS64_OpMIPS64MOVBload(v)
325 case OpMIPS64MOVBreg:
326 return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
327 case OpMIPS64MOVBstore:
328 return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
329 case OpMIPS64MOVBstorezero:
330 return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
331 case OpMIPS64MOVDload:
332 return rewriteValueMIPS64_OpMIPS64MOVDload(v)
333 case OpMIPS64MOVDstore:
334 return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
335 case OpMIPS64MOVFload:
336 return rewriteValueMIPS64_OpMIPS64MOVFload(v)
337 case OpMIPS64MOVFstore:
338 return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
339 case OpMIPS64MOVHUload:
340 return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
341 case OpMIPS64MOVHUreg:
342 return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
343 case OpMIPS64MOVHload:
344 return rewriteValueMIPS64_OpMIPS64MOVHload(v)
345 case OpMIPS64MOVHreg:
346 return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
347 case OpMIPS64MOVHstore:
348 return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
349 case OpMIPS64MOVHstorezero:
350 return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
351 case OpMIPS64MOVVload:
352 return rewriteValueMIPS64_OpMIPS64MOVVload(v)
353 case OpMIPS64MOVVnop:
354 return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
355 case OpMIPS64MOVVreg:
356 return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
357 case OpMIPS64MOVVstore:
358 return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
359 case OpMIPS64MOVVstorezero:
360 return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
361 case OpMIPS64MOVWUload:
362 return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
363 case OpMIPS64MOVWUreg:
364 return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
365 case OpMIPS64MOVWload:
366 return rewriteValueMIPS64_OpMIPS64MOVWload(v)
367 case OpMIPS64MOVWreg:
368 return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
369 case OpMIPS64MOVWstore:
370 return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
371 case OpMIPS64MOVWstorezero:
372 return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
373 case OpMIPS64NEGV:
374 return rewriteValueMIPS64_OpMIPS64NEGV(v)
375 case OpMIPS64NOR:
376 return rewriteValueMIPS64_OpMIPS64NOR(v)
377 case OpMIPS64NORconst:
378 return rewriteValueMIPS64_OpMIPS64NORconst(v)
379 case OpMIPS64OR:
380 return rewriteValueMIPS64_OpMIPS64OR(v)
381 case OpMIPS64ORconst:
382 return rewriteValueMIPS64_OpMIPS64ORconst(v)
383 case OpMIPS64SGT:
384 return rewriteValueMIPS64_OpMIPS64SGT(v)
385 case OpMIPS64SGTU:
386 return rewriteValueMIPS64_OpMIPS64SGTU(v)
387 case OpMIPS64SGTUconst:
388 return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
389 case OpMIPS64SGTconst:
390 return rewriteValueMIPS64_OpMIPS64SGTconst(v)
391 case OpMIPS64SLLV:
392 return rewriteValueMIPS64_OpMIPS64SLLV(v)
393 case OpMIPS64SLLVconst:
394 return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
395 case OpMIPS64SRAV:
396 return rewriteValueMIPS64_OpMIPS64SRAV(v)
397 case OpMIPS64SRAVconst:
398 return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
399 case OpMIPS64SRLV:
400 return rewriteValueMIPS64_OpMIPS64SRLV(v)
401 case OpMIPS64SRLVconst:
402 return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
403 case OpMIPS64SUBV:
404 return rewriteValueMIPS64_OpMIPS64SUBV(v)
405 case OpMIPS64SUBVconst:
406 return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
407 case OpMIPS64XOR:
408 return rewriteValueMIPS64_OpMIPS64XOR(v)
409 case OpMIPS64XORconst:
410 return rewriteValueMIPS64_OpMIPS64XORconst(v)
411 case OpMod16:
412 return rewriteValueMIPS64_OpMod16(v)
413 case OpMod16u:
414 return rewriteValueMIPS64_OpMod16u(v)
415 case OpMod32:
416 return rewriteValueMIPS64_OpMod32(v)
417 case OpMod32u:
418 return rewriteValueMIPS64_OpMod32u(v)
419 case OpMod64:
420 return rewriteValueMIPS64_OpMod64(v)
421 case OpMod64u:
422 return rewriteValueMIPS64_OpMod64u(v)
423 case OpMod8:
424 return rewriteValueMIPS64_OpMod8(v)
425 case OpMod8u:
426 return rewriteValueMIPS64_OpMod8u(v)
427 case OpMove:
428 return rewriteValueMIPS64_OpMove(v)
429 case OpMul16:
430 return rewriteValueMIPS64_OpMul16(v)
431 case OpMul32:
432 return rewriteValueMIPS64_OpMul32(v)
433 case OpMul32F:
434 v.Op = OpMIPS64MULF
435 return true
436 case OpMul64:
437 return rewriteValueMIPS64_OpMul64(v)
438 case OpMul64F:
439 v.Op = OpMIPS64MULD
440 return true
441 case OpMul64uhilo:
442 v.Op = OpMIPS64MULVU
443 return true
444 case OpMul8:
445 return rewriteValueMIPS64_OpMul8(v)
446 case OpNeg16:
447 v.Op = OpMIPS64NEGV
448 return true
449 case OpNeg32:
450 v.Op = OpMIPS64NEGV
451 return true
452 case OpNeg32F:
453 v.Op = OpMIPS64NEGF
454 return true
455 case OpNeg64:
456 v.Op = OpMIPS64NEGV
457 return true
458 case OpNeg64F:
459 v.Op = OpMIPS64NEGD
460 return true
461 case OpNeg8:
462 v.Op = OpMIPS64NEGV
463 return true
464 case OpNeq16:
465 return rewriteValueMIPS64_OpNeq16(v)
466 case OpNeq32:
467 return rewriteValueMIPS64_OpNeq32(v)
468 case OpNeq32F:
469 return rewriteValueMIPS64_OpNeq32F(v)
470 case OpNeq64:
471 return rewriteValueMIPS64_OpNeq64(v)
472 case OpNeq64F:
473 return rewriteValueMIPS64_OpNeq64F(v)
474 case OpNeq8:
475 return rewriteValueMIPS64_OpNeq8(v)
476 case OpNeqB:
477 v.Op = OpMIPS64XOR
478 return true
479 case OpNeqPtr:
480 return rewriteValueMIPS64_OpNeqPtr(v)
481 case OpNilCheck:
482 v.Op = OpMIPS64LoweredNilCheck
483 return true
484 case OpNot:
485 return rewriteValueMIPS64_OpNot(v)
486 case OpOffPtr:
487 return rewriteValueMIPS64_OpOffPtr(v)
488 case OpOr16:
489 v.Op = OpMIPS64OR
490 return true
491 case OpOr32:
492 v.Op = OpMIPS64OR
493 return true
494 case OpOr64:
495 v.Op = OpMIPS64OR
496 return true
497 case OpOr8:
498 v.Op = OpMIPS64OR
499 return true
500 case OpOrB:
501 v.Op = OpMIPS64OR
502 return true
503 case OpPanicBounds:
504 return rewriteValueMIPS64_OpPanicBounds(v)
505 case OpRotateLeft16:
506 return rewriteValueMIPS64_OpRotateLeft16(v)
507 case OpRotateLeft32:
508 return rewriteValueMIPS64_OpRotateLeft32(v)
509 case OpRotateLeft64:
510 return rewriteValueMIPS64_OpRotateLeft64(v)
511 case OpRotateLeft8:
512 return rewriteValueMIPS64_OpRotateLeft8(v)
513 case OpRound32F:
514 v.Op = OpCopy
515 return true
516 case OpRound64F:
517 v.Op = OpCopy
518 return true
519 case OpRsh16Ux16:
520 return rewriteValueMIPS64_OpRsh16Ux16(v)
521 case OpRsh16Ux32:
522 return rewriteValueMIPS64_OpRsh16Ux32(v)
523 case OpRsh16Ux64:
524 return rewriteValueMIPS64_OpRsh16Ux64(v)
525 case OpRsh16Ux8:
526 return rewriteValueMIPS64_OpRsh16Ux8(v)
527 case OpRsh16x16:
528 return rewriteValueMIPS64_OpRsh16x16(v)
529 case OpRsh16x32:
530 return rewriteValueMIPS64_OpRsh16x32(v)
531 case OpRsh16x64:
532 return rewriteValueMIPS64_OpRsh16x64(v)
533 case OpRsh16x8:
534 return rewriteValueMIPS64_OpRsh16x8(v)
535 case OpRsh32Ux16:
536 return rewriteValueMIPS64_OpRsh32Ux16(v)
537 case OpRsh32Ux32:
538 return rewriteValueMIPS64_OpRsh32Ux32(v)
539 case OpRsh32Ux64:
540 return rewriteValueMIPS64_OpRsh32Ux64(v)
541 case OpRsh32Ux8:
542 return rewriteValueMIPS64_OpRsh32Ux8(v)
543 case OpRsh32x16:
544 return rewriteValueMIPS64_OpRsh32x16(v)
545 case OpRsh32x32:
546 return rewriteValueMIPS64_OpRsh32x32(v)
547 case OpRsh32x64:
548 return rewriteValueMIPS64_OpRsh32x64(v)
549 case OpRsh32x8:
550 return rewriteValueMIPS64_OpRsh32x8(v)
551 case OpRsh64Ux16:
552 return rewriteValueMIPS64_OpRsh64Ux16(v)
553 case OpRsh64Ux32:
554 return rewriteValueMIPS64_OpRsh64Ux32(v)
555 case OpRsh64Ux64:
556 return rewriteValueMIPS64_OpRsh64Ux64(v)
557 case OpRsh64Ux8:
558 return rewriteValueMIPS64_OpRsh64Ux8(v)
559 case OpRsh64x16:
560 return rewriteValueMIPS64_OpRsh64x16(v)
561 case OpRsh64x32:
562 return rewriteValueMIPS64_OpRsh64x32(v)
563 case OpRsh64x64:
564 return rewriteValueMIPS64_OpRsh64x64(v)
565 case OpRsh64x8:
566 return rewriteValueMIPS64_OpRsh64x8(v)
567 case OpRsh8Ux16:
568 return rewriteValueMIPS64_OpRsh8Ux16(v)
569 case OpRsh8Ux32:
570 return rewriteValueMIPS64_OpRsh8Ux32(v)
571 case OpRsh8Ux64:
572 return rewriteValueMIPS64_OpRsh8Ux64(v)
573 case OpRsh8Ux8:
574 return rewriteValueMIPS64_OpRsh8Ux8(v)
575 case OpRsh8x16:
576 return rewriteValueMIPS64_OpRsh8x16(v)
577 case OpRsh8x32:
578 return rewriteValueMIPS64_OpRsh8x32(v)
579 case OpRsh8x64:
580 return rewriteValueMIPS64_OpRsh8x64(v)
581 case OpRsh8x8:
582 return rewriteValueMIPS64_OpRsh8x8(v)
583 case OpSelect0:
584 return rewriteValueMIPS64_OpSelect0(v)
585 case OpSelect1:
586 return rewriteValueMIPS64_OpSelect1(v)
587 case OpSignExt16to32:
588 v.Op = OpMIPS64MOVHreg
589 return true
590 case OpSignExt16to64:
591 v.Op = OpMIPS64MOVHreg
592 return true
593 case OpSignExt32to64:
594 v.Op = OpMIPS64MOVWreg
595 return true
596 case OpSignExt8to16:
597 v.Op = OpMIPS64MOVBreg
598 return true
599 case OpSignExt8to32:
600 v.Op = OpMIPS64MOVBreg
601 return true
602 case OpSignExt8to64:
603 v.Op = OpMIPS64MOVBreg
604 return true
605 case OpSlicemask:
606 return rewriteValueMIPS64_OpSlicemask(v)
607 case OpSqrt:
608 v.Op = OpMIPS64SQRTD
609 return true
610 case OpSqrt32:
611 v.Op = OpMIPS64SQRTF
612 return true
613 case OpStaticCall:
614 v.Op = OpMIPS64CALLstatic
615 return true
616 case OpStore:
617 return rewriteValueMIPS64_OpStore(v)
618 case OpSub16:
619 v.Op = OpMIPS64SUBV
620 return true
621 case OpSub32:
622 v.Op = OpMIPS64SUBV
623 return true
624 case OpSub32F:
625 v.Op = OpMIPS64SUBF
626 return true
627 case OpSub64:
628 v.Op = OpMIPS64SUBV
629 return true
630 case OpSub64F:
631 v.Op = OpMIPS64SUBD
632 return true
633 case OpSub8:
634 v.Op = OpMIPS64SUBV
635 return true
636 case OpSubPtr:
637 v.Op = OpMIPS64SUBV
638 return true
639 case OpTailCall:
640 v.Op = OpMIPS64CALLtail
641 return true
642 case OpTrunc16to8:
643 v.Op = OpCopy
644 return true
645 case OpTrunc32to16:
646 v.Op = OpCopy
647 return true
648 case OpTrunc32to8:
649 v.Op = OpCopy
650 return true
651 case OpTrunc64to16:
652 v.Op = OpCopy
653 return true
654 case OpTrunc64to32:
655 v.Op = OpCopy
656 return true
657 case OpTrunc64to8:
658 v.Op = OpCopy
659 return true
660 case OpWB:
661 v.Op = OpMIPS64LoweredWB
662 return true
663 case OpXor16:
664 v.Op = OpMIPS64XOR
665 return true
666 case OpXor32:
667 v.Op = OpMIPS64XOR
668 return true
669 case OpXor64:
670 v.Op = OpMIPS64XOR
671 return true
672 case OpXor8:
673 v.Op = OpMIPS64XOR
674 return true
675 case OpZero:
676 return rewriteValueMIPS64_OpZero(v)
677 case OpZeroExt16to32:
678 v.Op = OpMIPS64MOVHUreg
679 return true
680 case OpZeroExt16to64:
681 v.Op = OpMIPS64MOVHUreg
682 return true
683 case OpZeroExt32to64:
684 v.Op = OpMIPS64MOVWUreg
685 return true
686 case OpZeroExt8to16:
687 v.Op = OpMIPS64MOVBUreg
688 return true
689 case OpZeroExt8to32:
690 v.Op = OpMIPS64MOVBUreg
691 return true
692 case OpZeroExt8to64:
693 v.Op = OpMIPS64MOVBUreg
694 return true
695 }
696 return false
697 }
698 func rewriteValueMIPS64_OpAddr(v *Value) bool {
699 v_0 := v.Args[0]
// match: (Addr {sym} base)
// result: (MOVVaddr {sym} base)
702 for {
703 sym := auxToSym(v.Aux)
704 base := v_0
705 v.reset(OpMIPS64MOVVaddr)
706 v.Aux = symToAux(sym)
707 v.AddArg(base)
708 return true
709 }
710 }
711 func rewriteValueMIPS64_OpAtomicAnd8(v *Value) bool {
712 v_2 := v.Args[2]
713 v_1 := v.Args[1]
714 v_0 := v.Args[0]
715 b := v.Block
716 config := b.Func.Config
717 typ := &b.Func.Config.Types
// match: (AtomicAnd8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) (NORconst <typ.UInt64> [0] (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))))) mem)
721 for {
722 ptr := v_0
723 val := v_1
724 mem := v_2
725 if !(!config.BigEndian) {
726 break
727 }
728 v.reset(OpMIPS64LoweredAtomicAnd32)
729 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
730 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
731 v1.AuxInt = int64ToAuxInt(^3)
732 v0.AddArg2(v1, ptr)
733 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
734 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
735 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
736 v4.AddArg(val)
737 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
738 v5.AuxInt = int64ToAuxInt(3)
739 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
740 v6.AuxInt = int64ToAuxInt(3)
741 v6.AddArg(ptr)
742 v5.AddArg(v6)
743 v3.AddArg2(v4, v5)
744 v7 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
745 v7.AuxInt = int64ToAuxInt(0)
746 v8 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
747 v9 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
748 v9.AuxInt = int64ToAuxInt(0xff)
749 v8.AddArg2(v9, v5)
750 v7.AddArg(v8)
751 v2.AddArg2(v3, v7)
752 v.AddArg3(v0, v2, mem)
753 return true
754 }
// match: (AtomicAnd8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) (NORconst <typ.UInt64> [0] (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))))) mem)
758 for {
759 ptr := v_0
760 val := v_1
761 mem := v_2
762 if !(config.BigEndian) {
763 break
764 }
765 v.reset(OpMIPS64LoweredAtomicAnd32)
766 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
767 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
768 v1.AuxInt = int64ToAuxInt(^3)
769 v0.AddArg2(v1, ptr)
770 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
771 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
772 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
773 v4.AddArg(val)
774 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
775 v5.AuxInt = int64ToAuxInt(3)
776 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
777 v6.AuxInt = int64ToAuxInt(3)
778 v7 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
779 v7.AuxInt = int64ToAuxInt(3)
780 v7.AddArg(ptr)
781 v6.AddArg(v7)
782 v5.AddArg(v6)
783 v3.AddArg2(v4, v5)
784 v8 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
785 v8.AuxInt = int64ToAuxInt(0)
786 v9 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
787 v10 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
788 v10.AuxInt = int64ToAuxInt(0xff)
789 v9.AddArg2(v10, v5)
790 v8.AddArg(v9)
791 v2.AddArg2(v3, v8)
792 v.AddArg3(v0, v2, mem)
793 return true
794 }
795 return false
796 }
797 func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
798 v_3 := v.Args[3]
799 v_2 := v.Args[2]
800 v_1 := v.Args[1]
801 v_0 := v.Args[0]
802 b := v.Block
803 typ := &b.Func.Config.Types
// match: (AtomicCompareAndSwap32 ptr old new mem)
// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
806 for {
807 ptr := v_0
808 old := v_1
809 new := v_2
810 mem := v_3
811 v.reset(OpMIPS64LoweredAtomicCas32)
812 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
813 v0.AddArg(old)
814 v.AddArg4(ptr, v0, new, mem)
815 return true
816 }
817 }
818 func rewriteValueMIPS64_OpAtomicOr8(v *Value) bool {
819 v_2 := v.Args[2]
820 v_1 := v.Args[1]
821 v_0 := v.Args[0]
822 b := v.Block
823 config := b.Func.Config
824 typ := &b.Func.Config.Types
// match: (AtomicOr8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
828 for {
829 ptr := v_0
830 val := v_1
831 mem := v_2
832 if !(!config.BigEndian) {
833 break
834 }
835 v.reset(OpMIPS64LoweredAtomicOr32)
836 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
837 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
838 v1.AuxInt = int64ToAuxInt(^3)
839 v0.AddArg2(v1, ptr)
840 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
841 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
842 v3.AddArg(val)
843 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
844 v4.AuxInt = int64ToAuxInt(3)
845 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
846 v5.AuxInt = int64ToAuxInt(3)
847 v5.AddArg(ptr)
848 v4.AddArg(v5)
849 v2.AddArg2(v3, v4)
850 v.AddArg3(v0, v2, mem)
851 return true
852 }
// match: (AtomicOr8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) mem)
856 for {
857 ptr := v_0
858 val := v_1
859 mem := v_2
860 if !(config.BigEndian) {
861 break
862 }
863 v.reset(OpMIPS64LoweredAtomicOr32)
864 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
865 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
866 v1.AuxInt = int64ToAuxInt(^3)
867 v0.AddArg2(v1, ptr)
868 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
869 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
870 v3.AddArg(val)
871 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
872 v4.AuxInt = int64ToAuxInt(3)
873 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
874 v5.AuxInt = int64ToAuxInt(3)
875 v6 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
876 v6.AuxInt = int64ToAuxInt(3)
877 v6.AddArg(ptr)
878 v5.AddArg(v6)
879 v4.AddArg(v5)
880 v2.AddArg2(v3, v4)
881 v.AddArg3(v0, v2, mem)
882 return true
883 }
884 return false
885 }
886 func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
887 v_1 := v.Args[1]
888 v_0 := v.Args[0]
889 b := v.Block
// match: (Avg64u <t> x y)
// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
892 for {
893 t := v.Type
894 x := v_0
895 y := v_1
896 v.reset(OpMIPS64ADDV)
897 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
898 v0.AuxInt = int64ToAuxInt(1)
899 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
900 v1.AddArg2(x, y)
901 v0.AddArg(v1)
902 v.AddArg2(v0, y)
903 return true
904 }
905 }
906 func rewriteValueMIPS64_OpCom16(v *Value) bool {
907 v_0 := v.Args[0]
908 b := v.Block
909 typ := &b.Func.Config.Types
// match: (Com16 x)
// result: (NOR (MOVVconst [0]) x)
912 for {
913 x := v_0
914 v.reset(OpMIPS64NOR)
915 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
916 v0.AuxInt = int64ToAuxInt(0)
917 v.AddArg2(v0, x)
918 return true
919 }
920 }
921 func rewriteValueMIPS64_OpCom32(v *Value) bool {
922 v_0 := v.Args[0]
923 b := v.Block
924 typ := &b.Func.Config.Types
// match: (Com32 x)
// result: (NOR (MOVVconst [0]) x)
927 for {
928 x := v_0
929 v.reset(OpMIPS64NOR)
930 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
931 v0.AuxInt = int64ToAuxInt(0)
932 v.AddArg2(v0, x)
933 return true
934 }
935 }
936 func rewriteValueMIPS64_OpCom64(v *Value) bool {
937 v_0 := v.Args[0]
938 b := v.Block
939 typ := &b.Func.Config.Types
// match: (Com64 x)
// result: (NOR (MOVVconst [0]) x)
942 for {
943 x := v_0
944 v.reset(OpMIPS64NOR)
945 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
946 v0.AuxInt = int64ToAuxInt(0)
947 v.AddArg2(v0, x)
948 return true
949 }
950 }
951 func rewriteValueMIPS64_OpCom8(v *Value) bool {
952 v_0 := v.Args[0]
953 b := v.Block
954 typ := &b.Func.Config.Types
// match: (Com8 x)
// result: (NOR (MOVVconst [0]) x)
957 for {
958 x := v_0
959 v.reset(OpMIPS64NOR)
960 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
961 v0.AuxInt = int64ToAuxInt(0)
962 v.AddArg2(v0, x)
963 return true
964 }
965 }
966 func rewriteValueMIPS64_OpConst16(v *Value) bool {
// match: (Const16 [val])
// result: (MOVVconst [int64(val)])
969 for {
970 val := auxIntToInt16(v.AuxInt)
971 v.reset(OpMIPS64MOVVconst)
972 v.AuxInt = int64ToAuxInt(int64(val))
973 return true
974 }
975 }
976 func rewriteValueMIPS64_OpConst32(v *Value) bool {
// match: (Const32 [val])
// result: (MOVVconst [int64(val)])
979 for {
980 val := auxIntToInt32(v.AuxInt)
981 v.reset(OpMIPS64MOVVconst)
982 v.AuxInt = int64ToAuxInt(int64(val))
983 return true
984 }
985 }
986 func rewriteValueMIPS64_OpConst32F(v *Value) bool {
// match: (Const32F [val])
// result: (MOVFconst [float64(val)])
989 for {
990 val := auxIntToFloat32(v.AuxInt)
991 v.reset(OpMIPS64MOVFconst)
992 v.AuxInt = float64ToAuxInt(float64(val))
993 return true
994 }
995 }
996 func rewriteValueMIPS64_OpConst64(v *Value) bool {
// match: (Const64 [val])
// result: (MOVVconst [int64(val)])
999 for {
1000 val := auxIntToInt64(v.AuxInt)
1001 v.reset(OpMIPS64MOVVconst)
1002 v.AuxInt = int64ToAuxInt(int64(val))
1003 return true
1004 }
1005 }
1006 func rewriteValueMIPS64_OpConst64F(v *Value) bool {
// match: (Const64F [val])
// result: (MOVDconst [float64(val)])
1009 for {
1010 val := auxIntToFloat64(v.AuxInt)
1011 v.reset(OpMIPS64MOVDconst)
1012 v.AuxInt = float64ToAuxInt(float64(val))
1013 return true
1014 }
1015 }
1016 func rewriteValueMIPS64_OpConst8(v *Value) bool {
// match: (Const8 [val])
// result: (MOVVconst [int64(val)])
1019 for {
1020 val := auxIntToInt8(v.AuxInt)
1021 v.reset(OpMIPS64MOVVconst)
1022 v.AuxInt = int64ToAuxInt(int64(val))
1023 return true
1024 }
1025 }
1026 func rewriteValueMIPS64_OpConstBool(v *Value) bool {
// match: (ConstBool [t])
// result: (MOVVconst [int64(b2i(t))])
1029 for {
1030 t := auxIntToBool(v.AuxInt)
1031 v.reset(OpMIPS64MOVVconst)
1032 v.AuxInt = int64ToAuxInt(int64(b2i(t)))
1033 return true
1034 }
1035 }
1036 func rewriteValueMIPS64_OpConstNil(v *Value) bool {
// match: (ConstNil)
// result: (MOVVconst [0])
1039 for {
1040 v.reset(OpMIPS64MOVVconst)
1041 v.AuxInt = int64ToAuxInt(0)
1042 return true
1043 }
1044 }
1045 func rewriteValueMIPS64_OpDiv16(v *Value) bool {
1046 v_1 := v.Args[1]
1047 v_0 := v.Args[0]
1048 b := v.Block
1049 typ := &b.Func.Config.Types
// match: (Div16 x y)
// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
1052 for {
1053 x := v_0
1054 y := v_1
1055 v.reset(OpSelect1)
1056 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1057 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1058 v1.AddArg(x)
1059 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1060 v2.AddArg(y)
1061 v0.AddArg2(v1, v2)
1062 v.AddArg(v0)
1063 return true
1064 }
1065 }
1066 func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
1067 v_1 := v.Args[1]
1068 v_0 := v.Args[0]
1069 b := v.Block
1070 typ := &b.Func.Config.Types
// match: (Div16u x y)
// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
1073 for {
1074 x := v_0
1075 y := v_1
1076 v.reset(OpSelect1)
1077 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1078 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1079 v1.AddArg(x)
1080 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1081 v2.AddArg(y)
1082 v0.AddArg2(v1, v2)
1083 v.AddArg(v0)
1084 return true
1085 }
1086 }
1087 func rewriteValueMIPS64_OpDiv32(v *Value) bool {
1088 v_1 := v.Args[1]
1089 v_0 := v.Args[0]
1090 b := v.Block
1091 typ := &b.Func.Config.Types
// match: (Div32 x y)
// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
1094 for {
1095 x := v_0
1096 y := v_1
1097 v.reset(OpSelect1)
1098 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1099 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1100 v1.AddArg(x)
1101 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1102 v2.AddArg(y)
1103 v0.AddArg2(v1, v2)
1104 v.AddArg(v0)
1105 return true
1106 }
1107 }
1108 func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
1109 v_1 := v.Args[1]
1110 v_0 := v.Args[0]
1111 b := v.Block
1112 typ := &b.Func.Config.Types
// match: (Div32u x y)
// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
1115 for {
1116 x := v_0
1117 y := v_1
1118 v.reset(OpSelect1)
1119 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1120 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1121 v1.AddArg(x)
1122 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1123 v2.AddArg(y)
1124 v0.AddArg2(v1, v2)
1125 v.AddArg(v0)
1126 return true
1127 }
1128 }
1129 func rewriteValueMIPS64_OpDiv64(v *Value) bool {
1130 v_1 := v.Args[1]
1131 v_0 := v.Args[0]
1132 b := v.Block
1133 typ := &b.Func.Config.Types
// match: (Div64 x y)
// result: (Select1 (DIVV x y))
1136 for {
1137 x := v_0
1138 y := v_1
1139 v.reset(OpSelect1)
1140 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1141 v0.AddArg2(x, y)
1142 v.AddArg(v0)
1143 return true
1144 }
1145 }
1146 func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
1147 v_1 := v.Args[1]
1148 v_0 := v.Args[0]
1149 b := v.Block
1150 typ := &b.Func.Config.Types
// match: (Div64u x y)
// result: (Select1 (DIVVU x y))
1153 for {
1154 x := v_0
1155 y := v_1
1156 v.reset(OpSelect1)
1157 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1158 v0.AddArg2(x, y)
1159 v.AddArg(v0)
1160 return true
1161 }
1162 }
1163 func rewriteValueMIPS64_OpDiv8(v *Value) bool {
1164 v_1 := v.Args[1]
1165 v_0 := v.Args[0]
1166 b := v.Block
1167 typ := &b.Func.Config.Types
// match: (Div8 x y)
// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
1170 for {
1171 x := v_0
1172 y := v_1
1173 v.reset(OpSelect1)
1174 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1175 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1176 v1.AddArg(x)
1177 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1178 v2.AddArg(y)
1179 v0.AddArg2(v1, v2)
1180 v.AddArg(v0)
1181 return true
1182 }
1183 }
1184 func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
1185 v_1 := v.Args[1]
1186 v_0 := v.Args[0]
1187 b := v.Block
1188 typ := &b.Func.Config.Types
// match: (Div8u x y)
// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
1191 for {
1192 x := v_0
1193 y := v_1
1194 v.reset(OpSelect1)
1195 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1196 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1197 v1.AddArg(x)
1198 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1199 v2.AddArg(y)
1200 v0.AddArg2(v1, v2)
1201 v.AddArg(v0)
1202 return true
1203 }
1204 }
1205 func rewriteValueMIPS64_OpEq16(v *Value) bool {
1206 v_1 := v.Args[1]
1207 v_0 := v.Args[0]
1208 b := v.Block
1209 typ := &b.Func.Config.Types
// match: (Eq16 x y)
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
1212 for {
1213 x := v_0
1214 y := v_1
1215 v.reset(OpMIPS64SGTU)
1216 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1217 v0.AuxInt = int64ToAuxInt(1)
1218 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1219 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1220 v2.AddArg(x)
1221 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1222 v3.AddArg(y)
1223 v1.AddArg2(v2, v3)
1224 v.AddArg2(v0, v1)
1225 return true
1226 }
1227 }
1228 func rewriteValueMIPS64_OpEq32(v *Value) bool {
1229 v_1 := v.Args[1]
1230 v_0 := v.Args[0]
1231 b := v.Block
1232 typ := &b.Func.Config.Types
// match: (Eq32 x y)
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
1235 for {
1236 x := v_0
1237 y := v_1
1238 v.reset(OpMIPS64SGTU)
1239 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1240 v0.AuxInt = int64ToAuxInt(1)
1241 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1242 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1243 v2.AddArg(x)
1244 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1245 v3.AddArg(y)
1246 v1.AddArg2(v2, v3)
1247 v.AddArg2(v0, v1)
1248 return true
1249 }
1250 }
1251 func rewriteValueMIPS64_OpEq32F(v *Value) bool {
1252 v_1 := v.Args[1]
1253 v_0 := v.Args[0]
1254 b := v.Block
// match: (Eq32F x y)
// result: (FPFlagTrue (CMPEQF x y))
1257 for {
1258 x := v_0
1259 y := v_1
1260 v.reset(OpMIPS64FPFlagTrue)
1261 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
1262 v0.AddArg2(x, y)
1263 v.AddArg(v0)
1264 return true
1265 }
1266 }
1267 func rewriteValueMIPS64_OpEq64(v *Value) bool {
1268 v_1 := v.Args[1]
1269 v_0 := v.Args[0]
1270 b := v.Block
1271 typ := &b.Func.Config.Types
// match: (Eq64 x y)
// result: (SGTU (MOVVconst [1]) (XOR x y))
1274 for {
1275 x := v_0
1276 y := v_1
1277 v.reset(OpMIPS64SGTU)
1278 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1279 v0.AuxInt = int64ToAuxInt(1)
1280 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1281 v1.AddArg2(x, y)
1282 v.AddArg2(v0, v1)
1283 return true
1284 }
1285 }
1286 func rewriteValueMIPS64_OpEq64F(v *Value) bool {
1287 v_1 := v.Args[1]
1288 v_0 := v.Args[0]
1289 b := v.Block
// match: (Eq64F x y)
// result: (FPFlagTrue (CMPEQD x y))
1292 for {
1293 x := v_0
1294 y := v_1
1295 v.reset(OpMIPS64FPFlagTrue)
1296 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
1297 v0.AddArg2(x, y)
1298 v.AddArg(v0)
1299 return true
1300 }
1301 }
1302 func rewriteValueMIPS64_OpEq8(v *Value) bool {
1303 v_1 := v.Args[1]
1304 v_0 := v.Args[0]
1305 b := v.Block
1306 typ := &b.Func.Config.Types
// match: (Eq8 x y)
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
1309 for {
1310 x := v_0
1311 y := v_1
1312 v.reset(OpMIPS64SGTU)
1313 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1314 v0.AuxInt = int64ToAuxInt(1)
1315 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1316 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1317 v2.AddArg(x)
1318 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1319 v3.AddArg(y)
1320 v1.AddArg2(v2, v3)
1321 v.AddArg2(v0, v1)
1322 return true
1323 }
1324 }
1325 func rewriteValueMIPS64_OpEqB(v *Value) bool {
1326 v_1 := v.Args[1]
1327 v_0 := v.Args[0]
1328 b := v.Block
1329 typ := &b.Func.Config.Types
// match: (EqB x y)
// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
1332 for {
1333 x := v_0
1334 y := v_1
1335 v.reset(OpMIPS64XOR)
1336 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1337 v0.AuxInt = int64ToAuxInt(1)
1338 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
1339 v1.AddArg2(x, y)
1340 v.AddArg2(v0, v1)
1341 return true
1342 }
1343 }
1344 func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
1345 v_1 := v.Args[1]
1346 v_0 := v.Args[0]
1347 b := v.Block
1348 typ := &b.Func.Config.Types
// match: (EqPtr x y)
// result: (SGTU (MOVVconst [1]) (XOR x y))
1351 for {
1352 x := v_0
1353 y := v_1
1354 v.reset(OpMIPS64SGTU)
1355 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1356 v0.AuxInt = int64ToAuxInt(1)
1357 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1358 v1.AddArg2(x, y)
1359 v.AddArg2(v0, v1)
1360 return true
1361 }
1362 }
1363 func rewriteValueMIPS64_OpHmul32(v *Value) bool {
1364 v_1 := v.Args[1]
1365 v_0 := v.Args[0]
1366 b := v.Block
1367 typ := &b.Func.Config.Types
// match: (Hmul32 x y)
// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
1370 for {
1371 x := v_0
1372 y := v_1
1373 v.reset(OpMIPS64SRAVconst)
1374 v.AuxInt = int64ToAuxInt(32)
1375 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
1376 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1377 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1378 v2.AddArg(x)
1379 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1380 v3.AddArg(y)
1381 v1.AddArg2(v2, v3)
1382 v0.AddArg(v1)
1383 v.AddArg(v0)
1384 return true
1385 }
1386 }
1387 func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
1388 v_1 := v.Args[1]
1389 v_0 := v.Args[0]
1390 b := v.Block
1391 typ := &b.Func.Config.Types
// match: (Hmul32u x y)
// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
1394 for {
1395 x := v_0
1396 y := v_1
1397 v.reset(OpMIPS64SRLVconst)
1398 v.AuxInt = int64ToAuxInt(32)
1399 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
1400 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1401 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1402 v2.AddArg(x)
1403 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1404 v3.AddArg(y)
1405 v1.AddArg2(v2, v3)
1406 v0.AddArg(v1)
1407 v.AddArg(v0)
1408 return true
1409 }
1410 }
1411 func rewriteValueMIPS64_OpHmul64(v *Value) bool {
1412 v_1 := v.Args[1]
1413 v_0 := v.Args[0]
1414 b := v.Block
1415 typ := &b.Func.Config.Types
// match: (Hmul64 x y)
// result: (Select0 (MULV x y))
1418 for {
1419 x := v_0
1420 y := v_1
1421 v.reset(OpSelect0)
1422 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1423 v0.AddArg2(x, y)
1424 v.AddArg(v0)
1425 return true
1426 }
1427 }
1428 func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
1429 v_1 := v.Args[1]
1430 v_0 := v.Args[0]
1431 b := v.Block
1432 typ := &b.Func.Config.Types
// match: (Hmul64u x y)
// result: (Select0 (MULVU x y))
1435 for {
1436 x := v_0
1437 y := v_1
1438 v.reset(OpSelect0)
1439 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1440 v0.AddArg2(x, y)
1441 v.AddArg(v0)
1442 return true
1443 }
1444 }
1445 func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
1446 v_1 := v.Args[1]
1447 v_0 := v.Args[0]
// match: (IsInBounds idx len)
// result: (SGTU len idx)
1450 for {
1451 idx := v_0
1452 len := v_1
1453 v.reset(OpMIPS64SGTU)
1454 v.AddArg2(len, idx)
1455 return true
1456 }
1457 }
1458 func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
1459 v_0 := v.Args[0]
1460 b := v.Block
1461 typ := &b.Func.Config.Types
// match: (IsNonNil ptr)
// result: (SGTU ptr (MOVVconst [0]))
1464 for {
1465 ptr := v_0
1466 v.reset(OpMIPS64SGTU)
1467 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1468 v0.AuxInt = int64ToAuxInt(0)
1469 v.AddArg2(ptr, v0)
1470 return true
1471 }
1472 }
1473 func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
1474 v_1 := v.Args[1]
1475 v_0 := v.Args[0]
1476 b := v.Block
1477 typ := &b.Func.Config.Types
// match: (IsSliceInBounds idx len)
// result: (XOR (MOVVconst [1]) (SGTU idx len))
1480 for {
1481 idx := v_0
1482 len := v_1
1483 v.reset(OpMIPS64XOR)
1484 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1485 v0.AuxInt = int64ToAuxInt(1)
1486 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1487 v1.AddArg2(idx, len)
1488 v.AddArg2(v0, v1)
1489 return true
1490 }
1491 }
1492 func rewriteValueMIPS64_OpLeq16(v *Value) bool {
1493 v_1 := v.Args[1]
1494 v_0 := v.Args[0]
1495 b := v.Block
1496 typ := &b.Func.Config.Types
// match: (Leq16 x y)
// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
1499 for {
1500 x := v_0
1501 y := v_1
1502 v.reset(OpMIPS64XOR)
1503 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1504 v0.AuxInt = int64ToAuxInt(1)
1505 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1506 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1507 v2.AddArg(x)
1508 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1509 v3.AddArg(y)
1510 v1.AddArg2(v2, v3)
1511 v.AddArg2(v0, v1)
1512 return true
1513 }
1514 }
1515 func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
1516 v_1 := v.Args[1]
1517 v_0 := v.Args[0]
1518 b := v.Block
1519 typ := &b.Func.Config.Types
// match: (Leq16U x y)
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
1522 for {
1523 x := v_0
1524 y := v_1
1525 v.reset(OpMIPS64XOR)
1526 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1527 v0.AuxInt = int64ToAuxInt(1)
1528 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1529 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1530 v2.AddArg(x)
1531 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1532 v3.AddArg(y)
1533 v1.AddArg2(v2, v3)
1534 v.AddArg2(v0, v1)
1535 return true
1536 }
1537 }
1538 func rewriteValueMIPS64_OpLeq32(v *Value) bool {
1539 v_1 := v.Args[1]
1540 v_0 := v.Args[0]
1541 b := v.Block
1542 typ := &b.Func.Config.Types
// match: (Leq32 x y)
// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
1545 for {
1546 x := v_0
1547 y := v_1
1548 v.reset(OpMIPS64XOR)
1549 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1550 v0.AuxInt = int64ToAuxInt(1)
1551 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1552 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1553 v2.AddArg(x)
1554 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1555 v3.AddArg(y)
1556 v1.AddArg2(v2, v3)
1557 v.AddArg2(v0, v1)
1558 return true
1559 }
1560 }
1561 func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
1562 v_1 := v.Args[1]
1563 v_0 := v.Args[0]
1564 b := v.Block
// match: (Leq32F x y)
// result: (FPFlagTrue (CMPGEF y x))
1567 for {
1568 x := v_0
1569 y := v_1
1570 v.reset(OpMIPS64FPFlagTrue)
1571 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
1572 v0.AddArg2(y, x)
1573 v.AddArg(v0)
1574 return true
1575 }
1576 }
1577 func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
1578 v_1 := v.Args[1]
1579 v_0 := v.Args[0]
1580 b := v.Block
1581 typ := &b.Func.Config.Types
// match: (Leq32U x y)
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
1584 for {
1585 x := v_0
1586 y := v_1
1587 v.reset(OpMIPS64XOR)
1588 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1589 v0.AuxInt = int64ToAuxInt(1)
1590 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1591 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1592 v2.AddArg(x)
1593 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1594 v3.AddArg(y)
1595 v1.AddArg2(v2, v3)
1596 v.AddArg2(v0, v1)
1597 return true
1598 }
1599 }
1600 func rewriteValueMIPS64_OpLeq64(v *Value) bool {
1601 v_1 := v.Args[1]
1602 v_0 := v.Args[0]
1603 b := v.Block
1604 typ := &b.Func.Config.Types
// match: (Leq64 x y)
// result: (XOR (MOVVconst [1]) (SGT x y))
1607 for {
1608 x := v_0
1609 y := v_1
1610 v.reset(OpMIPS64XOR)
1611 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1612 v0.AuxInt = int64ToAuxInt(1)
1613 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1614 v1.AddArg2(x, y)
1615 v.AddArg2(v0, v1)
1616 return true
1617 }
1618 }
1619 func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
1620 v_1 := v.Args[1]
1621 v_0 := v.Args[0]
1622 b := v.Block
// match: (Leq64F x y)
// result: (FPFlagTrue (CMPGED y x))
1625 for {
1626 x := v_0
1627 y := v_1
1628 v.reset(OpMIPS64FPFlagTrue)
1629 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
1630 v0.AddArg2(y, x)
1631 v.AddArg(v0)
1632 return true
1633 }
1634 }
1635 func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
1636 v_1 := v.Args[1]
1637 v_0 := v.Args[0]
1638 b := v.Block
1639 typ := &b.Func.Config.Types
// match: (Leq64U x y)
// result: (XOR (MOVVconst [1]) (SGTU x y))
1642 for {
1643 x := v_0
1644 y := v_1
1645 v.reset(OpMIPS64XOR)
1646 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1647 v0.AuxInt = int64ToAuxInt(1)
1648 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1649 v1.AddArg2(x, y)
1650 v.AddArg2(v0, v1)
1651 return true
1652 }
1653 }
1654 func rewriteValueMIPS64_OpLeq8(v *Value) bool {
1655 v_1 := v.Args[1]
1656 v_0 := v.Args[0]
1657 b := v.Block
1658 typ := &b.Func.Config.Types
// match: (Leq8 x y)
// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
1661 for {
1662 x := v_0
1663 y := v_1
1664 v.reset(OpMIPS64XOR)
1665 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1666 v0.AuxInt = int64ToAuxInt(1)
1667 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1668 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1669 v2.AddArg(x)
1670 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1671 v3.AddArg(y)
1672 v1.AddArg2(v2, v3)
1673 v.AddArg2(v0, v1)
1674 return true
1675 }
1676 }
1677 func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
1678 v_1 := v.Args[1]
1679 v_0 := v.Args[0]
1680 b := v.Block
1681 typ := &b.Func.Config.Types
// match: (Leq8U x y)
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
1684 for {
1685 x := v_0
1686 y := v_1
1687 v.reset(OpMIPS64XOR)
1688 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1689 v0.AuxInt = int64ToAuxInt(1)
1690 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1691 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1692 v2.AddArg(x)
1693 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1694 v3.AddArg(y)
1695 v1.AddArg2(v2, v3)
1696 v.AddArg2(v0, v1)
1697 return true
1698 }
1699 }
1700 func rewriteValueMIPS64_OpLess16(v *Value) bool {
1701 v_1 := v.Args[1]
1702 v_0 := v.Args[0]
1703 b := v.Block
1704 typ := &b.Func.Config.Types
// match: (Less16 x y)
// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
1707 for {
1708 x := v_0
1709 y := v_1
1710 v.reset(OpMIPS64SGT)
1711 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1712 v0.AddArg(y)
1713 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1714 v1.AddArg(x)
1715 v.AddArg2(v0, v1)
1716 return true
1717 }
1718 }
1719 func rewriteValueMIPS64_OpLess16U(v *Value) bool {
1720 v_1 := v.Args[1]
1721 v_0 := v.Args[0]
1722 b := v.Block
1723 typ := &b.Func.Config.Types
// match: (Less16U x y)
// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
1726 for {
1727 x := v_0
1728 y := v_1
1729 v.reset(OpMIPS64SGTU)
1730 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1731 v0.AddArg(y)
1732 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1733 v1.AddArg(x)
1734 v.AddArg2(v0, v1)
1735 return true
1736 }
1737 }
1738 func rewriteValueMIPS64_OpLess32(v *Value) bool {
1739 v_1 := v.Args[1]
1740 v_0 := v.Args[0]
1741 b := v.Block
1742 typ := &b.Func.Config.Types
// match: (Less32 x y)
// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
1745 for {
1746 x := v_0
1747 y := v_1
1748 v.reset(OpMIPS64SGT)
1749 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1750 v0.AddArg(y)
1751 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1752 v1.AddArg(x)
1753 v.AddArg2(v0, v1)
1754 return true
1755 }
1756 }
1757 func rewriteValueMIPS64_OpLess32F(v *Value) bool {
1758 v_1 := v.Args[1]
1759 v_0 := v.Args[0]
1760 b := v.Block
// match: (Less32F x y)
// result: (FPFlagTrue (CMPGTF y x))
1763 for {
1764 x := v_0
1765 y := v_1
1766 v.reset(OpMIPS64FPFlagTrue)
1767 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
1768 v0.AddArg2(y, x)
1769 v.AddArg(v0)
1770 return true
1771 }
1772 }
1773 func rewriteValueMIPS64_OpLess32U(v *Value) bool {
1774 v_1 := v.Args[1]
1775 v_0 := v.Args[0]
1776 b := v.Block
1777 typ := &b.Func.Config.Types
// match: (Less32U x y)
// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
1780 for {
1781 x := v_0
1782 y := v_1
1783 v.reset(OpMIPS64SGTU)
1784 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1785 v0.AddArg(y)
1786 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1787 v1.AddArg(x)
1788 v.AddArg2(v0, v1)
1789 return true
1790 }
1791 }
1792 func rewriteValueMIPS64_OpLess64(v *Value) bool {
1793 v_1 := v.Args[1]
1794 v_0 := v.Args[0]
// match: (Less64 x y)
// result: (SGT y x)
1797 for {
1798 x := v_0
1799 y := v_1
1800 v.reset(OpMIPS64SGT)
1801 v.AddArg2(y, x)
1802 return true
1803 }
1804 }
1805 func rewriteValueMIPS64_OpLess64F(v *Value) bool {
1806 v_1 := v.Args[1]
1807 v_0 := v.Args[0]
1808 b := v.Block
// match: (Less64F x y)
// result: (FPFlagTrue (CMPGTD y x))
1811 for {
1812 x := v_0
1813 y := v_1
1814 v.reset(OpMIPS64FPFlagTrue)
1815 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
1816 v0.AddArg2(y, x)
1817 v.AddArg(v0)
1818 return true
1819 }
1820 }
1821 func rewriteValueMIPS64_OpLess64U(v *Value) bool {
1822 v_1 := v.Args[1]
1823 v_0 := v.Args[0]
// match: (Less64U x y)
// result: (SGTU y x)
1826 for {
1827 x := v_0
1828 y := v_1
1829 v.reset(OpMIPS64SGTU)
1830 v.AddArg2(y, x)
1831 return true
1832 }
1833 }
1834 func rewriteValueMIPS64_OpLess8(v *Value) bool {
1835 v_1 := v.Args[1]
1836 v_0 := v.Args[0]
1837 b := v.Block
1838 typ := &b.Func.Config.Types
// match: (Less8 x y)
// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
1841 for {
1842 x := v_0
1843 y := v_1
1844 v.reset(OpMIPS64SGT)
1845 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1846 v0.AddArg(y)
1847 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1848 v1.AddArg(x)
1849 v.AddArg2(v0, v1)
1850 return true
1851 }
1852 }
1853 func rewriteValueMIPS64_OpLess8U(v *Value) bool {
1854 v_1 := v.Args[1]
1855 v_0 := v.Args[0]
1856 b := v.Block
1857 typ := &b.Func.Config.Types
// match: (Less8U x y)
// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
1860 for {
1861 x := v_0
1862 y := v_1
1863 v.reset(OpMIPS64SGTU)
1864 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1865 v0.AddArg(y)
1866 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1867 v1.AddArg(x)
1868 v.AddArg2(v0, v1)
1869 return true
1870 }
1871 }
1872 func rewriteValueMIPS64_OpLoad(v *Value) bool {
1873 v_1 := v.Args[1]
1874 v_0 := v.Args[0]
// match: (Load <t> ptr mem)
// cond: t.IsBoolean()
// result: (MOVBUload ptr mem)
1878 for {
1879 t := v.Type
1880 ptr := v_0
1881 mem := v_1
1882 if !(t.IsBoolean()) {
1883 break
1884 }
1885 v.reset(OpMIPS64MOVBUload)
1886 v.AddArg2(ptr, mem)
1887 return true
1888 }
// match: (Load <t> ptr mem)
// cond: is8BitInt(t) && t.IsSigned()
// result: (MOVBload ptr mem)
1892 for {
1893 t := v.Type
1894 ptr := v_0
1895 mem := v_1
1896 if !(is8BitInt(t) && t.IsSigned()) {
1897 break
1898 }
1899 v.reset(OpMIPS64MOVBload)
1900 v.AddArg2(ptr, mem)
1901 return true
1902 }
// match: (Load <t> ptr mem)
// cond: is8BitInt(t) && !t.IsSigned()
// result: (MOVBUload ptr mem)
1906 for {
1907 t := v.Type
1908 ptr := v_0
1909 mem := v_1
1910 if !(is8BitInt(t) && !t.IsSigned()) {
1911 break
1912 }
1913 v.reset(OpMIPS64MOVBUload)
1914 v.AddArg2(ptr, mem)
1915 return true
1916 }
// match: (Load <t> ptr mem)
// cond: is16BitInt(t) && t.IsSigned()
// result: (MOVHload ptr mem)
1920 for {
1921 t := v.Type
1922 ptr := v_0
1923 mem := v_1
1924 if !(is16BitInt(t) && t.IsSigned()) {
1925 break
1926 }
1927 v.reset(OpMIPS64MOVHload)
1928 v.AddArg2(ptr, mem)
1929 return true
1930 }
// match: (Load <t> ptr mem)
// cond: is16BitInt(t) && !t.IsSigned()
// result: (MOVHUload ptr mem)
1934 for {
1935 t := v.Type
1936 ptr := v_0
1937 mem := v_1
1938 if !(is16BitInt(t) && !t.IsSigned()) {
1939 break
1940 }
1941 v.reset(OpMIPS64MOVHUload)
1942 v.AddArg2(ptr, mem)
1943 return true
1944 }
// match: (Load <t> ptr mem)
// cond: is32BitInt(t) && t.IsSigned()
// result: (MOVWload ptr mem)
1948 for {
1949 t := v.Type
1950 ptr := v_0
1951 mem := v_1
1952 if !(is32BitInt(t) && t.IsSigned()) {
1953 break
1954 }
1955 v.reset(OpMIPS64MOVWload)
1956 v.AddArg2(ptr, mem)
1957 return true
1958 }
// match: (Load <t> ptr mem)
// cond: is32BitInt(t) && !t.IsSigned()
// result: (MOVWUload ptr mem)
1962 for {
1963 t := v.Type
1964 ptr := v_0
1965 mem := v_1
1966 if !(is32BitInt(t) && !t.IsSigned()) {
1967 break
1968 }
1969 v.reset(OpMIPS64MOVWUload)
1970 v.AddArg2(ptr, mem)
1971 return true
1972 }
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) || isPtr(t)
// result: (MOVVload ptr mem)
1976 for {
1977 t := v.Type
1978 ptr := v_0
1979 mem := v_1
1980 if !(is64BitInt(t) || isPtr(t)) {
1981 break
1982 }
1983 v.reset(OpMIPS64MOVVload)
1984 v.AddArg2(ptr, mem)
1985 return true
1986 }
// match: (Load <t> ptr mem)
// cond: is32BitFloat(t)
// result: (MOVFload ptr mem)
1990 for {
1991 t := v.Type
1992 ptr := v_0
1993 mem := v_1
1994 if !(is32BitFloat(t)) {
1995 break
1996 }
1997 v.reset(OpMIPS64MOVFload)
1998 v.AddArg2(ptr, mem)
1999 return true
2000 }
// match: (Load <t> ptr mem)
// cond: is64BitFloat(t)
// result: (MOVDload ptr mem)
2004 for {
2005 t := v.Type
2006 ptr := v_0
2007 mem := v_1
2008 if !(is64BitFloat(t)) {
2009 break
2010 }
2011 v.reset(OpMIPS64MOVDload)
2012 v.AddArg2(ptr, mem)
2013 return true
2014 }
2015 return false
2016 }
2017 func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
2018 v_1 := v.Args[1]
2019 v_0 := v.Args[0]
2020 b := v.Block
2021 typ := &b.Func.Config.Types
// match: (LocalAddr <t> {sym} base mem)
// cond: t.Elem().HasPointers()
// result: (MOVVaddr {sym} (SPanchored base mem))
2025 for {
2026 t := v.Type
2027 sym := auxToSym(v.Aux)
2028 base := v_0
2029 mem := v_1
2030 if !(t.Elem().HasPointers()) {
2031 break
2032 }
2033 v.reset(OpMIPS64MOVVaddr)
2034 v.Aux = symToAux(sym)
2035 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
2036 v0.AddArg2(base, mem)
2037 v.AddArg(v0)
2038 return true
2039 }
// match: (LocalAddr <t> {sym} base _)
// cond: !t.Elem().HasPointers()
// result: (MOVVaddr {sym} base)
2043 for {
2044 t := v.Type
2045 sym := auxToSym(v.Aux)
2046 base := v_0
2047 if !(!t.Elem().HasPointers()) {
2048 break
2049 }
2050 v.reset(OpMIPS64MOVVaddr)
2051 v.Aux = symToAux(sym)
2052 v.AddArg(base)
2053 return true
2054 }
2055 return false
2056 }
2057 func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
2058 v_1 := v.Args[1]
2059 v_0 := v.Args[0]
2060 b := v.Block
2061 typ := &b.Func.Config.Types
// match: (Lsh16x16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2064 for {
2065 t := v.Type
2066 x := v_0
2067 y := v_1
2068 v.reset(OpMIPS64AND)
2069 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2070 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2071 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2072 v2.AuxInt = int64ToAuxInt(64)
2073 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2074 v3.AddArg(y)
2075 v1.AddArg2(v2, v3)
2076 v0.AddArg(v1)
2077 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2078 v4.AddArg2(x, v3)
2079 v.AddArg2(v0, v4)
2080 return true
2081 }
2082 }
2083 func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
2084 v_1 := v.Args[1]
2085 v_0 := v.Args[0]
2086 b := v.Block
2087 typ := &b.Func.Config.Types
// match: (Lsh16x32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2090 for {
2091 t := v.Type
2092 x := v_0
2093 y := v_1
2094 v.reset(OpMIPS64AND)
2095 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2096 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2097 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2098 v2.AuxInt = int64ToAuxInt(64)
2099 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2100 v3.AddArg(y)
2101 v1.AddArg2(v2, v3)
2102 v0.AddArg(v1)
2103 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2104 v4.AddArg2(x, v3)
2105 v.AddArg2(v0, v4)
2106 return true
2107 }
2108 }
2109 func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
2110 v_1 := v.Args[1]
2111 v_0 := v.Args[0]
2112 b := v.Block
2113 typ := &b.Func.Config.Types
// match: (Lsh16x64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2116 for {
2117 t := v.Type
2118 x := v_0
2119 y := v_1
2120 v.reset(OpMIPS64AND)
2121 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2122 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2123 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2124 v2.AuxInt = int64ToAuxInt(64)
2125 v1.AddArg2(v2, y)
2126 v0.AddArg(v1)
2127 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2128 v3.AddArg2(x, y)
2129 v.AddArg2(v0, v3)
2130 return true
2131 }
2132 }
2133 func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
2134 v_1 := v.Args[1]
2135 v_0 := v.Args[0]
2136 b := v.Block
2137 typ := &b.Func.Config.Types
// match: (Lsh16x8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2140 for {
2141 t := v.Type
2142 x := v_0
2143 y := v_1
2144 v.reset(OpMIPS64AND)
2145 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2146 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2147 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2148 v2.AuxInt = int64ToAuxInt(64)
2149 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2150 v3.AddArg(y)
2151 v1.AddArg2(v2, v3)
2152 v0.AddArg(v1)
2153 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2154 v4.AddArg2(x, v3)
2155 v.AddArg2(v0, v4)
2156 return true
2157 }
2158 }
2159 func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
2160 v_1 := v.Args[1]
2161 v_0 := v.Args[0]
2162 b := v.Block
2163 typ := &b.Func.Config.Types
// match: (Lsh32x16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2166 for {
2167 t := v.Type
2168 x := v_0
2169 y := v_1
2170 v.reset(OpMIPS64AND)
2171 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2172 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2173 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2174 v2.AuxInt = int64ToAuxInt(64)
2175 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2176 v3.AddArg(y)
2177 v1.AddArg2(v2, v3)
2178 v0.AddArg(v1)
2179 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2180 v4.AddArg2(x, v3)
2181 v.AddArg2(v0, v4)
2182 return true
2183 }
2184 }
2185 func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
2186 v_1 := v.Args[1]
2187 v_0 := v.Args[0]
2188 b := v.Block
2189 typ := &b.Func.Config.Types
// match: (Lsh32x32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2192 for {
2193 t := v.Type
2194 x := v_0
2195 y := v_1
2196 v.reset(OpMIPS64AND)
2197 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2198 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2199 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2200 v2.AuxInt = int64ToAuxInt(64)
2201 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2202 v3.AddArg(y)
2203 v1.AddArg2(v2, v3)
2204 v0.AddArg(v1)
2205 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2206 v4.AddArg2(x, v3)
2207 v.AddArg2(v0, v4)
2208 return true
2209 }
2210 }
2211 func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
2212 v_1 := v.Args[1]
2213 v_0 := v.Args[0]
2214 b := v.Block
2215 typ := &b.Func.Config.Types
// match: (Lsh32x64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2218 for {
2219 t := v.Type
2220 x := v_0
2221 y := v_1
2222 v.reset(OpMIPS64AND)
2223 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2224 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2225 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2226 v2.AuxInt = int64ToAuxInt(64)
2227 v1.AddArg2(v2, y)
2228 v0.AddArg(v1)
2229 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2230 v3.AddArg2(x, y)
2231 v.AddArg2(v0, v3)
2232 return true
2233 }
2234 }
2235 func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
2236 v_1 := v.Args[1]
2237 v_0 := v.Args[0]
2238 b := v.Block
2239 typ := &b.Func.Config.Types
// match: (Lsh32x8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2242 for {
2243 t := v.Type
2244 x := v_0
2245 y := v_1
2246 v.reset(OpMIPS64AND)
2247 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2248 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2249 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2250 v2.AuxInt = int64ToAuxInt(64)
2251 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2252 v3.AddArg(y)
2253 v1.AddArg2(v2, v3)
2254 v0.AddArg(v1)
2255 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2256 v4.AddArg2(x, v3)
2257 v.AddArg2(v0, v4)
2258 return true
2259 }
2260 }
2261 func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
2262 v_1 := v.Args[1]
2263 v_0 := v.Args[0]
2264 b := v.Block
2265 typ := &b.Func.Config.Types
// match: (Lsh64x16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2268 for {
2269 t := v.Type
2270 x := v_0
2271 y := v_1
2272 v.reset(OpMIPS64AND)
2273 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2274 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2275 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2276 v2.AuxInt = int64ToAuxInt(64)
2277 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2278 v3.AddArg(y)
2279 v1.AddArg2(v2, v3)
2280 v0.AddArg(v1)
2281 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2282 v4.AddArg2(x, v3)
2283 v.AddArg2(v0, v4)
2284 return true
2285 }
2286 }
2287 func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
2288 v_1 := v.Args[1]
2289 v_0 := v.Args[0]
2290 b := v.Block
2291 typ := &b.Func.Config.Types
// match: (Lsh64x32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2294 for {
2295 t := v.Type
2296 x := v_0
2297 y := v_1
2298 v.reset(OpMIPS64AND)
2299 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2300 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2301 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2302 v2.AuxInt = int64ToAuxInt(64)
2303 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2304 v3.AddArg(y)
2305 v1.AddArg2(v2, v3)
2306 v0.AddArg(v1)
2307 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2308 v4.AddArg2(x, v3)
2309 v.AddArg2(v0, v4)
2310 return true
2311 }
2312 }
2313 func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
2314 v_1 := v.Args[1]
2315 v_0 := v.Args[0]
2316 b := v.Block
2317 typ := &b.Func.Config.Types
2318 // match: (Lsh64x64 <t> x y)
2319 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2320 for {
2321 t := v.Type
2322 x := v_0
2323 y := v_1
2324 v.reset(OpMIPS64AND)
2325 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2326 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2327 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2328 v2.AuxInt = int64ToAuxInt(64)
2329 v1.AddArg2(v2, y)
2330 v0.AddArg(v1)
2331 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2332 v3.AddArg2(x, y)
2333 v.AddArg2(v0, v3)
2334 return true
2335 }
2336 }
2337 func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
2338 v_1 := v.Args[1]
2339 v_0 := v.Args[0]
2340 b := v.Block
2341 typ := &b.Func.Config.Types
2342 // match: (Lsh64x8 <t> x y)
2343 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2344 for {
2345 t := v.Type
2346 x := v_0
2347 y := v_1
2348 v.reset(OpMIPS64AND)
2349 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2350 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2351 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2352 v2.AuxInt = int64ToAuxInt(64)
2353 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2354 v3.AddArg(y)
2355 v1.AddArg2(v2, v3)
2356 v0.AddArg(v1)
2357 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2358 v4.AddArg2(x, v3)
2359 v.AddArg2(v0, v4)
2360 return true
2361 }
2362 }
2363 func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
2364 v_1 := v.Args[1]
2365 v_0 := v.Args[0]
2366 b := v.Block
2367 typ := &b.Func.Config.Types
2368 // match: (Lsh8x16 <t> x y)
2369 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2370 for {
2371 t := v.Type
2372 x := v_0
2373 y := v_1
2374 v.reset(OpMIPS64AND)
2375 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2376 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2377 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2378 v2.AuxInt = int64ToAuxInt(64)
2379 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2380 v3.AddArg(y)
2381 v1.AddArg2(v2, v3)
2382 v0.AddArg(v1)
2383 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2384 v4.AddArg2(x, v3)
2385 v.AddArg2(v0, v4)
2386 return true
2387 }
2388 }
2389 func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
2390 v_1 := v.Args[1]
2391 v_0 := v.Args[0]
2392 b := v.Block
2393 typ := &b.Func.Config.Types
2394 // match: (Lsh8x32 <t> x y)
2395 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2396 for {
2397 t := v.Type
2398 x := v_0
2399 y := v_1
2400 v.reset(OpMIPS64AND)
2401 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2402 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2403 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2404 v2.AuxInt = int64ToAuxInt(64)
2405 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2406 v3.AddArg(y)
2407 v1.AddArg2(v2, v3)
2408 v0.AddArg(v1)
2409 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2410 v4.AddArg2(x, v3)
2411 v.AddArg2(v0, v4)
2412 return true
2413 }
2414 }
2415 func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
2416 v_1 := v.Args[1]
2417 v_0 := v.Args[0]
2418 b := v.Block
2419 typ := &b.Func.Config.Types
2420 // match: (Lsh8x64 <t> x y)
2421 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2422 for {
2423 t := v.Type
2424 x := v_0
2425 y := v_1
2426 v.reset(OpMIPS64AND)
2427 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2428 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2429 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2430 v2.AuxInt = int64ToAuxInt(64)
2431 v1.AddArg2(v2, y)
2432 v0.AddArg(v1)
2433 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2434 v3.AddArg2(x, y)
2435 v.AddArg2(v0, v3)
2436 return true
2437 }
2438 }
2439 func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
2440 v_1 := v.Args[1]
2441 v_0 := v.Args[0]
2442 b := v.Block
2443 typ := &b.Func.Config.Types
2444 // match: (Lsh8x8 <t> x y)
2445 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2446 for {
2447 t := v.Type
2448 x := v_0
2449 y := v_1
2450 v.reset(OpMIPS64AND)
2451 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2452 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2453 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2454 v2.AuxInt = int64ToAuxInt(64)
2455 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2456 v3.AddArg(y)
2457 v1.AddArg2(v2, v3)
2458 v0.AddArg(v1)
2459 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2460 v4.AddArg2(x, v3)
2461 v.AddArg2(v0, v4)
2462 return true
2463 }
2464 }
2465 func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
2466 v_1 := v.Args[1]
2467 v_0 := v.Args[0]
2468 // match: (ADDV x (MOVVconst <t> [c]))
2469 // cond: is32Bit(c) && !t.IsPtr()
2470 // result: (ADDVconst [c] x)
2471 for {
2472 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2473 x := v_0
2474 if v_1.Op != OpMIPS64MOVVconst {
2475 continue
2476 }
2477 t := v_1.Type
2478 c := auxIntToInt64(v_1.AuxInt)
2479 if !(is32Bit(c) && !t.IsPtr()) {
2480 continue
2481 }
2482 v.reset(OpMIPS64ADDVconst)
2483 v.AuxInt = int64ToAuxInt(c)
2484 v.AddArg(x)
2485 return true
2486 }
2487 break
2488 }
2489 // match: (ADDV x (NEGV y))
2490 // result: (SUBV x y)
2491 for {
2492 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2493 x := v_0
2494 if v_1.Op != OpMIPS64NEGV {
2495 continue
2496 }
2497 y := v_1.Args[0]
2498 v.reset(OpMIPS64SUBV)
2499 v.AddArg2(x, y)
2500 return true
2501 }
2502 break
2503 }
2504 return false
2505 }
2506 func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
2507 v_0 := v.Args[0]
2508 // match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
2509 // cond: is32Bit(off1+int64(off2))
2510 // result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
2511 for {
2512 off1 := auxIntToInt64(v.AuxInt)
2513 if v_0.Op != OpMIPS64MOVVaddr {
2514 break
2515 }
2516 off2 := auxIntToInt32(v_0.AuxInt)
2517 sym := auxToSym(v_0.Aux)
2518 ptr := v_0.Args[0]
2519 if !(is32Bit(off1 + int64(off2))) {
2520 break
2521 }
2522 v.reset(OpMIPS64MOVVaddr)
2523 v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
2524 v.Aux = symToAux(sym)
2525 v.AddArg(ptr)
2526 return true
2527 }
2528 // match: (ADDVconst [0] x)
2529 // result: x
2530 for {
2531 if auxIntToInt64(v.AuxInt) != 0 {
2532 break
2533 }
2534 x := v_0
2535 v.copyOf(x)
2536 return true
2537 }
2538 // match: (ADDVconst [c] (MOVVconst [d]))
2539 // result: (MOVVconst [c+d])
2540 for {
2541 c := auxIntToInt64(v.AuxInt)
2542 if v_0.Op != OpMIPS64MOVVconst {
2543 break
2544 }
2545 d := auxIntToInt64(v_0.AuxInt)
2546 v.reset(OpMIPS64MOVVconst)
2547 v.AuxInt = int64ToAuxInt(c + d)
2548 return true
2549 }
2550 // match: (ADDVconst [c] (ADDVconst [d] x))
2551 // cond: is32Bit(c+d)
2552 // result: (ADDVconst [c+d] x)
2553 for {
2554 c := auxIntToInt64(v.AuxInt)
2555 if v_0.Op != OpMIPS64ADDVconst {
2556 break
2557 }
2558 d := auxIntToInt64(v_0.AuxInt)
2559 x := v_0.Args[0]
2560 if !(is32Bit(c + d)) {
2561 break
2562 }
2563 v.reset(OpMIPS64ADDVconst)
2564 v.AuxInt = int64ToAuxInt(c + d)
2565 v.AddArg(x)
2566 return true
2567 }
2568 // match: (ADDVconst [c] (SUBVconst [d] x))
2569 // cond: is32Bit(c-d)
2570 // result: (ADDVconst [c-d] x)
2571 for {
2572 c := auxIntToInt64(v.AuxInt)
2573 if v_0.Op != OpMIPS64SUBVconst {
2574 break
2575 }
2576 d := auxIntToInt64(v_0.AuxInt)
2577 x := v_0.Args[0]
2578 if !(is32Bit(c - d)) {
2579 break
2580 }
2581 v.reset(OpMIPS64ADDVconst)
2582 v.AuxInt = int64ToAuxInt(c - d)
2583 v.AddArg(x)
2584 return true
2585 }
2586 return false
2587 }
2588 func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
2589 v_1 := v.Args[1]
2590 v_0 := v.Args[0]
2591 // match: (AND x (MOVVconst [c]))
2592 // cond: is32Bit(c)
2593 // result: (ANDconst [c] x)
2594 for {
2595 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2596 x := v_0
2597 if v_1.Op != OpMIPS64MOVVconst {
2598 continue
2599 }
2600 c := auxIntToInt64(v_1.AuxInt)
2601 if !(is32Bit(c)) {
2602 continue
2603 }
2604 v.reset(OpMIPS64ANDconst)
2605 v.AuxInt = int64ToAuxInt(c)
2606 v.AddArg(x)
2607 return true
2608 }
2609 break
2610 }
2611 // match: (AND x x)
2612 // result: x
2613 for {
2614 x := v_0
2615 if x != v_1 {
2616 break
2617 }
2618 v.copyOf(x)
2619 return true
2620 }
2621 return false
2622 }
2623 func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
2624 v_0 := v.Args[0]
2625 // match: (ANDconst [0] _)
2626 // result: (MOVVconst [0])
2627 for {
2628 if auxIntToInt64(v.AuxInt) != 0 {
2629 break
2630 }
2631 v.reset(OpMIPS64MOVVconst)
2632 v.AuxInt = int64ToAuxInt(0)
2633 return true
2634 }
2635 // match: (ANDconst [-1] x)
2636 // result: x
2637 for {
2638 if auxIntToInt64(v.AuxInt) != -1 {
2639 break
2640 }
2641 x := v_0
2642 v.copyOf(x)
2643 return true
2644 }
2645 // match: (ANDconst [c] (MOVVconst [d]))
2646 // result: (MOVVconst [c&d])
2647 for {
2648 c := auxIntToInt64(v.AuxInt)
2649 if v_0.Op != OpMIPS64MOVVconst {
2650 break
2651 }
2652 d := auxIntToInt64(v_0.AuxInt)
2653 v.reset(OpMIPS64MOVVconst)
2654 v.AuxInt = int64ToAuxInt(c & d)
2655 return true
2656 }
2657 // match: (ANDconst [c] (ANDconst [d] x))
2658 // result: (ANDconst [c&d] x)
2659 for {
2660 c := auxIntToInt64(v.AuxInt)
2661 if v_0.Op != OpMIPS64ANDconst {
2662 break
2663 }
2664 d := auxIntToInt64(v_0.AuxInt)
2665 x := v_0.Args[0]
2666 v.reset(OpMIPS64ANDconst)
2667 v.AuxInt = int64ToAuxInt(c & d)
2668 v.AddArg(x)
2669 return true
2670 }
2671 return false
2672 }
2673 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
2674 v_2 := v.Args[2]
2675 v_1 := v.Args[1]
2676 v_0 := v.Args[0]
2677 // match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
2678 // cond: is32Bit(c)
2679 // result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)
2680 for {
2681 ptr := v_0
2682 if v_1.Op != OpMIPS64MOVVconst {
2683 break
2684 }
2685 c := auxIntToInt64(v_1.AuxInt)
2686 mem := v_2
2687 if !(is32Bit(c)) {
2688 break
2689 }
2690 v.reset(OpMIPS64LoweredAtomicAddconst32)
2691 v.AuxInt = int32ToAuxInt(int32(c))
2692 v.AddArg2(ptr, mem)
2693 return true
2694 }
2695 return false
2696 }
2697 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
2698 v_2 := v.Args[2]
2699 v_1 := v.Args[1]
2700 v_0 := v.Args[0]
2701 // match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
2702 // cond: is32Bit(c)
2703 // result: (LoweredAtomicAddconst64 [c] ptr mem)
2704 for {
2705 ptr := v_0
2706 if v_1.Op != OpMIPS64MOVVconst {
2707 break
2708 }
2709 c := auxIntToInt64(v_1.AuxInt)
2710 mem := v_2
2711 if !(is32Bit(c)) {
2712 break
2713 }
2714 v.reset(OpMIPS64LoweredAtomicAddconst64)
2715 v.AuxInt = int64ToAuxInt(c)
2716 v.AddArg2(ptr, mem)
2717 return true
2718 }
2719 return false
2720 }
2721 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
2722 v_2 := v.Args[2]
2723 v_1 := v.Args[1]
2724 v_0 := v.Args[0]
2725 // match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
2726 // result: (LoweredAtomicStorezero32 ptr mem)
2727 for {
2728 ptr := v_0
2729 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2730 break
2731 }
2732 mem := v_2
2733 v.reset(OpMIPS64LoweredAtomicStorezero32)
2734 v.AddArg2(ptr, mem)
2735 return true
2736 }
2737 return false
2738 }
2739 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
2740 v_2 := v.Args[2]
2741 v_1 := v.Args[1]
2742 v_0 := v.Args[0]
2743 // match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
2744 // result: (LoweredAtomicStorezero64 ptr mem)
2745 for {
2746 ptr := v_0
2747 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2748 break
2749 }
2750 mem := v_2
2751 v.reset(OpMIPS64LoweredAtomicStorezero64)
2752 v.AddArg2(ptr, mem)
2753 return true
2754 }
2755 return false
2756 }
2757 func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
2758 v_1 := v.Args[1]
2759 v_0 := v.Args[0]
2760 b := v.Block
2761 config := b.Func.Config
2762 // match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
2763 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2764 // result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
2765 for {
2766 off1 := auxIntToInt32(v.AuxInt)
2767 sym := auxToSym(v.Aux)
2768 if v_0.Op != OpMIPS64ADDVconst {
2769 break
2770 }
2771 off2 := auxIntToInt64(v_0.AuxInt)
2772 ptr := v_0.Args[0]
2773 mem := v_1
2774 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2775 break
2776 }
2777 v.reset(OpMIPS64MOVBUload)
2778 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2779 v.Aux = symToAux(sym)
2780 v.AddArg2(ptr, mem)
2781 return true
2782 }
2783 // match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
2784 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2785 // result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
2786 for {
2787 off1 := auxIntToInt32(v.AuxInt)
2788 sym1 := auxToSym(v.Aux)
2789 if v_0.Op != OpMIPS64MOVVaddr {
2790 break
2791 }
2792 off2 := auxIntToInt32(v_0.AuxInt)
2793 sym2 := auxToSym(v_0.Aux)
2794 ptr := v_0.Args[0]
2795 mem := v_1
2796 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2797 break
2798 }
2799 v.reset(OpMIPS64MOVBUload)
2800 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2801 v.Aux = symToAux(mergeSym(sym1, sym2))
2802 v.AddArg2(ptr, mem)
2803 return true
2804 }
2805 // match: (MOVBUload [off] {sym} (SB) _)
2806 // cond: symIsRO(sym)
2807 // result: (MOVVconst [int64(read8(sym, int64(off)))])
2808 for {
2809 off := auxIntToInt32(v.AuxInt)
2810 sym := auxToSym(v.Aux)
2811 if v_0.Op != OpSB || !(symIsRO(sym)) {
2812 break
2813 }
2814 v.reset(OpMIPS64MOVVconst)
2815 v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
2816 return true
2817 }
2818 return false
2819 }
2820 func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
2821 v_0 := v.Args[0]
2822 // match: (MOVBUreg x:(MOVBUload _ _))
2823 // result: (MOVVreg x)
2824 for {
2825 x := v_0
2826 if x.Op != OpMIPS64MOVBUload {
2827 break
2828 }
2829 v.reset(OpMIPS64MOVVreg)
2830 v.AddArg(x)
2831 return true
2832 }
2833 // match: (MOVBUreg x:(MOVBUreg _))
2834 // result: (MOVVreg x)
2835 for {
2836 x := v_0
2837 if x.Op != OpMIPS64MOVBUreg {
2838 break
2839 }
2840 v.reset(OpMIPS64MOVVreg)
2841 v.AddArg(x)
2842 return true
2843 }
2844 // match: (MOVBUreg (MOVVconst [c]))
2845 // result: (MOVVconst [int64(uint8(c))])
2846 for {
2847 if v_0.Op != OpMIPS64MOVVconst {
2848 break
2849 }
2850 c := auxIntToInt64(v_0.AuxInt)
2851 v.reset(OpMIPS64MOVVconst)
2852 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
2853 return true
2854 }
2855 return false
2856 }
2857 func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
2858 v_1 := v.Args[1]
2859 v_0 := v.Args[0]
2860 b := v.Block
2861 config := b.Func.Config
2862 // match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
2863 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2864 // result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
2865 for {
2866 off1 := auxIntToInt32(v.AuxInt)
2867 sym := auxToSym(v.Aux)
2868 if v_0.Op != OpMIPS64ADDVconst {
2869 break
2870 }
2871 off2 := auxIntToInt64(v_0.AuxInt)
2872 ptr := v_0.Args[0]
2873 mem := v_1
2874 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2875 break
2876 }
2877 v.reset(OpMIPS64MOVBload)
2878 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2879 v.Aux = symToAux(sym)
2880 v.AddArg2(ptr, mem)
2881 return true
2882 }
2883 // match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
2884 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2885 // result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
2886 for {
2887 off1 := auxIntToInt32(v.AuxInt)
2888 sym1 := auxToSym(v.Aux)
2889 if v_0.Op != OpMIPS64MOVVaddr {
2890 break
2891 }
2892 off2 := auxIntToInt32(v_0.AuxInt)
2893 sym2 := auxToSym(v_0.Aux)
2894 ptr := v_0.Args[0]
2895 mem := v_1
2896 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2897 break
2898 }
2899 v.reset(OpMIPS64MOVBload)
2900 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2901 v.Aux = symToAux(mergeSym(sym1, sym2))
2902 v.AddArg2(ptr, mem)
2903 return true
2904 }
2905 // match: (MOVBload [off] {sym} (SB) _)
2906 // cond: symIsRO(sym)
2907 // result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
2908 for {
2909 off := auxIntToInt32(v.AuxInt)
2910 sym := auxToSym(v.Aux)
2911 if v_0.Op != OpSB || !(symIsRO(sym)) {
2912 break
2913 }
2914 v.reset(OpMIPS64MOVVconst)
2915 v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
2916 return true
2917 }
2918 return false
2919 }
2920 func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
2921 v_0 := v.Args[0]
2922 // match: (MOVBreg x:(MOVBload _ _))
2923 // result: (MOVVreg x)
2924 for {
2925 x := v_0
2926 if x.Op != OpMIPS64MOVBload {
2927 break
2928 }
2929 v.reset(OpMIPS64MOVVreg)
2930 v.AddArg(x)
2931 return true
2932 }
2933 // match: (MOVBreg x:(MOVBreg _))
2934 // result: (MOVVreg x)
2935 for {
2936 x := v_0
2937 if x.Op != OpMIPS64MOVBreg {
2938 break
2939 }
2940 v.reset(OpMIPS64MOVVreg)
2941 v.AddArg(x)
2942 return true
2943 }
2944 // match: (MOVBreg (MOVVconst [c]))
2945 // result: (MOVVconst [int64(int8(c))])
2946 for {
2947 if v_0.Op != OpMIPS64MOVVconst {
2948 break
2949 }
2950 c := auxIntToInt64(v_0.AuxInt)
2951 v.reset(OpMIPS64MOVVconst)
2952 v.AuxInt = int64ToAuxInt(int64(int8(c)))
2953 return true
2954 }
2955 return false
2956 }
2957 func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
2958 v_2 := v.Args[2]
2959 v_1 := v.Args[1]
2960 v_0 := v.Args[0]
2961 b := v.Block
2962 config := b.Func.Config
2963 // match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
2964 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2965 // result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
2966 for {
2967 off1 := auxIntToInt32(v.AuxInt)
2968 sym := auxToSym(v.Aux)
2969 if v_0.Op != OpMIPS64ADDVconst {
2970 break
2971 }
2972 off2 := auxIntToInt64(v_0.AuxInt)
2973 ptr := v_0.Args[0]
2974 val := v_1
2975 mem := v_2
2976 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2977 break
2978 }
2979 v.reset(OpMIPS64MOVBstore)
2980 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2981 v.Aux = symToAux(sym)
2982 v.AddArg3(ptr, val, mem)
2983 return true
2984 }
2985 // match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
2986 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2987 // result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
2988 for {
2989 off1 := auxIntToInt32(v.AuxInt)
2990 sym1 := auxToSym(v.Aux)
2991 if v_0.Op != OpMIPS64MOVVaddr {
2992 break
2993 }
2994 off2 := auxIntToInt32(v_0.AuxInt)
2995 sym2 := auxToSym(v_0.Aux)
2996 ptr := v_0.Args[0]
2997 val := v_1
2998 mem := v_2
2999 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3000 break
3001 }
3002 v.reset(OpMIPS64MOVBstore)
3003 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3004 v.Aux = symToAux(mergeSym(sym1, sym2))
3005 v.AddArg3(ptr, val, mem)
3006 return true
3007 }
3008 // match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
3009 // result: (MOVBstorezero [off] {sym} ptr mem)
3010 for {
3011 off := auxIntToInt32(v.AuxInt)
3012 sym := auxToSym(v.Aux)
3013 ptr := v_0
3014 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3015 break
3016 }
3017 mem := v_2
3018 v.reset(OpMIPS64MOVBstorezero)
3019 v.AuxInt = int32ToAuxInt(off)
3020 v.Aux = symToAux(sym)
3021 v.AddArg2(ptr, mem)
3022 return true
3023 }
3024 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
3025 // result: (MOVBstore [off] {sym} ptr x mem)
3026 for {
3027 off := auxIntToInt32(v.AuxInt)
3028 sym := auxToSym(v.Aux)
3029 ptr := v_0
3030 if v_1.Op != OpMIPS64MOVBreg {
3031 break
3032 }
3033 x := v_1.Args[0]
3034 mem := v_2
3035 v.reset(OpMIPS64MOVBstore)
3036 v.AuxInt = int32ToAuxInt(off)
3037 v.Aux = symToAux(sym)
3038 v.AddArg3(ptr, x, mem)
3039 return true
3040 }
3041 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
3042 // result: (MOVBstore [off] {sym} ptr x mem)
3043 for {
3044 off := auxIntToInt32(v.AuxInt)
3045 sym := auxToSym(v.Aux)
3046 ptr := v_0
3047 if v_1.Op != OpMIPS64MOVBUreg {
3048 break
3049 }
3050 x := v_1.Args[0]
3051 mem := v_2
3052 v.reset(OpMIPS64MOVBstore)
3053 v.AuxInt = int32ToAuxInt(off)
3054 v.Aux = symToAux(sym)
3055 v.AddArg3(ptr, x, mem)
3056 return true
3057 }
3058 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
3059 // result: (MOVBstore [off] {sym} ptr x mem)
3060 for {
3061 off := auxIntToInt32(v.AuxInt)
3062 sym := auxToSym(v.Aux)
3063 ptr := v_0
3064 if v_1.Op != OpMIPS64MOVHreg {
3065 break
3066 }
3067 x := v_1.Args[0]
3068 mem := v_2
3069 v.reset(OpMIPS64MOVBstore)
3070 v.AuxInt = int32ToAuxInt(off)
3071 v.Aux = symToAux(sym)
3072 v.AddArg3(ptr, x, mem)
3073 return true
3074 }
3075 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
3076 // result: (MOVBstore [off] {sym} ptr x mem)
3077 for {
3078 off := auxIntToInt32(v.AuxInt)
3079 sym := auxToSym(v.Aux)
3080 ptr := v_0
3081 if v_1.Op != OpMIPS64MOVHUreg {
3082 break
3083 }
3084 x := v_1.Args[0]
3085 mem := v_2
3086 v.reset(OpMIPS64MOVBstore)
3087 v.AuxInt = int32ToAuxInt(off)
3088 v.Aux = symToAux(sym)
3089 v.AddArg3(ptr, x, mem)
3090 return true
3091 }
3092 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
3093 // result: (MOVBstore [off] {sym} ptr x mem)
3094 for {
3095 off := auxIntToInt32(v.AuxInt)
3096 sym := auxToSym(v.Aux)
3097 ptr := v_0
3098 if v_1.Op != OpMIPS64MOVWreg {
3099 break
3100 }
3101 x := v_1.Args[0]
3102 mem := v_2
3103 v.reset(OpMIPS64MOVBstore)
3104 v.AuxInt = int32ToAuxInt(off)
3105 v.Aux = symToAux(sym)
3106 v.AddArg3(ptr, x, mem)
3107 return true
3108 }
3109 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
3110 // result: (MOVBstore [off] {sym} ptr x mem)
3111 for {
3112 off := auxIntToInt32(v.AuxInt)
3113 sym := auxToSym(v.Aux)
3114 ptr := v_0
3115 if v_1.Op != OpMIPS64MOVWUreg {
3116 break
3117 }
3118 x := v_1.Args[0]
3119 mem := v_2
3120 v.reset(OpMIPS64MOVBstore)
3121 v.AuxInt = int32ToAuxInt(off)
3122 v.Aux = symToAux(sym)
3123 v.AddArg3(ptr, x, mem)
3124 return true
3125 }
3126 return false
3127 }
3128 func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
3129 v_1 := v.Args[1]
3130 v_0 := v.Args[0]
3131 b := v.Block
3132 config := b.Func.Config
3133 // match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
3134 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
3135 // result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
3136 for {
3137 off1 := auxIntToInt32(v.AuxInt)
3138 sym := auxToSym(v.Aux)
3139 if v_0.Op != OpMIPS64ADDVconst {
3140 break
3141 }
3142 off2 := auxIntToInt64(v_0.AuxInt)
3143 ptr := v_0.Args[0]
3144 mem := v_1
3145 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3146 break
3147 }
3148 v.reset(OpMIPS64MOVBstorezero)
3149 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3150 v.Aux = symToAux(sym)
3151 v.AddArg2(ptr, mem)
3152 return true
3153 }
3154 // match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
3155 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
3156 // result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
3157 for {
3158 off1 := auxIntToInt32(v.AuxInt)
3159 sym1 := auxToSym(v.Aux)
3160 if v_0.Op != OpMIPS64MOVVaddr {
3161 break
3162 }
3163 off2 := auxIntToInt32(v_0.AuxInt)
3164 sym2 := auxToSym(v_0.Aux)
3165 ptr := v_0.Args[0]
3166 mem := v_1
3167 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3168 break
3169 }
3170 v.reset(OpMIPS64MOVBstorezero)
3171 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3172 v.Aux = symToAux(mergeSym(sym1, sym2))
3173 v.AddArg2(ptr, mem)
3174 return true
3175 }
3176 return false
3177 }
3178 func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
3179 v_1 := v.Args[1]
3180 v_0 := v.Args[0]
3181 b := v.Block
3182 config := b.Func.Config
3183 // match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
3184 // result: (MOVVgpfp val)
3185 for {
3186 off := auxIntToInt32(v.AuxInt)
3187 sym := auxToSym(v.Aux)
3188 ptr := v_0
3189 if v_1.Op != OpMIPS64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3190 break
3191 }
3192 val := v_1.Args[1]
3193 if ptr != v_1.Args[0] {
3194 break
3195 }
3196 v.reset(OpMIPS64MOVVgpfp)
3197 v.AddArg(val)
3198 return true
3199 }
3200
3201
3202
3203 for {
3204 off1 := auxIntToInt32(v.AuxInt)
3205 sym := auxToSym(v.Aux)
3206 if v_0.Op != OpMIPS64ADDVconst {
3207 break
3208 }
3209 off2 := auxIntToInt64(v_0.AuxInt)
3210 ptr := v_0.Args[0]
3211 mem := v_1
3212 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3213 break
3214 }
3215 v.reset(OpMIPS64MOVDload)
3216 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3217 v.Aux = symToAux(sym)
3218 v.AddArg2(ptr, mem)
3219 return true
3220 }
3221
3222
3223
3224 for {
3225 off1 := auxIntToInt32(v.AuxInt)
3226 sym1 := auxToSym(v.Aux)
3227 if v_0.Op != OpMIPS64MOVVaddr {
3228 break
3229 }
3230 off2 := auxIntToInt32(v_0.AuxInt)
3231 sym2 := auxToSym(v_0.Aux)
3232 ptr := v_0.Args[0]
3233 mem := v_1
3234 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3235 break
3236 }
3237 v.reset(OpMIPS64MOVDload)
3238 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3239 v.Aux = symToAux(mergeSym(sym1, sym2))
3240 v.AddArg2(ptr, mem)
3241 return true
3242 }
3243 return false
3244 }
3245 func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
3246 v_2 := v.Args[2]
3247 v_1 := v.Args[1]
3248 v_0 := v.Args[0]
3249 b := v.Block
3250 config := b.Func.Config
3251 // match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
3252 // result: (MOVVstore [off] {sym} ptr val mem)
3253 for {
3254 off := auxIntToInt32(v.AuxInt)
3255 sym := auxToSym(v.Aux)
3256 ptr := v_0
3257 if v_1.Op != OpMIPS64MOVVgpfp {
3258 break
3259 }
3260 val := v_1.Args[0]
3261 mem := v_2
3262 v.reset(OpMIPS64MOVVstore)
3263 v.AuxInt = int32ToAuxInt(off)
3264 v.Aux = symToAux(sym)
3265 v.AddArg3(ptr, val, mem)
3266 return true
3267 }
3268
3269
3270
3271 for {
3272 off1 := auxIntToInt32(v.AuxInt)
3273 sym := auxToSym(v.Aux)
3274 if v_0.Op != OpMIPS64ADDVconst {
3275 break
3276 }
3277 off2 := auxIntToInt64(v_0.AuxInt)
3278 ptr := v_0.Args[0]
3279 val := v_1
3280 mem := v_2
3281 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3282 break
3283 }
3284 v.reset(OpMIPS64MOVDstore)
3285 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3286 v.Aux = symToAux(sym)
3287 v.AddArg3(ptr, val, mem)
3288 return true
3289 }
3290
3291
3292
3293 for {
3294 off1 := auxIntToInt32(v.AuxInt)
3295 sym1 := auxToSym(v.Aux)
3296 if v_0.Op != OpMIPS64MOVVaddr {
3297 break
3298 }
3299 off2 := auxIntToInt32(v_0.AuxInt)
3300 sym2 := auxToSym(v_0.Aux)
3301 ptr := v_0.Args[0]
3302 val := v_1
3303 mem := v_2
3304 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3305 break
3306 }
3307 v.reset(OpMIPS64MOVDstore)
3308 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3309 v.Aux = symToAux(mergeSym(sym1, sym2))
3310 v.AddArg3(ptr, val, mem)
3311 return true
3312 }
3313 return false
3314 }
3315 func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
3316 v_1 := v.Args[1]
3317 v_0 := v.Args[0]
3318 b := v.Block
3319 config := b.Func.Config
3320 // match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
3321 // result: (MOVWgpfp val)
3322 for {
3323 off := auxIntToInt32(v.AuxInt)
3324 sym := auxToSym(v.Aux)
3325 ptr := v_0
3326 if v_1.Op != OpMIPS64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3327 break
3328 }
3329 val := v_1.Args[1]
3330 if ptr != v_1.Args[0] {
3331 break
3332 }
3333 v.reset(OpMIPS64MOVWgpfp)
3334 v.AddArg(val)
3335 return true
3336 }
3337
3338
3339
3340 for {
3341 off1 := auxIntToInt32(v.AuxInt)
3342 sym := auxToSym(v.Aux)
3343 if v_0.Op != OpMIPS64ADDVconst {
3344 break
3345 }
3346 off2 := auxIntToInt64(v_0.AuxInt)
3347 ptr := v_0.Args[0]
3348 mem := v_1
3349 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3350 break
3351 }
3352 v.reset(OpMIPS64MOVFload)
3353 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3354 v.Aux = symToAux(sym)
3355 v.AddArg2(ptr, mem)
3356 return true
3357 }
3358
3359
3360
3361 for {
3362 off1 := auxIntToInt32(v.AuxInt)
3363 sym1 := auxToSym(v.Aux)
3364 if v_0.Op != OpMIPS64MOVVaddr {
3365 break
3366 }
3367 off2 := auxIntToInt32(v_0.AuxInt)
3368 sym2 := auxToSym(v_0.Aux)
3369 ptr := v_0.Args[0]
3370 mem := v_1
3371 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3372 break
3373 }
3374 v.reset(OpMIPS64MOVFload)
3375 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3376 v.Aux = symToAux(mergeSym(sym1, sym2))
3377 v.AddArg2(ptr, mem)
3378 return true
3379 }
3380 return false
3381 }
3382 func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
3383 v_2 := v.Args[2]
3384 v_1 := v.Args[1]
3385 v_0 := v.Args[0]
3386 b := v.Block
3387 config := b.Func.Config
3388 // match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
3389 // result: (MOVWstore [off] {sym} ptr val mem)
3390 for {
3391 off := auxIntToInt32(v.AuxInt)
3392 sym := auxToSym(v.Aux)
3393 ptr := v_0
3394 if v_1.Op != OpMIPS64MOVWgpfp {
3395 break
3396 }
3397 val := v_1.Args[0]
3398 mem := v_2
3399 v.reset(OpMIPS64MOVWstore)
3400 v.AuxInt = int32ToAuxInt(off)
3401 v.Aux = symToAux(sym)
3402 v.AddArg3(ptr, val, mem)
3403 return true
3404 }
3405
3406
3407
3408 for {
3409 off1 := auxIntToInt32(v.AuxInt)
3410 sym := auxToSym(v.Aux)
3411 if v_0.Op != OpMIPS64ADDVconst {
3412 break
3413 }
3414 off2 := auxIntToInt64(v_0.AuxInt)
3415 ptr := v_0.Args[0]
3416 val := v_1
3417 mem := v_2
3418 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3419 break
3420 }
3421 v.reset(OpMIPS64MOVFstore)
3422 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3423 v.Aux = symToAux(sym)
3424 v.AddArg3(ptr, val, mem)
3425 return true
3426 }
3427
3428
3429
3430 for {
3431 off1 := auxIntToInt32(v.AuxInt)
3432 sym1 := auxToSym(v.Aux)
3433 if v_0.Op != OpMIPS64MOVVaddr {
3434 break
3435 }
3436 off2 := auxIntToInt32(v_0.AuxInt)
3437 sym2 := auxToSym(v_0.Aux)
3438 ptr := v_0.Args[0]
3439 val := v_1
3440 mem := v_2
3441 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3442 break
3443 }
3444 v.reset(OpMIPS64MOVFstore)
3445 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3446 v.Aux = symToAux(mergeSym(sym1, sym2))
3447 v.AddArg3(ptr, val, mem)
3448 return true
3449 }
3450 return false
3451 }
3452 func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
3453 v_1 := v.Args[1]
3454 v_0 := v.Args[0]
3455 b := v.Block
3456 config := b.Func.Config
3457
3458
3459
3460 for {
3461 off1 := auxIntToInt32(v.AuxInt)
3462 sym := auxToSym(v.Aux)
3463 if v_0.Op != OpMIPS64ADDVconst {
3464 break
3465 }
3466 off2 := auxIntToInt64(v_0.AuxInt)
3467 ptr := v_0.Args[0]
3468 mem := v_1
3469 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3470 break
3471 }
3472 v.reset(OpMIPS64MOVHUload)
3473 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3474 v.Aux = symToAux(sym)
3475 v.AddArg2(ptr, mem)
3476 return true
3477 }
3478
3479
3480
3481 for {
3482 off1 := auxIntToInt32(v.AuxInt)
3483 sym1 := auxToSym(v.Aux)
3484 if v_0.Op != OpMIPS64MOVVaddr {
3485 break
3486 }
3487 off2 := auxIntToInt32(v_0.AuxInt)
3488 sym2 := auxToSym(v_0.Aux)
3489 ptr := v_0.Args[0]
3490 mem := v_1
3491 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3492 break
3493 }
3494 v.reset(OpMIPS64MOVHUload)
3495 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3496 v.Aux = symToAux(mergeSym(sym1, sym2))
3497 v.AddArg2(ptr, mem)
3498 return true
3499 }
3500
3501
3502
3503 for {
3504 off := auxIntToInt32(v.AuxInt)
3505 sym := auxToSym(v.Aux)
3506 if v_0.Op != OpSB || !(symIsRO(sym)) {
3507 break
3508 }
3509 v.reset(OpMIPS64MOVVconst)
3510 v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3511 return true
3512 }
3513 return false
3514 }
3515 func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
3516 v_0 := v.Args[0]
3517
3518
3519 for {
3520 x := v_0
3521 if x.Op != OpMIPS64MOVBUload {
3522 break
3523 }
3524 v.reset(OpMIPS64MOVVreg)
3525 v.AddArg(x)
3526 return true
3527 }
3528
3529
3530 for {
3531 x := v_0
3532 if x.Op != OpMIPS64MOVHUload {
3533 break
3534 }
3535 v.reset(OpMIPS64MOVVreg)
3536 v.AddArg(x)
3537 return true
3538 }
3539
3540
3541 for {
3542 x := v_0
3543 if x.Op != OpMIPS64MOVBUreg {
3544 break
3545 }
3546 v.reset(OpMIPS64MOVVreg)
3547 v.AddArg(x)
3548 return true
3549 }
3550
3551
3552 for {
3553 x := v_0
3554 if x.Op != OpMIPS64MOVHUreg {
3555 break
3556 }
3557 v.reset(OpMIPS64MOVVreg)
3558 v.AddArg(x)
3559 return true
3560 }
3561 // match: (MOVHUreg (MOVVconst [c]))
3562 // result: (MOVVconst [int64(uint16(c))])
3563 for {
3564 if v_0.Op != OpMIPS64MOVVconst {
3565 break
3566 }
3567 c := auxIntToInt64(v_0.AuxInt)
3568 v.reset(OpMIPS64MOVVconst)
3569 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
3570 return true
3571 }
3572 return false
3573 }
3574 func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
3575 v_1 := v.Args[1]
3576 v_0 := v.Args[0]
3577 b := v.Block
3578 config := b.Func.Config
3579
3580
3581
3582 for {
3583 off1 := auxIntToInt32(v.AuxInt)
3584 sym := auxToSym(v.Aux)
3585 if v_0.Op != OpMIPS64ADDVconst {
3586 break
3587 }
3588 off2 := auxIntToInt64(v_0.AuxInt)
3589 ptr := v_0.Args[0]
3590 mem := v_1
3591 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3592 break
3593 }
3594 v.reset(OpMIPS64MOVHload)
3595 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3596 v.Aux = symToAux(sym)
3597 v.AddArg2(ptr, mem)
3598 return true
3599 }
3600
3601
3602
3603 for {
3604 off1 := auxIntToInt32(v.AuxInt)
3605 sym1 := auxToSym(v.Aux)
3606 if v_0.Op != OpMIPS64MOVVaddr {
3607 break
3608 }
3609 off2 := auxIntToInt32(v_0.AuxInt)
3610 sym2 := auxToSym(v_0.Aux)
3611 ptr := v_0.Args[0]
3612 mem := v_1
3613 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3614 break
3615 }
3616 v.reset(OpMIPS64MOVHload)
3617 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3618 v.Aux = symToAux(mergeSym(sym1, sym2))
3619 v.AddArg2(ptr, mem)
3620 return true
3621 }
3622
3623
3624
3625 for {
3626 off := auxIntToInt32(v.AuxInt)
3627 sym := auxToSym(v.Aux)
3628 if v_0.Op != OpSB || !(symIsRO(sym)) {
3629 break
3630 }
3631 v.reset(OpMIPS64MOVVconst)
3632 v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
3633 return true
3634 }
3635 return false
3636 }
3637 func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
3638 v_0 := v.Args[0]
3639
3640
3641 for {
3642 x := v_0
3643 if x.Op != OpMIPS64MOVBload {
3644 break
3645 }
3646 v.reset(OpMIPS64MOVVreg)
3647 v.AddArg(x)
3648 return true
3649 }
3650
3651
3652 for {
3653 x := v_0
3654 if x.Op != OpMIPS64MOVBUload {
3655 break
3656 }
3657 v.reset(OpMIPS64MOVVreg)
3658 v.AddArg(x)
3659 return true
3660 }
3661
3662
3663 for {
3664 x := v_0
3665 if x.Op != OpMIPS64MOVHload {
3666 break
3667 }
3668 v.reset(OpMIPS64MOVVreg)
3669 v.AddArg(x)
3670 return true
3671 }
3672
3673
3674 for {
3675 x := v_0
3676 if x.Op != OpMIPS64MOVBreg {
3677 break
3678 }
3679 v.reset(OpMIPS64MOVVreg)
3680 v.AddArg(x)
3681 return true
3682 }
3683
3684
3685 for {
3686 x := v_0
3687 if x.Op != OpMIPS64MOVBUreg {
3688 break
3689 }
3690 v.reset(OpMIPS64MOVVreg)
3691 v.AddArg(x)
3692 return true
3693 }
3694
3695
3696 for {
3697 x := v_0
3698 if x.Op != OpMIPS64MOVHreg {
3699 break
3700 }
3701 v.reset(OpMIPS64MOVVreg)
3702 v.AddArg(x)
3703 return true
3704 }
3705 // match: (MOVHreg (MOVVconst [c]))
3706 // result: (MOVVconst [int64(int16(c))])
3707 for {
3708 if v_0.Op != OpMIPS64MOVVconst {
3709 break
3710 }
3711 c := auxIntToInt64(v_0.AuxInt)
3712 v.reset(OpMIPS64MOVVconst)
3713 v.AuxInt = int64ToAuxInt(int64(int16(c)))
3714 return true
3715 }
3716 return false
3717 }
3718 func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
3719 v_2 := v.Args[2]
3720 v_1 := v.Args[1]
3721 v_0 := v.Args[0]
3722 b := v.Block
3723 config := b.Func.Config
3724
3725
3726
3727 for {
3728 off1 := auxIntToInt32(v.AuxInt)
3729 sym := auxToSym(v.Aux)
3730 if v_0.Op != OpMIPS64ADDVconst {
3731 break
3732 }
3733 off2 := auxIntToInt64(v_0.AuxInt)
3734 ptr := v_0.Args[0]
3735 val := v_1
3736 mem := v_2
3737 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3738 break
3739 }
3740 v.reset(OpMIPS64MOVHstore)
3741 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3742 v.Aux = symToAux(sym)
3743 v.AddArg3(ptr, val, mem)
3744 return true
3745 }
3746
3747
3748
3749 for {
3750 off1 := auxIntToInt32(v.AuxInt)
3751 sym1 := auxToSym(v.Aux)
3752 if v_0.Op != OpMIPS64MOVVaddr {
3753 break
3754 }
3755 off2 := auxIntToInt32(v_0.AuxInt)
3756 sym2 := auxToSym(v_0.Aux)
3757 ptr := v_0.Args[0]
3758 val := v_1
3759 mem := v_2
3760 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3761 break
3762 }
3763 v.reset(OpMIPS64MOVHstore)
3764 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3765 v.Aux = symToAux(mergeSym(sym1, sym2))
3766 v.AddArg3(ptr, val, mem)
3767 return true
3768 }
3769
3770
3771 for {
3772 off := auxIntToInt32(v.AuxInt)
3773 sym := auxToSym(v.Aux)
3774 ptr := v_0
3775 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3776 break
3777 }
3778 mem := v_2
3779 v.reset(OpMIPS64MOVHstorezero)
3780 v.AuxInt = int32ToAuxInt(off)
3781 v.Aux = symToAux(sym)
3782 v.AddArg2(ptr, mem)
3783 return true
3784 }
3785
3786
3787 for {
3788 off := auxIntToInt32(v.AuxInt)
3789 sym := auxToSym(v.Aux)
3790 ptr := v_0
3791 if v_1.Op != OpMIPS64MOVHreg {
3792 break
3793 }
3794 x := v_1.Args[0]
3795 mem := v_2
3796 v.reset(OpMIPS64MOVHstore)
3797 v.AuxInt = int32ToAuxInt(off)
3798 v.Aux = symToAux(sym)
3799 v.AddArg3(ptr, x, mem)
3800 return true
3801 }
3802
3803
3804 for {
3805 off := auxIntToInt32(v.AuxInt)
3806 sym := auxToSym(v.Aux)
3807 ptr := v_0
3808 if v_1.Op != OpMIPS64MOVHUreg {
3809 break
3810 }
3811 x := v_1.Args[0]
3812 mem := v_2
3813 v.reset(OpMIPS64MOVHstore)
3814 v.AuxInt = int32ToAuxInt(off)
3815 v.Aux = symToAux(sym)
3816 v.AddArg3(ptr, x, mem)
3817 return true
3818 }
3819
3820
3821 for {
3822 off := auxIntToInt32(v.AuxInt)
3823 sym := auxToSym(v.Aux)
3824 ptr := v_0
3825 if v_1.Op != OpMIPS64MOVWreg {
3826 break
3827 }
3828 x := v_1.Args[0]
3829 mem := v_2
3830 v.reset(OpMIPS64MOVHstore)
3831 v.AuxInt = int32ToAuxInt(off)
3832 v.Aux = symToAux(sym)
3833 v.AddArg3(ptr, x, mem)
3834 return true
3835 }
3836
3837
3838 for {
3839 off := auxIntToInt32(v.AuxInt)
3840 sym := auxToSym(v.Aux)
3841 ptr := v_0
3842 if v_1.Op != OpMIPS64MOVWUreg {
3843 break
3844 }
3845 x := v_1.Args[0]
3846 mem := v_2
3847 v.reset(OpMIPS64MOVHstore)
3848 v.AuxInt = int32ToAuxInt(off)
3849 v.Aux = symToAux(sym)
3850 v.AddArg3(ptr, x, mem)
3851 return true
3852 }
3853 return false
3854 }
3855 func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
3856 v_1 := v.Args[1]
3857 v_0 := v.Args[0]
3858 b := v.Block
3859 config := b.Func.Config
3860
3861
3862
3863 for {
3864 off1 := auxIntToInt32(v.AuxInt)
3865 sym := auxToSym(v.Aux)
3866 if v_0.Op != OpMIPS64ADDVconst {
3867 break
3868 }
3869 off2 := auxIntToInt64(v_0.AuxInt)
3870 ptr := v_0.Args[0]
3871 mem := v_1
3872 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3873 break
3874 }
3875 v.reset(OpMIPS64MOVHstorezero)
3876 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3877 v.Aux = symToAux(sym)
3878 v.AddArg2(ptr, mem)
3879 return true
3880 }
3881
3882
3883
3884 for {
3885 off1 := auxIntToInt32(v.AuxInt)
3886 sym1 := auxToSym(v.Aux)
3887 if v_0.Op != OpMIPS64MOVVaddr {
3888 break
3889 }
3890 off2 := auxIntToInt32(v_0.AuxInt)
3891 sym2 := auxToSym(v_0.Aux)
3892 ptr := v_0.Args[0]
3893 mem := v_1
3894 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3895 break
3896 }
3897 v.reset(OpMIPS64MOVHstorezero)
3898 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3899 v.Aux = symToAux(mergeSym(sym1, sym2))
3900 v.AddArg2(ptr, mem)
3901 return true
3902 }
3903 return false
3904 }
3905 func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
3906 v_1 := v.Args[1]
3907 v_0 := v.Args[0]
3908 b := v.Block
3909 config := b.Func.Config
3910 // match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
3911 // result: (MOVVfpgp val)
3912 for {
3913 off := auxIntToInt32(v.AuxInt)
3914 sym := auxToSym(v.Aux)
3915 ptr := v_0
3916 if v_1.Op != OpMIPS64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3917 break
3918 }
3919 val := v_1.Args[1]
3920 if ptr != v_1.Args[0] {
3921 break
3922 }
3923 v.reset(OpMIPS64MOVVfpgp)
3924 v.AddArg(val)
3925 return true
3926 }
3927
3928
3929
3930 for {
3931 off1 := auxIntToInt32(v.AuxInt)
3932 sym := auxToSym(v.Aux)
3933 if v_0.Op != OpMIPS64ADDVconst {
3934 break
3935 }
3936 off2 := auxIntToInt64(v_0.AuxInt)
3937 ptr := v_0.Args[0]
3938 mem := v_1
3939 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3940 break
3941 }
3942 v.reset(OpMIPS64MOVVload)
3943 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3944 v.Aux = symToAux(sym)
3945 v.AddArg2(ptr, mem)
3946 return true
3947 }
3948
3949
3950
3951 for {
3952 off1 := auxIntToInt32(v.AuxInt)
3953 sym1 := auxToSym(v.Aux)
3954 if v_0.Op != OpMIPS64MOVVaddr {
3955 break
3956 }
3957 off2 := auxIntToInt32(v_0.AuxInt)
3958 sym2 := auxToSym(v_0.Aux)
3959 ptr := v_0.Args[0]
3960 mem := v_1
3961 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3962 break
3963 }
3964 v.reset(OpMIPS64MOVVload)
3965 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3966 v.Aux = symToAux(mergeSym(sym1, sym2))
3967 v.AddArg2(ptr, mem)
3968 return true
3969 }
3970
3971
3972
3973 for {
3974 off := auxIntToInt32(v.AuxInt)
3975 sym := auxToSym(v.Aux)
3976 if v_0.Op != OpSB || !(symIsRO(sym)) {
3977 break
3978 }
3979 v.reset(OpMIPS64MOVVconst)
3980 v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3981 return true
3982 }
3983 return false
3984 }
3985 func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
3986 v_0 := v.Args[0]
3987 // match: (MOVVnop (MOVVconst [c]))
3988 // result: (MOVVconst [c])
3989 for {
3990 if v_0.Op != OpMIPS64MOVVconst {
3991 break
3992 }
3993 c := auxIntToInt64(v_0.AuxInt)
3994 v.reset(OpMIPS64MOVVconst)
3995 v.AuxInt = int64ToAuxInt(c)
3996 return true
3997 }
3998 return false
3999 }
4000 func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
4001 v_0 := v.Args[0]
4002 // match: (MOVVreg x)
4003 // cond: x.Uses == 1
4004 // result: (MOVVnop x)
4005 for {
4006 x := v_0
4007 if !(x.Uses == 1) {
4008 break
4009 }
4010 v.reset(OpMIPS64MOVVnop)
4011 v.AddArg(x)
4012 return true
4013 }
4014 // match: (MOVVreg (MOVVconst [c]))
4015 // result: (MOVVconst [c])
4016 for {
4017 if v_0.Op != OpMIPS64MOVVconst {
4018 break
4019 }
4020 c := auxIntToInt64(v_0.AuxInt)
4021 v.reset(OpMIPS64MOVVconst)
4022 v.AuxInt = int64ToAuxInt(c)
4023 return true
4024 }
4025 return false
4026 }
4027 func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
4028 v_2 := v.Args[2]
4029 v_1 := v.Args[1]
4030 v_0 := v.Args[0]
4031 b := v.Block
4032 config := b.Func.Config
4033 // match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
4034 // result: (MOVDstore [off] {sym} ptr val mem)
4035 for {
4036 off := auxIntToInt32(v.AuxInt)
4037 sym := auxToSym(v.Aux)
4038 ptr := v_0
4039 if v_1.Op != OpMIPS64MOVVfpgp {
4040 break
4041 }
4042 val := v_1.Args[0]
4043 mem := v_2
4044 v.reset(OpMIPS64MOVDstore)
4045 v.AuxInt = int32ToAuxInt(off)
4046 v.Aux = symToAux(sym)
4047 v.AddArg3(ptr, val, mem)
4048 return true
4049 }
4050
4051
4052
4053 for {
4054 off1 := auxIntToInt32(v.AuxInt)
4055 sym := auxToSym(v.Aux)
4056 if v_0.Op != OpMIPS64ADDVconst {
4057 break
4058 }
4059 off2 := auxIntToInt64(v_0.AuxInt)
4060 ptr := v_0.Args[0]
4061 val := v_1
4062 mem := v_2
4063 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4064 break
4065 }
4066 v.reset(OpMIPS64MOVVstore)
4067 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4068 v.Aux = symToAux(sym)
4069 v.AddArg3(ptr, val, mem)
4070 return true
4071 }
4072
4073
4074
4075 for {
4076 off1 := auxIntToInt32(v.AuxInt)
4077 sym1 := auxToSym(v.Aux)
4078 if v_0.Op != OpMIPS64MOVVaddr {
4079 break
4080 }
4081 off2 := auxIntToInt32(v_0.AuxInt)
4082 sym2 := auxToSym(v_0.Aux)
4083 ptr := v_0.Args[0]
4084 val := v_1
4085 mem := v_2
4086 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4087 break
4088 }
4089 v.reset(OpMIPS64MOVVstore)
4090 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4091 v.Aux = symToAux(mergeSym(sym1, sym2))
4092 v.AddArg3(ptr, val, mem)
4093 return true
4094 }
4095 // match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
4096 // result: (MOVVstorezero [off] {sym} ptr mem)
4097 for {
4098 off := auxIntToInt32(v.AuxInt)
4099 sym := auxToSym(v.Aux)
4100 ptr := v_0
4101 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4102 break
4103 }
4104 mem := v_2
4105 v.reset(OpMIPS64MOVVstorezero)
4106 v.AuxInt = int32ToAuxInt(off)
4107 v.Aux = symToAux(sym)
4108 v.AddArg2(ptr, mem)
4109 return true
4110 }
4111 return false
4112 }
4113 func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
4114 v_1 := v.Args[1]
4115 v_0 := v.Args[0]
4116 b := v.Block
4117 config := b.Func.Config
4118
4119
4120
4121 for {
4122 off1 := auxIntToInt32(v.AuxInt)
4123 sym := auxToSym(v.Aux)
4124 if v_0.Op != OpMIPS64ADDVconst {
4125 break
4126 }
4127 off2 := auxIntToInt64(v_0.AuxInt)
4128 ptr := v_0.Args[0]
4129 mem := v_1
4130 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4131 break
4132 }
4133 v.reset(OpMIPS64MOVVstorezero)
4134 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4135 v.Aux = symToAux(sym)
4136 v.AddArg2(ptr, mem)
4137 return true
4138 }
4139
4140
4141
4142 for {
4143 off1 := auxIntToInt32(v.AuxInt)
4144 sym1 := auxToSym(v.Aux)
4145 if v_0.Op != OpMIPS64MOVVaddr {
4146 break
4147 }
4148 off2 := auxIntToInt32(v_0.AuxInt)
4149 sym2 := auxToSym(v_0.Aux)
4150 ptr := v_0.Args[0]
4151 mem := v_1
4152 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4153 break
4154 }
4155 v.reset(OpMIPS64MOVVstorezero)
4156 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4157 v.Aux = symToAux(mergeSym(sym1, sym2))
4158 v.AddArg2(ptr, mem)
4159 return true
4160 }
4161 return false
4162 }
4163 func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
4164 v_1 := v.Args[1]
4165 v_0 := v.Args[0]
4166 b := v.Block
4167 config := b.Func.Config
4168 typ := &b.Func.Config.Types
4169 // match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
4170 // result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
4171 for {
4172 off := auxIntToInt32(v.AuxInt)
4173 sym := auxToSym(v.Aux)
4174 ptr := v_0
4175 if v_1.Op != OpMIPS64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4176 break
4177 }
4178 val := v_1.Args[1]
4179 if ptr != v_1.Args[0] {
4180 break
4181 }
4182 v.reset(OpZeroExt32to64)
4183 v0 := b.NewValue0(v_1.Pos, OpMIPS64MOVWfpgp, typ.Float32)
4184 v0.AddArg(val)
4185 v.AddArg(v0)
4186 return true
4187 }
4188
4189
4190
4191 for {
4192 off1 := auxIntToInt32(v.AuxInt)
4193 sym := auxToSym(v.Aux)
4194 if v_0.Op != OpMIPS64ADDVconst {
4195 break
4196 }
4197 off2 := auxIntToInt64(v_0.AuxInt)
4198 ptr := v_0.Args[0]
4199 mem := v_1
4200 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4201 break
4202 }
4203 v.reset(OpMIPS64MOVWUload)
4204 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4205 v.Aux = symToAux(sym)
4206 v.AddArg2(ptr, mem)
4207 return true
4208 }
4209
4210
4211
4212 for {
4213 off1 := auxIntToInt32(v.AuxInt)
4214 sym1 := auxToSym(v.Aux)
4215 if v_0.Op != OpMIPS64MOVVaddr {
4216 break
4217 }
4218 off2 := auxIntToInt32(v_0.AuxInt)
4219 sym2 := auxToSym(v_0.Aux)
4220 ptr := v_0.Args[0]
4221 mem := v_1
4222 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4223 break
4224 }
4225 v.reset(OpMIPS64MOVWUload)
4226 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4227 v.Aux = symToAux(mergeSym(sym1, sym2))
4228 v.AddArg2(ptr, mem)
4229 return true
4230 }
4231
4232
4233
4234 for {
4235 off := auxIntToInt32(v.AuxInt)
4236 sym := auxToSym(v.Aux)
4237 if v_0.Op != OpSB || !(symIsRO(sym)) {
4238 break
4239 }
4240 v.reset(OpMIPS64MOVVconst)
4241 v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
4242 return true
4243 }
4244 return false
4245 }
4246 func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
4247 v_0 := v.Args[0]
4248
4249
4250 for {
4251 x := v_0
4252 if x.Op != OpMIPS64MOVBUload {
4253 break
4254 }
4255 v.reset(OpMIPS64MOVVreg)
4256 v.AddArg(x)
4257 return true
4258 }
4259
4260
4261 for {
4262 x := v_0
4263 if x.Op != OpMIPS64MOVHUload {
4264 break
4265 }
4266 v.reset(OpMIPS64MOVVreg)
4267 v.AddArg(x)
4268 return true
4269 }
4270
4271
4272 for {
4273 x := v_0
4274 if x.Op != OpMIPS64MOVWUload {
4275 break
4276 }
4277 v.reset(OpMIPS64MOVVreg)
4278 v.AddArg(x)
4279 return true
4280 }
4281
4282
4283 for {
4284 x := v_0
4285 if x.Op != OpMIPS64MOVBUreg {
4286 break
4287 }
4288 v.reset(OpMIPS64MOVVreg)
4289 v.AddArg(x)
4290 return true
4291 }
4292
4293
4294 for {
4295 x := v_0
4296 if x.Op != OpMIPS64MOVHUreg {
4297 break
4298 }
4299 v.reset(OpMIPS64MOVVreg)
4300 v.AddArg(x)
4301 return true
4302 }
4303
4304
4305 for {
4306 x := v_0
4307 if x.Op != OpMIPS64MOVWUreg {
4308 break
4309 }
4310 v.reset(OpMIPS64MOVVreg)
4311 v.AddArg(x)
4312 return true
4313 }
4314 // match: (MOVWUreg (MOVVconst [c]))
4315 // result: (MOVVconst [int64(uint32(c))])
4316 for {
4317 if v_0.Op != OpMIPS64MOVVconst {
4318 break
4319 }
4320 c := auxIntToInt64(v_0.AuxInt)
4321 v.reset(OpMIPS64MOVVconst)
4322 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
4323 return true
4324 }
4325 return false
4326 }
4327 func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
4328 v_1 := v.Args[1]
4329 v_0 := v.Args[0]
4330 b := v.Block
4331 config := b.Func.Config
4332
4333
4334
4335 for {
4336 off1 := auxIntToInt32(v.AuxInt)
4337 sym := auxToSym(v.Aux)
4338 if v_0.Op != OpMIPS64ADDVconst {
4339 break
4340 }
4341 off2 := auxIntToInt64(v_0.AuxInt)
4342 ptr := v_0.Args[0]
4343 mem := v_1
4344 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4345 break
4346 }
4347 v.reset(OpMIPS64MOVWload)
4348 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4349 v.Aux = symToAux(sym)
4350 v.AddArg2(ptr, mem)
4351 return true
4352 }
4353
4354
4355
4356 for {
4357 off1 := auxIntToInt32(v.AuxInt)
4358 sym1 := auxToSym(v.Aux)
4359 if v_0.Op != OpMIPS64MOVVaddr {
4360 break
4361 }
4362 off2 := auxIntToInt32(v_0.AuxInt)
4363 sym2 := auxToSym(v_0.Aux)
4364 ptr := v_0.Args[0]
4365 mem := v_1
4366 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4367 break
4368 }
4369 v.reset(OpMIPS64MOVWload)
4370 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4371 v.Aux = symToAux(mergeSym(sym1, sym2))
4372 v.AddArg2(ptr, mem)
4373 return true
4374 }
4375
4376
4377
4378 for {
4379 off := auxIntToInt32(v.AuxInt)
4380 sym := auxToSym(v.Aux)
4381 if v_0.Op != OpSB || !(symIsRO(sym)) {
4382 break
4383 }
4384 v.reset(OpMIPS64MOVVconst)
4385 v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
4386 return true
4387 }
4388 return false
4389 }
4390 func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
4391 v_0 := v.Args[0]
4392
4393
4394 for {
4395 x := v_0
4396 if x.Op != OpMIPS64MOVBload {
4397 break
4398 }
4399 v.reset(OpMIPS64MOVVreg)
4400 v.AddArg(x)
4401 return true
4402 }
4403
4404
4405 for {
4406 x := v_0
4407 if x.Op != OpMIPS64MOVBUload {
4408 break
4409 }
4410 v.reset(OpMIPS64MOVVreg)
4411 v.AddArg(x)
4412 return true
4413 }
4414
4415
4416 for {
4417 x := v_0
4418 if x.Op != OpMIPS64MOVHload {
4419 break
4420 }
4421 v.reset(OpMIPS64MOVVreg)
4422 v.AddArg(x)
4423 return true
4424 }
4425
4426
4427 for {
4428 x := v_0
4429 if x.Op != OpMIPS64MOVHUload {
4430 break
4431 }
4432 v.reset(OpMIPS64MOVVreg)
4433 v.AddArg(x)
4434 return true
4435 }
4436
4437
4438 for {
4439 x := v_0
4440 if x.Op != OpMIPS64MOVWload {
4441 break
4442 }
4443 v.reset(OpMIPS64MOVVreg)
4444 v.AddArg(x)
4445 return true
4446 }
4447
4448
4449 for {
4450 x := v_0
4451 if x.Op != OpMIPS64MOVBreg {
4452 break
4453 }
4454 v.reset(OpMIPS64MOVVreg)
4455 v.AddArg(x)
4456 return true
4457 }
4458
4459
4460 for {
4461 x := v_0
4462 if x.Op != OpMIPS64MOVBUreg {
4463 break
4464 }
4465 v.reset(OpMIPS64MOVVreg)
4466 v.AddArg(x)
4467 return true
4468 }
4469
4470
4471 for {
4472 x := v_0
4473 if x.Op != OpMIPS64MOVHreg {
4474 break
4475 }
4476 v.reset(OpMIPS64MOVVreg)
4477 v.AddArg(x)
4478 return true
4479 }
4480
4481
4482 for {
4483 x := v_0
4484 if x.Op != OpMIPS64MOVWreg {
4485 break
4486 }
4487 v.reset(OpMIPS64MOVVreg)
4488 v.AddArg(x)
4489 return true
4490 }
4491 // match: (MOVWreg (MOVVconst [c]))
4492 // result: (MOVVconst [int64(int32(c))])
4493 for {
4494 if v_0.Op != OpMIPS64MOVVconst {
4495 break
4496 }
4497 c := auxIntToInt64(v_0.AuxInt)
4498 v.reset(OpMIPS64MOVVconst)
4499 v.AuxInt = int64ToAuxInt(int64(int32(c)))
4500 return true
4501 }
4502 return false
4503 }
4504 func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
4505 v_2 := v.Args[2]
4506 v_1 := v.Args[1]
4507 v_0 := v.Args[0]
4508 b := v.Block
4509 config := b.Func.Config
4510 // match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
4511 // result: (MOVFstore [off] {sym} ptr val mem)
4512 for {
4513 off := auxIntToInt32(v.AuxInt)
4514 sym := auxToSym(v.Aux)
4515 ptr := v_0
4516 if v_1.Op != OpMIPS64MOVWfpgp {
4517 break
4518 }
4519 val := v_1.Args[0]
4520 mem := v_2
4521 v.reset(OpMIPS64MOVFstore)
4522 v.AuxInt = int32ToAuxInt(off)
4523 v.Aux = symToAux(sym)
4524 v.AddArg3(ptr, val, mem)
4525 return true
4526 }
4527
4528
4529
4530 for {
4531 off1 := auxIntToInt32(v.AuxInt)
4532 sym := auxToSym(v.Aux)
4533 if v_0.Op != OpMIPS64ADDVconst {
4534 break
4535 }
4536 off2 := auxIntToInt64(v_0.AuxInt)
4537 ptr := v_0.Args[0]
4538 val := v_1
4539 mem := v_2
4540 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4541 break
4542 }
4543 v.reset(OpMIPS64MOVWstore)
4544 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4545 v.Aux = symToAux(sym)
4546 v.AddArg3(ptr, val, mem)
4547 return true
4548 }
4549
4550
4551
4552 for {
4553 off1 := auxIntToInt32(v.AuxInt)
4554 sym1 := auxToSym(v.Aux)
4555 if v_0.Op != OpMIPS64MOVVaddr {
4556 break
4557 }
4558 off2 := auxIntToInt32(v_0.AuxInt)
4559 sym2 := auxToSym(v_0.Aux)
4560 ptr := v_0.Args[0]
4561 val := v_1
4562 mem := v_2
4563 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4564 break
4565 }
4566 v.reset(OpMIPS64MOVWstore)
4567 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4568 v.Aux = symToAux(mergeSym(sym1, sym2))
4569 v.AddArg3(ptr, val, mem)
4570 return true
4571 }
4572 // match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
4573 // result: (MOVWstorezero [off] {sym} ptr mem)
4574 for {
4575 off := auxIntToInt32(v.AuxInt)
4576 sym := auxToSym(v.Aux)
4577 ptr := v_0
4578 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4579 break
4580 }
4581 mem := v_2
4582 v.reset(OpMIPS64MOVWstorezero)
4583 v.AuxInt = int32ToAuxInt(off)
4584 v.Aux = symToAux(sym)
4585 v.AddArg2(ptr, mem)
4586 return true
4587 }
4588
4589
4590 for {
4591 off := auxIntToInt32(v.AuxInt)
4592 sym := auxToSym(v.Aux)
4593 ptr := v_0
4594 if v_1.Op != OpMIPS64MOVWreg {
4595 break
4596 }
4597 x := v_1.Args[0]
4598 mem := v_2
4599 v.reset(OpMIPS64MOVWstore)
4600 v.AuxInt = int32ToAuxInt(off)
4601 v.Aux = symToAux(sym)
4602 v.AddArg3(ptr, x, mem)
4603 return true
4604 }
4605
4606
4607 for {
4608 off := auxIntToInt32(v.AuxInt)
4609 sym := auxToSym(v.Aux)
4610 ptr := v_0
4611 if v_1.Op != OpMIPS64MOVWUreg {
4612 break
4613 }
4614 x := v_1.Args[0]
4615 mem := v_2
4616 v.reset(OpMIPS64MOVWstore)
4617 v.AuxInt = int32ToAuxInt(off)
4618 v.Aux = symToAux(sym)
4619 v.AddArg3(ptr, x, mem)
4620 return true
4621 }
4622 return false
4623 }
4624 func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
4625 v_1 := v.Args[1]
4626 v_0 := v.Args[0]
4627 b := v.Block
4628 config := b.Func.Config
4629
4630
4631
4632 for {
4633 off1 := auxIntToInt32(v.AuxInt)
4634 sym := auxToSym(v.Aux)
4635 if v_0.Op != OpMIPS64ADDVconst {
4636 break
4637 }
4638 off2 := auxIntToInt64(v_0.AuxInt)
4639 ptr := v_0.Args[0]
4640 mem := v_1
4641 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4642 break
4643 }
4644 v.reset(OpMIPS64MOVWstorezero)
4645 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4646 v.Aux = symToAux(sym)
4647 v.AddArg2(ptr, mem)
4648 return true
4649 }
4650
4651
4652
4653 for {
4654 off1 := auxIntToInt32(v.AuxInt)
4655 sym1 := auxToSym(v.Aux)
4656 if v_0.Op != OpMIPS64MOVVaddr {
4657 break
4658 }
4659 off2 := auxIntToInt32(v_0.AuxInt)
4660 sym2 := auxToSym(v_0.Aux)
4661 ptr := v_0.Args[0]
4662 mem := v_1
4663 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4664 break
4665 }
4666 v.reset(OpMIPS64MOVWstorezero)
4667 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4668 v.Aux = symToAux(mergeSym(sym1, sym2))
4669 v.AddArg2(ptr, mem)
4670 return true
4671 }
4672 return false
4673 }
4674 func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
4675 v_0 := v.Args[0]
4676 // match: (NEGV (MOVVconst [c]))
4677 // result: (MOVVconst [-c])
4678 for {
4679 if v_0.Op != OpMIPS64MOVVconst {
4680 break
4681 }
4682 c := auxIntToInt64(v_0.AuxInt)
4683 v.reset(OpMIPS64MOVVconst)
4684 v.AuxInt = int64ToAuxInt(-c)
4685 return true
4686 }
4687 return false
4688 }
4689 func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
4690 v_1 := v.Args[1]
4691 v_0 := v.Args[0]
4692 // match: (NOR x (MOVVconst [c]))
4693 // cond: is32Bit(c)
4694 // result: (NORconst [c] x)
4695 for {
4696 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4697 x := v_0
4698 if v_1.Op != OpMIPS64MOVVconst {
4699 continue
4700 }
4701 c := auxIntToInt64(v_1.AuxInt)
4702 if !(is32Bit(c)) {
4703 continue
4704 }
4705 v.reset(OpMIPS64NORconst)
4706 v.AuxInt = int64ToAuxInt(c)
4707 v.AddArg(x)
4708 return true
4709 }
4710 break
4711 }
4712 return false
4713 }
4714 func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
4715 v_0 := v.Args[0]
4716 // match: (NORconst [c] (MOVVconst [d]))
4717 // result: (MOVVconst [^(c|d)])
4718 for {
4719 c := auxIntToInt64(v.AuxInt)
4720 if v_0.Op != OpMIPS64MOVVconst {
4721 break
4722 }
4723 d := auxIntToInt64(v_0.AuxInt)
4724 v.reset(OpMIPS64MOVVconst)
4725 v.AuxInt = int64ToAuxInt(^(c | d))
4726 return true
4727 }
4728 return false
4729 }
4730 func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
4731 v_1 := v.Args[1]
4732 v_0 := v.Args[0]
4733 // match: (OR x (MOVVconst [c]))
4734 // cond: is32Bit(c)
4735 // result: (ORconst [c] x)
4736 for {
4737 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4738 x := v_0
4739 if v_1.Op != OpMIPS64MOVVconst {
4740 continue
4741 }
4742 c := auxIntToInt64(v_1.AuxInt)
4743 if !(is32Bit(c)) {
4744 continue
4745 }
4746 v.reset(OpMIPS64ORconst)
4747 v.AuxInt = int64ToAuxInt(c)
4748 v.AddArg(x)
4749 return true
4750 }
4751 break
4752 }
4753 // match: (OR x x)
4754 // result: x
4755 for {
4756 x := v_0
4757 if x != v_1 {
4758 break
4759 }
4760 v.copyOf(x)
4761 return true
4762 }
4763 return false
4764 }
4765 func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
4766 v_0 := v.Args[0]
4767 // match: (ORconst [0] x)
4768 // result: x
4769 for {
4770 if auxIntToInt64(v.AuxInt) != 0 {
4771 break
4772 }
4773 x := v_0
4774 v.copyOf(x)
4775 return true
4776 }
4777 // match: (ORconst [-1] _)
4778 // result: (MOVVconst [-1])
4779 for {
4780 if auxIntToInt64(v.AuxInt) != -1 {
4781 break
4782 }
4783 v.reset(OpMIPS64MOVVconst)
4784 v.AuxInt = int64ToAuxInt(-1)
4785 return true
4786 }
4787 // match: (ORconst [c] (MOVVconst [d]))
4788 // result: (MOVVconst [c|d])
4789 for {
4790 c := auxIntToInt64(v.AuxInt)
4791 if v_0.Op != OpMIPS64MOVVconst {
4792 break
4793 }
4794 d := auxIntToInt64(v_0.AuxInt)
4795 v.reset(OpMIPS64MOVVconst)
4796 v.AuxInt = int64ToAuxInt(c | d)
4797 return true
4798 }
4799 // match: (ORconst [c] (ORconst [d] x))
4800 // cond: is32Bit(c|d)
4801 // result: (ORconst [c|d] x)
4802 for {
4803 c := auxIntToInt64(v.AuxInt)
4804 if v_0.Op != OpMIPS64ORconst {
4805 break
4806 }
4807 d := auxIntToInt64(v_0.AuxInt)
4808 x := v_0.Args[0]
4809 if !(is32Bit(c | d)) {
4810 break
4811 }
4812 v.reset(OpMIPS64ORconst)
4813 v.AuxInt = int64ToAuxInt(c | d)
4814 v.AddArg(x)
4815 return true
4816 }
4817 return false
4818 }
4819 func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
4820 v_1 := v.Args[1]
4821 v_0 := v.Args[0]
4822 // match: (SGT (MOVVconst [c]) x)
4823 // cond: is32Bit(c)
4824 // result: (SGTconst [c] x)
4825 for {
4826 if v_0.Op != OpMIPS64MOVVconst {
4827 break
4828 }
4829 c := auxIntToInt64(v_0.AuxInt)
4830 x := v_1
4831 if !(is32Bit(c)) {
4832 break
4833 }
4834 v.reset(OpMIPS64SGTconst)
4835 v.AuxInt = int64ToAuxInt(c)
4836 v.AddArg(x)
4837 return true
4838 }
4839 // match: (SGT x x)
4840 // result: (MOVVconst [0])
4841 for {
4842 x := v_0
4843 if x != v_1 {
4844 break
4845 }
4846 v.reset(OpMIPS64MOVVconst)
4847 v.AuxInt = int64ToAuxInt(0)
4848 return true
4849 }
4850 return false
4851 }
4852 func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
4853 v_1 := v.Args[1]
4854 v_0 := v.Args[0]
4855 // match: (SGTU (MOVVconst [c]) x)
4856 // cond: is32Bit(c)
4857 // result: (SGTUconst [c] x)
4858 for {
4859 if v_0.Op != OpMIPS64MOVVconst {
4860 break
4861 }
4862 c := auxIntToInt64(v_0.AuxInt)
4863 x := v_1
4864 if !(is32Bit(c)) {
4865 break
4866 }
4867 v.reset(OpMIPS64SGTUconst)
4868 v.AuxInt = int64ToAuxInt(c)
4869 v.AddArg(x)
4870 return true
4871 }
4872 // match: (SGTU x x)
4873 // result: (MOVVconst [0])
4874 for {
4875 x := v_0
4876 if x != v_1 {
4877 break
4878 }
4879 v.reset(OpMIPS64MOVVconst)
4880 v.AuxInt = int64ToAuxInt(0)
4881 return true
4882 }
4883 return false
4884 }
4885 func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
4886 v_0 := v.Args[0]
4887 // match: (SGTUconst [c] (MOVVconst [d]))
4888 // cond: uint64(c)>uint64(d)
4889 // result: (MOVVconst [1])
4890 for {
4891 c := auxIntToInt64(v.AuxInt)
4892 if v_0.Op != OpMIPS64MOVVconst {
4893 break
4894 }
4895 d := auxIntToInt64(v_0.AuxInt)
4896 if !(uint64(c) > uint64(d)) {
4897 break
4898 }
4899 v.reset(OpMIPS64MOVVconst)
4900 v.AuxInt = int64ToAuxInt(1)
4901 return true
4902 }
4903 // match: (SGTUconst [c] (MOVVconst [d]))
4904 // cond: uint64(c)<=uint64(d)
4905 // result: (MOVVconst [0])
4906 for {
4907 c := auxIntToInt64(v.AuxInt)
4908 if v_0.Op != OpMIPS64MOVVconst {
4909 break
4910 }
4911 d := auxIntToInt64(v_0.AuxInt)
4912 if !(uint64(c) <= uint64(d)) {
4913 break
4914 }
4915 v.reset(OpMIPS64MOVVconst)
4916 v.AuxInt = int64ToAuxInt(0)
4917 return true
4918 }
4919
4920
4921
4922 for {
4923 c := auxIntToInt64(v.AuxInt)
4924 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
4925 break
4926 }
4927 v.reset(OpMIPS64MOVVconst)
4928 v.AuxInt = int64ToAuxInt(1)
4929 return true
4930 }
4931
4932
4933
4934 for {
4935 c := auxIntToInt64(v.AuxInt)
4936 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
4937 break
4938 }
4939 v.reset(OpMIPS64MOVVconst)
4940 v.AuxInt = int64ToAuxInt(1)
4941 return true
4942 }
4943
4944
4945
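// match: (SGTUconst [c] (ANDconst [m] _))
// cond: uint64(m) < uint64(c)
// result: (MOVVconst [1])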
4946 for {
4947 c := auxIntToInt64(v.AuxInt)
4948 if v_0.Op != OpMIPS64ANDconst {
4949 break
4950 }
4951 m := auxIntToInt64(v_0.AuxInt)
4952 if !(uint64(m) < uint64(c)) {
4953 break
4954 }
4955 v.reset(OpMIPS64MOVVconst)
4956 v.AuxInt = int64ToAuxInt(1)
4957 return true
4958 }
4959
4960
4961
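// match: (SGTUconst [c] (SRLVconst _ [d]))
// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
// result: (MOVVconst [1])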
4962 for {
4963 c := auxIntToInt64(v.AuxInt)
4964 if v_0.Op != OpMIPS64SRLVconst {
4965 break
4966 }
4967 d := auxIntToInt64(v_0.AuxInt)
4968 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
4969 break
4970 }
4971 v.reset(OpMIPS64MOVVconst)
4972 v.AuxInt = int64ToAuxInt(1)
4973 return true
4974 }
4975 return false
4976 }
4977 func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
4978 v_0 := v.Args[0]
4979
4980
4981
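// match: (SGTconst [c] (MOVVconst [d]))
// cond: c > d
// result: (MOVVconst [1])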
4982 for {
4983 c := auxIntToInt64(v.AuxInt)
4984 if v_0.Op != OpMIPS64MOVVconst {
4985 break
4986 }
4987 d := auxIntToInt64(v_0.AuxInt)
4988 if !(c > d) {
4989 break
4990 }
4991 v.reset(OpMIPS64MOVVconst)
4992 v.AuxInt = int64ToAuxInt(1)
4993 return true
4994 }
4995
4996
4997
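// match: (SGTconst [c] (MOVVconst [d]))
// cond: c <= d
// result: (MOVVconst [0])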
4998 for {
4999 c := auxIntToInt64(v.AuxInt)
5000 if v_0.Op != OpMIPS64MOVVconst {
5001 break
5002 }
5003 d := auxIntToInt64(v_0.AuxInt)
5004 if !(c <= d) {
5005 break
5006 }
5007 v.reset(OpMIPS64MOVVconst)
5008 v.AuxInt = int64ToAuxInt(0)
5009 return true
5010 }
5011
5012
5013
5014 for {
5015 c := auxIntToInt64(v.AuxInt)
5016 if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
5017 break
5018 }
5019 v.reset(OpMIPS64MOVVconst)
5020 v.AuxInt = int64ToAuxInt(1)
5021 return true
5022 }
5023
5024
5025
5026 for {
5027 c := auxIntToInt64(v.AuxInt)
5028 if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
5029 break
5030 }
5031 v.reset(OpMIPS64MOVVconst)
5032 v.AuxInt = int64ToAuxInt(0)
5033 return true
5034 }
5035
5036
5037
5038 for {
5039 c := auxIntToInt64(v.AuxInt)
5040 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
5041 break
5042 }
5043 v.reset(OpMIPS64MOVVconst)
5044 v.AuxInt = int64ToAuxInt(1)
5045 return true
5046 }
5047
5048
5049
5050 for {
5051 c := auxIntToInt64(v.AuxInt)
5052 if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
5053 break
5054 }
5055 v.reset(OpMIPS64MOVVconst)
5056 v.AuxInt = int64ToAuxInt(0)
5057 return true
5058 }
5059
5060
5061
5062 for {
5063 c := auxIntToInt64(v.AuxInt)
5064 if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
5065 break
5066 }
5067 v.reset(OpMIPS64MOVVconst)
5068 v.AuxInt = int64ToAuxInt(1)
5069 return true
5070 }
5071
5072
5073
5074 for {
5075 c := auxIntToInt64(v.AuxInt)
5076 if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
5077 break
5078 }
5079 v.reset(OpMIPS64MOVVconst)
5080 v.AuxInt = int64ToAuxInt(0)
5081 return true
5082 }
5083
5084
5085
5086 for {
5087 c := auxIntToInt64(v.AuxInt)
5088 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
5089 break
5090 }
5091 v.reset(OpMIPS64MOVVconst)
5092 v.AuxInt = int64ToAuxInt(1)
5093 return true
5094 }
5095
5096
5097
5098 for {
5099 c := auxIntToInt64(v.AuxInt)
5100 if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
5101 break
5102 }
5103 v.reset(OpMIPS64MOVVconst)
5104 v.AuxInt = int64ToAuxInt(0)
5105 return true
5106 }
5107
5108
5109
5110 for {
5111 c := auxIntToInt64(v.AuxInt)
5112 if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
5113 break
5114 }
5115 v.reset(OpMIPS64MOVVconst)
5116 v.AuxInt = int64ToAuxInt(0)
5117 return true
5118 }
5119
5120
5121
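// match: (SGTconst [c] (ANDconst [m] _))
// cond: 0 <= m && m < c
// result: (MOVVconst [1])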
5122 for {
5123 c := auxIntToInt64(v.AuxInt)
5124 if v_0.Op != OpMIPS64ANDconst {
5125 break
5126 }
5127 m := auxIntToInt64(v_0.AuxInt)
5128 if !(0 <= m && m < c) {
5129 break
5130 }
5131 v.reset(OpMIPS64MOVVconst)
5132 v.AuxInt = int64ToAuxInt(1)
5133 return true
5134 }
5135
5136
5137
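// match: (SGTconst [c] (SRLVconst _ [d]))
// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
// result: (MOVVconst [1])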
5138 for {
5139 c := auxIntToInt64(v.AuxInt)
5140 if v_0.Op != OpMIPS64SRLVconst {
5141 break
5142 }
5143 d := auxIntToInt64(v_0.AuxInt)
5144 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
5145 break
5146 }
5147 v.reset(OpMIPS64MOVVconst)
5148 v.AuxInt = int64ToAuxInt(1)
5149 return true
5150 }
5151 return false
5152 }
5153 func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
5154 v_1 := v.Args[1]
5155 v_0 := v.Args[0]
5156
5157
5158
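// match: (SLLV _ (MOVVconst [c]))
// cond: uint64(c) >= 64
// result: (MOVVconst [0])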
5159 for {
5160 if v_1.Op != OpMIPS64MOVVconst {
5161 break
5162 }
5163 c := auxIntToInt64(v_1.AuxInt)
5164 if !(uint64(c) >= 64) {
5165 break
5166 }
5167 v.reset(OpMIPS64MOVVconst)
5168 v.AuxInt = int64ToAuxInt(0)
5169 return true
5170 }
5171
5172
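// match: (SLLV x (MOVVconst [c]))
// result: (SLLVconst x [c])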
5173 for {
5174 x := v_0
5175 if v_1.Op != OpMIPS64MOVVconst {
5176 break
5177 }
5178 c := auxIntToInt64(v_1.AuxInt)
5179 v.reset(OpMIPS64SLLVconst)
5180 v.AuxInt = int64ToAuxInt(c)
5181 v.AddArg(x)
5182 return true
5183 }
5184 return false
5185 }
5186 func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
5187 v_0 := v.Args[0]
5188
5189
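// match: (SLLVconst [c] (MOVVconst [d]))
// result: (MOVVconst [d<<uint64(c)])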
5190 for {
5191 c := auxIntToInt64(v.AuxInt)
5192 if v_0.Op != OpMIPS64MOVVconst {
5193 break
5194 }
5195 d := auxIntToInt64(v_0.AuxInt)
5196 v.reset(OpMIPS64MOVVconst)
5197 v.AuxInt = int64ToAuxInt(d << uint64(c))
5198 return true
5199 }
5200 return false
5201 }
5202 func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
5203 v_1 := v.Args[1]
5204 v_0 := v.Args[0]
5205
5206
5207
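// match: (SRAV x (MOVVconst [c]))
// cond: uint64(c) >= 64
// result: (SRAVconst x [63])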
5208 for {
5209 x := v_0
5210 if v_1.Op != OpMIPS64MOVVconst {
5211 break
5212 }
5213 c := auxIntToInt64(v_1.AuxInt)
5214 if !(uint64(c) >= 64) {
5215 break
5216 }
5217 v.reset(OpMIPS64SRAVconst)
5218 v.AuxInt = int64ToAuxInt(63)
5219 v.AddArg(x)
5220 return true
5221 }
5222
5223
5224 for {
5225 x := v_0
5226 if v_1.Op != OpMIPS64MOVVconst {
5227 break
5228 }
5229 c := auxIntToInt64(v_1.AuxInt)
5230 v.reset(OpMIPS64SRAVconst)
5231 v.AuxInt = int64ToAuxInt(c)
5232 v.AddArg(x)
5233 return true
5234 }
5235 return false
5236 }
5237 func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
5238 v_0 := v.Args[0]
5239
5240
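// match: (SRAVconst [c] (MOVVconst [d]))
// result: (MOVVconst [d>>uint64(c)])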
5241 for {
5242 c := auxIntToInt64(v.AuxInt)
5243 if v_0.Op != OpMIPS64MOVVconst {
5244 break
5245 }
5246 d := auxIntToInt64(v_0.AuxInt)
5247 v.reset(OpMIPS64MOVVconst)
5248 v.AuxInt = int64ToAuxInt(d >> uint64(c))
5249 return true
5250 }
5251 return false
5252 }
5253 func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
5254 v_1 := v.Args[1]
5255 v_0 := v.Args[0]
5256
5257
5258
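// match: (SRLV _ (MOVVconst [c]))
// cond: uint64(c) >= 64
// result: (MOVVconst [0])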
5259 for {
5260 if v_1.Op != OpMIPS64MOVVconst {
5261 break
5262 }
5263 c := auxIntToInt64(v_1.AuxInt)
5264 if !(uint64(c) >= 64) {
5265 break
5266 }
5267 v.reset(OpMIPS64MOVVconst)
5268 v.AuxInt = int64ToAuxInt(0)
5269 return true
5270 }
5271
5272
5273 for {
5274 x := v_0
5275 if v_1.Op != OpMIPS64MOVVconst {
5276 break
5277 }
5278 c := auxIntToInt64(v_1.AuxInt)
5279 v.reset(OpMIPS64SRLVconst)
5280 v.AuxInt = int64ToAuxInt(c)
5281 v.AddArg(x)
5282 return true
5283 }
5284 return false
5285 }
5286 func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
5287 v_0 := v.Args[0]
5288
5289
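// match: (SRLVconst [c] (MOVVconst [d]))
// result: (MOVVconst [int64(uint64(d)>>uint64(c))])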
5290 for {
5291 c := auxIntToInt64(v.AuxInt)
5292 if v_0.Op != OpMIPS64MOVVconst {
5293 break
5294 }
5295 d := auxIntToInt64(v_0.AuxInt)
5296 v.reset(OpMIPS64MOVVconst)
5297 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
5298 return true
5299 }
5300 return false
5301 }
5302 func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
5303 v_1 := v.Args[1]
5304 v_0 := v.Args[0]
5305
5306
5307
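// match: (SUBV x (MOVVconst [c]))
// cond: is32Bit(c)
// result: (SUBVconst [c] x)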
5308 for {
5309 x := v_0
5310 if v_1.Op != OpMIPS64MOVVconst {
5311 break
5312 }
5313 c := auxIntToInt64(v_1.AuxInt)
5314 if !(is32Bit(c)) {
5315 break
5316 }
5317 v.reset(OpMIPS64SUBVconst)
5318 v.AuxInt = int64ToAuxInt(c)
5319 v.AddArg(x)
5320 return true
5321 }
5322
5323
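// match: (SUBV x x)
// result: (MOVVconst [0])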
5324 for {
5325 x := v_0
5326 if x != v_1 {
5327 break
5328 }
5329 v.reset(OpMIPS64MOVVconst)
5330 v.AuxInt = int64ToAuxInt(0)
5331 return true
5332 }
5333
5334
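// match: (SUBV (MOVVconst [0]) x)
// result: (NEGV x)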
5335 for {
5336 if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
5337 break
5338 }
5339 x := v_1
5340 v.reset(OpMIPS64NEGV)
5341 v.AddArg(x)
5342 return true
5343 }
5344 return false
5345 }
5346 func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
5347 v_0 := v.Args[0]
5348
5349
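// match: (SUBVconst [0] x)
// result: x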
5350 for {
5351 if auxIntToInt64(v.AuxInt) != 0 {
5352 break
5353 }
5354 x := v_0
5355 v.copyOf(x)
5356 return true
5357 }
5358
5359
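// match: (SUBVconst [c] (MOVVconst [d]))
// result: (MOVVconst [d-c])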
5360 for {
5361 c := auxIntToInt64(v.AuxInt)
5362 if v_0.Op != OpMIPS64MOVVconst {
5363 break
5364 }
5365 d := auxIntToInt64(v_0.AuxInt)
5366 v.reset(OpMIPS64MOVVconst)
5367 v.AuxInt = int64ToAuxInt(d - c)
5368 return true
5369 }
5370
5371
5372
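// match: (SUBVconst [c] (SUBVconst [d] x))
// cond: is32Bit(-c-d)
// result: (ADDVconst [-c-d] x)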
5373 for {
5374 c := auxIntToInt64(v.AuxInt)
5375 if v_0.Op != OpMIPS64SUBVconst {
5376 break
5377 }
5378 d := auxIntToInt64(v_0.AuxInt)
5379 x := v_0.Args[0]
5380 if !(is32Bit(-c - d)) {
5381 break
5382 }
5383 v.reset(OpMIPS64ADDVconst)
5384 v.AuxInt = int64ToAuxInt(-c - d)
5385 v.AddArg(x)
5386 return true
5387 }
5388
5389
5390
5391 for {
5392 c := auxIntToInt64(v.AuxInt)
5393 if v_0.Op != OpMIPS64ADDVconst {
5394 break
5395 }
5396 d := auxIntToInt64(v_0.AuxInt)
5397 x := v_0.Args[0]
5398 if !(is32Bit(-c + d)) {
5399 break
5400 }
5401 v.reset(OpMIPS64ADDVconst)
5402 v.AuxInt = int64ToAuxInt(-c + d)
5403 v.AddArg(x)
5404 return true
5405 }
5406 return false
5407 }
5408 func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
5409 v_1 := v.Args[1]
5410 v_0 := v.Args[0]
5411
5412
5413
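// match: (XOR x (MOVVconst [c]))
// cond: is32Bit(c)
// result: (XORconst [c] x)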
5414 for {
5415 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5416 x := v_0
5417 if v_1.Op != OpMIPS64MOVVconst {
5418 continue
5419 }
5420 c := auxIntToInt64(v_1.AuxInt)
5421 if !(is32Bit(c)) {
5422 continue
5423 }
5424 v.reset(OpMIPS64XORconst)
5425 v.AuxInt = int64ToAuxInt(c)
5426 v.AddArg(x)
5427 return true
5428 }
5429 break
5430 }
5431
5432
5433 for {
5434 x := v_0
5435 if x != v_1 {
5436 break
5437 }
5438 v.reset(OpMIPS64MOVVconst)
5439 v.AuxInt = int64ToAuxInt(0)
5440 return true
5441 }
5442 return false
5443 }
5444 func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
5445 v_0 := v.Args[0]
5446
5447
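// match: (XORconst [0] x)
// result: x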
5448 for {
5449 if auxIntToInt64(v.AuxInt) != 0 {
5450 break
5451 }
5452 x := v_0
5453 v.copyOf(x)
5454 return true
5455 }
5456
5457
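// match: (XORconst [-1] x)
// result: (NORconst [0] x)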
5458 for {
5459 if auxIntToInt64(v.AuxInt) != -1 {
5460 break
5461 }
5462 x := v_0
5463 v.reset(OpMIPS64NORconst)
5464 v.AuxInt = int64ToAuxInt(0)
5465 v.AddArg(x)
5466 return true
5467 }
5468
5469
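// match: (XORconst [c] (MOVVconst [d]))
// result: (MOVVconst [c^d])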
5470 for {
5471 c := auxIntToInt64(v.AuxInt)
5472 if v_0.Op != OpMIPS64MOVVconst {
5473 break
5474 }
5475 d := auxIntToInt64(v_0.AuxInt)
5476 v.reset(OpMIPS64MOVVconst)
5477 v.AuxInt = int64ToAuxInt(c ^ d)
5478 return true
5479 }
5480
5481
5482
5483 for {
5484 c := auxIntToInt64(v.AuxInt)
5485 if v_0.Op != OpMIPS64XORconst {
5486 break
5487 }
5488 d := auxIntToInt64(v_0.AuxInt)
5489 x := v_0.Args[0]
5490 if !(is32Bit(c ^ d)) {
5491 break
5492 }
5493 v.reset(OpMIPS64XORconst)
5494 v.AuxInt = int64ToAuxInt(c ^ d)
5495 v.AddArg(x)
5496 return true
5497 }
5498 return false
5499 }
5500 func rewriteValueMIPS64_OpMod16(v *Value) bool {
5501 v_1 := v.Args[1]
5502 v_0 := v.Args[0]
5503 b := v.Block
5504 typ := &b.Func.Config.Types
5505
5506
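// match: (Mod16 x y)
// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))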
5507 for {
5508 x := v_0
5509 y := v_1
5510 v.reset(OpSelect0)
5511 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5512 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5513 v1.AddArg(x)
5514 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5515 v2.AddArg(y)
5516 v0.AddArg2(v1, v2)
5517 v.AddArg(v0)
5518 return true
5519 }
5520 }
5521 func rewriteValueMIPS64_OpMod16u(v *Value) bool {
5522 v_1 := v.Args[1]
5523 v_0 := v.Args[0]
5524 b := v.Block
5525 typ := &b.Func.Config.Types
5526
5527
5528 for {
5529 x := v_0
5530 y := v_1
5531 v.reset(OpSelect0)
5532 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5533 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5534 v1.AddArg(x)
5535 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5536 v2.AddArg(y)
5537 v0.AddArg2(v1, v2)
5538 v.AddArg(v0)
5539 return true
5540 }
5541 }
5542 func rewriteValueMIPS64_OpMod32(v *Value) bool {
5543 v_1 := v.Args[1]
5544 v_0 := v.Args[0]
5545 b := v.Block
5546 typ := &b.Func.Config.Types
5547
5548
5549 for {
5550 x := v_0
5551 y := v_1
5552 v.reset(OpSelect0)
5553 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5554 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5555 v1.AddArg(x)
5556 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5557 v2.AddArg(y)
5558 v0.AddArg2(v1, v2)
5559 v.AddArg(v0)
5560 return true
5561 }
5562 }
5563 func rewriteValueMIPS64_OpMod32u(v *Value) bool {
5564 v_1 := v.Args[1]
5565 v_0 := v.Args[0]
5566 b := v.Block
5567 typ := &b.Func.Config.Types
5568
5569
5570 for {
5571 x := v_0
5572 y := v_1
5573 v.reset(OpSelect0)
5574 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5575 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5576 v1.AddArg(x)
5577 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5578 v2.AddArg(y)
5579 v0.AddArg2(v1, v2)
5580 v.AddArg(v0)
5581 return true
5582 }
5583 }
5584 func rewriteValueMIPS64_OpMod64(v *Value) bool {
5585 v_1 := v.Args[1]
5586 v_0 := v.Args[0]
5587 b := v.Block
5588 typ := &b.Func.Config.Types
5589
5590
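// match: (Mod64 x y)
// result: (Select0 (DIVV x y))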
5591 for {
5592 x := v_0
5593 y := v_1
5594 v.reset(OpSelect0)
5595 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5596 v0.AddArg2(x, y)
5597 v.AddArg(v0)
5598 return true
5599 }
5600 }
5601 func rewriteValueMIPS64_OpMod64u(v *Value) bool {
5602 v_1 := v.Args[1]
5603 v_0 := v.Args[0]
5604 b := v.Block
5605 typ := &b.Func.Config.Types
5606
5607
5608 for {
5609 x := v_0
5610 y := v_1
5611 v.reset(OpSelect0)
5612 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5613 v0.AddArg2(x, y)
5614 v.AddArg(v0)
5615 return true
5616 }
5617 }
5618 func rewriteValueMIPS64_OpMod8(v *Value) bool {
5619 v_1 := v.Args[1]
5620 v_0 := v.Args[0]
5621 b := v.Block
5622 typ := &b.Func.Config.Types
5623
5624
5625 for {
5626 x := v_0
5627 y := v_1
5628 v.reset(OpSelect0)
5629 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5630 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5631 v1.AddArg(x)
5632 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5633 v2.AddArg(y)
5634 v0.AddArg2(v1, v2)
5635 v.AddArg(v0)
5636 return true
5637 }
5638 }
5639 func rewriteValueMIPS64_OpMod8u(v *Value) bool {
5640 v_1 := v.Args[1]
5641 v_0 := v.Args[0]
5642 b := v.Block
5643 typ := &b.Func.Config.Types
5644
5645
5646 for {
5647 x := v_0
5648 y := v_1
5649 v.reset(OpSelect0)
5650 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5651 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5652 v1.AddArg(x)
5653 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5654 v2.AddArg(y)
5655 v0.AddArg2(v1, v2)
5656 v.AddArg(v0)
5657 return true
5658 }
5659 }
5660 func rewriteValueMIPS64_OpMove(v *Value) bool {
5661 v_2 := v.Args[2]
5662 v_1 := v.Args[1]
5663 v_0 := v.Args[0]
5664 b := v.Block
5665 config := b.Func.Config
5666 typ := &b.Func.Config.Types
5667
5668
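// match: (Move [0] _ _ mem)
// result: mem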
5669 for {
5670 if auxIntToInt64(v.AuxInt) != 0 {
5671 break
5672 }
5673 mem := v_2
5674 v.copyOf(mem)
5675 return true
5676 }
5677
5678
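// match: (Move [1] dst src mem)
// result: (MOVBstore dst (MOVBload src mem) mem)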
5679 for {
5680 if auxIntToInt64(v.AuxInt) != 1 {
5681 break
5682 }
5683 dst := v_0
5684 src := v_1
5685 mem := v_2
5686 v.reset(OpMIPS64MOVBstore)
5687 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5688 v0.AddArg2(src, mem)
5689 v.AddArg3(dst, v0, mem)
5690 return true
5691 }
5692
5693
5694
5695 for {
5696 if auxIntToInt64(v.AuxInt) != 2 {
5697 break
5698 }
5699 t := auxToType(v.Aux)
5700 dst := v_0
5701 src := v_1
5702 mem := v_2
5703 if !(t.Alignment()%2 == 0) {
5704 break
5705 }
5706 v.reset(OpMIPS64MOVHstore)
5707 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5708 v0.AddArg2(src, mem)
5709 v.AddArg3(dst, v0, mem)
5710 return true
5711 }
5712
5713
5714 for {
5715 if auxIntToInt64(v.AuxInt) != 2 {
5716 break
5717 }
5718 dst := v_0
5719 src := v_1
5720 mem := v_2
5721 v.reset(OpMIPS64MOVBstore)
5722 v.AuxInt = int32ToAuxInt(1)
5723 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5724 v0.AuxInt = int32ToAuxInt(1)
5725 v0.AddArg2(src, mem)
5726 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5727 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5728 v2.AddArg2(src, mem)
5729 v1.AddArg3(dst, v2, mem)
5730 v.AddArg3(dst, v0, v1)
5731 return true
5732 }
5733
5734
5735
5736 for {
5737 if auxIntToInt64(v.AuxInt) != 4 {
5738 break
5739 }
5740 t := auxToType(v.Aux)
5741 dst := v_0
5742 src := v_1
5743 mem := v_2
5744 if !(t.Alignment()%4 == 0) {
5745 break
5746 }
5747 v.reset(OpMIPS64MOVWstore)
5748 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5749 v0.AddArg2(src, mem)
5750 v.AddArg3(dst, v0, mem)
5751 return true
5752 }
5753
5754
5755
5756 for {
5757 if auxIntToInt64(v.AuxInt) != 4 {
5758 break
5759 }
5760 t := auxToType(v.Aux)
5761 dst := v_0
5762 src := v_1
5763 mem := v_2
5764 if !(t.Alignment()%2 == 0) {
5765 break
5766 }
5767 v.reset(OpMIPS64MOVHstore)
5768 v.AuxInt = int32ToAuxInt(2)
5769 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5770 v0.AuxInt = int32ToAuxInt(2)
5771 v0.AddArg2(src, mem)
5772 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5773 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5774 v2.AddArg2(src, mem)
5775 v1.AddArg3(dst, v2, mem)
5776 v.AddArg3(dst, v0, v1)
5777 return true
5778 }
5779
5780
5781 for {
5782 if auxIntToInt64(v.AuxInt) != 4 {
5783 break
5784 }
5785 dst := v_0
5786 src := v_1
5787 mem := v_2
5788 v.reset(OpMIPS64MOVBstore)
5789 v.AuxInt = int32ToAuxInt(3)
5790 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5791 v0.AuxInt = int32ToAuxInt(3)
5792 v0.AddArg2(src, mem)
5793 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5794 v1.AuxInt = int32ToAuxInt(2)
5795 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5796 v2.AuxInt = int32ToAuxInt(2)
5797 v2.AddArg2(src, mem)
5798 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5799 v3.AuxInt = int32ToAuxInt(1)
5800 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5801 v4.AuxInt = int32ToAuxInt(1)
5802 v4.AddArg2(src, mem)
5803 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5804 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5805 v6.AddArg2(src, mem)
5806 v5.AddArg3(dst, v6, mem)
5807 v3.AddArg3(dst, v4, v5)
5808 v1.AddArg3(dst, v2, v3)
5809 v.AddArg3(dst, v0, v1)
5810 return true
5811 }
5812
5813
5814
5815 for {
5816 if auxIntToInt64(v.AuxInt) != 8 {
5817 break
5818 }
5819 t := auxToType(v.Aux)
5820 dst := v_0
5821 src := v_1
5822 mem := v_2
5823 if !(t.Alignment()%8 == 0) {
5824 break
5825 }
5826 v.reset(OpMIPS64MOVVstore)
5827 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5828 v0.AddArg2(src, mem)
5829 v.AddArg3(dst, v0, mem)
5830 return true
5831 }
5832
5833
5834
5835 for {
5836 if auxIntToInt64(v.AuxInt) != 8 {
5837 break
5838 }
5839 t := auxToType(v.Aux)
5840 dst := v_0
5841 src := v_1
5842 mem := v_2
5843 if !(t.Alignment()%4 == 0) {
5844 break
5845 }
5846 v.reset(OpMIPS64MOVWstore)
5847 v.AuxInt = int32ToAuxInt(4)
5848 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5849 v0.AuxInt = int32ToAuxInt(4)
5850 v0.AddArg2(src, mem)
5851 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5852 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5853 v2.AddArg2(src, mem)
5854 v1.AddArg3(dst, v2, mem)
5855 v.AddArg3(dst, v0, v1)
5856 return true
5857 }
5858
5859
5860
5861 for {
5862 if auxIntToInt64(v.AuxInt) != 8 {
5863 break
5864 }
5865 t := auxToType(v.Aux)
5866 dst := v_0
5867 src := v_1
5868 mem := v_2
5869 if !(t.Alignment()%2 == 0) {
5870 break
5871 }
5872 v.reset(OpMIPS64MOVHstore)
5873 v.AuxInt = int32ToAuxInt(6)
5874 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5875 v0.AuxInt = int32ToAuxInt(6)
5876 v0.AddArg2(src, mem)
5877 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5878 v1.AuxInt = int32ToAuxInt(4)
5879 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5880 v2.AuxInt = int32ToAuxInt(4)
5881 v2.AddArg2(src, mem)
5882 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5883 v3.AuxInt = int32ToAuxInt(2)
5884 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5885 v4.AuxInt = int32ToAuxInt(2)
5886 v4.AddArg2(src, mem)
5887 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5888 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5889 v6.AddArg2(src, mem)
5890 v5.AddArg3(dst, v6, mem)
5891 v3.AddArg3(dst, v4, v5)
5892 v1.AddArg3(dst, v2, v3)
5893 v.AddArg3(dst, v0, v1)
5894 return true
5895 }
5896
5897
5898 for {
5899 if auxIntToInt64(v.AuxInt) != 3 {
5900 break
5901 }
5902 dst := v_0
5903 src := v_1
5904 mem := v_2
5905 v.reset(OpMIPS64MOVBstore)
5906 v.AuxInt = int32ToAuxInt(2)
5907 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5908 v0.AuxInt = int32ToAuxInt(2)
5909 v0.AddArg2(src, mem)
5910 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5911 v1.AuxInt = int32ToAuxInt(1)
5912 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5913 v2.AuxInt = int32ToAuxInt(1)
5914 v2.AddArg2(src, mem)
5915 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5916 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5917 v4.AddArg2(src, mem)
5918 v3.AddArg3(dst, v4, mem)
5919 v1.AddArg3(dst, v2, v3)
5920 v.AddArg3(dst, v0, v1)
5921 return true
5922 }
5923
5924
5925
5926 for {
5927 if auxIntToInt64(v.AuxInt) != 6 {
5928 break
5929 }
5930 t := auxToType(v.Aux)
5931 dst := v_0
5932 src := v_1
5933 mem := v_2
5934 if !(t.Alignment()%2 == 0) {
5935 break
5936 }
5937 v.reset(OpMIPS64MOVHstore)
5938 v.AuxInt = int32ToAuxInt(4)
5939 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5940 v0.AuxInt = int32ToAuxInt(4)
5941 v0.AddArg2(src, mem)
5942 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5943 v1.AuxInt = int32ToAuxInt(2)
5944 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5945 v2.AuxInt = int32ToAuxInt(2)
5946 v2.AddArg2(src, mem)
5947 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5948 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5949 v4.AddArg2(src, mem)
5950 v3.AddArg3(dst, v4, mem)
5951 v1.AddArg3(dst, v2, v3)
5952 v.AddArg3(dst, v0, v1)
5953 return true
5954 }
5955
5956
5957
5958 for {
5959 if auxIntToInt64(v.AuxInt) != 12 {
5960 break
5961 }
5962 t := auxToType(v.Aux)
5963 dst := v_0
5964 src := v_1
5965 mem := v_2
5966 if !(t.Alignment()%4 == 0) {
5967 break
5968 }
5969 v.reset(OpMIPS64MOVWstore)
5970 v.AuxInt = int32ToAuxInt(8)
5971 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5972 v0.AuxInt = int32ToAuxInt(8)
5973 v0.AddArg2(src, mem)
5974 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5975 v1.AuxInt = int32ToAuxInt(4)
5976 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5977 v2.AuxInt = int32ToAuxInt(4)
5978 v2.AddArg2(src, mem)
5979 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5980 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5981 v4.AddArg2(src, mem)
5982 v3.AddArg3(dst, v4, mem)
5983 v1.AddArg3(dst, v2, v3)
5984 v.AddArg3(dst, v0, v1)
5985 return true
5986 }
5987
5988
5989
5990 for {
5991 if auxIntToInt64(v.AuxInt) != 16 {
5992 break
5993 }
5994 t := auxToType(v.Aux)
5995 dst := v_0
5996 src := v_1
5997 mem := v_2
5998 if !(t.Alignment()%8 == 0) {
5999 break
6000 }
6001 v.reset(OpMIPS64MOVVstore)
6002 v.AuxInt = int32ToAuxInt(8)
6003 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6004 v0.AuxInt = int32ToAuxInt(8)
6005 v0.AddArg2(src, mem)
6006 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6007 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6008 v2.AddArg2(src, mem)
6009 v1.AddArg3(dst, v2, mem)
6010 v.AddArg3(dst, v0, v1)
6011 return true
6012 }
6013
6014
6015
6016 for {
6017 if auxIntToInt64(v.AuxInt) != 24 {
6018 break
6019 }
6020 t := auxToType(v.Aux)
6021 dst := v_0
6022 src := v_1
6023 mem := v_2
6024 if !(t.Alignment()%8 == 0) {
6025 break
6026 }
6027 v.reset(OpMIPS64MOVVstore)
6028 v.AuxInt = int32ToAuxInt(16)
6029 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6030 v0.AuxInt = int32ToAuxInt(16)
6031 v0.AddArg2(src, mem)
6032 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6033 v1.AuxInt = int32ToAuxInt(8)
6034 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6035 v2.AuxInt = int32ToAuxInt(8)
6036 v2.AddArg2(src, mem)
6037 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6038 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6039 v4.AddArg2(src, mem)
6040 v3.AddArg3(dst, v4, mem)
6041 v1.AddArg3(dst, v2, v3)
6042 v.AddArg3(dst, v0, v1)
6043 return true
6044 }
6045
6046
6047
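// match: (Move [s] {t} dst src mem)
// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)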
6048 for {
6049 s := auxIntToInt64(v.AuxInt)
6050 t := auxToType(v.Aux)
6051 dst := v_0
6052 src := v_1
6053 mem := v_2
6054 if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
6055 break
6056 }
6057 v.reset(OpMIPS64DUFFCOPY)
6058 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
6059 v.AddArg3(dst, src, mem)
6060 return true
6061 }
6062
6063
6064
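// match: (Move [s] {t} dst src mem)
// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)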
6065 for {
6066 s := auxIntToInt64(v.AuxInt)
6067 t := auxToType(v.Aux)
6068 dst := v_0
6069 src := v_1
6070 mem := v_2
6071 if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
6072 break
6073 }
6074 v.reset(OpMIPS64LoweredMove)
6075 v.AuxInt = int64ToAuxInt(t.Alignment())
6076 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
6077 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
6078 v0.AddArg(src)
6079 v.AddArg4(dst, src, v0, mem)
6080 return true
6081 }
6082 return false
6083 }
6084 func rewriteValueMIPS64_OpMul16(v *Value) bool {
6085 v_1 := v.Args[1]
6086 v_0 := v.Args[0]
6087 b := v.Block
6088 typ := &b.Func.Config.Types
6089
6090
6091 for {
6092 x := v_0
6093 y := v_1
6094 v.reset(OpSelect1)
6095 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6096 v0.AddArg2(x, y)
6097 v.AddArg(v0)
6098 return true
6099 }
6100 }
6101 func rewriteValueMIPS64_OpMul32(v *Value) bool {
6102 v_1 := v.Args[1]
6103 v_0 := v.Args[0]
6104 b := v.Block
6105 typ := &b.Func.Config.Types
6106
6107
6108 for {
6109 x := v_0
6110 y := v_1
6111 v.reset(OpSelect1)
6112 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6113 v0.AddArg2(x, y)
6114 v.AddArg(v0)
6115 return true
6116 }
6117 }
6118 func rewriteValueMIPS64_OpMul64(v *Value) bool {
6119 v_1 := v.Args[1]
6120 v_0 := v.Args[0]
6121 b := v.Block
6122 typ := &b.Func.Config.Types
6123
6124
6125 for {
6126 x := v_0
6127 y := v_1
6128 v.reset(OpSelect1)
6129 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6130 v0.AddArg2(x, y)
6131 v.AddArg(v0)
6132 return true
6133 }
6134 }
6135 func rewriteValueMIPS64_OpMul8(v *Value) bool {
6136 v_1 := v.Args[1]
6137 v_0 := v.Args[0]
6138 b := v.Block
6139 typ := &b.Func.Config.Types
6140
6141
6142 for {
6143 x := v_0
6144 y := v_1
6145 v.reset(OpSelect1)
6146 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6147 v0.AddArg2(x, y)
6148 v.AddArg(v0)
6149 return true
6150 }
6151 }
6152 func rewriteValueMIPS64_OpNeq16(v *Value) bool {
6153 v_1 := v.Args[1]
6154 v_0 := v.Args[0]
6155 b := v.Block
6156 typ := &b.Func.Config.Types
6157
6158
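// match: (Neq16 x y)
// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))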
6159 for {
6160 x := v_0
6161 y := v_1
6162 v.reset(OpMIPS64SGTU)
6163 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6164 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6165 v1.AddArg(x)
6166 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6167 v2.AddArg(y)
6168 v0.AddArg2(v1, v2)
6169 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6170 v3.AuxInt = int64ToAuxInt(0)
6171 v.AddArg2(v0, v3)
6172 return true
6173 }
6174 }
6175 func rewriteValueMIPS64_OpNeq32(v *Value) bool {
6176 v_1 := v.Args[1]
6177 v_0 := v.Args[0]
6178 b := v.Block
6179 typ := &b.Func.Config.Types
6180
6181
6182 for {
6183 x := v_0
6184 y := v_1
6185 v.reset(OpMIPS64SGTU)
6186 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6187 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6188 v1.AddArg(x)
6189 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6190 v2.AddArg(y)
6191 v0.AddArg2(v1, v2)
6192 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6193 v3.AuxInt = int64ToAuxInt(0)
6194 v.AddArg2(v0, v3)
6195 return true
6196 }
6197 }
6198 func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
6199 v_1 := v.Args[1]
6200 v_0 := v.Args[0]
6201 b := v.Block
6202
6203
6204 for {
6205 x := v_0
6206 y := v_1
6207 v.reset(OpMIPS64FPFlagFalse)
6208 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
6209 v0.AddArg2(x, y)
6210 v.AddArg(v0)
6211 return true
6212 }
6213 }
6214 func rewriteValueMIPS64_OpNeq64(v *Value) bool {
6215 v_1 := v.Args[1]
6216 v_0 := v.Args[0]
6217 b := v.Block
6218 typ := &b.Func.Config.Types
6219
6220
6221 for {
6222 x := v_0
6223 y := v_1
6224 v.reset(OpMIPS64SGTU)
6225 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6226 v0.AddArg2(x, y)
6227 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6228 v1.AuxInt = int64ToAuxInt(0)
6229 v.AddArg2(v0, v1)
6230 return true
6231 }
6232 }
6233 func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
6234 v_1 := v.Args[1]
6235 v_0 := v.Args[0]
6236 b := v.Block
6237
6238
6239 for {
6240 x := v_0
6241 y := v_1
6242 v.reset(OpMIPS64FPFlagFalse)
6243 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
6244 v0.AddArg2(x, y)
6245 v.AddArg(v0)
6246 return true
6247 }
6248 }
6249 func rewriteValueMIPS64_OpNeq8(v *Value) bool {
6250 v_1 := v.Args[1]
6251 v_0 := v.Args[0]
6252 b := v.Block
6253 typ := &b.Func.Config.Types
6254
6255
6256 for {
6257 x := v_0
6258 y := v_1
6259 v.reset(OpMIPS64SGTU)
6260 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6261 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6262 v1.AddArg(x)
6263 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6264 v2.AddArg(y)
6265 v0.AddArg2(v1, v2)
6266 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6267 v3.AuxInt = int64ToAuxInt(0)
6268 v.AddArg2(v0, v3)
6269 return true
6270 }
6271 }
6272 func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
6273 v_1 := v.Args[1]
6274 v_0 := v.Args[0]
6275 b := v.Block
6276 typ := &b.Func.Config.Types
6277
6278
6279 for {
6280 x := v_0
6281 y := v_1
6282 v.reset(OpMIPS64SGTU)
6283 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6284 v0.AddArg2(x, y)
6285 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6286 v1.AuxInt = int64ToAuxInt(0)
6287 v.AddArg2(v0, v1)
6288 return true
6289 }
6290 }
6291 func rewriteValueMIPS64_OpNot(v *Value) bool {
6292 v_0 := v.Args[0]
6293
6294
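// match: (Not x)
// result: (XORconst [1] x)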
6295 for {
6296 x := v_0
6297 v.reset(OpMIPS64XORconst)
6298 v.AuxInt = int64ToAuxInt(1)
6299 v.AddArg(x)
6300 return true
6301 }
6302 }
6303 func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
6304 v_0 := v.Args[0]
6305
6306
6307
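// match: (OffPtr [off] ptr:(SP))
// cond: is32Bit(off)
// result: (MOVVaddr [int32(off)] ptr)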
6308 for {
6309 off := auxIntToInt64(v.AuxInt)
6310 ptr := v_0
6311 if ptr.Op != OpSP || !(is32Bit(off)) {
6312 break
6313 }
6314 v.reset(OpMIPS64MOVVaddr)
6315 v.AuxInt = int32ToAuxInt(int32(off))
6316 v.AddArg(ptr)
6317 return true
6318 }
6319
6320
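// match: (OffPtr [off] ptr)
// result: (ADDVconst [off] ptr)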
6321 for {
6322 off := auxIntToInt64(v.AuxInt)
6323 ptr := v_0
6324 v.reset(OpMIPS64ADDVconst)
6325 v.AuxInt = int64ToAuxInt(off)
6326 v.AddArg(ptr)
6327 return true
6328 }
6329 }
6330 func rewriteValueMIPS64_OpPanicBounds(v *Value) bool {
6331 v_2 := v.Args[2]
6332 v_1 := v.Args[1]
6333 v_0 := v.Args[0]
6334
6335
6336
6337 for {
6338 kind := auxIntToInt64(v.AuxInt)
6339 x := v_0
6340 y := v_1
6341 mem := v_2
6342 if !(boundsABI(kind) == 0) {
6343 break
6344 }
6345 v.reset(OpMIPS64LoweredPanicBoundsA)
6346 v.AuxInt = int64ToAuxInt(kind)
6347 v.AddArg3(x, y, mem)
6348 return true
6349 }
6350
6351
6352
6353 for {
6354 kind := auxIntToInt64(v.AuxInt)
6355 x := v_0
6356 y := v_1
6357 mem := v_2
6358 if !(boundsABI(kind) == 1) {
6359 break
6360 }
6361 v.reset(OpMIPS64LoweredPanicBoundsB)
6362 v.AuxInt = int64ToAuxInt(kind)
6363 v.AddArg3(x, y, mem)
6364 return true
6365 }
6366
6367
6368
6369 for {
6370 kind := auxIntToInt64(v.AuxInt)
6371 x := v_0
6372 y := v_1
6373 mem := v_2
6374 if !(boundsABI(kind) == 2) {
6375 break
6376 }
6377 v.reset(OpMIPS64LoweredPanicBoundsC)
6378 v.AuxInt = int64ToAuxInt(kind)
6379 v.AddArg3(x, y, mem)
6380 return true
6381 }
6382 return false
6383 }
6384 func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
6385 v_1 := v.Args[1]
6386 v_0 := v.Args[0]
6387 b := v.Block
6388 typ := &b.Func.Config.Types
6389
6390
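// match: (RotateLeft16 <t> x (MOVVconst [c]))
// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))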
6391 for {
6392 t := v.Type
6393 x := v_0
6394 if v_1.Op != OpMIPS64MOVVconst {
6395 break
6396 }
6397 c := auxIntToInt64(v_1.AuxInt)
6398 v.reset(OpOr16)
6399 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6400 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6401 v1.AuxInt = int64ToAuxInt(c & 15)
6402 v0.AddArg2(x, v1)
6403 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6404 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6405 v3.AuxInt = int64ToAuxInt(-c & 15)
6406 v2.AddArg2(x, v3)
6407 v.AddArg2(v0, v2)
6408 return true
6409 }
6410 return false
6411 }
6412 func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
6413 v_1 := v.Args[1]
6414 v_0 := v.Args[0]
6415 b := v.Block
6416 typ := &b.Func.Config.Types
6417
6418
6419 for {
6420 t := v.Type
6421 x := v_0
6422 if v_1.Op != OpMIPS64MOVVconst {
6423 break
6424 }
6425 c := auxIntToInt64(v_1.AuxInt)
6426 v.reset(OpOr32)
6427 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6428 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6429 v1.AuxInt = int64ToAuxInt(c & 31)
6430 v0.AddArg2(x, v1)
6431 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6432 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6433 v3.AuxInt = int64ToAuxInt(-c & 31)
6434 v2.AddArg2(x, v3)
6435 v.AddArg2(v0, v2)
6436 return true
6437 }
6438 return false
6439 }
6440 func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
6441 v_1 := v.Args[1]
6442 v_0 := v.Args[0]
6443 b := v.Block
6444 typ := &b.Func.Config.Types
6445
6446
6447 for {
6448 t := v.Type
6449 x := v_0
6450 if v_1.Op != OpMIPS64MOVVconst {
6451 break
6452 }
6453 c := auxIntToInt64(v_1.AuxInt)
6454 v.reset(OpOr64)
6455 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6456 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6457 v1.AuxInt = int64ToAuxInt(c & 63)
6458 v0.AddArg2(x, v1)
6459 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6460 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6461 v3.AuxInt = int64ToAuxInt(-c & 63)
6462 v2.AddArg2(x, v3)
6463 v.AddArg2(v0, v2)
6464 return true
6465 }
6466 return false
6467 }
6468 func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
6469 v_1 := v.Args[1]
6470 v_0 := v.Args[0]
6471 b := v.Block
6472 typ := &b.Func.Config.Types
6473
6474
6475 for {
6476 t := v.Type
6477 x := v_0
6478 if v_1.Op != OpMIPS64MOVVconst {
6479 break
6480 }
6481 c := auxIntToInt64(v_1.AuxInt)
6482 v.reset(OpOr8)
6483 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6484 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6485 v1.AuxInt = int64ToAuxInt(c & 7)
6486 v0.AddArg2(x, v1)
6487 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6488 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6489 v3.AuxInt = int64ToAuxInt(-c & 7)
6490 v2.AddArg2(x, v3)
6491 v.AddArg2(v0, v2)
6492 return true
6493 }
6494 return false
6495 }
6496 func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
6497 v_1 := v.Args[1]
6498 v_0 := v.Args[0]
6499 b := v.Block
6500 typ := &b.Func.Config.Types
6501
6502
6503 for {
6504 t := v.Type
6505 x := v_0
6506 y := v_1
6507 v.reset(OpMIPS64AND)
6508 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6509 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6510 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6511 v2.AuxInt = int64ToAuxInt(64)
6512 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6513 v3.AddArg(y)
6514 v1.AddArg2(v2, v3)
6515 v0.AddArg(v1)
6516 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6517 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6518 v5.AddArg(x)
6519 v4.AddArg2(v5, v3)
6520 v.AddArg2(v0, v4)
6521 return true
6522 }
6523 }
6524 func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
6525 v_1 := v.Args[1]
6526 v_0 := v.Args[0]
6527 b := v.Block
6528 typ := &b.Func.Config.Types
6529
6530
6531 for {
6532 t := v.Type
6533 x := v_0
6534 y := v_1
6535 v.reset(OpMIPS64AND)
6536 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6537 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6538 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6539 v2.AuxInt = int64ToAuxInt(64)
6540 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6541 v3.AddArg(y)
6542 v1.AddArg2(v2, v3)
6543 v0.AddArg(v1)
6544 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6545 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6546 v5.AddArg(x)
6547 v4.AddArg2(v5, v3)
6548 v.AddArg2(v0, v4)
6549 return true
6550 }
6551 }
6552 func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
6553 v_1 := v.Args[1]
6554 v_0 := v.Args[0]
6555 b := v.Block
6556 typ := &b.Func.Config.Types
6557
6558
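// match: (Rsh16Ux64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))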
6559 for {
6560 t := v.Type
6561 x := v_0
6562 y := v_1
6563 v.reset(OpMIPS64AND)
6564 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6565 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6566 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6567 v2.AuxInt = int64ToAuxInt(64)
6568 v1.AddArg2(v2, y)
6569 v0.AddArg(v1)
6570 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6571 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6572 v4.AddArg(x)
6573 v3.AddArg2(v4, y)
6574 v.AddArg2(v0, v3)
6575 return true
6576 }
6577 }
6578 func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
6579 v_1 := v.Args[1]
6580 v_0 := v.Args[0]
6581 b := v.Block
6582 typ := &b.Func.Config.Types
6583
6584
6585 for {
6586 t := v.Type
6587 x := v_0
6588 y := v_1
6589 v.reset(OpMIPS64AND)
6590 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6591 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6592 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6593 v2.AuxInt = int64ToAuxInt(64)
6594 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6595 v3.AddArg(y)
6596 v1.AddArg2(v2, v3)
6597 v0.AddArg(v1)
6598 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6599 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6600 v5.AddArg(x)
6601 v4.AddArg2(v5, v3)
6602 v.AddArg2(v0, v4)
6603 return true
6604 }
6605 }
6606 func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
6607 v_1 := v.Args[1]
6608 v_0 := v.Args[0]
6609 b := v.Block
6610 typ := &b.Func.Config.Types
6611
6612
6613 for {
6614 t := v.Type
6615 x := v_0
6616 y := v_1
6617 v.reset(OpMIPS64SRAV)
6618 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6619 v0.AddArg(x)
6620 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6621 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6622 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6623 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6624 v4.AddArg(y)
6625 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6626 v5.AuxInt = int64ToAuxInt(63)
6627 v3.AddArg2(v4, v5)
6628 v2.AddArg(v3)
6629 v1.AddArg2(v2, v4)
6630 v.AddArg2(v0, v1)
6631 return true
6632 }
6633 }
6634 func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
6635 v_1 := v.Args[1]
6636 v_0 := v.Args[0]
6637 b := v.Block
6638 typ := &b.Func.Config.Types
6639
6640
6641 for {
6642 t := v.Type
6643 x := v_0
6644 y := v_1
6645 v.reset(OpMIPS64SRAV)
6646 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6647 v0.AddArg(x)
6648 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6649 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6650 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6651 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6652 v4.AddArg(y)
6653 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6654 v5.AuxInt = int64ToAuxInt(63)
6655 v3.AddArg2(v4, v5)
6656 v2.AddArg(v3)
6657 v1.AddArg2(v2, v4)
6658 v.AddArg2(v0, v1)
6659 return true
6660 }
6661 }
6662 func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
6663 v_1 := v.Args[1]
6664 v_0 := v.Args[0]
6665 b := v.Block
6666 typ := &b.Func.Config.Types
6667
6668
6669 for {
6670 t := v.Type
6671 x := v_0
6672 y := v_1
6673 v.reset(OpMIPS64SRAV)
6674 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6675 v0.AddArg(x)
6676 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6677 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6678 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6679 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6680 v4.AuxInt = int64ToAuxInt(63)
6681 v3.AddArg2(y, v4)
6682 v2.AddArg(v3)
6683 v1.AddArg2(v2, y)
6684 v.AddArg2(v0, v1)
6685 return true
6686 }
6687 }
6688 func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
6689 v_1 := v.Args[1]
6690 v_0 := v.Args[0]
6691 b := v.Block
6692 typ := &b.Func.Config.Types
6693
6694
6695 for {
6696 t := v.Type
6697 x := v_0
6698 y := v_1
6699 v.reset(OpMIPS64SRAV)
6700 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6701 v0.AddArg(x)
6702 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6703 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6704 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6705 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6706 v4.AddArg(y)
6707 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6708 v5.AuxInt = int64ToAuxInt(63)
6709 v3.AddArg2(v4, v5)
6710 v2.AddArg(v3)
6711 v1.AddArg2(v2, v4)
6712 v.AddArg2(v0, v1)
6713 return true
6714 }
6715 }
6716 func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
6717 v_1 := v.Args[1]
6718 v_0 := v.Args[0]
6719 b := v.Block
6720 typ := &b.Func.Config.Types
6721
6722
6723 for {
6724 t := v.Type
6725 x := v_0
6726 y := v_1
6727 v.reset(OpMIPS64AND)
6728 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6729 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6730 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6731 v2.AuxInt = int64ToAuxInt(64)
6732 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6733 v3.AddArg(y)
6734 v1.AddArg2(v2, v3)
6735 v0.AddArg(v1)
6736 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6737 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6738 v5.AddArg(x)
6739 v4.AddArg2(v5, v3)
6740 v.AddArg2(v0, v4)
6741 return true
6742 }
6743 }
6744 func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
6745 v_1 := v.Args[1]
6746 v_0 := v.Args[0]
6747 b := v.Block
6748 typ := &b.Func.Config.Types
6749
6750
6751 for {
6752 t := v.Type
6753 x := v_0
6754 y := v_1
6755 v.reset(OpMIPS64AND)
6756 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6757 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6758 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6759 v2.AuxInt = int64ToAuxInt(64)
6760 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6761 v3.AddArg(y)
6762 v1.AddArg2(v2, v3)
6763 v0.AddArg(v1)
6764 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6765 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6766 v5.AddArg(x)
6767 v4.AddArg2(v5, v3)
6768 v.AddArg2(v0, v4)
6769 return true
6770 }
6771 }
6772 func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
6773 v_1 := v.Args[1]
6774 v_0 := v.Args[0]
6775 b := v.Block
6776 typ := &b.Func.Config.Types
6777
6778
6779 for {
6780 t := v.Type
6781 x := v_0
6782 y := v_1
6783 v.reset(OpMIPS64AND)
6784 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6785 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6786 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6787 v2.AuxInt = int64ToAuxInt(64)
6788 v1.AddArg2(v2, y)
6789 v0.AddArg(v1)
6790 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6791 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6792 v4.AddArg(x)
6793 v3.AddArg2(v4, y)
6794 v.AddArg2(v0, v3)
6795 return true
6796 }
6797 }
6798 func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
6799 v_1 := v.Args[1]
6800 v_0 := v.Args[0]
6801 b := v.Block
6802 typ := &b.Func.Config.Types
6803
6804
6805 for {
6806 t := v.Type
6807 x := v_0
6808 y := v_1
6809 v.reset(OpMIPS64AND)
6810 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6811 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6812 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6813 v2.AuxInt = int64ToAuxInt(64)
6814 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6815 v3.AddArg(y)
6816 v1.AddArg2(v2, v3)
6817 v0.AddArg(v1)
6818 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6819 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6820 v5.AddArg(x)
6821 v4.AddArg2(v5, v3)
6822 v.AddArg2(v0, v4)
6823 return true
6824 }
6825 }
6826 func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
6827 v_1 := v.Args[1]
6828 v_0 := v.Args[0]
6829 b := v.Block
6830 typ := &b.Func.Config.Types
6831
6832
6833 for {
6834 t := v.Type
6835 x := v_0
6836 y := v_1
6837 v.reset(OpMIPS64SRAV)
6838 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6839 v0.AddArg(x)
6840 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6841 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6842 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6843 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6844 v4.AddArg(y)
6845 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6846 v5.AuxInt = int64ToAuxInt(63)
6847 v3.AddArg2(v4, v5)
6848 v2.AddArg(v3)
6849 v1.AddArg2(v2, v4)
6850 v.AddArg2(v0, v1)
6851 return true
6852 }
6853 }
6854 func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
6855 v_1 := v.Args[1]
6856 v_0 := v.Args[0]
6857 b := v.Block
6858 typ := &b.Func.Config.Types
6859
6860
6861 for {
6862 t := v.Type
6863 x := v_0
6864 y := v_1
6865 v.reset(OpMIPS64SRAV)
6866 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6867 v0.AddArg(x)
6868 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6869 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6870 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6871 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6872 v4.AddArg(y)
6873 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6874 v5.AuxInt = int64ToAuxInt(63)
6875 v3.AddArg2(v4, v5)
6876 v2.AddArg(v3)
6877 v1.AddArg2(v2, v4)
6878 v.AddArg2(v0, v1)
6879 return true
6880 }
6881 }
6882 func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
6883 v_1 := v.Args[1]
6884 v_0 := v.Args[0]
6885 b := v.Block
6886 typ := &b.Func.Config.Types
6887
6888
6889 for {
6890 t := v.Type
6891 x := v_0
6892 y := v_1
6893 v.reset(OpMIPS64SRAV)
6894 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6895 v0.AddArg(x)
6896 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6897 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6898 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6899 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6900 v4.AuxInt = int64ToAuxInt(63)
6901 v3.AddArg2(y, v4)
6902 v2.AddArg(v3)
6903 v1.AddArg2(v2, y)
6904 v.AddArg2(v0, v1)
6905 return true
6906 }
6907 }
6908 func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
6909 v_1 := v.Args[1]
6910 v_0 := v.Args[0]
6911 b := v.Block
6912 typ := &b.Func.Config.Types
6913
6914
6915 for {
6916 t := v.Type
6917 x := v_0
6918 y := v_1
6919 v.reset(OpMIPS64SRAV)
6920 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6921 v0.AddArg(x)
6922 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6923 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6924 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6925 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6926 v4.AddArg(y)
6927 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6928 v5.AuxInt = int64ToAuxInt(63)
6929 v3.AddArg2(v4, v5)
6930 v2.AddArg(v3)
6931 v1.AddArg2(v2, v4)
6932 v.AddArg2(v0, v1)
6933 return true
6934 }
6935 }
6936 func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
6937 v_1 := v.Args[1]
6938 v_0 := v.Args[0]
6939 b := v.Block
6940 typ := &b.Func.Config.Types
6941
6942
6943 for {
6944 t := v.Type
6945 x := v_0
6946 y := v_1
6947 v.reset(OpMIPS64AND)
6948 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6949 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6950 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6951 v2.AuxInt = int64ToAuxInt(64)
6952 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6953 v3.AddArg(y)
6954 v1.AddArg2(v2, v3)
6955 v0.AddArg(v1)
6956 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6957 v4.AddArg2(x, v3)
6958 v.AddArg2(v0, v4)
6959 return true
6960 }
6961 }
6962 func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
6963 v_1 := v.Args[1]
6964 v_0 := v.Args[0]
6965 b := v.Block
6966 typ := &b.Func.Config.Types
6967
6968
6969 for {
6970 t := v.Type
6971 x := v_0
6972 y := v_1
6973 v.reset(OpMIPS64AND)
6974 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6975 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6976 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6977 v2.AuxInt = int64ToAuxInt(64)
6978 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6979 v3.AddArg(y)
6980 v1.AddArg2(v2, v3)
6981 v0.AddArg(v1)
6982 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6983 v4.AddArg2(x, v3)
6984 v.AddArg2(v0, v4)
6985 return true
6986 }
6987 }
6988 func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
6989 v_1 := v.Args[1]
6990 v_0 := v.Args[0]
6991 b := v.Block
6992 typ := &b.Func.Config.Types
6993
6994
6995 for {
6996 t := v.Type
6997 x := v_0
6998 y := v_1
6999 v.reset(OpMIPS64AND)
7000 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7001 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7002 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7003 v2.AuxInt = int64ToAuxInt(64)
7004 v1.AddArg2(v2, y)
7005 v0.AddArg(v1)
7006 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7007 v3.AddArg2(x, y)
7008 v.AddArg2(v0, v3)
7009 return true
7010 }
7011 }
7012 func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
7013 v_1 := v.Args[1]
7014 v_0 := v.Args[0]
7015 b := v.Block
7016 typ := &b.Func.Config.Types
7017
7018
7019 for {
7020 t := v.Type
7021 x := v_0
7022 y := v_1
7023 v.reset(OpMIPS64AND)
7024 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7025 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7026 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7027 v2.AuxInt = int64ToAuxInt(64)
7028 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7029 v3.AddArg(y)
7030 v1.AddArg2(v2, v3)
7031 v0.AddArg(v1)
7032 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7033 v4.AddArg2(x, v3)
7034 v.AddArg2(v0, v4)
7035 return true
7036 }
7037 }
7038 func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
7039 v_1 := v.Args[1]
7040 v_0 := v.Args[0]
7041 b := v.Block
7042 typ := &b.Func.Config.Types
7043
7044
7045 for {
7046 t := v.Type
7047 x := v_0
7048 y := v_1
7049 v.reset(OpMIPS64SRAV)
7050 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7051 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7052 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7053 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7054 v3.AddArg(y)
7055 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7056 v4.AuxInt = int64ToAuxInt(63)
7057 v2.AddArg2(v3, v4)
7058 v1.AddArg(v2)
7059 v0.AddArg2(v1, v3)
7060 v.AddArg2(x, v0)
7061 return true
7062 }
7063 }
7064 func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
7065 v_1 := v.Args[1]
7066 v_0 := v.Args[0]
7067 b := v.Block
7068 typ := &b.Func.Config.Types
7069
7070
7071 for {
7072 t := v.Type
7073 x := v_0
7074 y := v_1
7075 v.reset(OpMIPS64SRAV)
7076 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7077 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7078 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7079 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7080 v3.AddArg(y)
7081 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7082 v4.AuxInt = int64ToAuxInt(63)
7083 v2.AddArg2(v3, v4)
7084 v1.AddArg(v2)
7085 v0.AddArg2(v1, v3)
7086 v.AddArg2(x, v0)
7087 return true
7088 }
7089 }
7090 func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
7091 v_1 := v.Args[1]
7092 v_0 := v.Args[0]
7093 b := v.Block
7094 typ := &b.Func.Config.Types
7095
7096
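// match: (Rsh64x64 <t> x y)
// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))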
7097 for {
7098 t := v.Type
7099 x := v_0
7100 y := v_1
7101 v.reset(OpMIPS64SRAV)
7102 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7103 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7104 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7105 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7106 v3.AuxInt = int64ToAuxInt(63)
7107 v2.AddArg2(y, v3)
7108 v1.AddArg(v2)
7109 v0.AddArg2(v1, y)
7110 v.AddArg2(x, v0)
7111 return true
7112 }
7113 }
7114 func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
7115 v_1 := v.Args[1]
7116 v_0 := v.Args[0]
7117 b := v.Block
7118 typ := &b.Func.Config.Types
7119
7120
7121 for {
7122 t := v.Type
7123 x := v_0
7124 y := v_1
7125 v.reset(OpMIPS64SRAV)
7126 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7127 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7128 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7129 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7130 v3.AddArg(y)
7131 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7132 v4.AuxInt = int64ToAuxInt(63)
7133 v2.AddArg2(v3, v4)
7134 v1.AddArg(v2)
7135 v0.AddArg2(v1, v3)
7136 v.AddArg2(x, v0)
7137 return true
7138 }
7139 }
7140 func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
7141 v_1 := v.Args[1]
7142 v_0 := v.Args[0]
7143 b := v.Block
7144 typ := &b.Func.Config.Types
7145
7146
7147 for {
7148 t := v.Type
7149 x := v_0
7150 y := v_1
7151 v.reset(OpMIPS64AND)
7152 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7153 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7154 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7155 v2.AuxInt = int64ToAuxInt(64)
7156 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7157 v3.AddArg(y)
7158 v1.AddArg2(v2, v3)
7159 v0.AddArg(v1)
7160 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7161 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7162 v5.AddArg(x)
7163 v4.AddArg2(v5, v3)
7164 v.AddArg2(v0, v4)
7165 return true
7166 }
7167 }
7168 func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
7169 v_1 := v.Args[1]
7170 v_0 := v.Args[0]
7171 b := v.Block
7172 typ := &b.Func.Config.Types
7173
7174
7175 for {
7176 t := v.Type
7177 x := v_0
7178 y := v_1
7179 v.reset(OpMIPS64AND)
7180 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7181 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7182 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7183 v2.AuxInt = int64ToAuxInt(64)
7184 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7185 v3.AddArg(y)
7186 v1.AddArg2(v2, v3)
7187 v0.AddArg(v1)
7188 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7189 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7190 v5.AddArg(x)
7191 v4.AddArg2(v5, v3)
7192 v.AddArg2(v0, v4)
7193 return true
7194 }
7195 }
7196 func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
7197 v_1 := v.Args[1]
7198 v_0 := v.Args[0]
7199 b := v.Block
7200 typ := &b.Func.Config.Types
7201
7202
7203 for {
7204 t := v.Type
7205 x := v_0
7206 y := v_1
7207 v.reset(OpMIPS64AND)
7208 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7209 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7210 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7211 v2.AuxInt = int64ToAuxInt(64)
7212 v1.AddArg2(v2, y)
7213 v0.AddArg(v1)
7214 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7215 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7216 v4.AddArg(x)
7217 v3.AddArg2(v4, y)
7218 v.AddArg2(v0, v3)
7219 return true
7220 }
7221 }
7222 func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
7223 v_1 := v.Args[1]
7224 v_0 := v.Args[0]
7225 b := v.Block
7226 typ := &b.Func.Config.Types
7227
7228
7229 for {
7230 t := v.Type
7231 x := v_0
7232 y := v_1
7233 v.reset(OpMIPS64AND)
7234 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7235 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7236 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7237 v2.AuxInt = int64ToAuxInt(64)
7238 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7239 v3.AddArg(y)
7240 v1.AddArg2(v2, v3)
7241 v0.AddArg(v1)
7242 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7243 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7244 v5.AddArg(x)
7245 v4.AddArg2(v5, v3)
7246 v.AddArg2(v0, v4)
7247 return true
7248 }
7249 }
7250 func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
7251 v_1 := v.Args[1]
7252 v_0 := v.Args[0]
7253 b := v.Block
7254 typ := &b.Func.Config.Types
7255
7256
7257 for {
7258 t := v.Type
7259 x := v_0
7260 y := v_1
7261 v.reset(OpMIPS64SRAV)
7262 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7263 v0.AddArg(x)
7264 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7265 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7266 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7267 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7268 v4.AddArg(y)
7269 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7270 v5.AuxInt = int64ToAuxInt(63)
7271 v3.AddArg2(v4, v5)
7272 v2.AddArg(v3)
7273 v1.AddArg2(v2, v4)
7274 v.AddArg2(v0, v1)
7275 return true
7276 }
7277 }
7278 func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
7279 v_1 := v.Args[1]
7280 v_0 := v.Args[0]
7281 b := v.Block
7282 typ := &b.Func.Config.Types
7283
7284
7285 for {
7286 t := v.Type
7287 x := v_0
7288 y := v_1
7289 v.reset(OpMIPS64SRAV)
7290 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7291 v0.AddArg(x)
7292 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7293 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7294 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7295 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7296 v4.AddArg(y)
7297 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7298 v5.AuxInt = int64ToAuxInt(63)
7299 v3.AddArg2(v4, v5)
7300 v2.AddArg(v3)
7301 v1.AddArg2(v2, v4)
7302 v.AddArg2(v0, v1)
7303 return true
7304 }
7305 }
7306 func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
7307 v_1 := v.Args[1]
7308 v_0 := v.Args[0]
7309 b := v.Block
7310 typ := &b.Func.Config.Types
7311
7312
7313 for {
7314 t := v.Type
7315 x := v_0
7316 y := v_1
7317 v.reset(OpMIPS64SRAV)
7318 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7319 v0.AddArg(x)
7320 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7321 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7322 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7323 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7324 v4.AuxInt = int64ToAuxInt(63)
7325 v3.AddArg2(y, v4)
7326 v2.AddArg(v3)
7327 v1.AddArg2(v2, y)
7328 v.AddArg2(v0, v1)
7329 return true
7330 }
7331 }
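// rewriteValueMIPS64_OpRsh8x8 lowers Rsh8x8 like Rsh8x16, with the shift
// count zero-extended from 8 bits.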
7332 func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
7333 v_1 := v.Args[1]
7334 v_0 := v.Args[0]
7335 b := v.Block
7336 typ := &b.Func.Config.Types
7337 // match: (Rsh8x8 <t> x y)
7338 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
7339 for {
7340 t := v.Type
7341 x := v_0
7342 y := v_1
7343 v.reset(OpMIPS64SRAV)
7344 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7345 v0.AddArg(x)
7346 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7347 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7348 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7349 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7350 v4.AddArg(y)
7351 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7352 v5.AuxInt = int64ToAuxInt(63)
7353 v3.AddArg2(v4, v5)
7354 v2.AddArg(v3)
7355 v1.AddArg2(v2, v4)
7356 v.AddArg2(v0, v1)
7357 return true
7358 }
7359 }
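// rewriteValueMIPS64_OpSelect0 rewrites Select0 of tuple-producing ops: the
// product of Mul64uover (the LO half of MULVU), the low 64 bits of the sum of
// Add64carry and of the difference of Sub64borrow, and the remainder half of
// DIVV/DIVVU, where division by 1 folds to 0, division by a power of two
// becomes an ANDconst mask, and constant operands are folded outright.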
7360 func rewriteValueMIPS64_OpSelect0(v *Value) bool {
7361 v_0 := v.Args[0]
7362 b := v.Block
7363 typ := &b.Func.Config.Types
7364 // match: (Select0 (Mul64uover x y))
7365 // result: (Select1 <typ.UInt64> (MULVU x y))
7366 for {
7367 if v_0.Op != OpMul64uover {
7368 break
7369 }
7370 y := v_0.Args[1]
7371 x := v_0.Args[0]
7372 v.reset(OpSelect1)
7373 v.Type = typ.UInt64
7374 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7375 v0.AddArg2(x, y)
7376 v.AddArg(v0)
7377 return true
7378 }
7379 // match: (Select0 <t> (Add64carry x y c))
7380 // result: (ADDV (ADDV <t> x y) c)
7381 for {
7382 t := v.Type
7383 if v_0.Op != OpAdd64carry {
7384 break
7385 }
7386 c := v_0.Args[2]
7387 x := v_0.Args[0]
7388 y := v_0.Args[1]
7389 v.reset(OpMIPS64ADDV)
7390 v0 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7391 v0.AddArg2(x, y)
7392 v.AddArg2(v0, c)
7393 return true
7394 }
7395 // match: (Select0 <t> (Sub64borrow x y c))
7396 // result: (SUBV (SUBV <t> x y) c)
7397 for {
7398 t := v.Type
7399 if v_0.Op != OpSub64borrow {
7400 break
7401 }
7402 c := v_0.Args[2]
7403 x := v_0.Args[0]
7404 y := v_0.Args[1]
7405 v.reset(OpMIPS64SUBV)
7406 v0 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7407 v0.AddArg2(x, y)
7408 v.AddArg2(v0, c)
7409 return true
7410 }
7411
7412
7413 for {
7414 if v_0.Op != OpMIPS64DIVVU {
7415 break
7416 }
7417 _ = v_0.Args[1]
7418 v_0_1 := v_0.Args[1]
7419 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7420 break
7421 }
7422 v.reset(OpMIPS64MOVVconst)
7423 v.AuxInt = int64ToAuxInt(0)
7424 return true
7425 }
7426 // match: (Select0 (DIVVU x (MOVVconst [c])))
7427 // cond: isPowerOfTwo(c)
7428 // result: (ANDconst [c-1] x)
7429 for {
7430 if v_0.Op != OpMIPS64DIVVU {
7431 break
7432 }
7433 _ = v_0.Args[1]
7434 x := v_0.Args[0]
7435 v_0_1 := v_0.Args[1]
7436 if v_0_1.Op != OpMIPS64MOVVconst {
7437 break
7438 }
7439 c := auxIntToInt64(v_0_1.AuxInt)
7440 if !(isPowerOfTwo(c)) {
7441 break
7442 }
7443 v.reset(OpMIPS64ANDconst)
7444 v.AuxInt = int64ToAuxInt(c - 1)
7445 v.AddArg(x)
7446 return true
7447 }
7448
7449
7450
7451 for {
7452 if v_0.Op != OpMIPS64DIVV {
7453 break
7454 }
7455 _ = v_0.Args[1]
7456 v_0_0 := v_0.Args[0]
7457 if v_0_0.Op != OpMIPS64MOVVconst {
7458 break
7459 }
7460 c := auxIntToInt64(v_0_0.AuxInt)
7461 v_0_1 := v_0.Args[1]
7462 if v_0_1.Op != OpMIPS64MOVVconst {
7463 break
7464 }
7465 d := auxIntToInt64(v_0_1.AuxInt)
7466 if !(d != 0) {
7467 break
7468 }
7469 v.reset(OpMIPS64MOVVconst)
7470 v.AuxInt = int64ToAuxInt(c % d)
7471 return true
7472 }
7473
7474
7475
7476 for {
7477 if v_0.Op != OpMIPS64DIVVU {
7478 break
7479 }
7480 _ = v_0.Args[1]
7481 v_0_0 := v_0.Args[0]
7482 if v_0_0.Op != OpMIPS64MOVVconst {
7483 break
7484 }
7485 c := auxIntToInt64(v_0_0.AuxInt)
7486 v_0_1 := v_0.Args[1]
7487 if v_0_1.Op != OpMIPS64MOVVconst {
7488 break
7489 }
7490 d := auxIntToInt64(v_0_1.AuxInt)
7491 if !(d != 0) {
7492 break
7493 }
7494 v.reset(OpMIPS64MOVVconst)
7495 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
7496 return true
7497 }
7498 return false
7499 }
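// rewriteValueMIPS64_OpSelect1 rewrites Select1 of tuple-producing ops: the
// overflow flag of Mul64uover (a nonzero HI half of MULVU), the carry of
// Add64carry and the borrow of Sub64borrow (built from unsigned SGTU
// comparisons), plus strength reduction and constant folding for the product
// half of MULVU and the quotient half of DIVV/DIVVU.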
7500 func rewriteValueMIPS64_OpSelect1(v *Value) bool {
7501 v_0 := v.Args[0]
7502 b := v.Block
7503 typ := &b.Func.Config.Types
7504 // match: (Select1 (Mul64uover x y))
7505 // result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0]))
7506 for {
7507 if v_0.Op != OpMul64uover {
7508 break
7509 }
7510 y := v_0.Args[1]
7511 x := v_0.Args[0]
7512 v.reset(OpMIPS64SGTU)
7513 v.Type = typ.Bool
7514 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
7515 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7516 v1.AddArg2(x, y)
7517 v0.AddArg(v1)
7518 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7519 v2.AuxInt = int64ToAuxInt(0)
7520 v.AddArg2(v0, v2)
7521 return true
7522 }
7523 // match: (Select1 <t> (Add64carry x y c))
7524 // result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
7525 for {
7526 t := v.Type
7527 if v_0.Op != OpAdd64carry {
7528 break
7529 }
7530 c := v_0.Args[2]
7531 x := v_0.Args[0]
7532 y := v_0.Args[1]
7533 v.reset(OpMIPS64OR)
7534 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7535 s := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7536 s.AddArg2(x, y)
7537 v0.AddArg2(x, s)
7538 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7539 v3 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7540 v3.AddArg2(s, c)
7541 v2.AddArg2(s, v3)
7542 v.AddArg2(v0, v2)
7543 return true
7544 }
7545 // match: (Select1 <t> (Sub64borrow x y c))
7546 // result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
7547 for {
7548 t := v.Type
7549 if v_0.Op != OpSub64borrow {
7550 break
7551 }
7552 c := v_0.Args[2]
7553 x := v_0.Args[0]
7554 y := v_0.Args[1]
7555 v.reset(OpMIPS64OR)
7556 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7557 s := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7558 s.AddArg2(x, y)
7559 v0.AddArg2(s, x)
7560 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7561 v3 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7562 v3.AddArg2(s, c)
7563 v2.AddArg2(v3, s)
7564 v.AddArg2(v0, v2)
7565 return true
7566 }
7567
7568
7569 for {
7570 if v_0.Op != OpMIPS64MULVU {
7571 break
7572 }
7573 _ = v_0.Args[1]
7574 v_0_0 := v_0.Args[0]
7575 v_0_1 := v_0.Args[1]
7576 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7577 x := v_0_0
7578 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
7579 continue
7580 }
7581 v.reset(OpMIPS64NEGV)
7582 v.AddArg(x)
7583 return true
7584 }
7585 break
7586 }
7587
7588
7589 for {
7590 if v_0.Op != OpMIPS64MULVU {
7591 break
7592 }
7593 _ = v_0.Args[1]
7594 v_0_0 := v_0.Args[0]
7595 v_0_1 := v_0.Args[1]
7596 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7597 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7598 continue
7599 }
7600 v.reset(OpMIPS64MOVVconst)
7601 v.AuxInt = int64ToAuxInt(0)
7602 return true
7603 }
7604 break
7605 }
7606
7607
7608 for {
7609 if v_0.Op != OpMIPS64MULVU {
7610 break
7611 }
7612 _ = v_0.Args[1]
7613 v_0_0 := v_0.Args[0]
7614 v_0_1 := v_0.Args[1]
7615 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7616 x := v_0_0
7617 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7618 continue
7619 }
7620 v.copyOf(x)
7621 return true
7622 }
7623 break
7624 }
7625 // match: (Select1 (MULVU x (MOVVconst [c])))
7626 // cond: isPowerOfTwo(c)
7627 // result: (SLLVconst [log64(c)] x)
7628 for {
7629 if v_0.Op != OpMIPS64MULVU {
7630 break
7631 }
7632 _ = v_0.Args[1]
7633 v_0_0 := v_0.Args[0]
7634 v_0_1 := v_0.Args[1]
7635 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7636 x := v_0_0
7637 if v_0_1.Op != OpMIPS64MOVVconst {
7638 continue
7639 }
7640 c := auxIntToInt64(v_0_1.AuxInt)
7641 if !(isPowerOfTwo(c)) {
7642 continue
7643 }
7644 v.reset(OpMIPS64SLLVconst)
7645 v.AuxInt = int64ToAuxInt(log64(c))
7646 v.AddArg(x)
7647 return true
7648 }
7649 break
7650 }
7651
7652
7653 for {
7654 if v_0.Op != OpMIPS64DIVVU {
7655 break
7656 }
7657 _ = v_0.Args[1]
7658 x := v_0.Args[0]
7659 v_0_1 := v_0.Args[1]
7660 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7661 break
7662 }
7663 v.copyOf(x)
7664 return true
7665 }
7666
7667
7668
7669 for {
7670 if v_0.Op != OpMIPS64DIVVU {
7671 break
7672 }
7673 _ = v_0.Args[1]
7674 x := v_0.Args[0]
7675 v_0_1 := v_0.Args[1]
7676 if v_0_1.Op != OpMIPS64MOVVconst {
7677 break
7678 }
7679 c := auxIntToInt64(v_0_1.AuxInt)
7680 if !(isPowerOfTwo(c)) {
7681 break
7682 }
7683 v.reset(OpMIPS64SRLVconst)
7684 v.AuxInt = int64ToAuxInt(log64(c))
7685 v.AddArg(x)
7686 return true
7687 }
7688
7689
7690 for {
7691 if v_0.Op != OpMIPS64MULVU {
7692 break
7693 }
7694 _ = v_0.Args[1]
7695 v_0_0 := v_0.Args[0]
7696 v_0_1 := v_0.Args[1]
7697 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7698 if v_0_0.Op != OpMIPS64MOVVconst {
7699 continue
7700 }
7701 c := auxIntToInt64(v_0_0.AuxInt)
7702 if v_0_1.Op != OpMIPS64MOVVconst {
7703 continue
7704 }
7705 d := auxIntToInt64(v_0_1.AuxInt)
7706 v.reset(OpMIPS64MOVVconst)
7707 v.AuxInt = int64ToAuxInt(c * d)
7708 return true
7709 }
7710 break
7711 }
7712
7713
7714
7715 for {
7716 if v_0.Op != OpMIPS64DIVV {
7717 break
7718 }
7719 _ = v_0.Args[1]
7720 v_0_0 := v_0.Args[0]
7721 if v_0_0.Op != OpMIPS64MOVVconst {
7722 break
7723 }
7724 c := auxIntToInt64(v_0_0.AuxInt)
7725 v_0_1 := v_0.Args[1]
7726 if v_0_1.Op != OpMIPS64MOVVconst {
7727 break
7728 }
7729 d := auxIntToInt64(v_0_1.AuxInt)
7730 if !(d != 0) {
7731 break
7732 }
7733 v.reset(OpMIPS64MOVVconst)
7734 v.AuxInt = int64ToAuxInt(c / d)
7735 return true
7736 }
7737
7738
7739
7740 for {
7741 if v_0.Op != OpMIPS64DIVVU {
7742 break
7743 }
7744 _ = v_0.Args[1]
7745 v_0_0 := v_0.Args[0]
7746 if v_0_0.Op != OpMIPS64MOVVconst {
7747 break
7748 }
7749 c := auxIntToInt64(v_0_0.AuxInt)
7750 v_0_1 := v_0.Args[1]
7751 if v_0_1.Op != OpMIPS64MOVVconst {
7752 break
7753 }
7754 d := auxIntToInt64(v_0_1.AuxInt)
7755 if !(d != 0) {
7756 break
7757 }
7758 v.reset(OpMIPS64MOVVconst)
7759 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
7760 return true
7761 }
7762 return false
7763 }
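// rewriteValueMIPS64_OpSlicemask lowers Slicemask to (SRAVconst (NEGV x) [63]):
// negating x and arithmetically shifting right by 63 yields all ones for any
// nonzero length and zero for a zero length.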
7764 func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
7765 v_0 := v.Args[0]
7766 b := v.Block
7767 // match: (Slicemask <t> x)
7768 // result: (SRAVconst (NEGV <t> x) [63])
7769 for {
7770 t := v.Type
7771 x := v_0
7772 v.reset(OpMIPS64SRAVconst)
7773 v.AuxInt = int64ToAuxInt(63)
7774 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7775 v0.AddArg(x)
7776 v.AddArg(v0)
7777 return true
7778 }
7779 }
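// rewriteValueMIPS64_OpStore selects the MIPS64 store matching the aux type:
// MOVBstore, MOVHstore, MOVWstore or MOVVstore for 1-, 2-, 4- and 8-byte
// integer types, and MOVFstore or MOVDstore for 4- and 8-byte floats.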
7780 func rewriteValueMIPS64_OpStore(v *Value) bool {
7781 v_2 := v.Args[2]
7782 v_1 := v.Args[1]
7783 v_0 := v.Args[0]
7784
7785
7786
7787 for {
7788 t := auxToType(v.Aux)
7789 ptr := v_0
7790 val := v_1
7791 mem := v_2
7792 if !(t.Size() == 1) {
7793 break
7794 }
7795 v.reset(OpMIPS64MOVBstore)
7796 v.AddArg3(ptr, val, mem)
7797 return true
7798 }
7799
7800
7801
7802 for {
7803 t := auxToType(v.Aux)
7804 ptr := v_0
7805 val := v_1
7806 mem := v_2
7807 if !(t.Size() == 2) {
7808 break
7809 }
7810 v.reset(OpMIPS64MOVHstore)
7811 v.AddArg3(ptr, val, mem)
7812 return true
7813 }
7814
7815
7816
7817 for {
7818 t := auxToType(v.Aux)
7819 ptr := v_0
7820 val := v_1
7821 mem := v_2
7822 if !(t.Size() == 4 && !t.IsFloat()) {
7823 break
7824 }
7825 v.reset(OpMIPS64MOVWstore)
7826 v.AddArg3(ptr, val, mem)
7827 return true
7828 }
7829
7830
7831
7832 for {
7833 t := auxToType(v.Aux)
7834 ptr := v_0
7835 val := v_1
7836 mem := v_2
7837 if !(t.Size() == 8 && !t.IsFloat()) {
7838 break
7839 }
7840 v.reset(OpMIPS64MOVVstore)
7841 v.AddArg3(ptr, val, mem)
7842 return true
7843 }
7844
7845
7846
7847 for {
7848 t := auxToType(v.Aux)
7849 ptr := v_0
7850 val := v_1
7851 mem := v_2
7852 if !(t.Size() == 4 && t.IsFloat()) {
7853 break
7854 }
7855 v.reset(OpMIPS64MOVFstore)
7856 v.AddArg3(ptr, val, mem)
7857 return true
7858 }
7859
7860
7861
7862 for {
7863 t := auxToType(v.Aux)
7864 ptr := v_0
7865 val := v_1
7866 mem := v_2
7867 if !(t.Size() == 8 && t.IsFloat()) {
7868 break
7869 }
7870 v.reset(OpMIPS64MOVDstore)
7871 v.AddArg3(ptr, val, mem)
7872 return true
7873 }
7874 return false
7875 }
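// rewriteValueMIPS64_OpZero expands a constant-size Zero: small sizes become
// an unrolled sequence of stores of a zero constant, picking the widest store
// the declared alignment allows; 8-byte-aligned sizes that are multiples of 8
// between 32 and 1024 bytes use DUFFZERO; anything larger or less aligned
// falls back to the LoweredZero loop.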
7876 func rewriteValueMIPS64_OpZero(v *Value) bool {
7877 v_1 := v.Args[1]
7878 v_0 := v.Args[0]
7879 b := v.Block
7880 config := b.Func.Config
7881 typ := &b.Func.Config.Types
7882
7883
7884 for {
7885 if auxIntToInt64(v.AuxInt) != 0 {
7886 break
7887 }
7888 mem := v_1
7889 v.copyOf(mem)
7890 return true
7891 }
7892
7893
7894 for {
7895 if auxIntToInt64(v.AuxInt) != 1 {
7896 break
7897 }
7898 ptr := v_0
7899 mem := v_1
7900 v.reset(OpMIPS64MOVBstore)
7901 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7902 v0.AuxInt = int64ToAuxInt(0)
7903 v.AddArg3(ptr, v0, mem)
7904 return true
7905 }
7906
7907
7908
7909 for {
7910 if auxIntToInt64(v.AuxInt) != 2 {
7911 break
7912 }
7913 t := auxToType(v.Aux)
7914 ptr := v_0
7915 mem := v_1
7916 if !(t.Alignment()%2 == 0) {
7917 break
7918 }
7919 v.reset(OpMIPS64MOVHstore)
7920 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7921 v0.AuxInt = int64ToAuxInt(0)
7922 v.AddArg3(ptr, v0, mem)
7923 return true
7924 }
7925
7926
7927 for {
7928 if auxIntToInt64(v.AuxInt) != 2 {
7929 break
7930 }
7931 ptr := v_0
7932 mem := v_1
7933 v.reset(OpMIPS64MOVBstore)
7934 v.AuxInt = int32ToAuxInt(1)
7935 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7936 v0.AuxInt = int64ToAuxInt(0)
7937 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7938 v1.AuxInt = int32ToAuxInt(0)
7939 v1.AddArg3(ptr, v0, mem)
7940 v.AddArg3(ptr, v0, v1)
7941 return true
7942 }
7943
7944
7945
7946 for {
7947 if auxIntToInt64(v.AuxInt) != 4 {
7948 break
7949 }
7950 t := auxToType(v.Aux)
7951 ptr := v_0
7952 mem := v_1
7953 if !(t.Alignment()%4 == 0) {
7954 break
7955 }
7956 v.reset(OpMIPS64MOVWstore)
7957 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7958 v0.AuxInt = int64ToAuxInt(0)
7959 v.AddArg3(ptr, v0, mem)
7960 return true
7961 }
7962
7963
7964
7965 for {
7966 if auxIntToInt64(v.AuxInt) != 4 {
7967 break
7968 }
7969 t := auxToType(v.Aux)
7970 ptr := v_0
7971 mem := v_1
7972 if !(t.Alignment()%2 == 0) {
7973 break
7974 }
7975 v.reset(OpMIPS64MOVHstore)
7976 v.AuxInt = int32ToAuxInt(2)
7977 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7978 v0.AuxInt = int64ToAuxInt(0)
7979 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7980 v1.AuxInt = int32ToAuxInt(0)
7981 v1.AddArg3(ptr, v0, mem)
7982 v.AddArg3(ptr, v0, v1)
7983 return true
7984 }
7985
7986
7987 for {
7988 if auxIntToInt64(v.AuxInt) != 4 {
7989 break
7990 }
7991 ptr := v_0
7992 mem := v_1
7993 v.reset(OpMIPS64MOVBstore)
7994 v.AuxInt = int32ToAuxInt(3)
7995 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7996 v0.AuxInt = int64ToAuxInt(0)
7997 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7998 v1.AuxInt = int32ToAuxInt(2)
7999 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8000 v2.AuxInt = int32ToAuxInt(1)
8001 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8002 v3.AuxInt = int32ToAuxInt(0)
8003 v3.AddArg3(ptr, v0, mem)
8004 v2.AddArg3(ptr, v0, v3)
8005 v1.AddArg3(ptr, v0, v2)
8006 v.AddArg3(ptr, v0, v1)
8007 return true
8008 }
8009
8010
8011
8012 for {
8013 if auxIntToInt64(v.AuxInt) != 8 {
8014 break
8015 }
8016 t := auxToType(v.Aux)
8017 ptr := v_0
8018 mem := v_1
8019 if !(t.Alignment()%8 == 0) {
8020 break
8021 }
8022 v.reset(OpMIPS64MOVVstore)
8023 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8024 v0.AuxInt = int64ToAuxInt(0)
8025 v.AddArg3(ptr, v0, mem)
8026 return true
8027 }
8028
8029
8030
8031 for {
8032 if auxIntToInt64(v.AuxInt) != 8 {
8033 break
8034 }
8035 t := auxToType(v.Aux)
8036 ptr := v_0
8037 mem := v_1
8038 if !(t.Alignment()%4 == 0) {
8039 break
8040 }
8041 v.reset(OpMIPS64MOVWstore)
8042 v.AuxInt = int32ToAuxInt(4)
8043 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8044 v0.AuxInt = int64ToAuxInt(0)
8045 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8046 v1.AuxInt = int32ToAuxInt(0)
8047 v1.AddArg3(ptr, v0, mem)
8048 v.AddArg3(ptr, v0, v1)
8049 return true
8050 }
8051
8052
8053
8054 for {
8055 if auxIntToInt64(v.AuxInt) != 8 {
8056 break
8057 }
8058 t := auxToType(v.Aux)
8059 ptr := v_0
8060 mem := v_1
8061 if !(t.Alignment()%2 == 0) {
8062 break
8063 }
8064 v.reset(OpMIPS64MOVHstore)
8065 v.AuxInt = int32ToAuxInt(6)
8066 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8067 v0.AuxInt = int64ToAuxInt(0)
8068 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8069 v1.AuxInt = int32ToAuxInt(4)
8070 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8071 v2.AuxInt = int32ToAuxInt(2)
8072 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8073 v3.AuxInt = int32ToAuxInt(0)
8074 v3.AddArg3(ptr, v0, mem)
8075 v2.AddArg3(ptr, v0, v3)
8076 v1.AddArg3(ptr, v0, v2)
8077 v.AddArg3(ptr, v0, v1)
8078 return true
8079 }
8080
8081
8082 for {
8083 if auxIntToInt64(v.AuxInt) != 3 {
8084 break
8085 }
8086 ptr := v_0
8087 mem := v_1
8088 v.reset(OpMIPS64MOVBstore)
8089 v.AuxInt = int32ToAuxInt(2)
8090 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8091 v0.AuxInt = int64ToAuxInt(0)
8092 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8093 v1.AuxInt = int32ToAuxInt(1)
8094 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8095 v2.AuxInt = int32ToAuxInt(0)
8096 v2.AddArg3(ptr, v0, mem)
8097 v1.AddArg3(ptr, v0, v2)
8098 v.AddArg3(ptr, v0, v1)
8099 return true
8100 }
8101
8102
8103
8104 for {
8105 if auxIntToInt64(v.AuxInt) != 6 {
8106 break
8107 }
8108 t := auxToType(v.Aux)
8109 ptr := v_0
8110 mem := v_1
8111 if !(t.Alignment()%2 == 0) {
8112 break
8113 }
8114 v.reset(OpMIPS64MOVHstore)
8115 v.AuxInt = int32ToAuxInt(4)
8116 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8117 v0.AuxInt = int64ToAuxInt(0)
8118 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8119 v1.AuxInt = int32ToAuxInt(2)
8120 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8121 v2.AuxInt = int32ToAuxInt(0)
8122 v2.AddArg3(ptr, v0, mem)
8123 v1.AddArg3(ptr, v0, v2)
8124 v.AddArg3(ptr, v0, v1)
8125 return true
8126 }
8127
8128
8129
8130 for {
8131 if auxIntToInt64(v.AuxInt) != 12 {
8132 break
8133 }
8134 t := auxToType(v.Aux)
8135 ptr := v_0
8136 mem := v_1
8137 if !(t.Alignment()%4 == 0) {
8138 break
8139 }
8140 v.reset(OpMIPS64MOVWstore)
8141 v.AuxInt = int32ToAuxInt(8)
8142 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8143 v0.AuxInt = int64ToAuxInt(0)
8144 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8145 v1.AuxInt = int32ToAuxInt(4)
8146 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8147 v2.AuxInt = int32ToAuxInt(0)
8148 v2.AddArg3(ptr, v0, mem)
8149 v1.AddArg3(ptr, v0, v2)
8150 v.AddArg3(ptr, v0, v1)
8151 return true
8152 }
8153
8154
8155
8156 for {
8157 if auxIntToInt64(v.AuxInt) != 16 {
8158 break
8159 }
8160 t := auxToType(v.Aux)
8161 ptr := v_0
8162 mem := v_1
8163 if !(t.Alignment()%8 == 0) {
8164 break
8165 }
8166 v.reset(OpMIPS64MOVVstore)
8167 v.AuxInt = int32ToAuxInt(8)
8168 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8169 v0.AuxInt = int64ToAuxInt(0)
8170 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8171 v1.AuxInt = int32ToAuxInt(0)
8172 v1.AddArg3(ptr, v0, mem)
8173 v.AddArg3(ptr, v0, v1)
8174 return true
8175 }
8176
8177
8178
8179 for {
8180 if auxIntToInt64(v.AuxInt) != 24 {
8181 break
8182 }
8183 t := auxToType(v.Aux)
8184 ptr := v_0
8185 mem := v_1
8186 if !(t.Alignment()%8 == 0) {
8187 break
8188 }
8189 v.reset(OpMIPS64MOVVstore)
8190 v.AuxInt = int32ToAuxInt(16)
8191 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8192 v0.AuxInt = int64ToAuxInt(0)
8193 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8194 v1.AuxInt = int32ToAuxInt(8)
8195 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8196 v2.AuxInt = int32ToAuxInt(0)
8197 v2.AddArg3(ptr, v0, mem)
8198 v1.AddArg3(ptr, v0, v2)
8199 v.AddArg3(ptr, v0, v1)
8200 return true
8201 }
8202 // match: (Zero [s] {t} ptr mem)
8203 // cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0
8204 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
8205 for {
8206 s := auxIntToInt64(v.AuxInt)
8207 t := auxToType(v.Aux)
8208 ptr := v_0
8209 mem := v_1
8210 if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0) {
8211 break
8212 }
8213 v.reset(OpMIPS64DUFFZERO)
8214 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
8215 v.AddArg2(ptr, mem)
8216 return true
8217 }
8218 // match: (Zero [s] {t} ptr mem)
8219 // cond: s > 8*128 || t.Alignment()%8 != 0
8220 // result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)
8221 for {
8222 s := auxIntToInt64(v.AuxInt)
8223 t := auxToType(v.Aux)
8224 ptr := v_0
8225 mem := v_1
8226 if !(s > 8*128 || t.Alignment()%8 != 0) {
8227 break
8228 }
8229 v.reset(OpMIPS64LoweredZero)
8230 v.AuxInt = int64ToAuxInt(t.Alignment())
8231 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
8232 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8233 v0.AddArg(ptr)
8234 v.AddArg3(ptr, v0, mem)
8235 return true
8236 }
8237 return false
8238 }
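// rewriteBlockMIPS64 rewrites control-flow blocks: a generic If becomes NE on
// its condition; FP flag checks select FPT/FPF branches; a control wrapped in
// (XORconst [1] ...) flips EQ and NE; comparisons of a value against zero use
// the dedicated GEZ/GTZ/LEZ/LTZ blocks; and constant controls collapse the
// block to BlockFirst, swapping successors when the constant makes the branch
// condition false.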
8239 func rewriteBlockMIPS64(b *Block) bool {
8240 switch b.Kind {
8241 case BlockMIPS64EQ:
8242
8243
8244 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8245 v_0 := b.Controls[0]
8246 cmp := v_0.Args[0]
8247 b.resetWithControl(BlockMIPS64FPF, cmp)
8248 return true
8249 }
8250
8251
8252 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8253 v_0 := b.Controls[0]
8254 cmp := v_0.Args[0]
8255 b.resetWithControl(BlockMIPS64FPT, cmp)
8256 return true
8257 }
8258
8259
8260 for b.Controls[0].Op == OpMIPS64XORconst {
8261 v_0 := b.Controls[0]
8262 if auxIntToInt64(v_0.AuxInt) != 1 {
8263 break
8264 }
8265 cmp := v_0.Args[0]
8266 if cmp.Op != OpMIPS64SGT {
8267 break
8268 }
8269 b.resetWithControl(BlockMIPS64NE, cmp)
8270 return true
8271 }
8272
8273
8274 for b.Controls[0].Op == OpMIPS64XORconst {
8275 v_0 := b.Controls[0]
8276 if auxIntToInt64(v_0.AuxInt) != 1 {
8277 break
8278 }
8279 cmp := v_0.Args[0]
8280 if cmp.Op != OpMIPS64SGTU {
8281 break
8282 }
8283 b.resetWithControl(BlockMIPS64NE, cmp)
8284 return true
8285 }
8286
8287
8288 for b.Controls[0].Op == OpMIPS64XORconst {
8289 v_0 := b.Controls[0]
8290 if auxIntToInt64(v_0.AuxInt) != 1 {
8291 break
8292 }
8293 cmp := v_0.Args[0]
8294 if cmp.Op != OpMIPS64SGTconst {
8295 break
8296 }
8297 b.resetWithControl(BlockMIPS64NE, cmp)
8298 return true
8299 }
8300
8301
8302 for b.Controls[0].Op == OpMIPS64XORconst {
8303 v_0 := b.Controls[0]
8304 if auxIntToInt64(v_0.AuxInt) != 1 {
8305 break
8306 }
8307 cmp := v_0.Args[0]
8308 if cmp.Op != OpMIPS64SGTUconst {
8309 break
8310 }
8311 b.resetWithControl(BlockMIPS64NE, cmp)
8312 return true
8313 }
8314
8315
8316 for b.Controls[0].Op == OpMIPS64SGTUconst {
8317 v_0 := b.Controls[0]
8318 if auxIntToInt64(v_0.AuxInt) != 1 {
8319 break
8320 }
8321 x := v_0.Args[0]
8322 b.resetWithControl(BlockMIPS64NE, x)
8323 return true
8324 }
8325
8326
8327 for b.Controls[0].Op == OpMIPS64SGTU {
8328 v_0 := b.Controls[0]
8329 _ = v_0.Args[1]
8330 x := v_0.Args[0]
8331 v_0_1 := v_0.Args[1]
8332 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8333 break
8334 }
8335 b.resetWithControl(BlockMIPS64EQ, x)
8336 return true
8337 }
8338
8339
8340 for b.Controls[0].Op == OpMIPS64SGTconst {
8341 v_0 := b.Controls[0]
8342 if auxIntToInt64(v_0.AuxInt) != 0 {
8343 break
8344 }
8345 x := v_0.Args[0]
8346 b.resetWithControl(BlockMIPS64GEZ, x)
8347 return true
8348 }
8349
8350
8351 for b.Controls[0].Op == OpMIPS64SGT {
8352 v_0 := b.Controls[0]
8353 _ = v_0.Args[1]
8354 x := v_0.Args[0]
8355 v_0_1 := v_0.Args[1]
8356 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8357 break
8358 }
8359 b.resetWithControl(BlockMIPS64LEZ, x)
8360 return true
8361 }
8362
8363
8364 for b.Controls[0].Op == OpMIPS64MOVVconst {
8365 v_0 := b.Controls[0]
8366 if auxIntToInt64(v_0.AuxInt) != 0 {
8367 break
8368 }
8369 b.Reset(BlockFirst)
8370 return true
8371 }
8372
8373
8374
8375 for b.Controls[0].Op == OpMIPS64MOVVconst {
8376 v_0 := b.Controls[0]
8377 c := auxIntToInt64(v_0.AuxInt)
8378 if !(c != 0) {
8379 break
8380 }
8381 b.Reset(BlockFirst)
8382 b.swapSuccessors()
8383 return true
8384 }
8385 case BlockMIPS64GEZ:
8386
8387
8388
8389 for b.Controls[0].Op == OpMIPS64MOVVconst {
8390 v_0 := b.Controls[0]
8391 c := auxIntToInt64(v_0.AuxInt)
8392 if !(c >= 0) {
8393 break
8394 }
8395 b.Reset(BlockFirst)
8396 return true
8397 }
8398
8399
8400
8401 for b.Controls[0].Op == OpMIPS64MOVVconst {
8402 v_0 := b.Controls[0]
8403 c := auxIntToInt64(v_0.AuxInt)
8404 if !(c < 0) {
8405 break
8406 }
8407 b.Reset(BlockFirst)
8408 b.swapSuccessors()
8409 return true
8410 }
8411 case BlockMIPS64GTZ:
8412
8413
8414
8415 for b.Controls[0].Op == OpMIPS64MOVVconst {
8416 v_0 := b.Controls[0]
8417 c := auxIntToInt64(v_0.AuxInt)
8418 if !(c > 0) {
8419 break
8420 }
8421 b.Reset(BlockFirst)
8422 return true
8423 }
8424
8425
8426
8427 for b.Controls[0].Op == OpMIPS64MOVVconst {
8428 v_0 := b.Controls[0]
8429 c := auxIntToInt64(v_0.AuxInt)
8430 if !(c <= 0) {
8431 break
8432 }
8433 b.Reset(BlockFirst)
8434 b.swapSuccessors()
8435 return true
8436 }
8437 case BlockIf:
8438
8439
8440 for {
8441 cond := b.Controls[0]
8442 b.resetWithControl(BlockMIPS64NE, cond)
8443 return true
8444 }
8445 case BlockMIPS64LEZ:
8446
8447
8448
8449 for b.Controls[0].Op == OpMIPS64MOVVconst {
8450 v_0 := b.Controls[0]
8451 c := auxIntToInt64(v_0.AuxInt)
8452 if !(c <= 0) {
8453 break
8454 }
8455 b.Reset(BlockFirst)
8456 return true
8457 }
8458
8459
8460
8461 for b.Controls[0].Op == OpMIPS64MOVVconst {
8462 v_0 := b.Controls[0]
8463 c := auxIntToInt64(v_0.AuxInt)
8464 if !(c > 0) {
8465 break
8466 }
8467 b.Reset(BlockFirst)
8468 b.swapSuccessors()
8469 return true
8470 }
8471 case BlockMIPS64LTZ:
8472
8473
8474
8475 for b.Controls[0].Op == OpMIPS64MOVVconst {
8476 v_0 := b.Controls[0]
8477 c := auxIntToInt64(v_0.AuxInt)
8478 if !(c < 0) {
8479 break
8480 }
8481 b.Reset(BlockFirst)
8482 return true
8483 }
8484
8485
8486
8487 for b.Controls[0].Op == OpMIPS64MOVVconst {
8488 v_0 := b.Controls[0]
8489 c := auxIntToInt64(v_0.AuxInt)
8490 if !(c >= 0) {
8491 break
8492 }
8493 b.Reset(BlockFirst)
8494 b.swapSuccessors()
8495 return true
8496 }
8497 case BlockMIPS64NE:
8498
8499
8500 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8501 v_0 := b.Controls[0]
8502 cmp := v_0.Args[0]
8503 b.resetWithControl(BlockMIPS64FPT, cmp)
8504 return true
8505 }
8506
8507
8508 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8509 v_0 := b.Controls[0]
8510 cmp := v_0.Args[0]
8511 b.resetWithControl(BlockMIPS64FPF, cmp)
8512 return true
8513 }
8514
8515
8516 for b.Controls[0].Op == OpMIPS64XORconst {
8517 v_0 := b.Controls[0]
8518 if auxIntToInt64(v_0.AuxInt) != 1 {
8519 break
8520 }
8521 cmp := v_0.Args[0]
8522 if cmp.Op != OpMIPS64SGT {
8523 break
8524 }
8525 b.resetWithControl(BlockMIPS64EQ, cmp)
8526 return true
8527 }
8528
8529
8530 for b.Controls[0].Op == OpMIPS64XORconst {
8531 v_0 := b.Controls[0]
8532 if auxIntToInt64(v_0.AuxInt) != 1 {
8533 break
8534 }
8535 cmp := v_0.Args[0]
8536 if cmp.Op != OpMIPS64SGTU {
8537 break
8538 }
8539 b.resetWithControl(BlockMIPS64EQ, cmp)
8540 return true
8541 }
8542
8543
8544 for b.Controls[0].Op == OpMIPS64XORconst {
8545 v_0 := b.Controls[0]
8546 if auxIntToInt64(v_0.AuxInt) != 1 {
8547 break
8548 }
8549 cmp := v_0.Args[0]
8550 if cmp.Op != OpMIPS64SGTconst {
8551 break
8552 }
8553 b.resetWithControl(BlockMIPS64EQ, cmp)
8554 return true
8555 }
8556
8557
8558 for b.Controls[0].Op == OpMIPS64XORconst {
8559 v_0 := b.Controls[0]
8560 if auxIntToInt64(v_0.AuxInt) != 1 {
8561 break
8562 }
8563 cmp := v_0.Args[0]
8564 if cmp.Op != OpMIPS64SGTUconst {
8565 break
8566 }
8567 b.resetWithControl(BlockMIPS64EQ, cmp)
8568 return true
8569 }
8570
8571
8572 for b.Controls[0].Op == OpMIPS64SGTUconst {
8573 v_0 := b.Controls[0]
8574 if auxIntToInt64(v_0.AuxInt) != 1 {
8575 break
8576 }
8577 x := v_0.Args[0]
8578 b.resetWithControl(BlockMIPS64EQ, x)
8579 return true
8580 }
8581
8582
8583 for b.Controls[0].Op == OpMIPS64SGTU {
8584 v_0 := b.Controls[0]
8585 _ = v_0.Args[1]
8586 x := v_0.Args[0]
8587 v_0_1 := v_0.Args[1]
8588 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8589 break
8590 }
8591 b.resetWithControl(BlockMIPS64NE, x)
8592 return true
8593 }
8594
8595
8596 for b.Controls[0].Op == OpMIPS64SGTconst {
8597 v_0 := b.Controls[0]
8598 if auxIntToInt64(v_0.AuxInt) != 0 {
8599 break
8600 }
8601 x := v_0.Args[0]
8602 b.resetWithControl(BlockMIPS64LTZ, x)
8603 return true
8604 }
8605
8606
8607 for b.Controls[0].Op == OpMIPS64SGT {
8608 v_0 := b.Controls[0]
8609 _ = v_0.Args[1]
8610 x := v_0.Args[0]
8611 v_0_1 := v_0.Args[1]
8612 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8613 break
8614 }
8615 b.resetWithControl(BlockMIPS64GTZ, x)
8616 return true
8617 }
8618
8619
8620 for b.Controls[0].Op == OpMIPS64MOVVconst {
8621 v_0 := b.Controls[0]
8622 if auxIntToInt64(v_0.AuxInt) != 0 {
8623 break
8624 }
8625 b.Reset(BlockFirst)
8626 b.swapSuccessors()
8627 return true
8628 }
8629
8630
8631
8632 for b.Controls[0].Op == OpMIPS64MOVVconst {
8633 v_0 := b.Controls[0]
8634 c := auxIntToInt64(v_0.AuxInt)
8635 if !(c != 0) {
8636 break
8637 }
8638 b.Reset(BlockFirst)
8639 return true
8640 }
8641 }
8642 return false
8643 }
8644