// Code generated from _gen/MIPS64.rules using 'go generate'; DO NOT EDIT.

3 package ssa
4
5 import "cmd/compile/internal/types"
6
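// rewriteValueMIPS64 lowers a single generic SSA value to its MIPS64 form.
// Trivial lowerings just swap v.Op in place; anything that needs new values,
// conditions, or aux handling is delegated to a per-op helper. The return
// value reports whether a rewrite was applied.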
7 func rewriteValueMIPS64(v *Value) bool {
8 switch v.Op {
9 case OpAbs:
10 v.Op = OpMIPS64ABSD
11 return true
12 case OpAdd16:
13 v.Op = OpMIPS64ADDV
14 return true
15 case OpAdd32:
16 v.Op = OpMIPS64ADDV
17 return true
18 case OpAdd32F:
19 v.Op = OpMIPS64ADDF
20 return true
21 case OpAdd64:
22 v.Op = OpMIPS64ADDV
23 return true
24 case OpAdd64F:
25 v.Op = OpMIPS64ADDD
26 return true
27 case OpAdd8:
28 v.Op = OpMIPS64ADDV
29 return true
30 case OpAddPtr:
31 v.Op = OpMIPS64ADDV
32 return true
33 case OpAddr:
34 return rewriteValueMIPS64_OpAddr(v)
35 case OpAnd16:
36 v.Op = OpMIPS64AND
37 return true
38 case OpAnd32:
39 v.Op = OpMIPS64AND
40 return true
41 case OpAnd64:
42 v.Op = OpMIPS64AND
43 return true
44 case OpAnd8:
45 v.Op = OpMIPS64AND
46 return true
47 case OpAndB:
48 v.Op = OpMIPS64AND
49 return true
50 case OpAtomicAdd32:
51 v.Op = OpMIPS64LoweredAtomicAdd32
52 return true
53 case OpAtomicAdd64:
54 v.Op = OpMIPS64LoweredAtomicAdd64
55 return true
56 case OpAtomicAnd32:
57 v.Op = OpMIPS64LoweredAtomicAnd32
58 return true
59 case OpAtomicAnd8:
60 return rewriteValueMIPS64_OpAtomicAnd8(v)
61 case OpAtomicCompareAndSwap32:
62 return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
63 case OpAtomicCompareAndSwap64:
64 v.Op = OpMIPS64LoweredAtomicCas64
65 return true
66 case OpAtomicExchange32:
67 v.Op = OpMIPS64LoweredAtomicExchange32
68 return true
69 case OpAtomicExchange64:
70 v.Op = OpMIPS64LoweredAtomicExchange64
71 return true
72 case OpAtomicLoad32:
73 v.Op = OpMIPS64LoweredAtomicLoad32
74 return true
75 case OpAtomicLoad64:
76 v.Op = OpMIPS64LoweredAtomicLoad64
77 return true
78 case OpAtomicLoad8:
79 v.Op = OpMIPS64LoweredAtomicLoad8
80 return true
81 case OpAtomicLoadPtr:
82 v.Op = OpMIPS64LoweredAtomicLoad64
83 return true
84 case OpAtomicOr32:
85 v.Op = OpMIPS64LoweredAtomicOr32
86 return true
87 case OpAtomicOr8:
88 return rewriteValueMIPS64_OpAtomicOr8(v)
89 case OpAtomicStore32:
90 v.Op = OpMIPS64LoweredAtomicStore32
91 return true
92 case OpAtomicStore64:
93 v.Op = OpMIPS64LoweredAtomicStore64
94 return true
95 case OpAtomicStore8:
96 v.Op = OpMIPS64LoweredAtomicStore8
97 return true
98 case OpAtomicStorePtrNoWB:
99 v.Op = OpMIPS64LoweredAtomicStore64
100 return true
101 case OpAvg64u:
102 return rewriteValueMIPS64_OpAvg64u(v)
103 case OpClosureCall:
104 v.Op = OpMIPS64CALLclosure
105 return true
106 case OpCom16:
107 return rewriteValueMIPS64_OpCom16(v)
108 case OpCom32:
109 return rewriteValueMIPS64_OpCom32(v)
110 case OpCom64:
111 return rewriteValueMIPS64_OpCom64(v)
112 case OpCom8:
113 return rewriteValueMIPS64_OpCom8(v)
114 case OpConst16:
115 return rewriteValueMIPS64_OpConst16(v)
116 case OpConst32:
117 return rewriteValueMIPS64_OpConst32(v)
118 case OpConst32F:
119 return rewriteValueMIPS64_OpConst32F(v)
120 case OpConst64:
121 return rewriteValueMIPS64_OpConst64(v)
122 case OpConst64F:
123 return rewriteValueMIPS64_OpConst64F(v)
124 case OpConst8:
125 return rewriteValueMIPS64_OpConst8(v)
126 case OpConstBool:
127 return rewriteValueMIPS64_OpConstBool(v)
128 case OpConstNil:
129 return rewriteValueMIPS64_OpConstNil(v)
130 case OpCvt32Fto32:
131 v.Op = OpMIPS64TRUNCFW
132 return true
133 case OpCvt32Fto64:
134 v.Op = OpMIPS64TRUNCFV
135 return true
136 case OpCvt32Fto64F:
137 v.Op = OpMIPS64MOVFD
138 return true
139 case OpCvt32to32F:
140 v.Op = OpMIPS64MOVWF
141 return true
142 case OpCvt32to64F:
143 v.Op = OpMIPS64MOVWD
144 return true
145 case OpCvt64Fto32:
146 v.Op = OpMIPS64TRUNCDW
147 return true
148 case OpCvt64Fto32F:
149 v.Op = OpMIPS64MOVDF
150 return true
151 case OpCvt64Fto64:
152 v.Op = OpMIPS64TRUNCDV
153 return true
154 case OpCvt64to32F:
155 v.Op = OpMIPS64MOVVF
156 return true
157 case OpCvt64to64F:
158 v.Op = OpMIPS64MOVVD
159 return true
160 case OpCvtBoolToUint8:
161 v.Op = OpCopy
162 return true
163 case OpDiv16:
164 return rewriteValueMIPS64_OpDiv16(v)
165 case OpDiv16u:
166 return rewriteValueMIPS64_OpDiv16u(v)
167 case OpDiv32:
168 return rewriteValueMIPS64_OpDiv32(v)
169 case OpDiv32F:
170 v.Op = OpMIPS64DIVF
171 return true
172 case OpDiv32u:
173 return rewriteValueMIPS64_OpDiv32u(v)
174 case OpDiv64:
175 return rewriteValueMIPS64_OpDiv64(v)
176 case OpDiv64F:
177 v.Op = OpMIPS64DIVD
178 return true
179 case OpDiv64u:
180 return rewriteValueMIPS64_OpDiv64u(v)
181 case OpDiv8:
182 return rewriteValueMIPS64_OpDiv8(v)
183 case OpDiv8u:
184 return rewriteValueMIPS64_OpDiv8u(v)
185 case OpEq16:
186 return rewriteValueMIPS64_OpEq16(v)
187 case OpEq32:
188 return rewriteValueMIPS64_OpEq32(v)
189 case OpEq32F:
190 return rewriteValueMIPS64_OpEq32F(v)
191 case OpEq64:
192 return rewriteValueMIPS64_OpEq64(v)
193 case OpEq64F:
194 return rewriteValueMIPS64_OpEq64F(v)
195 case OpEq8:
196 return rewriteValueMIPS64_OpEq8(v)
197 case OpEqB:
198 return rewriteValueMIPS64_OpEqB(v)
199 case OpEqPtr:
200 return rewriteValueMIPS64_OpEqPtr(v)
201 case OpGetCallerPC:
202 v.Op = OpMIPS64LoweredGetCallerPC
203 return true
204 case OpGetCallerSP:
205 v.Op = OpMIPS64LoweredGetCallerSP
206 return true
207 case OpGetClosurePtr:
208 v.Op = OpMIPS64LoweredGetClosurePtr
209 return true
210 case OpHmul32:
211 return rewriteValueMIPS64_OpHmul32(v)
212 case OpHmul32u:
213 return rewriteValueMIPS64_OpHmul32u(v)
214 case OpHmul64:
215 return rewriteValueMIPS64_OpHmul64(v)
216 case OpHmul64u:
217 return rewriteValueMIPS64_OpHmul64u(v)
218 case OpInterCall:
219 v.Op = OpMIPS64CALLinter
220 return true
221 case OpIsInBounds:
222 return rewriteValueMIPS64_OpIsInBounds(v)
223 case OpIsNonNil:
224 return rewriteValueMIPS64_OpIsNonNil(v)
225 case OpIsSliceInBounds:
226 return rewriteValueMIPS64_OpIsSliceInBounds(v)
227 case OpLeq16:
228 return rewriteValueMIPS64_OpLeq16(v)
229 case OpLeq16U:
230 return rewriteValueMIPS64_OpLeq16U(v)
231 case OpLeq32:
232 return rewriteValueMIPS64_OpLeq32(v)
233 case OpLeq32F:
234 return rewriteValueMIPS64_OpLeq32F(v)
235 case OpLeq32U:
236 return rewriteValueMIPS64_OpLeq32U(v)
237 case OpLeq64:
238 return rewriteValueMIPS64_OpLeq64(v)
239 case OpLeq64F:
240 return rewriteValueMIPS64_OpLeq64F(v)
241 case OpLeq64U:
242 return rewriteValueMIPS64_OpLeq64U(v)
243 case OpLeq8:
244 return rewriteValueMIPS64_OpLeq8(v)
245 case OpLeq8U:
246 return rewriteValueMIPS64_OpLeq8U(v)
247 case OpLess16:
248 return rewriteValueMIPS64_OpLess16(v)
249 case OpLess16U:
250 return rewriteValueMIPS64_OpLess16U(v)
251 case OpLess32:
252 return rewriteValueMIPS64_OpLess32(v)
253 case OpLess32F:
254 return rewriteValueMIPS64_OpLess32F(v)
255 case OpLess32U:
256 return rewriteValueMIPS64_OpLess32U(v)
257 case OpLess64:
258 return rewriteValueMIPS64_OpLess64(v)
259 case OpLess64F:
260 return rewriteValueMIPS64_OpLess64F(v)
261 case OpLess64U:
262 return rewriteValueMIPS64_OpLess64U(v)
263 case OpLess8:
264 return rewriteValueMIPS64_OpLess8(v)
265 case OpLess8U:
266 return rewriteValueMIPS64_OpLess8U(v)
267 case OpLoad:
268 return rewriteValueMIPS64_OpLoad(v)
269 case OpLocalAddr:
270 return rewriteValueMIPS64_OpLocalAddr(v)
271 case OpLsh16x16:
272 return rewriteValueMIPS64_OpLsh16x16(v)
273 case OpLsh16x32:
274 return rewriteValueMIPS64_OpLsh16x32(v)
275 case OpLsh16x64:
276 return rewriteValueMIPS64_OpLsh16x64(v)
277 case OpLsh16x8:
278 return rewriteValueMIPS64_OpLsh16x8(v)
279 case OpLsh32x16:
280 return rewriteValueMIPS64_OpLsh32x16(v)
281 case OpLsh32x32:
282 return rewriteValueMIPS64_OpLsh32x32(v)
283 case OpLsh32x64:
284 return rewriteValueMIPS64_OpLsh32x64(v)
285 case OpLsh32x8:
286 return rewriteValueMIPS64_OpLsh32x8(v)
287 case OpLsh64x16:
288 return rewriteValueMIPS64_OpLsh64x16(v)
289 case OpLsh64x32:
290 return rewriteValueMIPS64_OpLsh64x32(v)
291 case OpLsh64x64:
292 return rewriteValueMIPS64_OpLsh64x64(v)
293 case OpLsh64x8:
294 return rewriteValueMIPS64_OpLsh64x8(v)
295 case OpLsh8x16:
296 return rewriteValueMIPS64_OpLsh8x16(v)
297 case OpLsh8x32:
298 return rewriteValueMIPS64_OpLsh8x32(v)
299 case OpLsh8x64:
300 return rewriteValueMIPS64_OpLsh8x64(v)
301 case OpLsh8x8:
302 return rewriteValueMIPS64_OpLsh8x8(v)
303 case OpMIPS64ADDV:
304 return rewriteValueMIPS64_OpMIPS64ADDV(v)
305 case OpMIPS64ADDVconst:
306 return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
307 case OpMIPS64AND:
308 return rewriteValueMIPS64_OpMIPS64AND(v)
309 case OpMIPS64ANDconst:
310 return rewriteValueMIPS64_OpMIPS64ANDconst(v)
311 case OpMIPS64LoweredAtomicAdd32:
312 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
313 case OpMIPS64LoweredAtomicAdd64:
314 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
315 case OpMIPS64LoweredAtomicStore32:
316 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
317 case OpMIPS64LoweredAtomicStore64:
318 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
319 case OpMIPS64LoweredPanicBoundsCR:
320 return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v)
321 case OpMIPS64LoweredPanicBoundsRC:
322 return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v)
323 case OpMIPS64LoweredPanicBoundsRR:
324 return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v)
325 case OpMIPS64MOVBUload:
326 return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
327 case OpMIPS64MOVBUreg:
328 return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
329 case OpMIPS64MOVBload:
330 return rewriteValueMIPS64_OpMIPS64MOVBload(v)
331 case OpMIPS64MOVBreg:
332 return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
333 case OpMIPS64MOVBstore:
334 return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
335 case OpMIPS64MOVDload:
336 return rewriteValueMIPS64_OpMIPS64MOVDload(v)
337 case OpMIPS64MOVDstore:
338 return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
339 case OpMIPS64MOVFload:
340 return rewriteValueMIPS64_OpMIPS64MOVFload(v)
341 case OpMIPS64MOVFstore:
342 return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
343 case OpMIPS64MOVHUload:
344 return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
345 case OpMIPS64MOVHUreg:
346 return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
347 case OpMIPS64MOVHload:
348 return rewriteValueMIPS64_OpMIPS64MOVHload(v)
349 case OpMIPS64MOVHreg:
350 return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
351 case OpMIPS64MOVHstore:
352 return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
353 case OpMIPS64MOVVload:
354 return rewriteValueMIPS64_OpMIPS64MOVVload(v)
355 case OpMIPS64MOVVnop:
356 return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
357 case OpMIPS64MOVVreg:
358 return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
359 case OpMIPS64MOVVstore:
360 return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
361 case OpMIPS64MOVWUload:
362 return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
363 case OpMIPS64MOVWUreg:
364 return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
365 case OpMIPS64MOVWload:
366 return rewriteValueMIPS64_OpMIPS64MOVWload(v)
367 case OpMIPS64MOVWreg:
368 return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
369 case OpMIPS64MOVWstore:
370 return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
371 case OpMIPS64NEGV:
372 return rewriteValueMIPS64_OpMIPS64NEGV(v)
373 case OpMIPS64NOR:
374 return rewriteValueMIPS64_OpMIPS64NOR(v)
375 case OpMIPS64NORconst:
376 return rewriteValueMIPS64_OpMIPS64NORconst(v)
377 case OpMIPS64OR:
378 return rewriteValueMIPS64_OpMIPS64OR(v)
379 case OpMIPS64ORconst:
380 return rewriteValueMIPS64_OpMIPS64ORconst(v)
381 case OpMIPS64SGT:
382 return rewriteValueMIPS64_OpMIPS64SGT(v)
383 case OpMIPS64SGTU:
384 return rewriteValueMIPS64_OpMIPS64SGTU(v)
385 case OpMIPS64SGTUconst:
386 return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
387 case OpMIPS64SGTconst:
388 return rewriteValueMIPS64_OpMIPS64SGTconst(v)
389 case OpMIPS64SLLV:
390 return rewriteValueMIPS64_OpMIPS64SLLV(v)
391 case OpMIPS64SLLVconst:
392 return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
393 case OpMIPS64SRAV:
394 return rewriteValueMIPS64_OpMIPS64SRAV(v)
395 case OpMIPS64SRAVconst:
396 return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
397 case OpMIPS64SRLV:
398 return rewriteValueMIPS64_OpMIPS64SRLV(v)
399 case OpMIPS64SRLVconst:
400 return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
401 case OpMIPS64SUBV:
402 return rewriteValueMIPS64_OpMIPS64SUBV(v)
403 case OpMIPS64SUBVconst:
404 return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
405 case OpMIPS64XOR:
406 return rewriteValueMIPS64_OpMIPS64XOR(v)
407 case OpMIPS64XORconst:
408 return rewriteValueMIPS64_OpMIPS64XORconst(v)
409 case OpMod16:
410 return rewriteValueMIPS64_OpMod16(v)
411 case OpMod16u:
412 return rewriteValueMIPS64_OpMod16u(v)
413 case OpMod32:
414 return rewriteValueMIPS64_OpMod32(v)
415 case OpMod32u:
416 return rewriteValueMIPS64_OpMod32u(v)
417 case OpMod64:
418 return rewriteValueMIPS64_OpMod64(v)
419 case OpMod64u:
420 return rewriteValueMIPS64_OpMod64u(v)
421 case OpMod8:
422 return rewriteValueMIPS64_OpMod8(v)
423 case OpMod8u:
424 return rewriteValueMIPS64_OpMod8u(v)
425 case OpMove:
426 return rewriteValueMIPS64_OpMove(v)
427 case OpMul16:
428 return rewriteValueMIPS64_OpMul16(v)
429 case OpMul32:
430 return rewriteValueMIPS64_OpMul32(v)
431 case OpMul32F:
432 v.Op = OpMIPS64MULF
433 return true
434 case OpMul64:
435 return rewriteValueMIPS64_OpMul64(v)
436 case OpMul64F:
437 v.Op = OpMIPS64MULD
438 return true
439 case OpMul64uhilo:
440 v.Op = OpMIPS64MULVU
441 return true
442 case OpMul8:
443 return rewriteValueMIPS64_OpMul8(v)
444 case OpNeg16:
445 v.Op = OpMIPS64NEGV
446 return true
447 case OpNeg32:
448 v.Op = OpMIPS64NEGV
449 return true
450 case OpNeg32F:
451 v.Op = OpMIPS64NEGF
452 return true
453 case OpNeg64:
454 v.Op = OpMIPS64NEGV
455 return true
456 case OpNeg64F:
457 v.Op = OpMIPS64NEGD
458 return true
459 case OpNeg8:
460 v.Op = OpMIPS64NEGV
461 return true
462 case OpNeq16:
463 return rewriteValueMIPS64_OpNeq16(v)
464 case OpNeq32:
465 return rewriteValueMIPS64_OpNeq32(v)
466 case OpNeq32F:
467 return rewriteValueMIPS64_OpNeq32F(v)
468 case OpNeq64:
469 return rewriteValueMIPS64_OpNeq64(v)
470 case OpNeq64F:
471 return rewriteValueMIPS64_OpNeq64F(v)
472 case OpNeq8:
473 return rewriteValueMIPS64_OpNeq8(v)
474 case OpNeqB:
475 v.Op = OpMIPS64XOR
476 return true
477 case OpNeqPtr:
478 return rewriteValueMIPS64_OpNeqPtr(v)
479 case OpNilCheck:
480 v.Op = OpMIPS64LoweredNilCheck
481 return true
482 case OpNot:
483 return rewriteValueMIPS64_OpNot(v)
484 case OpOffPtr:
485 return rewriteValueMIPS64_OpOffPtr(v)
486 case OpOr16:
487 v.Op = OpMIPS64OR
488 return true
489 case OpOr32:
490 v.Op = OpMIPS64OR
491 return true
492 case OpOr64:
493 v.Op = OpMIPS64OR
494 return true
495 case OpOr8:
496 v.Op = OpMIPS64OR
497 return true
498 case OpOrB:
499 v.Op = OpMIPS64OR
500 return true
501 case OpPanicBounds:
502 v.Op = OpMIPS64LoweredPanicBoundsRR
503 return true
504 case OpPubBarrier:
505 v.Op = OpMIPS64LoweredPubBarrier
506 return true
507 case OpRotateLeft16:
508 return rewriteValueMIPS64_OpRotateLeft16(v)
509 case OpRotateLeft32:
510 return rewriteValueMIPS64_OpRotateLeft32(v)
511 case OpRotateLeft64:
512 return rewriteValueMIPS64_OpRotateLeft64(v)
513 case OpRotateLeft8:
514 return rewriteValueMIPS64_OpRotateLeft8(v)
515 case OpRound32F:
516 v.Op = OpCopy
517 return true
518 case OpRound64F:
519 v.Op = OpCopy
520 return true
521 case OpRsh16Ux16:
522 return rewriteValueMIPS64_OpRsh16Ux16(v)
523 case OpRsh16Ux32:
524 return rewriteValueMIPS64_OpRsh16Ux32(v)
525 case OpRsh16Ux64:
526 return rewriteValueMIPS64_OpRsh16Ux64(v)
527 case OpRsh16Ux8:
528 return rewriteValueMIPS64_OpRsh16Ux8(v)
529 case OpRsh16x16:
530 return rewriteValueMIPS64_OpRsh16x16(v)
531 case OpRsh16x32:
532 return rewriteValueMIPS64_OpRsh16x32(v)
533 case OpRsh16x64:
534 return rewriteValueMIPS64_OpRsh16x64(v)
535 case OpRsh16x8:
536 return rewriteValueMIPS64_OpRsh16x8(v)
537 case OpRsh32Ux16:
538 return rewriteValueMIPS64_OpRsh32Ux16(v)
539 case OpRsh32Ux32:
540 return rewriteValueMIPS64_OpRsh32Ux32(v)
541 case OpRsh32Ux64:
542 return rewriteValueMIPS64_OpRsh32Ux64(v)
543 case OpRsh32Ux8:
544 return rewriteValueMIPS64_OpRsh32Ux8(v)
545 case OpRsh32x16:
546 return rewriteValueMIPS64_OpRsh32x16(v)
547 case OpRsh32x32:
548 return rewriteValueMIPS64_OpRsh32x32(v)
549 case OpRsh32x64:
550 return rewriteValueMIPS64_OpRsh32x64(v)
551 case OpRsh32x8:
552 return rewriteValueMIPS64_OpRsh32x8(v)
553 case OpRsh64Ux16:
554 return rewriteValueMIPS64_OpRsh64Ux16(v)
555 case OpRsh64Ux32:
556 return rewriteValueMIPS64_OpRsh64Ux32(v)
557 case OpRsh64Ux64:
558 return rewriteValueMIPS64_OpRsh64Ux64(v)
559 case OpRsh64Ux8:
560 return rewriteValueMIPS64_OpRsh64Ux8(v)
561 case OpRsh64x16:
562 return rewriteValueMIPS64_OpRsh64x16(v)
563 case OpRsh64x32:
564 return rewriteValueMIPS64_OpRsh64x32(v)
565 case OpRsh64x64:
566 return rewriteValueMIPS64_OpRsh64x64(v)
567 case OpRsh64x8:
568 return rewriteValueMIPS64_OpRsh64x8(v)
569 case OpRsh8Ux16:
570 return rewriteValueMIPS64_OpRsh8Ux16(v)
571 case OpRsh8Ux32:
572 return rewriteValueMIPS64_OpRsh8Ux32(v)
573 case OpRsh8Ux64:
574 return rewriteValueMIPS64_OpRsh8Ux64(v)
575 case OpRsh8Ux8:
576 return rewriteValueMIPS64_OpRsh8Ux8(v)
577 case OpRsh8x16:
578 return rewriteValueMIPS64_OpRsh8x16(v)
579 case OpRsh8x32:
580 return rewriteValueMIPS64_OpRsh8x32(v)
581 case OpRsh8x64:
582 return rewriteValueMIPS64_OpRsh8x64(v)
583 case OpRsh8x8:
584 return rewriteValueMIPS64_OpRsh8x8(v)
585 case OpSelect0:
586 return rewriteValueMIPS64_OpSelect0(v)
587 case OpSelect1:
588 return rewriteValueMIPS64_OpSelect1(v)
589 case OpSignExt16to32:
590 v.Op = OpMIPS64MOVHreg
591 return true
592 case OpSignExt16to64:
593 v.Op = OpMIPS64MOVHreg
594 return true
595 case OpSignExt32to64:
596 v.Op = OpMIPS64MOVWreg
597 return true
598 case OpSignExt8to16:
599 v.Op = OpMIPS64MOVBreg
600 return true
601 case OpSignExt8to32:
602 v.Op = OpMIPS64MOVBreg
603 return true
604 case OpSignExt8to64:
605 v.Op = OpMIPS64MOVBreg
606 return true
607 case OpSlicemask:
608 return rewriteValueMIPS64_OpSlicemask(v)
609 case OpSqrt:
610 v.Op = OpMIPS64SQRTD
611 return true
612 case OpSqrt32:
613 v.Op = OpMIPS64SQRTF
614 return true
615 case OpStaticCall:
616 v.Op = OpMIPS64CALLstatic
617 return true
618 case OpStore:
619 return rewriteValueMIPS64_OpStore(v)
620 case OpSub16:
621 v.Op = OpMIPS64SUBV
622 return true
623 case OpSub32:
624 v.Op = OpMIPS64SUBV
625 return true
626 case OpSub32F:
627 v.Op = OpMIPS64SUBF
628 return true
629 case OpSub64:
630 v.Op = OpMIPS64SUBV
631 return true
632 case OpSub64F:
633 v.Op = OpMIPS64SUBD
634 return true
635 case OpSub8:
636 v.Op = OpMIPS64SUBV
637 return true
638 case OpSubPtr:
639 v.Op = OpMIPS64SUBV
640 return true
641 case OpTailCall:
642 v.Op = OpMIPS64CALLtail
643 return true
644 case OpTrunc16to8:
645 v.Op = OpCopy
646 return true
647 case OpTrunc32to16:
648 v.Op = OpCopy
649 return true
650 case OpTrunc32to8:
651 v.Op = OpCopy
652 return true
653 case OpTrunc64to16:
654 v.Op = OpCopy
655 return true
656 case OpTrunc64to32:
657 v.Op = OpCopy
658 return true
659 case OpTrunc64to8:
660 v.Op = OpCopy
661 return true
662 case OpWB:
663 v.Op = OpMIPS64LoweredWB
664 return true
665 case OpXor16:
666 v.Op = OpMIPS64XOR
667 return true
668 case OpXor32:
669 v.Op = OpMIPS64XOR
670 return true
671 case OpXor64:
672 v.Op = OpMIPS64XOR
673 return true
674 case OpXor8:
675 v.Op = OpMIPS64XOR
676 return true
677 case OpZero:
678 return rewriteValueMIPS64_OpZero(v)
679 case OpZeroExt16to32:
680 v.Op = OpMIPS64MOVHUreg
681 return true
682 case OpZeroExt16to64:
683 v.Op = OpMIPS64MOVHUreg
684 return true
685 case OpZeroExt32to64:
686 v.Op = OpMIPS64MOVWUreg
687 return true
688 case OpZeroExt8to16:
689 v.Op = OpMIPS64MOVBUreg
690 return true
691 case OpZeroExt8to32:
692 v.Op = OpMIPS64MOVBUreg
693 return true
694 case OpZeroExt8to64:
695 v.Op = OpMIPS64MOVBUreg
696 return true
697 }
698 return false
699 }
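// The rewriteValueMIPS64_Op* helpers below each apply the rewrite rules for a
// single op, trying them in order and reporting whether one matched.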
700 func rewriteValueMIPS64_OpAddr(v *Value) bool {
701 v_0 := v.Args[0]
// match: (Addr {sym} base)
// result: (MOVVaddr {sym} base)
704 for {
705 sym := auxToSym(v.Aux)
706 base := v_0
707 v.reset(OpMIPS64MOVVaddr)
708 v.Aux = symToAux(sym)
709 v.AddArg(base)
710 return true
711 }
712 }
713 func rewriteValueMIPS64_OpAtomicAnd8(v *Value) bool {
714 v_2 := v.Args[2]
715 v_1 := v.Args[1]
716 v_0 := v.Args[0]
717 b := v.Block
718 config := b.Func.Config
719 typ := &b.Func.Config.Types
// match: (AtomicAnd8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) (NORconst [0] <typ.UInt64> (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))))) mem)
723 for {
724 ptr := v_0
725 val := v_1
726 mem := v_2
727 if !(!config.BigEndian) {
728 break
729 }
730 v.reset(OpMIPS64LoweredAtomicAnd32)
731 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
732 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
733 v1.AuxInt = int64ToAuxInt(^3)
734 v0.AddArg2(v1, ptr)
735 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
736 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
737 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
738 v4.AddArg(val)
739 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
740 v5.AuxInt = int64ToAuxInt(3)
741 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
742 v6.AuxInt = int64ToAuxInt(3)
743 v6.AddArg(ptr)
744 v5.AddArg(v6)
745 v3.AddArg2(v4, v5)
746 v7 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
747 v7.AuxInt = int64ToAuxInt(0)
748 v8 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
749 v9 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
750 v9.AuxInt = int64ToAuxInt(0xff)
751 v8.AddArg2(v9, v5)
752 v7.AddArg(v8)
753 v2.AddArg2(v3, v7)
754 v.AddArg3(v0, v2, mem)
755 return true
756 }
// match: (AtomicAnd8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) (NORconst [0] <typ.UInt64> (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))))) mem)
760 for {
761 ptr := v_0
762 val := v_1
763 mem := v_2
764 if !(config.BigEndian) {
765 break
766 }
767 v.reset(OpMIPS64LoweredAtomicAnd32)
768 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
769 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
770 v1.AuxInt = int64ToAuxInt(^3)
771 v0.AddArg2(v1, ptr)
772 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
773 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
774 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
775 v4.AddArg(val)
776 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
777 v5.AuxInt = int64ToAuxInt(3)
778 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
779 v6.AuxInt = int64ToAuxInt(3)
780 v7 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
781 v7.AuxInt = int64ToAuxInt(3)
782 v7.AddArg(ptr)
783 v6.AddArg(v7)
784 v5.AddArg(v6)
785 v3.AddArg2(v4, v5)
786 v8 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
787 v8.AuxInt = int64ToAuxInt(0)
788 v9 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
789 v10 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
790 v10.AuxInt = int64ToAuxInt(0xff)
791 v9.AddArg2(v10, v5)
792 v8.AddArg(v9)
793 v2.AddArg2(v3, v8)
794 v.AddArg3(v0, v2, mem)
795 return true
796 }
797 return false
798 }
799 func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
800 v_3 := v.Args[3]
801 v_2 := v.Args[2]
802 v_1 := v.Args[1]
803 v_0 := v.Args[0]
804 b := v.Block
805 typ := &b.Func.Config.Types
// match: (AtomicCompareAndSwap32 ptr old new mem)
// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
808 for {
809 ptr := v_0
810 old := v_1
811 new := v_2
812 mem := v_3
813 v.reset(OpMIPS64LoweredAtomicCas32)
814 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
815 v0.AddArg(old)
816 v.AddArg4(ptr, v0, new, mem)
817 return true
818 }
819 }
820 func rewriteValueMIPS64_OpAtomicOr8(v *Value) bool {
821 v_2 := v.Args[2]
822 v_1 := v.Args[1]
823 v_0 := v.Args[0]
824 b := v.Block
825 config := b.Func.Config
826 typ := &b.Func.Config.Types
// match: (AtomicOr8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
830 for {
831 ptr := v_0
832 val := v_1
833 mem := v_2
834 if !(!config.BigEndian) {
835 break
836 }
837 v.reset(OpMIPS64LoweredAtomicOr32)
838 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
839 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
840 v1.AuxInt = int64ToAuxInt(^3)
841 v0.AddArg2(v1, ptr)
842 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
843 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
844 v3.AddArg(val)
845 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
846 v4.AuxInt = int64ToAuxInt(3)
847 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
848 v5.AuxInt = int64ToAuxInt(3)
849 v5.AddArg(ptr)
850 v4.AddArg(v5)
851 v2.AddArg2(v3, v4)
852 v.AddArg3(v0, v2, mem)
853 return true
854 }
// match: (AtomicOr8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) mem)
858 for {
859 ptr := v_0
860 val := v_1
861 mem := v_2
862 if !(config.BigEndian) {
863 break
864 }
865 v.reset(OpMIPS64LoweredAtomicOr32)
866 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
867 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
868 v1.AuxInt = int64ToAuxInt(^3)
869 v0.AddArg2(v1, ptr)
870 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
871 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
872 v3.AddArg(val)
873 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
874 v4.AuxInt = int64ToAuxInt(3)
875 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
876 v5.AuxInt = int64ToAuxInt(3)
877 v6 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
878 v6.AuxInt = int64ToAuxInt(3)
879 v6.AddArg(ptr)
880 v5.AddArg(v6)
881 v4.AddArg(v5)
882 v2.AddArg2(v3, v4)
883 v.AddArg3(v0, v2, mem)
884 return true
885 }
886 return false
887 }
888 func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
889 v_1 := v.Args[1]
890 v_0 := v.Args[0]
891 b := v.Block
// match: (Avg64u <t> x y)
// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
894 for {
895 t := v.Type
896 x := v_0
897 y := v_1
898 v.reset(OpMIPS64ADDV)
899 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
900 v0.AuxInt = int64ToAuxInt(1)
901 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
902 v1.AddArg2(x, y)
903 v0.AddArg(v1)
904 v.AddArg2(v0, y)
905 return true
906 }
907 }
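// Com8/Com16/Com32/Com64: MIPS64 has no bitwise-NOT instruction, so ^x is
// lowered to NOR(0, x).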
908 func rewriteValueMIPS64_OpCom16(v *Value) bool {
909 v_0 := v.Args[0]
910 b := v.Block
911 typ := &b.Func.Config.Types
912
913
914 for {
915 x := v_0
916 v.reset(OpMIPS64NOR)
917 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
918 v0.AuxInt = int64ToAuxInt(0)
919 v.AddArg2(v0, x)
920 return true
921 }
922 }
923 func rewriteValueMIPS64_OpCom32(v *Value) bool {
924 v_0 := v.Args[0]
925 b := v.Block
926 typ := &b.Func.Config.Types
927
928
929 for {
930 x := v_0
931 v.reset(OpMIPS64NOR)
932 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
933 v0.AuxInt = int64ToAuxInt(0)
934 v.AddArg2(v0, x)
935 return true
936 }
937 }
938 func rewriteValueMIPS64_OpCom64(v *Value) bool {
939 v_0 := v.Args[0]
940 b := v.Block
941 typ := &b.Func.Config.Types
942
943
944 for {
945 x := v_0
946 v.reset(OpMIPS64NOR)
947 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
948 v0.AuxInt = int64ToAuxInt(0)
949 v.AddArg2(v0, x)
950 return true
951 }
952 }
953 func rewriteValueMIPS64_OpCom8(v *Value) bool {
954 v_0 := v.Args[0]
955 b := v.Block
956 typ := &b.Func.Config.Types
957
958
959 for {
960 x := v_0
961 v.reset(OpMIPS64NOR)
962 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
963 v0.AuxInt = int64ToAuxInt(0)
964 v.AddArg2(v0, x)
965 return true
966 }
967 }
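// Constants: integer and boolean constants of every width become a single
// MOVVconst holding the 64-bit value; Const32F/Const64F become
// MOVFconst/MOVDconst with a float64 aux value.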
968 func rewriteValueMIPS64_OpConst16(v *Value) bool {
969
970
971 for {
972 val := auxIntToInt16(v.AuxInt)
973 v.reset(OpMIPS64MOVVconst)
974 v.AuxInt = int64ToAuxInt(int64(val))
975 return true
976 }
977 }
978 func rewriteValueMIPS64_OpConst32(v *Value) bool {
979
980
981 for {
982 val := auxIntToInt32(v.AuxInt)
983 v.reset(OpMIPS64MOVVconst)
984 v.AuxInt = int64ToAuxInt(int64(val))
985 return true
986 }
987 }
988 func rewriteValueMIPS64_OpConst32F(v *Value) bool {
989
990
991 for {
992 val := auxIntToFloat32(v.AuxInt)
993 v.reset(OpMIPS64MOVFconst)
994 v.AuxInt = float64ToAuxInt(float64(val))
995 return true
996 }
997 }
998 func rewriteValueMIPS64_OpConst64(v *Value) bool {
999
1000
1001 for {
1002 val := auxIntToInt64(v.AuxInt)
1003 v.reset(OpMIPS64MOVVconst)
1004 v.AuxInt = int64ToAuxInt(int64(val))
1005 return true
1006 }
1007 }
1008 func rewriteValueMIPS64_OpConst64F(v *Value) bool {
1009
1010
1011 for {
1012 val := auxIntToFloat64(v.AuxInt)
1013 v.reset(OpMIPS64MOVDconst)
1014 v.AuxInt = float64ToAuxInt(float64(val))
1015 return true
1016 }
1017 }
1018 func rewriteValueMIPS64_OpConst8(v *Value) bool {
1019
1020
1021 for {
1022 val := auxIntToInt8(v.AuxInt)
1023 v.reset(OpMIPS64MOVVconst)
1024 v.AuxInt = int64ToAuxInt(int64(val))
1025 return true
1026 }
1027 }
1028 func rewriteValueMIPS64_OpConstBool(v *Value) bool {
1029
1030
1031 for {
1032 t := auxIntToBool(v.AuxInt)
1033 v.reset(OpMIPS64MOVVconst)
1034 v.AuxInt = int64ToAuxInt(int64(b2i(t)))
1035 return true
1036 }
1037 }
1038 func rewriteValueMIPS64_OpConstNil(v *Value) bool {
1039
1040
1041 for {
1042 v.reset(OpMIPS64MOVVconst)
1043 v.AuxInt = int64ToAuxInt(0)
1044 return true
1045 }
1046 }
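// Div*: operands narrower than 64 bits are first sign- or zero-extended, then
// divided with DIVV/DIVVU. The tuple result carries (remainder, quotient), so
// the quotient is extracted with Select1 (the Mod* rules use Select0).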
1047 func rewriteValueMIPS64_OpDiv16(v *Value) bool {
1048 v_1 := v.Args[1]
1049 v_0 := v.Args[0]
1050 b := v.Block
1051 typ := &b.Func.Config.Types
1052
1053
1054 for {
1055 x := v_0
1056 y := v_1
1057 v.reset(OpSelect1)
1058 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1059 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1060 v1.AddArg(x)
1061 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1062 v2.AddArg(y)
1063 v0.AddArg2(v1, v2)
1064 v.AddArg(v0)
1065 return true
1066 }
1067 }
1068 func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
1069 v_1 := v.Args[1]
1070 v_0 := v.Args[0]
1071 b := v.Block
1072 typ := &b.Func.Config.Types
1073
1074
1075 for {
1076 x := v_0
1077 y := v_1
1078 v.reset(OpSelect1)
1079 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1080 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1081 v1.AddArg(x)
1082 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1083 v2.AddArg(y)
1084 v0.AddArg2(v1, v2)
1085 v.AddArg(v0)
1086 return true
1087 }
1088 }
1089 func rewriteValueMIPS64_OpDiv32(v *Value) bool {
1090 v_1 := v.Args[1]
1091 v_0 := v.Args[0]
1092 b := v.Block
1093 typ := &b.Func.Config.Types
1094
1095
1096 for {
1097 x := v_0
1098 y := v_1
1099 v.reset(OpSelect1)
1100 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1101 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1102 v1.AddArg(x)
1103 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1104 v2.AddArg(y)
1105 v0.AddArg2(v1, v2)
1106 v.AddArg(v0)
1107 return true
1108 }
1109 }
1110 func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
1111 v_1 := v.Args[1]
1112 v_0 := v.Args[0]
1113 b := v.Block
1114 typ := &b.Func.Config.Types
1115
1116
1117 for {
1118 x := v_0
1119 y := v_1
1120 v.reset(OpSelect1)
1121 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1122 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1123 v1.AddArg(x)
1124 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1125 v2.AddArg(y)
1126 v0.AddArg2(v1, v2)
1127 v.AddArg(v0)
1128 return true
1129 }
1130 }
1131 func rewriteValueMIPS64_OpDiv64(v *Value) bool {
1132 v_1 := v.Args[1]
1133 v_0 := v.Args[0]
1134 b := v.Block
1135 typ := &b.Func.Config.Types
1136
1137
1138 for {
1139 x := v_0
1140 y := v_1
1141 v.reset(OpSelect1)
1142 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1143 v0.AddArg2(x, y)
1144 v.AddArg(v0)
1145 return true
1146 }
1147 }
1148 func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
1149 v_1 := v.Args[1]
1150 v_0 := v.Args[0]
1151 b := v.Block
1152 typ := &b.Func.Config.Types
1153
1154
1155 for {
1156 x := v_0
1157 y := v_1
1158 v.reset(OpSelect1)
1159 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1160 v0.AddArg2(x, y)
1161 v.AddArg(v0)
1162 return true
1163 }
1164 }
1165 func rewriteValueMIPS64_OpDiv8(v *Value) bool {
1166 v_1 := v.Args[1]
1167 v_0 := v.Args[0]
1168 b := v.Block
1169 typ := &b.Func.Config.Types
1170
1171
1172 for {
1173 x := v_0
1174 y := v_1
1175 v.reset(OpSelect1)
1176 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1177 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1178 v1.AddArg(x)
1179 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1180 v2.AddArg(y)
1181 v0.AddArg2(v1, v2)
1182 v.AddArg(v0)
1183 return true
1184 }
1185 }
1186 func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
1187 v_1 := v.Args[1]
1188 v_0 := v.Args[0]
1189 b := v.Block
1190 typ := &b.Func.Config.Types
1191
1192
1193 for {
1194 x := v_0
1195 y := v_1
1196 v.reset(OpSelect1)
1197 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1198 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1199 v1.AddArg(x)
1200 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1201 v2.AddArg(y)
1202 v0.AddArg2(v1, v2)
1203 v.AddArg(v0)
1204 return true
1205 }
1206 }
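// Eq*: x == y is lowered to SGTU(1, XOR(x, y)); the XOR is zero exactly when
// the (widened) operands are equal, and only zero is unsigned-less than 1.
// Float equality instead tests the FP flag set by CMPEQF/CMPEQD.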
1207 func rewriteValueMIPS64_OpEq16(v *Value) bool {
1208 v_1 := v.Args[1]
1209 v_0 := v.Args[0]
1210 b := v.Block
1211 typ := &b.Func.Config.Types
1212
1213
1214 for {
1215 x := v_0
1216 y := v_1
1217 v.reset(OpMIPS64SGTU)
1218 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1219 v0.AuxInt = int64ToAuxInt(1)
1220 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1221 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1222 v2.AddArg(x)
1223 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1224 v3.AddArg(y)
1225 v1.AddArg2(v2, v3)
1226 v.AddArg2(v0, v1)
1227 return true
1228 }
1229 }
1230 func rewriteValueMIPS64_OpEq32(v *Value) bool {
1231 v_1 := v.Args[1]
1232 v_0 := v.Args[0]
1233 b := v.Block
1234 typ := &b.Func.Config.Types
1235
1236
1237 for {
1238 x := v_0
1239 y := v_1
1240 v.reset(OpMIPS64SGTU)
1241 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1242 v0.AuxInt = int64ToAuxInt(1)
1243 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1244 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1245 v2.AddArg(x)
1246 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1247 v3.AddArg(y)
1248 v1.AddArg2(v2, v3)
1249 v.AddArg2(v0, v1)
1250 return true
1251 }
1252 }
1253 func rewriteValueMIPS64_OpEq32F(v *Value) bool {
1254 v_1 := v.Args[1]
1255 v_0 := v.Args[0]
1256 b := v.Block
1257
1258
1259 for {
1260 x := v_0
1261 y := v_1
1262 v.reset(OpMIPS64FPFlagTrue)
1263 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
1264 v0.AddArg2(x, y)
1265 v.AddArg(v0)
1266 return true
1267 }
1268 }
1269 func rewriteValueMIPS64_OpEq64(v *Value) bool {
1270 v_1 := v.Args[1]
1271 v_0 := v.Args[0]
1272 b := v.Block
1273 typ := &b.Func.Config.Types
1274
1275
1276 for {
1277 x := v_0
1278 y := v_1
1279 v.reset(OpMIPS64SGTU)
1280 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1281 v0.AuxInt = int64ToAuxInt(1)
1282 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1283 v1.AddArg2(x, y)
1284 v.AddArg2(v0, v1)
1285 return true
1286 }
1287 }
1288 func rewriteValueMIPS64_OpEq64F(v *Value) bool {
1289 v_1 := v.Args[1]
1290 v_0 := v.Args[0]
1291 b := v.Block
1292
1293
1294 for {
1295 x := v_0
1296 y := v_1
1297 v.reset(OpMIPS64FPFlagTrue)
1298 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
1299 v0.AddArg2(x, y)
1300 v.AddArg(v0)
1301 return true
1302 }
1303 }
1304 func rewriteValueMIPS64_OpEq8(v *Value) bool {
1305 v_1 := v.Args[1]
1306 v_0 := v.Args[0]
1307 b := v.Block
1308 typ := &b.Func.Config.Types
1309
1310
1311 for {
1312 x := v_0
1313 y := v_1
1314 v.reset(OpMIPS64SGTU)
1315 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1316 v0.AuxInt = int64ToAuxInt(1)
1317 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1318 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1319 v2.AddArg(x)
1320 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1321 v3.AddArg(y)
1322 v1.AddArg2(v2, v3)
1323 v.AddArg2(v0, v1)
1324 return true
1325 }
1326 }
1327 func rewriteValueMIPS64_OpEqB(v *Value) bool {
1328 v_1 := v.Args[1]
1329 v_0 := v.Args[0]
1330 b := v.Block
1331 typ := &b.Func.Config.Types
1332
1333
1334 for {
1335 x := v_0
1336 y := v_1
1337 v.reset(OpMIPS64XOR)
1338 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1339 v0.AuxInt = int64ToAuxInt(1)
1340 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
1341 v1.AddArg2(x, y)
1342 v.AddArg2(v0, v1)
1343 return true
1344 }
1345 }
1346 func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
1347 v_1 := v.Args[1]
1348 v_0 := v.Args[0]
1349 b := v.Block
1350 typ := &b.Func.Config.Types
1351
1352
1353 for {
1354 x := v_0
1355 y := v_1
1356 v.reset(OpMIPS64SGTU)
1357 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1358 v0.AuxInt = int64ToAuxInt(1)
1359 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1360 v1.AddArg2(x, y)
1361 v.AddArg2(v0, v1)
1362 return true
1363 }
1364 }
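// Hmul*: the high half of a full-width multiply. The 32-bit forms widen to 64
// bits, multiply, and shift the low result word right by 32; the 64-bit forms
// take Select0 of MULV/MULVU, the upper 64 bits of the 128-bit product.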
1365 func rewriteValueMIPS64_OpHmul32(v *Value) bool {
1366 v_1 := v.Args[1]
1367 v_0 := v.Args[0]
1368 b := v.Block
1369 typ := &b.Func.Config.Types
1370
1371
1372 for {
1373 x := v_0
1374 y := v_1
1375 v.reset(OpMIPS64SRAVconst)
1376 v.AuxInt = int64ToAuxInt(32)
1377 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
1378 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1379 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1380 v2.AddArg(x)
1381 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1382 v3.AddArg(y)
1383 v1.AddArg2(v2, v3)
1384 v0.AddArg(v1)
1385 v.AddArg(v0)
1386 return true
1387 }
1388 }
1389 func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
1390 v_1 := v.Args[1]
1391 v_0 := v.Args[0]
1392 b := v.Block
1393 typ := &b.Func.Config.Types
1394
1395
1396 for {
1397 x := v_0
1398 y := v_1
1399 v.reset(OpMIPS64SRLVconst)
1400 v.AuxInt = int64ToAuxInt(32)
1401 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
1402 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1403 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1404 v2.AddArg(x)
1405 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1406 v3.AddArg(y)
1407 v1.AddArg2(v2, v3)
1408 v0.AddArg(v1)
1409 v.AddArg(v0)
1410 return true
1411 }
1412 }
1413 func rewriteValueMIPS64_OpHmul64(v *Value) bool {
1414 v_1 := v.Args[1]
1415 v_0 := v.Args[0]
1416 b := v.Block
1417 typ := &b.Func.Config.Types
1418
1419
1420 for {
1421 x := v_0
1422 y := v_1
1423 v.reset(OpSelect0)
1424 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1425 v0.AddArg2(x, y)
1426 v.AddArg(v0)
1427 return true
1428 }
1429 }
1430 func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
1431 v_1 := v.Args[1]
1432 v_0 := v.Args[0]
1433 b := v.Block
1434 typ := &b.Func.Config.Types
1435
1436
1437 for {
1438 x := v_0
1439 y := v_1
1440 v.reset(OpSelect0)
1441 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1442 v0.AddArg2(x, y)
1443 v.AddArg(v0)
1444 return true
1445 }
1446 }
1447 func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
1448 v_1 := v.Args[1]
1449 v_0 := v.Args[0]
// match: (IsInBounds idx len)
// result: (SGTU len idx)
1452 for {
1453 idx := v_0
1454 len := v_1
1455 v.reset(OpMIPS64SGTU)
1456 v.AddArg2(len, idx)
1457 return true
1458 }
1459 }
1460 func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
1461 v_0 := v.Args[0]
1462 b := v.Block
1463 typ := &b.Func.Config.Types
// match: (IsNonNil ptr)
// result: (SGTU ptr (MOVVconst [0]))
1466 for {
1467 ptr := v_0
1468 v.reset(OpMIPS64SGTU)
1469 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1470 v0.AuxInt = int64ToAuxInt(0)
1471 v.AddArg2(ptr, v0)
1472 return true
1473 }
1474 }
1475 func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
1476 v_1 := v.Args[1]
1477 v_0 := v.Args[0]
1478 b := v.Block
1479 typ := &b.Func.Config.Types
// match: (IsSliceInBounds idx len)
// result: (XOR (MOVVconst [1]) (SGTU idx len))
1482 for {
1483 idx := v_0
1484 len := v_1
1485 v.reset(OpMIPS64XOR)
1486 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1487 v0.AuxInt = int64ToAuxInt(1)
1488 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1489 v1.AddArg2(idx, len)
1490 v.AddArg2(v0, v1)
1491 return true
1492 }
1493 }
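// Leq*/Less*: MIPS64 only has "set on greater than" (SGT/SGTU), so x < y is
// SGT(U)(y, x) with the operands swapped, and x <= y is XOR(1, SGT(U)(x, y)),
// i.e. !(x > y). Floating-point comparisons test the FP flag set by
// CMPGEF/CMPGED and CMPGTF/CMPGTD instead.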
1494 func rewriteValueMIPS64_OpLeq16(v *Value) bool {
1495 v_1 := v.Args[1]
1496 v_0 := v.Args[0]
1497 b := v.Block
1498 typ := &b.Func.Config.Types
1499
1500
1501 for {
1502 x := v_0
1503 y := v_1
1504 v.reset(OpMIPS64XOR)
1505 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1506 v0.AuxInt = int64ToAuxInt(1)
1507 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1508 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1509 v2.AddArg(x)
1510 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1511 v3.AddArg(y)
1512 v1.AddArg2(v2, v3)
1513 v.AddArg2(v0, v1)
1514 return true
1515 }
1516 }
1517 func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
1518 v_1 := v.Args[1]
1519 v_0 := v.Args[0]
1520 b := v.Block
1521 typ := &b.Func.Config.Types
1522
1523
1524 for {
1525 x := v_0
1526 y := v_1
1527 v.reset(OpMIPS64XOR)
1528 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1529 v0.AuxInt = int64ToAuxInt(1)
1530 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1531 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1532 v2.AddArg(x)
1533 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1534 v3.AddArg(y)
1535 v1.AddArg2(v2, v3)
1536 v.AddArg2(v0, v1)
1537 return true
1538 }
1539 }
1540 func rewriteValueMIPS64_OpLeq32(v *Value) bool {
1541 v_1 := v.Args[1]
1542 v_0 := v.Args[0]
1543 b := v.Block
1544 typ := &b.Func.Config.Types
1545
1546
1547 for {
1548 x := v_0
1549 y := v_1
1550 v.reset(OpMIPS64XOR)
1551 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1552 v0.AuxInt = int64ToAuxInt(1)
1553 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1554 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1555 v2.AddArg(x)
1556 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1557 v3.AddArg(y)
1558 v1.AddArg2(v2, v3)
1559 v.AddArg2(v0, v1)
1560 return true
1561 }
1562 }
1563 func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
1564 v_1 := v.Args[1]
1565 v_0 := v.Args[0]
1566 b := v.Block
1567
1568
1569 for {
1570 x := v_0
1571 y := v_1
1572 v.reset(OpMIPS64FPFlagTrue)
1573 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
1574 v0.AddArg2(y, x)
1575 v.AddArg(v0)
1576 return true
1577 }
1578 }
1579 func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
1580 v_1 := v.Args[1]
1581 v_0 := v.Args[0]
1582 b := v.Block
1583 typ := &b.Func.Config.Types
1584
1585
1586 for {
1587 x := v_0
1588 y := v_1
1589 v.reset(OpMIPS64XOR)
1590 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1591 v0.AuxInt = int64ToAuxInt(1)
1592 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1593 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1594 v2.AddArg(x)
1595 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1596 v3.AddArg(y)
1597 v1.AddArg2(v2, v3)
1598 v.AddArg2(v0, v1)
1599 return true
1600 }
1601 }
1602 func rewriteValueMIPS64_OpLeq64(v *Value) bool {
1603 v_1 := v.Args[1]
1604 v_0 := v.Args[0]
1605 b := v.Block
1606 typ := &b.Func.Config.Types
1607
1608
1609 for {
1610 x := v_0
1611 y := v_1
1612 v.reset(OpMIPS64XOR)
1613 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1614 v0.AuxInt = int64ToAuxInt(1)
1615 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1616 v1.AddArg2(x, y)
1617 v.AddArg2(v0, v1)
1618 return true
1619 }
1620 }
1621 func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
1622 v_1 := v.Args[1]
1623 v_0 := v.Args[0]
1624 b := v.Block
1625
1626
1627 for {
1628 x := v_0
1629 y := v_1
1630 v.reset(OpMIPS64FPFlagTrue)
1631 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
1632 v0.AddArg2(y, x)
1633 v.AddArg(v0)
1634 return true
1635 }
1636 }
1637 func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
1638 v_1 := v.Args[1]
1639 v_0 := v.Args[0]
1640 b := v.Block
1641 typ := &b.Func.Config.Types
1642
1643
1644 for {
1645 x := v_0
1646 y := v_1
1647 v.reset(OpMIPS64XOR)
1648 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1649 v0.AuxInt = int64ToAuxInt(1)
1650 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1651 v1.AddArg2(x, y)
1652 v.AddArg2(v0, v1)
1653 return true
1654 }
1655 }
1656 func rewriteValueMIPS64_OpLeq8(v *Value) bool {
1657 v_1 := v.Args[1]
1658 v_0 := v.Args[0]
1659 b := v.Block
1660 typ := &b.Func.Config.Types
1661
1662
1663 for {
1664 x := v_0
1665 y := v_1
1666 v.reset(OpMIPS64XOR)
1667 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1668 v0.AuxInt = int64ToAuxInt(1)
1669 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1670 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1671 v2.AddArg(x)
1672 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1673 v3.AddArg(y)
1674 v1.AddArg2(v2, v3)
1675 v.AddArg2(v0, v1)
1676 return true
1677 }
1678 }
1679 func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
1680 v_1 := v.Args[1]
1681 v_0 := v.Args[0]
1682 b := v.Block
1683 typ := &b.Func.Config.Types
1684
1685
1686 for {
1687 x := v_0
1688 y := v_1
1689 v.reset(OpMIPS64XOR)
1690 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1691 v0.AuxInt = int64ToAuxInt(1)
1692 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1693 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1694 v2.AddArg(x)
1695 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1696 v3.AddArg(y)
1697 v1.AddArg2(v2, v3)
1698 v.AddArg2(v0, v1)
1699 return true
1700 }
1701 }
1702 func rewriteValueMIPS64_OpLess16(v *Value) bool {
1703 v_1 := v.Args[1]
1704 v_0 := v.Args[0]
1705 b := v.Block
1706 typ := &b.Func.Config.Types
1707
1708
1709 for {
1710 x := v_0
1711 y := v_1
1712 v.reset(OpMIPS64SGT)
1713 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1714 v0.AddArg(y)
1715 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1716 v1.AddArg(x)
1717 v.AddArg2(v0, v1)
1718 return true
1719 }
1720 }
1721 func rewriteValueMIPS64_OpLess16U(v *Value) bool {
1722 v_1 := v.Args[1]
1723 v_0 := v.Args[0]
1724 b := v.Block
1725 typ := &b.Func.Config.Types
1726
1727
1728 for {
1729 x := v_0
1730 y := v_1
1731 v.reset(OpMIPS64SGTU)
1732 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1733 v0.AddArg(y)
1734 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1735 v1.AddArg(x)
1736 v.AddArg2(v0, v1)
1737 return true
1738 }
1739 }
1740 func rewriteValueMIPS64_OpLess32(v *Value) bool {
1741 v_1 := v.Args[1]
1742 v_0 := v.Args[0]
1743 b := v.Block
1744 typ := &b.Func.Config.Types
1745
1746
1747 for {
1748 x := v_0
1749 y := v_1
1750 v.reset(OpMIPS64SGT)
1751 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1752 v0.AddArg(y)
1753 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1754 v1.AddArg(x)
1755 v.AddArg2(v0, v1)
1756 return true
1757 }
1758 }
1759 func rewriteValueMIPS64_OpLess32F(v *Value) bool {
1760 v_1 := v.Args[1]
1761 v_0 := v.Args[0]
1762 b := v.Block
1763
1764
1765 for {
1766 x := v_0
1767 y := v_1
1768 v.reset(OpMIPS64FPFlagTrue)
1769 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
1770 v0.AddArg2(y, x)
1771 v.AddArg(v0)
1772 return true
1773 }
1774 }
1775 func rewriteValueMIPS64_OpLess32U(v *Value) bool {
1776 v_1 := v.Args[1]
1777 v_0 := v.Args[0]
1778 b := v.Block
1779 typ := &b.Func.Config.Types
1780
1781
1782 for {
1783 x := v_0
1784 y := v_1
1785 v.reset(OpMIPS64SGTU)
1786 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1787 v0.AddArg(y)
1788 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1789 v1.AddArg(x)
1790 v.AddArg2(v0, v1)
1791 return true
1792 }
1793 }
1794 func rewriteValueMIPS64_OpLess64(v *Value) bool {
1795 v_1 := v.Args[1]
1796 v_0 := v.Args[0]
1797
1798
1799 for {
1800 x := v_0
1801 y := v_1
1802 v.reset(OpMIPS64SGT)
1803 v.AddArg2(y, x)
1804 return true
1805 }
1806 }
1807 func rewriteValueMIPS64_OpLess64F(v *Value) bool {
1808 v_1 := v.Args[1]
1809 v_0 := v.Args[0]
1810 b := v.Block
1811
1812
1813 for {
1814 x := v_0
1815 y := v_1
1816 v.reset(OpMIPS64FPFlagTrue)
1817 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
1818 v0.AddArg2(y, x)
1819 v.AddArg(v0)
1820 return true
1821 }
1822 }
1823 func rewriteValueMIPS64_OpLess64U(v *Value) bool {
1824 v_1 := v.Args[1]
1825 v_0 := v.Args[0]
1826
1827
1828 for {
1829 x := v_0
1830 y := v_1
1831 v.reset(OpMIPS64SGTU)
1832 v.AddArg2(y, x)
1833 return true
1834 }
1835 }
1836 func rewriteValueMIPS64_OpLess8(v *Value) bool {
1837 v_1 := v.Args[1]
1838 v_0 := v.Args[0]
1839 b := v.Block
1840 typ := &b.Func.Config.Types
1841
1842
1843 for {
1844 x := v_0
1845 y := v_1
1846 v.reset(OpMIPS64SGT)
1847 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1848 v0.AddArg(y)
1849 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1850 v1.AddArg(x)
1851 v.AddArg2(v0, v1)
1852 return true
1853 }
1854 }
1855 func rewriteValueMIPS64_OpLess8U(v *Value) bool {
1856 v_1 := v.Args[1]
1857 v_0 := v.Args[0]
1858 b := v.Block
1859 typ := &b.Func.Config.Types
1860
1861
1862 for {
1863 x := v_0
1864 y := v_1
1865 v.reset(OpMIPS64SGTU)
1866 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1867 v0.AddArg(y)
1868 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1869 v1.AddArg(x)
1870 v.AddArg2(v0, v1)
1871 return true
1872 }
1873 }
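// Load dispatches on the loaded type: each width/signedness combination maps
// to the matching MOV*load (MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWU/MOVV), with
// MOVFload/MOVDload for the floating-point types.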
1874 func rewriteValueMIPS64_OpLoad(v *Value) bool {
1875 v_1 := v.Args[1]
1876 v_0 := v.Args[0]
// match: (Load <t> ptr mem)
// cond: t.IsBoolean()
// result: (MOVBUload ptr mem)
1880 for {
1881 t := v.Type
1882 ptr := v_0
1883 mem := v_1
1884 if !(t.IsBoolean()) {
1885 break
1886 }
1887 v.reset(OpMIPS64MOVBUload)
1888 v.AddArg2(ptr, mem)
1889 return true
1890 }
// match: (Load <t> ptr mem)
// cond: is8BitInt(t) && t.IsSigned()
// result: (MOVBload ptr mem)
1894 for {
1895 t := v.Type
1896 ptr := v_0
1897 mem := v_1
1898 if !(is8BitInt(t) && t.IsSigned()) {
1899 break
1900 }
1901 v.reset(OpMIPS64MOVBload)
1902 v.AddArg2(ptr, mem)
1903 return true
1904 }
// match: (Load <t> ptr mem)
// cond: is8BitInt(t) && !t.IsSigned()
// result: (MOVBUload ptr mem)
1908 for {
1909 t := v.Type
1910 ptr := v_0
1911 mem := v_1
1912 if !(is8BitInt(t) && !t.IsSigned()) {
1913 break
1914 }
1915 v.reset(OpMIPS64MOVBUload)
1916 v.AddArg2(ptr, mem)
1917 return true
1918 }
// match: (Load <t> ptr mem)
// cond: is16BitInt(t) && t.IsSigned()
// result: (MOVHload ptr mem)
1922 for {
1923 t := v.Type
1924 ptr := v_0
1925 mem := v_1
1926 if !(is16BitInt(t) && t.IsSigned()) {
1927 break
1928 }
1929 v.reset(OpMIPS64MOVHload)
1930 v.AddArg2(ptr, mem)
1931 return true
1932 }
// match: (Load <t> ptr mem)
// cond: is16BitInt(t) && !t.IsSigned()
// result: (MOVHUload ptr mem)
1936 for {
1937 t := v.Type
1938 ptr := v_0
1939 mem := v_1
1940 if !(is16BitInt(t) && !t.IsSigned()) {
1941 break
1942 }
1943 v.reset(OpMIPS64MOVHUload)
1944 v.AddArg2(ptr, mem)
1945 return true
1946 }
// match: (Load <t> ptr mem)
// cond: is32BitInt(t) && t.IsSigned()
// result: (MOVWload ptr mem)
1950 for {
1951 t := v.Type
1952 ptr := v_0
1953 mem := v_1
1954 if !(is32BitInt(t) && t.IsSigned()) {
1955 break
1956 }
1957 v.reset(OpMIPS64MOVWload)
1958 v.AddArg2(ptr, mem)
1959 return true
1960 }
// match: (Load <t> ptr mem)
// cond: is32BitInt(t) && !t.IsSigned()
// result: (MOVWUload ptr mem)
1964 for {
1965 t := v.Type
1966 ptr := v_0
1967 mem := v_1
1968 if !(is32BitInt(t) && !t.IsSigned()) {
1969 break
1970 }
1971 v.reset(OpMIPS64MOVWUload)
1972 v.AddArg2(ptr, mem)
1973 return true
1974 }
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) || isPtr(t)
// result: (MOVVload ptr mem)
1978 for {
1979 t := v.Type
1980 ptr := v_0
1981 mem := v_1
1982 if !(is64BitInt(t) || isPtr(t)) {
1983 break
1984 }
1985 v.reset(OpMIPS64MOVVload)
1986 v.AddArg2(ptr, mem)
1987 return true
1988 }
// match: (Load <t> ptr mem)
// cond: is32BitFloat(t)
// result: (MOVFload ptr mem)
1992 for {
1993 t := v.Type
1994 ptr := v_0
1995 mem := v_1
1996 if !(is32BitFloat(t)) {
1997 break
1998 }
1999 v.reset(OpMIPS64MOVFload)
2000 v.AddArg2(ptr, mem)
2001 return true
2002 }
// match: (Load <t> ptr mem)
// cond: is64BitFloat(t)
// result: (MOVDload ptr mem)
2006 for {
2007 t := v.Type
2008 ptr := v_0
2009 mem := v_1
2010 if !(is64BitFloat(t)) {
2011 break
2012 }
2013 v.reset(OpMIPS64MOVDload)
2014 v.AddArg2(ptr, mem)
2015 return true
2016 }
2017 return false
2018 }
2019 func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
2020 v_1 := v.Args[1]
2021 v_0 := v.Args[0]
2022 b := v.Block
2023 typ := &b.Func.Config.Types
// match: (LocalAddr <t> {sym} base mem)
// cond: t.Elem().HasPointers()
// result: (MOVVaddr {sym} (SPanchored base mem))
2027 for {
2028 t := v.Type
2029 sym := auxToSym(v.Aux)
2030 base := v_0
2031 mem := v_1
2032 if !(t.Elem().HasPointers()) {
2033 break
2034 }
2035 v.reset(OpMIPS64MOVVaddr)
2036 v.Aux = symToAux(sym)
2037 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
2038 v0.AddArg2(base, mem)
2039 v.AddArg(v0)
2040 return true
2041 }
// match: (LocalAddr <t> {sym} base _)
// cond: !t.Elem().HasPointers()
// result: (MOVVaddr {sym} base)
2045 for {
2046 t := v.Type
2047 sym := auxToSym(v.Aux)
2048 base := v_0
2049 if !(!t.Elem().HasPointers()) {
2050 break
2051 }
2052 v.reset(OpMIPS64MOVVaddr)
2053 v.Aux = symToAux(sym)
2054 v.AddArg(base)
2055 return true
2056 }
2057 return false
2058 }
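// Lsh*: the hardware shift only looks at the low bits of the count, so to get
// Go's semantics (a count of 64 or more yields 0) the shift is masked:
// AND(NEGV(SGTU(64, y)), SLLV(x, y)). NEGV turns the 0/1 result of SGTU into
// an all-zeros or all-ones mask, zeroing the result when the count is >= 64.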
2059 func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
2060 v_1 := v.Args[1]
2061 v_0 := v.Args[0]
2062 b := v.Block
2063 typ := &b.Func.Config.Types
2064
2065
2066 for {
2067 t := v.Type
2068 x := v_0
2069 y := v_1
2070 v.reset(OpMIPS64AND)
2071 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2072 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2073 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2074 v2.AuxInt = int64ToAuxInt(64)
2075 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2076 v3.AddArg(y)
2077 v1.AddArg2(v2, v3)
2078 v0.AddArg(v1)
2079 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2080 v4.AddArg2(x, v3)
2081 v.AddArg2(v0, v4)
2082 return true
2083 }
2084 }
2085 func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
2086 v_1 := v.Args[1]
2087 v_0 := v.Args[0]
2088 b := v.Block
2089 typ := &b.Func.Config.Types
2090
2091
2092 for {
2093 t := v.Type
2094 x := v_0
2095 y := v_1
2096 v.reset(OpMIPS64AND)
2097 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2098 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2099 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2100 v2.AuxInt = int64ToAuxInt(64)
2101 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2102 v3.AddArg(y)
2103 v1.AddArg2(v2, v3)
2104 v0.AddArg(v1)
2105 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2106 v4.AddArg2(x, v3)
2107 v.AddArg2(v0, v4)
2108 return true
2109 }
2110 }
2111 func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
2112 v_1 := v.Args[1]
2113 v_0 := v.Args[0]
2114 b := v.Block
2115 typ := &b.Func.Config.Types
2116
2117
2118 for {
2119 t := v.Type
2120 x := v_0
2121 y := v_1
2122 v.reset(OpMIPS64AND)
2123 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2124 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2125 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2126 v2.AuxInt = int64ToAuxInt(64)
2127 v1.AddArg2(v2, y)
2128 v0.AddArg(v1)
2129 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2130 v3.AddArg2(x, y)
2131 v.AddArg2(v0, v3)
2132 return true
2133 }
2134 }
2135 func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
2136 v_1 := v.Args[1]
2137 v_0 := v.Args[0]
2138 b := v.Block
2139 typ := &b.Func.Config.Types
2140
2141
2142 for {
2143 t := v.Type
2144 x := v_0
2145 y := v_1
2146 v.reset(OpMIPS64AND)
2147 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2148 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2149 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2150 v2.AuxInt = int64ToAuxInt(64)
2151 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2152 v3.AddArg(y)
2153 v1.AddArg2(v2, v3)
2154 v0.AddArg(v1)
2155 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2156 v4.AddArg2(x, v3)
2157 v.AddArg2(v0, v4)
2158 return true
2159 }
2160 }
2161 func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
2162 v_1 := v.Args[1]
2163 v_0 := v.Args[0]
2164 b := v.Block
2165 typ := &b.Func.Config.Types
2166
2167
2168 for {
2169 t := v.Type
2170 x := v_0
2171 y := v_1
2172 v.reset(OpMIPS64AND)
2173 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2174 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2175 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2176 v2.AuxInt = int64ToAuxInt(64)
2177 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2178 v3.AddArg(y)
2179 v1.AddArg2(v2, v3)
2180 v0.AddArg(v1)
2181 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2182 v4.AddArg2(x, v3)
2183 v.AddArg2(v0, v4)
2184 return true
2185 }
2186 }
2187 func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
2188 v_1 := v.Args[1]
2189 v_0 := v.Args[0]
2190 b := v.Block
2191 typ := &b.Func.Config.Types
2192
2193
2194 for {
2195 t := v.Type
2196 x := v_0
2197 y := v_1
2198 v.reset(OpMIPS64AND)
2199 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2200 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2201 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2202 v2.AuxInt = int64ToAuxInt(64)
2203 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2204 v3.AddArg(y)
2205 v1.AddArg2(v2, v3)
2206 v0.AddArg(v1)
2207 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2208 v4.AddArg2(x, v3)
2209 v.AddArg2(v0, v4)
2210 return true
2211 }
2212 }
2213 func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
2214 v_1 := v.Args[1]
2215 v_0 := v.Args[0]
2216 b := v.Block
2217 typ := &b.Func.Config.Types
2218
2219
2220 for {
2221 t := v.Type
2222 x := v_0
2223 y := v_1
2224 v.reset(OpMIPS64AND)
2225 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2226 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2227 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2228 v2.AuxInt = int64ToAuxInt(64)
2229 v1.AddArg2(v2, y)
2230 v0.AddArg(v1)
2231 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2232 v3.AddArg2(x, y)
2233 v.AddArg2(v0, v3)
2234 return true
2235 }
2236 }
2237 func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
2238 v_1 := v.Args[1]
2239 v_0 := v.Args[0]
2240 b := v.Block
2241 typ := &b.Func.Config.Types
2242
2243
2244 for {
2245 t := v.Type
2246 x := v_0
2247 y := v_1
2248 v.reset(OpMIPS64AND)
2249 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2250 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2251 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2252 v2.AuxInt = int64ToAuxInt(64)
2253 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2254 v3.AddArg(y)
2255 v1.AddArg2(v2, v3)
2256 v0.AddArg(v1)
2257 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2258 v4.AddArg2(x, v3)
2259 v.AddArg2(v0, v4)
2260 return true
2261 }
2262 }
2263 func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
2264 v_1 := v.Args[1]
2265 v_0 := v.Args[0]
2266 b := v.Block
2267 typ := &b.Func.Config.Types
2268
2269
2270 for {
2271 t := v.Type
2272 x := v_0
2273 y := v_1
2274 v.reset(OpMIPS64AND)
2275 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2276 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2277 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2278 v2.AuxInt = int64ToAuxInt(64)
2279 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2280 v3.AddArg(y)
2281 v1.AddArg2(v2, v3)
2282 v0.AddArg(v1)
2283 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2284 v4.AddArg2(x, v3)
2285 v.AddArg2(v0, v4)
2286 return true
2287 }
2288 }
2289 func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
2290 v_1 := v.Args[1]
2291 v_0 := v.Args[0]
2292 b := v.Block
2293 typ := &b.Func.Config.Types
2294
2295
2296 for {
2297 t := v.Type
2298 x := v_0
2299 y := v_1
2300 v.reset(OpMIPS64AND)
2301 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2302 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2303 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2304 v2.AuxInt = int64ToAuxInt(64)
2305 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2306 v3.AddArg(y)
2307 v1.AddArg2(v2, v3)
2308 v0.AddArg(v1)
2309 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2310 v4.AddArg2(x, v3)
2311 v.AddArg2(v0, v4)
2312 return true
2313 }
2314 }
2315 func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
2316 v_1 := v.Args[1]
2317 v_0 := v.Args[0]
2318 b := v.Block
2319 typ := &b.Func.Config.Types
2320
2321
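	// match: (Lsh64x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))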
2322 for {
2323 t := v.Type
2324 x := v_0
2325 y := v_1
2326 v.reset(OpMIPS64AND)
2327 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2328 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2329 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2330 v2.AuxInt = int64ToAuxInt(64)
2331 v1.AddArg2(v2, y)
2332 v0.AddArg(v1)
2333 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2334 v3.AddArg2(x, y)
2335 v.AddArg2(v0, v3)
2336 return true
2337 }
2338 }
2339 func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
2340 v_1 := v.Args[1]
2341 v_0 := v.Args[0]
2342 b := v.Block
2343 typ := &b.Func.Config.Types
2344
2345
2346 for {
2347 t := v.Type
2348 x := v_0
2349 y := v_1
2350 v.reset(OpMIPS64AND)
2351 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2352 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2353 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2354 v2.AuxInt = int64ToAuxInt(64)
2355 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2356 v3.AddArg(y)
2357 v1.AddArg2(v2, v3)
2358 v0.AddArg(v1)
2359 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2360 v4.AddArg2(x, v3)
2361 v.AddArg2(v0, v4)
2362 return true
2363 }
2364 }
2365 func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
2366 v_1 := v.Args[1]
2367 v_0 := v.Args[0]
2368 b := v.Block
2369 typ := &b.Func.Config.Types
2370
2371
2372 for {
2373 t := v.Type
2374 x := v_0
2375 y := v_1
2376 v.reset(OpMIPS64AND)
2377 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2378 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2379 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2380 v2.AuxInt = int64ToAuxInt(64)
2381 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2382 v3.AddArg(y)
2383 v1.AddArg2(v2, v3)
2384 v0.AddArg(v1)
2385 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2386 v4.AddArg2(x, v3)
2387 v.AddArg2(v0, v4)
2388 return true
2389 }
2390 }
2391 func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
2392 v_1 := v.Args[1]
2393 v_0 := v.Args[0]
2394 b := v.Block
2395 typ := &b.Func.Config.Types
2396
2397
2398 for {
2399 t := v.Type
2400 x := v_0
2401 y := v_1
2402 v.reset(OpMIPS64AND)
2403 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2404 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2405 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2406 v2.AuxInt = int64ToAuxInt(64)
2407 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2408 v3.AddArg(y)
2409 v1.AddArg2(v2, v3)
2410 v0.AddArg(v1)
2411 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2412 v4.AddArg2(x, v3)
2413 v.AddArg2(v0, v4)
2414 return true
2415 }
2416 }
2417 func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
2418 v_1 := v.Args[1]
2419 v_0 := v.Args[0]
2420 b := v.Block
2421 typ := &b.Func.Config.Types
2422
2423
2424 for {
2425 t := v.Type
2426 x := v_0
2427 y := v_1
2428 v.reset(OpMIPS64AND)
2429 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2430 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2431 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2432 v2.AuxInt = int64ToAuxInt(64)
2433 v1.AddArg2(v2, y)
2434 v0.AddArg(v1)
2435 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2436 v3.AddArg2(x, y)
2437 v.AddArg2(v0, v3)
2438 return true
2439 }
2440 }
2441 func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
2442 v_1 := v.Args[1]
2443 v_0 := v.Args[0]
2444 b := v.Block
2445 typ := &b.Func.Config.Types
2446
2447
2448 for {
2449 t := v.Type
2450 x := v_0
2451 y := v_1
2452 v.reset(OpMIPS64AND)
2453 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2454 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2455 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2456 v2.AuxInt = int64ToAuxInt(64)
2457 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2458 v3.AddArg(y)
2459 v1.AddArg2(v2, v3)
2460 v0.AddArg(v1)
2461 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2462 v4.AddArg2(x, v3)
2463 v.AddArg2(v0, v4)
2464 return true
2465 }
2466 }
2467 func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
2468 v_1 := v.Args[1]
2469 v_0 := v.Args[0]
2470
2471
2472
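	// match: (ADDV x (MOVVconst <t> [c]))
	// cond: is32Bit(c) && !t.IsPtr()
	// result: (ADDVconst [c] x)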
2473 for {
2474 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2475 x := v_0
2476 if v_1.Op != OpMIPS64MOVVconst {
2477 continue
2478 }
2479 t := v_1.Type
2480 c := auxIntToInt64(v_1.AuxInt)
2481 if !(is32Bit(c) && !t.IsPtr()) {
2482 continue
2483 }
2484 v.reset(OpMIPS64ADDVconst)
2485 v.AuxInt = int64ToAuxInt(c)
2486 v.AddArg(x)
2487 return true
2488 }
2489 break
2490 }
2491
2492
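	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)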
2493 for {
2494 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2495 x := v_0
2496 if v_1.Op != OpMIPS64NEGV {
2497 continue
2498 }
2499 y := v_1.Args[0]
2500 v.reset(OpMIPS64SUBV)
2501 v.AddArg2(x, y)
2502 return true
2503 }
2504 break
2505 }
2506 return false
2507 }
2508 func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
2509 v_0 := v.Args[0]
2510
2511
2512
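	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)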
2513 for {
2514 off1 := auxIntToInt64(v.AuxInt)
2515 if v_0.Op != OpMIPS64MOVVaddr {
2516 break
2517 }
2518 off2 := auxIntToInt32(v_0.AuxInt)
2519 sym := auxToSym(v_0.Aux)
2520 ptr := v_0.Args[0]
2521 if !(is32Bit(off1 + int64(off2))) {
2522 break
2523 }
2524 v.reset(OpMIPS64MOVVaddr)
2525 v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
2526 v.Aux = symToAux(sym)
2527 v.AddArg(ptr)
2528 return true
2529 }
2530
2531
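	// match: (ADDVconst [0] x)
	// result: x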
2532 for {
2533 if auxIntToInt64(v.AuxInt) != 0 {
2534 break
2535 }
2536 x := v_0
2537 v.copyOf(x)
2538 return true
2539 }
2540
2541
2542 for {
2543 c := auxIntToInt64(v.AuxInt)
2544 if v_0.Op != OpMIPS64MOVVconst {
2545 break
2546 }
2547 d := auxIntToInt64(v_0.AuxInt)
2548 v.reset(OpMIPS64MOVVconst)
2549 v.AuxInt = int64ToAuxInt(c + d)
2550 return true
2551 }
2552
2553
2554
2555 for {
2556 c := auxIntToInt64(v.AuxInt)
2557 if v_0.Op != OpMIPS64ADDVconst {
2558 break
2559 }
2560 d := auxIntToInt64(v_0.AuxInt)
2561 x := v_0.Args[0]
2562 if !(is32Bit(c + d)) {
2563 break
2564 }
2565 v.reset(OpMIPS64ADDVconst)
2566 v.AuxInt = int64ToAuxInt(c + d)
2567 v.AddArg(x)
2568 return true
2569 }
2570
2571
2572
2573 for {
2574 c := auxIntToInt64(v.AuxInt)
2575 if v_0.Op != OpMIPS64SUBVconst {
2576 break
2577 }
2578 d := auxIntToInt64(v_0.AuxInt)
2579 x := v_0.Args[0]
2580 if !(is32Bit(c - d)) {
2581 break
2582 }
2583 v.reset(OpMIPS64ADDVconst)
2584 v.AuxInt = int64ToAuxInt(c - d)
2585 v.AddArg(x)
2586 return true
2587 }
2588 return false
2589 }
2590 func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
2591 v_1 := v.Args[1]
2592 v_0 := v.Args[0]
2593
2594
2595
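	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)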
2596 for {
2597 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2598 x := v_0
2599 if v_1.Op != OpMIPS64MOVVconst {
2600 continue
2601 }
2602 c := auxIntToInt64(v_1.AuxInt)
2603 if !(is32Bit(c)) {
2604 continue
2605 }
2606 v.reset(OpMIPS64ANDconst)
2607 v.AuxInt = int64ToAuxInt(c)
2608 v.AddArg(x)
2609 return true
2610 }
2611 break
2612 }
2613
2614
2615 for {
2616 x := v_0
2617 if x != v_1 {
2618 break
2619 }
2620 v.copyOf(x)
2621 return true
2622 }
2623 return false
2624 }
2625 func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
2626 v_0 := v.Args[0]
2627
2628
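	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])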
2629 for {
2630 if auxIntToInt64(v.AuxInt) != 0 {
2631 break
2632 }
2633 v.reset(OpMIPS64MOVVconst)
2634 v.AuxInt = int64ToAuxInt(0)
2635 return true
2636 }
2637
2638
2639 for {
2640 if auxIntToInt64(v.AuxInt) != -1 {
2641 break
2642 }
2643 x := v_0
2644 v.copyOf(x)
2645 return true
2646 }
2647
2648
2649 for {
2650 c := auxIntToInt64(v.AuxInt)
2651 if v_0.Op != OpMIPS64MOVVconst {
2652 break
2653 }
2654 d := auxIntToInt64(v_0.AuxInt)
2655 v.reset(OpMIPS64MOVVconst)
2656 v.AuxInt = int64ToAuxInt(c & d)
2657 return true
2658 }
2659
2660
2661 for {
2662 c := auxIntToInt64(v.AuxInt)
2663 if v_0.Op != OpMIPS64ANDconst {
2664 break
2665 }
2666 d := auxIntToInt64(v_0.AuxInt)
2667 x := v_0.Args[0]
2668 v.reset(OpMIPS64ANDconst)
2669 v.AuxInt = int64ToAuxInt(c & d)
2670 v.AddArg(x)
2671 return true
2672 }
2673 return false
2674 }
2675 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
2676 v_2 := v.Args[2]
2677 v_1 := v.Args[1]
2678 v_0 := v.Args[0]
2679
2680
2681
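	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)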
2682 for {
2683 ptr := v_0
2684 if v_1.Op != OpMIPS64MOVVconst {
2685 break
2686 }
2687 c := auxIntToInt64(v_1.AuxInt)
2688 mem := v_2
2689 if !(is32Bit(c)) {
2690 break
2691 }
2692 v.reset(OpMIPS64LoweredAtomicAddconst32)
2693 v.AuxInt = int32ToAuxInt(int32(c))
2694 v.AddArg2(ptr, mem)
2695 return true
2696 }
2697 return false
2698 }
2699 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
2700 v_2 := v.Args[2]
2701 v_1 := v.Args[1]
2702 v_0 := v.Args[0]
2703
2704
2705
2706 for {
2707 ptr := v_0
2708 if v_1.Op != OpMIPS64MOVVconst {
2709 break
2710 }
2711 c := auxIntToInt64(v_1.AuxInt)
2712 mem := v_2
2713 if !(is32Bit(c)) {
2714 break
2715 }
2716 v.reset(OpMIPS64LoweredAtomicAddconst64)
2717 v.AuxInt = int64ToAuxInt(c)
2718 v.AddArg2(ptr, mem)
2719 return true
2720 }
2721 return false
2722 }
2723 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
2724 v_2 := v.Args[2]
2725 v_1 := v.Args[1]
2726 v_0 := v.Args[0]
2727
2728
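	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero32 ptr mem)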
2729 for {
2730 ptr := v_0
2731 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2732 break
2733 }
2734 mem := v_2
2735 v.reset(OpMIPS64LoweredAtomicStorezero32)
2736 v.AddArg2(ptr, mem)
2737 return true
2738 }
2739 return false
2740 }
2741 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
2742 v_2 := v.Args[2]
2743 v_1 := v.Args[1]
2744 v_0 := v.Args[0]
2745
2746
2747 for {
2748 ptr := v_0
2749 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2750 break
2751 }
2752 mem := v_2
2753 v.reset(OpMIPS64LoweredAtomicStorezero64)
2754 v.AddArg2(ptr, mem)
2755 return true
2756 }
2757 return false
2758 }
2759 func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v *Value) bool {
2760 v_1 := v.Args[1]
2761 v_0 := v.Args[0]
2762
2763
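	// match: (LoweredPanicBoundsCR [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx: p.C, Cy: c}} mem)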
2764 for {
2765 kind := auxIntToInt64(v.AuxInt)
2766 p := auxToPanicBoundsC(v.Aux)
2767 if v_0.Op != OpMIPS64MOVVconst {
2768 break
2769 }
2770 c := auxIntToInt64(v_0.AuxInt)
2771 mem := v_1
2772 v.reset(OpMIPS64LoweredPanicBoundsCC)
2773 v.AuxInt = int64ToAuxInt(kind)
2774 v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
2775 v.AddArg(mem)
2776 return true
2777 }
2778 return false
2779 }
2780 func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v *Value) bool {
2781 v_1 := v.Args[1]
2782 v_0 := v.Args[0]
2783
2784
2785 for {
2786 kind := auxIntToInt64(v.AuxInt)
2787 p := auxToPanicBoundsC(v.Aux)
2788 if v_0.Op != OpMIPS64MOVVconst {
2789 break
2790 }
2791 c := auxIntToInt64(v_0.AuxInt)
2792 mem := v_1
2793 v.reset(OpMIPS64LoweredPanicBoundsCC)
2794 v.AuxInt = int64ToAuxInt(kind)
2795 v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
2796 v.AddArg(mem)
2797 return true
2798 }
2799 return false
2800 }
2801 func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v *Value) bool {
2802 v_2 := v.Args[2]
2803 v_1 := v.Args[1]
2804 v_0 := v.Args[0]
2805
2806
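	// match: (LoweredPanicBoundsRR [kind] x (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C: c}} mem)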
2807 for {
2808 kind := auxIntToInt64(v.AuxInt)
2809 x := v_0
2810 if v_1.Op != OpMIPS64MOVVconst {
2811 break
2812 }
2813 c := auxIntToInt64(v_1.AuxInt)
2814 mem := v_2
2815 v.reset(OpMIPS64LoweredPanicBoundsRC)
2816 v.AuxInt = int64ToAuxInt(kind)
2817 v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
2818 v.AddArg2(x, mem)
2819 return true
2820 }
2821
2822
2823 for {
2824 kind := auxIntToInt64(v.AuxInt)
2825 if v_0.Op != OpMIPS64MOVVconst {
2826 break
2827 }
2828 c := auxIntToInt64(v_0.AuxInt)
2829 y := v_1
2830 mem := v_2
2831 v.reset(OpMIPS64LoweredPanicBoundsCR)
2832 v.AuxInt = int64ToAuxInt(kind)
2833 v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
2834 v.AddArg2(y, mem)
2835 return true
2836 }
2837 return false
2838 }
2839 func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
2840 v_1 := v.Args[1]
2841 v_0 := v.Args[0]
2842 b := v.Block
2843 config := b.Func.Config
2844
2845
2846
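	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)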
2847 for {
2848 off1 := auxIntToInt32(v.AuxInt)
2849 sym := auxToSym(v.Aux)
2850 if v_0.Op != OpMIPS64ADDVconst {
2851 break
2852 }
2853 off2 := auxIntToInt64(v_0.AuxInt)
2854 ptr := v_0.Args[0]
2855 mem := v_1
2856 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2857 break
2858 }
2859 v.reset(OpMIPS64MOVBUload)
2860 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2861 v.Aux = symToAux(sym)
2862 v.AddArg2(ptr, mem)
2863 return true
2864 }
2865
2866
2867
2868 for {
2869 off1 := auxIntToInt32(v.AuxInt)
2870 sym1 := auxToSym(v.Aux)
2871 if v_0.Op != OpMIPS64MOVVaddr {
2872 break
2873 }
2874 off2 := auxIntToInt32(v_0.AuxInt)
2875 sym2 := auxToSym(v_0.Aux)
2876 ptr := v_0.Args[0]
2877 mem := v_1
2878 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2879 break
2880 }
2881 v.reset(OpMIPS64MOVBUload)
2882 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2883 v.Aux = symToAux(mergeSym(sym1, sym2))
2884 v.AddArg2(ptr, mem)
2885 return true
2886 }
2887
2888
2889
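	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read8(sym, int64(off)))])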
2890 for {
2891 off := auxIntToInt32(v.AuxInt)
2892 sym := auxToSym(v.Aux)
2893 if v_0.Op != OpSB || !(symIsRO(sym)) {
2894 break
2895 }
2896 v.reset(OpMIPS64MOVVconst)
2897 v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
2898 return true
2899 }
2900 return false
2901 }
2902 func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
2903 v_0 := v.Args[0]
2904
2905
2906 for {
2907 x := v_0
2908 if x.Op != OpMIPS64MOVBUload {
2909 break
2910 }
2911 v.reset(OpMIPS64MOVVreg)
2912 v.AddArg(x)
2913 return true
2914 }
2915
2916
2917 for {
2918 x := v_0
2919 if x.Op != OpMIPS64MOVBUreg {
2920 break
2921 }
2922 v.reset(OpMIPS64MOVVreg)
2923 v.AddArg(x)
2924 return true
2925 }
2926
2927
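	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])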
2928 for {
2929 if v_0.Op != OpMIPS64MOVVconst {
2930 break
2931 }
2932 c := auxIntToInt64(v_0.AuxInt)
2933 v.reset(OpMIPS64MOVVconst)
2934 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
2935 return true
2936 }
2937 return false
2938 }
2939 func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
2940 v_1 := v.Args[1]
2941 v_0 := v.Args[0]
2942 b := v.Block
2943 config := b.Func.Config
2944
2945
2946
2947 for {
2948 off1 := auxIntToInt32(v.AuxInt)
2949 sym := auxToSym(v.Aux)
2950 if v_0.Op != OpMIPS64ADDVconst {
2951 break
2952 }
2953 off2 := auxIntToInt64(v_0.AuxInt)
2954 ptr := v_0.Args[0]
2955 mem := v_1
2956 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2957 break
2958 }
2959 v.reset(OpMIPS64MOVBload)
2960 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2961 v.Aux = symToAux(sym)
2962 v.AddArg2(ptr, mem)
2963 return true
2964 }
2965
2966
2967
2968 for {
2969 off1 := auxIntToInt32(v.AuxInt)
2970 sym1 := auxToSym(v.Aux)
2971 if v_0.Op != OpMIPS64MOVVaddr {
2972 break
2973 }
2974 off2 := auxIntToInt32(v_0.AuxInt)
2975 sym2 := auxToSym(v_0.Aux)
2976 ptr := v_0.Args[0]
2977 mem := v_1
2978 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2979 break
2980 }
2981 v.reset(OpMIPS64MOVBload)
2982 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2983 v.Aux = symToAux(mergeSym(sym1, sym2))
2984 v.AddArg2(ptr, mem)
2985 return true
2986 }
2987
2988
2989
2990 for {
2991 off := auxIntToInt32(v.AuxInt)
2992 sym := auxToSym(v.Aux)
2993 if v_0.Op != OpSB || !(symIsRO(sym)) {
2994 break
2995 }
2996 v.reset(OpMIPS64MOVVconst)
2997 v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
2998 return true
2999 }
3000 return false
3001 }
3002 func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
3003 v_0 := v.Args[0]
3004
3005
3006 for {
3007 x := v_0
3008 if x.Op != OpMIPS64MOVBload {
3009 break
3010 }
3011 v.reset(OpMIPS64MOVVreg)
3012 v.AddArg(x)
3013 return true
3014 }
3015
3016
3017 for {
3018 x := v_0
3019 if x.Op != OpMIPS64MOVBreg {
3020 break
3021 }
3022 v.reset(OpMIPS64MOVVreg)
3023 v.AddArg(x)
3024 return true
3025 }
3026
3027
3028 for {
3029 if v_0.Op != OpMIPS64MOVVconst {
3030 break
3031 }
3032 c := auxIntToInt64(v_0.AuxInt)
3033 v.reset(OpMIPS64MOVVconst)
3034 v.AuxInt = int64ToAuxInt(int64(int8(c)))
3035 return true
3036 }
3037 return false
3038 }
3039 func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
3040 v_2 := v.Args[2]
3041 v_1 := v.Args[1]
3042 v_0 := v.Args[0]
3043 b := v.Block
3044 config := b.Func.Config
3045
3046
3047
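	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)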
3048 for {
3049 off1 := auxIntToInt32(v.AuxInt)
3050 sym := auxToSym(v.Aux)
3051 if v_0.Op != OpMIPS64ADDVconst {
3052 break
3053 }
3054 off2 := auxIntToInt64(v_0.AuxInt)
3055 ptr := v_0.Args[0]
3056 val := v_1
3057 mem := v_2
3058 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3059 break
3060 }
3061 v.reset(OpMIPS64MOVBstore)
3062 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3063 v.Aux = symToAux(sym)
3064 v.AddArg3(ptr, val, mem)
3065 return true
3066 }
3067
3068
3069
3070 for {
3071 off1 := auxIntToInt32(v.AuxInt)
3072 sym1 := auxToSym(v.Aux)
3073 if v_0.Op != OpMIPS64MOVVaddr {
3074 break
3075 }
3076 off2 := auxIntToInt32(v_0.AuxInt)
3077 sym2 := auxToSym(v_0.Aux)
3078 ptr := v_0.Args[0]
3079 val := v_1
3080 mem := v_2
3081 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3082 break
3083 }
3084 v.reset(OpMIPS64MOVBstore)
3085 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3086 v.Aux = symToAux(mergeSym(sym1, sym2))
3087 v.AddArg3(ptr, val, mem)
3088 return true
3089 }
3090
3091
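	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)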
3092 for {
3093 off := auxIntToInt32(v.AuxInt)
3094 sym := auxToSym(v.Aux)
3095 ptr := v_0
3096 if v_1.Op != OpMIPS64MOVBreg {
3097 break
3098 }
3099 x := v_1.Args[0]
3100 mem := v_2
3101 v.reset(OpMIPS64MOVBstore)
3102 v.AuxInt = int32ToAuxInt(off)
3103 v.Aux = symToAux(sym)
3104 v.AddArg3(ptr, x, mem)
3105 return true
3106 }
3107
3108
3109 for {
3110 off := auxIntToInt32(v.AuxInt)
3111 sym := auxToSym(v.Aux)
3112 ptr := v_0
3113 if v_1.Op != OpMIPS64MOVBUreg {
3114 break
3115 }
3116 x := v_1.Args[0]
3117 mem := v_2
3118 v.reset(OpMIPS64MOVBstore)
3119 v.AuxInt = int32ToAuxInt(off)
3120 v.Aux = symToAux(sym)
3121 v.AddArg3(ptr, x, mem)
3122 return true
3123 }
3124
3125
3126 for {
3127 off := auxIntToInt32(v.AuxInt)
3128 sym := auxToSym(v.Aux)
3129 ptr := v_0
3130 if v_1.Op != OpMIPS64MOVHreg {
3131 break
3132 }
3133 x := v_1.Args[0]
3134 mem := v_2
3135 v.reset(OpMIPS64MOVBstore)
3136 v.AuxInt = int32ToAuxInt(off)
3137 v.Aux = symToAux(sym)
3138 v.AddArg3(ptr, x, mem)
3139 return true
3140 }
3141
3142
3143 for {
3144 off := auxIntToInt32(v.AuxInt)
3145 sym := auxToSym(v.Aux)
3146 ptr := v_0
3147 if v_1.Op != OpMIPS64MOVHUreg {
3148 break
3149 }
3150 x := v_1.Args[0]
3151 mem := v_2
3152 v.reset(OpMIPS64MOVBstore)
3153 v.AuxInt = int32ToAuxInt(off)
3154 v.Aux = symToAux(sym)
3155 v.AddArg3(ptr, x, mem)
3156 return true
3157 }
3158
3159
3160 for {
3161 off := auxIntToInt32(v.AuxInt)
3162 sym := auxToSym(v.Aux)
3163 ptr := v_0
3164 if v_1.Op != OpMIPS64MOVWreg {
3165 break
3166 }
3167 x := v_1.Args[0]
3168 mem := v_2
3169 v.reset(OpMIPS64MOVBstore)
3170 v.AuxInt = int32ToAuxInt(off)
3171 v.Aux = symToAux(sym)
3172 v.AddArg3(ptr, x, mem)
3173 return true
3174 }
3175
3176
3177 for {
3178 off := auxIntToInt32(v.AuxInt)
3179 sym := auxToSym(v.Aux)
3180 ptr := v_0
3181 if v_1.Op != OpMIPS64MOVWUreg {
3182 break
3183 }
3184 x := v_1.Args[0]
3185 mem := v_2
3186 v.reset(OpMIPS64MOVBstore)
3187 v.AuxInt = int32ToAuxInt(off)
3188 v.Aux = symToAux(sym)
3189 v.AddArg3(ptr, x, mem)
3190 return true
3191 }
3192 return false
3193 }
3194 func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
3195 v_1 := v.Args[1]
3196 v_0 := v.Args[0]
3197 b := v.Block
3198 config := b.Func.Config
3199
3200
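	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
	// result: (MOVVgpfp val)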
3201 for {
3202 off := auxIntToInt32(v.AuxInt)
3203 sym := auxToSym(v.Aux)
3204 ptr := v_0
3205 if v_1.Op != OpMIPS64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3206 break
3207 }
3208 val := v_1.Args[1]
3209 if ptr != v_1.Args[0] {
3210 break
3211 }
3212 v.reset(OpMIPS64MOVVgpfp)
3213 v.AddArg(val)
3214 return true
3215 }
3216
3217
3218
3219 for {
3220 off1 := auxIntToInt32(v.AuxInt)
3221 sym := auxToSym(v.Aux)
3222 if v_0.Op != OpMIPS64ADDVconst {
3223 break
3224 }
3225 off2 := auxIntToInt64(v_0.AuxInt)
3226 ptr := v_0.Args[0]
3227 mem := v_1
3228 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3229 break
3230 }
3231 v.reset(OpMIPS64MOVDload)
3232 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3233 v.Aux = symToAux(sym)
3234 v.AddArg2(ptr, mem)
3235 return true
3236 }
3237
3238
3239
3240 for {
3241 off1 := auxIntToInt32(v.AuxInt)
3242 sym1 := auxToSym(v.Aux)
3243 if v_0.Op != OpMIPS64MOVVaddr {
3244 break
3245 }
3246 off2 := auxIntToInt32(v_0.AuxInt)
3247 sym2 := auxToSym(v_0.Aux)
3248 ptr := v_0.Args[0]
3249 mem := v_1
3250 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3251 break
3252 }
3253 v.reset(OpMIPS64MOVDload)
3254 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3255 v.Aux = symToAux(mergeSym(sym1, sym2))
3256 v.AddArg2(ptr, mem)
3257 return true
3258 }
3259 return false
3260 }
3261 func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
3262 v_2 := v.Args[2]
3263 v_1 := v.Args[1]
3264 v_0 := v.Args[0]
3265 b := v.Block
3266 config := b.Func.Config
3267
3268
3269 for {
3270 off := auxIntToInt32(v.AuxInt)
3271 sym := auxToSym(v.Aux)
3272 ptr := v_0
3273 if v_1.Op != OpMIPS64MOVVgpfp {
3274 break
3275 }
3276 val := v_1.Args[0]
3277 mem := v_2
3278 v.reset(OpMIPS64MOVVstore)
3279 v.AuxInt = int32ToAuxInt(off)
3280 v.Aux = symToAux(sym)
3281 v.AddArg3(ptr, val, mem)
3282 return true
3283 }
3284
3285
3286
3287 for {
3288 off1 := auxIntToInt32(v.AuxInt)
3289 sym := auxToSym(v.Aux)
3290 if v_0.Op != OpMIPS64ADDVconst {
3291 break
3292 }
3293 off2 := auxIntToInt64(v_0.AuxInt)
3294 ptr := v_0.Args[0]
3295 val := v_1
3296 mem := v_2
3297 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3298 break
3299 }
3300 v.reset(OpMIPS64MOVDstore)
3301 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3302 v.Aux = symToAux(sym)
3303 v.AddArg3(ptr, val, mem)
3304 return true
3305 }
3306
3307
3308
3309 for {
3310 off1 := auxIntToInt32(v.AuxInt)
3311 sym1 := auxToSym(v.Aux)
3312 if v_0.Op != OpMIPS64MOVVaddr {
3313 break
3314 }
3315 off2 := auxIntToInt32(v_0.AuxInt)
3316 sym2 := auxToSym(v_0.Aux)
3317 ptr := v_0.Args[0]
3318 val := v_1
3319 mem := v_2
3320 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3321 break
3322 }
3323 v.reset(OpMIPS64MOVDstore)
3324 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3325 v.Aux = symToAux(mergeSym(sym1, sym2))
3326 v.AddArg3(ptr, val, mem)
3327 return true
3328 }
3329 return false
3330 }
3331 func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
3332 v_1 := v.Args[1]
3333 v_0 := v.Args[0]
3334 b := v.Block
3335 config := b.Func.Config
3336
3337
3338 for {
3339 off := auxIntToInt32(v.AuxInt)
3340 sym := auxToSym(v.Aux)
3341 ptr := v_0
3342 if v_1.Op != OpMIPS64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3343 break
3344 }
3345 val := v_1.Args[1]
3346 if ptr != v_1.Args[0] {
3347 break
3348 }
3349 v.reset(OpMIPS64MOVWgpfp)
3350 v.AddArg(val)
3351 return true
3352 }
3353
3354
3355
3356 for {
3357 off1 := auxIntToInt32(v.AuxInt)
3358 sym := auxToSym(v.Aux)
3359 if v_0.Op != OpMIPS64ADDVconst {
3360 break
3361 }
3362 off2 := auxIntToInt64(v_0.AuxInt)
3363 ptr := v_0.Args[0]
3364 mem := v_1
3365 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3366 break
3367 }
3368 v.reset(OpMIPS64MOVFload)
3369 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3370 v.Aux = symToAux(sym)
3371 v.AddArg2(ptr, mem)
3372 return true
3373 }
3374
3375
3376
3377 for {
3378 off1 := auxIntToInt32(v.AuxInt)
3379 sym1 := auxToSym(v.Aux)
3380 if v_0.Op != OpMIPS64MOVVaddr {
3381 break
3382 }
3383 off2 := auxIntToInt32(v_0.AuxInt)
3384 sym2 := auxToSym(v_0.Aux)
3385 ptr := v_0.Args[0]
3386 mem := v_1
3387 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3388 break
3389 }
3390 v.reset(OpMIPS64MOVFload)
3391 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3392 v.Aux = symToAux(mergeSym(sym1, sym2))
3393 v.AddArg2(ptr, mem)
3394 return true
3395 }
3396 return false
3397 }
3398 func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
3399 v_2 := v.Args[2]
3400 v_1 := v.Args[1]
3401 v_0 := v.Args[0]
3402 b := v.Block
3403 config := b.Func.Config
3404
3405
3406 for {
3407 off := auxIntToInt32(v.AuxInt)
3408 sym := auxToSym(v.Aux)
3409 ptr := v_0
3410 if v_1.Op != OpMIPS64MOVWgpfp {
3411 break
3412 }
3413 val := v_1.Args[0]
3414 mem := v_2
3415 v.reset(OpMIPS64MOVWstore)
3416 v.AuxInt = int32ToAuxInt(off)
3417 v.Aux = symToAux(sym)
3418 v.AddArg3(ptr, val, mem)
3419 return true
3420 }
3421
3422
3423
3424 for {
3425 off1 := auxIntToInt32(v.AuxInt)
3426 sym := auxToSym(v.Aux)
3427 if v_0.Op != OpMIPS64ADDVconst {
3428 break
3429 }
3430 off2 := auxIntToInt64(v_0.AuxInt)
3431 ptr := v_0.Args[0]
3432 val := v_1
3433 mem := v_2
3434 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3435 break
3436 }
3437 v.reset(OpMIPS64MOVFstore)
3438 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3439 v.Aux = symToAux(sym)
3440 v.AddArg3(ptr, val, mem)
3441 return true
3442 }
3443
3444
3445
3446 for {
3447 off1 := auxIntToInt32(v.AuxInt)
3448 sym1 := auxToSym(v.Aux)
3449 if v_0.Op != OpMIPS64MOVVaddr {
3450 break
3451 }
3452 off2 := auxIntToInt32(v_0.AuxInt)
3453 sym2 := auxToSym(v_0.Aux)
3454 ptr := v_0.Args[0]
3455 val := v_1
3456 mem := v_2
3457 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3458 break
3459 }
3460 v.reset(OpMIPS64MOVFstore)
3461 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3462 v.Aux = symToAux(mergeSym(sym1, sym2))
3463 v.AddArg3(ptr, val, mem)
3464 return true
3465 }
3466 return false
3467 }
3468 func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
3469 v_1 := v.Args[1]
3470 v_0 := v.Args[0]
3471 b := v.Block
3472 config := b.Func.Config
3473
3474
3475
3476 for {
3477 off1 := auxIntToInt32(v.AuxInt)
3478 sym := auxToSym(v.Aux)
3479 if v_0.Op != OpMIPS64ADDVconst {
3480 break
3481 }
3482 off2 := auxIntToInt64(v_0.AuxInt)
3483 ptr := v_0.Args[0]
3484 mem := v_1
3485 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3486 break
3487 }
3488 v.reset(OpMIPS64MOVHUload)
3489 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3490 v.Aux = symToAux(sym)
3491 v.AddArg2(ptr, mem)
3492 return true
3493 }
3494
3495
3496
3497 for {
3498 off1 := auxIntToInt32(v.AuxInt)
3499 sym1 := auxToSym(v.Aux)
3500 if v_0.Op != OpMIPS64MOVVaddr {
3501 break
3502 }
3503 off2 := auxIntToInt32(v_0.AuxInt)
3504 sym2 := auxToSym(v_0.Aux)
3505 ptr := v_0.Args[0]
3506 mem := v_1
3507 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3508 break
3509 }
3510 v.reset(OpMIPS64MOVHUload)
3511 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3512 v.Aux = symToAux(mergeSym(sym1, sym2))
3513 v.AddArg2(ptr, mem)
3514 return true
3515 }
3516
3517
3518
3519 for {
3520 off := auxIntToInt32(v.AuxInt)
3521 sym := auxToSym(v.Aux)
3522 if v_0.Op != OpSB || !(symIsRO(sym)) {
3523 break
3524 }
3525 v.reset(OpMIPS64MOVVconst)
3526 v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3527 return true
3528 }
3529 return false
3530 }
3531 func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
3532 v_0 := v.Args[0]
3533
3534
3535 for {
3536 x := v_0
3537 if x.Op != OpMIPS64MOVBUload {
3538 break
3539 }
3540 v.reset(OpMIPS64MOVVreg)
3541 v.AddArg(x)
3542 return true
3543 }
3544
3545
3546 for {
3547 x := v_0
3548 if x.Op != OpMIPS64MOVHUload {
3549 break
3550 }
3551 v.reset(OpMIPS64MOVVreg)
3552 v.AddArg(x)
3553 return true
3554 }
3555
3556
3557 for {
3558 x := v_0
3559 if x.Op != OpMIPS64MOVBUreg {
3560 break
3561 }
3562 v.reset(OpMIPS64MOVVreg)
3563 v.AddArg(x)
3564 return true
3565 }
3566
3567
3568 for {
3569 x := v_0
3570 if x.Op != OpMIPS64MOVHUreg {
3571 break
3572 }
3573 v.reset(OpMIPS64MOVVreg)
3574 v.AddArg(x)
3575 return true
3576 }
3577
3578
3579 for {
3580 if v_0.Op != OpMIPS64MOVVconst {
3581 break
3582 }
3583 c := auxIntToInt64(v_0.AuxInt)
3584 v.reset(OpMIPS64MOVVconst)
3585 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
3586 return true
3587 }
3588 return false
3589 }
3590 func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
3591 v_1 := v.Args[1]
3592 v_0 := v.Args[0]
3593 b := v.Block
3594 config := b.Func.Config
3595
3596
3597
3598 for {
3599 off1 := auxIntToInt32(v.AuxInt)
3600 sym := auxToSym(v.Aux)
3601 if v_0.Op != OpMIPS64ADDVconst {
3602 break
3603 }
3604 off2 := auxIntToInt64(v_0.AuxInt)
3605 ptr := v_0.Args[0]
3606 mem := v_1
3607 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3608 break
3609 }
3610 v.reset(OpMIPS64MOVHload)
3611 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3612 v.Aux = symToAux(sym)
3613 v.AddArg2(ptr, mem)
3614 return true
3615 }
3616
3617
3618
3619 for {
3620 off1 := auxIntToInt32(v.AuxInt)
3621 sym1 := auxToSym(v.Aux)
3622 if v_0.Op != OpMIPS64MOVVaddr {
3623 break
3624 }
3625 off2 := auxIntToInt32(v_0.AuxInt)
3626 sym2 := auxToSym(v_0.Aux)
3627 ptr := v_0.Args[0]
3628 mem := v_1
3629 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3630 break
3631 }
3632 v.reset(OpMIPS64MOVHload)
3633 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3634 v.Aux = symToAux(mergeSym(sym1, sym2))
3635 v.AddArg2(ptr, mem)
3636 return true
3637 }
3638
3639
3640
3641 for {
3642 off := auxIntToInt32(v.AuxInt)
3643 sym := auxToSym(v.Aux)
3644 if v_0.Op != OpSB || !(symIsRO(sym)) {
3645 break
3646 }
3647 v.reset(OpMIPS64MOVVconst)
3648 v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
3649 return true
3650 }
3651 return false
3652 }
3653 func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
3654 v_0 := v.Args[0]
3655
3656
3657 for {
3658 x := v_0
3659 if x.Op != OpMIPS64MOVBload {
3660 break
3661 }
3662 v.reset(OpMIPS64MOVVreg)
3663 v.AddArg(x)
3664 return true
3665 }
3666
3667
3668 for {
3669 x := v_0
3670 if x.Op != OpMIPS64MOVBUload {
3671 break
3672 }
3673 v.reset(OpMIPS64MOVVreg)
3674 v.AddArg(x)
3675 return true
3676 }
3677
3678
3679 for {
3680 x := v_0
3681 if x.Op != OpMIPS64MOVHload {
3682 break
3683 }
3684 v.reset(OpMIPS64MOVVreg)
3685 v.AddArg(x)
3686 return true
3687 }
3688
3689
3690 for {
3691 x := v_0
3692 if x.Op != OpMIPS64MOVBreg {
3693 break
3694 }
3695 v.reset(OpMIPS64MOVVreg)
3696 v.AddArg(x)
3697 return true
3698 }
3699
3700
3701 for {
3702 x := v_0
3703 if x.Op != OpMIPS64MOVBUreg {
3704 break
3705 }
3706 v.reset(OpMIPS64MOVVreg)
3707 v.AddArg(x)
3708 return true
3709 }
3710
3711
3712 for {
3713 x := v_0
3714 if x.Op != OpMIPS64MOVHreg {
3715 break
3716 }
3717 v.reset(OpMIPS64MOVVreg)
3718 v.AddArg(x)
3719 return true
3720 }
3721
3722
3723 for {
3724 if v_0.Op != OpMIPS64MOVVconst {
3725 break
3726 }
3727 c := auxIntToInt64(v_0.AuxInt)
3728 v.reset(OpMIPS64MOVVconst)
3729 v.AuxInt = int64ToAuxInt(int64(int16(c)))
3730 return true
3731 }
3732 return false
3733 }
3734 func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
3735 v_2 := v.Args[2]
3736 v_1 := v.Args[1]
3737 v_0 := v.Args[0]
3738 b := v.Block
3739 config := b.Func.Config
3740
3741
3742
3743 for {
3744 off1 := auxIntToInt32(v.AuxInt)
3745 sym := auxToSym(v.Aux)
3746 if v_0.Op != OpMIPS64ADDVconst {
3747 break
3748 }
3749 off2 := auxIntToInt64(v_0.AuxInt)
3750 ptr := v_0.Args[0]
3751 val := v_1
3752 mem := v_2
3753 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3754 break
3755 }
3756 v.reset(OpMIPS64MOVHstore)
3757 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3758 v.Aux = symToAux(sym)
3759 v.AddArg3(ptr, val, mem)
3760 return true
3761 }
3762
3763
3764
3765 for {
3766 off1 := auxIntToInt32(v.AuxInt)
3767 sym1 := auxToSym(v.Aux)
3768 if v_0.Op != OpMIPS64MOVVaddr {
3769 break
3770 }
3771 off2 := auxIntToInt32(v_0.AuxInt)
3772 sym2 := auxToSym(v_0.Aux)
3773 ptr := v_0.Args[0]
3774 val := v_1
3775 mem := v_2
3776 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3777 break
3778 }
3779 v.reset(OpMIPS64MOVHstore)
3780 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3781 v.Aux = symToAux(mergeSym(sym1, sym2))
3782 v.AddArg3(ptr, val, mem)
3783 return true
3784 }
3785
3786
3787 for {
3788 off := auxIntToInt32(v.AuxInt)
3789 sym := auxToSym(v.Aux)
3790 ptr := v_0
3791 if v_1.Op != OpMIPS64MOVHreg {
3792 break
3793 }
3794 x := v_1.Args[0]
3795 mem := v_2
3796 v.reset(OpMIPS64MOVHstore)
3797 v.AuxInt = int32ToAuxInt(off)
3798 v.Aux = symToAux(sym)
3799 v.AddArg3(ptr, x, mem)
3800 return true
3801 }
3802
3803
3804 for {
3805 off := auxIntToInt32(v.AuxInt)
3806 sym := auxToSym(v.Aux)
3807 ptr := v_0
3808 if v_1.Op != OpMIPS64MOVHUreg {
3809 break
3810 }
3811 x := v_1.Args[0]
3812 mem := v_2
3813 v.reset(OpMIPS64MOVHstore)
3814 v.AuxInt = int32ToAuxInt(off)
3815 v.Aux = symToAux(sym)
3816 v.AddArg3(ptr, x, mem)
3817 return true
3818 }
3819
3820
3821 for {
3822 off := auxIntToInt32(v.AuxInt)
3823 sym := auxToSym(v.Aux)
3824 ptr := v_0
3825 if v_1.Op != OpMIPS64MOVWreg {
3826 break
3827 }
3828 x := v_1.Args[0]
3829 mem := v_2
3830 v.reset(OpMIPS64MOVHstore)
3831 v.AuxInt = int32ToAuxInt(off)
3832 v.Aux = symToAux(sym)
3833 v.AddArg3(ptr, x, mem)
3834 return true
3835 }
3836
3837
3838 for {
3839 off := auxIntToInt32(v.AuxInt)
3840 sym := auxToSym(v.Aux)
3841 ptr := v_0
3842 if v_1.Op != OpMIPS64MOVWUreg {
3843 break
3844 }
3845 x := v_1.Args[0]
3846 mem := v_2
3847 v.reset(OpMIPS64MOVHstore)
3848 v.AuxInt = int32ToAuxInt(off)
3849 v.Aux = symToAux(sym)
3850 v.AddArg3(ptr, x, mem)
3851 return true
3852 }
3853 return false
3854 }
3855 func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
3856 v_1 := v.Args[1]
3857 v_0 := v.Args[0]
3858 b := v.Block
3859 config := b.Func.Config
3860
3861
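	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (MOVVfpgp val)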
3862 for {
3863 off := auxIntToInt32(v.AuxInt)
3864 sym := auxToSym(v.Aux)
3865 ptr := v_0
3866 if v_1.Op != OpMIPS64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3867 break
3868 }
3869 val := v_1.Args[1]
3870 if ptr != v_1.Args[0] {
3871 break
3872 }
3873 v.reset(OpMIPS64MOVVfpgp)
3874 v.AddArg(val)
3875 return true
3876 }
3877
3878
3879
3880 for {
3881 off1 := auxIntToInt32(v.AuxInt)
3882 sym := auxToSym(v.Aux)
3883 if v_0.Op != OpMIPS64ADDVconst {
3884 break
3885 }
3886 off2 := auxIntToInt64(v_0.AuxInt)
3887 ptr := v_0.Args[0]
3888 mem := v_1
3889 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3890 break
3891 }
3892 v.reset(OpMIPS64MOVVload)
3893 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3894 v.Aux = symToAux(sym)
3895 v.AddArg2(ptr, mem)
3896 return true
3897 }
3898
3899
3900
3901 for {
3902 off1 := auxIntToInt32(v.AuxInt)
3903 sym1 := auxToSym(v.Aux)
3904 if v_0.Op != OpMIPS64MOVVaddr {
3905 break
3906 }
3907 off2 := auxIntToInt32(v_0.AuxInt)
3908 sym2 := auxToSym(v_0.Aux)
3909 ptr := v_0.Args[0]
3910 mem := v_1
3911 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3912 break
3913 }
3914 v.reset(OpMIPS64MOVVload)
3915 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3916 v.Aux = symToAux(mergeSym(sym1, sym2))
3917 v.AddArg2(ptr, mem)
3918 return true
3919 }
3920
3921
3922
3923 for {
3924 off := auxIntToInt32(v.AuxInt)
3925 sym := auxToSym(v.Aux)
3926 if v_0.Op != OpSB || !(symIsRO(sym)) {
3927 break
3928 }
3929 v.reset(OpMIPS64MOVVconst)
3930 v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3931 return true
3932 }
3933 return false
3934 }
3935 func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
3936 v_0 := v.Args[0]
3937
3938
3939 for {
3940 if v_0.Op != OpMIPS64MOVVconst {
3941 break
3942 }
3943 c := auxIntToInt64(v_0.AuxInt)
3944 v.reset(OpMIPS64MOVVconst)
3945 v.AuxInt = int64ToAuxInt(c)
3946 return true
3947 }
3948 return false
3949 }
3950 func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
3951 v_0 := v.Args[0]
3952
3953
3954
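	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)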
3955 for {
3956 x := v_0
3957 if !(x.Uses == 1) {
3958 break
3959 }
3960 v.reset(OpMIPS64MOVVnop)
3961 v.AddArg(x)
3962 return true
3963 }
3964
3965
3966 for {
3967 if v_0.Op != OpMIPS64MOVVconst {
3968 break
3969 }
3970 c := auxIntToInt64(v_0.AuxInt)
3971 v.reset(OpMIPS64MOVVconst)
3972 v.AuxInt = int64ToAuxInt(c)
3973 return true
3974 }
3975 return false
3976 }
3977 func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
3978 v_2 := v.Args[2]
3979 v_1 := v.Args[1]
3980 v_0 := v.Args[0]
3981 b := v.Block
3982 config := b.Func.Config
3983
3984
3985 for {
3986 off := auxIntToInt32(v.AuxInt)
3987 sym := auxToSym(v.Aux)
3988 ptr := v_0
3989 if v_1.Op != OpMIPS64MOVVfpgp {
3990 break
3991 }
3992 val := v_1.Args[0]
3993 mem := v_2
3994 v.reset(OpMIPS64MOVDstore)
3995 v.AuxInt = int32ToAuxInt(off)
3996 v.Aux = symToAux(sym)
3997 v.AddArg3(ptr, val, mem)
3998 return true
3999 }
4000
4001
4002
4003 for {
4004 off1 := auxIntToInt32(v.AuxInt)
4005 sym := auxToSym(v.Aux)
4006 if v_0.Op != OpMIPS64ADDVconst {
4007 break
4008 }
4009 off2 := auxIntToInt64(v_0.AuxInt)
4010 ptr := v_0.Args[0]
4011 val := v_1
4012 mem := v_2
4013 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4014 break
4015 }
4016 v.reset(OpMIPS64MOVVstore)
4017 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4018 v.Aux = symToAux(sym)
4019 v.AddArg3(ptr, val, mem)
4020 return true
4021 }
4022
4023
4024
4025 for {
4026 off1 := auxIntToInt32(v.AuxInt)
4027 sym1 := auxToSym(v.Aux)
4028 if v_0.Op != OpMIPS64MOVVaddr {
4029 break
4030 }
4031 off2 := auxIntToInt32(v_0.AuxInt)
4032 sym2 := auxToSym(v_0.Aux)
4033 ptr := v_0.Args[0]
4034 val := v_1
4035 mem := v_2
4036 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4037 break
4038 }
4039 v.reset(OpMIPS64MOVVstore)
4040 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4041 v.Aux = symToAux(mergeSym(sym1, sym2))
4042 v.AddArg3(ptr, val, mem)
4043 return true
4044 }
4045 return false
4046 }
4047 func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
4048 v_1 := v.Args[1]
4049 v_0 := v.Args[0]
4050 b := v.Block
4051 config := b.Func.Config
4052 typ := &b.Func.Config.Types
4053
4054
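	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))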
4055 for {
4056 off := auxIntToInt32(v.AuxInt)
4057 sym := auxToSym(v.Aux)
4058 ptr := v_0
4059 if v_1.Op != OpMIPS64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4060 break
4061 }
4062 val := v_1.Args[1]
4063 if ptr != v_1.Args[0] {
4064 break
4065 }
4066 v.reset(OpZeroExt32to64)
4067 v0 := b.NewValue0(v_1.Pos, OpMIPS64MOVWfpgp, typ.Float32)
4068 v0.AddArg(val)
4069 v.AddArg(v0)
4070 return true
4071 }
4072
4073
4074
4075 for {
4076 off1 := auxIntToInt32(v.AuxInt)
4077 sym := auxToSym(v.Aux)
4078 if v_0.Op != OpMIPS64ADDVconst {
4079 break
4080 }
4081 off2 := auxIntToInt64(v_0.AuxInt)
4082 ptr := v_0.Args[0]
4083 mem := v_1
4084 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4085 break
4086 }
4087 v.reset(OpMIPS64MOVWUload)
4088 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4089 v.Aux = symToAux(sym)
4090 v.AddArg2(ptr, mem)
4091 return true
4092 }
4093
4094
4095
4096 for {
4097 off1 := auxIntToInt32(v.AuxInt)
4098 sym1 := auxToSym(v.Aux)
4099 if v_0.Op != OpMIPS64MOVVaddr {
4100 break
4101 }
4102 off2 := auxIntToInt32(v_0.AuxInt)
4103 sym2 := auxToSym(v_0.Aux)
4104 ptr := v_0.Args[0]
4105 mem := v_1
4106 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4107 break
4108 }
4109 v.reset(OpMIPS64MOVWUload)
4110 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4111 v.Aux = symToAux(mergeSym(sym1, sym2))
4112 v.AddArg2(ptr, mem)
4113 return true
4114 }
4115
4116
4117
4118 for {
4119 off := auxIntToInt32(v.AuxInt)
4120 sym := auxToSym(v.Aux)
4121 if v_0.Op != OpSB || !(symIsRO(sym)) {
4122 break
4123 }
4124 v.reset(OpMIPS64MOVVconst)
4125 v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
4126 return true
4127 }
4128 return false
4129 }
4130 func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
4131 v_0 := v.Args[0]
4132
4133
4134 for {
4135 x := v_0
4136 if x.Op != OpMIPS64MOVBUload {
4137 break
4138 }
4139 v.reset(OpMIPS64MOVVreg)
4140 v.AddArg(x)
4141 return true
4142 }
4143
4144
4145 for {
4146 x := v_0
4147 if x.Op != OpMIPS64MOVHUload {
4148 break
4149 }
4150 v.reset(OpMIPS64MOVVreg)
4151 v.AddArg(x)
4152 return true
4153 }
4154
4155
4156 for {
4157 x := v_0
4158 if x.Op != OpMIPS64MOVWUload {
4159 break
4160 }
4161 v.reset(OpMIPS64MOVVreg)
4162 v.AddArg(x)
4163 return true
4164 }
4165
4166
4167 for {
4168 x := v_0
4169 if x.Op != OpMIPS64MOVBUreg {
4170 break
4171 }
4172 v.reset(OpMIPS64MOVVreg)
4173 v.AddArg(x)
4174 return true
4175 }
4176
4177
4178 for {
4179 x := v_0
4180 if x.Op != OpMIPS64MOVHUreg {
4181 break
4182 }
4183 v.reset(OpMIPS64MOVVreg)
4184 v.AddArg(x)
4185 return true
4186 }
4187
4188
4189 for {
4190 x := v_0
4191 if x.Op != OpMIPS64MOVWUreg {
4192 break
4193 }
4194 v.reset(OpMIPS64MOVVreg)
4195 v.AddArg(x)
4196 return true
4197 }
4198
4199
4200 for {
4201 if v_0.Op != OpMIPS64MOVVconst {
4202 break
4203 }
4204 c := auxIntToInt64(v_0.AuxInt)
4205 v.reset(OpMIPS64MOVVconst)
4206 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
4207 return true
4208 }
4209 return false
4210 }
4211 func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
4212 v_1 := v.Args[1]
4213 v_0 := v.Args[0]
4214 b := v.Block
4215 config := b.Func.Config
4216
4217
4218
4219 for {
4220 off1 := auxIntToInt32(v.AuxInt)
4221 sym := auxToSym(v.Aux)
4222 if v_0.Op != OpMIPS64ADDVconst {
4223 break
4224 }
4225 off2 := auxIntToInt64(v_0.AuxInt)
4226 ptr := v_0.Args[0]
4227 mem := v_1
4228 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4229 break
4230 }
4231 v.reset(OpMIPS64MOVWload)
4232 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4233 v.Aux = symToAux(sym)
4234 v.AddArg2(ptr, mem)
4235 return true
4236 }
4237
4238
4239
4240 for {
4241 off1 := auxIntToInt32(v.AuxInt)
4242 sym1 := auxToSym(v.Aux)
4243 if v_0.Op != OpMIPS64MOVVaddr {
4244 break
4245 }
4246 off2 := auxIntToInt32(v_0.AuxInt)
4247 sym2 := auxToSym(v_0.Aux)
4248 ptr := v_0.Args[0]
4249 mem := v_1
4250 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4251 break
4252 }
4253 v.reset(OpMIPS64MOVWload)
4254 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4255 v.Aux = symToAux(mergeSym(sym1, sym2))
4256 v.AddArg2(ptr, mem)
4257 return true
4258 }
4259
4260
4261
4262 for {
4263 off := auxIntToInt32(v.AuxInt)
4264 sym := auxToSym(v.Aux)
4265 if v_0.Op != OpSB || !(symIsRO(sym)) {
4266 break
4267 }
4268 v.reset(OpMIPS64MOVVconst)
4269 v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
4270 return true
4271 }
4272 return false
4273 }
4274 func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
4275 v_0 := v.Args[0]
4276
4277
4278 for {
4279 x := v_0
4280 if x.Op != OpMIPS64MOVBload {
4281 break
4282 }
4283 v.reset(OpMIPS64MOVVreg)
4284 v.AddArg(x)
4285 return true
4286 }
4287
4288
4289 for {
4290 x := v_0
4291 if x.Op != OpMIPS64MOVBUload {
4292 break
4293 }
4294 v.reset(OpMIPS64MOVVreg)
4295 v.AddArg(x)
4296 return true
4297 }
4298
4299
4300 for {
4301 x := v_0
4302 if x.Op != OpMIPS64MOVHload {
4303 break
4304 }
4305 v.reset(OpMIPS64MOVVreg)
4306 v.AddArg(x)
4307 return true
4308 }
4309
4310
4311 for {
4312 x := v_0
4313 if x.Op != OpMIPS64MOVHUload {
4314 break
4315 }
4316 v.reset(OpMIPS64MOVVreg)
4317 v.AddArg(x)
4318 return true
4319 }
4320
4321
4322 for {
4323 x := v_0
4324 if x.Op != OpMIPS64MOVWload {
4325 break
4326 }
4327 v.reset(OpMIPS64MOVVreg)
4328 v.AddArg(x)
4329 return true
4330 }
4331
4332
4333 for {
4334 x := v_0
4335 if x.Op != OpMIPS64MOVBreg {
4336 break
4337 }
4338 v.reset(OpMIPS64MOVVreg)
4339 v.AddArg(x)
4340 return true
4341 }
4342
4343
4344 for {
4345 x := v_0
4346 if x.Op != OpMIPS64MOVBUreg {
4347 break
4348 }
4349 v.reset(OpMIPS64MOVVreg)
4350 v.AddArg(x)
4351 return true
4352 }
4353
4354
4355 for {
4356 x := v_0
4357 if x.Op != OpMIPS64MOVHreg {
4358 break
4359 }
4360 v.reset(OpMIPS64MOVVreg)
4361 v.AddArg(x)
4362 return true
4363 }
4364
4365
4366 for {
4367 x := v_0
4368 if x.Op != OpMIPS64MOVWreg {
4369 break
4370 }
4371 v.reset(OpMIPS64MOVVreg)
4372 v.AddArg(x)
4373 return true
4374 }
4375
4376
4377 for {
4378 if v_0.Op != OpMIPS64MOVVconst {
4379 break
4380 }
4381 c := auxIntToInt64(v_0.AuxInt)
4382 v.reset(OpMIPS64MOVVconst)
4383 v.AuxInt = int64ToAuxInt(int64(int32(c)))
4384 return true
4385 }
4386 return false
4387 }
4388 func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
4389 v_2 := v.Args[2]
4390 v_1 := v.Args[1]
4391 v_0 := v.Args[0]
4392 b := v.Block
4393 config := b.Func.Config
4394
4395
4396 for {
4397 off := auxIntToInt32(v.AuxInt)
4398 sym := auxToSym(v.Aux)
4399 ptr := v_0
4400 if v_1.Op != OpMIPS64MOVWfpgp {
4401 break
4402 }
4403 val := v_1.Args[0]
4404 mem := v_2
4405 v.reset(OpMIPS64MOVFstore)
4406 v.AuxInt = int32ToAuxInt(off)
4407 v.Aux = symToAux(sym)
4408 v.AddArg3(ptr, val, mem)
4409 return true
4410 }
4411
4412
4413
4414 for {
4415 off1 := auxIntToInt32(v.AuxInt)
4416 sym := auxToSym(v.Aux)
4417 if v_0.Op != OpMIPS64ADDVconst {
4418 break
4419 }
4420 off2 := auxIntToInt64(v_0.AuxInt)
4421 ptr := v_0.Args[0]
4422 val := v_1
4423 mem := v_2
4424 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4425 break
4426 }
4427 v.reset(OpMIPS64MOVWstore)
4428 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4429 v.Aux = symToAux(sym)
4430 v.AddArg3(ptr, val, mem)
4431 return true
4432 }
4433
4434
4435
4436 for {
4437 off1 := auxIntToInt32(v.AuxInt)
4438 sym1 := auxToSym(v.Aux)
4439 if v_0.Op != OpMIPS64MOVVaddr {
4440 break
4441 }
4442 off2 := auxIntToInt32(v_0.AuxInt)
4443 sym2 := auxToSym(v_0.Aux)
4444 ptr := v_0.Args[0]
4445 val := v_1
4446 mem := v_2
4447 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4448 break
4449 }
4450 v.reset(OpMIPS64MOVWstore)
4451 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4452 v.Aux = symToAux(mergeSym(sym1, sym2))
4453 v.AddArg3(ptr, val, mem)
4454 return true
4455 }
4456
4457
4458 for {
4459 off := auxIntToInt32(v.AuxInt)
4460 sym := auxToSym(v.Aux)
4461 ptr := v_0
4462 if v_1.Op != OpMIPS64MOVWreg {
4463 break
4464 }
4465 x := v_1.Args[0]
4466 mem := v_2
4467 v.reset(OpMIPS64MOVWstore)
4468 v.AuxInt = int32ToAuxInt(off)
4469 v.Aux = symToAux(sym)
4470 v.AddArg3(ptr, x, mem)
4471 return true
4472 }
4473
4474
4475 for {
4476 off := auxIntToInt32(v.AuxInt)
4477 sym := auxToSym(v.Aux)
4478 ptr := v_0
4479 if v_1.Op != OpMIPS64MOVWUreg {
4480 break
4481 }
4482 x := v_1.Args[0]
4483 mem := v_2
4484 v.reset(OpMIPS64MOVWstore)
4485 v.AuxInt = int32ToAuxInt(off)
4486 v.Aux = symToAux(sym)
4487 v.AddArg3(ptr, x, mem)
4488 return true
4489 }
4490 return false
4491 }
4492 func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
4493 v_0 := v.Args[0]
4494
4495
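	// match: (NEGV (SUBV x y))
	// result: (SUBV y x)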
4496 for {
4497 if v_0.Op != OpMIPS64SUBV {
4498 break
4499 }
4500 y := v_0.Args[1]
4501 x := v_0.Args[0]
4502 v.reset(OpMIPS64SUBV)
4503 v.AddArg2(y, x)
4504 return true
4505 }
4506
4507
4508 for {
4509 if v_0.Op != OpMIPS64NEGV {
4510 break
4511 }
4512 x := v_0.Args[0]
4513 v.copyOf(x)
4514 return true
4515 }
4516
4517
4518 for {
4519 if v_0.Op != OpMIPS64MOVVconst {
4520 break
4521 }
4522 c := auxIntToInt64(v_0.AuxInt)
4523 v.reset(OpMIPS64MOVVconst)
4524 v.AuxInt = int64ToAuxInt(-c)
4525 return true
4526 }
4527 return false
4528 }
4529 func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
4530 v_1 := v.Args[1]
4531 v_0 := v.Args[0]
4532
4533
4534
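	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)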
4535 for {
4536 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4537 x := v_0
4538 if v_1.Op != OpMIPS64MOVVconst {
4539 continue
4540 }
4541 c := auxIntToInt64(v_1.AuxInt)
4542 if !(is32Bit(c)) {
4543 continue
4544 }
4545 v.reset(OpMIPS64NORconst)
4546 v.AuxInt = int64ToAuxInt(c)
4547 v.AddArg(x)
4548 return true
4549 }
4550 break
4551 }
4552 return false
4553 }
4554 func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
4555 v_0 := v.Args[0]
4556
4557
4558 for {
4559 c := auxIntToInt64(v.AuxInt)
4560 if v_0.Op != OpMIPS64MOVVconst {
4561 break
4562 }
4563 d := auxIntToInt64(v_0.AuxInt)
4564 v.reset(OpMIPS64MOVVconst)
4565 v.AuxInt = int64ToAuxInt(^(c | d))
4566 return true
4567 }
4568 return false
4569 }
4570 func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
4571 v_1 := v.Args[1]
4572 v_0 := v.Args[0]
4573
4574
4575
4576 for {
4577 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4578 x := v_0
4579 if v_1.Op != OpMIPS64MOVVconst {
4580 continue
4581 }
4582 c := auxIntToInt64(v_1.AuxInt)
4583 if !(is32Bit(c)) {
4584 continue
4585 }
4586 v.reset(OpMIPS64ORconst)
4587 v.AuxInt = int64ToAuxInt(c)
4588 v.AddArg(x)
4589 return true
4590 }
4591 break
4592 }
4593
4594
4595 for {
4596 x := v_0
4597 if x != v_1 {
4598 break
4599 }
4600 v.copyOf(x)
4601 return true
4602 }
4603 return false
4604 }
4605 func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
4606 v_0 := v.Args[0]
4607
4608
4609 for {
4610 if auxIntToInt64(v.AuxInt) != 0 {
4611 break
4612 }
4613 x := v_0
4614 v.copyOf(x)
4615 return true
4616 }
4617
4618
4619 for {
4620 if auxIntToInt64(v.AuxInt) != -1 {
4621 break
4622 }
4623 v.reset(OpMIPS64MOVVconst)
4624 v.AuxInt = int64ToAuxInt(-1)
4625 return true
4626 }
4627
4628
4629 for {
4630 c := auxIntToInt64(v.AuxInt)
4631 if v_0.Op != OpMIPS64MOVVconst {
4632 break
4633 }
4634 d := auxIntToInt64(v_0.AuxInt)
4635 v.reset(OpMIPS64MOVVconst)
4636 v.AuxInt = int64ToAuxInt(c | d)
4637 return true
4638 }
4639
4640
4641
4642 for {
4643 c := auxIntToInt64(v.AuxInt)
4644 if v_0.Op != OpMIPS64ORconst {
4645 break
4646 }
4647 d := auxIntToInt64(v_0.AuxInt)
4648 x := v_0.Args[0]
4649 if !(is32Bit(c | d)) {
4650 break
4651 }
4652 v.reset(OpMIPS64ORconst)
4653 v.AuxInt = int64ToAuxInt(c | d)
4654 v.AddArg(x)
4655 return true
4656 }
4657 return false
4658 }
4659 func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
4660 v_1 := v.Args[1]
4661 v_0 := v.Args[0]
4662
4663
4664
4665 for {
4666 if v_0.Op != OpMIPS64MOVVconst {
4667 break
4668 }
4669 c := auxIntToInt64(v_0.AuxInt)
4670 x := v_1
4671 if !(is32Bit(c)) {
4672 break
4673 }
4674 v.reset(OpMIPS64SGTconst)
4675 v.AuxInt = int64ToAuxInt(c)
4676 v.AddArg(x)
4677 return true
4678 }
4679
4680
4681 for {
4682 x := v_0
4683 if x != v_1 {
4684 break
4685 }
4686 v.reset(OpMIPS64MOVVconst)
4687 v.AuxInt = int64ToAuxInt(0)
4688 return true
4689 }
4690 return false
4691 }
4692 func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
4693 v_1 := v.Args[1]
4694 v_0 := v.Args[0]
4695
4696
4697
4698 for {
4699 if v_0.Op != OpMIPS64MOVVconst {
4700 break
4701 }
4702 c := auxIntToInt64(v_0.AuxInt)
4703 x := v_1
4704 if !(is32Bit(c)) {
4705 break
4706 }
4707 v.reset(OpMIPS64SGTUconst)
4708 v.AuxInt = int64ToAuxInt(c)
4709 v.AddArg(x)
4710 return true
4711 }
4712
4713
4714 for {
4715 x := v_0
4716 if x != v_1 {
4717 break
4718 }
4719 v.reset(OpMIPS64MOVVconst)
4720 v.AuxInt = int64ToAuxInt(0)
4721 return true
4722 }
4723 return false
4724 }
4725 func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
4726 v_0 := v.Args[0]
4727
4728
4729
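	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])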
4730 for {
4731 c := auxIntToInt64(v.AuxInt)
4732 if v_0.Op != OpMIPS64MOVVconst {
4733 break
4734 }
4735 d := auxIntToInt64(v_0.AuxInt)
4736 if !(uint64(c) > uint64(d)) {
4737 break
4738 }
4739 v.reset(OpMIPS64MOVVconst)
4740 v.AuxInt = int64ToAuxInt(1)
4741 return true
4742 }
4743
4744
4745
4746 for {
4747 c := auxIntToInt64(v.AuxInt)
4748 if v_0.Op != OpMIPS64MOVVconst {
4749 break
4750 }
4751 d := auxIntToInt64(v_0.AuxInt)
4752 if !(uint64(c) <= uint64(d)) {
4753 break
4754 }
4755 v.reset(OpMIPS64MOVVconst)
4756 v.AuxInt = int64ToAuxInt(0)
4757 return true
4758 }
4759
4760
4761
4762 for {
4763 c := auxIntToInt64(v.AuxInt)
4764 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
4765 break
4766 }
4767 v.reset(OpMIPS64MOVVconst)
4768 v.AuxInt = int64ToAuxInt(1)
4769 return true
4770 }
4771
4772
4773
4774 for {
4775 c := auxIntToInt64(v.AuxInt)
4776 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
4777 break
4778 }
4779 v.reset(OpMIPS64MOVVconst)
4780 v.AuxInt = int64ToAuxInt(1)
4781 return true
4782 }
4783
4784
4785
4786 for {
4787 c := auxIntToInt64(v.AuxInt)
4788 if v_0.Op != OpMIPS64ANDconst {
4789 break
4790 }
4791 m := auxIntToInt64(v_0.AuxInt)
4792 if !(uint64(m) < uint64(c)) {
4793 break
4794 }
4795 v.reset(OpMIPS64MOVVconst)
4796 v.AuxInt = int64ToAuxInt(1)
4797 return true
4798 }
4799
4800
4801
4802 for {
4803 c := auxIntToInt64(v.AuxInt)
4804 if v_0.Op != OpMIPS64SRLVconst {
4805 break
4806 }
4807 d := auxIntToInt64(v_0.AuxInt)
4808 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
4809 break
4810 }
4811 v.reset(OpMIPS64MOVVconst)
4812 v.AuxInt = int64ToAuxInt(1)
4813 return true
4814 }
4815 return false
4816 }
4817 func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
4818 v_0 := v.Args[0]
4819
4820
4821
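	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])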
4822 for {
4823 c := auxIntToInt64(v.AuxInt)
4824 if v_0.Op != OpMIPS64MOVVconst {
4825 break
4826 }
4827 d := auxIntToInt64(v_0.AuxInt)
4828 if !(c > d) {
4829 break
4830 }
4831 v.reset(OpMIPS64MOVVconst)
4832 v.AuxInt = int64ToAuxInt(1)
4833 return true
4834 }
4835
4836
4837
4838 for {
4839 c := auxIntToInt64(v.AuxInt)
4840 if v_0.Op != OpMIPS64MOVVconst {
4841 break
4842 }
4843 d := auxIntToInt64(v_0.AuxInt)
4844 if !(c <= d) {
4845 break
4846 }
4847 v.reset(OpMIPS64MOVVconst)
4848 v.AuxInt = int64ToAuxInt(0)
4849 return true
4850 }
4851
4852
4853
4854 for {
4855 c := auxIntToInt64(v.AuxInt)
4856 if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
4857 break
4858 }
4859 v.reset(OpMIPS64MOVVconst)
4860 v.AuxInt = int64ToAuxInt(1)
4861 return true
4862 }
4863
4864
4865
4866 for {
4867 c := auxIntToInt64(v.AuxInt)
4868 if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
4869 break
4870 }
4871 v.reset(OpMIPS64MOVVconst)
4872 v.AuxInt = int64ToAuxInt(0)
4873 return true
4874 }
4875
4876
4877
4878 for {
4879 c := auxIntToInt64(v.AuxInt)
4880 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
4881 break
4882 }
4883 v.reset(OpMIPS64MOVVconst)
4884 v.AuxInt = int64ToAuxInt(1)
4885 return true
4886 }
4887
4888
4889
4890 for {
4891 c := auxIntToInt64(v.AuxInt)
4892 if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
4893 break
4894 }
4895 v.reset(OpMIPS64MOVVconst)
4896 v.AuxInt = int64ToAuxInt(0)
4897 return true
4898 }
4899
4900
4901
4902 for {
4903 c := auxIntToInt64(v.AuxInt)
4904 if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
4905 break
4906 }
4907 v.reset(OpMIPS64MOVVconst)
4908 v.AuxInt = int64ToAuxInt(1)
4909 return true
4910 }
4911
4912
4913
4914 for {
4915 c := auxIntToInt64(v.AuxInt)
4916 if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
4917 break
4918 }
4919 v.reset(OpMIPS64MOVVconst)
4920 v.AuxInt = int64ToAuxInt(0)
4921 return true
4922 }
4923
4924
4925
4926 for {
4927 c := auxIntToInt64(v.AuxInt)
4928 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
4929 break
4930 }
4931 v.reset(OpMIPS64MOVVconst)
4932 v.AuxInt = int64ToAuxInt(1)
4933 return true
4934 }
4935
4936
4937
4938 for {
4939 c := auxIntToInt64(v.AuxInt)
4940 if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
4941 break
4942 }
4943 v.reset(OpMIPS64MOVVconst)
4944 v.AuxInt = int64ToAuxInt(0)
4945 return true
4946 }
4947
4948
4949
4950 for {
4951 c := auxIntToInt64(v.AuxInt)
4952 if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
4953 break
4954 }
4955 v.reset(OpMIPS64MOVVconst)
4956 v.AuxInt = int64ToAuxInt(0)
4957 return true
4958 }
4959
4960
4961
4962 for {
4963 c := auxIntToInt64(v.AuxInt)
4964 if v_0.Op != OpMIPS64ANDconst {
4965 break
4966 }
4967 m := auxIntToInt64(v_0.AuxInt)
4968 if !(0 <= m && m < c) {
4969 break
4970 }
4971 v.reset(OpMIPS64MOVVconst)
4972 v.AuxInt = int64ToAuxInt(1)
4973 return true
4974 }
4975
4976
4977
4978 for {
4979 c := auxIntToInt64(v.AuxInt)
4980 if v_0.Op != OpMIPS64SRLVconst {
4981 break
4982 }
4983 d := auxIntToInt64(v_0.AuxInt)
4984 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
4985 break
4986 }
4987 v.reset(OpMIPS64MOVVconst)
4988 v.AuxInt = int64ToAuxInt(1)
4989 return true
4990 }
4991 return false
4992 }
4993 func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
4994 v_1 := v.Args[1]
4995 v_0 := v.Args[0]
4996
4997
4998
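	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])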
4999 for {
5000 if v_1.Op != OpMIPS64MOVVconst {
5001 break
5002 }
5003 c := auxIntToInt64(v_1.AuxInt)
5004 if !(uint64(c) >= 64) {
5005 break
5006 }
5007 v.reset(OpMIPS64MOVVconst)
5008 v.AuxInt = int64ToAuxInt(0)
5009 return true
5010 }
5011
5012
5013 for {
5014 x := v_0
5015 if v_1.Op != OpMIPS64MOVVconst {
5016 break
5017 }
5018 c := auxIntToInt64(v_1.AuxInt)
5019 v.reset(OpMIPS64SLLVconst)
5020 v.AuxInt = int64ToAuxInt(c)
5021 v.AddArg(x)
5022 return true
5023 }
5024 return false
5025 }
5026 func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
5027 v_0 := v.Args[0]
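// Constant-fold: (SLLVconst [c] (MOVVconst [d])) => (MOVVconst [d<<uint64(c)])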
5028
5029
5030 for {
5031 c := auxIntToInt64(v.AuxInt)
5032 if v_0.Op != OpMIPS64MOVVconst {
5033 break
5034 }
5035 d := auxIntToInt64(v_0.AuxInt)
5036 v.reset(OpMIPS64MOVVconst)
5037 v.AuxInt = int64ToAuxInt(d << uint64(c))
5038 return true
5039 }
5040 return false
5041 }
5042 func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
5043 v_1 := v.Args[1]
5044 v_0 := v.Args[0]
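// Arithmetic right shifts by a constant of 64 or more are clamped to 63
// (the result is just the sign fill); other constant amounts become SRAVconst.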
5045
5046
5047
5048 for {
5049 x := v_0
5050 if v_1.Op != OpMIPS64MOVVconst {
5051 break
5052 }
5053 c := auxIntToInt64(v_1.AuxInt)
5054 if !(uint64(c) >= 64) {
5055 break
5056 }
5057 v.reset(OpMIPS64SRAVconst)
5058 v.AuxInt = int64ToAuxInt(63)
5059 v.AddArg(x)
5060 return true
5061 }
5062
5063
5064 for {
5065 x := v_0
5066 if v_1.Op != OpMIPS64MOVVconst {
5067 break
5068 }
5069 c := auxIntToInt64(v_1.AuxInt)
5070 v.reset(OpMIPS64SRAVconst)
5071 v.AuxInt = int64ToAuxInt(c)
5072 v.AddArg(x)
5073 return true
5074 }
5075 return false
5076 }
5077 func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
5078 v_0 := v.Args[0]
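// Constant-fold the signed shift: (SRAVconst [c] (MOVVconst [d])) => (MOVVconst [d>>uint64(c)])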
5079
5080
5081 for {
5082 c := auxIntToInt64(v.AuxInt)
5083 if v_0.Op != OpMIPS64MOVVconst {
5084 break
5085 }
5086 d := auxIntToInt64(v_0.AuxInt)
5087 v.reset(OpMIPS64MOVVconst)
5088 v.AuxInt = int64ToAuxInt(d >> uint64(c))
5089 return true
5090 }
5091 return false
5092 }
5093 func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
5094 v_1 := v.Args[1]
5095 v_0 := v.Args[0]
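// A constant shift amount of 64 or more produces 0; any other constant
// amount is strength-reduced to SRLVconst.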
5096
5097
5098
5099 for {
5100 if v_1.Op != OpMIPS64MOVVconst {
5101 break
5102 }
5103 c := auxIntToInt64(v_1.AuxInt)
5104 if !(uint64(c) >= 64) {
5105 break
5106 }
5107 v.reset(OpMIPS64MOVVconst)
5108 v.AuxInt = int64ToAuxInt(0)
5109 return true
5110 }
5111
5112
5113 for {
5114 x := v_0
5115 if v_1.Op != OpMIPS64MOVVconst {
5116 break
5117 }
5118 c := auxIntToInt64(v_1.AuxInt)
5119 v.reset(OpMIPS64SRLVconst)
5120 v.AuxInt = int64ToAuxInt(c)
5121 v.AddArg(x)
5122 return true
5123 }
5124 return false
5125 }
5126 func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
5127 v_0 := v.Args[0]
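// Constant-fold the unsigned shift: the operand is reinterpreted as uint64
// before shifting, so the vacated high bits are filled with zeros.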
5128
5129
5130 for {
5131 c := auxIntToInt64(v.AuxInt)
5132 if v_0.Op != OpMIPS64MOVVconst {
5133 break
5134 }
5135 d := auxIntToInt64(v_0.AuxInt)
5136 v.reset(OpMIPS64MOVVconst)
5137 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
5138 return true
5139 }
5140 return false
5141 }
5142 func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
5143 v_1 := v.Args[1]
5144 v_0 := v.Args[0]
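// SUBV simplifications: fold a 32-bit constant subtrahend into SUBVconst,
// turn x - (-y) into x + y, x - x into 0, and 0 - x into NEGV x.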
5145
5146
5147
5148 for {
5149 x := v_0
5150 if v_1.Op != OpMIPS64MOVVconst {
5151 break
5152 }
5153 c := auxIntToInt64(v_1.AuxInt)
5154 if !(is32Bit(c)) {
5155 break
5156 }
5157 v.reset(OpMIPS64SUBVconst)
5158 v.AuxInt = int64ToAuxInt(c)
5159 v.AddArg(x)
5160 return true
5161 }
5162
5163
5164 for {
5165 x := v_0
5166 if v_1.Op != OpMIPS64NEGV {
5167 break
5168 }
5169 y := v_1.Args[0]
5170 v.reset(OpMIPS64ADDV)
5171 v.AddArg2(x, y)
5172 return true
5173 }
5174
5175
5176 for {
5177 x := v_0
5178 if x != v_1 {
5179 break
5180 }
5181 v.reset(OpMIPS64MOVVconst)
5182 v.AuxInt = int64ToAuxInt(0)
5183 return true
5184 }
5185
5186
5187 for {
5188 if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
5189 break
5190 }
5191 x := v_1
5192 v.reset(OpMIPS64NEGV)
5193 v.AddArg(x)
5194 return true
5195 }
5196 return false
5197 }
5198 func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
5199 v_0 := v.Args[0]
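// SUBVconst simplifications: subtracting 0 is a no-op, subtracting from a
// constant folds, and nested SUBVconst/ADDVconst chains are merged into a
// single ADDVconst when the combined offset still fits in 32 bits.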
5200
5201
5202 for {
5203 if auxIntToInt64(v.AuxInt) != 0 {
5204 break
5205 }
5206 x := v_0
5207 v.copyOf(x)
5208 return true
5209 }
5210
5211
5212 for {
5213 c := auxIntToInt64(v.AuxInt)
5214 if v_0.Op != OpMIPS64MOVVconst {
5215 break
5216 }
5217 d := auxIntToInt64(v_0.AuxInt)
5218 v.reset(OpMIPS64MOVVconst)
5219 v.AuxInt = int64ToAuxInt(d - c)
5220 return true
5221 }
5222
5223
5224
5225 for {
5226 c := auxIntToInt64(v.AuxInt)
5227 if v_0.Op != OpMIPS64SUBVconst {
5228 break
5229 }
5230 d := auxIntToInt64(v_0.AuxInt)
5231 x := v_0.Args[0]
5232 if !(is32Bit(-c - d)) {
5233 break
5234 }
5235 v.reset(OpMIPS64ADDVconst)
5236 v.AuxInt = int64ToAuxInt(-c - d)
5237 v.AddArg(x)
5238 return true
5239 }
5240
5241
5242
5243 for {
5244 c := auxIntToInt64(v.AuxInt)
5245 if v_0.Op != OpMIPS64ADDVconst {
5246 break
5247 }
5248 d := auxIntToInt64(v_0.AuxInt)
5249 x := v_0.Args[0]
5250 if !(is32Bit(-c + d)) {
5251 break
5252 }
5253 v.reset(OpMIPS64ADDVconst)
5254 v.AuxInt = int64ToAuxInt(-c + d)
5255 v.AddArg(x)
5256 return true
5257 }
5258 return false
5259 }
5260 func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
5261 v_1 := v.Args[1]
5262 v_0 := v.Args[0]
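// XOR simplifications: a 32-bit constant operand (on either side) folds
// into XORconst, and x ^ x is 0.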
5263
5264
5265
5266 for {
5267 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5268 x := v_0
5269 if v_1.Op != OpMIPS64MOVVconst {
5270 continue
5271 }
5272 c := auxIntToInt64(v_1.AuxInt)
5273 if !(is32Bit(c)) {
5274 continue
5275 }
5276 v.reset(OpMIPS64XORconst)
5277 v.AuxInt = int64ToAuxInt(c)
5278 v.AddArg(x)
5279 return true
5280 }
5281 break
5282 }
5283
5284
5285 for {
5286 x := v_0
5287 if x != v_1 {
5288 break
5289 }
5290 v.reset(OpMIPS64MOVVconst)
5291 v.AuxInt = int64ToAuxInt(0)
5292 return true
5293 }
5294 return false
5295 }
5296 func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
5297 v_0 := v.Args[0]
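// XORconst simplifications: xor with 0 is a no-op, xor with -1 is bitwise
// NOT (NORconst [0]), constant operands fold, and nested XORconst values
// merge when the combined constant fits in 32 bits.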
5298
5299
5300 for {
5301 if auxIntToInt64(v.AuxInt) != 0 {
5302 break
5303 }
5304 x := v_0
5305 v.copyOf(x)
5306 return true
5307 }
5308
5309
5310 for {
5311 if auxIntToInt64(v.AuxInt) != -1 {
5312 break
5313 }
5314 x := v_0
5315 v.reset(OpMIPS64NORconst)
5316 v.AuxInt = int64ToAuxInt(0)
5317 v.AddArg(x)
5318 return true
5319 }
5320
5321
5322 for {
5323 c := auxIntToInt64(v.AuxInt)
5324 if v_0.Op != OpMIPS64MOVVconst {
5325 break
5326 }
5327 d := auxIntToInt64(v_0.AuxInt)
5328 v.reset(OpMIPS64MOVVconst)
5329 v.AuxInt = int64ToAuxInt(c ^ d)
5330 return true
5331 }
5332
5333
5334
5335 for {
5336 c := auxIntToInt64(v.AuxInt)
5337 if v_0.Op != OpMIPS64XORconst {
5338 break
5339 }
5340 d := auxIntToInt64(v_0.AuxInt)
5341 x := v_0.Args[0]
5342 if !(is32Bit(c ^ d)) {
5343 break
5344 }
5345 v.reset(OpMIPS64XORconst)
5346 v.AuxInt = int64ToAuxInt(c ^ d)
5347 v.AddArg(x)
5348 return true
5349 }
5350 return false
5351 }
5352 func rewriteValueMIPS64_OpMod16(v *Value) bool {
5353 v_1 := v.Args[1]
5354 v_0 := v.Args[0]
5355 b := v.Block
5356 typ := &b.Func.Config.Types
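// Mod16 and the other Mod* lowerings below take Select0 of DIVV/DIVVU,
// which is the remainder; sub-word operands are sign- or zero-extended to
// 64 bits first.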
5357
5358
5359 for {
5360 x := v_0
5361 y := v_1
5362 v.reset(OpSelect0)
5363 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5364 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5365 v1.AddArg(x)
5366 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5367 v2.AddArg(y)
5368 v0.AddArg2(v1, v2)
5369 v.AddArg(v0)
5370 return true
5371 }
5372 }
5373 func rewriteValueMIPS64_OpMod16u(v *Value) bool {
5374 v_1 := v.Args[1]
5375 v_0 := v.Args[0]
5376 b := v.Block
5377 typ := &b.Func.Config.Types
5378
5379
5380 for {
5381 x := v_0
5382 y := v_1
5383 v.reset(OpSelect0)
5384 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5385 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5386 v1.AddArg(x)
5387 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5388 v2.AddArg(y)
5389 v0.AddArg2(v1, v2)
5390 v.AddArg(v0)
5391 return true
5392 }
5393 }
5394 func rewriteValueMIPS64_OpMod32(v *Value) bool {
5395 v_1 := v.Args[1]
5396 v_0 := v.Args[0]
5397 b := v.Block
5398 typ := &b.Func.Config.Types
5399
5400
5401 for {
5402 x := v_0
5403 y := v_1
5404 v.reset(OpSelect0)
5405 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5406 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5407 v1.AddArg(x)
5408 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5409 v2.AddArg(y)
5410 v0.AddArg2(v1, v2)
5411 v.AddArg(v0)
5412 return true
5413 }
5414 }
5415 func rewriteValueMIPS64_OpMod32u(v *Value) bool {
5416 v_1 := v.Args[1]
5417 v_0 := v.Args[0]
5418 b := v.Block
5419 typ := &b.Func.Config.Types
5420
5421
5422 for {
5423 x := v_0
5424 y := v_1
5425 v.reset(OpSelect0)
5426 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5427 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5428 v1.AddArg(x)
5429 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5430 v2.AddArg(y)
5431 v0.AddArg2(v1, v2)
5432 v.AddArg(v0)
5433 return true
5434 }
5435 }
5436 func rewriteValueMIPS64_OpMod64(v *Value) bool {
5437 v_1 := v.Args[1]
5438 v_0 := v.Args[0]
5439 b := v.Block
5440 typ := &b.Func.Config.Types
5441
5442
5443 for {
5444 x := v_0
5445 y := v_1
5446 v.reset(OpSelect0)
5447 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5448 v0.AddArg2(x, y)
5449 v.AddArg(v0)
5450 return true
5451 }
5452 }
5453 func rewriteValueMIPS64_OpMod64u(v *Value) bool {
5454 v_1 := v.Args[1]
5455 v_0 := v.Args[0]
5456 b := v.Block
5457 typ := &b.Func.Config.Types
5458
5459
5460 for {
5461 x := v_0
5462 y := v_1
5463 v.reset(OpSelect0)
5464 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5465 v0.AddArg2(x, y)
5466 v.AddArg(v0)
5467 return true
5468 }
5469 }
5470 func rewriteValueMIPS64_OpMod8(v *Value) bool {
5471 v_1 := v.Args[1]
5472 v_0 := v.Args[0]
5473 b := v.Block
5474 typ := &b.Func.Config.Types
5475
5476
5477 for {
5478 x := v_0
5479 y := v_1
5480 v.reset(OpSelect0)
5481 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5482 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5483 v1.AddArg(x)
5484 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5485 v2.AddArg(y)
5486 v0.AddArg2(v1, v2)
5487 v.AddArg(v0)
5488 return true
5489 }
5490 }
5491 func rewriteValueMIPS64_OpMod8u(v *Value) bool {
5492 v_1 := v.Args[1]
5493 v_0 := v.Args[0]
5494 b := v.Block
5495 typ := &b.Func.Config.Types
5496
5497
5498 for {
5499 x := v_0
5500 y := v_1
5501 v.reset(OpSelect0)
5502 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5503 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5504 v1.AddArg(x)
5505 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5506 v2.AddArg(y)
5507 v0.AddArg2(v1, v2)
5508 v.AddArg(v0)
5509 return true
5510 }
5511 }
5512 func rewriteValueMIPS64_OpMove(v *Value) bool {
5513 v_2 := v.Args[2]
5514 v_1 := v.Args[1]
5515 v_0 := v.Args[0]
5516 b := v.Block
5517 config := b.Func.Config
5518 typ := &b.Func.Config.Types
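// Move is lowered by size: small and suitably aligned copies expand into
// explicit load/store pairs, 8-byte-aligned multiples of 8 up to 1024 bytes
// use DUFFCOPY, and everything else falls back to LoweredMove.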
5519
5520
5521 for {
5522 if auxIntToInt64(v.AuxInt) != 0 {
5523 break
5524 }
5525 mem := v_2
5526 v.copyOf(mem)
5527 return true
5528 }
5529
5530
5531 for {
5532 if auxIntToInt64(v.AuxInt) != 1 {
5533 break
5534 }
5535 dst := v_0
5536 src := v_1
5537 mem := v_2
5538 v.reset(OpMIPS64MOVBstore)
5539 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5540 v0.AddArg2(src, mem)
5541 v.AddArg3(dst, v0, mem)
5542 return true
5543 }
5544
5545
5546
5547 for {
5548 if auxIntToInt64(v.AuxInt) != 2 {
5549 break
5550 }
5551 t := auxToType(v.Aux)
5552 dst := v_0
5553 src := v_1
5554 mem := v_2
5555 if !(t.Alignment()%2 == 0) {
5556 break
5557 }
5558 v.reset(OpMIPS64MOVHstore)
5559 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5560 v0.AddArg2(src, mem)
5561 v.AddArg3(dst, v0, mem)
5562 return true
5563 }
5564
5565
5566 for {
5567 if auxIntToInt64(v.AuxInt) != 2 {
5568 break
5569 }
5570 dst := v_0
5571 src := v_1
5572 mem := v_2
5573 v.reset(OpMIPS64MOVBstore)
5574 v.AuxInt = int32ToAuxInt(1)
5575 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5576 v0.AuxInt = int32ToAuxInt(1)
5577 v0.AddArg2(src, mem)
5578 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5579 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5580 v2.AddArg2(src, mem)
5581 v1.AddArg3(dst, v2, mem)
5582 v.AddArg3(dst, v0, v1)
5583 return true
5584 }
5585
5586
5587
5588 for {
5589 if auxIntToInt64(v.AuxInt) != 4 {
5590 break
5591 }
5592 t := auxToType(v.Aux)
5593 dst := v_0
5594 src := v_1
5595 mem := v_2
5596 if !(t.Alignment()%4 == 0) {
5597 break
5598 }
5599 v.reset(OpMIPS64MOVWstore)
5600 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5601 v0.AddArg2(src, mem)
5602 v.AddArg3(dst, v0, mem)
5603 return true
5604 }
5605
5606
5607
5608 for {
5609 if auxIntToInt64(v.AuxInt) != 4 {
5610 break
5611 }
5612 t := auxToType(v.Aux)
5613 dst := v_0
5614 src := v_1
5615 mem := v_2
5616 if !(t.Alignment()%2 == 0) {
5617 break
5618 }
5619 v.reset(OpMIPS64MOVHstore)
5620 v.AuxInt = int32ToAuxInt(2)
5621 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5622 v0.AuxInt = int32ToAuxInt(2)
5623 v0.AddArg2(src, mem)
5624 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5625 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5626 v2.AddArg2(src, mem)
5627 v1.AddArg3(dst, v2, mem)
5628 v.AddArg3(dst, v0, v1)
5629 return true
5630 }
5631
5632
5633 for {
5634 if auxIntToInt64(v.AuxInt) != 4 {
5635 break
5636 }
5637 dst := v_0
5638 src := v_1
5639 mem := v_2
5640 v.reset(OpMIPS64MOVBstore)
5641 v.AuxInt = int32ToAuxInt(3)
5642 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5643 v0.AuxInt = int32ToAuxInt(3)
5644 v0.AddArg2(src, mem)
5645 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5646 v1.AuxInt = int32ToAuxInt(2)
5647 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5648 v2.AuxInt = int32ToAuxInt(2)
5649 v2.AddArg2(src, mem)
5650 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5651 v3.AuxInt = int32ToAuxInt(1)
5652 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5653 v4.AuxInt = int32ToAuxInt(1)
5654 v4.AddArg2(src, mem)
5655 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5656 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5657 v6.AddArg2(src, mem)
5658 v5.AddArg3(dst, v6, mem)
5659 v3.AddArg3(dst, v4, v5)
5660 v1.AddArg3(dst, v2, v3)
5661 v.AddArg3(dst, v0, v1)
5662 return true
5663 }
5664
5665
5666
5667 for {
5668 if auxIntToInt64(v.AuxInt) != 8 {
5669 break
5670 }
5671 t := auxToType(v.Aux)
5672 dst := v_0
5673 src := v_1
5674 mem := v_2
5675 if !(t.Alignment()%8 == 0) {
5676 break
5677 }
5678 v.reset(OpMIPS64MOVVstore)
5679 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5680 v0.AddArg2(src, mem)
5681 v.AddArg3(dst, v0, mem)
5682 return true
5683 }
5684
5685
5686
5687 for {
5688 if auxIntToInt64(v.AuxInt) != 8 {
5689 break
5690 }
5691 t := auxToType(v.Aux)
5692 dst := v_0
5693 src := v_1
5694 mem := v_2
5695 if !(t.Alignment()%4 == 0) {
5696 break
5697 }
5698 v.reset(OpMIPS64MOVWstore)
5699 v.AuxInt = int32ToAuxInt(4)
5700 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5701 v0.AuxInt = int32ToAuxInt(4)
5702 v0.AddArg2(src, mem)
5703 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5704 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5705 v2.AddArg2(src, mem)
5706 v1.AddArg3(dst, v2, mem)
5707 v.AddArg3(dst, v0, v1)
5708 return true
5709 }
5710
5711
5712
5713 for {
5714 if auxIntToInt64(v.AuxInt) != 8 {
5715 break
5716 }
5717 t := auxToType(v.Aux)
5718 dst := v_0
5719 src := v_1
5720 mem := v_2
5721 if !(t.Alignment()%2 == 0) {
5722 break
5723 }
5724 v.reset(OpMIPS64MOVHstore)
5725 v.AuxInt = int32ToAuxInt(6)
5726 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5727 v0.AuxInt = int32ToAuxInt(6)
5728 v0.AddArg2(src, mem)
5729 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5730 v1.AuxInt = int32ToAuxInt(4)
5731 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5732 v2.AuxInt = int32ToAuxInt(4)
5733 v2.AddArg2(src, mem)
5734 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5735 v3.AuxInt = int32ToAuxInt(2)
5736 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5737 v4.AuxInt = int32ToAuxInt(2)
5738 v4.AddArg2(src, mem)
5739 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5740 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5741 v6.AddArg2(src, mem)
5742 v5.AddArg3(dst, v6, mem)
5743 v3.AddArg3(dst, v4, v5)
5744 v1.AddArg3(dst, v2, v3)
5745 v.AddArg3(dst, v0, v1)
5746 return true
5747 }
5748
5749
5750 for {
5751 if auxIntToInt64(v.AuxInt) != 3 {
5752 break
5753 }
5754 dst := v_0
5755 src := v_1
5756 mem := v_2
5757 v.reset(OpMIPS64MOVBstore)
5758 v.AuxInt = int32ToAuxInt(2)
5759 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5760 v0.AuxInt = int32ToAuxInt(2)
5761 v0.AddArg2(src, mem)
5762 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5763 v1.AuxInt = int32ToAuxInt(1)
5764 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5765 v2.AuxInt = int32ToAuxInt(1)
5766 v2.AddArg2(src, mem)
5767 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5768 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5769 v4.AddArg2(src, mem)
5770 v3.AddArg3(dst, v4, mem)
5771 v1.AddArg3(dst, v2, v3)
5772 v.AddArg3(dst, v0, v1)
5773 return true
5774 }
5775
5776
5777
5778 for {
5779 if auxIntToInt64(v.AuxInt) != 6 {
5780 break
5781 }
5782 t := auxToType(v.Aux)
5783 dst := v_0
5784 src := v_1
5785 mem := v_2
5786 if !(t.Alignment()%2 == 0) {
5787 break
5788 }
5789 v.reset(OpMIPS64MOVHstore)
5790 v.AuxInt = int32ToAuxInt(4)
5791 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5792 v0.AuxInt = int32ToAuxInt(4)
5793 v0.AddArg2(src, mem)
5794 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5795 v1.AuxInt = int32ToAuxInt(2)
5796 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5797 v2.AuxInt = int32ToAuxInt(2)
5798 v2.AddArg2(src, mem)
5799 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5800 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5801 v4.AddArg2(src, mem)
5802 v3.AddArg3(dst, v4, mem)
5803 v1.AddArg3(dst, v2, v3)
5804 v.AddArg3(dst, v0, v1)
5805 return true
5806 }
5807
5808
5809
5810 for {
5811 if auxIntToInt64(v.AuxInt) != 12 {
5812 break
5813 }
5814 t := auxToType(v.Aux)
5815 dst := v_0
5816 src := v_1
5817 mem := v_2
5818 if !(t.Alignment()%4 == 0) {
5819 break
5820 }
5821 v.reset(OpMIPS64MOVWstore)
5822 v.AuxInt = int32ToAuxInt(8)
5823 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5824 v0.AuxInt = int32ToAuxInt(8)
5825 v0.AddArg2(src, mem)
5826 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5827 v1.AuxInt = int32ToAuxInt(4)
5828 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5829 v2.AuxInt = int32ToAuxInt(4)
5830 v2.AddArg2(src, mem)
5831 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5832 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5833 v4.AddArg2(src, mem)
5834 v3.AddArg3(dst, v4, mem)
5835 v1.AddArg3(dst, v2, v3)
5836 v.AddArg3(dst, v0, v1)
5837 return true
5838 }
5839
5840
5841
5842 for {
5843 if auxIntToInt64(v.AuxInt) != 16 {
5844 break
5845 }
5846 t := auxToType(v.Aux)
5847 dst := v_0
5848 src := v_1
5849 mem := v_2
5850 if !(t.Alignment()%8 == 0) {
5851 break
5852 }
5853 v.reset(OpMIPS64MOVVstore)
5854 v.AuxInt = int32ToAuxInt(8)
5855 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5856 v0.AuxInt = int32ToAuxInt(8)
5857 v0.AddArg2(src, mem)
5858 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5859 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5860 v2.AddArg2(src, mem)
5861 v1.AddArg3(dst, v2, mem)
5862 v.AddArg3(dst, v0, v1)
5863 return true
5864 }
5865
5866
5867
5868 for {
5869 if auxIntToInt64(v.AuxInt) != 24 {
5870 break
5871 }
5872 t := auxToType(v.Aux)
5873 dst := v_0
5874 src := v_1
5875 mem := v_2
5876 if !(t.Alignment()%8 == 0) {
5877 break
5878 }
5879 v.reset(OpMIPS64MOVVstore)
5880 v.AuxInt = int32ToAuxInt(16)
5881 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5882 v0.AuxInt = int32ToAuxInt(16)
5883 v0.AddArg2(src, mem)
5884 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5885 v1.AuxInt = int32ToAuxInt(8)
5886 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5887 v2.AuxInt = int32ToAuxInt(8)
5888 v2.AddArg2(src, mem)
5889 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5890 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5891 v4.AddArg2(src, mem)
5892 v3.AddArg3(dst, v4, mem)
5893 v1.AddArg3(dst, v2, v3)
5894 v.AddArg3(dst, v0, v1)
5895 return true
5896 }
5897
5898
5899
5900 for {
5901 s := auxIntToInt64(v.AuxInt)
5902 t := auxToType(v.Aux)
5903 dst := v_0
5904 src := v_1
5905 mem := v_2
5906 if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
5907 break
5908 }
5909 v.reset(OpMIPS64DUFFCOPY)
5910 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
5911 v.AddArg3(dst, src, mem)
5912 return true
5913 }
5914
5915
5916
5917 for {
5918 s := auxIntToInt64(v.AuxInt)
5919 t := auxToType(v.Aux)
5920 dst := v_0
5921 src := v_1
5922 mem := v_2
5923 if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
5924 break
5925 }
5926 v.reset(OpMIPS64LoweredMove)
5927 v.AuxInt = int64ToAuxInt(t.Alignment())
5928 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
5929 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
5930 v0.AddArg(src)
5931 v.AddArg4(dst, src, v0, mem)
5932 return true
5933 }
5934 return false
5935 }
5936 func rewriteValueMIPS64_OpMul16(v *Value) bool {
5937 v_1 := v.Args[1]
5938 v_0 := v.Args[0]
5939 b := v.Block
5940 typ := &b.Func.Config.Types
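// All Mul* lowerings use the low 64 bits (Select1) of MULVU; truncation to
// the narrower result type makes the operands' signedness irrelevant.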
5941
5942
5943 for {
5944 x := v_0
5945 y := v_1
5946 v.reset(OpSelect1)
5947 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5948 v0.AddArg2(x, y)
5949 v.AddArg(v0)
5950 return true
5951 }
5952 }
5953 func rewriteValueMIPS64_OpMul32(v *Value) bool {
5954 v_1 := v.Args[1]
5955 v_0 := v.Args[0]
5956 b := v.Block
5957 typ := &b.Func.Config.Types
5958
5959
5960 for {
5961 x := v_0
5962 y := v_1
5963 v.reset(OpSelect1)
5964 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5965 v0.AddArg2(x, y)
5966 v.AddArg(v0)
5967 return true
5968 }
5969 }
5970 func rewriteValueMIPS64_OpMul64(v *Value) bool {
5971 v_1 := v.Args[1]
5972 v_0 := v.Args[0]
5973 b := v.Block
5974 typ := &b.Func.Config.Types
5975
5976
5977 for {
5978 x := v_0
5979 y := v_1
5980 v.reset(OpSelect1)
5981 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5982 v0.AddArg2(x, y)
5983 v.AddArg(v0)
5984 return true
5985 }
5986 }
5987 func rewriteValueMIPS64_OpMul8(v *Value) bool {
5988 v_1 := v.Args[1]
5989 v_0 := v.Args[0]
5990 b := v.Block
5991 typ := &b.Func.Config.Types
5992
5993
5994 for {
5995 x := v_0
5996 y := v_1
5997 v.reset(OpSelect1)
5998 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5999 v0.AddArg2(x, y)
6000 v.AddArg(v0)
6001 return true
6002 }
6003 }
6004 func rewriteValueMIPS64_OpNeq16(v *Value) bool {
6005 v_1 := v.Args[1]
6006 v_0 := v.Args[0]
6007 b := v.Block
6008 typ := &b.Func.Config.Types
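// Neq* is computed as SGTU of (XOR of the widened operands) against 0:
// the XOR is nonzero exactly when the operands differ.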
6009
6010
6011 for {
6012 x := v_0
6013 y := v_1
6014 v.reset(OpMIPS64SGTU)
6015 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6016 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6017 v1.AddArg(x)
6018 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6019 v2.AddArg(y)
6020 v0.AddArg2(v1, v2)
6021 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6022 v3.AuxInt = int64ToAuxInt(0)
6023 v.AddArg2(v0, v3)
6024 return true
6025 }
6026 }
6027 func rewriteValueMIPS64_OpNeq32(v *Value) bool {
6028 v_1 := v.Args[1]
6029 v_0 := v.Args[0]
6030 b := v.Block
6031 typ := &b.Func.Config.Types
6032
6033
6034 for {
6035 x := v_0
6036 y := v_1
6037 v.reset(OpMIPS64SGTU)
6038 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6039 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6040 v1.AddArg(x)
6041 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6042 v2.AddArg(y)
6043 v0.AddArg2(v1, v2)
6044 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6045 v3.AuxInt = int64ToAuxInt(0)
6046 v.AddArg2(v0, v3)
6047 return true
6048 }
6049 }
6050 func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
6051 v_1 := v.Args[1]
6052 v_0 := v.Args[0]
6053 b := v.Block
6054
6055
6056 for {
6057 x := v_0
6058 y := v_1
6059 v.reset(OpMIPS64FPFlagFalse)
6060 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
6061 v0.AddArg2(x, y)
6062 v.AddArg(v0)
6063 return true
6064 }
6065 }
6066 func rewriteValueMIPS64_OpNeq64(v *Value) bool {
6067 v_1 := v.Args[1]
6068 v_0 := v.Args[0]
6069 b := v.Block
6070 typ := &b.Func.Config.Types
6071
6072
6073 for {
6074 x := v_0
6075 y := v_1
6076 v.reset(OpMIPS64SGTU)
6077 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6078 v0.AddArg2(x, y)
6079 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6080 v1.AuxInt = int64ToAuxInt(0)
6081 v.AddArg2(v0, v1)
6082 return true
6083 }
6084 }
6085 func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
6086 v_1 := v.Args[1]
6087 v_0 := v.Args[0]
6088 b := v.Block
6089
6090
6091 for {
6092 x := v_0
6093 y := v_1
6094 v.reset(OpMIPS64FPFlagFalse)
6095 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
6096 v0.AddArg2(x, y)
6097 v.AddArg(v0)
6098 return true
6099 }
6100 }
6101 func rewriteValueMIPS64_OpNeq8(v *Value) bool {
6102 v_1 := v.Args[1]
6103 v_0 := v.Args[0]
6104 b := v.Block
6105 typ := &b.Func.Config.Types
6106
6107
6108 for {
6109 x := v_0
6110 y := v_1
6111 v.reset(OpMIPS64SGTU)
6112 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6113 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6114 v1.AddArg(x)
6115 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6116 v2.AddArg(y)
6117 v0.AddArg2(v1, v2)
6118 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6119 v3.AuxInt = int64ToAuxInt(0)
6120 v.AddArg2(v0, v3)
6121 return true
6122 }
6123 }
6124 func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
6125 v_1 := v.Args[1]
6126 v_0 := v.Args[0]
6127 b := v.Block
6128 typ := &b.Func.Config.Types
6129
6130
6131 for {
6132 x := v_0
6133 y := v_1
6134 v.reset(OpMIPS64SGTU)
6135 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6136 v0.AddArg2(x, y)
6137 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6138 v1.AuxInt = int64ToAuxInt(0)
6139 v.AddArg2(v0, v1)
6140 return true
6141 }
6142 }
6143 func rewriteValueMIPS64_OpNot(v *Value) bool {
6144 v_0 := v.Args[0]
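// Not x => XORconst [1] x (booleans are materialized as 0 or 1).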
6145
6146
6147 for {
6148 x := v_0
6149 v.reset(OpMIPS64XORconst)
6150 v.AuxInt = int64ToAuxInt(1)
6151 v.AddArg(x)
6152 return true
6153 }
6154 }
6155 func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
6156 v_0 := v.Args[0]
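// OffPtr from SP folds into the MOVVaddr addressing form when the offset
// fits in 32 bits; otherwise it becomes a plain ADDVconst.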
6157
6158
6159
6160 for {
6161 off := auxIntToInt64(v.AuxInt)
6162 ptr := v_0
6163 if ptr.Op != OpSP || !(is32Bit(off)) {
6164 break
6165 }
6166 v.reset(OpMIPS64MOVVaddr)
6167 v.AuxInt = int32ToAuxInt(int32(off))
6168 v.AddArg(ptr)
6169 return true
6170 }
6171
6172
6173 for {
6174 off := auxIntToInt64(v.AuxInt)
6175 ptr := v_0
6176 v.reset(OpMIPS64ADDVconst)
6177 v.AuxInt = int64ToAuxInt(off)
6178 v.AddArg(ptr)
6179 return true
6180 }
6181 }
6182 func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
6183 v_1 := v.Args[1]
6184 v_0 := v.Args[0]
6185 b := v.Block
6186 typ := &b.Func.Config.Types
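// A constant rotate is decomposed into an OR of a left shift by c&15 and a
// right shift by -c&15; the RotateLeft32/64/8 rules below follow the same
// pattern with their respective masks.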
6187
6188
6189 for {
6190 t := v.Type
6191 x := v_0
6192 if v_1.Op != OpMIPS64MOVVconst {
6193 break
6194 }
6195 c := auxIntToInt64(v_1.AuxInt)
6196 v.reset(OpOr16)
6197 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6198 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6199 v1.AuxInt = int64ToAuxInt(c & 15)
6200 v0.AddArg2(x, v1)
6201 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6202 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6203 v3.AuxInt = int64ToAuxInt(-c & 15)
6204 v2.AddArg2(x, v3)
6205 v.AddArg2(v0, v2)
6206 return true
6207 }
6208 return false
6209 }
6210 func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
6211 v_1 := v.Args[1]
6212 v_0 := v.Args[0]
6213 b := v.Block
6214 typ := &b.Func.Config.Types
6215
6216
6217 for {
6218 t := v.Type
6219 x := v_0
6220 if v_1.Op != OpMIPS64MOVVconst {
6221 break
6222 }
6223 c := auxIntToInt64(v_1.AuxInt)
6224 v.reset(OpOr32)
6225 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6226 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6227 v1.AuxInt = int64ToAuxInt(c & 31)
6228 v0.AddArg2(x, v1)
6229 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6230 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6231 v3.AuxInt = int64ToAuxInt(-c & 31)
6232 v2.AddArg2(x, v3)
6233 v.AddArg2(v0, v2)
6234 return true
6235 }
6236 return false
6237 }
6238 func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
6239 v_1 := v.Args[1]
6240 v_0 := v.Args[0]
6241 b := v.Block
6242 typ := &b.Func.Config.Types
6243
6244
6245 for {
6246 t := v.Type
6247 x := v_0
6248 if v_1.Op != OpMIPS64MOVVconst {
6249 break
6250 }
6251 c := auxIntToInt64(v_1.AuxInt)
6252 v.reset(OpOr64)
6253 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6254 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6255 v1.AuxInt = int64ToAuxInt(c & 63)
6256 v0.AddArg2(x, v1)
6257 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6258 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6259 v3.AuxInt = int64ToAuxInt(-c & 63)
6260 v2.AddArg2(x, v3)
6261 v.AddArg2(v0, v2)
6262 return true
6263 }
6264 return false
6265 }
6266 func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
6267 v_1 := v.Args[1]
6268 v_0 := v.Args[0]
6269 b := v.Block
6270 typ := &b.Func.Config.Types
6271
6272
6273 for {
6274 t := v.Type
6275 x := v_0
6276 if v_1.Op != OpMIPS64MOVVconst {
6277 break
6278 }
6279 c := auxIntToInt64(v_1.AuxInt)
6280 v.reset(OpOr8)
6281 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6282 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6283 v1.AuxInt = int64ToAuxInt(c & 7)
6284 v0.AddArg2(x, v1)
6285 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6286 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6287 v3.AuxInt = int64ToAuxInt(-c & 7)
6288 v2.AddArg2(x, v3)
6289 v.AddArg2(v0, v2)
6290 return true
6291 }
6292 return false
6293 }
6294 func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
6295 v_1 := v.Args[1]
6296 v_0 := v.Args[0]
6297 b := v.Block
6298 typ := &b.Func.Config.Types
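// Unsigned right shifts guard against over-shifting: the shifted value is
// ANDed with NEGV(SGTU 64 y'), which is all ones when the (widened) shift
// amount is below 64 and all zeros otherwise, so over-shifts yield 0.
// The other Rsh*Ux* rules below use the same pattern.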
6299
6300
6301 for {
6302 t := v.Type
6303 x := v_0
6304 y := v_1
6305 v.reset(OpMIPS64AND)
6306 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6307 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6308 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6309 v2.AuxInt = int64ToAuxInt(64)
6310 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6311 v3.AddArg(y)
6312 v1.AddArg2(v2, v3)
6313 v0.AddArg(v1)
6314 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6315 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6316 v5.AddArg(x)
6317 v4.AddArg2(v5, v3)
6318 v.AddArg2(v0, v4)
6319 return true
6320 }
6321 }
6322 func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
6323 v_1 := v.Args[1]
6324 v_0 := v.Args[0]
6325 b := v.Block
6326 typ := &b.Func.Config.Types
6327
6328
6329 for {
6330 t := v.Type
6331 x := v_0
6332 y := v_1
6333 v.reset(OpMIPS64AND)
6334 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6335 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6336 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6337 v2.AuxInt = int64ToAuxInt(64)
6338 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6339 v3.AddArg(y)
6340 v1.AddArg2(v2, v3)
6341 v0.AddArg(v1)
6342 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6343 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6344 v5.AddArg(x)
6345 v4.AddArg2(v5, v3)
6346 v.AddArg2(v0, v4)
6347 return true
6348 }
6349 }
6350 func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
6351 v_1 := v.Args[1]
6352 v_0 := v.Args[0]
6353 b := v.Block
6354 typ := &b.Func.Config.Types
6355
6356
6357 for {
6358 t := v.Type
6359 x := v_0
6360 y := v_1
6361 v.reset(OpMIPS64AND)
6362 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6363 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6364 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6365 v2.AuxInt = int64ToAuxInt(64)
6366 v1.AddArg2(v2, y)
6367 v0.AddArg(v1)
6368 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6369 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6370 v4.AddArg(x)
6371 v3.AddArg2(v4, y)
6372 v.AddArg2(v0, v3)
6373 return true
6374 }
6375 }
6376 func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
6377 v_1 := v.Args[1]
6378 v_0 := v.Args[0]
6379 b := v.Block
6380 typ := &b.Func.Config.Types
6381
6382
6383 for {
6384 t := v.Type
6385 x := v_0
6386 y := v_1
6387 v.reset(OpMIPS64AND)
6388 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6389 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6390 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6391 v2.AuxInt = int64ToAuxInt(64)
6392 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6393 v3.AddArg(y)
6394 v1.AddArg2(v2, v3)
6395 v0.AddArg(v1)
6396 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6397 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6398 v5.AddArg(x)
6399 v4.AddArg2(v5, v3)
6400 v.AddArg2(v0, v4)
6401 return true
6402 }
6403 }
6404 func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
6405 v_1 := v.Args[1]
6406 v_0 := v.Args[0]
6407 b := v.Block
6408 typ := &b.Func.Config.Types
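// Signed right shifts clamp the shift amount instead: OR-ing the amount
// with NEGV(SGTU y' 63) makes it all ones when y' is 64 or more, which
// behaves as a shift by 63 and yields the sign fill. The other Rsh*x*
// rules below use the same pattern.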
6409
6410
6411 for {
6412 t := v.Type
6413 x := v_0
6414 y := v_1
6415 v.reset(OpMIPS64SRAV)
6416 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6417 v0.AddArg(x)
6418 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6419 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6420 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6421 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6422 v4.AddArg(y)
6423 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6424 v5.AuxInt = int64ToAuxInt(63)
6425 v3.AddArg2(v4, v5)
6426 v2.AddArg(v3)
6427 v1.AddArg2(v2, v4)
6428 v.AddArg2(v0, v1)
6429 return true
6430 }
6431 }
6432 func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
6433 v_1 := v.Args[1]
6434 v_0 := v.Args[0]
6435 b := v.Block
6436 typ := &b.Func.Config.Types
6437
6438
6439 for {
6440 t := v.Type
6441 x := v_0
6442 y := v_1
6443 v.reset(OpMIPS64SRAV)
6444 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6445 v0.AddArg(x)
6446 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6447 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6448 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6449 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6450 v4.AddArg(y)
6451 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6452 v5.AuxInt = int64ToAuxInt(63)
6453 v3.AddArg2(v4, v5)
6454 v2.AddArg(v3)
6455 v1.AddArg2(v2, v4)
6456 v.AddArg2(v0, v1)
6457 return true
6458 }
6459 }
6460 func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
6461 v_1 := v.Args[1]
6462 v_0 := v.Args[0]
6463 b := v.Block
6464 typ := &b.Func.Config.Types
6465
6466
6467 for {
6468 t := v.Type
6469 x := v_0
6470 y := v_1
6471 v.reset(OpMIPS64SRAV)
6472 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6473 v0.AddArg(x)
6474 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6475 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6476 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6477 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6478 v4.AuxInt = int64ToAuxInt(63)
6479 v3.AddArg2(y, v4)
6480 v2.AddArg(v3)
6481 v1.AddArg2(v2, y)
6482 v.AddArg2(v0, v1)
6483 return true
6484 }
6485 }
6486 func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
6487 v_1 := v.Args[1]
6488 v_0 := v.Args[0]
6489 b := v.Block
6490 typ := &b.Func.Config.Types
6491
6492
6493 for {
6494 t := v.Type
6495 x := v_0
6496 y := v_1
6497 v.reset(OpMIPS64SRAV)
6498 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6499 v0.AddArg(x)
6500 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6501 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6502 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6503 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6504 v4.AddArg(y)
6505 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6506 v5.AuxInt = int64ToAuxInt(63)
6507 v3.AddArg2(v4, v5)
6508 v2.AddArg(v3)
6509 v1.AddArg2(v2, v4)
6510 v.AddArg2(v0, v1)
6511 return true
6512 }
6513 }
6514 func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
6515 v_1 := v.Args[1]
6516 v_0 := v.Args[0]
6517 b := v.Block
6518 typ := &b.Func.Config.Types
6519
6520
6521 for {
6522 t := v.Type
6523 x := v_0
6524 y := v_1
6525 v.reset(OpMIPS64AND)
6526 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6527 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6528 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6529 v2.AuxInt = int64ToAuxInt(64)
6530 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6531 v3.AddArg(y)
6532 v1.AddArg2(v2, v3)
6533 v0.AddArg(v1)
6534 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6535 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6536 v5.AddArg(x)
6537 v4.AddArg2(v5, v3)
6538 v.AddArg2(v0, v4)
6539 return true
6540 }
6541 }
6542 func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
6543 v_1 := v.Args[1]
6544 v_0 := v.Args[0]
6545 b := v.Block
6546 typ := &b.Func.Config.Types
6547
6548
6549 for {
6550 t := v.Type
6551 x := v_0
6552 y := v_1
6553 v.reset(OpMIPS64AND)
6554 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6555 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6556 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6557 v2.AuxInt = int64ToAuxInt(64)
6558 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6559 v3.AddArg(y)
6560 v1.AddArg2(v2, v3)
6561 v0.AddArg(v1)
6562 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6563 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6564 v5.AddArg(x)
6565 v4.AddArg2(v5, v3)
6566 v.AddArg2(v0, v4)
6567 return true
6568 }
6569 }
6570 func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
6571 v_1 := v.Args[1]
6572 v_0 := v.Args[0]
6573 b := v.Block
6574 typ := &b.Func.Config.Types
6575
6576
6577 for {
6578 t := v.Type
6579 x := v_0
6580 y := v_1
6581 v.reset(OpMIPS64AND)
6582 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6583 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6584 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6585 v2.AuxInt = int64ToAuxInt(64)
6586 v1.AddArg2(v2, y)
6587 v0.AddArg(v1)
6588 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6589 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6590 v4.AddArg(x)
6591 v3.AddArg2(v4, y)
6592 v.AddArg2(v0, v3)
6593 return true
6594 }
6595 }
6596 func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
6597 v_1 := v.Args[1]
6598 v_0 := v.Args[0]
6599 b := v.Block
6600 typ := &b.Func.Config.Types
6601
6602
6603 for {
6604 t := v.Type
6605 x := v_0
6606 y := v_1
6607 v.reset(OpMIPS64AND)
6608 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6609 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6610 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6611 v2.AuxInt = int64ToAuxInt(64)
6612 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6613 v3.AddArg(y)
6614 v1.AddArg2(v2, v3)
6615 v0.AddArg(v1)
6616 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6617 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6618 v5.AddArg(x)
6619 v4.AddArg2(v5, v3)
6620 v.AddArg2(v0, v4)
6621 return true
6622 }
6623 }
6624 func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
6625 v_1 := v.Args[1]
6626 v_0 := v.Args[0]
6627 b := v.Block
6628 typ := &b.Func.Config.Types
6629
6630
6631 for {
6632 t := v.Type
6633 x := v_0
6634 y := v_1
6635 v.reset(OpMIPS64SRAV)
6636 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6637 v0.AddArg(x)
6638 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6639 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6640 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6641 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6642 v4.AddArg(y)
6643 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6644 v5.AuxInt = int64ToAuxInt(63)
6645 v3.AddArg2(v4, v5)
6646 v2.AddArg(v3)
6647 v1.AddArg2(v2, v4)
6648 v.AddArg2(v0, v1)
6649 return true
6650 }
6651 }
6652 func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
6653 v_1 := v.Args[1]
6654 v_0 := v.Args[0]
6655 b := v.Block
6656 typ := &b.Func.Config.Types
6657
6658
6659 for {
6660 t := v.Type
6661 x := v_0
6662 y := v_1
6663 v.reset(OpMIPS64SRAV)
6664 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6665 v0.AddArg(x)
6666 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6667 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6668 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6669 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6670 v4.AddArg(y)
6671 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6672 v5.AuxInt = int64ToAuxInt(63)
6673 v3.AddArg2(v4, v5)
6674 v2.AddArg(v3)
6675 v1.AddArg2(v2, v4)
6676 v.AddArg2(v0, v1)
6677 return true
6678 }
6679 }
6680 func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
6681 v_1 := v.Args[1]
6682 v_0 := v.Args[0]
6683 b := v.Block
6684 typ := &b.Func.Config.Types
6685
6686
6687 for {
6688 t := v.Type
6689 x := v_0
6690 y := v_1
6691 v.reset(OpMIPS64SRAV)
6692 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6693 v0.AddArg(x)
6694 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6695 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6696 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6697 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6698 v4.AuxInt = int64ToAuxInt(63)
6699 v3.AddArg2(y, v4)
6700 v2.AddArg(v3)
6701 v1.AddArg2(v2, y)
6702 v.AddArg2(v0, v1)
6703 return true
6704 }
6705 }
6706 func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
6707 v_1 := v.Args[1]
6708 v_0 := v.Args[0]
6709 b := v.Block
6710 typ := &b.Func.Config.Types
6711
6712
6713 for {
6714 t := v.Type
6715 x := v_0
6716 y := v_1
6717 v.reset(OpMIPS64SRAV)
6718 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6719 v0.AddArg(x)
6720 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6721 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6722 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6723 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6724 v4.AddArg(y)
6725 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6726 v5.AuxInt = int64ToAuxInt(63)
6727 v3.AddArg2(v4, v5)
6728 v2.AddArg(v3)
6729 v1.AddArg2(v2, v4)
6730 v.AddArg2(v0, v1)
6731 return true
6732 }
6733 }
6734 func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
6735 v_1 := v.Args[1]
6736 v_0 := v.Args[0]
6737 b := v.Block
6738 typ := &b.Func.Config.Types
6739
6740
6741 for {
6742 t := v.Type
6743 x := v_0
6744 y := v_1
6745 v.reset(OpMIPS64AND)
6746 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6747 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6748 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6749 v2.AuxInt = int64ToAuxInt(64)
6750 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6751 v3.AddArg(y)
6752 v1.AddArg2(v2, v3)
6753 v0.AddArg(v1)
6754 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6755 v4.AddArg2(x, v3)
6756 v.AddArg2(v0, v4)
6757 return true
6758 }
6759 }
6760 func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
6761 v_1 := v.Args[1]
6762 v_0 := v.Args[0]
6763 b := v.Block
6764 typ := &b.Func.Config.Types
6765
6766
6767 for {
6768 t := v.Type
6769 x := v_0
6770 y := v_1
6771 v.reset(OpMIPS64AND)
6772 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6773 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6774 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6775 v2.AuxInt = int64ToAuxInt(64)
6776 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6777 v3.AddArg(y)
6778 v1.AddArg2(v2, v3)
6779 v0.AddArg(v1)
6780 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6781 v4.AddArg2(x, v3)
6782 v.AddArg2(v0, v4)
6783 return true
6784 }
6785 }
6786 func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
6787 v_1 := v.Args[1]
6788 v_0 := v.Args[0]
6789 b := v.Block
6790 typ := &b.Func.Config.Types
6791
6792
6793 for {
6794 t := v.Type
6795 x := v_0
6796 y := v_1
6797 v.reset(OpMIPS64AND)
6798 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6799 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6800 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6801 v2.AuxInt = int64ToAuxInt(64)
6802 v1.AddArg2(v2, y)
6803 v0.AddArg(v1)
6804 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6805 v3.AddArg2(x, y)
6806 v.AddArg2(v0, v3)
6807 return true
6808 }
6809 }
6810 func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
6811 v_1 := v.Args[1]
6812 v_0 := v.Args[0]
6813 b := v.Block
6814 typ := &b.Func.Config.Types
6815
6816
6817 for {
6818 t := v.Type
6819 x := v_0
6820 y := v_1
6821 v.reset(OpMIPS64AND)
6822 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6823 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6824 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6825 v2.AuxInt = int64ToAuxInt(64)
6826 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6827 v3.AddArg(y)
6828 v1.AddArg2(v2, v3)
6829 v0.AddArg(v1)
6830 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6831 v4.AddArg2(x, v3)
6832 v.AddArg2(v0, v4)
6833 return true
6834 }
6835 }
6836 func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
6837 v_1 := v.Args[1]
6838 v_0 := v.Args[0]
6839 b := v.Block
6840 typ := &b.Func.Config.Types
6841
6842
6843 for {
6844 t := v.Type
6845 x := v_0
6846 y := v_1
6847 v.reset(OpMIPS64SRAV)
6848 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6849 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6850 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6851 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6852 v3.AddArg(y)
6853 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6854 v4.AuxInt = int64ToAuxInt(63)
6855 v2.AddArg2(v3, v4)
6856 v1.AddArg(v2)
6857 v0.AddArg2(v1, v3)
6858 v.AddArg2(x, v0)
6859 return true
6860 }
6861 }
6862 func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
6863 v_1 := v.Args[1]
6864 v_0 := v.Args[0]
6865 b := v.Block
6866 typ := &b.Func.Config.Types
6867
6868
6869 for {
6870 t := v.Type
6871 x := v_0
6872 y := v_1
6873 v.reset(OpMIPS64SRAV)
6874 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6875 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6876 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6877 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6878 v3.AddArg(y)
6879 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6880 v4.AuxInt = int64ToAuxInt(63)
6881 v2.AddArg2(v3, v4)
6882 v1.AddArg(v2)
6883 v0.AddArg2(v1, v3)
6884 v.AddArg2(x, v0)
6885 return true
6886 }
6887 }
6888 func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
6889 v_1 := v.Args[1]
6890 v_0 := v.Args[0]
6891 b := v.Block
6892 typ := &b.Func.Config.Types
6893
6894
6895 for {
6896 t := v.Type
6897 x := v_0
6898 y := v_1
6899 v.reset(OpMIPS64SRAV)
6900 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6901 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6902 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6903 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6904 v3.AuxInt = int64ToAuxInt(63)
6905 v2.AddArg2(y, v3)
6906 v1.AddArg(v2)
6907 v0.AddArg2(v1, y)
6908 v.AddArg2(x, v0)
6909 return true
6910 }
6911 }
6912 func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
6913 v_1 := v.Args[1]
6914 v_0 := v.Args[0]
6915 b := v.Block
6916 typ := &b.Func.Config.Types
6917
6918
6919 for {
6920 t := v.Type
6921 x := v_0
6922 y := v_1
6923 v.reset(OpMIPS64SRAV)
6924 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6925 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6926 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6927 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6928 v3.AddArg(y)
6929 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6930 v4.AuxInt = int64ToAuxInt(63)
6931 v2.AddArg2(v3, v4)
6932 v1.AddArg(v2)
6933 v0.AddArg2(v1, v3)
6934 v.AddArg2(x, v0)
6935 return true
6936 }
6937 }
6938 func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
6939 v_1 := v.Args[1]
6940 v_0 := v.Args[0]
6941 b := v.Block
6942 typ := &b.Func.Config.Types
6943
6944
6945 for {
6946 t := v.Type
6947 x := v_0
6948 y := v_1
6949 v.reset(OpMIPS64AND)
6950 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6951 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6952 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6953 v2.AuxInt = int64ToAuxInt(64)
6954 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6955 v3.AddArg(y)
6956 v1.AddArg2(v2, v3)
6957 v0.AddArg(v1)
6958 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6959 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6960 v5.AddArg(x)
6961 v4.AddArg2(v5, v3)
6962 v.AddArg2(v0, v4)
6963 return true
6964 }
6965 }
6966 func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
6967 v_1 := v.Args[1]
6968 v_0 := v.Args[0]
6969 b := v.Block
6970 typ := &b.Func.Config.Types
6971
6972
6973 for {
6974 t := v.Type
6975 x := v_0
6976 y := v_1
6977 v.reset(OpMIPS64AND)
6978 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6979 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6980 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6981 v2.AuxInt = int64ToAuxInt(64)
6982 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6983 v3.AddArg(y)
6984 v1.AddArg2(v2, v3)
6985 v0.AddArg(v1)
6986 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6987 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6988 v5.AddArg(x)
6989 v4.AddArg2(v5, v3)
6990 v.AddArg2(v0, v4)
6991 return true
6992 }
6993 }
6994 func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
6995 v_1 := v.Args[1]
6996 v_0 := v.Args[0]
6997 b := v.Block
6998 typ := &b.Func.Config.Types
6999
7000
7001 for {
7002 t := v.Type
7003 x := v_0
7004 y := v_1
7005 v.reset(OpMIPS64AND)
7006 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7007 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7008 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7009 v2.AuxInt = int64ToAuxInt(64)
7010 v1.AddArg2(v2, y)
7011 v0.AddArg(v1)
7012 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7013 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7014 v4.AddArg(x)
7015 v3.AddArg2(v4, y)
7016 v.AddArg2(v0, v3)
7017 return true
7018 }
7019 }
7020 func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
7021 v_1 := v.Args[1]
7022 v_0 := v.Args[0]
7023 b := v.Block
7024 typ := &b.Func.Config.Types
7025
7026
7027 for {
7028 t := v.Type
7029 x := v_0
7030 y := v_1
7031 v.reset(OpMIPS64AND)
7032 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7033 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7034 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7035 v2.AuxInt = int64ToAuxInt(64)
7036 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7037 v3.AddArg(y)
7038 v1.AddArg2(v2, v3)
7039 v0.AddArg(v1)
7040 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7041 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7042 v5.AddArg(x)
7043 v4.AddArg2(v5, v3)
7044 v.AddArg2(v0, v4)
7045 return true
7046 }
7047 }
7048 func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
7049 v_1 := v.Args[1]
7050 v_0 := v.Args[0]
7051 b := v.Block
7052 typ := &b.Func.Config.Types
7053
7054
7055 for {
7056 t := v.Type
7057 x := v_0
7058 y := v_1
7059 v.reset(OpMIPS64SRAV)
7060 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7061 v0.AddArg(x)
7062 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7063 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7064 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7065 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7066 v4.AddArg(y)
7067 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7068 v5.AuxInt = int64ToAuxInt(63)
7069 v3.AddArg2(v4, v5)
7070 v2.AddArg(v3)
7071 v1.AddArg2(v2, v4)
7072 v.AddArg2(v0, v1)
7073 return true
7074 }
7075 }
7076 func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
7077 v_1 := v.Args[1]
7078 v_0 := v.Args[0]
7079 b := v.Block
7080 typ := &b.Func.Config.Types
7081
7082
7083 for {
7084 t := v.Type
7085 x := v_0
7086 y := v_1
7087 v.reset(OpMIPS64SRAV)
7088 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7089 v0.AddArg(x)
7090 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7091 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7092 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7093 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7094 v4.AddArg(y)
7095 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7096 v5.AuxInt = int64ToAuxInt(63)
7097 v3.AddArg2(v4, v5)
7098 v2.AddArg(v3)
7099 v1.AddArg2(v2, v4)
7100 v.AddArg2(v0, v1)
7101 return true
7102 }
7103 }
7104 func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
7105 v_1 := v.Args[1]
7106 v_0 := v.Args[0]
7107 b := v.Block
7108 typ := &b.Func.Config.Types
7109
7110
7111 for {
7112 t := v.Type
7113 x := v_0
7114 y := v_1
7115 v.reset(OpMIPS64SRAV)
7116 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7117 v0.AddArg(x)
7118 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7119 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7120 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7121 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7122 v4.AuxInt = int64ToAuxInt(63)
7123 v3.AddArg2(y, v4)
7124 v2.AddArg(v3)
7125 v1.AddArg2(v2, y)
7126 v.AddArg2(v0, v1)
7127 return true
7128 }
7129 }
7130 func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
7131 v_1 := v.Args[1]
7132 v_0 := v.Args[0]
7133 b := v.Block
7134 typ := &b.Func.Config.Types
7135
7136
7137 for {
7138 t := v.Type
7139 x := v_0
7140 y := v_1
7141 v.reset(OpMIPS64SRAV)
7142 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7143 v0.AddArg(x)
7144 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7145 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7146 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7147 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7148 v4.AddArg(y)
7149 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7150 v5.AuxInt = int64ToAuxInt(63)
7151 v3.AddArg2(v4, v5)
7152 v2.AddArg(v3)
7153 v1.AddArg2(v2, v4)
7154 v.AddArg2(v0, v1)
7155 return true
7156 }
7157 }
7158 func rewriteValueMIPS64_OpSelect0(v *Value) bool {
7159 v_0 := v.Args[0]
7160 b := v.Block
7161 typ := &b.Func.Config.Types
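// Select0 extracts the first element of a tuple. For Mul64uover that is the
// low 64-bit product (the Select1 half of MULVU); for Add64carry/Sub64borrow
// it is the full sum/difference including the carry/borrow-in; for
// DIVV/DIVVU it is the remainder, which the constant-folding and
// power-of-two rules below exploit.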
7162
7163
7164 for {
7165 if v_0.Op != OpMul64uover {
7166 break
7167 }
7168 y := v_0.Args[1]
7169 x := v_0.Args[0]
7170 v.reset(OpSelect1)
7171 v.Type = typ.UInt64
7172 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7173 v0.AddArg2(x, y)
7174 v.AddArg(v0)
7175 return true
7176 }
7177
7178
7179 for {
7180 t := v.Type
7181 if v_0.Op != OpAdd64carry {
7182 break
7183 }
7184 c := v_0.Args[2]
7185 x := v_0.Args[0]
7186 y := v_0.Args[1]
7187 v.reset(OpMIPS64ADDV)
7188 v0 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7189 v0.AddArg2(x, y)
7190 v.AddArg2(v0, c)
7191 return true
7192 }
7193
7194
7195 for {
7196 t := v.Type
7197 if v_0.Op != OpSub64borrow {
7198 break
7199 }
7200 c := v_0.Args[2]
7201 x := v_0.Args[0]
7202 y := v_0.Args[1]
7203 v.reset(OpMIPS64SUBV)
7204 v0 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7205 v0.AddArg2(x, y)
7206 v.AddArg2(v0, c)
7207 return true
7208 }
7209
7210
7211 for {
7212 if v_0.Op != OpMIPS64DIVVU {
7213 break
7214 }
7215 _ = v_0.Args[1]
7216 v_0_1 := v_0.Args[1]
7217 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7218 break
7219 }
7220 v.reset(OpMIPS64MOVVconst)
7221 v.AuxInt = int64ToAuxInt(0)
7222 return true
7223 }
7224
7225
7226
7227 for {
7228 if v_0.Op != OpMIPS64DIVVU {
7229 break
7230 }
7231 _ = v_0.Args[1]
7232 x := v_0.Args[0]
7233 v_0_1 := v_0.Args[1]
7234 if v_0_1.Op != OpMIPS64MOVVconst {
7235 break
7236 }
7237 c := auxIntToInt64(v_0_1.AuxInt)
7238 if !(isPowerOfTwo(c)) {
7239 break
7240 }
7241 v.reset(OpMIPS64ANDconst)
7242 v.AuxInt = int64ToAuxInt(c - 1)
7243 v.AddArg(x)
7244 return true
7245 }
7246
7247
7248
7249 for {
7250 if v_0.Op != OpMIPS64DIVV {
7251 break
7252 }
7253 _ = v_0.Args[1]
7254 v_0_0 := v_0.Args[0]
7255 if v_0_0.Op != OpMIPS64MOVVconst {
7256 break
7257 }
7258 c := auxIntToInt64(v_0_0.AuxInt)
7259 v_0_1 := v_0.Args[1]
7260 if v_0_1.Op != OpMIPS64MOVVconst {
7261 break
7262 }
7263 d := auxIntToInt64(v_0_1.AuxInt)
7264 if !(d != 0) {
7265 break
7266 }
7267 v.reset(OpMIPS64MOVVconst)
7268 v.AuxInt = int64ToAuxInt(c % d)
7269 return true
7270 }
7271
7272
7273
7274 for {
7275 if v_0.Op != OpMIPS64DIVVU {
7276 break
7277 }
7278 _ = v_0.Args[1]
7279 v_0_0 := v_0.Args[0]
7280 if v_0_0.Op != OpMIPS64MOVVconst {
7281 break
7282 }
7283 c := auxIntToInt64(v_0_0.AuxInt)
7284 v_0_1 := v_0.Args[1]
7285 if v_0_1.Op != OpMIPS64MOVVconst {
7286 break
7287 }
7288 d := auxIntToInt64(v_0_1.AuxInt)
7289 if !(d != 0) {
7290 break
7291 }
7292 v.reset(OpMIPS64MOVVconst)
7293 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
7294 return true
7295 }
7296 return false
7297 }
7298 func rewriteValueMIPS64_OpSelect1(v *Value) bool {
7299 v_0 := v.Args[0]
7300 b := v.Block
7301 typ := &b.Func.Config.Types
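// Select1 extracts the second element of a tuple. For Mul64uover it is the
// overflow bit, true when the high word (Select0) of MULVU is nonzero; for
// Add64carry/Sub64borrow it is the carry/borrow, computed with SGTU chains;
// for MULVU it is the low product and for DIVVU the quotient, which the
// strength-reduction rules below rely on.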
7302
7303
7304 for {
7305 if v_0.Op != OpMul64uover {
7306 break
7307 }
7308 y := v_0.Args[1]
7309 x := v_0.Args[0]
7310 v.reset(OpMIPS64SGTU)
7311 v.Type = typ.Bool
7312 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
7313 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7314 v1.AddArg2(x, y)
7315 v0.AddArg(v1)
7316 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7317 v2.AuxInt = int64ToAuxInt(0)
7318 v.AddArg2(v0, v2)
7319 return true
7320 }
7321
7322
7323 for {
7324 t := v.Type
7325 if v_0.Op != OpAdd64carry {
7326 break
7327 }
7328 c := v_0.Args[2]
7329 x := v_0.Args[0]
7330 y := v_0.Args[1]
7331 v.reset(OpMIPS64OR)
7332 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7333 s := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7334 s.AddArg2(x, y)
7335 v0.AddArg2(x, s)
7336 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7337 v3 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7338 v3.AddArg2(s, c)
7339 v2.AddArg2(s, v3)
7340 v.AddArg2(v0, v2)
7341 return true
7342 }
7343
7344
7345 for {
7346 t := v.Type
7347 if v_0.Op != OpSub64borrow {
7348 break
7349 }
7350 c := v_0.Args[2]
7351 x := v_0.Args[0]
7352 y := v_0.Args[1]
7353 v.reset(OpMIPS64OR)
7354 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7355 s := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7356 s.AddArg2(x, y)
7357 v0.AddArg2(s, x)
7358 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7359 v3 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7360 v3.AddArg2(s, c)
7361 v2.AddArg2(v3, s)
7362 v.AddArg2(v0, v2)
7363 return true
7364 }
7365 // match: (Select1 (MULVU x (MOVVconst [-1])))
7366 // result: (NEGV x)
7367 for {
7368 if v_0.Op != OpMIPS64MULVU {
7369 break
7370 }
7371 _ = v_0.Args[1]
7372 v_0_0 := v_0.Args[0]
7373 v_0_1 := v_0.Args[1]
7374 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7375 x := v_0_0
7376 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
7377 continue
7378 }
7379 v.reset(OpMIPS64NEGV)
7380 v.AddArg(x)
7381 return true
7382 }
7383 break
7384 }
7385 // match: (Select1 (MULVU _ (MOVVconst [0])))
7386 // result: (MOVVconst [0])
7387 for {
7388 if v_0.Op != OpMIPS64MULVU {
7389 break
7390 }
7391 _ = v_0.Args[1]
7392 v_0_0 := v_0.Args[0]
7393 v_0_1 := v_0.Args[1]
7394 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7395 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7396 continue
7397 }
7398 v.reset(OpMIPS64MOVVconst)
7399 v.AuxInt = int64ToAuxInt(0)
7400 return true
7401 }
7402 break
7403 }
7404 // match: (Select1 (MULVU x (MOVVconst [1])))
7405 // result: x
7406 for {
7407 if v_0.Op != OpMIPS64MULVU {
7408 break
7409 }
7410 _ = v_0.Args[1]
7411 v_0_0 := v_0.Args[0]
7412 v_0_1 := v_0.Args[1]
7413 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7414 x := v_0_0
7415 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7416 continue
7417 }
7418 v.copyOf(x)
7419 return true
7420 }
7421 break
7422 }
7423 // match: (Select1 (MULVU x (MOVVconst [c])))
7424 // cond: isPowerOfTwo(c)
7425 // result: (SLLVconst [log64(c)] x)
7426 for {
7427 if v_0.Op != OpMIPS64MULVU {
7428 break
7429 }
7430 _ = v_0.Args[1]
7431 v_0_0 := v_0.Args[0]
7432 v_0_1 := v_0.Args[1]
7433 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7434 x := v_0_0
7435 if v_0_1.Op != OpMIPS64MOVVconst {
7436 continue
7437 }
7438 c := auxIntToInt64(v_0_1.AuxInt)
7439 if !(isPowerOfTwo(c)) {
7440 continue
7441 }
7442 v.reset(OpMIPS64SLLVconst)
7443 v.AuxInt = int64ToAuxInt(log64(c))
7444 v.AddArg(x)
7445 return true
7446 }
7447 break
7448 }
7449 // match: (Select1 (DIVVU x (MOVVconst [1])))
7450 // result: x
7451 for {
7452 if v_0.Op != OpMIPS64DIVVU {
7453 break
7454 }
7455 _ = v_0.Args[1]
7456 x := v_0.Args[0]
7457 v_0_1 := v_0.Args[1]
7458 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7459 break
7460 }
7461 v.copyOf(x)
7462 return true
7463 }
7464 // match: (Select1 (DIVVU x (MOVVconst [c])))
7465 // cond: isPowerOfTwo(c)
7466 // result: (SRLVconst [log64(c)] x)
7467 for {
7468 if v_0.Op != OpMIPS64DIVVU {
7469 break
7470 }
7471 _ = v_0.Args[1]
7472 x := v_0.Args[0]
7473 v_0_1 := v_0.Args[1]
7474 if v_0_1.Op != OpMIPS64MOVVconst {
7475 break
7476 }
7477 c := auxIntToInt64(v_0_1.AuxInt)
7478 if !(isPowerOfTwo(c)) {
7479 break
7480 }
7481 v.reset(OpMIPS64SRLVconst)
7482 v.AuxInt = int64ToAuxInt(log64(c))
7483 v.AddArg(x)
7484 return true
7485 }
7486 // match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
7487 // result: (MOVVconst [c*d])
7488 for {
7489 if v_0.Op != OpMIPS64MULVU {
7490 break
7491 }
7492 _ = v_0.Args[1]
7493 v_0_0 := v_0.Args[0]
7494 v_0_1 := v_0.Args[1]
7495 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7496 if v_0_0.Op != OpMIPS64MOVVconst {
7497 continue
7498 }
7499 c := auxIntToInt64(v_0_0.AuxInt)
7500 if v_0_1.Op != OpMIPS64MOVVconst {
7501 continue
7502 }
7503 d := auxIntToInt64(v_0_1.AuxInt)
7504 v.reset(OpMIPS64MOVVconst)
7505 v.AuxInt = int64ToAuxInt(c * d)
7506 return true
7507 }
7508 break
7509 }
7510 // match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
7511 // cond: d != 0
7512 // result: (MOVVconst [c/d])
7513 for {
7514 if v_0.Op != OpMIPS64DIVV {
7515 break
7516 }
7517 _ = v_0.Args[1]
7518 v_0_0 := v_0.Args[0]
7519 if v_0_0.Op != OpMIPS64MOVVconst {
7520 break
7521 }
7522 c := auxIntToInt64(v_0_0.AuxInt)
7523 v_0_1 := v_0.Args[1]
7524 if v_0_1.Op != OpMIPS64MOVVconst {
7525 break
7526 }
7527 d := auxIntToInt64(v_0_1.AuxInt)
7528 if !(d != 0) {
7529 break
7530 }
7531 v.reset(OpMIPS64MOVVconst)
7532 v.AuxInt = int64ToAuxInt(c / d)
7533 return true
7534 }
7535 // match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
7536 // cond: d != 0
7537 // result: (MOVVconst [int64(uint64(c)/uint64(d))])
7538 for {
7539 if v_0.Op != OpMIPS64DIVVU {
7540 break
7541 }
7542 _ = v_0.Args[1]
7543 v_0_0 := v_0.Args[0]
7544 if v_0_0.Op != OpMIPS64MOVVconst {
7545 break
7546 }
7547 c := auxIntToInt64(v_0_0.AuxInt)
7548 v_0_1 := v_0.Args[1]
7549 if v_0_1.Op != OpMIPS64MOVVconst {
7550 break
7551 }
7552 d := auxIntToInt64(v_0_1.AuxInt)
7553 if !(d != 0) {
7554 break
7555 }
7556 v.reset(OpMIPS64MOVVconst)
7557 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
7558 return true
7559 }
7560 return false
7561 }
7562 func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
7563 v_0 := v.Args[0]
7564 b := v.Block
7565 // match: (Slicemask <t> x)
7566 // result: (SRAVconst (NEGV <t> x) [63])
7567 for {
7568 t := v.Type
7569 x := v_0
7570 v.reset(OpMIPS64SRAVconst)
7571 v.AuxInt = int64ToAuxInt(63)
7572 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7573 v0.AddArg(x)
7574 v.AddArg(v0)
7575 return true
7576 }
7577 }
7578 func rewriteValueMIPS64_OpStore(v *Value) bool {
7579 v_2 := v.Args[2]
7580 v_1 := v.Args[1]
7581 v_0 := v.Args[0]
7582 // match: (Store {t} ptr val mem)
7583 // cond: t.Size() == 1
7584 // result: (MOVBstore ptr val mem)
7585 for {
7586 t := auxToType(v.Aux)
7587 ptr := v_0
7588 val := v_1
7589 mem := v_2
7590 if !(t.Size() == 1) {
7591 break
7592 }
7593 v.reset(OpMIPS64MOVBstore)
7594 v.AddArg3(ptr, val, mem)
7595 return true
7596 }
7597 // match: (Store {t} ptr val mem)
7598 // cond: t.Size() == 2
7599 // result: (MOVHstore ptr val mem)
7600 for {
7601 t := auxToType(v.Aux)
7602 ptr := v_0
7603 val := v_1
7604 mem := v_2
7605 if !(t.Size() == 2) {
7606 break
7607 }
7608 v.reset(OpMIPS64MOVHstore)
7609 v.AddArg3(ptr, val, mem)
7610 return true
7611 }
7612 // match: (Store {t} ptr val mem)
7613 // cond: t.Size() == 4 && !t.IsFloat()
7614 // result: (MOVWstore ptr val mem)
7615 for {
7616 t := auxToType(v.Aux)
7617 ptr := v_0
7618 val := v_1
7619 mem := v_2
7620 if !(t.Size() == 4 && !t.IsFloat()) {
7621 break
7622 }
7623 v.reset(OpMIPS64MOVWstore)
7624 v.AddArg3(ptr, val, mem)
7625 return true
7626 }
7627 // match: (Store {t} ptr val mem)
7628 // cond: t.Size() == 8 && !t.IsFloat()
7629 // result: (MOVVstore ptr val mem)
7630 for {
7631 t := auxToType(v.Aux)
7632 ptr := v_0
7633 val := v_1
7634 mem := v_2
7635 if !(t.Size() == 8 && !t.IsFloat()) {
7636 break
7637 }
7638 v.reset(OpMIPS64MOVVstore)
7639 v.AddArg3(ptr, val, mem)
7640 return true
7641 }
7642 // match: (Store {t} ptr val mem)
7643 // cond: t.Size() == 4 && t.IsFloat()
7644 // result: (MOVFstore ptr val mem)
7645 for {
7646 t := auxToType(v.Aux)
7647 ptr := v_0
7648 val := v_1
7649 mem := v_2
7650 if !(t.Size() == 4 && t.IsFloat()) {
7651 break
7652 }
7653 v.reset(OpMIPS64MOVFstore)
7654 v.AddArg3(ptr, val, mem)
7655 return true
7656 }
7657 // match: (Store {t} ptr val mem)
7658 // cond: t.Size() == 8 && t.IsFloat()
7659 // result: (MOVDstore ptr val mem)
7660 for {
7661 t := auxToType(v.Aux)
7662 ptr := v_0
7663 val := v_1
7664 mem := v_2
7665 if !(t.Size() == 8 && t.IsFloat()) {
7666 break
7667 }
7668 v.reset(OpMIPS64MOVDstore)
7669 v.AddArg3(ptr, val, mem)
7670 return true
7671 }
7672 return false
7673 }
7674 func rewriteValueMIPS64_OpZero(v *Value) bool {
7675 v_1 := v.Args[1]
7676 v_0 := v.Args[0]
7677 b := v.Block
7678 config := b.Func.Config
7679 typ := &b.Func.Config.Types
7680 // match: (Zero [0] _ mem)
7681 // result: mem
7682 for {
7683 if auxIntToInt64(v.AuxInt) != 0 {
7684 break
7685 }
7686 mem := v_1
7687 v.copyOf(mem)
7688 return true
7689 }
7690 // match: (Zero [1] ptr mem)
7691 // result: (MOVBstore ptr (MOVVconst [0]) mem)
7692 for {
7693 if auxIntToInt64(v.AuxInt) != 1 {
7694 break
7695 }
7696 ptr := v_0
7697 mem := v_1
7698 v.reset(OpMIPS64MOVBstore)
7699 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7700 v0.AuxInt = int64ToAuxInt(0)
7701 v.AddArg3(ptr, v0, mem)
7702 return true
7703 }
7704 // match: (Zero [2] {t} ptr mem)
7705 // cond: t.Alignment()%2 == 0
7706 // result: (MOVHstore ptr (MOVVconst [0]) mem)
7707 for {
7708 if auxIntToInt64(v.AuxInt) != 2 {
7709 break
7710 }
7711 t := auxToType(v.Aux)
7712 ptr := v_0
7713 mem := v_1
7714 if !(t.Alignment()%2 == 0) {
7715 break
7716 }
7717 v.reset(OpMIPS64MOVHstore)
7718 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7719 v0.AuxInt = int64ToAuxInt(0)
7720 v.AddArg3(ptr, v0, mem)
7721 return true
7722 }
7723 // match: (Zero [2] ptr mem)
7724 // result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
7725 for {
7726 if auxIntToInt64(v.AuxInt) != 2 {
7727 break
7728 }
7729 ptr := v_0
7730 mem := v_1
7731 v.reset(OpMIPS64MOVBstore)
7732 v.AuxInt = int32ToAuxInt(1)
7733 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7734 v0.AuxInt = int64ToAuxInt(0)
7735 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7736 v1.AuxInt = int32ToAuxInt(0)
7737 v1.AddArg3(ptr, v0, mem)
7738 v.AddArg3(ptr, v0, v1)
7739 return true
7740 }
7741 // match: (Zero [4] {t} ptr mem)
7742 // cond: t.Alignment()%4 == 0
7743 // result: (MOVWstore ptr (MOVVconst [0]) mem)
7744 for {
7745 if auxIntToInt64(v.AuxInt) != 4 {
7746 break
7747 }
7748 t := auxToType(v.Aux)
7749 ptr := v_0
7750 mem := v_1
7751 if !(t.Alignment()%4 == 0) {
7752 break
7753 }
7754 v.reset(OpMIPS64MOVWstore)
7755 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7756 v0.AuxInt = int64ToAuxInt(0)
7757 v.AddArg3(ptr, v0, mem)
7758 return true
7759 }
7760 // match: (Zero [4] {t} ptr mem)
7761 // cond: t.Alignment()%2 == 0
7762 // result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
7763 for {
7764 if auxIntToInt64(v.AuxInt) != 4 {
7765 break
7766 }
7767 t := auxToType(v.Aux)
7768 ptr := v_0
7769 mem := v_1
7770 if !(t.Alignment()%2 == 0) {
7771 break
7772 }
7773 v.reset(OpMIPS64MOVHstore)
7774 v.AuxInt = int32ToAuxInt(2)
7775 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7776 v0.AuxInt = int64ToAuxInt(0)
7777 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7778 v1.AuxInt = int32ToAuxInt(0)
7779 v1.AddArg3(ptr, v0, mem)
7780 v.AddArg3(ptr, v0, v1)
7781 return true
7782 }
7783 // match: (Zero [4] ptr mem)
7784 // result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
7785 for {
7786 if auxIntToInt64(v.AuxInt) != 4 {
7787 break
7788 }
7789 ptr := v_0
7790 mem := v_1
7791 v.reset(OpMIPS64MOVBstore)
7792 v.AuxInt = int32ToAuxInt(3)
7793 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7794 v0.AuxInt = int64ToAuxInt(0)
7795 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7796 v1.AuxInt = int32ToAuxInt(2)
7797 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7798 v2.AuxInt = int32ToAuxInt(1)
7799 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7800 v3.AuxInt = int32ToAuxInt(0)
7801 v3.AddArg3(ptr, v0, mem)
7802 v2.AddArg3(ptr, v0, v3)
7803 v1.AddArg3(ptr, v0, v2)
7804 v.AddArg3(ptr, v0, v1)
7805 return true
7806 }
7807 // match: (Zero [8] {t} ptr mem)
7808 // cond: t.Alignment()%8 == 0
7809 // result: (MOVVstore ptr (MOVVconst [0]) mem)
7810 for {
7811 if auxIntToInt64(v.AuxInt) != 8 {
7812 break
7813 }
7814 t := auxToType(v.Aux)
7815 ptr := v_0
7816 mem := v_1
7817 if !(t.Alignment()%8 == 0) {
7818 break
7819 }
7820 v.reset(OpMIPS64MOVVstore)
7821 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7822 v0.AuxInt = int64ToAuxInt(0)
7823 v.AddArg3(ptr, v0, mem)
7824 return true
7825 }
7826 // match: (Zero [8] {t} ptr mem)
7827 // cond: t.Alignment()%4 == 0
7828 // result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
7829 for {
7830 if auxIntToInt64(v.AuxInt) != 8 {
7831 break
7832 }
7833 t := auxToType(v.Aux)
7834 ptr := v_0
7835 mem := v_1
7836 if !(t.Alignment()%4 == 0) {
7837 break
7838 }
7839 v.reset(OpMIPS64MOVWstore)
7840 v.AuxInt = int32ToAuxInt(4)
7841 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7842 v0.AuxInt = int64ToAuxInt(0)
7843 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
7844 v1.AuxInt = int32ToAuxInt(0)
7845 v1.AddArg3(ptr, v0, mem)
7846 v.AddArg3(ptr, v0, v1)
7847 return true
7848 }
7849 // match: (Zero [8] {t} ptr mem)
7850 // cond: t.Alignment()%2 == 0
7851 // result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
7852 for {
7853 if auxIntToInt64(v.AuxInt) != 8 {
7854 break
7855 }
7856 t := auxToType(v.Aux)
7857 ptr := v_0
7858 mem := v_1
7859 if !(t.Alignment()%2 == 0) {
7860 break
7861 }
7862 v.reset(OpMIPS64MOVHstore)
7863 v.AuxInt = int32ToAuxInt(6)
7864 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7865 v0.AuxInt = int64ToAuxInt(0)
7866 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7867 v1.AuxInt = int32ToAuxInt(4)
7868 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7869 v2.AuxInt = int32ToAuxInt(2)
7870 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7871 v3.AuxInt = int32ToAuxInt(0)
7872 v3.AddArg3(ptr, v0, mem)
7873 v2.AddArg3(ptr, v0, v3)
7874 v1.AddArg3(ptr, v0, v2)
7875 v.AddArg3(ptr, v0, v1)
7876 return true
7877 }
7878 // match: (Zero [3] ptr mem)
7879 // result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
7880 for {
7881 if auxIntToInt64(v.AuxInt) != 3 {
7882 break
7883 }
7884 ptr := v_0
7885 mem := v_1
7886 v.reset(OpMIPS64MOVBstore)
7887 v.AuxInt = int32ToAuxInt(2)
7888 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7889 v0.AuxInt = int64ToAuxInt(0)
7890 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7891 v1.AuxInt = int32ToAuxInt(1)
7892 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7893 v2.AuxInt = int32ToAuxInt(0)
7894 v2.AddArg3(ptr, v0, mem)
7895 v1.AddArg3(ptr, v0, v2)
7896 v.AddArg3(ptr, v0, v1)
7897 return true
7898 }
7899 // match: (Zero [6] {t} ptr mem)
7900 // cond: t.Alignment()%2 == 0
7901 // result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
7902 for {
7903 if auxIntToInt64(v.AuxInt) != 6 {
7904 break
7905 }
7906 t := auxToType(v.Aux)
7907 ptr := v_0
7908 mem := v_1
7909 if !(t.Alignment()%2 == 0) {
7910 break
7911 }
7912 v.reset(OpMIPS64MOVHstore)
7913 v.AuxInt = int32ToAuxInt(4)
7914 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7915 v0.AuxInt = int64ToAuxInt(0)
7916 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7917 v1.AuxInt = int32ToAuxInt(2)
7918 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7919 v2.AuxInt = int32ToAuxInt(0)
7920 v2.AddArg3(ptr, v0, mem)
7921 v1.AddArg3(ptr, v0, v2)
7922 v.AddArg3(ptr, v0, v1)
7923 return true
7924 }
7925 // match: (Zero [12] {t} ptr mem)
7926 // cond: t.Alignment()%4 == 0
7927 // result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
7928 for {
7929 if auxIntToInt64(v.AuxInt) != 12 {
7930 break
7931 }
7932 t := auxToType(v.Aux)
7933 ptr := v_0
7934 mem := v_1
7935 if !(t.Alignment()%4 == 0) {
7936 break
7937 }
7938 v.reset(OpMIPS64MOVWstore)
7939 v.AuxInt = int32ToAuxInt(8)
7940 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7941 v0.AuxInt = int64ToAuxInt(0)
7942 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
7943 v1.AuxInt = int32ToAuxInt(4)
7944 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
7945 v2.AuxInt = int32ToAuxInt(0)
7946 v2.AddArg3(ptr, v0, mem)
7947 v1.AddArg3(ptr, v0, v2)
7948 v.AddArg3(ptr, v0, v1)
7949 return true
7950 }
7951 // match: (Zero [16] {t} ptr mem)
7952 // cond: t.Alignment()%8 == 0
7953 // result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
7954 for {
7955 if auxIntToInt64(v.AuxInt) != 16 {
7956 break
7957 }
7958 t := auxToType(v.Aux)
7959 ptr := v_0
7960 mem := v_1
7961 if !(t.Alignment()%8 == 0) {
7962 break
7963 }
7964 v.reset(OpMIPS64MOVVstore)
7965 v.AuxInt = int32ToAuxInt(8)
7966 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7967 v0.AuxInt = int64ToAuxInt(0)
7968 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
7969 v1.AuxInt = int32ToAuxInt(0)
7970 v1.AddArg3(ptr, v0, mem)
7971 v.AddArg3(ptr, v0, v1)
7972 return true
7973 }
7974 // match: (Zero [24] {t} ptr mem)
7975 // cond: t.Alignment()%8 == 0
7976 // result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
7977 for {
7978 if auxIntToInt64(v.AuxInt) != 24 {
7979 break
7980 }
7981 t := auxToType(v.Aux)
7982 ptr := v_0
7983 mem := v_1
7984 if !(t.Alignment()%8 == 0) {
7985 break
7986 }
7987 v.reset(OpMIPS64MOVVstore)
7988 v.AuxInt = int32ToAuxInt(16)
7989 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7990 v0.AuxInt = int64ToAuxInt(0)
7991 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
7992 v1.AuxInt = int32ToAuxInt(8)
7993 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
7994 v2.AuxInt = int32ToAuxInt(0)
7995 v2.AddArg3(ptr, v0, mem)
7996 v1.AddArg3(ptr, v0, v2)
7997 v.AddArg3(ptr, v0, v1)
7998 return true
7999 }
8000 // match: (Zero [s] {t} ptr mem)
8001 // cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0
8002 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
8003 for {
8004 s := auxIntToInt64(v.AuxInt)
8005 t := auxToType(v.Aux)
8006 ptr := v_0
8007 mem := v_1
8008 if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0) {
8009 break
8010 }
8011 v.reset(OpMIPS64DUFFZERO)
8012 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
8013 v.AddArg2(ptr, mem)
8014 return true
8015 }
8016 // match: (Zero [s] {t} ptr mem)
8017 // cond: s > 8*128 || t.Alignment()%8 != 0
8018 // result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)
8019 for {
8020 s := auxIntToInt64(v.AuxInt)
8021 t := auxToType(v.Aux)
8022 ptr := v_0
8023 mem := v_1
8024 if !(s > 8*128 || t.Alignment()%8 != 0) {
8025 break
8026 }
8027 v.reset(OpMIPS64LoweredZero)
8028 v.AuxInt = int64ToAuxInt(t.Alignment())
8029 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
8030 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8031 v0.AddArg(ptr)
8032 v.AddArg3(ptr, v0, mem)
8033 return true
8034 }
8035 return false
8036 }
8037 func rewriteBlockMIPS64(b *Block) bool {
8038 switch b.Kind {
8039 case BlockMIPS64EQ:
8040 // match: (EQ (FPFlagTrue cmp) yes no)
8041 // result: (FPF cmp yes no)
8042 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8043 v_0 := b.Controls[0]
8044 cmp := v_0.Args[0]
8045 b.resetWithControl(BlockMIPS64FPF, cmp)
8046 return true
8047 }
8048 // match: (EQ (FPFlagFalse cmp) yes no)
8049 // result: (FPT cmp yes no)
8050 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8051 v_0 := b.Controls[0]
8052 cmp := v_0.Args[0]
8053 b.resetWithControl(BlockMIPS64FPT, cmp)
8054 return true
8055 }
8056 // match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
8057 // result: (NE cmp yes no)
8058 for b.Controls[0].Op == OpMIPS64XORconst {
8059 v_0 := b.Controls[0]
8060 if auxIntToInt64(v_0.AuxInt) != 1 {
8061 break
8062 }
8063 cmp := v_0.Args[0]
8064 if cmp.Op != OpMIPS64SGT {
8065 break
8066 }
8067 b.resetWithControl(BlockMIPS64NE, cmp)
8068 return true
8069 }
8070 // match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
8071 // result: (NE cmp yes no)
8072 for b.Controls[0].Op == OpMIPS64XORconst {
8073 v_0 := b.Controls[0]
8074 if auxIntToInt64(v_0.AuxInt) != 1 {
8075 break
8076 }
8077 cmp := v_0.Args[0]
8078 if cmp.Op != OpMIPS64SGTU {
8079 break
8080 }
8081 b.resetWithControl(BlockMIPS64NE, cmp)
8082 return true
8083 }
8084 // match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
8085 // result: (NE cmp yes no)
8086 for b.Controls[0].Op == OpMIPS64XORconst {
8087 v_0 := b.Controls[0]
8088 if auxIntToInt64(v_0.AuxInt) != 1 {
8089 break
8090 }
8091 cmp := v_0.Args[0]
8092 if cmp.Op != OpMIPS64SGTconst {
8093 break
8094 }
8095 b.resetWithControl(BlockMIPS64NE, cmp)
8096 return true
8097 }
8098 // match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
8099 // result: (NE cmp yes no)
8100 for b.Controls[0].Op == OpMIPS64XORconst {
8101 v_0 := b.Controls[0]
8102 if auxIntToInt64(v_0.AuxInt) != 1 {
8103 break
8104 }
8105 cmp := v_0.Args[0]
8106 if cmp.Op != OpMIPS64SGTUconst {
8107 break
8108 }
8109 b.resetWithControl(BlockMIPS64NE, cmp)
8110 return true
8111 }
8112 // match: (EQ (SGTUconst [1] x) yes no)
8113 // result: (NE x yes no)
8114 for b.Controls[0].Op == OpMIPS64SGTUconst {
8115 v_0 := b.Controls[0]
8116 if auxIntToInt64(v_0.AuxInt) != 1 {
8117 break
8118 }
8119 x := v_0.Args[0]
8120 b.resetWithControl(BlockMIPS64NE, x)
8121 return true
8122 }
8123 // match: (EQ (SGTU x (MOVVconst [0])) yes no)
8124 // result: (EQ x yes no)
8125 for b.Controls[0].Op == OpMIPS64SGTU {
8126 v_0 := b.Controls[0]
8127 _ = v_0.Args[1]
8128 x := v_0.Args[0]
8129 v_0_1 := v_0.Args[1]
8130 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8131 break
8132 }
8133 b.resetWithControl(BlockMIPS64EQ, x)
8134 return true
8135 }
8136 // match: (EQ (SGTconst [0] x) yes no)
8137 // result: (GEZ x yes no)
8138 for b.Controls[0].Op == OpMIPS64SGTconst {
8139 v_0 := b.Controls[0]
8140 if auxIntToInt64(v_0.AuxInt) != 0 {
8141 break
8142 }
8143 x := v_0.Args[0]
8144 b.resetWithControl(BlockMIPS64GEZ, x)
8145 return true
8146 }
8147 // match: (EQ (SGT x (MOVVconst [0])) yes no)
8148 // result: (LEZ x yes no)
8149 for b.Controls[0].Op == OpMIPS64SGT {
8150 v_0 := b.Controls[0]
8151 _ = v_0.Args[1]
8152 x := v_0.Args[0]
8153 v_0_1 := v_0.Args[1]
8154 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8155 break
8156 }
8157 b.resetWithControl(BlockMIPS64LEZ, x)
8158 return true
8159 }
8160 // match: (EQ (MOVVconst [0]) yes no)
8161 // result: (First yes no)
8162 for b.Controls[0].Op == OpMIPS64MOVVconst {
8163 v_0 := b.Controls[0]
8164 if auxIntToInt64(v_0.AuxInt) != 0 {
8165 break
8166 }
8167 b.Reset(BlockFirst)
8168 return true
8169 }
8170 // match: (EQ (MOVVconst [c]) yes no)
8171 // cond: c != 0
8172 // result: (First no yes)
8173 for b.Controls[0].Op == OpMIPS64MOVVconst {
8174 v_0 := b.Controls[0]
8175 c := auxIntToInt64(v_0.AuxInt)
8176 if !(c != 0) {
8177 break
8178 }
8179 b.Reset(BlockFirst)
8180 b.swapSuccessors()
8181 return true
8182 }
8183 case BlockMIPS64GEZ:
8184 // match: (GEZ (MOVVconst [c]) yes no)
8185 // cond: c >= 0
8186 // result: (First yes no)
8187 for b.Controls[0].Op == OpMIPS64MOVVconst {
8188 v_0 := b.Controls[0]
8189 c := auxIntToInt64(v_0.AuxInt)
8190 if !(c >= 0) {
8191 break
8192 }
8193 b.Reset(BlockFirst)
8194 return true
8195 }
8196 // match: (GEZ (MOVVconst [c]) yes no)
8197 // cond: c < 0
8198 // result: (First no yes)
8199 for b.Controls[0].Op == OpMIPS64MOVVconst {
8200 v_0 := b.Controls[0]
8201 c := auxIntToInt64(v_0.AuxInt)
8202 if !(c < 0) {
8203 break
8204 }
8205 b.Reset(BlockFirst)
8206 b.swapSuccessors()
8207 return true
8208 }
8209 case BlockMIPS64GTZ:
8210 // match: (GTZ (MOVVconst [c]) yes no)
8211 // cond: c > 0
8212 // result: (First yes no)
8213 for b.Controls[0].Op == OpMIPS64MOVVconst {
8214 v_0 := b.Controls[0]
8215 c := auxIntToInt64(v_0.AuxInt)
8216 if !(c > 0) {
8217 break
8218 }
8219 b.Reset(BlockFirst)
8220 return true
8221 }
8222 // match: (GTZ (MOVVconst [c]) yes no)
8223 // cond: c <= 0
8224 // result: (First no yes)
8225 for b.Controls[0].Op == OpMIPS64MOVVconst {
8226 v_0 := b.Controls[0]
8227 c := auxIntToInt64(v_0.AuxInt)
8228 if !(c <= 0) {
8229 break
8230 }
8231 b.Reset(BlockFirst)
8232 b.swapSuccessors()
8233 return true
8234 }
8235 case BlockIf:
8236 // match: (If cond yes no)
8237 // result: (NE cond yes no)
8238 for {
8239 cond := b.Controls[0]
8240 b.resetWithControl(BlockMIPS64NE, cond)
8241 return true
8242 }
8243 case BlockMIPS64LEZ:
8244 // match: (LEZ (MOVVconst [c]) yes no)
8245 // cond: c <= 0
8246 // result: (First yes no)
8247 for b.Controls[0].Op == OpMIPS64MOVVconst {
8248 v_0 := b.Controls[0]
8249 c := auxIntToInt64(v_0.AuxInt)
8250 if !(c <= 0) {
8251 break
8252 }
8253 b.Reset(BlockFirst)
8254 return true
8255 }
8256 // match: (LEZ (MOVVconst [c]) yes no)
8257 // cond: c > 0
8258 // result: (First no yes)
8259 for b.Controls[0].Op == OpMIPS64MOVVconst {
8260 v_0 := b.Controls[0]
8261 c := auxIntToInt64(v_0.AuxInt)
8262 if !(c > 0) {
8263 break
8264 }
8265 b.Reset(BlockFirst)
8266 b.swapSuccessors()
8267 return true
8268 }
8269 case BlockMIPS64LTZ:
8270 // match: (LTZ (MOVVconst [c]) yes no)
8271 // cond: c < 0
8272 // result: (First yes no)
8273 for b.Controls[0].Op == OpMIPS64MOVVconst {
8274 v_0 := b.Controls[0]
8275 c := auxIntToInt64(v_0.AuxInt)
8276 if !(c < 0) {
8277 break
8278 }
8279 b.Reset(BlockFirst)
8280 return true
8281 }
8282 // match: (LTZ (MOVVconst [c]) yes no)
8283 // cond: c >= 0
8284 // result: (First no yes)
8285 for b.Controls[0].Op == OpMIPS64MOVVconst {
8286 v_0 := b.Controls[0]
8287 c := auxIntToInt64(v_0.AuxInt)
8288 if !(c >= 0) {
8289 break
8290 }
8291 b.Reset(BlockFirst)
8292 b.swapSuccessors()
8293 return true
8294 }
8295 case BlockMIPS64NE:
8296 // match: (NE (FPFlagTrue cmp) yes no)
8297 // result: (FPT cmp yes no)
8298 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8299 v_0 := b.Controls[0]
8300 cmp := v_0.Args[0]
8301 b.resetWithControl(BlockMIPS64FPT, cmp)
8302 return true
8303 }
8304 // match: (NE (FPFlagFalse cmp) yes no)
8305 // result: (FPF cmp yes no)
8306 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8307 v_0 := b.Controls[0]
8308 cmp := v_0.Args[0]
8309 b.resetWithControl(BlockMIPS64FPF, cmp)
8310 return true
8311 }
8312 // match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
8313 // result: (EQ cmp yes no)
8314 for b.Controls[0].Op == OpMIPS64XORconst {
8315 v_0 := b.Controls[0]
8316 if auxIntToInt64(v_0.AuxInt) != 1 {
8317 break
8318 }
8319 cmp := v_0.Args[0]
8320 if cmp.Op != OpMIPS64SGT {
8321 break
8322 }
8323 b.resetWithControl(BlockMIPS64EQ, cmp)
8324 return true
8325 }
8326 // match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
8327 // result: (EQ cmp yes no)
8328 for b.Controls[0].Op == OpMIPS64XORconst {
8329 v_0 := b.Controls[0]
8330 if auxIntToInt64(v_0.AuxInt) != 1 {
8331 break
8332 }
8333 cmp := v_0.Args[0]
8334 if cmp.Op != OpMIPS64SGTU {
8335 break
8336 }
8337 b.resetWithControl(BlockMIPS64EQ, cmp)
8338 return true
8339 }
8340 // match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
8341 // result: (EQ cmp yes no)
8342 for b.Controls[0].Op == OpMIPS64XORconst {
8343 v_0 := b.Controls[0]
8344 if auxIntToInt64(v_0.AuxInt) != 1 {
8345 break
8346 }
8347 cmp := v_0.Args[0]
8348 if cmp.Op != OpMIPS64SGTconst {
8349 break
8350 }
8351 b.resetWithControl(BlockMIPS64EQ, cmp)
8352 return true
8353 }
8354 // match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
8355 // result: (EQ cmp yes no)
8356 for b.Controls[0].Op == OpMIPS64XORconst {
8357 v_0 := b.Controls[0]
8358 if auxIntToInt64(v_0.AuxInt) != 1 {
8359 break
8360 }
8361 cmp := v_0.Args[0]
8362 if cmp.Op != OpMIPS64SGTUconst {
8363 break
8364 }
8365 b.resetWithControl(BlockMIPS64EQ, cmp)
8366 return true
8367 }
8368 // match: (NE (SGTUconst [1] x) yes no)
8369 // result: (EQ x yes no)
8370 for b.Controls[0].Op == OpMIPS64SGTUconst {
8371 v_0 := b.Controls[0]
8372 if auxIntToInt64(v_0.AuxInt) != 1 {
8373 break
8374 }
8375 x := v_0.Args[0]
8376 b.resetWithControl(BlockMIPS64EQ, x)
8377 return true
8378 }
8379 // match: (NE (SGTU x (MOVVconst [0])) yes no)
8380 // result: (NE x yes no)
8381 for b.Controls[0].Op == OpMIPS64SGTU {
8382 v_0 := b.Controls[0]
8383 _ = v_0.Args[1]
8384 x := v_0.Args[0]
8385 v_0_1 := v_0.Args[1]
8386 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8387 break
8388 }
8389 b.resetWithControl(BlockMIPS64NE, x)
8390 return true
8391 }
8392 // match: (NE (SGTconst [0] x) yes no)
8393 // result: (LTZ x yes no)
8394 for b.Controls[0].Op == OpMIPS64SGTconst {
8395 v_0 := b.Controls[0]
8396 if auxIntToInt64(v_0.AuxInt) != 0 {
8397 break
8398 }
8399 x := v_0.Args[0]
8400 b.resetWithControl(BlockMIPS64LTZ, x)
8401 return true
8402 }
8403 // match: (NE (SGT x (MOVVconst [0])) yes no)
8404 // result: (GTZ x yes no)
8405 for b.Controls[0].Op == OpMIPS64SGT {
8406 v_0 := b.Controls[0]
8407 _ = v_0.Args[1]
8408 x := v_0.Args[0]
8409 v_0_1 := v_0.Args[1]
8410 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8411 break
8412 }
8413 b.resetWithControl(BlockMIPS64GTZ, x)
8414 return true
8415 }
8416 // match: (NE (MOVVconst [0]) yes no)
8417 // result: (First no yes)
8418 for b.Controls[0].Op == OpMIPS64MOVVconst {
8419 v_0 := b.Controls[0]
8420 if auxIntToInt64(v_0.AuxInt) != 0 {
8421 break
8422 }
8423 b.Reset(BlockFirst)
8424 b.swapSuccessors()
8425 return true
8426 }
8427 // match: (NE (MOVVconst [c]) yes no)
8428 // cond: c != 0
8429 // result: (First yes no)
8430 for b.Controls[0].Op == OpMIPS64MOVVconst {
8431 v_0 := b.Controls[0]
8432 c := auxIntToInt64(v_0.AuxInt)
8433 if !(c != 0) {
8434 break
8435 }
8436 b.Reset(BlockFirst)
8437 return true
8438 }
8439 }
8440 return false
8441 }
8442