// Code generated from _gen/ARM64latelower.rules using 'go generate'; DO NOT EDIT.

package ssa

func rewriteValueARM64latelower(v *Value) bool {
	switch v.Op {
	case OpARM64ADDSconstflags:
		return rewriteValueARM64latelower_OpARM64ADDSconstflags(v)
	case OpARM64ADDconst:
		return rewriteValueARM64latelower_OpARM64ADDconst(v)
	case OpARM64ANDconst:
		return rewriteValueARM64latelower_OpARM64ANDconst(v)
	case OpARM64CMNWconst:
		return rewriteValueARM64latelower_OpARM64CMNWconst(v)
	case OpARM64CMNconst:
		return rewriteValueARM64latelower_OpARM64CMNconst(v)
	case OpARM64CMPWconst:
		return rewriteValueARM64latelower_OpARM64CMPWconst(v)
	case OpARM64CMPconst:
		return rewriteValueARM64latelower_OpARM64CMPconst(v)
	case OpARM64MOVBUreg:
		return rewriteValueARM64latelower_OpARM64MOVBUreg(v)
	case OpARM64MOVBreg:
		return rewriteValueARM64latelower_OpARM64MOVBreg(v)
	case OpARM64MOVDconst:
		return rewriteValueARM64latelower_OpARM64MOVDconst(v)
	case OpARM64MOVDnop:
		return rewriteValueARM64latelower_OpARM64MOVDnop(v)
	case OpARM64MOVDreg:
		return rewriteValueARM64latelower_OpARM64MOVDreg(v)
	case OpARM64MOVHUreg:
		return rewriteValueARM64latelower_OpARM64MOVHUreg(v)
	case OpARM64MOVHreg:
		return rewriteValueARM64latelower_OpARM64MOVHreg(v)
	case OpARM64MOVWUreg:
		return rewriteValueARM64latelower_OpARM64MOVWUreg(v)
	case OpARM64MOVWreg:
		return rewriteValueARM64latelower_OpARM64MOVWreg(v)
	case OpARM64ORconst:
		return rewriteValueARM64latelower_OpARM64ORconst(v)
	case OpARM64SUBconst:
		return rewriteValueARM64latelower_OpARM64SUBconst(v)
	case OpARM64TSTWconst:
		return rewriteValueARM64latelower_OpARM64TSTWconst(v)
	case OpARM64TSTconst:
		return rewriteValueARM64latelower_OpARM64TSTconst(v)
	case OpARM64XORconst:
		return rewriteValueARM64latelower_OpARM64XORconst(v)
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDSconstflags(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDSconstflags [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADDSflags x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADDSflags)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADD x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ANDconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (AND x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64AND)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMNW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMNW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMN x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMN)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMPW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMPW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMP x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMP)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(Equal _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64Equal {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(NotEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64NotEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDconst(v *Value) bool {
	// match: (MOVDconst [0])
	// result: (ZERO)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64ZERO)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x)
	// cond: zeroUpper32Bits(x, 3)
	// result: x
	for {
		x := v_0
		if !(zeroUpper32Bits(x, 3)) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (OR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64OR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64SUBconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SUBconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (SUB x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTWconst [c] x)
	// cond: !isARM64bitcon(uint64(c)|uint64(c)<<32)
	// result: (TSTW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c) | uint64(c)<<32)) {
			break
		}
		v.reset(OpARM64TSTW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (TST x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64TST)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (XOR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteBlockARM64latelower(b *Block) bool {
	return false
}