// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package arm64

import (
	"math"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/arm64"
)

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return arm64.AMOVB
			} else {
				return arm64.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return arm64.AMOVH
			} else {
				return arm64.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return arm64.AMOVW
			} else {
				return arm64.AMOVWU
			}
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return arm64.AMOVB
		case 2:
			return arm64.AMOVH
		case 4:
			return arm64.AMOVW
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad store type")
}

// loadByType2 returns an opcode that can load consecutive memory locations
// into 2 registers with type t. It returns obj.AXXX if no such opcode exists.
func loadByType2(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFLDPS
		case 8:
			return arm64.AFLDPD
		}
	} else {
		switch t.Size() {
		case 4:
			return arm64.ALDPW
		case 8:
			return arm64.ALDP
		}
	}
	return obj.AXXX
}

// storeByType2 returns an opcode that can store registers with type t into
// 2 consecutive memory locations. It returns obj.AXXX if no such opcode exists.
func storeByType2(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFSTPS
		case 8:
			return arm64.AFSTPD
		}
	} else {
		switch t.Size() {
		case 4:
			return arm64.ASTPW
		case 8:
			return arm64.ASTP
		}
	}
	return obj.AXXX
}
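
// Note: the paired opcodes above let the assembler spill or unspill two
// adjacent register arguments with a single instruction. For example, two
// consecutive 8-byte integer args can be saved with one STP instead of two
// MOVD stores; see the OpArgIntReg case below.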

// makeshift encodes a register shifted by a constant, used as an Offset in Prog.
func makeshift(v *ssa.Value, reg int16, typ int64, s int64) int64 {
	if s < 0 || s >= 64 {
		v.Fatalf("shift out of range: %d", s)
	}
	return int64(reg&31)<<16 | typ | (s&63)<<10
}
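
// For example, makeshift(v, r, arm64.SHIFT_LL, 3) encodes the operand "r<<3":
// the low 5 bits of the register number land in bits 16-20, the SHIFT_*
// constant contributes the shift-type bits, and the amount sits in bits 10-15.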

// genshift generates a Prog for r = r0 op (r1 shifted by n).
func genshift(s *ssagen.State, v *ssa.Value, as obj.As, r0, r1, r int16, typ int64, n int64) *obj.Prog {
	p := s.Prog(as)
	p.From.Type = obj.TYPE_SHIFT
	p.From.Offset = makeshift(v, r1, typ, n)
	p.Reg = r0
	if r != 0 {
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	}
	return p
}
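
// As a rough example, genshift with as = arm64.AADD, typ = arm64.SHIFT_LL and
// n = 2 produces a Prog that assembles to something like "ADD R1<<2, R0, R2":
// r1 shifted left by n, combined with r0, result written to r.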

// genIndexedOperand generates a memory operand for an indexed load/store
// instruction. base and idx are registers.
func genIndexedOperand(op ssa.Op, base, idx int16) obj.Addr {
	// Reg: base register, Index: (shifted) index register
	mop := obj.Addr{Type: obj.TYPE_MEM, Reg: base}
	switch op {
	case ssa.OpARM64MOVDloadidx8, ssa.OpARM64MOVDstoreidx8,
		ssa.OpARM64FMOVDloadidx8, ssa.OpARM64FMOVDstoreidx8:
		mop.Index = arm64.REG_LSL | 3<<5 | idx&31
	case ssa.OpARM64MOVWloadidx4, ssa.OpARM64MOVWUloadidx4, ssa.OpARM64MOVWstoreidx4,
		ssa.OpARM64FMOVSloadidx4, ssa.OpARM64FMOVSstoreidx4:
		mop.Index = arm64.REG_LSL | 2<<5 | idx&31
	case ssa.OpARM64MOVHloadidx2, ssa.OpARM64MOVHUloadidx2, ssa.OpARM64MOVHstoreidx2:
		mop.Index = arm64.REG_LSL | 1<<5 | idx&31
	default: // not shifted
		mop.Index = idx
	}
	return mop
}
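
// For example, for ssa.OpARM64MOVDloadidx8 the returned operand denotes
// (base)(idx<<3): the index register is scaled by the 8-byte element size.
// Ops with no scaled-index form fall through to a plain register index.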

func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpARM64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := arm64.AMOVD
		if v.Type.IsFloat() {
			switch v.Type.Size() {
			case 4:
				as = arm64.AFMOVS
			case 8:
				as = arm64.AFMOVD
			default:
				panic("bad float size")
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
	case ssa.OpARM64MOVDnop, ssa.OpARM64ZERO:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
		ssagen.CheckArgReg(v)
		// The assembler needs to wrap the entry safepoint/stack growth code with
		// spill/unspill the register args.
		args := v.Block.Func.RegArgs
		if len(args) == 0 {
			break
		}
		v.Block.Func.RegArgs = nil

		for i := 0; i < len(args); i++ {
			a := args[i]
			// Offset by size of the saved LR slot.
			addr := ssagen.SpillSlotAddr(a, arm64.REGSP, base.Ctxt.Arch.FixedFrameSize)
			// Look for double-register operations if we can.
			if i < len(args)-1 {
				b := args[i+1]
				if a.Type.Size() == b.Type.Size() &&
					a.Type.IsFloat() == b.Type.IsFloat() &&
					b.Offset == a.Offset+a.Type.Size() {
					ld := loadByType2(a.Type)
					st := storeByType2(a.Type)
					if ld != obj.AXXX && st != obj.AXXX {
						s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Reg2: b.Reg, Addr: addr, Unspill: ld, Spill: st})
						i++ // b is handled together with a
						continue
					}
				}
			}
			// No pairing; spill/unspill this arg on its own.
			s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
		}

	case ssa.OpARM64ADD,
		ssa.OpARM64SUB,
		ssa.OpARM64AND,
		ssa.OpARM64OR,
		ssa.OpARM64XOR,
		ssa.OpARM64BIC,
		ssa.OpARM64EON,
		ssa.OpARM64ORN,
		ssa.OpARM64MUL,
		ssa.OpARM64MULW,
		ssa.OpARM64MNEG,
		ssa.OpARM64MNEGW,
		ssa.OpARM64MULH,
		ssa.OpARM64UMULH,
		ssa.OpARM64MULL,
		ssa.OpARM64UMULL,
		ssa.OpARM64DIV,
		ssa.OpARM64UDIV,
		ssa.OpARM64DIVW,
		ssa.OpARM64UDIVW,
		ssa.OpARM64MOD,
		ssa.OpARM64UMOD,
		ssa.OpARM64MODW,
		ssa.OpARM64UMODW,
		ssa.OpARM64SLL,
		ssa.OpARM64SRL,
		ssa.OpARM64SRA,
		ssa.OpARM64FADDS,
		ssa.OpARM64FADDD,
		ssa.OpARM64FSUBS,
		ssa.OpARM64FSUBD,
		ssa.OpARM64FMULS,
		ssa.OpARM64FMULD,
		ssa.OpARM64FNMULS,
		ssa.OpARM64FNMULD,
		ssa.OpARM64FDIVS,
		ssa.OpARM64FDIVD,
		ssa.OpARM64FMINS,
		ssa.OpARM64FMIND,
		ssa.OpARM64FMAXS,
		ssa.OpARM64FMAXD,
		ssa.OpARM64ROR,
		ssa.OpARM64RORW:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64FMADDS,
		ssa.OpARM64FMADDD,
		ssa.OpARM64FNMADDS,
		ssa.OpARM64FNMADDD,
		ssa.OpARM64FMSUBS,
		ssa.OpARM64FMSUBD,
		ssa.OpARM64FNMSUBS,
		ssa.OpARM64FNMSUBD,
		ssa.OpARM64MADD,
		ssa.OpARM64MADDW,
		ssa.OpARM64MSUB,
		ssa.OpARM64MSUBW:
		rt := v.Reg()
		ra := v.Args[0].Reg()
		rm := v.Args[1].Reg()
		rn := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.Reg = ra
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rm
		p.AddRestSourceReg(rn)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rt
	case ssa.OpARM64ADDconst,
		ssa.OpARM64SUBconst,
		ssa.OpARM64ANDconst,
		ssa.OpARM64ORconst,
		ssa.OpARM64XORconst,
		ssa.OpARM64SLLconst,
		ssa.OpARM64SRLconst,
		ssa.OpARM64SRAconst,
		ssa.OpARM64RORconst,
		ssa.OpARM64RORWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADDSconstflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64ADCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADCSflags,
		ssa.OpARM64ADDSflags,
		ssa.OpARM64SBCSflags,
		ssa.OpARM64SUBSflags:
		r := v.Reg0()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64NEGSflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64NGCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64EXTRconst,
		ssa.OpARM64EXTRWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.AddRestSourceReg(v.Args[0].Reg())
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64MVNshiftLL, ssa.OpARM64NEGshiftLL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64MVNshiftRL, ssa.OpARM64NEGshiftRL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64MVNshiftRA, ssa.OpARM64NEGshiftRA:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64MVNshiftRO:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64ADDshiftLL,
		ssa.OpARM64SUBshiftLL,
		ssa.OpARM64ANDshiftLL,
		ssa.OpARM64ORshiftLL,
		ssa.OpARM64XORshiftLL,
		ssa.OpARM64EONshiftLL,
		ssa.OpARM64ORNshiftLL,
		ssa.OpARM64BICshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64ADDshiftRL,
		ssa.OpARM64SUBshiftRL,
		ssa.OpARM64ANDshiftRL,
		ssa.OpARM64ORshiftRL,
		ssa.OpARM64XORshiftRL,
		ssa.OpARM64EONshiftRL,
		ssa.OpARM64ORNshiftRL,
		ssa.OpARM64BICshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64ADDshiftRA,
		ssa.OpARM64SUBshiftRA,
		ssa.OpARM64ANDshiftRA,
		ssa.OpARM64ORshiftRA,
		ssa.OpARM64XORshiftRA,
		ssa.OpARM64EONshiftRA,
		ssa.OpARM64ORNshiftRA,
		ssa.OpARM64BICshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64ANDshiftRO,
		ssa.OpARM64ORshiftRO,
		ssa.OpARM64XORshiftRO,
		ssa.OpARM64EONshiftRO,
		ssa.OpARM64ORNshiftRO,
		ssa.OpARM64BICshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FMOVSconst,
		ssa.OpARM64FMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FCMPS0,
		ssa.OpARM64FCMPD0:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(0)
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMP,
		ssa.OpARM64CMPW,
		ssa.OpARM64CMN,
		ssa.OpARM64CMNW,
		ssa.OpARM64TST,
		ssa.OpARM64TSTW,
		ssa.OpARM64FCMPS,
		ssa.OpARM64FCMPD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPconst,
		ssa.OpARM64CMPWconst,
		ssa.OpARM64CMNconst,
		ssa.OpARM64CMNWconst,
		ssa.OpARM64TSTconst,
		ssa.OpARM64TSTWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPshiftLL, ssa.OpARM64CMNshiftLL, ssa.OpARM64TSTshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64CMPshiftRL, ssa.OpARM64CMNshiftRL, ssa.OpARM64TSTshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64CMPshiftRA, ssa.OpARM64CMNshiftRA, ssa.OpARM64TSTshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64TSTshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDaddr:
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
		// MOVD	$sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP;
		//   when the constant is large, the tmp register (REGTMP) may be used
		// - base is SB: load external address from constant pool (use relocation)
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVD $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpARM64MOVBload,
		ssa.OpARM64MOVBUload,
		ssa.OpARM64MOVHload,
		ssa.OpARM64MOVHUload,
		ssa.OpARM64MOVWload,
		ssa.OpARM64MOVWUload,
		ssa.OpARM64MOVDload,
		ssa.OpARM64FMOVSload,
		ssa.OpARM64FMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDP, ssa.OpARM64LDPW, ssa.OpARM64LDPSW, ssa.OpARM64FLDPD, ssa.OpARM64FLDPS:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = v.Reg0()
		p.To.Offset = int64(v.Reg1())
	case ssa.OpARM64MOVBloadidx,
		ssa.OpARM64MOVBUloadidx,
		ssa.OpARM64MOVHloadidx,
		ssa.OpARM64MOVHUloadidx,
		ssa.OpARM64MOVWloadidx,
		ssa.OpARM64MOVWUloadidx,
		ssa.OpARM64MOVDloadidx,
		ssa.OpARM64FMOVSloadidx,
		ssa.OpARM64FMOVDloadidx,
		ssa.OpARM64MOVHloadidx2,
		ssa.OpARM64MOVHUloadidx2,
		ssa.OpARM64MOVWloadidx4,
		ssa.OpARM64MOVWUloadidx4,
		ssa.OpARM64MOVDloadidx8,
		ssa.OpARM64FMOVDloadidx8,
		ssa.OpARM64FMOVSloadidx4:
		p := s.Prog(v.Op.Asm())
		p.From = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDAR,
		ssa.OpARM64LDARB,
		ssa.OpARM64LDARW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64MOVBstore,
		ssa.OpARM64MOVHstore,
		ssa.OpARM64MOVWstore,
		ssa.OpARM64MOVDstore,
		ssa.OpARM64FMOVSstore,
		ssa.OpARM64FMOVDstore,
		ssa.OpARM64STLRB,
		ssa.OpARM64STLR,
		ssa.OpARM64STLRW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstoreidx,
		ssa.OpARM64MOVHstoreidx,
		ssa.OpARM64MOVWstoreidx,
		ssa.OpARM64MOVDstoreidx,
		ssa.OpARM64FMOVSstoreidx,
		ssa.OpARM64FMOVDstoreidx,
		ssa.OpARM64MOVHstoreidx2,
		ssa.OpARM64MOVWstoreidx4,
		ssa.OpARM64FMOVSstoreidx4,
		ssa.OpARM64MOVDstoreidx8,
		ssa.OpARM64FMOVDstoreidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
	case ssa.OpARM64STP, ssa.OpARM64STPW, ssa.OpARM64FSTPD, ssa.OpARM64FSTPS:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = v.Args[1].Reg()
		p.From.Offset = int64(v.Args[2].Reg())
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64BFI,
		ssa.OpARM64BFXIL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8          // lsb
		p.AddRestSourceConst(v.AuxInt & 0xff)  // width
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64SBFIZ,
		ssa.OpARM64SBFX,
		ssa.OpARM64UBFIZ,
		ssa.OpARM64UBFX:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8          // lsb
		p.AddRestSourceConst(v.AuxInt & 0xff)  // width
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
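	// The bitfield ops above pack their field into AuxInt as lsb<<8 | width;
	// e.g. a UBFX with lsb 4 and width 8 assembles to something like
	// "UBFX $4, Rn, $8, Rd", extracting 8 bits starting at bit 4.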
	case ssa.OpARM64LoweredAtomicExchange64,
		ssa.OpARM64LoweredAtomicExchange32,
		ssa.OpARM64LoweredAtomicExchange8:
		// LDAXR[BW]	(Rarg0), Rout
		// STLXR[BW]	Rarg1, (Rarg0), Rtmp
		// CBNZ		Rtmp, -2(PC)
		var ld, st obj.As
		switch v.Op {
		case ssa.OpARM64LoweredAtomicExchange8:
			ld = arm64.ALDAXRB
			st = arm64.ASTLXRB
		case ssa.OpARM64LoweredAtomicExchange32:
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		case ssa.OpARM64LoweredAtomicExchange64:
			ld = arm64.ALDAXR
			st = arm64.ASTLXR
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(st)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = arm64.REGTMP
		p2 := s.Prog(arm64.ACBNZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = arm64.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p2.To.SetTarget(p)
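	// Note: in the LL/SC sequence above, STLXR reports store-exclusive
	// failure in Rtmp, and the CBNZ loops back to the LDAXR to retry until
	// the exchange completes atomically.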
	case ssa.OpARM64LoweredAtomicExchange64Variant,
		ssa.OpARM64LoweredAtomicExchange32Variant,
		ssa.OpARM64LoweredAtomicExchange8Variant:
		var swap obj.As
		switch v.Op {
		case ssa.OpARM64LoweredAtomicExchange8Variant:
			swap = arm64.ASWPALB
		case ssa.OpARM64LoweredAtomicExchange32Variant:
			swap = arm64.ASWPALW
		case ssa.OpARM64LoweredAtomicExchange64Variant:
			swap = arm64.ASWPALD
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// SWPAL[BWD]	Rarg1, (Rarg0), Rout
		p := s.Prog(swap)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

	case ssa.OpARM64LoweredAtomicAdd64,
		ssa.OpARM64LoweredAtomicAdd32:
		// LDAXR[W]	(Rarg0), Rout
		// ADD		Rarg1, Rout
		// STLXR[W]	Rout, (Rarg0), Rtmp
		// CBNZ		Rtmp, -3(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAdd32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = out
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicAdd64Variant,
		ssa.OpARM64LoweredAtomicAdd32Variant:
		// LDADDAL[W]	Rarg1, (Rarg0), Rout
		// ADD		Rarg1, Rout
		op := arm64.ALDADDALD
		if v.Op == ssa.OpARM64LoweredAtomicAdd32Variant {
			op = arm64.ALDADDALW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(op)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
	case ssa.OpARM64LoweredAtomicCas64,
		ssa.OpARM64LoweredAtomicCas32:
		// LDAXR[W]	(Rarg0), Rtmp
		// CMP[W]	Rarg1, Rtmp
		// BNE		3(PC)
		// STLXR[W]	Rarg2, (Rarg0), Rtmp
		// CBNZ		Rtmp, -4(PC)
		// CSET		EQ, Rout
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		cmp := arm64.ACMP
		if v.Op == ssa.OpARM64LoweredAtomicCas32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
			cmp = arm64.ACMPW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p1 := s.Prog(cmp)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = arm64.REGTMP
		p2 := s.Prog(arm64.ABNE)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(st)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = arm64.REGTMP
		p4 := s.Prog(arm64.ACBNZ)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = arm64.REGTMP
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
		p5 := s.Prog(arm64.ACSET)
		p5.From.Type = obj.TYPE_SPECIAL
		p5.From.Offset = int64(arm64.SPOP_EQ)
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out
		p2.To.SetTarget(p5)
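	// In the sequence above, Rout ends up 1 exactly when the swap happened:
	// the final CSET observes the EQ flag from the CMP, and the
	// store-exclusive only runs when the comparison succeeded.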
	case ssa.OpARM64LoweredAtomicCas64Variant,
		ssa.OpARM64LoweredAtomicCas32Variant:
		// Rarg0: ptr
		// Rarg1: old
		// Rarg2: new
		// MOV  	Rarg1, Rtmp
		// CASAL	Rtmp, (Rarg0), Rarg2
		// CMP  	Rarg1, Rtmp
		// CSET 	EQ, Rout
		cas := arm64.ACASALD
		cmp := arm64.ACMP
		mov := arm64.AMOVD
		if v.Op == ssa.OpARM64LoweredAtomicCas32Variant {
			cas = arm64.ACASALW
			cmp = arm64.ACMPW
			mov = arm64.AMOVW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()

		// MOV  	Rarg1, Rtmp
		p := s.Prog(mov)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP

		// CASAL	Rtmp, (Rarg0), Rarg2
		p1 := s.Prog(cas)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = r2

		// CMP  	Rarg1, Rtmp
		p2 := s.Prog(cmp)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.Reg = arm64.REGTMP

		// CSET 	EQ, Rout
		p3 := s.Prog(arm64.ACSET)
		p3.From.Type = obj.TYPE_SPECIAL
		p3.From.Offset = int64(arm64.SPOP_EQ)
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = out

	case ssa.OpARM64LoweredAtomicAnd64,
		ssa.OpARM64LoweredAtomicOr64,
		ssa.OpARM64LoweredAtomicAnd32,
		ssa.OpARM64LoweredAtomicOr32,
		ssa.OpARM64LoweredAtomicAnd8,
		ssa.OpARM64LoweredAtomicOr8:
		// LDAXR[BW]	(Rarg0), Rout
		// AND/OR	Rarg1, Rout, tmp
		// STLXR[BW]	tmp, (Rarg0), Rtmp
		// CBNZ		Rtmp, -3(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAnd32 || v.Op == ssa.OpARM64LoweredAtomicOr32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		if v.Op == ssa.OpARM64LoweredAtomicAnd8 || v.Op == ssa.OpARM64LoweredAtomicOr8 {
			ld = arm64.ALDAXRB
			st = arm64.ASTLXRB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		tmp := v.RegTmp()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = out
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = tmp
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = tmp
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

	case ssa.OpARM64LoweredAtomicAnd8Variant,
		ssa.OpARM64LoweredAtomicAnd32Variant,
		ssa.OpARM64LoweredAtomicAnd64Variant:
		atomic_clear := arm64.ALDCLRALD
		if v.Op == ssa.OpARM64LoweredAtomicAnd32Variant {
			atomic_clear = arm64.ALDCLRALW
		}
		if v.Op == ssa.OpARM64LoweredAtomicAnd8Variant {
			atomic_clear = arm64.ALDCLRALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// MVN	Rarg1, Rtmp
		p := s.Prog(arm64.AMVN)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP

		// LDCLRAL[BW]	Rtmp, (Rarg0), Rout
		p1 := s.Prog(atomic_clear)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = out

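	// Note on the And variants above: LSE has no atomic AND instruction, so
	// x AND y is computed as a bit clear with the complement of the mask;
	// MVN inverts the mask and LDCLRAL atomically clears those bits.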
	case ssa.OpARM64LoweredAtomicOr8Variant,
		ssa.OpARM64LoweredAtomicOr32Variant,
		ssa.OpARM64LoweredAtomicOr64Variant:
		atomic_or := arm64.ALDORALD
		if v.Op == ssa.OpARM64LoweredAtomicOr32Variant {
			atomic_or = arm64.ALDORALW
		}
		if v.Op == ssa.OpARM64LoweredAtomicOr8Variant {
			atomic_or = arm64.ALDORALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// LDORAL[BW]	Rarg1, (Rarg0), Rout
		p := s.Prog(atomic_or)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

	case ssa.OpARM64MOVBreg,
		ssa.OpARM64MOVBUreg,
		ssa.OpARM64MOVHreg,
		ssa.OpARM64MOVHUreg,
		ssa.OpARM64MOVWreg,
		ssa.OpARM64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpARM64MOVDreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpARM64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpARM64MVN,
		ssa.OpARM64NEG,
		ssa.OpARM64FABSD,
		ssa.OpARM64FMOVDfpgp,
		ssa.OpARM64FMOVDgpfp,
		ssa.OpARM64FMOVSfpgp,
		ssa.OpARM64FMOVSgpfp,
		ssa.OpARM64FNEGS,
		ssa.OpARM64FNEGD,
		ssa.OpARM64FSQRTS,
		ssa.OpARM64FSQRTD,
		ssa.OpARM64FCVTZSSW,
		ssa.OpARM64FCVTZSDW,
		ssa.OpARM64FCVTZUSW,
		ssa.OpARM64FCVTZUDW,
		ssa.OpARM64FCVTZSS,
		ssa.OpARM64FCVTZSD,
		ssa.OpARM64FCVTZUS,
		ssa.OpARM64FCVTZUD,
		ssa.OpARM64SCVTFWS,
		ssa.OpARM64SCVTFWD,
		ssa.OpARM64SCVTFS,
		ssa.OpARM64SCVTFD,
		ssa.OpARM64UCVTFWS,
		ssa.OpARM64UCVTFWD,
		ssa.OpARM64UCVTFS,
		ssa.OpARM64UCVTFD,
		ssa.OpARM64FCVTSD,
		ssa.OpARM64FCVTDS,
		ssa.OpARM64REV,
		ssa.OpARM64REVW,
		ssa.OpARM64REV16,
		ssa.OpARM64REV16W,
		ssa.OpARM64RBIT,
		ssa.OpARM64RBITW,
		ssa.OpARM64CLZ,
		ssa.OpARM64CLZW,
		ssa.OpARM64FRINTAD,
		ssa.OpARM64FRINTMD,
		ssa.OpARM64FRINTND,
		ssa.OpARM64FRINTPD,
		ssa.OpARM64FRINTZD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredRound32F, ssa.OpARM64LoweredRound64F:
		// input is already rounded
	case ssa.OpARM64VCNT:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = (v.Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
	case ssa.OpARM64VUADDLV:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg() - arm64.REG_F0 + arm64.REG_V0
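	// The register arithmetic above rewrites a floating-point register number
	// into the matching vector register with an 8B arrangement (e.g. F0
	// becomes V0.B8), which is the form the VCNT/VUADDLV encodings expect.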
	case ssa.OpARM64CSEL, ssa.OpARM64CSEL0:
		r1 := int16(arm64.REGZERO)
		if v.Op != ssa.OpARM64CSEL0 {
			r1 = v.Args[1].Reg()
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL // assembler encodes conditional bits in Offset
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(r1)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSINC, ssa.OpARM64CSINV, ssa.OpARM64CSNEG:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL // assembler encodes conditional bits in Offset
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSETM:
		p := s.Prog(arm64.ACSETM)
		p.From.Type = obj.TYPE_SPECIAL // assembler encodes conditional bits in Offset
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DUFFZERO:
		// runtime.duffzero expects start address in R20
		p := s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredZero:
		// STP.P	(ZR,ZR), 16(R16)
		// CMP	Rarg1, R16
		// BLE	-2(PC)
		// arg1 is the address of the last 16-byte unit to zero
		p := s.Prog(arm64.ASTP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REG_R16
		p.To.Offset = 16
		p2 := s.Prog(arm64.ACMP)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = arm64.REG_R16
		p3 := s.Prog(arm64.ABLE)
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredMove:
		// LDP.P	16(R16), (R25, Rtmp)
		// STP.P	(R25, Rtmp), 16(R17)
		// CMP	Rarg2, R16
		// BLE	-3(PC)
		// arg2 is the address of the last element of src
		p := s.Prog(arm64.ALDP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = arm64.REG_R16
		p.From.Offset = 16
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = arm64.REG_R25
		p.To.Offset = int64(arm64.REGTMP)
		p2 := s.Prog(arm64.ASTP)
		p2.Scond = arm64.C_XPOST
		p2.From.Type = obj.TYPE_REGREG
		p2.From.Reg = arm64.REG_R25
		p2.From.Offset = int64(arm64.REGTMP)
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = arm64.REG_R17
		p2.To.Offset = 16
		p3 := s.Prog(arm64.ACMP)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.Reg = arm64.REG_R16
		p4 := s.Prog(arm64.ABLE)
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
	case ssa.OpARM64CALLstatic, ssa.OpARM64CALLclosure, ssa.OpARM64CALLinter:
		s.Call(v)
	case ssa.OpARM64CALLtail:
		s.TailCall(v)
	case ssa.OpARM64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpARM64LoweredPanicBoundsA, ssa.OpARM64LoweredPanicBoundsB, ssa.OpARM64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpARM64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(arm64.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpARM64Equal,
		ssa.OpARM64NotEqual,
		ssa.OpARM64LessThan,
		ssa.OpARM64LessEqual,
		ssa.OpARM64GreaterThan,
		ssa.OpARM64GreaterEqual,
		ssa.OpARM64LessThanU,
		ssa.OpARM64LessEqualU,
		ssa.OpARM64GreaterThanU,
		ssa.OpARM64GreaterEqualU,
		ssa.OpARM64LessThanF,
		ssa.OpARM64LessEqualF,
		ssa.OpARM64GreaterThanF,
		ssa.OpARM64GreaterEqualF,
		ssa.OpARM64NotLessThanF,
		ssa.OpARM64NotLessEqualF,
		ssa.OpARM64NotGreaterThanF,
		ssa.OpARM64NotGreaterEqualF,
		ssa.OpARM64LessThanNoov,
		ssa.OpARM64GreaterEqualNoov:
		// generate boolean values using CSET
		p := s.Prog(arm64.ACSET)
		p.From.Type = obj.TYPE_SPECIAL // assembler encodes conditional bits in Offset
		condCode := condBits[v.Op]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64PRFM:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_CONST
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredGetClosurePtr:
		// Closure pointer is R26 (arm64.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpARM64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DMB:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
	case ssa.OpARM64FlagConstant:
		v.Fatalf("FlagConstant op should never make it to codegen %v", v.LongString())
	case ssa.OpARM64InvertFlags:
		v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
	case ssa.OpClobber:
		// MOVW	$0xdeaddead, REGTMP
		// MOVW	REGTMP, (slot)
		// MOVW	REGTMP, 4(slot)
		p := s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0xdeaddead
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux(&p.To, v)
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux2(&p.To, v, v.AuxInt+4)
	case ssa.OpClobberReg:
		x := uint64(0xdeaddeaddeaddead)
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(x)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

var condBits = map[ssa.Op]arm64.SpecialOperand{
	ssa.OpARM64Equal:         arm64.SPOP_EQ,
	ssa.OpARM64NotEqual:      arm64.SPOP_NE,
	ssa.OpARM64LessThan:      arm64.SPOP_LT,
	ssa.OpARM64LessThanU:     arm64.SPOP_LO,
	ssa.OpARM64LessEqual:     arm64.SPOP_LE,
	ssa.OpARM64LessEqualU:    arm64.SPOP_LS,
	ssa.OpARM64GreaterThan:   arm64.SPOP_GT,
	ssa.OpARM64GreaterThanU:  arm64.SPOP_HI,
	ssa.OpARM64GreaterEqual:  arm64.SPOP_GE,
	ssa.OpARM64GreaterEqualU: arm64.SPOP_HS,
	ssa.OpARM64LessThanF:     arm64.SPOP_MI, // Less than
	ssa.OpARM64LessEqualF:    arm64.SPOP_LS, // Less than or equal
	ssa.OpARM64GreaterThanF:  arm64.SPOP_GT, // Greater than
	ssa.OpARM64GreaterEqualF: arm64.SPOP_GE, // Greater than or equal

	// The following condition codes have unordered to handle comparisons related to NaN.
	ssa.OpARM64NotLessThanF:     arm64.SPOP_PL, // Greater than, equal to, or unordered
	ssa.OpARM64NotLessEqualF:    arm64.SPOP_HI, // Greater than or unordered
	ssa.OpARM64NotGreaterThanF:  arm64.SPOP_LE, // Less than, equal to, or unordered
	ssa.OpARM64NotGreaterEqualF: arm64.SPOP_LT, // Less than or unordered

	ssa.OpARM64LessThanNoov:     arm64.SPOP_MI, // Less than, without honoring overflow
	ssa.OpARM64GreaterEqualNoov: arm64.SPOP_PL, // Greater than or equal, without honoring overflow
}
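
// For example, ssa.OpARM64LessThanU maps to SPOP_LO, so the boolean op above
// emits "CSET LO, Rout" and the result is 1 exactly when the preceding
// unsigned comparison was a strict less-than.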

var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockARM64EQ:     {arm64.ABEQ, arm64.ABNE},
	ssa.BlockARM64NE:     {arm64.ABNE, arm64.ABEQ},
	ssa.BlockARM64LT:     {arm64.ABLT, arm64.ABGE},
	ssa.BlockARM64GE:     {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64LE:     {arm64.ABLE, arm64.ABGT},
	ssa.BlockARM64GT:     {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64ULT:    {arm64.ABLO, arm64.ABHS},
	ssa.BlockARM64UGE:    {arm64.ABHS, arm64.ABLO},
	ssa.BlockARM64UGT:    {arm64.ABHI, arm64.ABLS},
	ssa.BlockARM64ULE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64Z:      {arm64.ACBZ, arm64.ACBNZ},
	ssa.BlockARM64NZ:     {arm64.ACBNZ, arm64.ACBZ},
	ssa.BlockARM64ZW:     {arm64.ACBZW, arm64.ACBNZW},
	ssa.BlockARM64NZW:    {arm64.ACBNZW, arm64.ACBZW},
	ssa.BlockARM64TBZ:    {arm64.ATBZ, arm64.ATBNZ},
	ssa.BlockARM64TBNZ:   {arm64.ATBNZ, arm64.ATBZ},
	ssa.BlockARM64FLT:    {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64FGE:    {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64FLE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64FGT:    {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64LTnoov: {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64GEnoov: {arm64.ABPL, arm64.ABMI},
}

// To model a 'LEnoov' ('<=' without overflow checking) branching.
var leJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABEQ, Index: 0}, {Jump: arm64.ABPL, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABMI, Index: 0}, {Jump: arm64.ABEQ, Index: 0}}, // next == b.Succs[1]
}

// To model a 'GTnoov' ('>' without overflow checking) branching.
var gtJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABMI, Index: 1}, {Jump: arm64.ABEQ, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABEQ, Index: 1}, {Jump: arm64.ABPL, Index: 0}}, // next == b.Succs[1]
}
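
// For example, when an LEnoov block falls through to Succs[0], the pair in
// leJumps[0] expands to roughly "BEQ Succs[0]; BPL Succs[1]": equal branches
// to the taken side, strictly-positive flags branch to the not-taken side,
// and the remaining (negative) case falls through to Succs[0].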

func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockDefer:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockExit, ssa.BlockRetJmp:

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockARM64EQ, ssa.BlockARM64NE,
		ssa.BlockARM64LT, ssa.BlockARM64GE,
		ssa.BlockARM64LE, ssa.BlockARM64GT,
		ssa.BlockARM64ULT, ssa.BlockARM64UGT,
		ssa.BlockARM64ULE, ssa.BlockARM64UGE,
		ssa.BlockARM64Z, ssa.BlockARM64NZ,
		ssa.BlockARM64ZW, ssa.BlockARM64NZW,
		ssa.BlockARM64FLT, ssa.BlockARM64FGE,
		ssa.BlockARM64FLE, ssa.BlockARM64FGT,
		ssa.BlockARM64LTnoov, ssa.BlockARM64GEnoov:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	case ssa.BlockARM64TBZ, ssa.BlockARM64TBNZ:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		p.From.Offset = b.AuxInt
		p.From.Type = obj.TYPE_CONST
		p.Reg = b.Controls[0].Reg()

	case ssa.BlockARM64LEnoov:
		s.CombJump(b, next, &leJumps)
	case ssa.BlockARM64GTnoov:
		s.CombJump(b, next, &gtJumps)

	case ssa.BlockARM64JUMPTABLE:
		// MOVD	(TABLE)(IDX<<3), Rtmp
		// JMP	(Rtmp)
		p := s.Prog(arm64.AMOVD)
		p.From = genIndexedOperand(ssa.OpARM64MOVDloadidx8, b.Controls[1].Reg(), b.Controls[0].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(obj.AJMP)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGTMP
		// Save jump tables for later resolution of the target blocks.
		s.JumpTables = append(s.JumpTables, b)

	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}

// loadRegResult loads a result of type t from n's stack slot (plus offset off)
// into register reg.
func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p := s.Prog(loadByType(t))
	p.From.Type = obj.TYPE_MEM
	p.From.Name = obj.NAME_AUTO
	p.From.Sym = n.Linksym()
	p.From.Offset = n.FrameOffset() + off
	p.To.Type = obj.TYPE_REG
	p.To.Reg = reg
	return p
}

// spillArgReg appends a store that spills register reg of type t to argument
// slot n (plus offset off).
func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
	p.To.Name = obj.NAME_PARAM
	p.To.Sym = n.Linksym()
	p.Pos = p.Pos.WithNotStmt()
	return p
}