1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "internal/goexperiment"
16 "internal/runtime/gc"
17 "os"
18 "path/filepath"
19 "slices"
20 "strings"
21
22 "cmd/compile/internal/abi"
23 "cmd/compile/internal/base"
24 "cmd/compile/internal/ir"
25 "cmd/compile/internal/liveness"
26 "cmd/compile/internal/objw"
27 "cmd/compile/internal/reflectdata"
28 "cmd/compile/internal/rttype"
29 "cmd/compile/internal/ssa"
30 "cmd/compile/internal/staticdata"
31 "cmd/compile/internal/typecheck"
32 "cmd/compile/internal/types"
33 "cmd/internal/obj"
34 "cmd/internal/objabi"
35 "cmd/internal/src"
36 "cmd/internal/sys"
37
38 rtabi "internal/abi"
39 )
40
// ssaConfig is the single shared SSA backend configuration for this
// compilation; ssaCaches holds one scratch cache per concurrent
// backend worker (indexed by worker number in buildssa).
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

var ssaDump string     // early copy of $GOSSAFUNC; the function name to dump SSA for
var ssaDir string      // optional destination directory for the ssa.html dump ($GOSSADIR)
var ssaDumpStdout bool // whether to also dump to stdout (GOSSAFUNC ends in "+")
var ssaDumpCFG string  // the ":phase" suffix of GOSSAFUNC, selecting phases to draw CFGs for
const ssaDumpFile = "ssa.html"

// ssaDumpInlined accumulates the functions that were inlined into the
// function being dumped, so their sources can be shown too.
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation is the size limit (in bytes) up to which
// newHeapaddr will batch several small pointer-free heap allocations
// into a single combined allocation.
const maxAggregatedHeapAllocation = 16
57
58 func DumpInline(fn *ir.Func) {
59 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
60 ssaDumpInlined = append(ssaDumpInlined, fn)
61 }
62 }
63
64 func InitEnv() {
65 ssaDump = os.Getenv("GOSSAFUNC")
66 ssaDir = os.Getenv("GOSSADIR")
67 if ssaDump != "" {
68 if strings.HasSuffix(ssaDump, "+") {
69 ssaDump = ssaDump[:len(ssaDump)-1]
70 ssaDumpStdout = true
71 }
72 spl := strings.Split(ssaDump, ":")
73 if len(spl) > 1 {
74 ssaDump = spl[0]
75 ssaDumpCFG = spl[1]
76 }
77 }
78 }
79
// InitConfig performs one-time SSA backend setup for the whole
// compilation: it builds the shared type cache and ssa.Config,
// allocates the per-worker caches, and resolves all the runtime
// function and variable symbols (ir.Syms) that SSA generation emits
// calls or references to.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Pre-create pointer types that the backend needs, before the
	// pointer-type cache is disabled below (NewPtrCacheEnabled = false).
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // pointer to the runtime map type
	_ = types.NewPtr(deferstruct())                                        // pointer to the runtime defer record
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC) // one cache per backend worker (-c)

	// Resolve the runtime functions the backend generates calls to.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	// gcWriteBarrier1..8: write-barrier entry points for 1..8 pointers.
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.GrowsliceBuf = typecheck.LookupRuntimeFunc("growsliceBuf")
	ir.Syms.GrowsliceBufNoAlias = typecheck.LookupRuntimeFunc("growsliceBufNoAlias")
	ir.Syms.GrowsliceNoAlias = typecheck.LookupRuntimeFunc("growsliceNoAlias")
	ir.Syms.MoveSlice = typecheck.LookupRuntimeFunc("moveSlice")
	ir.Syms.MoveSliceNoScan = typecheck.LookupRuntimeFunc("moveSliceNoScan")
	ir.Syms.MoveSliceNoCap = typecheck.LookupRuntimeFunc("moveSliceNoCap")
	ir.Syms.MoveSliceNoCapNoScan = typecheck.LookupRuntimeFunc("moveSliceNoCapNoScan")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	// Size-class-specialized malloc entry points. Index 0 is left nil:
	// the loops deliberately start at 1 (size class / size 0 is unused).
	for i := 1; i < len(ir.Syms.MallocGCSmallNoScan); i++ {
		ir.Syms.MallocGCSmallNoScan[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallNoScanSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCSmallScanNoHeader); i++ {
		ir.Syms.MallocGCSmallScanNoHeader[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallScanNoHeaderSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCTiny); i++ {
		ir.Syms.MallocGCTiny[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcTinySize%d", i))
	}
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Memequal = typecheck.LookupRuntimeFunc("memequal")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.PanicSimdImm = typecheck.LookupRuntimeFunc("panicSimdImm")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	// Runtime variables consulted by generated code (CPU feature
	// flags, write-barrier state, shared zero/static data).
	ir.Syms.X86HasAVX = typecheck.LookupRuntimeVar("x86HasAVX")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// Wasm uses Go-ABI bounds-check panic functions rather than the
	// assembly panicBounds/panicExtend shims used elsewhere.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	}

	// Wasm helper symbols; looked up unconditionally here even though
	// only the wasm backend emits references to them.
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
216
// InitTables initializes the intrinsic-function tables. Passing nil
// applies no restriction on which intrinsics are registered.
func InitTables() {
	initIntrinsics(nil)
}
220
221
222
223
224
225
226
227
// AbiForBodylessFuncStackMap returns the ABI configuration to use when
// building a stack map for a function with no visible body (e.g. one
// implemented in assembly). It always returns a fresh copy of the ABI0
// configuration; fn is currently unused.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
231
232
233
234 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
235 if buildcfg.Experiment.RegabiArgs {
236
237 if fn == nil {
238 return abi1
239 }
240 switch fn.ABI {
241 case obj.ABI0:
242 return abi0
243 case obj.ABIInternal:
244
245
246 return abi1
247 }
248 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
249 panic("not reachable")
250 }
251
252 a := abi0
253 if fn != nil {
254 if fn.Pragma&ir.RegisterParams != 0 {
255 a = abi1
256 }
257 }
258 return a
259 }
260
261
262
263
264
265
266
267
268
269
270
271
// emitOpenDeferInfo emits the per-function metadata symbol
// ("<fn>.opendefer") describing this function's open-coded defers: the
// frame offset of the deferBits temporary and the frame offset of the
// first defer-closure slot, both encoded as uvarints of their negated
// values (frame offsets here are negative).
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the defer-closure slots were laid out contiguously:
	// slot #i must sit exactly i pointer-sized words past the first.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	// The info symbol is content-addressable so identical layouts dedup.
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	x.Align = 1
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
293
294
295
// buildssa builds an SSA function for fn and runs the SSA compiler on
// it, returning the compiled ssa.Func. worker indexes the per-worker
// SSA cache to use; isPgoHot marks the function as hot per PGO.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// Decide whether $GOSSAFUNC matches this function. The cheap
	// Contains check gates the more exact comparisons below.
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			// Trailing ",N" (ABI selector): match against the
			// ABI-qualified name.
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			// "<N>" suffix form of the ABI selector: rewrite it to
			// the ",N" form for subsequent calls and match
			// ABI-qualified.
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "Foo" or "Foo,N"
			pkgDotName == ssaDump || // "pkg.Foo"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "path/to/pkg.Foo"
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the AST dump so it can appear in the html output
		// (and on stdout when requested).
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Instrumentation (race/msan/asan): skipped for norace functions
	// and ABI wrappers. Race mode additionally instruments
	// function entry/exit.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate the entry block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// Write the sources column and the AST dump captured above.
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether defers in this function can be open-coded
	// (inlined at each return) rather than going through the runtime.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Disabled for shared/dynlink 386 builds.
		// NOTE(review): the original rationale comment was stripped;
		// presumably related to PIC call sequences on 386 — confirm
		// against upstream history.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Open-coded defers don't mix with racefuncenter/racefuncexit
		// instrumentation of function entry/exit.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// All result parameters must live on the stack for
		// open-coded defers.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Too much code would be duplicated: each return site repeats
		// every defer's exit code. Fall back to runtime defers.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits temporary: a stack byte tracking which
		// defers are pending. It is address-taken so the runtime can
		// read it during panic processing, and initialized to zero.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// The zero value (OpConst8 with no AuxInt) is the initial state.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Keep the slot live from here on so its value is available
		// to the runtime during a panic.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record which output parameters are returned in registers; the
	// debug-info generator needs this list.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// Processed at each use; may not be needed at all.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable incoming parameters, and spill register-passed
	// parameters that must live in memory.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v)
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// SSA-capable type in registers: materialize
						// the whole value and store it once.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Not SSA-capable: spill each register piece
						// to its stack slot individually.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Unpack the closure context, if any.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// Unoptimized build of a range-over-func body: spill the
			// closure pointer to a named, address-taken slot
			// (".closureptr") so it is observable to debuggers.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep the slot live for the whole function.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// A by-value, non-addrtaken, SSA-capable captured variable
			// can be promoted to a plain local: load its value out of
			// the closure once and treat it as a PAUTO from then on.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// By-reference captures store a pointer in the closure;
			// load it to get the variable's address.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fallthrough to exit.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main SSA compilation loop.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information ("register maps")
	// for the prologue/morestack spills. Must run after AllocFrame,
	// since FrameOffset depends on the final frame layout.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		if len(offs) < len(typs) {
			s.Fatalf("len(offs)=%d < len(typs)=%d, params=\n%s", len(offs), len(typs), params)
		}
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
623
// storeParameterRegsToStack spills the register pieces of the incoming
// parameter n (per paramAssignment) to its stack home at addr. If
// pointersOnly is set, only pointer-shaped pieces are stored.
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		// Attach the (name, offset) aux so debug info can track which
		// piece of the parameter this register held.
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}
640
641
642
643
644
645
646
// zeroResults zeroes the function's stack-resident result parameters
// at entry, before any user code runs, so they never hold junk (their
// values can be observed early, e.g. during panic/defer processing).
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// Heap-escaped results are handled when their heap copy
			// is allocated (see paramsToHeap/newHeapaddr), not here.
			continue
		}

		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			// Non-SSAable result: zero its memory directly. Mark the
			// slot defined first when the liveness machinery cares
			// about it (pointers or merge candidates).
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
667
668
669
// paramsToHeap allocates heap copies for receiver, input, and output
// parameters that escape, and (for inputs) copies the incoming stack
// value into the new heap location.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				// anonymous or blank parameter
				continue
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Incoming params carry a value: move it to the heap copy.
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
692
693
694
695
696 func allocSizeAndAlign(t *types.Type) (int64, int64) {
697 size, align := t.Size(), t.Alignment()
698 if types.PtrSize == 4 && align == 4 && size >= 8 {
699
700 size = types.RoundUp(size, 8)
701 align = 8
702 }
703 return size, align
704 }
// allocSize returns just the allocation size for t (see allocSizeAndAlign).
func allocSize(t *types.Type) int64 {
	size, _ := allocSizeAndAlign(t)
	return size
}
// allocAlign returns just the allocation alignment for t (see allocSizeAndAlign).
func allocAlign(t *types.Type) int64 {
	_, align := allocSizeAndAlign(t)
	return align
}
713
714
// newHeapaddr allocates heap storage for n and sets n.Heapaddr to its
// address. Small pointer-free allocations are batched: several such
// locals share one combined allocation, finalized later by
// flushPendingHeapAllocations.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	// Pointer-containing, large, or zero-sized objects get their own
	// allocation; only small pointer-free scalars are aggregated.
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type()))
		return
	}

	// If this object would push the current aggregate over the size
	// limit, flush what we have and start a new one.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value // (SP,mem) result of the allocation call
	if len(s.pendingHeapAllocations) == 0 {
		// Start a new aggregate allocation. The call's size/type
		// arguments are placeholders; flushPendingHeapAllocations
		// rewrites them once the full set of members is known, so the
		// non-specialized newobject entry point must be used here.
		allocCall = s.newObjectNonSpecialized(n.Type(), nil)
	} else {
		// Join the in-progress aggregate: reuse its allocation call.
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}

	// v's AuxInt (the offset within the aggregate) is assigned during
	// the flush, once member order is fixed.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	// Record v so the flush can patch its offset and final type.
	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	s.setHeapaddr(n.Pos(), n, v)
}
750
// flushPendingHeapAllocations finalizes the current batch of
// aggregated small heap allocations: it assigns each member its offset
// within the combined object and rewrites the shared allocation call
// in place to allocate the combined size via mallocgc (or a
// size-specialized variant).
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return
	}
	s.pendingHeapAllocations = nil
	ptr := pending[0].Args[0] // result pointer of the allocation call
	call := ptr.Args[0]       // the allocation call itself

	if len(pending) == 1 {
		// Only one member: the OffPtr-by-0 is a no-op, so the original
		// single-object allocation call is already correct. Just turn
		// the placeholder OffPtr into a copy of the pointer.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort members by decreasing alignment so that assigning offsets
	// sequentially keeps every member properly aligned (stable, so
	// equal-alignment members keep allocation order).
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign offsets and compute the combined size.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the allocation call in place into
	// mallocgc(size, nil, true): pointer-free, needs zeroing.
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type),
		s.constBool(true),
		call.Args[1], // memory argument
	}
	mallocSym := ir.Syms.MallocGC
	if specialMallocSym := s.specializedMallocSym(size, false); specialMallocSym != nil {
		mallocSym = specialMallocSym
	}
	call.Aux = ssa.StaticAuxCall(mallocSym, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // arg+result stack space
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// The combined object has no single Go type; use unsafe.Pointer
	// for the call result and the extracted pointer.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
805
// specializedMallocSym returns the size-class-specialized runtime
// malloc symbol for an allocation of the given size and pointer-ness,
// or nil if no specialized variant applies (specialization disabled,
// or the size is too large for in-span heap bits).
func (s *state) specializedMallocSym(size int64, hasPointers bool) *obj.LSym {
	if !s.sizeSpecializedMallocEnabled() {
		return nil
	}
	// Only sizes whose GC metadata fits in the span itself
	// (size <= ptrSize * ptrBits bytes) have specialized entry points.
	ptrSize := s.config.PtrSize
	ptrBits := ptrSize * 8
	minSizeForMallocHeader := ptrSize * ptrBits
	heapBitsInSpan := size <= minSizeForMallocHeader
	if !heapBitsInSpan {
		return nil
	}
	// Map the size to its runtime size class (mirrors the runtime's
	// divRoundUp + SizeToSizeClass8 lookup).
	divRoundUp := func(n, a uintptr) uintptr { return (n + a - 1) / a }
	sizeClass := gc.SizeToSizeClass8[divRoundUp(uintptr(size), gc.SmallSizeDiv)]
	if hasPointers {
		return ir.Syms.MallocGCSmallScanNoHeader[sizeClass]
	}
	if size < gc.TinySize {
		// Tiny pointer-free objects have per-size (not per-class) entry points.
		return ir.Syms.MallocGCTiny[size]
	}
	return ir.Syms.MallocGCSmallNoScan[sizeClass]
}
827
// sizeSpecializedMallocEnabled reports whether calls to the
// size-class-specialized malloc entry points may be generated for the
// current compilation.
func (s *state) sizeSpecializedMallocEnabled() bool {
	if base.Flag.CompilingRuntime {
		// Never specialize inside the runtime package itself.
		// NOTE(review): the original rationale comment was stripped;
		// presumably this avoids bootstrapping/recursion issues with
		// the malloc implementation — confirm against upstream.
		return false
	}
	// Gated on the SizeSpecializedMalloc experiment, and disabled for
	// instrumented (race/msan/asan) builds.
	return buildcfg.Experiment.SizeSpecializedMalloc && !base.Flag.Cfg.Instrumenting
}
842
843
844
// setHeapaddr creates a new pseudo-variable "&n" holding ptr (the heap
// address of n's storage) and records it as n.Heapaddr. ptr must be a
// *T where T is n's type.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the local "&n" that holds the heap address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
863
864
// newObject returns an SSA value for a newly heap-allocated, zeroed
// object of the given type, using a size-specialized malloc entry
// point when available and falling back to runtime.newobject.
func (s *state) newObject(typ *types.Type) *ssa.Value {
	if typ.Size() == 0 {
		// Zero-sized objects all share the runtime's zerobase address.
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	rtype := s.reflectType(typ)
	if specialMallocSym := s.specializedMallocSym(typ.Size(), typ.HasPointers()); specialMallocSym != nil {
		// Specialized entry points take (size, type, needzero).
		return s.rtcall(specialMallocSym, true, []*types.Type{types.NewPtr(typ)},
			s.constInt(types.Types[types.TUINTPTR], typ.Size()),
			rtype,
			s.constBool(true),
		)[0]
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
879
880
881
// newObjectNonSpecialized is like newObject but always calls
// runtime.newobject, never a size-specialized malloc. Used when the
// call may later be rewritten (see flushPendingHeapAllocations). A
// non-nil rtype overrides the type pointer argument.
func (s *state) newObjectNonSpecialized(typ *types.Type, rtype *ssa.Value) *ssa.Value {
	if typ.Size() == 0 {
		// Zero-sized objects all share the runtime's zerobase address.
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	if rtype == nil {
		rtype = s.reflectType(typ)
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
891
// checkPtrAlignment inserts a runtime checkptrAlignment call for the
// unsafe.Pointer conversion n, validating pointer value v. count, if
// non-nil, is the element count for a conversion to a pointer to
// array.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		// With a count, the target must be *[N]T; check element type T.
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// Fast path: no possible misalignment or overflow to detect.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	// Use the statically-known rtype expression when the frontend
	// provided one; otherwise take the type descriptor's address.
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
922
923
924
// reflectType returns an SSA value holding the address of typ's
// runtime type descriptor, materialized in the entry block.
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	// TODO(review): original comment stripped here; the descriptor is
	// addressed as a *uint8 off SB.
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
931
// dumpSourcesColumn writes the "sources" column of the ssa.html dump:
// the source lines of fn followed by those of every function inlined
// into it, sorted topologically.
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read the source of the target function.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		// Best effort: log and continue without the target's sources.
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read the sources of each inlined function.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	// Target function first, then inlined functions in topo order.
	slices.SortFunc(inlFns, ssa.ByTopoCmp)
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}
960
961 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
962 f, err := os.Open(os.ExpandEnv(file))
963 if err != nil {
964 return nil, err
965 }
966 defer f.Close()
967 var lines []string
968 ln := uint(1)
969 scanner := bufio.NewScanner(f)
970 for scanner.Scan() && ln <= end {
971 if ln >= start {
972 lines = append(lines, scanner.Text())
973 }
974 ln++
975 }
976 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
977 }
978
979
980
981
// updateUnsetPredPos propagates a source position to predecessors of b
// that lack one, recursively: each position-less predecessor inherits
// either b's position or the position of b's first position-carrying
// value. b itself must already have a position.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos // computed lazily, at most once
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			// Prefer the position of the first value in b that
			// carries a meaningful one.
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// p may itself have position-less predecessors; recurse.
		s.updateUnsetPredPos(p)
	}
}
1010
1011
// openDeferInfo records one open-coded defer site in the current
// function (collected in state.openDefers).
type openDeferInfo struct {
	// The defer call itself.
	n *ir.CallExpr

	// closure is the SSA value of the deferred function/closure.
	// NOTE(review): set elsewhere in this file; exact conditions under
	// which it is non-nil are not visible in this chunk.
	closure *ssa.Value

	// closureNode is the stack slot holding the deferred closure;
	// emitOpenDeferInfo requires these slots to be laid out
	// contiguously, one pointer-sized word apart.
	closureNode *ir.Name
}
1023
// state holds all the working state used while converting one
// function's IR to SSA form (see buildssa).
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable
	// symbol to ssa value); marker nodes (memVar, ptrVar, ...) are
	// used as keys for pseudo-variables.
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in
	// the current block (resolved later by phi insertion); cleared at
	// the start of each block.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by
	// block ID; used by phi insertion.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables, computed in the
	// entry block.
	decladdrs map[*ir.Name]*ssa.Value

	// starting values; memory, stack pointer, and globals pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value
	// value representing address of where deferBits autotmp is stored
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack; the current line number is top of stack
	line []src.XPos
	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number; used to
	// deduplicate panic calls.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool // fn has //go:cgo_unsafe_args
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool // soft-float target (calls for FP ops)
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit (race)
	instrumentMemory    bool // whether to instrument memory operations (race/msan/asan)

	// If doing open-coded defers, list of info about the defer calls
	// in scanning order. Hence, at exit we should run these defers in
	// reverse order of this list.
	openDefers []*openDeferInfo

	// If doing open-coded defers, these are cached across exits to
	// share the generated defer-exit code between return sites.
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	prevCall *ssa.Value // the last call emitted

	// List of allocations in the current block which are still
	// pending aggregation into a single combined allocation. All are
	// (OffPtr (Select0 (runtime call))) and have the correct types,
	// but the offsets are not yet set, and the type of the runtime
	// call is not final. See flushPendingHeapAllocations.
	pendingHeapAllocations []*ssa.Value

	// First argument of append calls that could be stack allocated.
	// NOTE(review): usage is elsewhere in this file — confirm.
	appendTargets map[ir.Node]bool

	// First line of each block, indexed by block ID (used for
	// position bookkeeping in startBlock/endBlock/pushLine).
	blockStarts []src.XPos

	// backing stores (for append operations) allocated so far in this
	// function, keyed by the IR node they back.
	// NOTE(review): populated elsewhere in this file — confirm.
	backingStores map[ir.Node]*backingStoreInfo
}
1114
// backingStoreInfo describes a stack backing store for a slice
// (tracked per IR node in state.backingStores).
// NOTE(review): all field semantics below are inferred — this chunk
// does not show the code that populates or consumes this struct;
// confirm against the rest of the file.
type backingStoreInfo struct {
	// K: capacity (in elements) of the backing store — presumably.
	K int64

	// store: the stack object serving as the backing store.
	store *ir.Name

	// used: a variable tracking whether the store is in use — presumably.
	used *ir.Name

	// usedStatic: whether "used" is known statically rather than
	// tracked at runtime — presumably.
	usedStatic bool
}
1127
// funcLine keys state.panics: a (runtime function, source position)
// pair, so multiple identical panic calls at the same line share one
// call block.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1133
// ssaLabel records the blocks associated with a Go label: its goto
// target plus, for labeled loops/switches, the break and continue
// targets.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1139
1140
1141 func (s *state) label(sym *types.Sym) *ssaLabel {
1142 lab := s.labels[sym.Name]
1143 if lab == nil {
1144 lab = new(ssaLabel)
1145 s.labels[sym.Name] = lab
1146 }
1147 return lab
1148 }
1149
// Logging and diagnostics: thin delegations to the underlying
// ssa.Func and its frontend; Fatalf reports at the current source
// position (top of the line stack).
func (s *state) Logf(msg string, args ...any) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                    { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...any) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...any) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                        { return s.f.Frontend().Debug_checknil() }
1157
// ssaMarker returns a fresh ir.Name used only as a unique map key for
// a pseudo-variable in state.vars (never emitted as real code).
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1161
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables used during SSA construction
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1175
1176
// startBlock sets the current block we're generating code in to b.
// It is a fatal error to start a block while another is still open.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	clear(s.fwdVars)
	// Make sure blockStarts has a slot for b; the slot is filled in
	// lazily (by pushLine/endBlock) with the block's first position.
	for len(s.blockStarts) <= int(b.ID) {
		s.blockStarts = append(s.blockStarts, src.NoXPos)
	}
}
1188
1189
1190
1191
// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	s.flushPendingHeapAllocations()

	// Record the variable definitions live at the end of b for the
	// later variable/phi resolution.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// A block lacking a position gets no position of its own;
		// presumably a later pass assigns it one from a neighbor —
		// confirm in the ssa package.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
		if s.blockStarts[b.ID] == src.NoXPos {
			s.blockStarts[b.ID] = s.lastPos
		}
	}
	return b
}
1219
1220
// pushLine pushes a line number on the position stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// The frontend may emit a node with the line number missing;
		// use the parent line number in that case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	// Record the first known position in the current block, if any.
	if b := s.curBlock; b != nil && s.blockStarts[b.ID] == src.NoXPos {
		s.blockStarts[b.ID] = line
	}

	s.line = append(s.line, line)
}
1240
1241
1242 func (s *state) popLine() {
1243 s.line = s.line[:len(s.line)-1]
1244 }
1245
1246
1247 func (s *state) peekPos() src.XPos {
1248 return s.line[len(s.line)-1]
1249 }
1250
1251
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// If isStmt is false, the value's position is marked WithNotStmt so it is
// not treated as a statement boundary.
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// If isStmt is false, the value's position is marked WithNotStmt so it is
// not treated as a statement boundary.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// If isStmt is false, the value's position is marked WithNotStmt so it is
// not treated as a statement boundary.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4A adds a new value with four arguments and an aux value to the current block.
func (s *state) newValue4A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4A(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1355
// entryBlock returns the block to place "entry" values in (normally the
// function's entry block).
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// With optimization disabled, keep using the current block
		// instead of the entry block.
		// NOTE(review): rationale reconstructed — presumably to keep
		// values in source order for debugging; confirm upstream.
		b = s.curBlock
	}
	return b
}
1367
1368
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1402
1403
// const* routines add a new constant value to the function, delegating to
// the ssa.Func constant cache so equal constants are shared.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}

// constInt adds an integer constant sized for the target's pointer width.
// It is a fatal error if c does not fit in 32 bits on a 32-bit target.
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}
1444
1445
1446
// newValueOrSfCall1 and newValueOrSfCall2 are wrappers around newValue1/2.
// When compiling in soft-float mode they may instead emit a call to a
// soft-float runtime routine (via sfcall) for ops sfcall supports.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1463
// instrumentKind describes the kind of memory operation being
// instrumented for the race/msan/asan sanitizers.
type instrumentKind uint8

const (
	// Give the constants the instrumentKind type explicitly: a bare
	// `= iota` would make them untyped ints and forgo the type checking
	// the named type provides at call sites.
	instrumentRead = instrumentKind(iota)
	instrumentWrite
	instrumentMove
)
1471
// instrument emits sanitizer instrumentation for a single-address
// operation of the given kind on addr.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1475
1476
1477
1478
// instrumentFields instruments an operation on addr of type t. For MSAN
// and ASAN on struct types it recursively instruments each named field
// separately (skipping blank fields); otherwise it instruments the whole
// object at once.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !isStructNotSIMD(t) {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			continue
		}
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}
1492
// instrumentMove instruments a copy from src to dst. MSAN has a dedicated
// two-address move hook; the other sanitizers see it as a read of src
// followed by a write of dst.
func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
	if base.Flag.MSan {
		s.instrument2(t, dst, src, instrumentMove)
	} else {
		s.instrument(t, src, instrumentRead)
		s.instrument(t, dst, instrumentWrite)
	}
}
1501
// instrument2 emits the runtime call implementing sanitizer
// instrumentation for an operation of the given kind on addr (and, for
// moves, addr2). It is a no-op when instrumentation is off, the type has
// zero size, or the address is known to be sanitizer-safe.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false // whether the runtime hook takes an explicit size

	// Only the MSAN move hook takes a second address.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// For composite objects (more than one component) use the
		// range variants, which take an explicit width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component object: the plain race hooks infer the
		// access size, so no width argument is needed.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1582
// load emits an instrumented load of type t from address src.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad emits a load without sanitizer instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store emits a store of val (of type t) to address dst, threading the
// memory state through s.vars[memVar].
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

// zero emits an instrumented zeroing of t.Size() bytes at dst.
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1602
// move emits a copy of t.Size() bytes from src to dst, assuming the two
// regions do not partially overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}

// moveWhichMayOverlap is like move, but if mayOverlap is set the source
// and destination may be partially overlapping views of the same array.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Possibly-overlapping multi-element array copy that won't be
		// inlined as a memmove: an OpMove is not guaranteed to be safe
		// here, so fall back to a runtime call, which handles
		// overlapping memory correctly.
		// (Single-element arrays are fine: the regions are either
		// disjoint or identical.)
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// The write barriers inside typedmemmove need a position;
			// record the current one on the function.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1649
1650
1651 func (s *state) stmtList(l ir.Nodes) {
1652 for _, n := range l {
1653 s.stmt(n)
1654 }
1655 }
1656
1657
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil and n isn't a label (which might be the
	// target of a goto and therefore start a new, reachable block),
	// we're in dead code and there's nothing to do.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			// Direct calls to runtime functions that never return
			// (throw, gopanic, the panic* helpers, ...) terminate
			// control flow: end the current block as a BlockExit.
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer strategy is being used.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSA-able result: dottype must have produced a load;
			// strip it and assign through its address instead.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only from intrinsic calls returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing to do because the label isn't targetable. See issue 52278.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to that label. (We pretend "label:" is preceded by
		// "goto label", unless the label is reached by falling off
		// the end of dead code.)
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // goto is a statement in its own right
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x=x assignment. No point in doing anything here,
			// and skipping it avoids emitting a VARDEF/self-copy
			// pair for x.
			return
		}

		// mayOverlap tracks whether the LHS and RHS might refer to
		// partially overlapping memory. That can only happen when
		// both sides are dereferences (two views of the same backing
		// array); see moveWhichMayOverlap.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// The RHS reads through a string's data pointer.
				// String contents are immutable, so a read from
				// there can't overlap our write.
				mayOverlap = false
			}
		}

		// Evaluate the RHS, handling the special cases below first.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk; any that remain are just
				// zeroing the destination.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an
				// append back to the same slice; if so, handle it
				// specially to avoid needless stores (unless
				// optimization is off).
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice is SSA-able, the normal path is
				// already cheap; only the in-memory case benefits.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 { // replicating old diagnostic message
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate the RHS for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			// Non-SSA-able type: assign by copying from an address.
			if rhs == nil {
				r = nil // Signal assign to use OpZero.
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source.
			// Don't write back fields we aren't changing. See issue #14855.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// NOTE(review): defaulted j/k (x[:len(x)], x[:cap(x)])
			// aren't detected here; by this point they've been
			// rewritten into temporaries.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken arm.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty arms branch straight to bEnd.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue: use the innermost targets
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue: look up the label's targets
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // break/continue is a statement in its own right
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Init; Cond; Post { Body }
		// cond (bCond) -> body (bBody) -> incr (bIncr) -> cond
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// Set position for the body block up front (keeps empty loops positioned).
		bBody.Pos = n.Pos()

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// It can happen that bIncr ends in a block containing only VARKILL,
			// and that muddles the debugging experience. Borrow bCond's position.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by walk; the lowered form
		// lives in the Compiled list. We only need to set up the
		// break target and emit that list.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled switch/select
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// If the body falls through to here (every reachable path
		// should end in a break), the code is unreachable; terminate
		// the block as an exit.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Compute the case range [min, max]. Cases are assumed sorted,
		// so the endpoints are the first and last entries (confirm with
		// walk's jump-table lowering). We then implement
		// min <= idx <= max as (idx-min) <= (max-min) unsigned, and we
		// need idx-min anyway as the jump table's control value.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		// Range check: jump around the table if out of range.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely

		// Build the jump table block itself.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Fill in the jump table: default to bEnd for holes, then
		// point each present case at its label's target.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Try the per-descriptor interface switch cache first, if the
		// target supports it (and we're optimizing).
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// Note: we can only use the cache if we have the right
			// atomic load instruction. Double-check that here.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-width arithmetic ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load cache pointer out of the descriptor, with an
			// atomic load so we see a consistently initialized cache.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize hash probe variable.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load mask from the first word of the cache.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Loop head: compute the address of the probed entry,
			// e = &cache.Entries[hash&mask] (3 words per entry).
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance hash for the next probe (linear probing).
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Look for a cache hit:
			//   if e.Typ == t { goto cacheHit }
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// An empty entry ends the probe sequence:
			//   if e.Typ == nil { goto cacheMiss }
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Cache hit: the entry's remaining two words hold the
			// case index and the itab result.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Cache miss: fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		// Slow path: call the runtime to do the lookup (which also
		// populates the cache).
		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Both hit and miss paths continue here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		// nilCheck's result is deliberately discarded: only the check
		// itself (a possible fault) matters here.
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2352
2353
2354
// If shareDeferExits is true, then open-coded defers of a function that
// end in the same defer state share a single exit sequence (at the cost
// of less precise line numbers for the shared exit). See state.exit.
const shareDeferExits = false
2356
2357
2358
2359
// exit processes any code that needs to be generated just before
// returning: running deferred calls, materializing the result values,
// and the -race exit hook. It returns the BlockRet block that does the
// actual return, whose control value is the final memory state.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			// Optionally reuse an earlier open-coded defer exit when
			// the defer state is identical (disabled by default; see
			// shareDeferExits).
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers: call deferreturn to run them.
			// Attribute the call to the function's end position
			// rather than this particular return statement.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Do actual return.
	// These currently turn into self-copies (in many cases).
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
	// Store SSAable and heap-escaped PPARAMOUT variables back to stack locations.
	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) { // result is in some SSA variable
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// We are about to store to the result slot.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() { // result is actually heap allocated
			// We are about to copy the in-heap result to the result slot.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else { // result is not SSA-able; not escaped, so on stack.
			// Dereference from its stack address; in many cases this
			// becomes a self-move.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Run exit instrumentation (racefuncexit in -race mode), after the
	// results have been read above.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	// The final argument to OpMakeResult is the memory state.
	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2435
// opAndType is the key of the opToSSA table: a generic IR operation
// combined with an operand kind selects the concrete SSA op.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2440
// opToSSA maps a generic IR operator applied to an operand of a given
// concrete kind to the corresponding width-specific SSA opcode.
// Platform-dependent kinds (TINT, TUINT, TUINTPTR) must be resolved
// with concreteEtype before lookup; see ssaOp.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}:    ssa.OpAdd8,
	{ir.OADD, types.TUINT8}:   ssa.OpAdd8,
	{ir.OADD, types.TINT16}:   ssa.OpAdd16,
	{ir.OADD, types.TUINT16}:  ssa.OpAdd16,
	{ir.OADD, types.TINT32}:   ssa.OpAdd32,
	{ir.OADD, types.TUINT32}:  ssa.OpAdd32,
	{ir.OADD, types.TINT64}:   ssa.OpAdd64,
	{ir.OADD, types.TUINT64}:  ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}:    ssa.OpSub8,
	{ir.OSUB, types.TUINT8}:   ssa.OpSub8,
	{ir.OSUB, types.TINT16}:   ssa.OpSub16,
	{ir.OSUB, types.TUINT16}:  ssa.OpSub16,
	{ir.OSUB, types.TINT32}:   ssa.OpSub32,
	{ir.OSUB, types.TUINT32}:  ssa.OpSub32,
	{ir.OSUB, types.TINT64}:   ssa.OpSub64,
	{ir.OSUB, types.TUINT64}:  ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}:    ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}:   ssa.OpNeg8,
	{ir.ONEG, types.TINT16}:   ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}:  ssa.OpNeg16,
	{ir.ONEG, types.TINT32}:   ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}:  ssa.OpNeg32,
	{ir.ONEG, types.TINT64}:   ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}:  ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}:   ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}:  ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}:  ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}:  ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}:  ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}:  ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}:  ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}:    ssa.OpMul8,
	{ir.OMUL, types.TUINT8}:   ssa.OpMul8,
	{ir.OMUL, types.TINT16}:   ssa.OpMul16,
	{ir.OMUL, types.TUINT16}:  ssa.OpMul16,
	{ir.OMUL, types.TINT32}:   ssa.OpMul32,
	{ir.OMUL, types.TUINT32}:  ssa.OpMul32,
	{ir.OMUL, types.TINT64}:   ssa.OpMul64,
	{ir.OMUL, types.TUINT64}:  ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division and modulus have distinct signed/unsigned SSA ops.
	{ir.ODIV, types.TINT8}:   ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}:  ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}:  ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}:  ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}:  ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}:   ssa.OpMod8,
	{ir.OMOD, types.TUINT8}:  ssa.OpMod8u,
	{ir.OMOD, types.TINT16}:  ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}:  ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}:  ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}:   ssa.OpAnd8,
	{ir.OAND, types.TUINT8}:  ssa.OpAnd8,
	{ir.OAND, types.TINT16}:  ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}:  ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}:  ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}:   ssa.OpOr8,
	{ir.OOR, types.TUINT8}:  ssa.OpOr8,
	{ir.OOR, types.TINT16}:  ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}:  ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}:  ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}:   ssa.OpXor8,
	{ir.OXOR, types.TUINT8}:  ssa.OpXor8,
	{ir.OXOR, types.TINT16}:  ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}:  ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}:  ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality on pointer-shaped kinds (func, map, chan, ptr, uintptr,
	// unsafe.Pointer) all lower to pointer comparison.
	{ir.OEQ, types.TBOOL}:      ssa.OpEqB,
	{ir.OEQ, types.TINT8}:      ssa.OpEq8,
	{ir.OEQ, types.TUINT8}:     ssa.OpEq8,
	{ir.OEQ, types.TINT16}:     ssa.OpEq16,
	{ir.OEQ, types.TUINT16}:    ssa.OpEq16,
	{ir.OEQ, types.TINT32}:     ssa.OpEq32,
	{ir.OEQ, types.TUINT32}:    ssa.OpEq32,
	{ir.OEQ, types.TINT64}:     ssa.OpEq64,
	{ir.OEQ, types.TUINT64}:    ssa.OpEq64,
	{ir.OEQ, types.TINTER}:     ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}:     ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}:      ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}:       ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}:      ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}:       ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}:   ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}:   ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}:   ssa.OpEq32F,

	{ir.ONE, types.TBOOL}:      ssa.OpNeqB,
	{ir.ONE, types.TINT8}:      ssa.OpNeq8,
	{ir.ONE, types.TUINT8}:     ssa.OpNeq8,
	{ir.ONE, types.TINT16}:     ssa.OpNeq16,
	{ir.ONE, types.TUINT16}:    ssa.OpNeq16,
	{ir.ONE, types.TINT32}:     ssa.OpNeq32,
	{ir.ONE, types.TUINT32}:    ssa.OpNeq32,
	{ir.ONE, types.TINT64}:     ssa.OpNeq64,
	{ir.ONE, types.TUINT64}:    ssa.OpNeq64,
	{ir.ONE, types.TINTER}:     ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}:     ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}:      ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}:       ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}:      ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}:       ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}:   ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}:   ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}:   ssa.OpNeq32F,

	// Ordered comparisons use distinct signed/unsigned SSA ops.
	// Only OLT and OLE appear here: OGT/OGE are canonicalized by
	// swapping operands before lookup.
	{ir.OLT, types.TINT8}:    ssa.OpLess8,
	{ir.OLT, types.TUINT8}:   ssa.OpLess8U,
	{ir.OLT, types.TINT16}:   ssa.OpLess16,
	{ir.OLT, types.TUINT16}:  ssa.OpLess16U,
	{ir.OLT, types.TINT32}:   ssa.OpLess32,
	{ir.OLT, types.TUINT32}:  ssa.OpLess32U,
	{ir.OLT, types.TINT64}:   ssa.OpLess64,
	{ir.OLT, types.TUINT64}:  ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}:    ssa.OpLeq8,
	{ir.OLE, types.TUINT8}:   ssa.OpLeq8U,
	{ir.OLE, types.TINT16}:   ssa.OpLeq16,
	{ir.OLE, types.TUINT16}:  ssa.OpLeq16U,
	{ir.OLE, types.TINT32}:   ssa.OpLeq32,
	{ir.OLE, types.TUINT32}:  ssa.OpLeq32U,
	{ir.OLE, types.TINT64}:   ssa.OpLeq64,
	{ir.OLE, types.TUINT64}:  ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2612
2613 func (s *state) concreteEtype(t *types.Type) types.Kind {
2614 e := t.Kind()
2615 switch e {
2616 default:
2617 return e
2618 case types.TINT:
2619 if s.config.PtrSize == 8 {
2620 return types.TINT64
2621 }
2622 return types.TINT32
2623 case types.TUINT:
2624 if s.config.PtrSize == 8 {
2625 return types.TUINT64
2626 }
2627 return types.TUINT32
2628 case types.TUINTPTR:
2629 if s.config.PtrSize == 8 {
2630 return types.TUINT64
2631 }
2632 return types.TUINT32
2633 }
2634 }
2635
2636 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2637 etype := s.concreteEtype(t)
2638 x, ok := opToSSA[opAndType{op, etype}]
2639 if !ok {
2640 s.Fatalf("unhandled binary op %v %s", op, etype)
2641 }
2642 return x
2643 }
2644
// opAndTwoTypes is the key for shiftOpToSSA: a shift operator plus the
// kinds of the shifted value (etype1) and the shift amount (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}

// twoTypes is the key for the float-conversion tables: the source kind
// (etype1) and destination kind (etype2) of a conversion.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}

// twoOpsAndType describes how to lower a conversion as two SSA ops
// applied in sequence, with intermediateType between op1 and op2.
// ssa.OpInvalid marks a leg that needs special (runtime-call) handling.
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2661
// fpConvOpToSSA is the generic lowering table for conversions between
// floating-point and integer types (and float-to-float). Each entry
// gives two SSA ops and the intermediate type between them; entries
// containing ssa.OpInvalid mark conversions that need a runtime helper
// (see conv, which checks for OpInvalid and calls uint64Tofloat*,
// float*ToUint64, etc.).
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32: widen to 32/64 bits, then convert.
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float -> signed int: convert, then truncate to the target width.
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float: zero-extend into a wider signed value first.
	// uint64 has no lossless signed widening, so it is marked OpInvalid
	// and handled out of line.
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float -> unsigned int: uint32 and uint64 targets are marked
	// OpInvalid and handled out of line (or overridden per-arch).
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float -> float. Same-size conversions are rounding ops
	// (relevant under softfloat; see newValueOrSfCall*).
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2710
2711
2712
// fpConvOpToSSA32 overrides fpConvOpToSSA on 32-bit targets (except
// MIPS; see conv), where dedicated unsigned-32-bit conversion ops are
// available instead of widening through int64.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2719
2720
// uint64fpConvOpToSSA overrides fpConvOpToSSA on architectures that
// have native uint64<->float conversion instructions (ARM64, Wasm,
// S390X) and under softfloat; see conv.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2727
// shiftOpToSSA maps a shift operator plus the kinds of the shifted
// value and the shift amount to the corresponding SSA opcode. The SSA
// shift ops are named Op<Lsh|Rsh><valueWidth>x<shiftWidth>; right
// shifts of unsigned values use the U (logical-shift) variants.
// Shift amounts are always unsigned kinds here; conv callers
// canonicalize signed shift counts first (see OLSH/ORSH in expr).
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2801
2802 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2803 etype1 := s.concreteEtype(t)
2804 etype2 := s.concreteEtype(u)
2805 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2806 if !ok {
2807 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2808 }
2809 return x
2810 }
2811
2812 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2813 if s.config.PtrSize == 4 {
2814 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2815 }
2816 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2817 }
2818
// conv converts the SSA value v, which has type ft ("from type"), to
// type tt ("to type") and returns the converted value. n is the IR
// node being converted; it supplies position information for
// diagnostics. Unhandled type combinations are fatal compiler errors.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// bool -> uint8, used internally by the compiler.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		// Integer <-> integer: pick copy, truncation, sign extension,
		// or zero extension based on the two sizes and signedness.
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// Truncation. The switch key encodes (from-size, to-size)
			// as a two-digit number, e.g. 84 = 8 bytes -> 4 bytes.
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// Widening of a signed source: sign extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// Widening of an unsigned source: zero extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex <-> complex: convert the real and imaginary parts
		// independently with the matching float op.
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Scalar -> complex: convert to the element float type, then
		// pair it with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		// Float <-> integer / float <-> float. Start from the generic
		// table, then apply per-architecture overrides below.
		cft, ctt := s.concreteEtype(ft), s.concreteEtype(tt)
		conv, ok := fpConvOpToSSA[twoTypes{cft, ctt}]
		// float -> uint32, hash-gated (base.ConvertHash): lower as
		// float -> int64 followed by a 32-bit truncation.
		// NOTE(review): gating semantics come from base.ConvertHash;
		// confirm against its definition.
		if ctt == types.TUINT32 && ft.IsFloat() && !base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil) {
			conv.op1 = ssa.OpCvt64Fto64
			if cft == types.TFLOAT32 {
				conv.op1 = ssa.OpCvt32Fto64
			}
			conv.op2 = ssa.OpTrunc64to32
		}
		// 32-bit targets (except MIPS, handled below) have dedicated
		// unsigned-32-bit conversion ops.
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		// Architectures with native uint64 <-> float instructions, and
		// softfloat mode, use the uint64 override table.
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// MIPS (hardware float) uses dedicated helper lowerings for
		// unsigned 32-bit conversions in both directions.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 then op2, eliding OpCopy legs.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}

		// An OpInvalid leg means this unsigned conversion needs an
		// out-of-line lowering (runtime helper / branchy sequence).
		if ft.IsInteger() {
			// unsigned 64-bit integer -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}

		// float -> unsigned integer
		if ft.Size() == 4 {
			switch tt.Size() {
			case 8:
				return s.float32ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Narrower unsigned targets go through uint32.
				return s.float32ToUint32(n, v, ft, tt)
			}
		}
		if ft.Size() == 8 {
			switch tt.Size() {
			case 8:
				return s.float64ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Narrower unsigned targets go through uint32.
				return s.float64ToUint32(n, v, ft, tt)
			}

		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
3019
3020
3021 func (s *state) expr(n ir.Node) *ssa.Value {
3022 return s.exprCheckPtr(n, true)
3023 }
3024
3025 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
3026 if ir.HasUniquePos(n) {
3027
3028
3029 s.pushLine(n.Pos())
3030 defer s.popLine()
3031 }
3032
3033 s.stmtList(n.Init())
3034 switch n.Op() {
3035 case ir.OBYTES2STRTMP:
3036 n := n.(*ir.ConvExpr)
3037 slice := s.expr(n.X)
3038 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
3039 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3040 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
3041 case ir.OSTR2BYTESTMP:
3042 n := n.(*ir.ConvExpr)
3043 str := s.expr(n.X)
3044 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
3045 if !n.NonNil() {
3046
3047
3048
3049 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
3050 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
3051 ptr = s.ternary(cond, ptr, zerobase)
3052 }
3053 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
3054 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
3055 case ir.OCFUNC:
3056 n := n.(*ir.UnaryExpr)
3057 aux := n.X.(*ir.Name).Linksym()
3058
3059
3060 if aux.ABI() != obj.ABIInternal {
3061 s.Fatalf("expected ABIInternal: %v", aux.ABI())
3062 }
3063 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
3064 case ir.ONAME:
3065 n := n.(*ir.Name)
3066 if n.Class == ir.PFUNC {
3067
3068 sym := staticdata.FuncLinksym(n)
3069 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
3070 }
3071 if s.canSSA(n) {
3072 return s.variable(n, n.Type())
3073 }
3074 return s.load(n.Type(), s.addr(n))
3075 case ir.OLINKSYMOFFSET:
3076 n := n.(*ir.LinksymOffsetExpr)
3077 return s.load(n.Type(), s.addr(n))
3078 case ir.ONIL:
3079 n := n.(*ir.NilExpr)
3080 t := n.Type()
3081 switch {
3082 case t.IsSlice():
3083 return s.constSlice(t)
3084 case t.IsInterface():
3085 return s.constInterface(t)
3086 default:
3087 return s.constNil(t)
3088 }
3089 case ir.OLITERAL:
3090 switch u := n.Val(); u.Kind() {
3091 case constant.Int:
3092 i := ir.IntVal(n.Type(), u)
3093 switch n.Type().Size() {
3094 case 1:
3095 return s.constInt8(n.Type(), int8(i))
3096 case 2:
3097 return s.constInt16(n.Type(), int16(i))
3098 case 4:
3099 return s.constInt32(n.Type(), int32(i))
3100 case 8:
3101 return s.constInt64(n.Type(), i)
3102 default:
3103 s.Fatalf("bad integer size %d", n.Type().Size())
3104 return nil
3105 }
3106 case constant.String:
3107 i := constant.StringVal(u)
3108 if i == "" {
3109 return s.constEmptyString(n.Type())
3110 }
3111 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3112 case constant.Bool:
3113 return s.constBool(constant.BoolVal(u))
3114 case constant.Float:
3115 f, _ := constant.Float64Val(u)
3116 switch n.Type().Size() {
3117 case 4:
3118 return s.constFloat32(n.Type(), f)
3119 case 8:
3120 return s.constFloat64(n.Type(), f)
3121 default:
3122 s.Fatalf("bad float size %d", n.Type().Size())
3123 return nil
3124 }
3125 case constant.Complex:
3126 re, _ := constant.Float64Val(constant.Real(u))
3127 im, _ := constant.Float64Val(constant.Imag(u))
3128 switch n.Type().Size() {
3129 case 8:
3130 pt := types.Types[types.TFLOAT32]
3131 return s.newValue2(ssa.OpComplexMake, n.Type(),
3132 s.constFloat32(pt, re),
3133 s.constFloat32(pt, im))
3134 case 16:
3135 pt := types.Types[types.TFLOAT64]
3136 return s.newValue2(ssa.OpComplexMake, n.Type(),
3137 s.constFloat64(pt, re),
3138 s.constFloat64(pt, im))
3139 default:
3140 s.Fatalf("bad complex size %d", n.Type().Size())
3141 return nil
3142 }
3143 default:
3144 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3145 return nil
3146 }
3147 case ir.OCONVNOP:
3148 n := n.(*ir.ConvExpr)
3149 to := n.Type()
3150 from := n.X.Type()
3151
3152
3153
3154 x := s.expr(n.X)
3155 if to == from {
3156 return x
3157 }
3158
3159
3160
3161
3162
3163 if to.IsPtrShaped() != from.IsPtrShaped() {
3164 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3165 }
3166
3167 v := s.newValue1(ssa.OpCopy, to, x)
3168
3169
3170 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3171 return v
3172 }
3173
3174
3175 if from.Kind() == to.Kind() {
3176 return v
3177 }
3178
3179
3180 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3181 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3182 s.checkPtrAlignment(n, v, nil)
3183 }
3184 return v
3185 }
3186
3187
3188 mt := types.NewPtr(reflectdata.MapType())
3189 if to.Kind() == types.TMAP && from == mt {
3190 return v
3191 }
3192
3193 types.CalcSize(from)
3194 types.CalcSize(to)
3195 if from.Size() != to.Size() {
3196 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3197 return nil
3198 }
3199 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3200 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3201 return nil
3202 }
3203
3204 if base.Flag.Cfg.Instrumenting {
3205
3206
3207
3208 return v
3209 }
3210
3211 if etypesign(from.Kind()) == 0 {
3212 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3213 return nil
3214 }
3215
3216
3217 return v
3218
3219 case ir.OCONV:
3220 n := n.(*ir.ConvExpr)
3221 x := s.expr(n.X)
3222 return s.conv(n, x, n.X.Type(), n.Type())
3223
3224 case ir.ODOTTYPE:
3225 n := n.(*ir.TypeAssertExpr)
3226 res, _ := s.dottype(n, false)
3227 return res
3228
3229 case ir.ODYNAMICDOTTYPE:
3230 n := n.(*ir.DynamicTypeAssertExpr)
3231 res, _ := s.dynamicDottype(n, false)
3232 return res
3233
3234
3235 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3236 n := n.(*ir.BinaryExpr)
3237 a := s.expr(n.X)
3238 b := s.expr(n.Y)
3239 if n.X.Type().IsComplex() {
3240 pt := types.FloatForComplex(n.X.Type())
3241 op := s.ssaOp(ir.OEQ, pt)
3242 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3243 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3244 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3245 switch n.Op() {
3246 case ir.OEQ:
3247 return c
3248 case ir.ONE:
3249 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3250 default:
3251 s.Fatalf("ordered complex compare %v", n.Op())
3252 }
3253 }
3254
3255
3256 op := n.Op()
3257 switch op {
3258 case ir.OGE:
3259 op, a, b = ir.OLE, b, a
3260 case ir.OGT:
3261 op, a, b = ir.OLT, b, a
3262 }
3263 if n.X.Type().IsFloat() {
3264
3265 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3266 }
3267
3268 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3269 case ir.OMUL:
3270 n := n.(*ir.BinaryExpr)
3271 a := s.expr(n.X)
3272 b := s.expr(n.Y)
3273 if n.Type().IsComplex() {
3274 mulop := ssa.OpMul64F
3275 addop := ssa.OpAdd64F
3276 subop := ssa.OpSub64F
3277 pt := types.FloatForComplex(n.Type())
3278 wt := types.Types[types.TFLOAT64]
3279
3280 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3281 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3282 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3283 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3284
3285 if pt != wt {
3286 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3287 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3288 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3289 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3290 }
3291
3292 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3293 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3294
3295 if pt != wt {
3296 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3297 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3298 }
3299
3300 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3301 }
3302
3303 if n.Type().IsFloat() {
3304 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3305 }
3306
3307 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3308
3309 case ir.ODIV:
3310 n := n.(*ir.BinaryExpr)
3311 a := s.expr(n.X)
3312 b := s.expr(n.Y)
3313 if n.Type().IsComplex() {
3314
3315
3316
3317 mulop := ssa.OpMul64F
3318 addop := ssa.OpAdd64F
3319 subop := ssa.OpSub64F
3320 divop := ssa.OpDiv64F
3321 pt := types.FloatForComplex(n.Type())
3322 wt := types.Types[types.TFLOAT64]
3323
3324 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3325 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3326 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3327 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3328
3329 if pt != wt {
3330 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3331 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3332 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3333 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3334 }
3335
3336 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3337 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3338 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3339
3340
3341
3342
3343
3344 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3345 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3346
3347 if pt != wt {
3348 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3349 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3350 }
3351 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3352 }
3353 if n.Type().IsFloat() {
3354 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3355 }
3356 return s.intDivide(n, a, b)
3357 case ir.OMOD:
3358 n := n.(*ir.BinaryExpr)
3359 a := s.expr(n.X)
3360 b := s.expr(n.Y)
3361 return s.intDivide(n, a, b)
3362 case ir.OADD, ir.OSUB:
3363 n := n.(*ir.BinaryExpr)
3364 a := s.expr(n.X)
3365 b := s.expr(n.Y)
3366 if n.Type().IsComplex() {
3367 pt := types.FloatForComplex(n.Type())
3368 op := s.ssaOp(n.Op(), pt)
3369 return s.newValue2(ssa.OpComplexMake, n.Type(),
3370 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3371 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3372 }
3373 if n.Type().IsFloat() {
3374 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3375 }
3376 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3377 case ir.OAND, ir.OOR, ir.OXOR:
3378 n := n.(*ir.BinaryExpr)
3379 a := s.expr(n.X)
3380 b := s.expr(n.Y)
3381 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3382 case ir.OANDNOT:
3383 n := n.(*ir.BinaryExpr)
3384 a := s.expr(n.X)
3385 b := s.expr(n.Y)
3386 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3387 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3388 case ir.OLSH, ir.ORSH:
3389 n := n.(*ir.BinaryExpr)
3390 a := s.expr(n.X)
3391 b := s.expr(n.Y)
3392 bt := b.Type
3393 if bt.IsSigned() {
3394 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3395 s.check(cmp, ir.Syms.Panicshift)
3396 bt = bt.ToUnsigned()
3397 }
3398 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3399 case ir.OANDAND, ir.OOROR:
3400
3401
3402
3403
3404
3405
3406
3407
3408
3409
3410
3411
3412
3413 n := n.(*ir.LogicalExpr)
3414 el := s.expr(n.X)
3415 s.vars[n] = el
3416
3417 b := s.endBlock()
3418 b.Kind = ssa.BlockIf
3419 b.SetControl(el)
3420
3421
3422
3423
3424
3425 bRight := s.f.NewBlock(ssa.BlockPlain)
3426 bResult := s.f.NewBlock(ssa.BlockPlain)
3427 if n.Op() == ir.OANDAND {
3428 b.AddEdgeTo(bRight)
3429 b.AddEdgeTo(bResult)
3430 } else if n.Op() == ir.OOROR {
3431 b.AddEdgeTo(bResult)
3432 b.AddEdgeTo(bRight)
3433 }
3434
3435 s.startBlock(bRight)
3436 er := s.expr(n.Y)
3437 s.vars[n] = er
3438
3439 b = s.endBlock()
3440 b.AddEdgeTo(bResult)
3441
3442 s.startBlock(bResult)
3443 return s.variable(n, types.Types[types.TBOOL])
3444 case ir.OCOMPLEX:
3445 n := n.(*ir.BinaryExpr)
3446 r := s.expr(n.X)
3447 i := s.expr(n.Y)
3448 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3449
3450
3451 case ir.ONEG:
3452 n := n.(*ir.UnaryExpr)
3453 a := s.expr(n.X)
3454 if n.Type().IsComplex() {
3455 tp := types.FloatForComplex(n.Type())
3456 negop := s.ssaOp(n.Op(), tp)
3457 return s.newValue2(ssa.OpComplexMake, n.Type(),
3458 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3459 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3460 }
3461 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3462 case ir.ONOT, ir.OBITNOT:
3463 n := n.(*ir.UnaryExpr)
3464 a := s.expr(n.X)
3465 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3466 case ir.OIMAG, ir.OREAL:
3467 n := n.(*ir.UnaryExpr)
3468 a := s.expr(n.X)
3469 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3470 case ir.OPLUS:
3471 n := n.(*ir.UnaryExpr)
3472 return s.expr(n.X)
3473
3474 case ir.OADDR:
3475 n := n.(*ir.AddrExpr)
3476 return s.addr(n.X)
3477
3478 case ir.ORESULT:
3479 n := n.(*ir.ResultExpr)
3480 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3481 panic("Expected to see a previous call")
3482 }
3483 which := n.Index
3484 if which == -1 {
3485 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3486 }
3487 return s.resultOfCall(s.prevCall, which, n.Type())
3488
3489 case ir.ODEREF:
3490 n := n.(*ir.StarExpr)
3491 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3492 return s.load(n.Type(), p)
3493
3494 case ir.ODOT:
3495 n := n.(*ir.SelectorExpr)
3496 if n.X.Op() == ir.OSTRUCTLIT {
3497
3498
3499
3500 if !ir.IsZero(n.X) {
3501 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3502 }
3503 return s.zeroVal(n.Type())
3504 }
3505
3506
3507
3508
3509 if ir.IsAddressable(n) && !s.canSSA(n) {
3510 p := s.addr(n)
3511 return s.load(n.Type(), p)
3512 }
3513 v := s.expr(n.X)
3514 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3515
3516 case ir.ODOTPTR:
3517 n := n.(*ir.SelectorExpr)
3518 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3519 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3520 return s.load(n.Type(), p)
3521
3522 case ir.OINDEX:
3523 n := n.(*ir.IndexExpr)
3524 switch {
3525 case n.X.Type().IsString():
3526 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3527
3528
3529
3530 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3531 }
3532 a := s.expr(n.X)
3533 i := s.expr(n.Index)
3534 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3535 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3536 ptrtyp := s.f.Config.Types.BytePtr
3537 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3538 if ir.IsConst(n.Index, constant.Int) {
3539 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3540 } else {
3541 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3542 }
3543 return s.load(types.Types[types.TUINT8], ptr)
3544 case n.X.Type().IsSlice():
3545 p := s.addr(n)
3546 return s.load(n.X.Type().Elem(), p)
3547 case n.X.Type().IsArray():
3548 if ssa.CanSSA(n.X.Type()) {
3549
3550 bound := n.X.Type().NumElem()
3551 a := s.expr(n.X)
3552 i := s.expr(n.Index)
3553 if bound == 0 {
3554
3555
3556 z := s.constInt(types.Types[types.TINT], 0)
3557 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3558
3559
3560 return s.zeroVal(n.Type())
3561 }
3562 len := s.constInt(types.Types[types.TINT], bound)
3563 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3564 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3565 }
3566 p := s.addr(n)
3567 return s.load(n.X.Type().Elem(), p)
3568 default:
3569 s.Fatalf("bad type for index %v", n.X.Type())
3570 return nil
3571 }
3572
3573 case ir.OLEN, ir.OCAP:
3574 n := n.(*ir.UnaryExpr)
3575
3576
3577 a := s.expr(n.X)
3578 t := n.X.Type()
3579 switch {
3580 case t.IsSlice():
3581 op := ssa.OpSliceLen
3582 if n.Op() == ir.OCAP {
3583 op = ssa.OpSliceCap
3584 }
3585 return s.newValue1(op, types.Types[types.TINT], a)
3586 case t.IsString():
3587 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3588 case t.IsMap(), t.IsChan():
3589 return s.referenceTypeBuiltin(n, a)
3590 case t.IsArray():
3591 return s.constInt(types.Types[types.TINT], t.NumElem())
3592 case t.IsPtr() && t.Elem().IsArray():
3593 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3594 default:
3595 s.Fatalf("bad type in len/cap: %v", t)
3596 return nil
3597 }
3598
3599 case ir.OSPTR:
3600 n := n.(*ir.UnaryExpr)
3601 a := s.expr(n.X)
3602 if n.X.Type().IsSlice() {
3603 if n.Bounded() {
3604 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3605 }
3606 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3607 } else {
3608 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3609 }
3610
3611 case ir.OITAB:
3612 n := n.(*ir.UnaryExpr)
3613 a := s.expr(n.X)
3614 return s.newValue1(ssa.OpITab, n.Type(), a)
3615
3616 case ir.OIDATA:
3617 n := n.(*ir.UnaryExpr)
3618 a := s.expr(n.X)
3619 return s.newValue1(ssa.OpIData, n.Type(), a)
3620
3621 case ir.OMAKEFACE:
3622 n := n.(*ir.BinaryExpr)
3623 tab := s.expr(n.X)
3624 data := s.expr(n.Y)
3625 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3626
3627 case ir.OSLICEHEADER:
3628 n := n.(*ir.SliceHeaderExpr)
3629 p := s.expr(n.Ptr)
3630 l := s.expr(n.Len)
3631 c := s.expr(n.Cap)
3632 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3633
3634 case ir.OSTRINGHEADER:
3635 n := n.(*ir.StringHeaderExpr)
3636 p := s.expr(n.Ptr)
3637 l := s.expr(n.Len)
3638 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3639
3640 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3641 n := n.(*ir.SliceExpr)
3642 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3643 v := s.exprCheckPtr(n.X, !check)
3644 var i, j, k *ssa.Value
3645 if n.Low != nil {
3646 i = s.expr(n.Low)
3647 }
3648 if n.High != nil {
3649 j = s.expr(n.High)
3650 }
3651 if n.Max != nil {
3652 k = s.expr(n.Max)
3653 }
3654 p, l, c := s.slice(v, i, j, k, n.Bounded())
3655 if check {
3656
3657 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3658 }
3659 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3660
3661 case ir.OSLICESTR:
3662 n := n.(*ir.SliceExpr)
3663 v := s.expr(n.X)
3664 var i, j *ssa.Value
3665 if n.Low != nil {
3666 i = s.expr(n.Low)
3667 }
3668 if n.High != nil {
3669 j = s.expr(n.High)
3670 }
3671 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3672 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3673
3674 case ir.OSLICE2ARRPTR:
3675
3676
3677
3678
3679 n := n.(*ir.ConvExpr)
3680 v := s.expr(n.X)
3681 nelem := n.Type().Elem().NumElem()
3682 arrlen := s.constInt(types.Types[types.TINT], nelem)
3683 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3684 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3685 op := ssa.OpSlicePtr
3686 if nelem == 0 {
3687 op = ssa.OpSlicePtrUnchecked
3688 }
3689 return s.newValue1(op, n.Type(), v)
3690
3691 case ir.OCALLFUNC:
3692 n := n.(*ir.CallExpr)
3693 if ir.IsIntrinsicCall(n) {
3694 return s.intrinsicCall(n)
3695 }
3696 fallthrough
3697
3698 case ir.OCALLINTER:
3699 n := n.(*ir.CallExpr)
3700 return s.callResult(n, callNormal)
3701
3702 case ir.OGETG:
3703 n := n.(*ir.CallExpr)
3704 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3705
3706 case ir.OGETCALLERSP:
3707 n := n.(*ir.CallExpr)
3708 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3709
3710 case ir.OAPPEND:
3711 return s.append(n.(*ir.CallExpr), false)
3712
3713 case ir.OMOVE2HEAP:
3714 return s.move2heap(n.(*ir.MoveToHeapExpr))
3715
3716 case ir.OMIN, ir.OMAX:
3717 return s.minMax(n.(*ir.CallExpr))
3718
3719 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3720
3721
3722
3723 n := n.(*ir.CompLitExpr)
3724 if !ir.IsZero(n) {
3725 s.Fatalf("literal with nonzero value in SSA: %v", n)
3726 }
3727 return s.zeroVal(n.Type())
3728
3729 case ir.ONEW:
3730 n := n.(*ir.UnaryExpr)
3731 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3732 return s.newObjectNonSpecialized(n.Type().Elem(), s.expr(x.RType))
3733 }
3734 return s.newObject(n.Type().Elem())
3735
3736 case ir.OUNSAFEADD:
3737 n := n.(*ir.BinaryExpr)
3738 ptr := s.expr(n.X)
3739 len := s.expr(n.Y)
3740
3741
3742
3743 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3744
3745 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3746
3747 default:
3748 s.Fatalf("unhandled expr %v", n.Op())
3749 return nil
3750 }
3751 }
3752
3753 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3754 aux := c.Aux.(*ssa.AuxCall)
3755 pa := aux.ParamAssignmentForResult(which)
3756
3757
3758 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3759 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3760 return s.rawLoad(t, addr)
3761 }
3762 return s.newValue1I(ssa.OpSelectN, t, which, c)
3763 }
3764
3765 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3766 aux := c.Aux.(*ssa.AuxCall)
3767 pa := aux.ParamAssignmentForResult(which)
3768 if len(pa.Registers) == 0 {
3769 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3770 }
3771 _, addr := s.temp(c.Pos, t)
3772 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3773 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3774 return addr
3775 }
3776
3777
3778 func (s *state) getBackingStoreInfoForAppend(n *ir.CallExpr) *backingStoreInfo {
3779 if n.Esc() != ir.EscNone {
3780 return nil
3781 }
3782 return s.getBackingStoreInfo(n.Args[0])
3783 }
// getBackingStoreInfo returns (allocating on first use) info about a
// fixed-size stack backing store that may satisfy allocations for the
// slice-typed expression n, or nil if the optimization does not apply
// (element zero-sized or too large, optimizations disabled with -N, or
// the debug hash excludes this position).
func (s *state) getBackingStoreInfo(n ir.Node) *backingStoreInfo {
	t := n.Type()
	et := t.Elem()
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	if et.Size() == 0 || et.Size() > maxStackSize {
		// Zero-sized elements need no storage; an element larger than the
		// threshold would not fit even one entry in the stack buffer.
		return nil
	}
	if base.Flag.N != 0 {
		// Optimizations disabled.
		return nil
	}
	if !base.VariableMakeHash.MatchPos(n.Pos(), nil) {
		// Debug hash bisection excludes this position.
		return nil
	}
	// Reuse a backing store already created for this node.
	i := s.backingStores[n]
	if i != nil {
		return i
	}

	// Build the backing array type: as many elements as fit in maxStackSize bytes.
	K := maxStackSize / et.Size()
	KT := types.NewArray(et, K)
	KT.SetNoalg(true)
	types.CalcArraySize(KT)
	// Wrap the array in a struct with a leading zero-length uintptr array
	// to force pointer alignment of the backing store.
	align := types.NewArray(types.Types[types.TUINTPTR], 0)
	types.CalcArraySize(align)
	storeTyp := types.NewStruct([]*types.Field{
		{Sym: types.BlankSym, Type: align},
		{Sym: types.BlankSym, Type: KT},
	})
	storeTyp.SetNoalg(true)
	types.CalcStructSize(storeTyp)

	// Stack temporary that holds the backing store. Its address is handed out.
	backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
	backingStore.SetAddrtaken(true)

	// Boolean temporary recording whether the backing store has been handed
	// out yet; seeded to false in the entry block so its definition dominates
	// every later use.
	used := typecheck.TempAt(n.Pos(), s.curfn, types.Types[types.TBOOL])
	if s.curBlock == s.f.Entry {
		s.vars[used] = s.constBool(false)
	} else {
		// We're not in the entry block; define the variable there instead.
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)
	}

	// Cache the info so later appends/moves on the same node share it.
	if s.backingStores == nil {
		s.backingStores = map[ir.Node]*backingStoreInfo{}
	}
	i = &backingStoreInfo{K: K, store: backingStore, used: used, usedStatic: false}
	s.backingStores[n] = i
	return i
}
3838
3839
3840
3841
3842
3843
3844
3845
3846
// append lowers an OAPPEND expression n to SSA.
//
// If inplace is false, the result of the append is returned as a new slice
// value. If inplace is true, the result is stored back through the address
// of the slice being appended to, and nil is returned; inplace must not be
// used when the slice can be SSA'd.
//
// Generated shape (roughly):
//
//	p, l, c := slice
//	l += len(args)
//	if c < l (unsigned) {
//	    p, l, c = growslice(...)   // or use a stack backing store
//	}
//	... store args into p[l-len(args):l] ...
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		// In-place: remember the slice header's address so the updated
		// pointer/len/cap can be stored back after a potential grow.
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// Allocate the blocks for the grow path and the final assignment.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the input slice into pointer, length, capacity.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// New length = old length + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow test: cap < newlen, compared unsigned (TUINT).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record ptr/len(/cap) for the phi functions at the merge point.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// If this append does not escape, it may be able to use a fixed-size
	// stack backing store instead of calling growslice.
	var info *backingStoreInfo
	if !inplace {
		info = s.getBackingStoreInfoForAppend(n)
	}

	if !inplace && info != nil && !n.UseBuf && !info.usedStatic {
		// Fast path: satisfy the grow from the stack backing store.
		// This is only possible when the new length fits in the store
		// (l <= K), the store hasn't been handed out yet (!used), and we
		// are growing from length 0 so no elements need to be copied.
		info.usedStatic = true

		// Blocks for the three checks, the fast-path body, and the fallback.
		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)
		tInt := types.Types[types.TINT]
		tBool := types.Types[types.TBOOL]

		// Check 1: does the new length fit in the backing store?
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, info.K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Check 2: has the backing store not been used already?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(info.used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Check 3: are we growing from length 0 (nothing to copy over)?
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// All checks passed: use the zeroed stack backing store.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			// Tell liveness the store is (re)defined before zeroing it.
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, info.store, s.mem())
		}
		addr := s.addr(info.store)
		s.zero(info.store.Type(), addr)

		// Point the result at the backing store, with capacity K.
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, info.K)

		// Mark the backing store as handed out.
		s.assign(info.used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// Any failed check falls back to the regular growslice path.
		grow = growSlice
	}

	// Slow path: call the runtime to grow the slice.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	var r []*ssa.Value
	if info != nil && n.UseBuf {
		// Grow via the stack-buffer variant, passing the backing store.
		if et.HasPointers() && !info.usedStatic {
			// Zero the backing store in the entry block so the GC never
			// observes stale pointers in it, regardless of control flow.
			mem := s.defvars[s.f.Entry.ID][memVar]
			mem = s.f.Entry.NewValue1A(n.Pos(), ssa.OpVarDef, types.TypeMem, info.store, mem)
			addr := s.f.Entry.NewValue2A(n.Pos(), ssa.OpLocalAddr, types.NewPtr(info.store.Type()), info.store, s.sp, mem)
			mem = s.f.Entry.NewValue2I(n.Pos(), ssa.OpZero, types.TypeMem, info.store.Type().Size(), addr, mem)
			mem.Aux = info.store.Type()
			s.defvars[s.f.Entry.ID][memVar] = mem
			info.usedStatic = true
		}
		fn := ir.Syms.GrowsliceBuf
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// Result does not alias; the runtime may use the NoAlias
			// variant. NOTE(review): exact semantics live in the runtime's
			// growslice implementation — confirm there.
			fn = ir.Syms.GrowsliceBufNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr, s.addr(info.store), s.constInt(types.Types[types.TINT], info.K))
	} else {
		fn := ir.Syms.Growslice
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// See the NoAlias note above.
			fn = ir.Syms.GrowsliceNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)
	}

	// Decompose the grown slice returned by the runtime.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to build a new slice.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Store the new capacity and pointer back through the header.
		// (Length is stored below, on both paths.)
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Merge point: phi together ptr/len(/cap) from the fast and grow paths.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// The length changes on every append; store it back here.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments being appended.
	type argRec struct {
		// If store is true, v is the value to store. Otherwise v is the
		// address of the value to move.
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the arguments into the slice at indexes [l-nargs, l).
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// Drop the temporary tracking variables; they are no longer needed
	// in the current scope.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// Build the result slice (or nothing, for the in-place form).
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
4160
// move2heap lowers an OMOVE2HEAP expression: if the backing store of
// n.Slice currently points into this function's stack frame, copy it to
// the heap via a runtime moveSlice* call and return the relocated slice;
// otherwise return the slice unchanged.
//
// Generated shape:
//
//	p, l, c := slice
//	if p points into the current stack frame {
//	    p, l, c = runtime.moveSlice*(...)
//	}
//	result = slicemake(p, l, c)
func (s *state) move2heap(n *ir.MoveToHeapExpr) *ssa.Value {
	slice := s.expr(n.Slice)
	et := slice.Type.Elem()
	pt := types.NewPtr(et)

	info := s.getBackingStoreInfo(n)
	if info == nil {
		// The stack backing-store optimization was not applied to this
		// slice, so there is nothing that could need moving.
		return slice
	}

	// Decompose the input slice.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	moveBlock := s.f.NewBlock(ssa.BlockPlain)
	mergeBlock := s.f.NewBlock(ssa.BlockPlain)

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c

	// Test whether the data pointer lies within the current frame:
	// uintptr(p)-sp < callerSP-sp, using an unsigned compare so pointers
	// below sp also fail the test.
	sub := ssa.OpSub64
	less := ssa.OpLess64U
	if s.config.PtrSize == 4 {
		sub = ssa.OpSub32
		less = ssa.OpLess32U
	}
	callerSP := s.newValue1(ssa.OpGetCallerSP, types.Types[types.TUINTPTR], s.mem())
	frameSize := s.newValue2(sub, types.Types[types.TUINTPTR], callerSP, s.sp)
	pInt := s.newValue2(ssa.OpConvert, types.Types[types.TUINTPTR], p, s.mem())
	off := s.newValue2(sub, types.Types[types.TUINTPTR], pInt, s.sp)
	cond := s.newValue2(less, types.Types[types.TBOOL], off, frameSize)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cond)
	b.AddEdgeTo(moveBlock)
	b.AddEdgeTo(mergeBlock)

	// Data is in the frame: call the runtime to copy it to the heap.
	// The helper chosen depends on whether the elements contain pointers
	// and whether the capacity must be preserved.
	s.startBlock(moveBlock)
	var newSlice *ssa.Value
	if et.HasPointers() {
		typ := s.expr(n.RType)
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSlice, true, []*types.Type{slice.Type}, typ, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCap, true, []*types.Type{slice.Type}, typ, p, l)[0]
		}
	} else {
		// Pointer-free elements use the NoScan variants, keyed by element size.
		elemSize := s.constInt(types.Types[types.TUINTPTR], et.Size())
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoScan, true, []*types.Type{slice.Type}, elemSize, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCapNoScan, true, []*types.Type{slice.Type}, elemSize, p, l)[0]
		}
	}

	s.vars[ptrVar] = s.newValue1(ssa.OpSlicePtr, pt, newSlice)
	s.vars[lenVar] = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], newSlice)
	s.vars[capVar] = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], newSlice)
	b = s.endBlock()
	b.AddEdgeTo(mergeBlock)

	// Merge the moved and unmoved paths and rebuild the slice.
	s.startBlock(mergeBlock)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	c = s.variable(capVar, types.Types[types.TINT])
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	delete(s.vars, capVar)
	return s.newValue3(ssa.OpSliceMake, slice.Type, p, l, c)
}
4249
4250
// minMax converts an OMIN/OMAX builtin call into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// The builtin is variadic; fold pairwise left to right:
	// min(a, b, c) == min(min(a, b), c).
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings are not handled by the simple compare-and-pick
		// fallback below (float min/max has special NaN/signed-zero
		// semantics, and strings need a runtime comparison), so use a
		// hardware intrinsic where available, else a runtime helper.
		if typ.IsFloat() {
			// Architectures with a native float min/max operation.
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64, sys.S390X:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				// Select the op for the width/direction combination.
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// Fall back to a runtime helper keyed by type kind and direction.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// RISC-V with GORISCV64 >= 22 has dedicated 64-bit integer
		// min/max ops (presumably from the Zbb extension —
		// NOTE(review): confirm).
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic integer fallback: compare-and-pick via a ternary.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
4364
4365
4366 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4367
4368
4369 ternaryVar := ssaMarker("ternary")
4370
4371 bThen := s.f.NewBlock(ssa.BlockPlain)
4372 bElse := s.f.NewBlock(ssa.BlockPlain)
4373 bEnd := s.f.NewBlock(ssa.BlockPlain)
4374
4375 b := s.endBlock()
4376 b.Kind = ssa.BlockIf
4377 b.SetControl(cond)
4378 b.AddEdgeTo(bThen)
4379 b.AddEdgeTo(bElse)
4380
4381 s.startBlock(bThen)
4382 s.vars[ternaryVar] = x
4383 s.endBlock().AddEdgeTo(bEnd)
4384
4385 s.startBlock(bElse)
4386 s.vars[ternaryVar] = y
4387 s.endBlock().AddEdgeTo(bEnd)
4388
4389 s.startBlock(bEnd)
4390 r := s.variable(ternaryVar, x.Type)
4391 delete(s.vars, ternaryVar)
4392 return r
4393 }
4394
4395
4396
4397
4398
4399 func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
4400 switch cond.Op() {
4401 case ir.OANDAND:
4402 cond := cond.(*ir.LogicalExpr)
4403 mid := s.f.NewBlock(ssa.BlockPlain)
4404 s.stmtList(cond.Init())
4405 s.condBranch(cond.X, mid, no, max(likely, 0))
4406 s.startBlock(mid)
4407 s.condBranch(cond.Y, yes, no, likely)
4408 return
4409
4410
4411
4412
4413
4414
4415 case ir.OOROR:
4416 cond := cond.(*ir.LogicalExpr)
4417 mid := s.f.NewBlock(ssa.BlockPlain)
4418 s.stmtList(cond.Init())
4419 s.condBranch(cond.X, yes, mid, min(likely, 0))
4420 s.startBlock(mid)
4421 s.condBranch(cond.Y, yes, no, likely)
4422 return
4423
4424
4425
4426 case ir.ONOT:
4427 cond := cond.(*ir.UnaryExpr)
4428 s.stmtList(cond.Init())
4429 s.condBranch(cond.X, no, yes, -likely)
4430 return
4431 case ir.OCONVNOP:
4432 cond := cond.(*ir.ConvExpr)
4433 s.stmtList(cond.Init())
4434 s.condBranch(cond.X, yes, no, likely)
4435 return
4436 }
4437 c := s.expr(cond)
4438 b := s.endBlock()
4439 b.Kind = ssa.BlockIf
4440 b.SetControl(c)
4441 b.Likely = ssa.BranchPrediction(likely)
4442 b.AddEdgeTo(yes)
4443 b.AddEdgeTo(no)
4444 }
4445
// skipMask records which components of a slice header (pointer, length,
// capacity) an assignment is allowed to skip storing.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota
	skipLen
	skipCap
)
4453
4454
4455
4456
4457
4458
4459
// assign does left = right. right has already been evaluated to SSA; left
// has not. If deref is true, the assignment is left = *right (with right
// already nil-checked); deref with right == nil zeroes left instead. skip
// indicates which slice-header components may be skipped. Assignments that
// might partially overlap must use assignWhichMayOverlap directly.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap is like assign, but takes an extra mayOverlap flag
// indicating whether left and right might partially overlap in memory, in
// which case a deref-style copy must use an overlap-safe move.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		// Assignment to _ evaluates to nothing.
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// We're assigning to a field of an SSA-able struct value.
			// Build a new struct with the new value for the assigned field
			// and the old values for the others. For instance:
			//   type T struct {a, b, c int}
			//   var x T
			//   x.b = 5
			// becomes x = T{x.a, 5, x.c}.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Grab the old value of the structure.
			old := s.expr(left.X)

			// Make the new structure.
			new := s.newValue0(ssa.OpStructMake, t)

			// Add the fields as args, substituting right at the target index.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the new value to the base of the dot op.
			s.assign(left.X, new, false, 0)
			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()
			// We're assigning to an element of an SSA-able array: a[i] = v.
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index)
			if n == 0 {
				// The bounds check must fail. Might as well ignore the
				// actual index and just use zeros.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Rewrite to a = [1]{v} after bounds-checking the index.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain SSA-able variable: record the new value.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire local variable, emit an
	// OpVarDef so liveness analysis knows the variable is redefined.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// left is not SSA-able; compute its address and store through it.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// The Data field of reflect.SliceHeader/StringHeader has type
		// uintptr, but stores of pointers through it must still get a
		// write barrier; treat the store as unsafe-pointer typed so the
		// writebarrier pass inserts one.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Treat as a mem->mem move (or a zeroing when right is nil).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Treat as a store.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4571
4572
4573 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4574 switch {
4575 case t.IsInteger():
4576 switch t.Size() {
4577 case 1:
4578 return s.constInt8(t, 0)
4579 case 2:
4580 return s.constInt16(t, 0)
4581 case 4:
4582 return s.constInt32(t, 0)
4583 case 8:
4584 return s.constInt64(t, 0)
4585 default:
4586 s.Fatalf("bad sized integer type %v", t)
4587 }
4588 case t.IsFloat():
4589 switch t.Size() {
4590 case 4:
4591 return s.constFloat32(t, 0)
4592 case 8:
4593 return s.constFloat64(t, 0)
4594 default:
4595 s.Fatalf("bad sized float type %v", t)
4596 }
4597 case t.IsComplex():
4598 switch t.Size() {
4599 case 8:
4600 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4601 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4602 case 16:
4603 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4604 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4605 default:
4606 s.Fatalf("bad sized complex type %v", t)
4607 }
4608
4609 case t.IsString():
4610 return s.constEmptyString(t)
4611 case t.IsPtrShaped():
4612 return s.constNil(t)
4613 case t.IsBoolean():
4614 return s.constBool(false)
4615 case t.IsInterface():
4616 return s.constInterface(t)
4617 case t.IsSlice():
4618 return s.constSlice(t)
4619 case isStructNotSIMD(t):
4620 n := t.NumFields()
4621 v := s.entryNewValue0(ssa.OpStructMake, t)
4622 for i := 0; i < n; i++ {
4623 v.AddArg(s.zeroVal(t.FieldType(i)))
4624 }
4625 return v
4626 case t.IsArray():
4627 switch t.NumElem() {
4628 case 0:
4629 return s.entryNewValue0(ssa.OpArrayMake0, t)
4630 case 1:
4631 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4632 }
4633 case t.IsSIMD():
4634 return s.newValue0(ssa.OpZeroSIMD, t)
4635 }
4636 s.Fatalf("zero for type %v not implemented", t)
4637 return nil
4638 }
4639
// callKind classifies how a call expression is lowered.
type callKind int8

const (
	callNormal callKind = iota // ordinary call
	callDefer                  // defer f(...)
	callDeferStack             // defer using a stack-allocated record (NOTE(review): confirm against call lowering)
	callGo                     // go f(...)
	callTail                   // tail call
)
4649
// sfRtCallDef describes the runtime helper implementing one soft-float
// operation: the function's symbol and the Go kind of its result.
type sfRtCallDef struct {
	rtfn  *obj.LSym
	rtype types.Kind
}

// softFloatOps maps floating-point SSA ops to their soft-float runtime
// helpers. Populated by softfloatInit when the target uses soft floats.
var softFloatOps map[ssa.Op]sfRtCallDef
4656
// softfloatInit populates softFloatOps, mapping floating-point SSA ops to
// the runtime's software floating-point helpers for targets without
// hardware FP support.
//
// Several entries deliberately map to a "different" helper: Sub uses the
// add helper, Neq uses the eq helper, and Less/Leq use the gt/ge helpers.
// These are NOT typos — sfcall is expected to compensate (negating an
// operand, swapping argument order, or inverting the result).
// NOTE(review): confirm against sfcall's transformations below.
func softfloatInit() {
	softFloatOps = map[ssa.Op]sfRtCallDef{
		// Arithmetic. Sub is lowered via the add helper (see note above).
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		// Comparisons. Neq shares the eq helper; Less/Leq use the gt/ge
		// helpers (see note above).
		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		// Conversions between floats and integers, and between float widths.
		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4694
4695
4696
// sfcall rewrites the floating-point op into a call to the
// corresponding softfloat runtime routine, if one is registered in
// softFloatOps. It reports whether the rewrite was performed.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the unsigned integer type of the same
	// width; the softfloat routines take and return raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// a < b is computed as b > a (fgt/fge; see softfloatInit).
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// a - b is computed as a + (-b) (fadd; see softfloatInit).
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Pass float arguments to the runtime call as their integer
		// bit patterns.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Reinterpret the integer result bits as a float.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// a != b is computed as !(a == b) (feq; see softfloatInit).
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4740
4741
4742 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4743 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4744 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4745 return p0, p1
4746 }
4747
4748
// intrinsicCall converts a call to a recognized intrinsic function
// into the corresponding SSA operation, returning its value (or nil if
// the intrinsic produces no value).
func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
	v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
	if ssa.IntrinsicsDebug > 0 {
		x := v
		if x == nil {
			// The intrinsic produced no value; report the memory
			// state it left behind instead.
			x = s.mem()
		}
		if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
			// Report the underlying tuple-producing op, not the
			// selector wrapped around it.
			x = x.Args[0]
		}
		base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
	}
	return v
}
4763
4764
4765 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4766 args := make([]*ssa.Value, len(n.Args))
4767 for i, n := range n.Args {
4768 args[i] = s.expr(n)
4769 }
4770 return args
4771 }
4772
4773
4774
4775
4776
4777
4778
// openDeferRecord adds code to evaluate and store the function of an
// open-coded defer, and records the defer so openDeferExit can emit
// the call at function exit. Open-coded defers take no arguments and
// have no results (walked forms guarantee this; anything else is an
// internal error).
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun

	// Save the closure value to a stack slot so it is available at
	// function exit.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		// Only dynamic (non-PFUNC) callees need the saved closure at
		// exit; for a statically-known function the call is emitted
		// directly from its symbol (see openDeferExit).
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Set the bit for this defer in deferBits, both in the SSA
	// variable and in its stack-slot shadow at deferBitsAddr (kept in
	// sync in memory — presumably so it is accurate if a panic
	// occurs; confirm against the runtime's open-defer handling).
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4807
4808
4809
4810
4811
4812
// openDeferSave spills val (the closure of an open-coded defer, of
// type t) into a dedicated stack temporary and returns the address of
// that slot. The slot's VarDef/VarLive and address are materialized in
// the entry block so the slot is defined once and its address is
// available on every path. t must be SSA-able and contain pointers.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// FrameOffset temporarily holds the defer's index (its position
	// in s.openDefers) — NOTE(review): presumably replaced by a real
	// frame offset during stack layout; confirm.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// Emit the definition and address computation in the entry block,
	// even if we are currently in a later block.
	if s.curBlock.ID != s.f.Entry.ID {
		// We are not in the entry block, so splice the VarDef/VarLive
		// and LocalAddr onto the entry block's memory chain directly.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// We are in the entry block, so emit through the normal
		// current-memory helpers.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot must be zeroed on entry: it may be read (e.g. by the
	// runtime or at exit) on paths where this defer was never
	// reached, so stale garbage is not acceptable.
	temp.SetNeedzero(true)

	// Store the closure into the slot at the current program point.
	s.store(t, addrTemp, val)
	return addrTemp
}
4857
4858
4859
4860
4861
// openDeferExit generates SSA for the calls of the open-coded defers
// at function exit. Defers run in reverse order of recording; each is
// guarded by a test of its bit in deferBits, and its bit is cleared
// before the call so it cannot run twice.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Test for and run defers in reverse order.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Check whether this defer's bit is set; if not, skip the
		// call entirely.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)  // bit clear: skip
		b.AddEdgeTo(bCond) // bit set: run the deferred call
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit in the stack shadow of deferBits
		// before making the call, so it cannot be re-run.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		// Use the masked value for the remaining tests so earlier
		// clears stay visible.
		s.vars[deferBitsVar] = maskedval

		// Emit the deferred call: a closure call when the callee was
		// dynamic (r.closure holds the saved slot address), else a
		// static call through the function's symbol. Open-coded
		// defers have no arguments and no results.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the stack slot holding the saved closure alive across
		// the call.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4931
// callResult lowers call n of kind k and returns the value of its
// (single) result, or nil if there is none.
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4935
// callAddr lowers call n of kind k and returns the address of its
// (single) result.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4939
4940
4941
// call lowers call expression n according to kind k (normal, defer,
// stack-allocated defer, go, or tail call). If returnResultAddr is
// true, it returns the address of the call's (single) result rather
// than its value. deferExtra, if non-nil, is the extra argument passed
// to deferprocat.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target function symbol (static calls)
	var closure *ssa.Value   // pointer to closure to run (dynamic calls)
	var codeptr *ssa.Value   // pointer to target code (dynamic calls)
	var dextra *ssa.Value    // extra argument for deferprocat
	var rcvr *ssa.Value      // receiver to pass (interface calls)
	fn := n.Fun
	var ACArgs []*types.Type    // AuxCall argument types
	var ACResults []*types.Type // AuxCall result types
	var callArgs []*ssa.Value   // late-expansion call arguments (passed to the call op, not stored)

	callABI := s.f.ABIDefault

	// go/defer statements are walked into zero-arg, zero-result calls
	// before reaching here.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			// Direct call to a known function: use its symbol and
			// pick the correct ABI.
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// fn.Func may be nil for some compiler-generated
				// callees; those keep ABIDefault.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// Legacy path: honor the registerparams pragma on
				// imported or same-package functions.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// A deferred nil function must panic when the deferred
			// call runs, not at the defer statement, so skip the
			// check here for defers.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set (register) nil values */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // stack space used by the callee's arguments/results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a _defer struct on the stack and store the closure
		// into its fn field.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime deferprocStack with a pointer to the _defer
		// record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // one pointer argument
	} else {
		// Arguments are assembled in SP-offset order: any go/defer
		// bookkeeping args first, then the receiver, then the
		// ordinary arguments, then memory.
		argStart := base.Ctxt.Arch.FixedFrameSize

		if k != callNormal && k != callTail {
			// Closure argument to newproc/deferproc.
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface-typed argument for deferprocat
				// (two pointer words).
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Receiver for interface calls.
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Ordinary arguments.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() {
			ACArgs = append(ACArgs, p.Type)
		}

		// If this call is still in the entry block and the function
		// uses open-coded defers, split off a new block first —
		// NOTE(review): presumably to keep the entry block free for
		// the open-defer slot definitions (see openDeferSave);
		// confirm.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Build the call operation itself.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad: loading the code pointer from a closure must
			// not be subject to instrumentation that could clobber
			// arguments already placed for the call.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call; the receiver is described by params.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call shares the caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // call ops carry the callee's arg size
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)

	// Mark the KeepAlive variables live across the call.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Defer-creating calls end their block with BlockDefer, which has
	// two successors: the normal fallthrough and the function's shared
	// DeferReturn exit block.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn
		if r == nil {
			// Lazily create the single DeferReturn block shared by
			// all defers in this function.
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // second successor is always the DeferReturn block
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// No return value to produce.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
5175
5176
5177
// maybeNilCheckClosure inserts an explicit nil check of a closure
// pointer on platforms that need it. Per Go operator precedence the
// condition reads as: Wasm, or (AIX and not a go statement).
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		// NOTE(review): presumably these targets cannot rely on a
		// hardware fault when loading through a nil closure, so the
		// check must be explicit — confirm why callGo is excluded on
		// AIX.
		s.nilCheck(closure)
	}
}
5185
5186
5187
// getClosureAndRcvr evaluates an interface method selector fn and
// returns the address of the method's entry in the itab's Fun array
// (usable as a closure pointer) and the interface's data word (the
// receiver).
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	s.nilCheck(itab) // calling a method on a nil interface panics here
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
5197
5198
5199
5200 func etypesign(e types.Kind) int8 {
5201 switch e {
5202 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5203 return -1
5204 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5205 return +1
5206 }
5207 return 0
5208 }
5209
5210
5211
// addr converts the address of the expression n to SSA and returns it.
// n must not be an SSA-able value (see canSSA); asking for the address
// of one is an internal error.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset returns the address offset bytes past symbol lsym.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// OpAddr has no offset of its own, so add one if needed.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address is stored in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// Global variable: address of its link symbol.
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// Parameter slot; its address was pre-declared.
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT:
			// Result slot. isStmt is always true here; see the
			// OpLocalAddr construction for PAUTO above for the
			// auto-tmp distinction.
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// Address of a result of the previous call.
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure the address has the converted type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		// The assertion must have produced a fresh Load whose memory
		// is still current; return the address it loaded from.
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5323
5324
5325
5326 func (s *state) canSSA(n ir.Node) bool {
5327 if base.Flag.N != 0 {
5328 return false
5329 }
5330 for {
5331 nn := n
5332 if nn.Op() == ir.ODOT {
5333 nn := nn.(*ir.SelectorExpr)
5334 n = nn.X
5335 continue
5336 }
5337 if nn.Op() == ir.OINDEX {
5338 nn := nn.(*ir.IndexExpr)
5339 if nn.X.Type().IsArray() {
5340 n = nn.X
5341 continue
5342 }
5343 }
5344 break
5345 }
5346 if n.Op() != ir.ONAME {
5347 return false
5348 }
5349 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5350 }
5351
5352 func (s *state) canSSAName(name *ir.Name) bool {
5353 if name.Addrtaken() || !name.OnStack() {
5354 return false
5355 }
5356 switch name.Class {
5357 case ir.PPARAMOUT:
5358 if s.hasdefer {
5359
5360
5361
5362
5363
5364 return false
5365 }
5366 if s.cgoUnsafeArgs {
5367
5368
5369 return false
5370 }
5371 }
5372 return true
5373
5374 }
5375
5376
5377 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5378 p := s.expr(n)
5379 if bounded || n.NonNil() {
5380 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5381 s.f.Warnl(lineno, "removed nil check")
5382 }
5383 return p
5384 }
5385 p = s.nilCheck(p)
5386 return p
5387 }
5388
5389
5390
5391
5392
5393
5394 func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
5395 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
5396 return ptr
5397 }
5398 return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
5399 }
5400
5401
5402
5403
5404
5405
5406
// boundsCheck generates bounds checking code: it verifies
// 0 <= idx <[=] len (IsInBounds for index kinds, IsSliceInBounds
// otherwise), branching to a panic block on failure. kind selects the
// runtime panic. It returns the extended (and, under -spectre=index,
// masked) index.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// No check necessary: either the index is compiler-generated
		// (bounded) or bounds checking is disabled with -B. Note that
		// bounded indexes also skip Spectre masking — they are
		// compiler-generated, not attacker-influenced. For
		// user-written guarded accesses (bounded == false), the check
		// and mask are emitted below and the prove pass may later
		// remove the check while keeping the mask.
		//
		//
		//
		//
		//
		//
		//
		//
		//
		//
		//
		//
		//
		//
		//
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Use the unsigned variant of the panic kind for unsigned
		// index types.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// On Wasm the panic is emitted as an ordinary runtime call
		// rather than a PanicBounds block.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// Under -spectre=index, mask the index so that a speculated
	// out-of-bounds access cannot read arbitrary memory.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5493
5494
// check branches to a call of runtime panic function fn if boolean cmp
// is false. Panic blocks are cached per (panic function, file, line)
// in s.panics, so repeated checks at the same position share one call
// site.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call itself ends the block (rtcall with
		// returns=false marks it as an exit block).
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5517
5518 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5519 needcheck := true
5520 switch b.Op {
5521 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5522 if b.AuxInt != 0 {
5523 needcheck = false
5524 }
5525 }
5526 if needcheck {
5527
5528 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5529 s.check(cmp, ir.Syms.Panicdivide)
5530 }
5531 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5532 }
5533
5534
5535
5536
5537
// rtcall issues a call to the given runtime function fn with the
// listed args, and returns its results of the given types. If returns
// is false, the call does not return (e.g. a panic helper) and the
// current block becomes an exit block.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// off accumulates the argument/result frame size that is recorded
	// in call.AuxInt below.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call as a late-expansion static call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block; a non-returning runtime call exits the
		// function and can have no results.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results, accounting for their sizes in off.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5589
5590
// storeType stores value right of type t to the address left, using
// the scalar/pointer decomposition when a write barrier may be needed.
// skip indicates parts (ptr/len/cap) that need not be stored;
// leftIsStmt marks the store as a statement boundary.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not need a write barrier; store the whole value at once.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so write-barrier stores for pointer
	// fields can be grouped together and scalar values don't need to
	// be live across the write barrier call.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5610
5611
// storeTypeScalars stores the non-pointer parts of value right of type
// t to the address left; pointer parts are handled separately by
// storeTypePtrs. skip suppresses the len and/or cap stores of strings
// and slices.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex() || t.IsSIMD():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap memory need no write barrier,
			// so store them here as scalars.
			s.store(t, left, right)
		}
		// Otherwise there are no scalar fields; the pointer itself is
		// stored by storeTypePtrs.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store only the length word; the data pointer is handled by
		// storeTypePtrs.
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// Store the len and cap words; the data pointer is handled by
		// storeTypePtrs.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case isStructNotSIMD(t):
		// Recurse field by field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5659
5660
// storeTypePtrs stores the pointer parts of value right of type t to
// the address left; the scalar parts are handled by storeTypeScalars.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap memory were already stored as
			// scalars by storeTypeScalars.
			break
		}
		s.store(t, left, right)
	case t.IsString():
		// Store the data pointer word.
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		// Store the data pointer word.
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// Store the data word; the itab was stored by storeTypeScalars.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case isStructNotSIMD(t):
		// Recurse into the fields that contain pointers.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5699
5700
5701 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5702 var a *ssa.Value
5703 if !ssa.CanSSA(t) {
5704 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5705 } else {
5706 a = s.expr(n)
5707 }
5708 return a
5709 }
5710
5711
5712
5713
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of
// the result. i, j, k may be nil, in which case they take their
// default values. v may be a slice, a string, or a pointer to an
// array. bounded suppresses bounds checks for compiler-generated
// indexes.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values for omitted indexes.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Check the max index first (against cap, or len for non-slices),
	// then check the others against the already-validated upper index,
	// so each index is nonnegative and <= its upper bound.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new
	// slice. For strings the capacity of the result is unimportant,
	// but rcap is kept equal to rlen so the masking below still
	// prevents reading uninitialized memory.
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary when slicing from index 0.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice:
	//
	//	rptr = ptr + (mask(rcap) & (i * stride))
	//
	// where mask(x) (OpSlicemask) is 0 if x == 0 and all ones
	// otherwise, and stride is the element width. The masking ensures
	// a zero-capacity result does not get a pointer one past the
	// object (which could point into the next object in memory); the
	// pointer cannot simply be nil because that would create a nil
	// slice or string.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta is the number of bytes to offset ptr by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// Zero the delta when the resulting capacity is zero (see the
	// masking explanation above).
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5826
// u642fcvtTab holds the SSA opcodes needed by uint64Tofloat to
// convert a uint64 to a float of a particular width.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value // makes the integer constant 1 of the source type
}

// u64_f64 converts uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 converts uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5851
// uint64Tofloat64 converts x, a uint64 of Go type ft, to a float64 of Go type tt.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}
5855
// uint64Tofloat32 converts x, a uint64 of Go type ft, to a float32 of Go type tt.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5859
// uint64Tofloat converts a uint64 value x to a float, using the ops in cvttab.
// n is the IR node the result is stored under (for the phi merge).
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// if x >= 0 {
	//    result = (floatY) x
	// } else {
	// 	  y = uintX(x) ; y = x & 1
	// 	  z = uintX(x) ; z = z >> 1
	// 	  z = z | y
	// 	  result = floatY(z)
	// 	  result = result + result
	// }
	//
	// What's going on: a large 64-bit "unsigned" value looks like a
	// negative number to the hardware's signed integer-to-float
	// conversion. Because the float mantissa cannot hold all 64 bits
	// anyway, we do an unsigned right shift (divide by two), convert,
	// and double. Before shifting, though, we must be sure we do not
	// lose a low-order "1" that would have made the difference in the
	// rounding of the result; so we preserve the LSB and OR (not ADD)
	// it back in after the shift. The case that matters is when the
	// discarded bits would round up and the 1 cannot be lost, else the
	// value would round down if the LSB of the candidate mantissa is 0.
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	// Branch on the sign of x (as a signed integer).
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x "nonnegative": direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// x "negative" (high bit set): halve, convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one) // preserved LSB
	z := s.newValue2(cvttab.rsh, ft, x, one) // x >> 1 (unsigned)
	z = s.newValue2(cvttab.or, ft, z, y)     // OR the LSB back in
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a) // double
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge the two results.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5919
// u322fcvtTab holds the SSA opcodes needed by uint32Tofloat to
// convert a uint32 to a float of a particular width.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 converts uint32 -> float64.
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 converts uint32 -> float32.
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5933
// uint32Tofloat64 converts x, a uint32 of Go type ft, to a float64 of Go type tt.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}
5937
// uint32Tofloat32 converts x, a uint32 of Go type ft, to a float32 of Go type tt.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5941
// uint32Tofloat converts a uint32 value x to a float, using the ops in cvttab.
// n is the IR node the result is stored under (for the phi merge).
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// if x >= 0 {
	// 	result = floatY(x)
	// } else {
	// 	result = floatY(float64(x) + (1<<32))
	// }
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// High bit clear: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// High bit set: the signed conversion comes out 2^32 too low;
	// add it back in float64, then narrow if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge the two results.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5979
5980
// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
// x is the (pointer-shaped) map or channel value.
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)") // must use runtime call
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)") // must use runtime call
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)") // cap(map) is not supported
	}
	// if n == nil {
	//   return 0
	// } else {
	//   // len
	//   return *((*int)n)
	//   // cap
	//   return *(((*int)n)+1)
	// }
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if n.X.Type().IsMap() {
			// length is stored in the first field of the map header;
			// it may need conversion to the result's int type.
			loadType := reflectdata.MapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// length is stored in the first word of the channel.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// capacity is stored in the second word of the channel.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge the two results.
	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
6047
// f2uCvtTab holds the opcodes and constants needed by floatToUint to
// convert a float to an unsigned integer of a particular width.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue func(*state, *types.Type, float64) *ssa.Value
	intValue func(*state, *types.Type, int64) *ssa.Value
	cutoff uint64 // 1 << (result width - 1): smallest value not representable via signed conversion
}

// f32_u64 converts float32 -> uint64.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 converts float64 -> uint64.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 converts float32 -> uint32.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 converts float64 -> uint32.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
6094
// float32ToUint64 converts x, a float32 of Go type ft, to a uint64 of Go type tt.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts x, a float64 of Go type ft, to a uint64 of Go type tt.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts x, a float32 of Go type ft, to a uint32 of Go type tt.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts x, a float64 of Go type ft, to a uint32 of Go type tt.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
6109
// floatToUint converts a float value x (type ft) to an unsigned integer
// (type tt), using the ops and constants in cvttab.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// cutoff:=1<<(intY_Size-1)
	// if x < floatX(cutoff) {
	// 	result = uintY(x)
	// 	if x < 0 { result = 0 }   // only under the "new" conversion semantics (see below)
	// } else {
	// 	y = x - floatX(cutoff)
	// 	z = uintY(y)
	// 	result = z | -(cutoff)
	// }
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	var bThen, bZero *ssa.Block
	// Debug-hash gate for the newer conversion behavior, which clamps
	// negative inputs to zero via the bZero block below.
	// NOTE(review): gating semantics inferred from the bZero path —
	// confirm against base.ConvertHash documentation.
	newConversion := base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil)
	if newConversion {
		bZero = s.f.NewBlock(ssa.BlockPlain)
		bThen = s.f.NewBlock(ssa.BlockIf) // needs a second branch: x < 0?
	} else {
		bThen = s.f.NewBlock(ssa.BlockPlain)
	}

	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x < cutoff: plain signed conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValueOrSfCall1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0

	if newConversion {
		// If x < 0, use 0 instead of the converted value.
		cmpz := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cvttab.floatValue(s, ft, 0.0))
		s.endBlock()
		bThen.SetControl(cmpz)
		bThen.AddEdgeTo(bZero)
		bThen.Likely = ssa.BranchUnlikely
		bThen.AddEdgeTo(bAfter)

		s.startBlock(bZero)
		s.vars[n] = cvttab.intValue(s, tt, 0)
		s.endBlock()
		bZero.AddEdgeTo(bAfter)
	} else {
		s.endBlock()
		bThen.AddEdgeTo(bAfter)
	}

	// x >= cutoff: subtract cutoff, convert, then OR the sign bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValueOrSfCall2(cvttab.subf, ft, x, cutoff)
	y = s.newValueOrSfCall1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge the results.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6179
6180
6181
6182
// dottype generates SSA for a type assertion node.
// commaok indicates whether to panic or return a bool.
// If commaok is false, resok will be nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}

	if n.UseNilPanic {
		if commaok {
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && commaok == true")
		}
		if n.Type().IsInterface() {
			// Currently we do not expect the compiler to emit type asserts
			// with the UseNilPanic flag that assert to an interface type.
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && Type().IsInterface() == true")
		}
		// Rebuild the interface with a nil-checked type word, so a nil
		// input panics here rather than failing the assertion below.
		typs := s.f.Config.Types
		iface = s.newValue2(
			ssa.OpIMake,
			iface.Type,
			s.nilCheck(s.newValue1(ssa.OpITab, typs.BytePtr, iface)),
			s.newValue1(ssa.OpIData, typs.BytePtr, iface),
		)
	}

	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
6211
// dynamicDottype generates SSA for a dynamic type assertion node
// (one whose target type is only known at run time).
// commaok indicates whether to panic or return a bool.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// Load the target type word out of the itab rather than
		// evaluating n.RType separately.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
6229
6230
6231
6232
6233
6234
6235
6236
6237
6238 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
6239 typs := s.f.Config.Types
6240 byteptr := typs.BytePtr
6241 if dst.IsInterface() {
6242 if dst.IsEmptyInterface() {
6243
6244
6245 if base.Debug.TypeAssert > 0 {
6246 base.WarnfAt(pos, "type assertion inlined")
6247 }
6248
6249
6250 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6251
6252 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6253
6254 if src.IsEmptyInterface() && commaok {
6255
6256 return iface, cond
6257 }
6258
6259
6260 b := s.endBlock()
6261 b.Kind = ssa.BlockIf
6262 b.SetControl(cond)
6263 b.Likely = ssa.BranchLikely
6264 bOk := s.f.NewBlock(ssa.BlockPlain)
6265 bFail := s.f.NewBlock(ssa.BlockPlain)
6266 b.AddEdgeTo(bOk)
6267 b.AddEdgeTo(bFail)
6268
6269 if !commaok {
6270
6271 s.startBlock(bFail)
6272 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6273
6274
6275 s.startBlock(bOk)
6276 if src.IsEmptyInterface() {
6277 res = iface
6278 return
6279 }
6280
6281 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6282 typ := s.load(byteptr, off)
6283 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6284 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6285 return
6286 }
6287
6288 s.startBlock(bOk)
6289
6290
6291 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6292 s.vars[typVar] = s.load(byteptr, off)
6293 s.endBlock()
6294
6295
6296 s.startBlock(bFail)
6297 s.vars[typVar] = itab
6298 s.endBlock()
6299
6300
6301 bEnd := s.f.NewBlock(ssa.BlockPlain)
6302 bOk.AddEdgeTo(bEnd)
6303 bFail.AddEdgeTo(bEnd)
6304 s.startBlock(bEnd)
6305 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6306 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6307 resok = cond
6308 delete(s.vars, typVar)
6309 return
6310 }
6311
6312 if base.Debug.TypeAssert > 0 {
6313 base.WarnfAt(pos, "type assertion not inlined")
6314 }
6315
6316 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6317 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6318
6319
6320 bNil := s.f.NewBlock(ssa.BlockPlain)
6321 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6322 bMerge := s.f.NewBlock(ssa.BlockPlain)
6323 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6324 b := s.endBlock()
6325 b.Kind = ssa.BlockIf
6326 b.SetControl(cond)
6327 b.Likely = ssa.BranchLikely
6328 b.AddEdgeTo(bNonNil)
6329 b.AddEdgeTo(bNil)
6330
6331 s.startBlock(bNil)
6332 if commaok {
6333 s.vars[typVar] = itab
6334 b := s.endBlock()
6335 b.AddEdgeTo(bMerge)
6336 } else {
6337
6338 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6339 }
6340
6341
6342 s.startBlock(bNonNil)
6343 typ := itab
6344 if !src.IsEmptyInterface() {
6345 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6346 }
6347
6348
6349 var d *ssa.Value
6350 if descriptor != nil {
6351 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6352 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
6353
6354
6355 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
6356 s.Fatalf("atomic load not available")
6357 }
6358
6359 var mul, and, add, zext ssa.Op
6360 if s.config.PtrSize == 4 {
6361 mul = ssa.OpMul32
6362 and = ssa.OpAnd32
6363 add = ssa.OpAdd32
6364 zext = ssa.OpCopy
6365 } else {
6366 mul = ssa.OpMul64
6367 and = ssa.OpAnd64
6368 add = ssa.OpAdd64
6369 zext = ssa.OpZeroExt32to64
6370 }
6371
6372 loopHead := s.f.NewBlock(ssa.BlockPlain)
6373 loopBody := s.f.NewBlock(ssa.BlockPlain)
6374 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6375 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6376
6377
6378
6379 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6380 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6381 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6382
6383
6384 var hash *ssa.Value
6385 if src.IsEmptyInterface() {
6386 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6387 } else {
6388 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6389 }
6390 hash = s.newValue1(zext, typs.Uintptr, hash)
6391 s.vars[hashVar] = hash
6392
6393 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6394
6395 b := s.endBlock()
6396 b.AddEdgeTo(loopHead)
6397
6398
6399
6400 s.startBlock(loopHead)
6401 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6402 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6403 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6404 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6405
6406 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6407
6408
6409
6410 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6411 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6412 b = s.endBlock()
6413 b.Kind = ssa.BlockIf
6414 b.SetControl(cmp1)
6415 b.AddEdgeTo(cacheHit)
6416 b.AddEdgeTo(loopBody)
6417
6418
6419
6420 s.startBlock(loopBody)
6421 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6422 b = s.endBlock()
6423 b.Kind = ssa.BlockIf
6424 b.SetControl(cmp2)
6425 b.AddEdgeTo(cacheMiss)
6426 b.AddEdgeTo(loopHead)
6427
6428
6429
6430 s.startBlock(cacheHit)
6431 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6432 s.vars[typVar] = eItab
6433 b = s.endBlock()
6434 b.AddEdgeTo(bMerge)
6435
6436
6437 s.startBlock(cacheMiss)
6438 }
6439 }
6440
6441
6442 if descriptor != nil {
6443 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6444 } else {
6445 var fn *obj.LSym
6446 if commaok {
6447 fn = ir.Syms.AssertE2I2
6448 } else {
6449 fn = ir.Syms.AssertE2I
6450 }
6451 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6452 }
6453 s.vars[typVar] = itab
6454 b = s.endBlock()
6455 b.AddEdgeTo(bMerge)
6456
6457
6458 s.startBlock(bMerge)
6459 itab = s.variable(typVar, byteptr)
6460 var ok *ssa.Value
6461 if commaok {
6462 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6463 }
6464 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6465 }
6466
6467 if base.Debug.TypeAssert > 0 {
6468 base.WarnfAt(pos, "type assertion inlined")
6469 }
6470
6471
6472 direct := types.IsDirectIface(dst)
6473 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6474 if base.Debug.TypeAssert > 0 {
6475 base.WarnfAt(pos, "type assertion inlined")
6476 }
6477 var wantedFirstWord *ssa.Value
6478 if src.IsEmptyInterface() {
6479
6480 wantedFirstWord = target
6481 } else {
6482
6483 wantedFirstWord = targetItab
6484 }
6485
6486 var tmp ir.Node
6487 var addr *ssa.Value
6488 if commaok && !ssa.CanSSA(dst) {
6489
6490
6491 tmp, addr = s.temp(pos, dst)
6492 }
6493
6494 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6495 b := s.endBlock()
6496 b.Kind = ssa.BlockIf
6497 b.SetControl(cond)
6498 b.Likely = ssa.BranchLikely
6499
6500 bOk := s.f.NewBlock(ssa.BlockPlain)
6501 bFail := s.f.NewBlock(ssa.BlockPlain)
6502 b.AddEdgeTo(bOk)
6503 b.AddEdgeTo(bFail)
6504
6505 if !commaok {
6506
6507 s.startBlock(bFail)
6508 taddr := source
6509 if taddr == nil {
6510 taddr = s.reflectType(src)
6511 }
6512 if src.IsEmptyInterface() {
6513 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6514 } else {
6515 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6516 }
6517
6518
6519 s.startBlock(bOk)
6520 if direct {
6521 return s.newValue1(ssa.OpIData, dst, iface), nil
6522 }
6523 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6524 return s.load(dst, p), nil
6525 }
6526
6527
6528
6529 bEnd := s.f.NewBlock(ssa.BlockPlain)
6530
6531
6532 valVar := ssaMarker("val")
6533
6534
6535 s.startBlock(bOk)
6536 if tmp == nil {
6537 if direct {
6538 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6539 } else {
6540 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6541 s.vars[valVar] = s.load(dst, p)
6542 }
6543 } else {
6544 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6545 s.move(dst, addr, p)
6546 }
6547 s.vars[okVar] = s.constBool(true)
6548 s.endBlock()
6549 bOk.AddEdgeTo(bEnd)
6550
6551
6552 s.startBlock(bFail)
6553 if tmp == nil {
6554 s.vars[valVar] = s.zeroVal(dst)
6555 } else {
6556 s.zero(dst, addr)
6557 }
6558 s.vars[okVar] = s.constBool(false)
6559 s.endBlock()
6560 bFail.AddEdgeTo(bEnd)
6561
6562
6563 s.startBlock(bEnd)
6564 if tmp == nil {
6565 res = s.variable(valVar, dst)
6566 delete(s.vars, valVar)
6567 } else {
6568 res = s.load(dst, addr)
6569 }
6570 resok = s.variable(okVar, types.Types[types.TBOOL])
6571 delete(s.vars, okVar)
6572 return res, resok
6573 }
6574
6575
// temp allocates a temp of type t at position pos and returns it
// together with its address.
func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
	tmp := typecheck.TempAt(pos, s.curfn, t)
	if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
		// Emit a VarDef to mark the start of the temp's lifetime,
		// so liveness (and stack-slot merging) can track it.
		s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
	}
	addr := s.addr(tmp)
	return tmp, addr
}
6584
6585
// variable returns the value of a variable n at the current location.
func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
	v := s.vars[n]
	if v != nil {
		return v
	}
	v = s.fwdVars[n]
	if v != nil {
		return v
	}

	if s.curBlock == s.f.Entry {
		// No variable should be live at entry.
		s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
	}

	// Make a FwdRef, which records a value that's live on block input.
	// We'll find the matching definition as part of insertPhis.
	v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
	s.fwdVars[n] = v
	if n.Op() == ir.ONAME {
		// Record the FwdRef under the named variable for debug info.
		s.addNamedValue(n.(*ir.Name), v)
	}
	return v
}
6609
// mem returns the current memory state as an SSA value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6613
6614 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6615 if n.Class == ir.Pxxx {
6616
6617 return
6618 }
6619 if ir.IsAutoTmp(n) {
6620
6621 return
6622 }
6623 if n.Class == ir.PPARAMOUT {
6624
6625
6626 return
6627 }
6628 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6629 values, ok := s.f.NamedValues[loc]
6630 if !ok {
6631 s.f.Names = append(s.f.Names, &loc)
6632 s.f.CanonicalLocalSlots[loc] = &loc
6633 }
6634 s.f.NamedValues[loc] = append(values, v)
6635 }
6636
6637
// A Branch is an unresolved branch: a branch instruction whose
// target block has not yet been assigned a PC (see State.Br).
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6642
6643
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump table blocks we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	// maxarg tracks the largest argument frame size of calls made by
	// the function (assumption based on name/usage — confirm in genssa).
	maxarg int64

	// livenessMap maps values to liveness indices, produced by
	// liveness.Compute.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for
	// which we need to spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of
	// instructions within a single block sharing the same line number.
	// Used to move statement marks to the beginning of a run.
	lineRunStart *obj.Prog

	// wasm: the number of values on the WebAssembly stack, used as a safeguard.
	OnWasmStackSkipped int
}
6677
// FuncInfo returns the FuncInfo of the function being assembled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6681
6682
// Prog appends a new Prog.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float a statement start to the beginning of any same-line run:
	// if p begins a new line, it starts a run; otherwise, if p carries
	// a statement mark, move that mark to the run's first instruction.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6698
6699
// Pc returns the current Prog (the next instruction slot to be filled).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6703
6704
// SetPos sets the current source position.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6708
6709
6710
6711
// Br emits a single branch instruction and returns the instruction.
// Not all architectures need the returned instruction, but otherwise
// the boilerplate is common to all.
func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
	p := s.Prog(op)
	p.To.Type = obj.TYPE_BRANCH
	// Remember the branch so its target PC can be patched in later,
	// once all block addresses are known.
	s.Branches = append(s.Branches, Branch{P: p, B: target})
	return p
}
6718
6719
6720
6721
6722
6723
// DebugFriendlySetPosFrom adjusts Pos.IsStmt subject to heuristics
// that reduce "jumpy" line number churn when debugging.
// Spill/fill/copy instructions from the register allocator,
// phi functions, and instructions with a no-pos position
// are examples of instructions that can cause churn.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also convert default IsStmt to NotStmt; only
			// explicit statement boundaries should appear
			// in the generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// If s.pp.Pos is already a statement mark on the
					// same file and line, keep it: overwriting it
					// with a NotStmt position for the same line
					// would lose the statement boundary.
					// NOTE(review): rationale inferred from the
					// control flow here — confirm against the
					// debugger line-stepping tests.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v, but copy the
				// statement mark from State.
			}
			s.SetPos(p)
		} else {
			// No position on v: carry the current position
			// forward, but without a statement mark.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6762
6763
// emitArgInfo emits the argument-info symbol (stack locations of
// arguments, for traceback) of f, plus a FUNCDATA record pointing at
// it. Functions with no receiver and no parameters need none.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a funcdata pointing at the arg info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6781
6782
// EmitArgInfo generates the argument info (locations on stack) of f
// for traceback, encoded using the internal/abi TraceArgs* byte
// stream format, and returns the generated symbol.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
	x.Align = 1 // byte stream, no alignment needed

	// The encoding below emits, per component, an (offset, size) byte
	// pair, bracketing aggregates with TraceArgsStartAgg/EndAgg and
	// truncating with TraceArgsDotdotdot when limits are exceeded.
	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is decomposed into components.
	isAggregate := func(t *types.Type) bool {
		return isStructNotSIMD(t) || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // next write offset in x's data
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate arg/field/element of size sz at offset.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType writes the info of t at offset baseOffset.
	// It returns whether to continue visiting (false once the
	// component limit has been reached).
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Real and imaginary parts.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // zero-length array still counts as one component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case isStructNotSIMD(t):
			if t.NumFields() == 0 {
				n++ // empty struct still counts as one component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the dictionary argument of an instantiated generic
		// function - it is implicit and the user doesn't need to see it.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6887
6888
// emitWrappedFuncInfo emits the wrap-info symbol (a relative reference
// to the function this one wraps) plus a FUNCDATA record pointing at
// it. It does nothing if this function wraps nothing.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Skipped in linkshared mode; presumably the relative
		// reference (SymPtrOff below) cannot reach a symbol in
		// another shared object — TODO confirm.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym) // 32-bit relative reference to the wrapped function
		x.Set(obj.AttrContentAddressable, true)
		x.Align = 4
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a funcdata pointing at the wrap info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6916
6917
6918 func genssa(f *ssa.Func, pp *objw.Progs) {
6919 var s State
6920 s.ABI = f.OwnAux.Fn.ABI()
6921
6922 e := f.Frontend().(*ssafn)
6923
6924 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6925
6926 var lv *liveness.Liveness
6927 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6928 emitArgInfo(e, f, pp)
6929 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6930
6931 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6932 if openDeferInfo != nil {
6933
6934
6935 p := pp.Prog(obj.AFUNCDATA)
6936 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6937 p.To.Type = obj.TYPE_MEM
6938 p.To.Name = obj.NAME_EXTERN
6939 p.To.Sym = openDeferInfo
6940 }
6941
6942 emitWrappedFuncInfo(e, pp)
6943
6944
6945 s.bstart = make([]*obj.Prog, f.NumBlocks())
6946 s.pp = pp
6947 var progToValue map[*obj.Prog]*ssa.Value
6948 var progToBlock map[*obj.Prog]*ssa.Block
6949 var valueToProgAfter []*obj.Prog
6950 if gatherPrintInfo {
6951 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6952 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6953 f.Logf("genssa %s\n", f.Name)
6954 progToBlock[s.pp.Next] = f.Blocks[0]
6955 }
6956
6957 if base.Ctxt.Flag_locationlists {
6958 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6959 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
6960 }
6961 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
6962 clear(valueToProgAfter)
6963 }
6964
6965
6966
6967 firstPos := src.NoXPos
6968 for _, v := range f.Entry.Values {
6969 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6970 firstPos = v.Pos
6971 v.Pos = firstPos.WithDefaultStmt()
6972 break
6973 }
6974 }
6975
6976
6977
6978
6979 var inlMarks map[*obj.Prog]int32
6980 var inlMarkList []*obj.Prog
6981
6982
6983
6984 var inlMarksByPos map[src.XPos][]*obj.Prog
6985
6986 var argLiveIdx int = -1
6987
6988
6989
6990
6991
6992 var hotAlign, hotRequire int64
6993
6994 if base.Debug.AlignHot > 0 {
6995 switch base.Ctxt.Arch.Name {
6996
6997
6998
6999
7000
7001 case "amd64", "386":
7002
7003
7004
7005 hotAlign = 64
7006 hotRequire = 31
7007 }
7008 }
7009
7010
7011 for i, b := range f.Blocks {
7012
7013 s.lineRunStart = nil
7014 s.SetPos(s.pp.Pos.WithNotStmt())
7015
7016 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
7017
7018
7019
7020
7021
7022 p := s.pp.Prog(obj.APCALIGNMAX)
7023 p.From.SetConst(hotAlign)
7024 p.To.SetConst(hotRequire)
7025 }
7026
7027 s.bstart[b.ID] = s.pp.Next
7028
7029 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7030 argLiveIdx = idx
7031 p := s.pp.Prog(obj.APCDATA)
7032 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7033 p.To.SetConst(int64(idx))
7034 }
7035
7036
7037 Arch.SSAMarkMoves(&s, b)
7038 for _, v := range b.Values {
7039 x := s.pp.Next
7040 s.DebugFriendlySetPosFrom(v)
7041
7042 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7043 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7044 }
7045
7046 switch v.Op {
7047 case ssa.OpInitMem:
7048
7049 case ssa.OpArg:
7050
7051 case ssa.OpSP, ssa.OpSB:
7052
7053 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7054
7055 case ssa.OpGetG:
7056
7057
7058 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7059
7060 case ssa.OpPhi:
7061 CheckLoweredPhi(v)
7062 case ssa.OpConvert:
7063
7064 if v.Args[0].Reg() != v.Reg() {
7065 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7066 }
7067 case ssa.OpInlMark:
7068 p := Arch.Ginsnop(s.pp)
7069 if inlMarks == nil {
7070 inlMarks = map[*obj.Prog]int32{}
7071 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7072 }
7073 inlMarks[p] = v.AuxInt32()
7074 inlMarkList = append(inlMarkList, p)
7075 pos := v.Pos.AtColumn1()
7076 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7077 firstPos = src.NoXPos
7078
7079 default:
7080
7081 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7082 s.SetPos(firstPos)
7083 firstPos = src.NoXPos
7084 }
7085
7086
7087 s.pp.NextLive = s.livenessMap.Get(v)
7088 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7089
7090
7091 Arch.SSAGenValue(&s, v)
7092 }
7093
7094 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7095 argLiveIdx = idx
7096 p := s.pp.Prog(obj.APCDATA)
7097 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7098 p.To.SetConst(int64(idx))
7099 }
7100
7101 if base.Ctxt.Flag_locationlists {
7102 valueToProgAfter[v.ID] = s.pp.Next
7103 }
7104
7105 if gatherPrintInfo {
7106 for ; x != s.pp.Next; x = x.Link {
7107 progToValue[x] = v
7108 }
7109 }
7110 }
7111
7112 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7113 p := Arch.Ginsnop(s.pp)
7114 p.Pos = p.Pos.WithIsStmt()
7115 if b.Pos == src.NoXPos {
7116 b.Pos = p.Pos
7117 if b.Pos == src.NoXPos {
7118 b.Pos = s.pp.Text.Pos
7119 }
7120 }
7121 b.Pos = b.Pos.WithBogusLine()
7122 }
7123
7124
7125
7126
7127
7128 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7129
7130
7131 var next *ssa.Block
7132 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
7133
7134
7135
7136
7137 next = f.Blocks[i+1]
7138 }
7139 x := s.pp.Next
7140 s.SetPos(b.Pos)
7141 Arch.SSAGenBlock(&s, b, next)
7142 if gatherPrintInfo {
7143 for ; x != s.pp.Next; x = x.Link {
7144 progToBlock[x] = b
7145 }
7146 }
7147 }
7148 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
7149
7150
7151
7152
7153 Arch.Ginsnop(s.pp)
7154 }
7155 if openDeferInfo != nil {
7156
7157
7158
7159
7160
7161
7162
7163
7164 s.pp.NextLive = s.livenessMap.DeferReturn
7165 p := s.pp.Prog(obj.ACALL)
7166 p.To.Type = obj.TYPE_MEM
7167 p.To.Name = obj.NAME_EXTERN
7168 p.To.Sym = ir.Syms.Deferreturn
7169
7170
7171
7172
7173
7174 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7175 n := o.Name
7176 rts, offs := o.RegisterTypesAndOffsets()
7177 for i := range o.Registers {
7178 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7179 }
7180 }
7181
7182 s.pp.Prog(obj.ARET)
7183 }
7184
7185 if inlMarks != nil {
7186 hasCall := false
7187
7188
7189
7190
7191 for p := s.pp.Text; p != nil; p = p.Link {
7192 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
7193 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
7194
7195
7196
7197
7198
7199 continue
7200 }
7201 if _, ok := inlMarks[p]; ok {
7202
7203
7204 continue
7205 }
7206 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7207 hasCall = true
7208 }
7209 pos := p.Pos.AtColumn1()
7210 marks := inlMarksByPos[pos]
7211 if len(marks) == 0 {
7212 continue
7213 }
7214 for _, m := range marks {
7215
7216
7217
7218 p.Pos = p.Pos.WithIsStmt()
7219 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
7220
7221 m.As = obj.ANOP
7222 m.Pos = src.NoXPos
7223 m.From = obj.Addr{}
7224 m.To = obj.Addr{}
7225 }
7226 delete(inlMarksByPos, pos)
7227 }
7228
7229 for _, p := range inlMarkList {
7230 if p.As != obj.ANOP {
7231 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7232 }
7233 }
7234
7235 if e.stksize == 0 && !hasCall {
7236
7237
7238
7239
7240
7241
7242 for p := s.pp.Text; p != nil; p = p.Link {
7243 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7244 continue
7245 }
7246 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7247
7248 nop := Arch.Ginsnop(s.pp)
7249 nop.Pos = e.curfn.Pos().WithIsStmt()
7250
7251
7252
7253
7254
7255 for x := s.pp.Text; x != nil; x = x.Link {
7256 if x.Link == nop {
7257 x.Link = nop.Link
7258 break
7259 }
7260 }
7261
7262 for x := s.pp.Text; x != nil; x = x.Link {
7263 if x.Link == p {
7264 nop.Link = p
7265 x.Link = nop
7266 break
7267 }
7268 }
7269 }
7270 break
7271 }
7272 }
7273 }
7274
7275 if base.Ctxt.Flag_locationlists {
7276 var debugInfo *ssa.FuncDebug
7277 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
7278
7279
7280 debugInfo.EntryID = f.Entry.ID
7281 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7282 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7283 } else {
7284 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7285 }
7286 bstart := s.bstart
7287 idToIdx := make([]int, f.NumBlocks())
7288 for i, b := range f.Blocks {
7289 idToIdx[b.ID] = i
7290 }
7291
7292
7293
7294 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7295 switch v {
7296 case ssa.BlockStart.ID:
7297 if b == f.Entry.ID {
7298 return 0
7299
7300 }
7301 return bstart[b].Pc
7302 case ssa.BlockEnd.ID:
7303 blk := f.Blocks[idToIdx[b]]
7304 nv := len(blk.Values)
7305 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7306 case ssa.FuncEnd.ID:
7307 return e.curfn.LSym.Size
7308 default:
7309 return valueToProgAfter[v].Pc
7310 }
7311 }
7312 }
7313
7314
7315 for _, br := range s.Branches {
7316 br.P.To.SetTarget(s.bstart[br.B.ID])
7317 if br.P.Pos.IsStmt() != src.PosIsStmt {
7318 br.P.Pos = br.P.Pos.WithNotStmt()
7319 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7320 br.P.Pos = br.P.Pos.WithNotStmt()
7321 }
7322
7323 }
7324
7325
7326 for _, jt := range s.JumpTables {
7327
7328 targets := make([]*obj.Prog, len(jt.Succs))
7329 for i, e := range jt.Succs {
7330 targets[i] = s.bstart[e.Block().ID]
7331 }
7332
7333
7334
7335 fi := s.pp.CurFunc.LSym.Func()
7336 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7337 }
7338
7339 if e.log {
7340 filename := ""
7341 for p := s.pp.Text; p != nil; p = p.Link {
7342 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7343 filename = p.InnermostFilename()
7344 f.Logf("# %s\n", filename)
7345 }
7346
7347 var s string
7348 if v, ok := progToValue[p]; ok {
7349 s = v.String()
7350 } else if b, ok := progToBlock[p]; ok {
7351 s = b.String()
7352 } else {
7353 s = " "
7354 }
7355 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7356 }
7357 }
7358 if f.HTMLWriter != nil {
7359 var buf strings.Builder
7360 buf.WriteString("<code>")
7361 buf.WriteString("<dl class=\"ssa-gen\">")
7362 filename := ""
7363
7364 liveness := lv.Format(nil)
7365 if liveness != "" {
7366 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7367 buf.WriteString(html.EscapeString("# " + liveness))
7368 buf.WriteString("</dd>")
7369 }
7370
7371 for p := s.pp.Text; p != nil; p = p.Link {
7372
7373
7374 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7375 filename = p.InnermostFilename()
7376 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7377 buf.WriteString(html.EscapeString("# " + filename))
7378 buf.WriteString("</dd>")
7379 }
7380
7381 buf.WriteString("<dt class=\"ssa-prog-src\">")
7382 if v, ok := progToValue[p]; ok {
7383
7384
7385 if p.As != obj.APCDATA {
7386 if liveness := lv.Format(v); liveness != "" {
7387
7388 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7389 buf.WriteString(html.EscapeString("# " + liveness))
7390 buf.WriteString("</dd>")
7391
7392 buf.WriteString("<dt class=\"ssa-prog-src\">")
7393 }
7394 }
7395
7396 buf.WriteString(v.HTML())
7397 } else if b, ok := progToBlock[p]; ok {
7398 buf.WriteString("<b>" + b.HTML() + "</b>")
7399 }
7400 buf.WriteString("</dt>")
7401 buf.WriteString("<dd class=\"ssa-prog\">")
7402 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7403 buf.WriteString("</dd>")
7404 }
7405 buf.WriteString("</dl>")
7406 buf.WriteString("</code>")
7407 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7408 }
7409 if ssa.GenssaDump[f.Name] {
7410 fi := f.DumpFileForPhase("genssa")
7411 if fi != nil {
7412
7413
7414 inliningDiffers := func(a, b []src.Pos) bool {
7415 if len(a) != len(b) {
7416 return true
7417 }
7418 for i := range a {
7419 if a[i].Filename() != b[i].Filename() {
7420 return true
7421 }
7422 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7423 return true
7424 }
7425 }
7426 return false
7427 }
7428
7429 var allPosOld []src.Pos
7430 var allPos []src.Pos
7431
7432 for p := s.pp.Text; p != nil; p = p.Link {
7433 if p.Pos.IsKnown() {
7434 allPos = allPos[:0]
7435 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7436 if inliningDiffers(allPos, allPosOld) {
7437 for _, pos := range allPos {
7438 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7439 }
7440 allPos, allPosOld = allPosOld, allPos
7441 }
7442 }
7443
7444 var s string
7445 if v, ok := progToValue[p]; ok {
7446 s = v.String()
7447 } else if b, ok := progToBlock[p]; ok {
7448 s = b.String()
7449 } else {
7450 s = " "
7451 }
7452 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7453 }
7454 fi.Close()
7455 }
7456 }
7457
7458 defframe(&s, e, f)
7459
7460 f.HTMLWriter.Close()
7461 f.HTMLWriter = nil
7462 }
7463
// defframe finalizes the function's frame: it records the argument and
// frame sizes on the TEXT pseudo-instruction, spills pointer-carrying
// pieces of partially-live register arguments, and emits code to zero
// ambiguously-live (Needzero) stack slots.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Total frame size: outgoing-argument area rounded up to the stack
	// alignment, plus local-variable size, plus any arch-specific padding.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size on the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	// Insertion point for the instructions emitted below (right after TEXT).
	p := pp.Text

	// If register arguments are in use (and we're optimizing), spill the
	// pointer-containing pieces of partially-live arguments to their stack
	// slots so those slots are valid from function entry. Pieces that the
	// entry block already spills (before any call) are skipped.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, record which (name, offset) pieces the entry block
		// spills itself.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then spill any remaining pointer-typed pieces of multi-register
		// partially-live input parameters.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Zero Needzero locals, coalescing adjacent ranges when the gap between
	// them is small (within 2*RegSize).
	//
	// [lo, hi) is the pending frame-relative range awaiting zeroing.
	var lo, hi int64

	// Opaque scratch state threaded through Arch.ZeroRange calls.
	var state uint32

	// NOTE(review): the merge test below assumes Dcl visits needzero vars
	// in decreasing frame-offset order — confirm Dcl's sort invariant.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Close enough to the pending range: extend it downward.
			lo = n.FrameOffset()
			continue
		}

		// Flush the pending range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new pending range covering this variable.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Flush the final pending range (no-op when hi-lo == 0).
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7570
7571
// IndexJump describes one conditional branch: the assembler opcode to
// emit and the index of the successor block it targets.
type IndexJump struct {
	Jump obj.As // branch instruction opcode
	Index int // index into b.Succs of the jump target
}
7576
7577 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7578 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7579 p.Pos = b.Pos
7580 }
7581
7582
7583
7584 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7585 switch next {
7586 case b.Succs[0].Block():
7587 s.oneJump(b, &jumps[0][0])
7588 s.oneJump(b, &jumps[0][1])
7589 case b.Succs[1].Block():
7590 s.oneJump(b, &jumps[1][0])
7591 s.oneJump(b, &jumps[1][1])
7592 default:
7593 var q *obj.Prog
7594 if b.Likely != ssa.BranchUnlikely {
7595 s.oneJump(b, &jumps[1][0])
7596 s.oneJump(b, &jumps[1][1])
7597 q = s.Br(obj.AJMP, b.Succs[1].Block())
7598 } else {
7599 s.oneJump(b, &jumps[0][0])
7600 s.oneJump(b, &jumps[0][1])
7601 q = s.Br(obj.AJMP, b.Succs[0].Block())
7602 }
7603 q.Pos = b.Pos
7604 }
7605 }
7606
7607
// AddAux adds v's aux information (AuxInt offset plus any Aux symbol)
// to the address a.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7611 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7612 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7613 v.Fatalf("bad AddAux addr %v", a)
7614 }
7615
7616 a.Offset += offset
7617
7618
7619 if v.Aux == nil {
7620 return
7621 }
7622
7623 switch n := v.Aux.(type) {
7624 case *ssa.AuxCall:
7625 a.Name = obj.NAME_EXTERN
7626 a.Sym = n.Fn
7627 case *obj.LSym:
7628 a.Name = obj.NAME_EXTERN
7629 a.Sym = n
7630 case *ir.Name:
7631 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7632 a.Name = obj.NAME_PARAM
7633 } else {
7634 a.Name = obj.NAME_AUTO
7635 }
7636 a.Sym = n.Linksym()
7637 a.Offset += n.FrameOffset()
7638 default:
7639 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7640 }
7641 }
7642
7643
7644
// extendIndex extends index idx to pointer width so it can be used to
// index a slice/string/array. len is the length of the thing being
// indexed, passed to the panic op emitted when a 64-bit index on a
// 32-bit platform has nonzero high bits. kind identifies the bounds
// failure to report; bounded means the caller guarantees the index is
// in range, so no check is emitted.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer-width.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit platform. The usable index is the
		// low 32 bits; if the high 32 bits are nonzero the index is
		// out of range and we branch to a panic block.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Caller guarantees in-range, or bounds checks are
			// disabled (-B): skip the high-bits check.
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Report the unsigned flavor of the failure for
			// unsigned index types.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// if hi == 0 { goto bNext } else { goto bPanic }
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: sign- or zero-extend it.
	// The switch key encodes (index size, pointer size) as 10*size+PtrSize.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7736
7737
7738
7739 func CheckLoweredPhi(v *ssa.Value) {
7740 if v.Op != ssa.OpPhi {
7741 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7742 }
7743 if v.Type.IsMemory() {
7744 return
7745 }
7746 f := v.Block.Func
7747 loc := f.RegAlloc[v.ID]
7748 for _, a := range v.Args {
7749 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7750 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7751 }
7752 }
7753 }
7754
7755
7756
7757
7758
7759 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7760 entry := v.Block.Func.Entry
7761 if entry != v.Block {
7762 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7763 }
7764 for _, w := range entry.Values {
7765 if w == v {
7766 break
7767 }
7768 switch w.Op {
7769 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7770
7771 default:
7772 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7773 }
7774 }
7775 }
7776
7777
7778 func CheckArgReg(v *ssa.Value) {
7779 entry := v.Block.Func.Entry
7780 if entry != v.Block {
7781 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7782 }
7783 }
7784
7785 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7786 n, off := ssa.AutoVar(v)
7787 a.Type = obj.TYPE_MEM
7788 a.Sym = n.Linksym()
7789 a.Reg = int16(Arch.REGSP)
7790 a.Offset = n.FrameOffset() + off
7791 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7792 a.Name = obj.NAME_PARAM
7793 } else {
7794 a.Name = obj.NAME_AUTO
7795 }
7796 }
7797
7798
7799
7800 func (s *State) Call(v *ssa.Value) *obj.Prog {
7801 pPosIsStmt := s.pp.Pos.IsStmt()
7802 s.PrepareCall(v)
7803
7804 p := s.Prog(obj.ACALL)
7805 if pPosIsStmt == src.PosIsStmt {
7806 p.Pos = v.Pos.WithIsStmt()
7807 } else {
7808 p.Pos = v.Pos.WithNotStmt()
7809 }
7810 if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
7811 p.To.Type = obj.TYPE_MEM
7812 p.To.Name = obj.NAME_EXTERN
7813 p.To.Sym = sym.Fn
7814 } else {
7815
7816 switch Arch.LinkArch.Family {
7817 case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
7818 p.To.Type = obj.TYPE_REG
7819 case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
7820 p.To.Type = obj.TYPE_MEM
7821 default:
7822 base.Fatalf("unknown indirect call family")
7823 }
7824 p.To.Reg = v.Args[0].Reg()
7825 }
7826 return p
7827 }
7828
7829
7830
7831 func (s *State) TailCall(v *ssa.Value) *obj.Prog {
7832 p := s.Call(v)
7833 p.As = obj.ARET
7834 return p
7835 }
7836
7837
7838
7839
// PrepareCall does call-related bookkeeping for the call value v: it
// verifies a stack map exists for the call, records the call edge for
// the nowritebarrierrec check, and grows the outgoing-argument area.
// Call invokes it immediately before emitting the CALL instruction.
func (s *State) PrepareCall(v *ssa.Value) {
	idx := s.livenessMap.Get(v)
	if !idx.StackMapValid() {
		// A missing stack map is tolerated only for the wbZero/wbMove
		// write-barrier helpers.
		if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
			base.Fatalf("missing stack map index for %v", v.LongString())
		}
	}

	call, ok := v.Aux.(*ssa.AuxCall)

	if ok {
		// Record the call edge for the nowritebarrierrec analysis,
		// when that check is active.
		if nowritebarrierrecCheck != nil {
			nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
		}
	}

	// v.AuxInt is the size this call needs in the outgoing-argument
	// area; track the maximum for defframe.
	if s.maxarg < v.AuxInt {
		s.maxarg = v.AuxInt
	}
}
7863
7864
7865
7866 func (s *State) UseArgs(n int64) {
7867 if s.maxarg < n {
7868 s.maxarg = n
7869 }
7870 }
7871
7872
7873 func fieldIdx(n *ir.SelectorExpr) int {
7874 t := n.X.Type()
7875 if !isStructNotSIMD(t) {
7876 panic("ODOT's LHS is not a struct")
7877 }
7878
7879 for i, f := range t.Fields() {
7880 if f.Sym == n.Sel {
7881 if f.Offset != n.Offset() {
7882 panic("field offset doesn't match")
7883 }
7884 return i
7885 }
7886 }
7887 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7888
7889
7890
7891 }
7892
7893
7894
// ssafn holds frontend information about the function being compiled
// and implements compiler services (logging, fatal errors, symbol
// lookup, slot splitting) for the SSA backend.
type ssafn struct {
	curfn *ir.Func // the function being compiled
	strings map[string]*obj.LSym // constant string -> data symbol cache (see StringData)
	stksize int64 // local-variable frame size; added to maxarg in defframe
	stkptrsize int64 // NOTE(review): presumably the pointer-containing prefix of the frame — not used in this file's visible portion; confirm

	// stkalign is the stack alignment; defframe rounds the
	// outgoing-argument area (maxarg) up to this.
	stkalign int64

	log bool // enable per-function progress logging (see Log/Logf)
}
7909
7910
7911
7912 func (e *ssafn) StringData(s string) *obj.LSym {
7913 if aux, ok := e.strings[s]; ok {
7914 return aux
7915 }
7916 if e.strings == nil {
7917 e.strings = make(map[string]*obj.LSym)
7918 }
7919 data := staticdata.StringSym(e.curfn.Pos(), s)
7920 e.strings[s] = data
7921 return data
7922 }
7923
7924
7925 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
7926 node := parent.N
7927
7928 if node.Class != ir.PAUTO || node.Addrtaken() {
7929
7930 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
7931 }
7932
7933 sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
7934 n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
7935 n.SetUsed(true)
7936 n.SetEsc(ir.EscNever)
7937 types.CalcSize(t)
7938 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
7939 }
7940
7941
7942 func (e *ssafn) Logf(msg string, args ...any) {
7943 if e.log {
7944 fmt.Printf(msg, args...)
7945 }
7946 }
7947
// Log reports whether progress logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7951
7952
7953 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...any) {
7954 base.Pos = pos
7955 nargs := append([]any{ir.FuncName(e.curfn)}, args...)
7956 base.Fatalf("'%s': "+msg, nargs...)
7957 }
7958
7959
7960
// Warnl reports a compiler warning at pos by forwarding to base.WarnfAt.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...any) {
	base.WarnfAt(pos, fmt_, args...)
}
7964
// Debug_checknil reports whether nil-check debugging is enabled.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7968
// UseWriteBarrier reports whether write barriers are enabled.
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7972
7973 func (e *ssafn) Syslook(name string) *obj.LSym {
7974 switch name {
7975 case "goschedguarded":
7976 return ir.Syms.Goschedguarded
7977 case "writeBarrier":
7978 return ir.Syms.WriteBarrier
7979 case "wbZero":
7980 return ir.Syms.WBZero
7981 case "wbMove":
7982 return ir.Syms.WBMove
7983 case "cgoCheckMemmove":
7984 return ir.Syms.CgoCheckMemmove
7985 case "cgoCheckPtrWrite":
7986 return ir.Syms.CgoCheckPtrWrite
7987 }
7988 e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
7989 return nil
7990 }
7991
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7995
7996 func clobberBase(n ir.Node) ir.Node {
7997 if n.Op() == ir.ODOT {
7998 n := n.(*ir.SelectorExpr)
7999 if n.X.Type().NumFields() == 1 {
8000 return clobberBase(n.X)
8001 }
8002 }
8003 if n.Op() == ir.OINDEX {
8004 n := n.(*ir.IndexExpr)
8005 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
8006 return clobberBase(n.X)
8007 }
8008 }
8009 return n
8010 }
8011
8012
8013 func callTargetLSym(callee *ir.Name) *obj.LSym {
8014 if callee.Func == nil {
8015
8016
8017
8018 return callee.Linksym()
8019 }
8020
8021 return callee.LinksymABI(callee.Func.ABI)
8022 }
8023
8024
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct verifies it at build time.
const deferStructFnField = 4

// deferType caches the type built by deferstruct.
var deferType *types.Type
8028
8029
8030
// deferstruct builds (once) and returns the compiler's model of the
// runtime._defer struct type.
//
// NOTE(review): the field list must stay in sync with the runtime's
// _defer declaration — confirm against the runtime package when editing.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield builds an unexported field with the given name and type.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// Pointer-valued fields are declared as uintptr here;
		// presumably the runtime scans/adjusts them specially rather
		// than via this type's pointer map — confirm.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Verify deferStructFnField really indexes the "fn" field above.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer with the struct underlying it.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
8069
8070
8071
8072
8073
// SpillSlotAddr returns an obj.Addr referring to a spill slot: the
// stack memory at offset spill.Offset+extraOffset from baseReg.
func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
	return obj.Addr{
		Name: obj.NAME_NONE,
		Type: obj.TYPE_MEM,
		Reg: baseReg,
		Offset: spill.Offset + extraOffset,
	}
}
8082
8083 func isStructNotSIMD(t *types.Type) bool {
8084 return t.IsStruct() && !t.IsSIMD()
8085 }
8086
// BoundsCheckFunc holds one symbol per ssa.BoundsKind — presumably the
// runtime routine invoked when that bounds check fails; populated
// outside this file's visible portion (confirm initialization site).
var BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
8088
View as plain text