1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "os"
16 "path/filepath"
17 "slices"
18 "strings"
19
20 "cmd/compile/internal/abi"
21 "cmd/compile/internal/base"
22 "cmd/compile/internal/ir"
23 "cmd/compile/internal/liveness"
24 "cmd/compile/internal/objw"
25 "cmd/compile/internal/reflectdata"
26 "cmd/compile/internal/rttype"
27 "cmd/compile/internal/ssa"
28 "cmd/compile/internal/staticdata"
29 "cmd/compile/internal/typecheck"
30 "cmd/compile/internal/types"
31 "cmd/internal/obj"
32 "cmd/internal/objabi"
33 "cmd/internal/src"
34 "cmd/internal/sys"
35
36 rtabi "internal/abi"
37 )
38
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

var ssaDump string     // early copy of $GOSSAFUNC; the func name to dump output for (set in InitEnv)
var ssaDir string      // optional destination dir for ssa dump files ($GOSSADIR)
var ssaDumpStdout bool // whether to dump to stdout as well (GOSSAFUNC ends in "+")
var ssaDumpCFG string  // dump CFGs for these phases (the part of GOSSAFUNC after ":")
const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds functions that were inlined into the function
// being dumped, so their sources can be shown alongside it.
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation is the byte threshold up to which small,
// pointer-free heap allocations are merged into a single allocation
// (see newHeapaddr / flushPendingHeapAllocations).
const maxAggregatedHeapAllocation = 16
55
56 func DumpInline(fn *ir.Func) {
57 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
58 ssaDumpInlined = append(ssaDumpInlined, fn)
59 }
60 }
61
62 func InitEnv() {
63 ssaDump = os.Getenv("GOSSAFUNC")
64 ssaDir = os.Getenv("GOSSADIR")
65 if ssaDump != "" {
66 if strings.HasSuffix(ssaDump, "+") {
67 ssaDump = ssaDump[:len(ssaDump)-1]
68 ssaDumpStdout = true
69 }
70 spl := strings.Split(ssaDump, ":")
71 if len(spl) > 1 {
72 ssaDump = spl[0]
73 ssaDumpCFG = spl[1]
74 }
75 }
76 }
77
// InitConfig builds the shared SSA configuration for this compilation:
// it prewarms commonly needed pointer types, constructs ssaConfig and
// the per-worker SSA caches, and resolves the runtime function/variable
// symbols the backend refers to through ir.Syms, BoundsCheckFunc, and
// ExtendCheckFunc.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Instantiate pointer types the backend will need before the pointer
	// cache is disabled below (types.NewPtrCacheEnabled = false), so they
	// exist up front.
	_ = types.NewPtr(types.Types[types.TINTER])
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))
	_ = types.NewPtr(types.NewPtr(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8])))
	_ = types.NewPtr(types.Types[types.TINT16])
	_ = types.NewPtr(types.Types[types.TINT64])
	_ = types.NewPtr(types.ErrorType)
	// The map header type depends on the map implementation experiment.
	if buildcfg.Experiment.SwissMap {
		_ = types.NewPtr(reflectdata.SwissMapType())
	} else {
		_ = types.NewPtr(reflectdata.OldMapType())
	}
	_ = types.NewPtr(deferstruct())
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Resolve runtime symbols referenced by the backend.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")

	// Wasm uses the "go"-prefixed panic helpers; other architectures use
	// the assembly shims.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	// 32-bit platforms additionally need the "extend" variants for
	// 64-bit index values.
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm-specific helper symbols (resolved unconditionally).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
231
// InitTables initializes the intrinsics tables
// (initIntrinsics is defined elsewhere in this package).
func InitTables() {
	initIntrinsics(nil)
}
235
236
237
238
239
240
241
242
// AbiForBodylessFuncStackMap returns the ABI configuration to use when
// emitting the stack map for a bodyless function. It is always a fresh
// copy of ABI0; fn is currently unused.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
246
247
248
249 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
250 if buildcfg.Experiment.RegabiArgs {
251
252 if fn == nil {
253 return abi1
254 }
255 switch fn.ABI {
256 case obj.ABI0:
257 return abi0
258 case obj.ABIInternal:
259
260
261 return abi1
262 }
263 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
264 panic("not reachable")
265 }
266
267 a := abi0
268 if fn != nil {
269 if fn.Pragma&ir.RegisterParams != 0 {
270 a = abi1
271 }
272 }
273 return a
274 }
275
276
277
278
279
280
281
282
283
284
285
286
// emitOpenDeferInfo emits the per-function metadata symbol
// ("<fn>.opendefer") describing the open-coded defer slots: the frame
// offset of the defer-bits temporary and the frame offset of the first
// closure slot, each encoded as a negated uvarint. It also verifies
// that the closure slots occupy consecutive pointer-sized stack slots.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that cmpstackvarlt laid out the defer closure slots
	// contiguously: slot i must sit exactly i pointers past slot 0.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative here; negate them so they encode as
	// small unsigned varints.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
307
308
309
// buildssa builds an SSA function for fn and runs the SSA compiler on it.
// worker selects which entry of ssaCaches this invocation may use.
// isPgoHot marks the function as PGO-hot on the resulting ssa.Func.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// Match fn against GOSSAFUNC: the bare name, an ABI-qualified name
	// ("name,N" or "name<N>", the latter rewritten to the former), the
	// package-qualified name, or an import-path suffix of it.
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump ||
			pkgDotName == ssaDump ||
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump)
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the AST dump for the HTML writer (and stdout if requested).
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Decide what instrumentation this function gets. ABI wrappers and
	// norace functions are never instrumented; race mode additionally
	// instruments function entry/exit.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether defers in this function can be open-coded.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Disabled on shared/dynlink 386 builds.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Race entry/exit instrumentation does not mix with open-coded
		// defer exit paths.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// All results must live on the stack for the open-coded defer
		// exit code to store into them.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Too much code growth: each return duplicates the defer calls.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits temporary (address-taken so the runtime
		// can read it during panic processing), initialize it to zero,
		// and keep it live.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// For this value, AuxInt is initialized to zero by default
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record utility functions' output-in-register params for DWARF
	// debug info generation.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments; spill non-SSAable register arguments
	// to their stack slots.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v)
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Spill register parameters piecewise.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// Unoptimized range-over-func bodies additionally spill the
			// closure pointer to a named ".closureptr" slot and keep it
			// live, recorded on the func as CloSlot.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// SSA-able by-value captures that are not address-taken are
			// promoted to locals and loaded from the closure once here.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// By-reference captures store a pointer in the closure;
			// load it to get the variable's heap address.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fallthrough to exit.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to ssa package to compile function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information for morestack
	// processing: register, offset, and type of each register-passed
	// piece, using frame offsets now that the frame is laid out.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
634
// storeParameterRegsToStack spills each register piece of the parameter
// n (as described by paramAssignment) to its stack home starting at
// addr. If pointersOnly is set, only pointer-shaped pieces are spilled.
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		// Aux records which named parameter (and offset within it)
		// this register piece belongs to.
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}
651
652
653
654
655
656
657
// zeroResults zeroes the function's stack-resident result variables at
// entry, so that defers and panics observe zeroed results. Results that
// do not live on the stack (heap-escaped or register-only) are skipped.
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// Not on stack: handled elsewhere (paramsToHeap allocates
			// heap storage; register results need no zeroing here).
			continue
		}

		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			// Mark the variable defined before zeroing its memory;
			// needed when it has pointers or may be stack-merged.
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
678
679
680
// paramsToHeap allocates heap storage for any receiver, parameter, or
// result that has escaped (is not on the stack), and for input
// parameters copies the incoming stack value into the new heap cell.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Copy the argument value from its stack slot to the heap.
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
703
704
705
706
707 func allocSizeAndAlign(t *types.Type) (int64, int64) {
708 size, align := t.Size(), t.Alignment()
709 if types.PtrSize == 4 && align == 4 && size >= 8 {
710
711 size = types.RoundUp(size, 8)
712 align = 8
713 }
714 return size, align
715 }
716 func allocSize(t *types.Type) int64 {
717 size, _ := allocSizeAndAlign(t)
718 return size
719 }
720 func allocAlign(t *types.Type) int64 {
721 _, align := allocSizeAndAlign(t)
722 return align
723 }
724
725
// newHeapaddr allocates heap storage for n and records its address in
// n.Heapaddr. Small pointer-free allocations are aggregated: several
// variables share a single allocation, with each variable placed at an
// offset assigned later by flushPendingHeapAllocations.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	// Pointer-containing, large, or zero-sized types get their own
	// allocation (zero-sized so they can share the runtime zerobase).
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
		return
	}

	// If adding this allocation would overflow the aggregation budget,
	// flush what we have so far.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value
	if len(s.pendingHeapAllocations) == 0 {
		// Start a new aggregated allocation; its size/type are fixed up
		// in flushPendingHeapAllocations.
		allocCall = s.newObject(n.Type(), nil)
	} else {
		// Join the in-progress aggregated allocation.
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}

	// Placeholder offset 0; the real offset is assigned at flush time.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	s.setHeapaddr(n.Pos(), n, v)
}
761
// flushPendingHeapAllocations finalizes the current batch of aggregated
// small heap allocations: it assigns each pending OpOffPtr its offset
// within the shared block and rewrites the underlying allocation call
// into a single raw mallocgc of the combined size.
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return
	}
	s.pendingHeapAllocations = nil
	ptr := pending[0].Args[0] // result of the allocation call
	call := ptr.Args[0]       // the allocation call itself

	if len(pending) == 1 {
		// Only one allocation in the batch: keep the original typed
		// allocation and turn the offset-0 OpOffPtr into a plain copy.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort by decreasing alignment so each allocation's offset is
	// naturally aligned within the block (stable to keep order
	// deterministic for equal alignments).
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign offsets and accumulate the total size.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the call into mallocgc(size, nil, true): total size, nil
	// type pointer (the batch is pointer-free), and true for the third
	// argument (presumably "needzero" — the runtime signature governs).
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type),
		s.constBool(true),
		call.Args[1], // memory argument of the original call
	}
	call.Aux = ssa.StaticAuxCall(ir.Syms.MallocGC, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // arg+result stack space, if needed
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// Retype the call result as an untyped unsafe pointer.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
812
813
814
// setHeapaddr creates a local pseudo-variable named "&x" holding the
// heap address ptr of the heap-allocated variable n, records it as
// n.Heapaddr, and assigns ptr into it. ptr must be a pointer to n's type.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the "&x" variable that will hold the address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
833
834
835 func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
836 if typ.Size() == 0 {
837 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
838 }
839 if rtype == nil {
840 rtype = s.reflectType(typ)
841 }
842 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
843 }
844
// checkPtrAlignment emits a runtime checkptrAlignment call for the
// unsafe.Pointer conversion n, validating that v is suitably aligned
// for the element type. count, when non-nil, is the number of array
// elements being addressed (n's element type must then be an array).
// Checks that are statically unnecessary are skipped.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// No check needed for 1-byte alignment unless a count could cause
	// the access to span memory.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
875
876
877
// reflectType returns an SSA value (in the entry block) holding the
// address of typ's type-descriptor symbol.
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
884
// dumpSourcesColumn writes the "sources" column of the GOSSAFUNC HTML
// dump: the source lines of fn followed by the source lines of every
// function inlined into it (collected in ssaDumpInlined).
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read the target function's own source lines.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read the source lines of each inlined function; failures are
	// logged and that function is skipped.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	// Sort the inlined functions deterministically, then put the target
	// function first.
	slices.SortFunc(inlFns, ssa.ByTopoCmp)
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}
913
914 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
915 f, err := os.Open(os.ExpandEnv(file))
916 if err != nil {
917 return nil, err
918 }
919 defer f.Close()
920 var lines []string
921 ln := uint(1)
922 scanner := bufio.NewScanner(f)
923 for scanner.Scan() && ln <= end {
924 if ln >= start {
925 lines = append(lines, scanner.Text())
926 }
927 ln++
928 }
929 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
930 }
931
932
933
934
// updateUnsetPredPos propagates a position to predecessors of b that
// lack one: each position-lacking predecessor receives b's position (or
// the position of the first position-carrying value in b), then is
// recursively processed so the position flows further backward.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		// Compute bestPos lazily, only once a lacking predecessor is found.
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Prefer the earliest value with a real position.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		s.updateUnsetPredPos(p) // now p has a position; recurse into its preds
	}
}
963
964
// openDeferInfo describes one open-coded defer in the current function.
type openDeferInfo struct {
	// n is the defer's call expression.
	n *ir.CallExpr

	// closure is the SSA value of the deferred function/closure.
	closure *ssa.Value

	// closureNode is the stack slot holding the closure; its frame
	// offset is encoded into the ".opendefer" metadata
	// (see emitOpenDeferInfo).
	closureNode *ir.Name
}
976
// state holds all the working state used while converting a single
// function's IR to SSA form (see buildssa).
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable
	// symbol to ssa value); reset by startBlock, saved by endBlock.
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in
	// the current block (resolved later by insertPhis).
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each processed block,
	// indexed by block ID (filled in by endBlock).
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables (computed in buildssa).
	decladdrs map[*ir.Name]*ssa.Value

	// starting values; memory, stack pointer, and static base pointer.
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// address and backing temporary of the deferBits byte used by
	// open-coded defers.
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack (pushLine/popLine/peekPos).
	line []src.XPos
	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number,
	// so we can deduplicate panic branch targets.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit (race)
	instrumentMemory    bool // whether to instrument memory operations (race/msan/asan)

	// If doing open-coded defers, list of info about the defer calls
	// in scanning order.
	openDefers []*openDeferInfo

	// Cached blocks/count from the last shared defer-exit generation
	// (set by the defer-exit code elsewhere in this package).
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	prevCall *ssa.Value // the last call emitted

	// pendingHeapAllocations is the set of small pointer-free heap
	// allocations waiting to be merged into one mallocgc call
	// (see newHeapaddr / flushPendingHeapAllocations).
	pendingHeapAllocations []*ssa.Value
}
1057
// funcLine is the key type of state.panics: a target function symbol
// plus a source position, used to share one panic-call block per
// (function, position) pair.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1063
// ssaLabel records the SSA blocks associated with a Go label: the
// block a goto to this label jumps to, and (when the label names a
// breakable/continuable statement) the break and continue targets.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1069
1070
1071 func (s *state) label(sym *types.Sym) *ssaLabel {
1072 lab := s.labels[sym.Name]
1073 if lab == nil {
1074 lab = new(ssaLabel)
1075 s.labels[sym.Name] = lab
1076 }
1077 return lab
1078 }
1079
// Logging and diagnostics forwarders: state delegates to the underlying
// *ssa.Func and its frontend. Fatalf reports at the current statement
// position (peekPos).
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }
1087
// ssaMarker returns a fake, uniquely identifiable ir.Name used as a key
// for tracking internal SSA "variables" (memory, temporaries) that do
// not correspond to source-level names.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1091
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables used during SSA construction
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1105
1106
// startBlock sets the current block we're generating code in to b.
// It is a fatal error to start a block while another is still current.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	clear(s.fwdVars)
}
1115
1116
1117
1118
// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	s.flushPendingHeapAllocations()

	// Record this block's variable definitions, growing defvars as needed.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Blocks with no position-bearing instructions get no position of
		// their own; leaving it unset lets later passes pick a sensible one.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}
1143
1144
// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// the frontend may emit nodes with a missing position;
		// use the parent position in that case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}
1159
1160
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1169
1170
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created values may be a statement marker
// (i.e., false means the position is forced to be not-a-statement).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}
1209
1210
// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement marker
// (i.e., false means the position is forced to be not-a-statement).
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement marker
// (i.e., false means the position is forced to be not-a-statement).
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1269
// entryBlock returns the block to place entry-style values (e.g. address
// computations hoisted out of the body) in.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// If optimization is disabled (presumably for debugging), keep
		// values in the current block instead of hoisting them to the
		// entry block, preserving source-order execution for debuggers.
		b = s.curBlock
	}
	return b
}
1281
1282
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1316
1317
// const* routines add a new const value to the function (cached in the entry block).
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}

// constInt adds an integer constant sized to the target's pointer width.
// On 32-bit targets, c must fit in an int32.
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}

// constOffPtrSP returns a pointer constant at offset c from the stack pointer.
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}
1361
1362
1363
// newValueOrSfCall* are wrappers around newValue*, which may create a call to a
// soft-float runtime function instead (when emitting soft-float code).
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1380
// instrumentKind describes the kind of memory access being instrumented
// by the race/msan/asan passes (see instrument2).
type instrumentKind uint8

const (
	// Give the constants their declared type explicitly; previously they
	// were untyped ints that only converted implicitly at use sites.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1388
// instrument emits instrumentation for a single read/write of t-typed memory at addr.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}

// instrumentFields instruments a read/write operation on addr.
// If it is instrumenting for MSAN or ASAN and t is a struct type, it instruments
// the operation for each field, instead of for the whole struct.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			continue
		}
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}

// instrumentMove instruments a move from src to dst. MSan has a dedicated
// move hook; the other sanitizers see it as a read of src plus a write of dst.
func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
	if base.Flag.MSan {
		s.instrument2(t, dst, src, instrumentMove)
	} else {
		s.instrument(t, src, instrumentRead)
		s.instrument(t, dst, instrumentWrite)
	}
}
1418
// instrument2 emits a sanitizer runtime call (race/msan/asan) recording an
// access of t.Size() bytes at addr. addr2 is the source address for msan
// moves and must be nil otherwise.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	// addr2 only makes sense for a move.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// for composite objects we have to write every address
		// because a write might happen to any subobject.
		// composites with only one element don't have subobjects, though.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// for non-composite objects we can write just the start
		// address, as any write must write the first byte.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	// Assemble the runtime call arguments: addr[, addr2][, width].
	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1499
// load returns the t-typed value stored at src, instrumenting the read.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad is like load, but without instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store writes the t-typed value val to dst, threading the memory state.
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

// zero writes t.Size() zero bytes at dst.
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}

// move copies t-typed data from src to dst; dst and src must not partially overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
// moveWhichMayOverlap copies t-typed data from src to dst.
// mayOverlap reports whether dst and src might refer to partially
// overlapping memory.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// An OpMove assumes its operands either do not overlap at all or
		// overlap exactly (dst == src). A multi-element array copy that
		// won't lower to a single inlinable memmove may be split into
		// several loads/stores, whose ordering could corrupt partially
		// overlapping data (e.g. assignments between views of the same
		// base array, such as a[1:] and a[:len(a)-1]).
		//
		// Non-array types (and single-element arrays) can only overlap
		// exactly, so they are safe for OpMove; only this case needs a
		// runtime call, since the runtime memmove routines handle
		// overlapping arguments correctly.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// We would have otherwise implemented this move with straightline
			// code, including a write barrier. Record a write-barrier position
			// here so downstream consumers (debug info, tests) see one, as
			// they would for the inline path.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1566
1567
1568 func (s *state) stmtList(l ir.Nodes) {
1569 for _, n := range l {
1570 s.stmt(n)
1571 }
1572 }
1573
1574
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil, and n isn't a label (which might have an
	// associated goto somewhere), then this code is dead. Stop here.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op
		// Fallthrough was already handled when the enclosing switch was lowered.

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				// These runtime calls never return; end the current
				// block with an exit.
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
				// TODO: never rewrite OPANIC to OCALLFUNC in the
				// first place. Need to wait until all backends
				// go through SSA.
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer strategy is used, for -d=defer debugging.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				// Non-escaping defers (with no explicit DeferAt) can keep
				// their record on the stack.
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Not SSA-able: the dottype result must be a fresh load;
			// assign by dereferencing the load's address instead.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			// Heap-escaping declarations need their backing storage
			// allocated now.
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing to do because the label can't be the target of a
			// goto, break, or continue.
			break
		}
		lab := s.label(sym)

		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to that label.
		// (We pretend "label:" is preceded by "goto label", unless the
		// code here is unreachable, in which case s.curBlock is nil.)
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x=x assignment. No point in doing anything
			// here. In addition, skipping this assignment
			// prevents generating:
			//   VARDEF x
			//   COPY x -> x
			// which is bad because x is incorrectly considered
			// dead before the vardef. See issue #14904.
			return
		}

		// mayOverlap tracks whether the LHS and RHS might refer to
		// partially overlapping memory. Partial overlap can only happen
		// for arrays; see the comment in moveWhichMayOverlap. If neither
		// side of the assignment is a dereference, no partial overlap is
		// possible.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// Pointers into string data point to immutable memory,
				// which can never be a write target, so the copy cannot
				// partially overlap the destination.
				mayOverlap = false
			}
		}

		// Evaluate RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk; any composite literal that
				// remains here must be the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an append
				// back to the same slice; if so, handle it specially to
				// avoid write barriers on the fast (non-growth) path.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd, it'll be on the stack,
				// so there will be no write barriers,
				// so there's no need to attempt to prevent them.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs
			// Just evaluate rhs for its side effects.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			// Non-SSA-able type: assign through memory, using the
			// address of rhs (nil rhs means zero the destination).
			if rhs == nil {
				r = nil
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source.
			// Don't write back fields we aren't changing. See issue #14855.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// With a zero/omitted low bound, the pointer field is
			// unchanged; if the high (and cap) bounds are also omitted,
			// the len (and cap) fields are unchanged too, so tell assign
			// to skip storing them.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: compile only the branch that is taken.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call.(*ir.CallExpr), callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the target
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Ninit; Cond; Post { Body }
		// cond (Cond); body (Body); incr (Post)
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have a correct position; see issue #30167
		bBody.Pos = n.Pos()

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			// No condition: loop unconditionally into the body.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// If the increment block has no position of its own,
			// borrow the condition's so debuggers see a sane line.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by walk into a Compiled body;
		// here we only need to set up the break target and emit that body.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled switch/select
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// walk adds explicit OBREAK nodes to the end of all reachable code paths.
		// If we still have a current block here, then mark it unreachable.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// We check that min <= idx <= max and jump around the jump table
		// otherwise. This is implemented as idx-min <= max-min (unsigned),
		// since idx-min is also needed as the jump table's control value.
		// n.Cases is sorted, so the first and last entries are the extremes.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}
		// Compare idx-min with max-min to see if we can use the jump table.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)   // in range - use jump table
		b.AddEdgeTo(bEnd) // out of range - no case in the jump table will trigger
		b.Likely = ssa.BranchLikely

		// Build jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Figure out where we should go for each index in the table,
		// defaulting to bEnd for indexes with no matching case.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target for this case value.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Check the cache first.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// Note: we can only use the cache if we have the right atomic load instruction.
			// Double-check that here.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick right-sized ops for the target's pointer width.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load cache pointer out of the descriptor, with an atomic
			// load so we're sure to see a fully written cache.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize the hash probe variable.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load mask from cache (first word of the cache).
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
			// Jump to loop head.
			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// At loop head, compute the address of the cache entry:
			// entries start one pointer past the mask; each entry is
			// 3 pointer-sized words; index is hash&mask.
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance the probe for linear probing: hash++.
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Look for a cache hit.
			//   if e.Typ == t { goto hit }
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// Look for an empty entry, the tombstone for this hash table.
			//   if e.Typ == nil { goto miss }
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// On a hit, load the data fields of the cache entry.
			//   Case = e.Case
			//   Itab = e.Itab
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// On a miss, call into the runtime to get the answer.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Cache hits merge in here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		// Only the check itself matters; the checked value is unused.
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2269
2270
2271
2272 const shareDeferExits = false
2273
2274
2275
2276
// exit runs any deferred-call and result-flushing code needed before
// returning from the function, then makes and returns the BlockRet block
// whose control value is the final memory state.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated open-coded defer exit.
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers: call deferreturn. Use the function's
			// end position so the call doesn't get attributed to a random
			// return statement.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Collect the result values (one per result field, plus the final
	// memory state) for the MakeResult op.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
	// Store SSAable and heap-escaped PPARAMOUT variables back to stack locations.
	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// Mark the result slot as being fully (re)defined for liveness.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Heap-escaped result: read the value back from its heap copy.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Result already lives in its stack slot; pass its value
			// through a dereference of its address.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// In -race mode, we need to call racefuncexit.
	// Note: This has to happen after we load any heap-allocated results,
	// otherwise races will be attributed to the caller instead of the callee.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2352
// opAndType pairs an IR operator with the kind of its operand type; it is
// the key of the opToSSA table mapping to the corresponding SSA opcode.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2357
// opToSSA maps an IR operator plus the concrete kind of its operand type
// to the width- and signedness-specific SSA opcode implementing it.
// Looked up by (*state).ssaOp; a missing entry there is a fatal error.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}: ssa.OpAdd8,
	{ir.OADD, types.TUINT8}: ssa.OpAdd8,
	{ir.OADD, types.TINT16}: ssa.OpAdd16,
	{ir.OADD, types.TUINT16}: ssa.OpAdd16,
	{ir.OADD, types.TINT32}: ssa.OpAdd32,
	{ir.OADD, types.TUINT32}: ssa.OpAdd32,
	{ir.OADD, types.TINT64}: ssa.OpAdd64,
	{ir.OADD, types.TUINT64}: ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}: ssa.OpSub8,
	{ir.OSUB, types.TUINT8}: ssa.OpSub8,
	{ir.OSUB, types.TINT16}: ssa.OpSub16,
	{ir.OSUB, types.TUINT16}: ssa.OpSub16,
	{ir.OSUB, types.TINT32}: ssa.OpSub32,
	{ir.OSUB, types.TUINT32}: ssa.OpSub32,
	{ir.OSUB, types.TINT64}: ssa.OpSub64,
	{ir.OSUB, types.TUINT64}: ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}: ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}: ssa.OpMul8,
	{ir.OMUL, types.TUINT8}: ssa.OpMul8,
	{ir.OMUL, types.TINT16}: ssa.OpMul16,
	{ir.OMUL, types.TUINT16}: ssa.OpMul16,
	{ir.OMUL, types.TINT32}: ssa.OpMul32,
	{ir.OMUL, types.TUINT32}: ssa.OpMul32,
	{ir.OMUL, types.TINT64}: ssa.OpMul64,
	{ir.OMUL, types.TUINT64}: ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division distinguishes signed and unsigned opcodes.
	{ir.ODIV, types.TINT8}: ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}: ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}: ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}: ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}: ssa.OpMod8,
	{ir.OMOD, types.TUINT8}: ssa.OpMod8u,
	{ir.OMOD, types.TINT16}: ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}: ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}: ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}: ssa.OpAnd8,
	{ir.OAND, types.TUINT8}: ssa.OpAnd8,
	{ir.OAND, types.TINT16}: ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}: ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}: ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}: ssa.OpOr8,
	{ir.OOR, types.TUINT8}: ssa.OpOr8,
	{ir.OOR, types.TINT16}: ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}: ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}: ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}: ssa.OpXor8,
	{ir.OXOR, types.TUINT8}: ssa.OpXor8,
	{ir.OXOR, types.TINT16}: ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}: ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}: ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	{ir.OEQ, types.TBOOL}: ssa.OpEqB,
	{ir.OEQ, types.TINT8}: ssa.OpEq8,
	{ir.OEQ, types.TUINT8}: ssa.OpEq8,
	{ir.OEQ, types.TINT16}: ssa.OpEq16,
	{ir.OEQ, types.TUINT16}: ssa.OpEq16,
	{ir.OEQ, types.TINT32}: ssa.OpEq32,
	{ir.OEQ, types.TUINT32}: ssa.OpEq32,
	{ir.OEQ, types.TINT64}: ssa.OpEq64,
	{ir.OEQ, types.TUINT64}: ssa.OpEq64,
	{ir.OEQ, types.TINTER}: ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}: ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,

	{ir.ONE, types.TBOOL}: ssa.OpNeqB,
	{ir.ONE, types.TINT8}: ssa.OpNeq8,
	{ir.ONE, types.TUINT8}: ssa.OpNeq8,
	{ir.ONE, types.TINT16}: ssa.OpNeq16,
	{ir.ONE, types.TUINT16}: ssa.OpNeq16,
	{ir.ONE, types.TINT32}: ssa.OpNeq32,
	{ir.ONE, types.TUINT32}: ssa.OpNeq32,
	{ir.ONE, types.TINT64}: ssa.OpNeq64,
	{ir.ONE, types.TUINT64}: ssa.OpNeq64,
	{ir.ONE, types.TINTER}: ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}: ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,

	// Ordered comparisons distinguish signed and unsigned opcodes.
	{ir.OLT, types.TINT8}: ssa.OpLess8,
	{ir.OLT, types.TUINT8}: ssa.OpLess8U,
	{ir.OLT, types.TINT16}: ssa.OpLess16,
	{ir.OLT, types.TUINT16}: ssa.OpLess16U,
	{ir.OLT, types.TINT32}: ssa.OpLess32,
	{ir.OLT, types.TUINT32}: ssa.OpLess32U,
	{ir.OLT, types.TINT64}: ssa.OpLess64,
	{ir.OLT, types.TUINT64}: ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}: ssa.OpLeq8,
	{ir.OLE, types.TUINT8}: ssa.OpLeq8U,
	{ir.OLE, types.TINT16}: ssa.OpLeq16,
	{ir.OLE, types.TUINT16}: ssa.OpLeq16U,
	{ir.OLE, types.TINT32}: ssa.OpLeq32,
	{ir.OLE, types.TUINT32}: ssa.OpLeq32U,
	{ir.OLE, types.TINT64}: ssa.OpLeq64,
	{ir.OLE, types.TUINT64}: ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2529
2530 func (s *state) concreteEtype(t *types.Type) types.Kind {
2531 e := t.Kind()
2532 switch e {
2533 default:
2534 return e
2535 case types.TINT:
2536 if s.config.PtrSize == 8 {
2537 return types.TINT64
2538 }
2539 return types.TINT32
2540 case types.TUINT:
2541 if s.config.PtrSize == 8 {
2542 return types.TUINT64
2543 }
2544 return types.TUINT32
2545 case types.TUINTPTR:
2546 if s.config.PtrSize == 8 {
2547 return types.TUINT64
2548 }
2549 return types.TUINT32
2550 }
2551 }
2552
2553 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2554 etype := s.concreteEtype(t)
2555 x, ok := opToSSA[opAndType{op, etype}]
2556 if !ok {
2557 s.Fatalf("unhandled binary op %v %s", op, etype)
2558 }
2559 return x
2560 }
2561
// opAndTwoTypes is the key type of shiftOpToSSA: a shift operator plus
// the concrete kinds of the shifted operand (etype1) and the shift
// amount (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2567
// twoTypes is the key type of the float-conversion tables: the concrete
// kinds of the source (etype1) and destination (etype2) types.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2572
// twoOpsAndType describes a two-step conversion: op1 converts the source
// value to intermediateType, then op2 converts that to the destination.
// Either op may be ssa.OpCopy (step is a no-op) or ssa.OpInvalid (the
// conversion needs special handling; see (*state).conv).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2578
// fpConvOpToSSA maps a (from, to) pair of concrete kinds to the op pair
// implementing the conversion between integer and floating-point types
// (and float-to-float). Entries containing ssa.OpInvalid mark uint64
// conversions that (*state).conv instead lowers via runtime helper
// routines (uint64Tofloat*, float*ToUint64) on targets without direct
// hardware support.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32: widen to a register-size int, then convert.
	{types.TINT8, types.TFLOAT32}: {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	{types.TINT8, types.TFLOAT64}: {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed int: convert, then truncate to the target width.
	{types.TFLOAT32, types.TINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	{types.TFLOAT64, types.TINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float: zero-extend into a wider signed int first;
	// uint64 has no wider int, so it falls to the OpInvalid path.
	{types.TUINT8, types.TFLOAT32}: {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	{types.TUINT8, types.TFLOAT64}: {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float -> unsigned int: convert through a wider signed int, then
	// truncate; float -> uint64 falls to the OpInvalid path.
	{types.TFLOAT32, types.TUINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	{types.TFLOAT64, types.TUINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float -> float: same-size conversions round (softfloat-relevant);
	// cross-size conversions use the direct Cvt ops.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2627
2628
2629
// fpConvOpToSSA32 overrides fpConvOpToSSA entries on 32-bit-register
// targets (except MIPS, and not under softfloat — see (*state).conv),
// where uint32<->float conversions have dedicated unsigned ops instead
// of going through a 64-bit intermediate.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2636
2637
// uint64fpConvOpToSSA overrides fpConvOpToSSA entries on targets with
// direct uint64<->float conversion ops (ARM64, Wasm, S390X) and under
// softfloat — see (*state).conv — avoiding the runtime helper calls
// that the OpInvalid entries would otherwise trigger.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2644
// shiftOpToSSA maps a shift operator plus the concrete kinds of the
// shifted operand and the shift amount to the corresponding SSA opcode
// (Lsh/Rsh, with a U variant for unsigned right shifts). Looked up by
// (*state).ssaShiftOp; a missing entry there is a fatal compiler error.
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	// Right shift is arithmetic (Rsh) for signed operands and logical
	// (RshU) for unsigned operands.
	{ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2718
2719 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2720 etype1 := s.concreteEtype(t)
2721 etype2 := s.concreteEtype(u)
2722 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2723 if !ok {
2724 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2725 }
2726 return x
2727 }
2728
2729 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2730 if s.config.PtrSize == 4 {
2731 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2732 }
2733 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2734 }
2735
2736 func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
2737 if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
2738
2739 return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
2740 }
2741 if ft.IsInteger() && tt.IsInteger() {
2742 var op ssa.Op
2743 if tt.Size() == ft.Size() {
2744 op = ssa.OpCopy
2745 } else if tt.Size() < ft.Size() {
2746
2747 switch 10*ft.Size() + tt.Size() {
2748 case 21:
2749 op = ssa.OpTrunc16to8
2750 case 41:
2751 op = ssa.OpTrunc32to8
2752 case 42:
2753 op = ssa.OpTrunc32to16
2754 case 81:
2755 op = ssa.OpTrunc64to8
2756 case 82:
2757 op = ssa.OpTrunc64to16
2758 case 84:
2759 op = ssa.OpTrunc64to32
2760 default:
2761 s.Fatalf("weird integer truncation %v -> %v", ft, tt)
2762 }
2763 } else if ft.IsSigned() {
2764
2765 switch 10*ft.Size() + tt.Size() {
2766 case 12:
2767 op = ssa.OpSignExt8to16
2768 case 14:
2769 op = ssa.OpSignExt8to32
2770 case 18:
2771 op = ssa.OpSignExt8to64
2772 case 24:
2773 op = ssa.OpSignExt16to32
2774 case 28:
2775 op = ssa.OpSignExt16to64
2776 case 48:
2777 op = ssa.OpSignExt32to64
2778 default:
2779 s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
2780 }
2781 } else {
2782
2783 switch 10*ft.Size() + tt.Size() {
2784 case 12:
2785 op = ssa.OpZeroExt8to16
2786 case 14:
2787 op = ssa.OpZeroExt8to32
2788 case 18:
2789 op = ssa.OpZeroExt8to64
2790 case 24:
2791 op = ssa.OpZeroExt16to32
2792 case 28:
2793 op = ssa.OpZeroExt16to64
2794 case 48:
2795 op = ssa.OpZeroExt32to64
2796 default:
2797 s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
2798 }
2799 }
2800 return s.newValue1(op, tt, v)
2801 }
2802
2803 if ft.IsComplex() && tt.IsComplex() {
2804 var op ssa.Op
2805 if ft.Size() == tt.Size() {
2806 switch ft.Size() {
2807 case 8:
2808 op = ssa.OpRound32F
2809 case 16:
2810 op = ssa.OpRound64F
2811 default:
2812 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2813 }
2814 } else if ft.Size() == 8 && tt.Size() == 16 {
2815 op = ssa.OpCvt32Fto64F
2816 } else if ft.Size() == 16 && tt.Size() == 8 {
2817 op = ssa.OpCvt64Fto32F
2818 } else {
2819 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2820 }
2821 ftp := types.FloatForComplex(ft)
2822 ttp := types.FloatForComplex(tt)
2823 return s.newValue2(ssa.OpComplexMake, tt,
2824 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
2825 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
2826 }
2827
2828 if tt.IsComplex() {
2829
2830 et := types.FloatForComplex(tt)
2831 v = s.conv(n, v, ft, et)
2832 return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
2833 }
2834
2835 if ft.IsFloat() || tt.IsFloat() {
2836 conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
2837 if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
2838 if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2839 conv = conv1
2840 }
2841 }
2842 if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
2843 if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2844 conv = conv1
2845 }
2846 }
2847
2848 if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
2849 if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
2850
2851 if tt.Size() == 4 {
2852 return s.uint32Tofloat32(n, v, ft, tt)
2853 }
2854 if tt.Size() == 8 {
2855 return s.uint32Tofloat64(n, v, ft, tt)
2856 }
2857 } else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
2858
2859 if ft.Size() == 4 {
2860 return s.float32ToUint32(n, v, ft, tt)
2861 }
2862 if ft.Size() == 8 {
2863 return s.float64ToUint32(n, v, ft, tt)
2864 }
2865 }
2866 }
2867
2868 if !ok {
2869 s.Fatalf("weird float conversion %v -> %v", ft, tt)
2870 }
2871 op1, op2, it := conv.op1, conv.op2, conv.intermediateType
2872
2873 if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
2874
2875 if op1 == ssa.OpCopy {
2876 if op2 == ssa.OpCopy {
2877 return v
2878 }
2879 return s.newValueOrSfCall1(op2, tt, v)
2880 }
2881 if op2 == ssa.OpCopy {
2882 return s.newValueOrSfCall1(op1, tt, v)
2883 }
2884 return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
2885 }
2886
2887 if ft.IsInteger() {
2888
2889 if tt.Size() == 4 {
2890 return s.uint64Tofloat32(n, v, ft, tt)
2891 }
2892 if tt.Size() == 8 {
2893 return s.uint64Tofloat64(n, v, ft, tt)
2894 }
2895 s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
2896 }
2897
2898 if ft.Size() == 4 {
2899 return s.float32ToUint64(n, v, ft, tt)
2900 }
2901 if ft.Size() == 8 {
2902 return s.float64ToUint64(n, v, ft, tt)
2903 }
2904 s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
2905 return nil
2906 }
2907
2908 s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
2909 return nil
2910 }
2911
2912
// expr converts expression n to SSA and returns the resulting value,
// with unsafe-pointer alignment checking enabled (see exprCheckPtr).
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
2916
2917 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2918 if ir.HasUniquePos(n) {
2919
2920
2921 s.pushLine(n.Pos())
2922 defer s.popLine()
2923 }
2924
2925 s.stmtList(n.Init())
2926 switch n.Op() {
2927 case ir.OBYTES2STRTMP:
2928 n := n.(*ir.ConvExpr)
2929 slice := s.expr(n.X)
2930 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2931 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2932 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2933 case ir.OSTR2BYTESTMP:
2934 n := n.(*ir.ConvExpr)
2935 str := s.expr(n.X)
2936 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2937 if !n.NonNil() {
2938
2939
2940
2941 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2942 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2943 ptr = s.ternary(cond, ptr, zerobase)
2944 }
2945 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2946 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2947 case ir.OCFUNC:
2948 n := n.(*ir.UnaryExpr)
2949 aux := n.X.(*ir.Name).Linksym()
2950
2951
2952 if aux.ABI() != obj.ABIInternal {
2953 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2954 }
2955 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2956 case ir.ONAME:
2957 n := n.(*ir.Name)
2958 if n.Class == ir.PFUNC {
2959
2960 sym := staticdata.FuncLinksym(n)
2961 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2962 }
2963 if s.canSSA(n) {
2964 return s.variable(n, n.Type())
2965 }
2966 return s.load(n.Type(), s.addr(n))
2967 case ir.OLINKSYMOFFSET:
2968 n := n.(*ir.LinksymOffsetExpr)
2969 return s.load(n.Type(), s.addr(n))
2970 case ir.ONIL:
2971 n := n.(*ir.NilExpr)
2972 t := n.Type()
2973 switch {
2974 case t.IsSlice():
2975 return s.constSlice(t)
2976 case t.IsInterface():
2977 return s.constInterface(t)
2978 default:
2979 return s.constNil(t)
2980 }
2981 case ir.OLITERAL:
2982 switch u := n.Val(); u.Kind() {
2983 case constant.Int:
2984 i := ir.IntVal(n.Type(), u)
2985 switch n.Type().Size() {
2986 case 1:
2987 return s.constInt8(n.Type(), int8(i))
2988 case 2:
2989 return s.constInt16(n.Type(), int16(i))
2990 case 4:
2991 return s.constInt32(n.Type(), int32(i))
2992 case 8:
2993 return s.constInt64(n.Type(), i)
2994 default:
2995 s.Fatalf("bad integer size %d", n.Type().Size())
2996 return nil
2997 }
2998 case constant.String:
2999 i := constant.StringVal(u)
3000 if i == "" {
3001 return s.constEmptyString(n.Type())
3002 }
3003 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3004 case constant.Bool:
3005 return s.constBool(constant.BoolVal(u))
3006 case constant.Float:
3007 f, _ := constant.Float64Val(u)
3008 switch n.Type().Size() {
3009 case 4:
3010 return s.constFloat32(n.Type(), f)
3011 case 8:
3012 return s.constFloat64(n.Type(), f)
3013 default:
3014 s.Fatalf("bad float size %d", n.Type().Size())
3015 return nil
3016 }
3017 case constant.Complex:
3018 re, _ := constant.Float64Val(constant.Real(u))
3019 im, _ := constant.Float64Val(constant.Imag(u))
3020 switch n.Type().Size() {
3021 case 8:
3022 pt := types.Types[types.TFLOAT32]
3023 return s.newValue2(ssa.OpComplexMake, n.Type(),
3024 s.constFloat32(pt, re),
3025 s.constFloat32(pt, im))
3026 case 16:
3027 pt := types.Types[types.TFLOAT64]
3028 return s.newValue2(ssa.OpComplexMake, n.Type(),
3029 s.constFloat64(pt, re),
3030 s.constFloat64(pt, im))
3031 default:
3032 s.Fatalf("bad complex size %d", n.Type().Size())
3033 return nil
3034 }
3035 default:
3036 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3037 return nil
3038 }
3039 case ir.OCONVNOP:
3040 n := n.(*ir.ConvExpr)
3041 to := n.Type()
3042 from := n.X.Type()
3043
3044
3045
3046 x := s.expr(n.X)
3047 if to == from {
3048 return x
3049 }
3050
3051
3052
3053
3054
3055 if to.IsPtrShaped() != from.IsPtrShaped() {
3056 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3057 }
3058
3059 v := s.newValue1(ssa.OpCopy, to, x)
3060
3061
3062 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3063 return v
3064 }
3065
3066
3067 if from.Kind() == to.Kind() {
3068 return v
3069 }
3070
3071
3072 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3073 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3074 s.checkPtrAlignment(n, v, nil)
3075 }
3076 return v
3077 }
3078
3079
3080 var mt *types.Type
3081 if buildcfg.Experiment.SwissMap {
3082 mt = types.NewPtr(reflectdata.SwissMapType())
3083 } else {
3084 mt = types.NewPtr(reflectdata.OldMapType())
3085 }
3086 if to.Kind() == types.TMAP && from == mt {
3087 return v
3088 }
3089
3090 types.CalcSize(from)
3091 types.CalcSize(to)
3092 if from.Size() != to.Size() {
3093 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3094 return nil
3095 }
3096 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3097 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3098 return nil
3099 }
3100
3101 if base.Flag.Cfg.Instrumenting {
3102
3103
3104
3105 return v
3106 }
3107
3108 if etypesign(from.Kind()) == 0 {
3109 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3110 return nil
3111 }
3112
3113
3114 return v
3115
3116 case ir.OCONV:
3117 n := n.(*ir.ConvExpr)
3118 x := s.expr(n.X)
3119 return s.conv(n, x, n.X.Type(), n.Type())
3120
3121 case ir.ODOTTYPE:
3122 n := n.(*ir.TypeAssertExpr)
3123 res, _ := s.dottype(n, false)
3124 return res
3125
3126 case ir.ODYNAMICDOTTYPE:
3127 n := n.(*ir.DynamicTypeAssertExpr)
3128 res, _ := s.dynamicDottype(n, false)
3129 return res
3130
3131
3132 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3133 n := n.(*ir.BinaryExpr)
3134 a := s.expr(n.X)
3135 b := s.expr(n.Y)
3136 if n.X.Type().IsComplex() {
3137 pt := types.FloatForComplex(n.X.Type())
3138 op := s.ssaOp(ir.OEQ, pt)
3139 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3140 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3141 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3142 switch n.Op() {
3143 case ir.OEQ:
3144 return c
3145 case ir.ONE:
3146 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3147 default:
3148 s.Fatalf("ordered complex compare %v", n.Op())
3149 }
3150 }
3151
3152
3153 op := n.Op()
3154 switch op {
3155 case ir.OGE:
3156 op, a, b = ir.OLE, b, a
3157 case ir.OGT:
3158 op, a, b = ir.OLT, b, a
3159 }
3160 if n.X.Type().IsFloat() {
3161
3162 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3163 }
3164
3165 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3166 case ir.OMUL:
3167 n := n.(*ir.BinaryExpr)
3168 a := s.expr(n.X)
3169 b := s.expr(n.Y)
3170 if n.Type().IsComplex() {
3171 mulop := ssa.OpMul64F
3172 addop := ssa.OpAdd64F
3173 subop := ssa.OpSub64F
3174 pt := types.FloatForComplex(n.Type())
3175 wt := types.Types[types.TFLOAT64]
3176
3177 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3178 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3179 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3180 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3181
3182 if pt != wt {
3183 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3184 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3185 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3186 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3187 }
3188
3189 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3190 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3191
3192 if pt != wt {
3193 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3194 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3195 }
3196
3197 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3198 }
3199
3200 if n.Type().IsFloat() {
3201 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3202 }
3203
3204 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3205
3206 case ir.ODIV:
3207 n := n.(*ir.BinaryExpr)
3208 a := s.expr(n.X)
3209 b := s.expr(n.Y)
3210 if n.Type().IsComplex() {
3211
3212
3213
3214 mulop := ssa.OpMul64F
3215 addop := ssa.OpAdd64F
3216 subop := ssa.OpSub64F
3217 divop := ssa.OpDiv64F
3218 pt := types.FloatForComplex(n.Type())
3219 wt := types.Types[types.TFLOAT64]
3220
3221 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3222 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3223 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3224 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3225
3226 if pt != wt {
3227 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3228 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3229 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3230 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3231 }
3232
3233 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3234 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3235 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3236
3237
3238
3239
3240
3241 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3242 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3243
3244 if pt != wt {
3245 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3246 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3247 }
3248 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3249 }
3250 if n.Type().IsFloat() {
3251 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3252 }
3253 return s.intDivide(n, a, b)
3254 case ir.OMOD:
3255 n := n.(*ir.BinaryExpr)
3256 a := s.expr(n.X)
3257 b := s.expr(n.Y)
3258 return s.intDivide(n, a, b)
3259 case ir.OADD, ir.OSUB:
3260 n := n.(*ir.BinaryExpr)
3261 a := s.expr(n.X)
3262 b := s.expr(n.Y)
3263 if n.Type().IsComplex() {
3264 pt := types.FloatForComplex(n.Type())
3265 op := s.ssaOp(n.Op(), pt)
3266 return s.newValue2(ssa.OpComplexMake, n.Type(),
3267 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3268 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3269 }
3270 if n.Type().IsFloat() {
3271 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3272 }
3273 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3274 case ir.OAND, ir.OOR, ir.OXOR:
3275 n := n.(*ir.BinaryExpr)
3276 a := s.expr(n.X)
3277 b := s.expr(n.Y)
3278 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3279 case ir.OANDNOT:
3280 n := n.(*ir.BinaryExpr)
3281 a := s.expr(n.X)
3282 b := s.expr(n.Y)
3283 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3284 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3285 case ir.OLSH, ir.ORSH:
3286 n := n.(*ir.BinaryExpr)
3287 a := s.expr(n.X)
3288 b := s.expr(n.Y)
3289 bt := b.Type
3290 if bt.IsSigned() {
3291 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3292 s.check(cmp, ir.Syms.Panicshift)
3293 bt = bt.ToUnsigned()
3294 }
3295 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3296 case ir.OANDAND, ir.OOROR:
3297
3298
3299
3300
3301
3302
3303
3304
3305
3306
3307
3308
3309
3310 n := n.(*ir.LogicalExpr)
3311 el := s.expr(n.X)
3312 s.vars[n] = el
3313
3314 b := s.endBlock()
3315 b.Kind = ssa.BlockIf
3316 b.SetControl(el)
3317
3318
3319
3320
3321
3322 bRight := s.f.NewBlock(ssa.BlockPlain)
3323 bResult := s.f.NewBlock(ssa.BlockPlain)
3324 if n.Op() == ir.OANDAND {
3325 b.AddEdgeTo(bRight)
3326 b.AddEdgeTo(bResult)
3327 } else if n.Op() == ir.OOROR {
3328 b.AddEdgeTo(bResult)
3329 b.AddEdgeTo(bRight)
3330 }
3331
3332 s.startBlock(bRight)
3333 er := s.expr(n.Y)
3334 s.vars[n] = er
3335
3336 b = s.endBlock()
3337 b.AddEdgeTo(bResult)
3338
3339 s.startBlock(bResult)
3340 return s.variable(n, types.Types[types.TBOOL])
3341 case ir.OCOMPLEX:
3342 n := n.(*ir.BinaryExpr)
3343 r := s.expr(n.X)
3344 i := s.expr(n.Y)
3345 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3346
3347
3348 case ir.ONEG:
3349 n := n.(*ir.UnaryExpr)
3350 a := s.expr(n.X)
3351 if n.Type().IsComplex() {
3352 tp := types.FloatForComplex(n.Type())
3353 negop := s.ssaOp(n.Op(), tp)
3354 return s.newValue2(ssa.OpComplexMake, n.Type(),
3355 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3356 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3357 }
3358 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3359 case ir.ONOT, ir.OBITNOT:
3360 n := n.(*ir.UnaryExpr)
3361 a := s.expr(n.X)
3362 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3363 case ir.OIMAG, ir.OREAL:
3364 n := n.(*ir.UnaryExpr)
3365 a := s.expr(n.X)
3366 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3367 case ir.OPLUS:
3368 n := n.(*ir.UnaryExpr)
3369 return s.expr(n.X)
3370
3371 case ir.OADDR:
3372 n := n.(*ir.AddrExpr)
3373 return s.addr(n.X)
3374
3375 case ir.ORESULT:
3376 n := n.(*ir.ResultExpr)
3377 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3378 panic("Expected to see a previous call")
3379 }
3380 which := n.Index
3381 if which == -1 {
3382 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3383 }
3384 return s.resultOfCall(s.prevCall, which, n.Type())
3385
3386 case ir.ODEREF:
3387 n := n.(*ir.StarExpr)
3388 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3389 return s.load(n.Type(), p)
3390
3391 case ir.ODOT:
3392 n := n.(*ir.SelectorExpr)
3393 if n.X.Op() == ir.OSTRUCTLIT {
3394
3395
3396
3397 if !ir.IsZero(n.X) {
3398 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3399 }
3400 return s.zeroVal(n.Type())
3401 }
3402
3403
3404
3405
3406 if ir.IsAddressable(n) && !s.canSSA(n) {
3407 p := s.addr(n)
3408 return s.load(n.Type(), p)
3409 }
3410 v := s.expr(n.X)
3411 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3412
3413 case ir.ODOTPTR:
3414 n := n.(*ir.SelectorExpr)
3415 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3416 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3417 return s.load(n.Type(), p)
3418
3419 case ir.OINDEX:
3420 n := n.(*ir.IndexExpr)
3421 switch {
3422 case n.X.Type().IsString():
3423 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3424
3425
3426
3427 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3428 }
3429 a := s.expr(n.X)
3430 i := s.expr(n.Index)
3431 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3432 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3433 ptrtyp := s.f.Config.Types.BytePtr
3434 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3435 if ir.IsConst(n.Index, constant.Int) {
3436 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3437 } else {
3438 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3439 }
3440 return s.load(types.Types[types.TUINT8], ptr)
3441 case n.X.Type().IsSlice():
3442 p := s.addr(n)
3443 return s.load(n.X.Type().Elem(), p)
3444 case n.X.Type().IsArray():
3445 if ssa.CanSSA(n.X.Type()) {
3446
3447 bound := n.X.Type().NumElem()
3448 a := s.expr(n.X)
3449 i := s.expr(n.Index)
3450 if bound == 0 {
3451
3452
3453 z := s.constInt(types.Types[types.TINT], 0)
3454 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3455
3456
3457 return s.zeroVal(n.Type())
3458 }
3459 len := s.constInt(types.Types[types.TINT], bound)
3460 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3461 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3462 }
3463 p := s.addr(n)
3464 return s.load(n.X.Type().Elem(), p)
3465 default:
3466 s.Fatalf("bad type for index %v", n.X.Type())
3467 return nil
3468 }
3469
3470 case ir.OLEN, ir.OCAP:
3471 n := n.(*ir.UnaryExpr)
3472 switch {
3473 case n.X.Type().IsSlice():
3474 op := ssa.OpSliceLen
3475 if n.Op() == ir.OCAP {
3476 op = ssa.OpSliceCap
3477 }
3478 return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
3479 case n.X.Type().IsString():
3480 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
3481 case n.X.Type().IsMap(), n.X.Type().IsChan():
3482 return s.referenceTypeBuiltin(n, s.expr(n.X))
3483 default:
3484 return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
3485 }
3486
3487 case ir.OSPTR:
3488 n := n.(*ir.UnaryExpr)
3489 a := s.expr(n.X)
3490 if n.X.Type().IsSlice() {
3491 if n.Bounded() {
3492 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3493 }
3494 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3495 } else {
3496 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3497 }
3498
3499 case ir.OITAB:
3500 n := n.(*ir.UnaryExpr)
3501 a := s.expr(n.X)
3502 return s.newValue1(ssa.OpITab, n.Type(), a)
3503
3504 case ir.OIDATA:
3505 n := n.(*ir.UnaryExpr)
3506 a := s.expr(n.X)
3507 return s.newValue1(ssa.OpIData, n.Type(), a)
3508
3509 case ir.OMAKEFACE:
3510 n := n.(*ir.BinaryExpr)
3511 tab := s.expr(n.X)
3512 data := s.expr(n.Y)
3513 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3514
3515 case ir.OSLICEHEADER:
3516 n := n.(*ir.SliceHeaderExpr)
3517 p := s.expr(n.Ptr)
3518 l := s.expr(n.Len)
3519 c := s.expr(n.Cap)
3520 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3521
3522 case ir.OSTRINGHEADER:
3523 n := n.(*ir.StringHeaderExpr)
3524 p := s.expr(n.Ptr)
3525 l := s.expr(n.Len)
3526 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3527
3528 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3529 n := n.(*ir.SliceExpr)
3530 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3531 v := s.exprCheckPtr(n.X, !check)
3532 var i, j, k *ssa.Value
3533 if n.Low != nil {
3534 i = s.expr(n.Low)
3535 }
3536 if n.High != nil {
3537 j = s.expr(n.High)
3538 }
3539 if n.Max != nil {
3540 k = s.expr(n.Max)
3541 }
3542 p, l, c := s.slice(v, i, j, k, n.Bounded())
3543 if check {
3544
3545 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3546 }
3547 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3548
3549 case ir.OSLICESTR:
3550 n := n.(*ir.SliceExpr)
3551 v := s.expr(n.X)
3552 var i, j *ssa.Value
3553 if n.Low != nil {
3554 i = s.expr(n.Low)
3555 }
3556 if n.High != nil {
3557 j = s.expr(n.High)
3558 }
3559 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3560 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3561
3562 case ir.OSLICE2ARRPTR:
3563
3564
3565
3566
3567 n := n.(*ir.ConvExpr)
3568 v := s.expr(n.X)
3569 nelem := n.Type().Elem().NumElem()
3570 arrlen := s.constInt(types.Types[types.TINT], nelem)
3571 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3572 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3573 op := ssa.OpSlicePtr
3574 if nelem == 0 {
3575 op = ssa.OpSlicePtrUnchecked
3576 }
3577 return s.newValue1(op, n.Type(), v)
3578
3579 case ir.OCALLFUNC:
3580 n := n.(*ir.CallExpr)
3581 if ir.IsIntrinsicCall(n) {
3582 return s.intrinsicCall(n)
3583 }
3584 fallthrough
3585
3586 case ir.OCALLINTER:
3587 n := n.(*ir.CallExpr)
3588 return s.callResult(n, callNormal)
3589
3590 case ir.OGETG:
3591 n := n.(*ir.CallExpr)
3592 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3593
3594 case ir.OGETCALLERSP:
3595 n := n.(*ir.CallExpr)
3596 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3597
3598 case ir.OAPPEND:
3599 return s.append(n.(*ir.CallExpr), false)
3600
3601 case ir.OMIN, ir.OMAX:
3602 return s.minMax(n.(*ir.CallExpr))
3603
3604 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3605
3606
3607
3608 n := n.(*ir.CompLitExpr)
3609 if !ir.IsZero(n) {
3610 s.Fatalf("literal with nonzero value in SSA: %v", n)
3611 }
3612 return s.zeroVal(n.Type())
3613
3614 case ir.ONEW:
3615 n := n.(*ir.UnaryExpr)
3616 var rtype *ssa.Value
3617 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3618 rtype = s.expr(x.RType)
3619 }
3620 return s.newObject(n.Type().Elem(), rtype)
3621
3622 case ir.OUNSAFEADD:
3623 n := n.(*ir.BinaryExpr)
3624 ptr := s.expr(n.X)
3625 len := s.expr(n.Y)
3626
3627
3628
3629 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3630
3631 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3632
3633 default:
3634 s.Fatalf("unhandled expr %v", n.Op())
3635 return nil
3636 }
3637 }
3638
3639 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3640 aux := c.Aux.(*ssa.AuxCall)
3641 pa := aux.ParamAssignmentForResult(which)
3642
3643
3644 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3645 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3646 return s.rawLoad(t, addr)
3647 }
3648 return s.newValue1I(ssa.OpSelectN, t, which, c)
3649 }
3650
3651 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3652 aux := c.Aux.(*ssa.AuxCall)
3653 pa := aux.ParamAssignmentForResult(which)
3654 if len(pa.Registers) == 0 {
3655 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3656 }
3657 _, addr := s.temp(c.Pos, t)
3658 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3659 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3660 return addr
3661 }
3662
3663
3664
3665
3666
3667
3668
3669
3670
// append converts an OAPPEND node into SSA.
//
// If inplace is false, it returns the SSA value for the appended-to slice
// (the ordinary `r = append(s, e1, ...)` form).
// If inplace is true, it writes the result directly back through the
// address of the first argument (the `s = append(s, e1, ...)` form handled
// by the statement path) and returns nil.
//
// Only fixed-count appends are handled here; `append(x, y...)` is lowered
// elsewhere. The generated code is roughly:
//
//	ptr, len, cap := s
//	len += nargs
//	if uint(cap) < uint(len) {
//		ptr, len, cap = growslice(ptr, len, cap, nargs, typ)
//		// (inplace: also store the new ptr and cap back now)
//	}
//	// (inplace: store the new len back)
//	*(ptr+(len-nargs+i)) = e_i   // for each argument
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate slice being appended to.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		// In-place form: we need the slice's address so we can store back.
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// grow handles the capacity-exceeded case; assign stores the new elements.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the input slice into its three words.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// New length = old length + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Decide whether to grow: unsigned compare uint(cap) < uint(newlen).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record values of ptr/len/cap before branching so phis can be built
	// at the join point (assign block).
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the rare path
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Call growslice to get a bigger backing store.
	s.startBlock(grow)
	taddr := s.expr(n.Fun) // element type descriptor for growslice
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

	// Decompose the grown slice.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to build a new slice in place.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Store the new cap and ptr back immediately (len is stored at the
		// join point below, once, for both the grown and non-grown paths).
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// assign: join point; ptr/len(/cap) become phis of the two paths.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Update the slice's length in memory.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments to append. SSA-able values are held directly
	// (store==true); others are represented by their address and moved.
	type argRec struct {
		v *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the new elements at indices oldLen, oldLen+1, ...
	oldLen := s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// Clean up the fake variables used for the phis.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// In-place append has already stored everything through addr.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
3836
3837
// minMax converts an OMIN/OMAX builtin call into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// The builtins are variadic; fold reduces all the arguments pairwise
	// with op, left to right.
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats can't use a simple compare-and-choose (NaNs and signed
		// zeros need special handling), so use a hardware min/max
		// instruction where the target has one, otherwise a runtime
		// helper. Strings always go through a runtime helper.
		if typ.IsFloat() {
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// Pick the runtime helper matching the type and operation.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// riscv64 with GORISCV64 >= 22 has 64-bit integer min/max
		// instructions.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic integer path: compare-and-choose via ternary.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
3951
3952
3953 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
3954
3955
3956 ternaryVar := ssaMarker("ternary")
3957
3958 bThen := s.f.NewBlock(ssa.BlockPlain)
3959 bElse := s.f.NewBlock(ssa.BlockPlain)
3960 bEnd := s.f.NewBlock(ssa.BlockPlain)
3961
3962 b := s.endBlock()
3963 b.Kind = ssa.BlockIf
3964 b.SetControl(cond)
3965 b.AddEdgeTo(bThen)
3966 b.AddEdgeTo(bElse)
3967
3968 s.startBlock(bThen)
3969 s.vars[ternaryVar] = x
3970 s.endBlock().AddEdgeTo(bEnd)
3971
3972 s.startBlock(bElse)
3973 s.vars[ternaryVar] = y
3974 s.endBlock().AddEdgeTo(bEnd)
3975
3976 s.startBlock(bEnd)
3977 r := s.variable(ternaryVar, x.Type)
3978 delete(s.vars, ternaryVar)
3979 return r
3980 }
3981
3982
3983
3984
3985
// condBranch evaluates the boolean expression cond and branches to yes
// if the result is true and to no if the result is false.
// This function is intended to handle && and || better than just calling
// s.expr(cond) and branching on the result: short-circuit operators become
// chained branches instead of materialized booleans.
// likely is the branch prediction hint: >0 means the yes edge is likely,
// <0 means unlikely, 0 means no opinion.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// x && y: branch on x to mid (then test y) or directly to no.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: if likely==1, then both recursive calls pass 1.
		// If likely==-1, then we don't have enough information to decide
		// whether the first branch is likely or not. So we pass 0 for
		// the likeliness of the first branch.
		// (The same reasoning, mirrored, applies to the OOROR case below.)
	case ir.OOROR:
		// x || y: branch on x directly to yes, or to mid (then test y).
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: if likely==-1, then both recursive calls pass -1.
		// If likely==1, we don't have enough info to decide the
		// likelihood of the first branch.
	case ir.ONOT:
		// !x: swap the targets and invert the prediction.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// No-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate the condition and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely) // gc and ssa both use -1/0/+1 for likeliness
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
4032
// skipMask is a bitmask indicating which parts of a compound value
// (the pointer/len/cap words of a slice) the caller has determined
// need not be stored.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // pointer word may be skipped
	skipLen                      // length word may be skipped
	skipCap                      // capacity word may be skipped
)
4040
4041
4042
4043
4044
4045
4046
// assign does left = right.
// right has already been evaluated to SSA, left has not.
// If deref is true, then we do left = *right instead (and right has already been nil-checked).
// If deref is true and right == nil, just do left = 0.
// skip indicates assignments (at the top level) that can be avoided.
// Left and right are assumed not to overlap; use assignWhichMayOverlap otherwise.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap is assign with an extra flag: mayOverlap indicates
// whether left and right might partially overlap in memory (relevant only
// for the deref/mem-to-mem move case).
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		return // assignments to _ are discarded
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// We're assigning to a field of an ssa-able value.
			// We need to build a new structure with the new value for the
			// field we're assigning and the old values for the other fields.
			// For instance:
			//   type T struct {a, b, c int}
			//   var x T
			//   x.b = 5
			// For the x.b = 5 assignment we generate x = T{x.a, 5, x.c}.

			// Grab information about the structure type.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Grab old value of structure.
			old := s.expr(left.X)

			// Make new structure.
			new := s.newValue0(ssa.OpStructMake, t)

			// Add fields as args: the updated field gets right, the
			// others are selected out of the old value.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the new value we've made to the base of the dot op.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()
			// We're assigning to an element of an ssa-able array.
			// a[i] = v
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index) // index
			if n == 0 {
				// The bounds check must fail. Might as well
				// ignore the actual index and just use zeros.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Rewrite to a = [1]{v}.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false) // checks i == 0
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Update variable assignment.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire local variable, then emit
	// OpVarDef so liveness analysis knows the variable is redefined.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Left is not ssa-able. Compute its address.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Package unsafe's documentation says storing pointers into
		// reflect.SliceHeader and reflect.StringHeader's Data fields
		// is valid, even though they have type uintptr (#19168).
		// Mark it pointer type to signal the writebarrier pass to
		// insert a write barrier.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Treat as a mem->mem move.
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Treat as a store.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4158
4159
4160 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4161 switch {
4162 case t.IsInteger():
4163 switch t.Size() {
4164 case 1:
4165 return s.constInt8(t, 0)
4166 case 2:
4167 return s.constInt16(t, 0)
4168 case 4:
4169 return s.constInt32(t, 0)
4170 case 8:
4171 return s.constInt64(t, 0)
4172 default:
4173 s.Fatalf("bad sized integer type %v", t)
4174 }
4175 case t.IsFloat():
4176 switch t.Size() {
4177 case 4:
4178 return s.constFloat32(t, 0)
4179 case 8:
4180 return s.constFloat64(t, 0)
4181 default:
4182 s.Fatalf("bad sized float type %v", t)
4183 }
4184 case t.IsComplex():
4185 switch t.Size() {
4186 case 8:
4187 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4188 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4189 case 16:
4190 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4191 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4192 default:
4193 s.Fatalf("bad sized complex type %v", t)
4194 }
4195
4196 case t.IsString():
4197 return s.constEmptyString(t)
4198 case t.IsPtrShaped():
4199 return s.constNil(t)
4200 case t.IsBoolean():
4201 return s.constBool(false)
4202 case t.IsInterface():
4203 return s.constInterface(t)
4204 case t.IsSlice():
4205 return s.constSlice(t)
4206 case t.IsStruct():
4207 n := t.NumFields()
4208 v := s.entryNewValue0(ssa.OpStructMake, t)
4209 for i := 0; i < n; i++ {
4210 v.AddArg(s.zeroVal(t.FieldType(i)))
4211 }
4212 return v
4213 case t.IsArray():
4214 switch t.NumElem() {
4215 case 0:
4216 return s.entryNewValue0(ssa.OpArrayMake0, t)
4217 case 1:
4218 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4219 }
4220 }
4221 s.Fatalf("zero for type %v not implemented", t)
4222 return nil
4223 }
4224
// callKind distinguishes the different ways a call is lowered.
type callKind int8

const (
	callNormal callKind = iota // ordinary function call
	callDefer                  // defer statement
	callDeferStack             // defer statement with the defer record on the stack
	callGo                     // go statement
	callTail                   // tail call
)
4234
// sfRtCallDef describes a runtime soft-float helper: its function symbol
// and the Go kind of its result.
type sfRtCallDef struct {
	rtfn *obj.LSym
	rtype types.Kind
}

// softFloatOps maps float SSA ops to the runtime helpers that emulate
// them on SoftFloat targets. Populated by softfloatInit.
var softFloatOps map[ssa.Op]sfRtCallDef
4241
// softfloatInit populates softFloatOps, the table mapping float SSA ops
// to the runtime routines that emulate them on SoftFloat targets.
//
// Several entries deliberately reuse a helper for a different op; sfcall
// compensates when emitting the call:
//   - Sub maps to fadd: sfcall negates the second operand first.
//   - Neq maps to feq: sfcall negates the boolean result.
//   - Less/Leq map to fgt/fge: sfcall swaps the two operands.
func softfloatInit() {
	// Some of these operations get transformed by sfcall.
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		ssa.OpEq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F: {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F: {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		ssa.OpCvt32to32F: {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32: {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F: {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64: {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F: {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32: {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F: {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64: {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4279
4280
4281
// sfcall converts a call to a floating point emulation runtime function
// (for SoftFloat targets). It returns the resulting value and true if op
// has a soft-float implementation in softFloatOps, or (nil, false) otherwise.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the same-sized unsigned integer type;
	// the runtime helpers take and return raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// "x < y" / "x <= y" become "y > x" / "y >= x" to match the
			// fgt/fge helpers in softFloatOps.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// "x - y" becomes "x + (-y)" to match the fadd helpers.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Reinterpret float arguments as integer bit patterns for the
		// runtime call.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Reinterpret the returned integer bits as a float.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// Neq is implemented as !Eq.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4325
4326
4327 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4328 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4329 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4330 return p0, p1
4331 }
4332
4333
4334 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4335 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4336 if ssa.IntrinsicsDebug > 0 {
4337 x := v
4338 if x == nil {
4339 x = s.mem()
4340 }
4341 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4342 x = x.Args[0]
4343 }
4344 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4345 }
4346 return v
4347 }
4348
4349
4350 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4351 args := make([]*ssa.Value, len(n.Args))
4352 for i, n := range n.Args {
4353 args[i] = s.expr(n)
4354 }
4355 return args
4356 }
4357
4358
4359
4360
4361
4362
4363
// openDeferRecord adds code to evaluate and store the function for an
// open-coded defer call, and records info about the defer so proper exit
// code can be generated later by openDeferExit. Open-coded defers take no
// arguments and return no results.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun
	// The deferred function value is always stored in a stack slot
	// (openDeferSave). For a static function (ONAME/PFUNC) we leave
	// opendefer.closure nil so the exit code can call it directly.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Update deferBits only after evaluation and storage to stack of
	// the deferred function is successful: set the bit for this defer,
	// both in the SSA variable and in its stack slot.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4392
4393
4394
4395
4396
4397
// openDeferSave generates SSA nodes to store a value (with type t) for an
// open-coded defer at an explicit autotmp location on the stack, so it can
// be reloaded and used for the appropriate call on exit. Type t must be
// SSA-able and must contain pointers (it is a function value).
// It returns the address of the stack slot.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// FrameOffset temporarily holds the defer's index; the real frame
	// offset is assigned later (the slot is marked OpenDeferSlot above).
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// Force the tmp storing this defer function to be declared in the entry
	// block, so that it will be live for the defer exit code (which will
	// actually access it only if the associated defer call has been activated).
	if s.curBlock.ID != s.f.Entry.ID {
		// Splice the VarDef/VarLive/LocalAddr directly into the entry
		// block's memory chain.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Special case: still in the entry block, so we can't use the
		// code above (s.defvars[s.f.Entry.ID] isn't defined until the
		// entry block is ended); emit through the current memory state.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// Since we may use this temp during exit depending on the
	// deferBits, we must define it unconditionally on entry.
	// Therefore, we must make sure it is zeroed out in the entry
	// block if it contains pointers, else GC may wrongly follow an
	// uninitialized pointer value.
	temp.SetNeedzero(true)

	// Store the deferred function value into the slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
4442
4443
4444
4445
4446
// openDeferExit generates SSA for processing all the open-coded defers at
// exit. The code involves loading deferBits and testing each bit to see if
// the corresponding defer statement was executed; active defers are run in
// reverse order of recording.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Remember this exit block so later returns can reuse it.
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Test for and run defers in reverse order.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain) // runs defer i if its bit is set
		bEnd := s.f.NewBlock(ssa.BlockPlain)  // continues to the next defer

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Generate code to check if the bit associated with the current
		// defer is set: (deferBits & (1<<i)) == 0 skips the call.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this bit in deferBits and force store back to stack, so
		// if the deferred call panics, this defer is not re-run when the
		// remaining defers are processed during panic recovery.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)
		// Use this value for following tests, so we keep previous
		// bits cleared.
		s.vars[deferBitsVar] = maskedval

		// Generate code to call the deferred function, either through the
		// saved closure value or directly for a static function.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Closure call: load the saved closure pointer, nil-check it,
			// and call through its code pointer.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Static call to the known function symbol.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the closure's stack slot marked live past the call, so its
		// pointers stay visible to the GC while the defer may still run.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4516
// callResult lowers call n of kind k and returns the call's result value.
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4520
// callAddr lowers call n of kind k and returns the address of the call's result.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4524
4525
4526
// call generates SSA for a function/method call of flavor k (normal,
// tail, go, defer, or defer-on-stack) and returns the call's first
// result value, or nil if there is none or k != callNormal.
//
//   - returnResultAddr: if set, return the address of the first result
//     instead of its value.
//   - deferExtra: if non-nil, an extra argument passed through to the
//     deferprocat runtime entry point.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target function (if static call)
	var closure *ssa.Value   // ptr to closure to run (if dynamic call)
	var codeptr *ssa.Value   // ptr to target code (if dynamic call)
	var dextra *ssa.Value    // defer extra arg (deferprocat)
	var rcvr *ssa.Value      // receiver to set (interface calls)
	fn := n.Fun
	var ACArgs []*types.Type    // AuxCall args
	var ACResults []*types.Type // AuxCall results
	var callArgs []*ssa.Value   // For late-expansion, the args themselves (not stored, args to the call instead).

	callABI := s.f.ABIDefault

	// go/defer statements may not carry arguments or results; those are
	// rewritten away before SSA generation.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be a direct call to a
				// non-ABIInternal function. fn.Func may be nil for some
				// compiler-generated functions; those are all ABIInternal,
				// so the default ABI stands in that case.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// Pre-regabi: honor the //go:registerparams pragma whether
				// the callee is imported or in the same package.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			// runtime.deferrangefunc behaves like a defer: it must get the
			// BlockDefer CFG shape built below.
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// Deferred nil function needs to panic when the function is
			// invoked, not at the point of the defer statement, so no
			// check is emitted here for defers.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			// Direct interface call: load the code pointer now.
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			// go/defer of an interface method: treat the itab slot
			// address like a closure.
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set (register) names from caller side */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a defer struct on the stack and store the function into it.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // deferprocStack takes one pointer arg
	} else {
		// Arguments are gathered in SP-offset order: defer/go extras,
		// then receiver, then regular args.
		argStart := base.Ctxt.Arch.FixedFrameSize

		if k != callNormal && k != callTail {
			// Write closure (first arg to newproc/deferproc).
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface-typed token for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// Split the entry block when open-coded defers are present so
		// that later defer bookkeeping does not end up interleaved with
		// the entry block's argument evaluation; this keeps the memory
		// state seen by the call consistent.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Pick the call target and build the (late-expansion) call op.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat // defer with an extra argument
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a closure is
			// always safe, but the sanitizer-safety analysis can't always
			// figure that out, and it's critical that we not clobber any
			// arguments already stored onto the stack.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call: the receiver is already the first input param.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call reuses the caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // call ops carry the callee's arg size
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)

	// Insert VarLive opcodes for variables that must be kept alive
	// across the call (e.g. for runtime.KeepAlive).
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Finish the block for defers: a BlockDefer has two successors, the
	// normal fall-through and the shared deferreturn exit (recover path).
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn // share a single deferreturn exit block
		if r == nil {
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // recover edge to exit code
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// call has no return value; continue with the next statement.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
4760
4761
4762
// maybeNilCheckClosure checks if a nil check of the closure pointer is
// needed on the current architecture for call kind k and, if so, emits it.
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	// Note precedence: this is Wasm || (aix && k != callGo).
	// On AIX the closure must be verified since fn can be nil, except
	// for "go" calls, which the runtime handles itself.
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		s.nilCheck(closure)
	}
}
4770
4771
4772
4773 func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
4774 i := s.expr(fn.X)
4775 itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
4776 s.nilCheck(itab)
4777 itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
4778 closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
4779 rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
4780 return closure, rcvr
4781 }
4782
4783
4784
4785 func etypesign(e types.Kind) int8 {
4786 switch e {
4787 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
4788 return -1
4789 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
4790 return +1
4791 }
4792 return 0
4793 }
4794
4795
4796
// addr converts the address of the expression n to SSA, adds it to s,
// and returns the SSA result. The returned value is guaranteed non-nil.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	// Taking the address of an SSA-able value is a compiler bug: such
	// values never live in memory.
	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset returns the address of lsym plus offset, computed in
	// the entry block so it can be freely reused.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address is held in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // like PAUTO, but always counts as a statement boundary
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// result slot of the most recent call
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array: index off the array's own address
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		// The assertion's result was materialized as a load; return the
		// address it was loaded from, provided memory hasn't moved on.
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
4908
4909
4910
4911 func (s *state) canSSA(n ir.Node) bool {
4912 if base.Flag.N != 0 {
4913 return false
4914 }
4915 for {
4916 nn := n
4917 if nn.Op() == ir.ODOT {
4918 nn := nn.(*ir.SelectorExpr)
4919 n = nn.X
4920 continue
4921 }
4922 if nn.Op() == ir.OINDEX {
4923 nn := nn.(*ir.IndexExpr)
4924 if nn.X.Type().IsArray() {
4925 n = nn.X
4926 continue
4927 }
4928 }
4929 break
4930 }
4931 if n.Op() != ir.ONAME {
4932 return false
4933 }
4934 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
4935 }
4936
4937 func (s *state) canSSAName(name *ir.Name) bool {
4938 if name.Addrtaken() || !name.OnStack() {
4939 return false
4940 }
4941 switch name.Class {
4942 case ir.PPARAMOUT:
4943 if s.hasdefer {
4944
4945
4946
4947
4948
4949 return false
4950 }
4951 if s.cgoUnsafeArgs {
4952
4953
4954 return false
4955 }
4956 }
4957 return true
4958
4959 }
4960
4961
4962 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
4963 p := s.expr(n)
4964 if bounded || n.NonNil() {
4965 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
4966 s.f.Warnl(lineno, "removed nil check")
4967 }
4968 return p
4969 }
4970 p = s.nilCheck(p)
4971 return p
4972 }
4973
4974
4975
4976
4977
4978
// nilCheck generates nil-pointer checking code for ptr. It is used only
// for automatically inserted nil checks, not for user code like
// 'x != nil'. It returns a "definitely not nil" copy of ptr so that
// later uses are ordered after the check.
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	// Checks can be disabled globally (-d=disablenil) or per-function.
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
4985
4986
4987
4988
4989
4990
4991
// boundsCheck generates bounds-checking code: it verifies
// 0 <= idx < len (for indexing) or 0 <= idx <= len (for slicing),
// branching to a panic block otherwise. It returns the index extended
// to full width, which is guaranteed in-range after the check.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// The access is provably in bounds, or bounds checking is
		// disabled (-B). The index still had to be extended above so
		// later passes see a full-width, range-annotated value.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Use the unsigned flavor of the panic kind so the runtime
		// reports the failing index with the right signedness.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Indexing requires idx < len; slicing permits idx == len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the in-bounds path is the common one
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm has no OpPanicBounds lowering; call the runtime helper
		// directly instead.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre-index mode, mask the index so that even speculative
	// execution cannot use an out-of-bounds value.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5078
5079
// check generates a call to fn if cmp (a boolean SSA value) evaluates
// to false. Panic blocks are cached per (fn, source line) so repeated
// checks at the same position share one block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the check normally passes
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		// First check at this (fn, line): build and cache the panic block.
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// fn never returns, so rtcall closes bPanic with a BlockExit.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5102
5103 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5104 needcheck := true
5105 switch b.Op {
5106 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5107 if b.AuxInt != 0 {
5108 needcheck = false
5109 }
5110 }
5111 if needcheck {
5112
5113 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5114 s.check(cmp, ir.Syms.Panicdivide)
5115 }
5116 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5117 }
5118
5119
5120
5121
5122
// rtcall issues a call to the given runtime function fn with the listed
// args, returning a slice of the SSA values of the results. If returns
// is false the call does not return (e.g. panics) and the current block
// is finished with a BlockExit; nil is returned in that case.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Compute the stack-frame offsets the arguments would occupy. With
	// late call expansion the args are passed as SSA values, but the
	// total size is still recorded in the call's AuxInt.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish the current block: the callee never returns.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Collect the results, accumulating their frame space into off.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Record the total arg+result stack size on the call.
	call.AuxInt = off

	return res
}
5174
5175
// storeType does *left = right, taking write barriers into account.
// skip selects parts (pointer/len/cap) to omit; leftIsStmt marks the
// store as a statement boundary for debugging.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not need a write barrier: store the whole value at once.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so that write-barrier stores for
	// pointer fields can be grouped together and scalar values don't
	// need to be live across the write-barrier call.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5195
5196
// storeTypeScalars does *left = right for all scalar (non-pointer)
// parts of t, recursing through structs and single-element arrays.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap types need no write barrier,
			// so they are treated as scalars.
			s.store(t, left, right)
		}
		// Otherwise there are no scalar fields to store.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store the length word (the data pointer is the pointer part).
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// Store the len and cap words as requested by skip.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// The itab word is stored without a write barrier here; the
		// data word is handled by storeTypePtrs.
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5244
5245
// storeTypePtrs does *left = right for all pointer parts of t (the
// parts that may require a write barrier).
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			break // not-in-heap pointers were stored as scalars
		}
		s.store(t, left, right)
	case t.IsString():
		// Store the data pointer of the string header.
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		// Store the data pointer of the slice header.
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// Store the data word; the itab word was stored by
		// storeTypeScalars.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue // skip pointer-free fields entirely
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5284
5285
5286 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5287 var a *ssa.Value
5288 if !ssa.CanSSA(t) {
5289 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5290 } else {
5291 a = s.expr(n)
5292 }
5293 return a
5294 }
5295
5296 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
5297 pt := types.NewPtr(t)
5298 var addr *ssa.Value
5299 if base == s.sp {
5300
5301 addr = s.constOffPtrSP(pt, off)
5302 } else {
5303 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
5304 }
5305
5306 if !ssa.CanSSA(t) {
5307 a := s.addr(n)
5308 s.move(t, addr, a)
5309 return
5310 }
5311
5312 a := s.expr(n)
5313 s.storeType(t, addr, a, 0, false)
5314 }
5315
5316
5317
5318
// slice computes the slice v[i:j:k] and returns the ptr, len, and cap
// of the result. i, j, and k may be nil, in which case they take their
// default values. v may be a slice, a string, or a pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	// Decompose the operand into its ptr/len/cap components.
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values for omitted indices.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if the slice indices are not in bounds.
	// The checks run in reverse order (k, then j, then i) so each index
	// is compared against a value already known to be non-negative.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice:
	//   rlen = j - i
	//   rcap = k - i   (strings keep rcap == rlen)
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary when slicing from index 0.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer of the new slice:
	//
	//   rptr = ptr + (delta & mask(rcap))
	//   delta = i * elemsize
	//
	// The Slicemask masking zeroes delta when rcap == 0 so that the
	// result pointer never points one past the original object (which
	// could keep the next object in memory alive); setting the pointer
	// to nil instead is not an option, as that would make a nil
	// slice/string.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta = i * elemsize
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// mask is 0 when rcap == 0 and all-ones otherwise, so the AND zeroes
	// delta exactly for empty results.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// rptr = ptr + (masked) delta
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5431
// u642fcvtTab lists the opcodes needed by uint64Tofloat to convert a
// uint64 to a float of a particular width.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 converts uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 converts uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5456
// uint64Tofloat64 converts the uint64 value x to a float64.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

// uint64Tofloat32 converts the uint64 value x to a float32.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5464
// uint64Tofloat converts the uint64 x (whose Go type is ft) to the
// float type tt, using the opcodes in cvttab. It generates:
//
//	if x >= 0 {
//		result = floatY(x)        // signed conversion is exact
//	} else {
//		// x has its top bit set, so the hardware's signed
//		// int->float conversion would see a negative number.
//		// Halve x while preserving round-to-even via the low bit:
//		y = x & 1
//		z = x >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result  // double back to the right magnitude
//	}
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on x >= 0 (as a signed comparison).
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: top bit clear, direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: halve-with-rounding, convert, then double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5522
// u322fcvtTab lists the opcodes needed by uint32Tofloat to convert a
// uint32 to a float of a particular width.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 converts uint32 -> float64 (no narrowing needed).
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 converts uint32 -> float32 (narrow from float64 at the end).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5536
// uint32Tofloat64 converts the uint32 value x to a float64.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

// uint32Tofloat32 converts the uint32 value x to a float32.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5544
// uint32Tofloat converts the uint32 x (whose Go type is ft) to the
// float type tt, using the opcodes in cvttab. It generates:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		// fix up the signed conversion by adding 2^32
//		result = floatY(float64(x) + (1<<32))
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on x >= 0 (as a signed comparison).
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: top bit clear, direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: convert via float64 and add 2^32 to correct the sign.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5582
5583
// referenceTypeBuiltin generates code for the len/cap builtins applied
// to a map or channel value x (a pointer to the runtime structure).
// Only the combinations that are inlined here are supported; the others
// are expected to have been rewritten to runtime calls earlier.
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)") // must be a runtime call
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)") // must be a runtime call
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)") // cap(map) is not valid Go
	}
	// Generate:
	//
	//	if n == nil {
	//		result = 0
	//	} else {
	//		result = *n             // len: first word of the header
	//		// or, for cap(chan):
	//		result = *(n + sizeof(word))
	//	}
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely // nil maps/chans are rare

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: nil receiver; len/cap is zero.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: load the count out of the runtime structure.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if buildcfg.Experiment.SwissMap && n.X.Type().IsMap() {
			// With swiss maps the count field's type may differ from
			// lenType, so load it at its own type and convert.
			loadType := reflectdata.SwissMapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// The count is the first word of the map/chan header.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// cap(chan): the capacity is the second word of the header.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
5650
// f2uCvtTab lists the opcodes and constants needed by floatToUint to
// convert a float to an unsigned integer of a particular width.
// cutoff is 2^(width-1): values below it convert directly; values at or
// above it are shifted down by cutoff before conversion and the sign
// bit is restored with an OR.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue func(*state, *types.Type, float64) *ssa.Value
	intValue func(*state, *types.Type, int64) *ssa.Value
	cutoff uint64
}

// f32_u64 converts float32 -> uint64.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 converts float64 -> uint64.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 converts float32 -> uint32.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 converts float64 -> uint32.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
5697
// float32ToUint64 converts the float32 value x to a uint64.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts the float64 value x to a uint64.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts the float32 value x to a uint32.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts the float64 value x to a uint32.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
5712
// floatToUint converts the float x (whose Go type is ft) to the
// unsigned integer type tt, using the opcodes in cvttab. It generates:
//
//	cutoff := 1 << (uintY_width - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)           // fits in the signed range
//	} else {
//		y = x - floatX(cutoff)      // shift into the signed range
//		z = uintY(y)
//		result = z | -cutoff        // restore the top bit
//	}
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: x fits in the signed range, direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: subtract the cutoff, convert, and restore the top bit.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5753
5754
5755
5756
5757 func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
5758 iface := s.expr(n.X)
5759 target := s.reflectType(n.Type())
5760 var targetItab *ssa.Value
5761 if n.ITab != nil {
5762 targetItab = s.expr(n.ITab)
5763 }
5764 return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
5765 }
5766
5767 func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
5768 iface := s.expr(n.X)
5769 var source, target, targetItab *ssa.Value
5770 if n.SrcRType != nil {
5771 source = s.expr(n.SrcRType)
5772 }
5773 if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
5774 byteptr := s.f.Config.Types.BytePtr
5775 targetItab = s.expr(n.ITab)
5776
5777
5778 target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
5779 } else {
5780 target = s.expr(n.RType)
5781 }
5782 return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
5783 }
5784
5785
5786
5787
5788
5789
5790
5791
5792
5793 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
5794 typs := s.f.Config.Types
5795 byteptr := typs.BytePtr
5796 if dst.IsInterface() {
5797 if dst.IsEmptyInterface() {
5798
5799
5800 if base.Debug.TypeAssert > 0 {
5801 base.WarnfAt(pos, "type assertion inlined")
5802 }
5803
5804
5805 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5806
5807 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5808
5809 if src.IsEmptyInterface() && commaok {
5810
5811 return iface, cond
5812 }
5813
5814
5815 b := s.endBlock()
5816 b.Kind = ssa.BlockIf
5817 b.SetControl(cond)
5818 b.Likely = ssa.BranchLikely
5819 bOk := s.f.NewBlock(ssa.BlockPlain)
5820 bFail := s.f.NewBlock(ssa.BlockPlain)
5821 b.AddEdgeTo(bOk)
5822 b.AddEdgeTo(bFail)
5823
5824 if !commaok {
5825
5826 s.startBlock(bFail)
5827 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5828
5829
5830 s.startBlock(bOk)
5831 if src.IsEmptyInterface() {
5832 res = iface
5833 return
5834 }
5835
5836 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5837 typ := s.load(byteptr, off)
5838 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5839 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
5840 return
5841 }
5842
5843 s.startBlock(bOk)
5844
5845
5846 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5847 s.vars[typVar] = s.load(byteptr, off)
5848 s.endBlock()
5849
5850
5851 s.startBlock(bFail)
5852 s.vars[typVar] = itab
5853 s.endBlock()
5854
5855
5856 bEnd := s.f.NewBlock(ssa.BlockPlain)
5857 bOk.AddEdgeTo(bEnd)
5858 bFail.AddEdgeTo(bEnd)
5859 s.startBlock(bEnd)
5860 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5861 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
5862 resok = cond
5863 delete(s.vars, typVar)
5864 return
5865 }
5866
5867 if base.Debug.TypeAssert > 0 {
5868 base.WarnfAt(pos, "type assertion not inlined")
5869 }
5870
5871 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5872 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
5873
5874
5875 bNil := s.f.NewBlock(ssa.BlockPlain)
5876 bNonNil := s.f.NewBlock(ssa.BlockPlain)
5877 bMerge := s.f.NewBlock(ssa.BlockPlain)
5878 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5879 b := s.endBlock()
5880 b.Kind = ssa.BlockIf
5881 b.SetControl(cond)
5882 b.Likely = ssa.BranchLikely
5883 b.AddEdgeTo(bNonNil)
5884 b.AddEdgeTo(bNil)
5885
5886 s.startBlock(bNil)
5887 if commaok {
5888 s.vars[typVar] = itab
5889 b := s.endBlock()
5890 b.AddEdgeTo(bMerge)
5891 } else {
5892
5893 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5894 }
5895
5896
5897 s.startBlock(bNonNil)
5898 typ := itab
5899 if !src.IsEmptyInterface() {
5900 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
5901 }
5902
5903
5904 var d *ssa.Value
5905 if descriptor != nil {
5906 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
5907 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
5908
5909
5910 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
5911 s.Fatalf("atomic load not available")
5912 }
5913
5914 var mul, and, add, zext ssa.Op
5915 if s.config.PtrSize == 4 {
5916 mul = ssa.OpMul32
5917 and = ssa.OpAnd32
5918 add = ssa.OpAdd32
5919 zext = ssa.OpCopy
5920 } else {
5921 mul = ssa.OpMul64
5922 and = ssa.OpAnd64
5923 add = ssa.OpAdd64
5924 zext = ssa.OpZeroExt32to64
5925 }
5926
5927 loopHead := s.f.NewBlock(ssa.BlockPlain)
5928 loopBody := s.f.NewBlock(ssa.BlockPlain)
5929 cacheHit := s.f.NewBlock(ssa.BlockPlain)
5930 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
5931
5932
5933
5934 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
5935 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
5936 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
5937
5938
5939 var hash *ssa.Value
5940 if src.IsEmptyInterface() {
5941 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
5942 } else {
5943 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
5944 }
5945 hash = s.newValue1(zext, typs.Uintptr, hash)
5946 s.vars[hashVar] = hash
5947
5948 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
5949
5950 b := s.endBlock()
5951 b.AddEdgeTo(loopHead)
5952
5953
5954
5955 s.startBlock(loopHead)
5956 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
5957 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
5958 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
5959 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
5960
5961 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
5962
5963
5964
5965 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
5966 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
5967 b = s.endBlock()
5968 b.Kind = ssa.BlockIf
5969 b.SetControl(cmp1)
5970 b.AddEdgeTo(cacheHit)
5971 b.AddEdgeTo(loopBody)
5972
5973
5974
5975 s.startBlock(loopBody)
5976 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
5977 b = s.endBlock()
5978 b.Kind = ssa.BlockIf
5979 b.SetControl(cmp2)
5980 b.AddEdgeTo(cacheMiss)
5981 b.AddEdgeTo(loopHead)
5982
5983
5984
5985 s.startBlock(cacheHit)
5986 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
5987 s.vars[typVar] = eItab
5988 b = s.endBlock()
5989 b.AddEdgeTo(bMerge)
5990
5991
5992 s.startBlock(cacheMiss)
5993 }
5994 }
5995
5996
5997 if descriptor != nil {
5998 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
5999 } else {
6000 var fn *obj.LSym
6001 if commaok {
6002 fn = ir.Syms.AssertE2I2
6003 } else {
6004 fn = ir.Syms.AssertE2I
6005 }
6006 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6007 }
6008 s.vars[typVar] = itab
6009 b = s.endBlock()
6010 b.AddEdgeTo(bMerge)
6011
6012
6013 s.startBlock(bMerge)
6014 itab = s.variable(typVar, byteptr)
6015 var ok *ssa.Value
6016 if commaok {
6017 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6018 }
6019 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6020 }
6021
6022 if base.Debug.TypeAssert > 0 {
6023 base.WarnfAt(pos, "type assertion inlined")
6024 }
6025
6026
6027 direct := types.IsDirectIface(dst)
6028 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6029 if base.Debug.TypeAssert > 0 {
6030 base.WarnfAt(pos, "type assertion inlined")
6031 }
6032 var wantedFirstWord *ssa.Value
6033 if src.IsEmptyInterface() {
6034
6035 wantedFirstWord = target
6036 } else {
6037
6038 wantedFirstWord = targetItab
6039 }
6040
6041 var tmp ir.Node
6042 var addr *ssa.Value
6043 if commaok && !ssa.CanSSA(dst) {
6044
6045
6046 tmp, addr = s.temp(pos, dst)
6047 }
6048
6049 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6050 b := s.endBlock()
6051 b.Kind = ssa.BlockIf
6052 b.SetControl(cond)
6053 b.Likely = ssa.BranchLikely
6054
6055 bOk := s.f.NewBlock(ssa.BlockPlain)
6056 bFail := s.f.NewBlock(ssa.BlockPlain)
6057 b.AddEdgeTo(bOk)
6058 b.AddEdgeTo(bFail)
6059
6060 if !commaok {
6061
6062 s.startBlock(bFail)
6063 taddr := source
6064 if taddr == nil {
6065 taddr = s.reflectType(src)
6066 }
6067 if src.IsEmptyInterface() {
6068 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6069 } else {
6070 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6071 }
6072
6073
6074 s.startBlock(bOk)
6075 if direct {
6076 return s.newValue1(ssa.OpIData, dst, iface), nil
6077 }
6078 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6079 return s.load(dst, p), nil
6080 }
6081
6082
6083
6084 bEnd := s.f.NewBlock(ssa.BlockPlain)
6085
6086
6087 valVar := ssaMarker("val")
6088
6089
6090 s.startBlock(bOk)
6091 if tmp == nil {
6092 if direct {
6093 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6094 } else {
6095 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6096 s.vars[valVar] = s.load(dst, p)
6097 }
6098 } else {
6099 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6100 s.move(dst, addr, p)
6101 }
6102 s.vars[okVar] = s.constBool(true)
6103 s.endBlock()
6104 bOk.AddEdgeTo(bEnd)
6105
6106
6107 s.startBlock(bFail)
6108 if tmp == nil {
6109 s.vars[valVar] = s.zeroVal(dst)
6110 } else {
6111 s.zero(dst, addr)
6112 }
6113 s.vars[okVar] = s.constBool(false)
6114 s.endBlock()
6115 bFail.AddEdgeTo(bEnd)
6116
6117
6118 s.startBlock(bEnd)
6119 if tmp == nil {
6120 res = s.variable(valVar, dst)
6121 delete(s.vars, valVar)
6122 } else {
6123 res = s.load(dst, addr)
6124 }
6125 resok = s.variable(okVar, types.Types[types.TBOOL])
6126 delete(s.vars, okVar)
6127 return res, resok
6128 }
6129
6130
6131 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6132 tmp := typecheck.TempAt(pos, s.curfn, t)
6133 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
6134 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6135 }
6136 addr := s.addr(tmp)
6137 return tmp, addr
6138 }
6139
6140
6141 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6142 v := s.vars[n]
6143 if v != nil {
6144 return v
6145 }
6146 v = s.fwdVars[n]
6147 if v != nil {
6148 return v
6149 }
6150
6151 if s.curBlock == s.f.Entry {
6152
6153 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6154 }
6155
6156
6157 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6158 s.fwdVars[n] = v
6159 if n.Op() == ir.ONAME {
6160 s.addNamedValue(n.(*ir.Name), v)
6161 }
6162 return v
6163 }
6164
// mem returns the current memory state, tracked via the memVar variable.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6168
6169 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6170 if n.Class == ir.Pxxx {
6171
6172 return
6173 }
6174 if ir.IsAutoTmp(n) {
6175
6176 return
6177 }
6178 if n.Class == ir.PPARAMOUT {
6179
6180
6181 return
6182 }
6183 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6184 values, ok := s.f.NamedValues[loc]
6185 if !ok {
6186 s.f.Names = append(s.f.Names, &loc)
6187 s.f.CanonicalLocalSlots[loc] = &loc
6188 }
6189 s.f.NamedValues[loc] = append(values, v)
6190 }
6191
6192
// Branch is an unresolved branch: a branch instruction P together with
// the SSA block B it should jump to (resolved once block start Progs
// are known).
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6197
6198
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump tables we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// Map from GC safe points to liveness index, generated by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for which we
	// need to generate instructions that spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of instructions
	// within a single block sharing the same line number.
	// Used to move statement marks to the beginning of a run.
	lineRunStart *obj.Prog

	// wasm: the number of values on the WebAssembly stack skipped.
	OnWasmStackSkipped int
}
6232
// FuncInfo returns the FuncInfo of the function currently being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6236
6237
// Prog appends a new Prog with opcode as, maintaining the
// statement-mark bookkeeping: within a run of instructions on the same
// source line, the statement mark is floated to the first instruction
// of the run.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float a statement start to the beginning of any same-line run.
	// lineRunStart is reset at block boundaries, which partitions runs.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		// Move the statement mark from p to the start of the run.
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6253
6254
// Pc returns the current Prog (the next instruction slot to be filled).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6258
6259
// SetPos sets the current source position to be used for subsequently
// generated instructions.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6263
6264
6265
6266
6267 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
6268 p := s.Prog(op)
6269 p.To.Type = obj.TYPE_BRANCH
6270 s.Branches = append(s.Branches, Branch{P: p, B: target})
6271 return p
6272 }
6273
6274
6275
6276
6277
6278
// DebugFriendlySetPosFrom adjusts the recorded position based on v's
// position, applying heuristics that avoid attaching statement marks to
// instructions a debugger should not stop on (register-allocator
// spills/fills/copies and phis), and that avoid downgrading an
// already-set statement mark on the same line.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also convert default IsStmt to NotStmt; only
			// explicit statement boundaries should appear
			// in the generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// If s.pp.Pos already is a statement for this
					// file and line, keep it: replacing it with a
					// not-statement position for the same line
					// would drop the pending statement mark.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v, but copy the statement mark from State.
			}
			s.SetPos(p)
		} else {
			// No position on v: reuse the current position, unmarked.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6317
6318
// emitArgInfo emits the funcdata symbol describing the function's
// arguments (used for printing arguments in tracebacks). Functions with
// no receiver and no parameters get no arg info.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a FUNCDATA pointing at the arg info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6336
6337
// EmitArgInfo emits the argument-info symbol for function f with ABI
// information abiInfo. The data is a byte-encoded description of the
// argument layout using the internal/abi TraceArgs* encoding, consumed
// by the runtime's traceback argument printer.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	// NOTE: the encoding below uses the rtabi.TraceArgs* constants and
	// should be kept in sync with the runtime's argument printer.

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is encoded as a (possibly nested)
	// aggregate rather than a single offset/size pair.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into the symbol's data
	n := 0    // number of components emitted so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate arg/field/element as (offset, size), or
	// the "offset too large" marker if it doesn't fit in a byte.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType recursively writes the encoding of t at baseOffset.
	// It returns whether to continue visiting further arguments.
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			// Too many components; truncate with "...".
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Nesting too deep; emit a truncated aggregate.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words (ptr, len, cap).
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Real and imaginary parts.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty array still counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty struct still counts as a component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the dictionary argument of an instantiated generic
		// function - it is implicit and not user-visible.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6441
6442
// emitWrappedFuncInfo emits the funcdata that records, for a wrapper
// function, the symbol of the function it wraps. Skipped when linking
// shared (the symbol may live in a different shared object).
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Relative reference (SymPtrOff) to another shared object doesn't work.
		// Unfortunate.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a FUNCDATA pointing at the wrap info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6469
6470
// genssa appends entries to pp for each instruction in f.
func genssa(f *ssa.Func, pp *objw.Progs) {
	var s State
	s.ABI = f.OwnAux.Fn.ABI()

	e := f.Frontend().(*ssafn)

	gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]

	var lv *liveness.Liveness
	s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
	emitArgInfo(e, f, pp)
	argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)

	openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
	if openDeferInfo != nil {
		// This function uses open-coded defers -- write out the funcdata
		// pointing at the computed defer info.
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = openDeferInfo
	}

	emitWrappedFuncInfo(e, pp)

	// Remember where each block starts.
	s.bstart = make([]*obj.Prog, f.NumBlocks())
	s.pp = pp
	var progToValue map[*obj.Prog]*ssa.Value
	var progToBlock map[*obj.Prog]*ssa.Block
	var valueToProgAfter []*obj.Prog // The first Prog following computation of a value v; v is visible at this point.
	if gatherPrintInfo {
		progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
		progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
		f.Logf("genssa %s\n", f.Name)
		progToBlock[s.pp.Next] = f.Blocks[0]
	}

	if base.Ctxt.Flag_locationlists {
		// Reuse the cached slice if it's big enough.
		if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
			f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
		}
		valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
		clear(valueToProgAfter)
	}

	// If the very first instruction is not tagged as a statement,
	// debuggers may attribute it to the previous function instead.
	// Remember the first statement-worthy position in the entry block.
	firstPos := src.NoXPos
	for _, v := range f.Entry.Values {
		if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
			firstPos = v.Pos
			v.Pos = firstPos.WithDefaultStmt()
			break
		}
	}

	// inlMarks has an entry for each Prog that implements an inline mark.
	// It maps from that Prog to the global inlining id of the inlined body
	// which should unwind to this Prog's location.
	var inlMarks map[*obj.Prog]int32
	var inlMarkList []*obj.Prog

	// inlMarksByPos maps from a (column 1) source position to the set of
	// Progs that are in the set above and have that source position.
	var inlMarksByPos map[src.XPos][]*obj.Prog

	var argLiveIdx int = -1 // argument liveness info index

	// hotAlign and hotRequire control alignment padding for blocks
	// tagged hot by PGO: align to hotAlign when at least hotRequire
	// bytes of the cache line would otherwise be unavailable.
	var hotAlign, hotRequire int64

	if base.Debug.AlignHot > 0 {
		switch base.Ctxt.Arch.Name {
		// Enabled per-architecture; currently only x86 variants
		// get the hot-block alignment treatment.
		case "amd64", "386":
			// Align to 64 bytes if 31 or fewer bytes remain in the
			// cache line (a very simple model of cache line length).
			hotAlign = 64
			hotRequire = 31
		}
	}

	// Emit basic blocks.
	for i, b := range f.Blocks {

		s.lineRunStart = nil
		s.SetPos(s.pp.Pos.WithNotStmt()) // It needs a non-empty Pos, but cannot be a statement boundary (yet).

		if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
			// Pad for alignment at the start of a PGO-hot block.
			p := s.pp.Prog(obj.APCALIGNMAX)
			p.From.SetConst(hotAlign)
			p.To.SetConst(hotRequire)
		}

		s.bstart[b.ID] = s.pp.Next

		// Emit a PCDATA if the argument liveness index changes at block entry.
		if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
			argLiveIdx = idx
			p := s.pp.Prog(obj.APCDATA)
			p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
			p.To.SetConst(int64(idx))
		}

		// Emit values in block.
		Arch.SSAMarkMoves(&s, b)
		for _, v := range b.Values {
			x := s.pp.Next
			s.DebugFriendlySetPosFrom(v)

			if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
				v.Fatalf("input[0] and output not in same register %s", v.LongString())
			}

			switch v.Op {
			case ssa.OpInitMem:
				// memory arg needs no code
			case ssa.OpArg:
				// input args need no code
			case ssa.OpSP, ssa.OpSB:
				// nothing to do
			case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
				// nothing to do
			case ssa.OpGetG:
				// nothing to do
			case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
				// nothing to do; these exist only for liveness bookkeeping
			case ssa.OpPhi:
				CheckLoweredPhi(v)
			case ssa.OpConvert:
				// nothing to do; no-op conversion for liveness
				if v.Args[0].Reg() != v.Reg() {
					v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
				}
			case ssa.OpInlMark:
				// Emit a placeholder nop carrying the inlining id; it may
				// be replaced by a real instruction below.
				p := Arch.Ginsnop(s.pp)
				if inlMarks == nil {
					inlMarks = map[*obj.Prog]int32{}
					inlMarksByPos = map[src.XPos][]*obj.Prog{}
				}
				inlMarks[p] = v.AuxInt32()
				inlMarkList = append(inlMarkList, p)
				pos := v.Pos.AtColumn1()
				inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
				firstPos = src.NoXPos

			default:
				// Special case for the first line in the function; move it
				// to the first emitted instruction (which cannot be a
				// register arg or spill/fill).
				if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
					s.SetPos(firstPos)
					firstPos = src.NoXPos
				}
				// Attach this safe point's liveness to the next
				// instruction.
				s.pp.NextLive = s.livenessMap.Get(v)
				s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)

				// Let the backend handle it.
				Arch.SSAGenValue(&s, v)
			}

			// Emit a PCDATA if the argument liveness index changes at this value.
			if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
				argLiveIdx = idx
				p := s.pp.Prog(obj.APCDATA)
				p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
				p.To.SetConst(int64(idx))
			}

			if base.Ctxt.Flag_locationlists {
				valueToProgAfter[v.ID] = s.pp.Next
			}

			if gatherPrintInfo {
				for ; x != s.pp.Next; x = x.Link {
					progToValue[x] = v
				}
			}
		}
		// If this is an empty self-loop block, stick a hardware NOP in
		// there so that debuggers are less confused.
		if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
			p := Arch.Ginsnop(s.pp)
			p.Pos = p.Pos.WithIsStmt()
			if b.Pos == src.NoXPos {
				b.Pos = p.Pos // It needs a file, otherwise a no-file non-zero line causes confusion.
				if b.Pos == src.NoXPos {
					b.Pos = s.pp.Text.Pos // Sometimes p.Pos is empty too.
				}
			}
			b.Pos = b.Pos.WithBogusLine() // Force a change in line number for the loop.
		}

		// Set unsafe mark for any end-of-block generated instructions
		// (normally, conditional or unconditional branches).
		// This is particularly important for empty blocks, as there
		// are no values to inherit the unsafe mark from.
		s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)

		// Emit control flow instructions for block.
		var next *ssa.Block
		if i < len(f.Blocks)-1 && base.Flag.N == 0 {
			// If -N, leave next==nil so every block with successors
			// ends in a JMP; this helps keep line numbers for
			// otherwise empty blocks.
			next = f.Blocks[i+1]
		}
		x := s.pp.Next
		s.SetPos(b.Pos)
		Arch.SSAGenBlock(&s, b, next)
		if gatherPrintInfo {
			for ; x != s.pp.Next; x = x.Link {
				progToBlock[x] = b
			}
		}
	}
	if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
		// We need the return address of a panic call to
		// still be inside the function in question. So if
		// it ends in a call which doesn't return, add a
		// nop (which will never execute) after the call.
		Arch.Ginsnop(s.pp)
	}
	if openDeferInfo != nil {
		// When doing open-coded defers, generate a disconnected call to
		// deferreturn and a return, used during panic recovery to unwind
		// the stack and return to the runtime.
		s.pp.NextLive = s.livenessMap.DeferReturn
		p := s.pp.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Deferreturn

		// Load results into registers, so that when a deferred function
		// recovers a panic, it will return to the caller with the right
		// results (which are already in memory).
		for _, o := range f.OwnAux.ABIInfo().OutParams() {
			n := o.Name
			rts, offs := o.RegisterTypesAndOffsets()
			for i := range o.Registers {
				Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
			}
		}

		s.pp.Prog(obj.ARET)
	}

	if inlMarks != nil {
		hasCall := false

		// We have some inline marks. Try to find other instructions we're
		// going to emit anyway, and use those instructions instead of the
		// inline marks.
		for p := s.pp.Text; p != nil; p = p.Link {
			if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
				p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
				// Don't use 0-sized instructions as inline marks, because
				// inline marks are identified by pc offset; these
				// instructions are (at least sometimes) zero-sized.
				continue
			}
			if _, ok := inlMarks[p]; ok {
				// Don't use inline marks themselves. We don't know
				// whether they will be zero-sized or not yet.
				continue
			}
			if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
				hasCall = true
			}
			pos := p.Pos.AtColumn1()
			marks := inlMarksByPos[pos]
			if len(marks) == 0 {
				continue
			}
			for _, m := range marks {
				// We found an instruction with the same source position
				// as some of the inline marks; use it instead.
				p.Pos = p.Pos.WithIsStmt() // promote position to a statement
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
				// Make the inline mark a real nop, so it doesn't generate any code.
				m.As = obj.ANOP
				m.Pos = src.NoXPos
				m.From = obj.Addr{}
				m.To = obj.Addr{}
			}
			delete(inlMarksByPos, pos)
		}
		// Any unmatched inline marks now need to be added to the
		// inlining tree (they will still generate a nop instruction).
		for _, p := range inlMarkList {
			if p.As != obj.ANOP {
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
			}
		}

		if e.stksize == 0 && !hasCall {
			// Frameless leaf function with inline marks: if the first
			// real instruction carries an inlined position, splice in a
			// nop attributed to the function itself just before it, so
			// the function body doesn't start at an inlined position.
			for p := s.pp.Text; p != nil; p = p.Link {
				if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
					continue
				}
				if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
					// Make a real (not 0-sized) nop carrying the
					// function's own position.
					nop := Arch.Ginsnop(s.pp)
					nop.Pos = e.curfn.Pos().WithIsStmt()

					// Unlink the nop from the end of the list (Ginsnop
					// appended it there)...
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == nop {
							x.Link = nop.Link
							break
						}
					}
					// ...and splice it in right before p.
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == p {
							nop.Link = p
							x.Link = nop
							break
						}
					}
				}
				break
			}
		}
	}

	if base.Ctxt.Flag_locationlists {
		var debugInfo *ssa.FuncDebug
		debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
		// With the (ABIInternal, -N) combination use the unoptimized
		// variable-location builder; otherwise the standard one.
		if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
			ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
		} else {
			ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
		}
		bstart := s.bstart
		idToIdx := make([]int, f.NumBlocks())
		for i, b := range f.Blocks {
			idToIdx[b.ID] = i
		}
		// Register a callback that translates (block, value) IDs to PCs
		// for location lists, using the Prog positions recorded above.
		debugInfo.GetPC = func(b, v ssa.ID) int64 {
			switch v {
			case ssa.BlockStart.ID:
				if b == f.Entry.ID {
					return 0 // Start at the very beginning, at the assembler-generated prologue.
				}
				return bstart[b].Pc
			case ssa.BlockEnd.ID:
				blk := f.Blocks[idToIdx[b]]
				nv := len(blk.Values)
				return valueToProgAfter[blk.Values[nv-1].ID].Pc
			case ssa.FuncEnd.ID:
				return e.curfn.LSym.Size
			default:
				return valueToProgAfter[v].Pc
			}
		}
	}

	// Resolve branches, and relax DefaultStmt into NotStmt.
	for _, br := range s.Branches {
		br.P.To.SetTarget(s.bstart[br.B.ID])
		if br.P.Pos.IsStmt() != src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		} else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
			// The target block already has a statement at this line;
			// don't duplicate the statement mark on the branch.
			br.P.Pos = br.P.Pos.WithNotStmt()
		}

	}

	// Resolve jump table destinations.
	for _, jt := range s.JumpTables {
		// Convert from *Block targets to *Prog targets.
		targets := make([]*obj.Prog, len(jt.Succs))
		for i, e := range jt.Succs {
			targets[i] = s.bstart[e.Block().ID]
		}
		// Add to the list of jump tables to be resolved at assembly
		// time, once instruction offsets are known.
		fi := s.pp.CurFunc.LSym.Func()
		fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
	}

	if e.log { // spew to stdout
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			// Print the filename only when it changes.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				f.Logf("# %s\n", filename)
			}

			var s string
			if v, ok := progToValue[p]; ok {
				s = v.String()
			} else if b, ok := progToBlock[p]; ok {
				s = b.String()
			} else {
				s = "   " // most value and branch strings are 2-3 characters long
			}
			f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
		}
	}
	if f.HTMLWriter != nil { // spew to ssa.html
		var buf strings.Builder
		buf.WriteString("<code>")
		buf.WriteString("<dl class=\"ssa-gen\">")
		filename := ""

		liveness := lv.Format(nil)
		if liveness != "" {
			buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
			buf.WriteString(html.EscapeString("# " + liveness))
			buf.WriteString("</dd>")
		}

		for p := s.pp.Text; p != nil; p = p.Link {
			// Don't spam every line with the file name, which is often huge.
			// Only print changes, and "unknown" is not a change.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
				buf.WriteString(html.EscapeString("# " + filename))
				buf.WriteString("</dd>")
			}

			buf.WriteString("<dt class=\"ssa-prog-src\">")
			if v, ok := progToValue[p]; ok {
				// If this value has liveness info, print it on a line of
				// its own first (skipping APCDATA progs, which encode
				// liveness themselves).
				if p.As != obj.APCDATA {
					if liveness := lv.Format(v); liveness != "" {
						// Close the current dt, emit the liveness comment...
						buf.WriteString("</dt><dd class=\"ssa-prog\">")
						buf.WriteString(html.EscapeString("# " + liveness))
						buf.WriteString("</dd>")
						// ...and restart the line for the value itself.
						buf.WriteString("<dt class=\"ssa-prog-src\">")
					}
				}

				buf.WriteString(v.HTML())
			} else if b, ok := progToBlock[p]; ok {
				buf.WriteString("<b>" + b.HTML() + "</b>")
			}
			buf.WriteString("</dt>")
			buf.WriteString("<dd class=\"ssa-prog\">")
			fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
			buf.WriteString("</dd>")
		}
		buf.WriteString("</dl>")
		buf.WriteString("</code>")
		f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
	}
	if ssa.GenssaDump[f.Name] {
		fi := f.DumpFileForPhase("genssa")
		if fi != nil {

			// inliningDiffers reports whether the inlining stacks a and b
			// differ: any filename change, or any line change except in
			// the innermost (last) frame.
			inliningDiffers := func(a, b []src.Pos) bool {
				if len(a) != len(b) {
					return true
				}
				for i := range a {
					if a[i].Filename() != b[i].Filename() {
						return true
					}
					if i != len(a)-1 && a[i].Line() != b[i].Line() {
						return true
					}
				}
				return false
			}

			var allPosOld []src.Pos
			var allPos []src.Pos

			for p := s.pp.Text; p != nil; p = p.Link {
				if p.Pos.IsKnown() {
					allPos = allPos[:0]
					p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
					if inliningDiffers(allPos, allPosOld) {
						for _, pos := range allPos {
							fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
						}
						allPos, allPosOld = allPosOld, allPos // swap, not copy, so less allocation
					}
				}

				var s string
				if v, ok := progToValue[p]; ok {
					s = v.String()
				} else if b, ok := progToBlock[p]; ok {
					s = b.String()
				} else {
					s = "   "
				}
				fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
			}
			fi.Close()
		}
	}

	defframe(&s, e, f)

	f.HTMLWriter.Close()
	f.HTMLWriter = nil
}
7013
// defframe fills in the final frame/argument sizes on the TEXT
// instruction, spills partially-live register arguments, and emits code
// to zero ambiguously live stack slots.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live. That is, the named slot is considered live by
	// liveness analysis (because a part of it is live), but we may not
	// spill all parts into the slot. Spilling is unnecessary under -N,
	// since all values are written back to the stack upfront.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, see which parts are already spilled before any
		// safepoint, by scanning the entry block up to the first call.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then, insert code to spill registers if not already.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so that the
	// garbage collector only sees initialized values when it
	// looks for pointers. lo/hi track the current range (frame
	// offsets) to be zeroed.
	var lo, hi int64

	// Opaque state for the backend's ZeroRange implementation to use
	// across calls.
	var state uint32

	// Iterate through declarations, merging adjacent ranges that need
	// zeroing into single ZeroRange calls.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with range we already have.
			lo = n.FrameOffset()
			continue
		}

		// Zero old range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Set new range.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7120
7121
// IndexJump describes one conditional branch to emit for a block: the
// assembler jump opcode and the index of the successor to jump to.
type IndexJump struct {
	Jump obj.As
	Index int
}
7126
7127 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7128 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7129 p.Pos = b.Pos
7130 }
7131
7132
7133
// CombJump emits a pair of conditional jumps for block b, simulating a
// condition the hardware cannot test with one branch. jumps[0] is the
// pair to emit when b.Succs[0] is the fallthrough block, jumps[1] when
// b.Succs[1] is; if neither follows, an unconditional JMP is appended.
func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
	switch next {
	case b.Succs[0].Block():
		s.oneJump(b, &jumps[0][0])
		s.oneJump(b, &jumps[0][1])
	case b.Succs[1].Block():
		s.oneJump(b, &jumps[1][0])
		s.oneJump(b, &jumps[1][1])
	default:
		// Neither successor is next: branch to one successor and JMP
		// to the other, preferring to fall toward the likely side.
		var q *obj.Prog
		if b.Likely != ssa.BranchUnlikely {
			s.oneJump(b, &jumps[1][0])
			s.oneJump(b, &jumps[1][1])
			q = s.Br(obj.AJMP, b.Succs[1].Block())
		} else {
			s.oneJump(b, &jumps[0][0])
			s.oneJump(b, &jumps[0][1])
			q = s.Br(obj.AJMP, b.Succs[0].Block())
		}
		q.Pos = b.Pos
	}
}
7156
7157
// AddAux adds the offset and symbol information from v's Aux and AuxInt
// fields to address a. Shorthand for AddAux2(a, v, v.AuxInt).
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7161 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7162 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7163 v.Fatalf("bad AddAux addr %v", a)
7164 }
7165
7166 a.Offset += offset
7167
7168
7169 if v.Aux == nil {
7170 return
7171 }
7172
7173 switch n := v.Aux.(type) {
7174 case *ssa.AuxCall:
7175 a.Name = obj.NAME_EXTERN
7176 a.Sym = n.Fn
7177 case *obj.LSym:
7178 a.Name = obj.NAME_EXTERN
7179 a.Sym = n
7180 case *ir.Name:
7181 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7182 a.Name = obj.NAME_PARAM
7183 } else {
7184 a.Name = obj.NAME_AUTO
7185 }
7186 a.Sym = n.Linksym()
7187 a.Offset += n.FrameOffset()
7188 default:
7189 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7190 }
7191 }
7192
7193
7194
// extendIndex extends idx to full pointer (int) width for use as an
// index. For a 64-bit index on a 32-bit platform it emits a runtime
// bounds panic of the given kind when the high 32 bits are nonzero,
// unless bounded is true or bounds checks are disabled with -B.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already int-width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit machine: the result is the low
		// 32 bits, valid only if the high 32 bits are zero.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			return lo
		}
		// Branch: hi == 0 falls through to bNext; otherwise bPanic
		// raises the extension panic.
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Switch to the unsigned flavor of the bounds error so the
			// runtime formats the index as unsigned.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: sign- or zero-extend it.
	// The switch key encodes (index size, pointer size) as
	// 10*size + ptrSize, e.g. 14 = 1-byte index, 4-byte pointer.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7286
7287
7288
7289 func CheckLoweredPhi(v *ssa.Value) {
7290 if v.Op != ssa.OpPhi {
7291 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7292 }
7293 if v.Type.IsMemory() {
7294 return
7295 }
7296 f := v.Block.Func
7297 loc := f.RegAlloc[v.ID]
7298 for _, a := range v.Args {
7299 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7300 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7301 }
7302 }
7303 }
7304
7305
7306
7307
7308
7309 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7310 entry := v.Block.Func.Entry
7311 if entry != v.Block {
7312 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7313 }
7314 for _, w := range entry.Values {
7315 if w == v {
7316 break
7317 }
7318 switch w.Op {
7319 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7320
7321 default:
7322 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7323 }
7324 }
7325 }
7326
7327
7328 func CheckArgReg(v *ssa.Value) {
7329 entry := v.Block.Func.Entry
7330 if entry != v.Block {
7331 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7332 }
7333 }
7334
7335 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7336 n, off := ssa.AutoVar(v)
7337 a.Type = obj.TYPE_MEM
7338 a.Sym = n.Linksym()
7339 a.Reg = int16(Arch.REGSP)
7340 a.Offset = n.FrameOffset() + off
7341 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7342 a.Name = obj.NAME_PARAM
7343 } else {
7344 a.Name = obj.NAME_AUTO
7345 }
7346 }
7347
7348
7349
// Call emits a CALL instruction for the SSA call value v, performing
// call bookkeeping via PrepareCall first, and returns the new Prog.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	// Capture statement-ness before PrepareCall, so the CALL keeps the
	// position's is-statement marking even if it changes in between.
	pPosIsStmt := s.pp.Pos.IsStmt()
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Direct call to a known symbol.
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call: target is in a register (v.Args[0]); the
		// operand type required for that varies by architecture.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
7378
7379
7380
// TailCall emits a tail call for v: a CALL instruction (built by Call)
// rewritten to ARET, which the assembler treats as a jump to the target.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7386
7387
7388
7389
7390 func (s *State) PrepareCall(v *ssa.Value) {
7391 idx := s.livenessMap.Get(v)
7392 if !idx.StackMapValid() {
7393
7394 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7395 base.Fatalf("missing stack map index for %v", v.LongString())
7396 }
7397 }
7398
7399 call, ok := v.Aux.(*ssa.AuxCall)
7400
7401 if ok {
7402
7403
7404 if nowritebarrierrecCheck != nil {
7405 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7406 }
7407 }
7408
7409 if s.maxarg < v.AuxInt {
7410 s.maxarg = v.AuxInt
7411 }
7412 }
7413
7414
7415
7416 func (s *State) UseArgs(n int64) {
7417 if s.maxarg < n {
7418 s.maxarg = n
7419 }
7420 }
7421
7422
7423 func fieldIdx(n *ir.SelectorExpr) int {
7424 t := n.X.Type()
7425 if !t.IsStruct() {
7426 panic("ODOT's LHS is not a struct")
7427 }
7428
7429 for i, f := range t.Fields() {
7430 if f.Sym == n.Sel {
7431 if f.Offset != n.Offset() {
7432 panic("field offset doesn't match")
7433 }
7434 return i
7435 }
7436 }
7437 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7438
7439
7440
7441 }
7442
7443
7444
// ssafn holds frontend information about the function being compiled
// and implements the compiler services (Logf, Fatalf, StringData, ...)
// that the ssa backend calls back into.
type ssafn struct {
	curfn *ir.Func
	// strings caches data symbols for string constants, keyed by value.
	strings map[string]*obj.LSym
	// stksize is the size of the local-variable portion of the frame;
	// see defframe, which adds the argument area to it.
	stksize int64
	// stkptrsize — presumably the pointer-containing prefix of the
	// frame; not used in this chunk, confirm against frame layout.
	stkptrsize int64

	// stkalign is the required stack-frame alignment; defframe rounds
	// the outgoing argument area up to it.
	stkalign int64

	// log enables debug output from Logf.
	log bool
}
7459
7460
7461
7462 func (e *ssafn) StringData(s string) *obj.LSym {
7463 if aux, ok := e.strings[s]; ok {
7464 return aux
7465 }
7466 if e.strings == nil {
7467 e.strings = make(map[string]*obj.LSym)
7468 }
7469 data := staticdata.StringSym(e.curfn.Pos(), s)
7470 e.strings[s] = data
7471 return data
7472 }
7473
7474
// SplitSlot returns a slot representing the data of parent starting at
// offset, creating a fresh named auto (node name + suffix) when the
// parent can actually be decomposed.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	if node.Class != ir.PAUTO || node.Addrtaken() {
		// Addressed things and non-autos retain their parents (i.e.
		// cannot truly be split), so just return an offset view.
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	// Create a new, smaller variable (e.g. "x.ptr") for the piece.
	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever)
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}
7490
7491
7492 func (e *ssafn) Logf(msg string, args ...interface{}) {
7493 if e.log {
7494 fmt.Printf(msg, args...)
7495 }
7496 }
7497
// Log reports whether ssa debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7501
7502
7503 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
7504 base.Pos = pos
7505 nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
7506 base.Fatalf("'%s': "+msg, nargs...)
7507 }
7508
7509
7510
// Warnl reports a compiler diagnostic at the given position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7514
// Debug_checknil reports whether nil-check debugging (-d=nil) is on.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7518
// UseWriteBarrier reports whether write barriers are enabled (-wb flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7522
// Syslook returns the symbol of the runtime function or variable with
// the given name, failing compilation for unknown names.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil
}
7541
// Func returns the function currently being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7545
7546 func clobberBase(n ir.Node) ir.Node {
7547 if n.Op() == ir.ODOT {
7548 n := n.(*ir.SelectorExpr)
7549 if n.X.Type().NumFields() == 1 {
7550 return clobberBase(n.X)
7551 }
7552 }
7553 if n.Op() == ir.OINDEX {
7554 n := n.(*ir.IndexExpr)
7555 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7556 return clobberBase(n.X)
7557 }
7558 }
7559 return n
7560 }
7561
7562
// callTargetLSym returns the correct entry symbol for a direct call to
// callee, using the callee's declared ABI when its Func is available.
func callTargetLSym(callee *ir.Name) *obj.LSym {
	if callee.Func == nil {
		// The callee's ABI is unknown here (no ir.Func attached), so
		// fall back to the default link symbol.
		return callee.Linksym()
	}

	return callee.LinksymABI(callee.Func.ABI)
}
7573
7574
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct verifies this at construction time.
const deferStructFnField = 4

// deferType caches the type synthesized by deferstruct.
var deferType *types.Type
7578
7579
7580
// deferstruct builds (once, then caches) a named type standing in for
// the runtime's _defer record.
// NOTE(review): the field list must presumably stay in sync with the
// runtime's _defer definition — confirm against runtime/runtime2.go.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield creates an unexported field (nil package) named name.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// Fields declared as uintptr here stand in for pointer-typed
		// fields in the runtime's record; only the sizes and offsets
		// matter to the compiler.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Keep deferStructFnField honest against the list above.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Build the struct and compute its layout.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
7619
7620
7621
7622
7623
7624 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
7625 return obj.Addr{
7626 Name: obj.NAME_NONE,
7627 Type: obj.TYPE_MEM,
7628 Reg: baseReg,
7629 Offset: spill.Offset + extraOffset,
7630 }
7631 }
7632
// BoundsCheckFunc and ExtendCheckFunc hold the symbols of the runtime's
// bounds-failure entry points, indexed by ssa.BoundsKind.
var (
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)
7637