1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "fmt"
11 "go/constant"
12 "html"
13 "internal/buildcfg"
14 "os"
15 "path/filepath"
16 "slices"
17 "strings"
18
19 "cmd/compile/internal/abi"
20 "cmd/compile/internal/base"
21 "cmd/compile/internal/ir"
22 "cmd/compile/internal/liveness"
23 "cmd/compile/internal/objw"
24 "cmd/compile/internal/reflectdata"
25 "cmd/compile/internal/rttype"
26 "cmd/compile/internal/ssa"
27 "cmd/compile/internal/staticdata"
28 "cmd/compile/internal/typecheck"
29 "cmd/compile/internal/types"
30 "cmd/internal/obj"
31 "cmd/internal/objabi"
32 "cmd/internal/src"
33 "cmd/internal/sys"
34
35 rtabi "internal/abi"
36 )
37
// Shared SSA backend configuration and per-worker value caches,
// initialized by InitConfig.
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

var ssaDump string     // early copy of $GOSSAFUNC; the func name to dump output for
var ssaDir string      // optional destination for ssa dump file ($GOSSADIR)
var ssaDumpStdout bool // whether to dump to stdout as well (GOSSAFUNC ends in "+")
var ssaDumpCFG string  // generate CFGs for these phases (GOSSAFUNC suffix after ":")
const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds the functions that were inlined into the function
// being dumped, so their sources can be shown alongside it.
var ssaDumpInlined []*ir.Func
49
50 func DumpInline(fn *ir.Func) {
51 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
52 ssaDumpInlined = append(ssaDumpInlined, fn)
53 }
54 }
55
56 func InitEnv() {
57 ssaDump = os.Getenv("GOSSAFUNC")
58 ssaDir = os.Getenv("GOSSADIR")
59 if ssaDump != "" {
60 if strings.HasSuffix(ssaDump, "+") {
61 ssaDump = ssaDump[:len(ssaDump)-1]
62 ssaDumpStdout = true
63 }
64 spl := strings.Split(ssaDump, ":")
65 if len(spl) > 1 {
66 ssaDump = spl[0]
67 ssaDumpCFG = spl[1]
68 }
69 }
70 }
71
// InitConfig builds the shared SSA configuration and per-worker caches,
// and looks up all the runtime functions, variables, and bounds-check
// helpers that SSA construction refers to via ir.Syms.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that the backend needs but the
	// frontend may not have created, before the pointer-type cache is
	// disabled below (types.NewPtrCacheEnabled = false).
	_ = types.NewPtr(types.Types[types.TINTER])
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))
	_ = types.NewPtr(types.NewPtr(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8])))
	_ = types.NewPtr(types.Types[types.TINT16])
	_ = types.NewPtr(types.Types[types.TINT64])
	_ = types.NewPtr(types.ErrorType)
	if buildcfg.Experiment.SwissMap {
		_ = types.NewPtr(reflectdata.SwissMapType())
	} else {
		_ = types.NewPtr(reflectdata.OldMapType())
	}
	_ = types.NewPtr(deferstruct())
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up the runtime function and variable symbols used by generated code.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")

	// Wasm uses the "go" variants of the panic helpers (Go-ABI entry
	// points); all other architectures use the assembly shims.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	// On 32-bit targets, 64-bit index panics go through the "extend" helpers.
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm-specific helper symbols (looked up unconditionally).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
224
// InitTables initializes the intrinsics table. Passing nil presumably
// selects the default (non-test) configuration — see initIntrinsics.
func InitTables() {
	initIntrinsics(nil)
}
228
229
230
231
232
233
234
235
// AbiForBodylessFuncStackMap returns the ABI config used to build the
// stack map for a bodyless (assembly) function. It always returns a copy
// of ABI0, so the caller may mutate it without affecting the shared config.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
239
240
241
242 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
243 if buildcfg.Experiment.RegabiArgs {
244
245 if fn == nil {
246 return abi1
247 }
248 switch fn.ABI {
249 case obj.ABI0:
250 return abi0
251 case obj.ABIInternal:
252
253
254 return abi1
255 }
256 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
257 panic("not reachable")
258 }
259
260 a := abi0
261 if fn != nil {
262 if fn.Pragma&ir.RegisterParams != 0 {
263 a = abi1
264 }
265 }
266 return a
267 }
268
269
270
271
272
273
274
275
276
277
278
279
// emitOpenDeferInfo emits the FUNCDATA blob describing this function's
// open-coded defers: the (negated, uvarint-encoded) frame offset of the
// deferBits temporary, followed by the offset of the first defer closure
// slot. The closure slots are expected to have been laid out contiguously,
// one pointer apart; this is verified below.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the stack-slot layout placed the closure slots in
	// order, each types.PtrSize below the previous one.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative; negate so they encode compactly as uvarints.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
300
301
302
// buildssa builds an SSA function for fn.
// worker indicates which of the backend workers is doing the processing
// (it selects the ssaCaches entry to reuse).
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// GOSSAFUNC can match a simple name ("(*Reader).Reset"), a
	// package-qualified name ("compress/gzip.(*Reader).Reset"), or a
	// subpackage-qualified name ("gzip.(*Reader).Reset"), optionally
	// with an ABI marker ("name<abi>").
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if strings.Contains(ssaDump, ",") { // ABI-differentiated name already requested
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") { // "name<N>" form: rewrite to the "," form
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "(*Reader).Reset"
			pkgDotName == ssaDump || // "compress/gzip.(*Reader).Reset"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "gzip.(*Reader).Reset"
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the AST dump so it can be included in the HTML output
		// (and echoed to stdout when requested).
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// The "sources" column shows the function (and its inlinees) source text.
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether to use open-coded defers. Several conditions can
	// disqualify the function below.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Don't support open-coded defers on 386 when building shared
		// libraries / dynamic-linked binaries.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Race-instrumented function exits are incompatible with the
		// open-coded defer exit sequence; fall back to normal defers.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Disallow if any result lives on the heap: the exit sequence
		// could not copy it back to its stack slot.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Open-coded defers duplicate the defer calls at every exit, so
		// cap the code-size blowup: skip when returns × defers is large.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and stack slot. deferBits is a
		// bitmask recording which open-coded defers have been activated.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// OpConst8 with the default AuxInt of zero: deferBits starts at 0.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Mark the slot live so the store above is not dead-store
		// eliminated: panic processing reads deferBits from memory even
		// if the normal exit path never does.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record which output parameters are passed in registers; the
	// debugger needs this to locate results at function exit.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// Processed at each use, to prevent the address appearing
			// before the declaration.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // helps debugging information; not needed for compilation itself
			} else { // address taken and/or too large for SSA
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// SSA-able type whose address was taken: receive
						// the value as OpArg and store it to memory at once.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Too big for SSA: spill the incoming registers
						// to the parameter's stack slot up front.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// With optimization off, spill the closure pointer of a
			// range-func body to a specially named stack slot
			// (".closureptr") and keep it live, presumably so debuggers
			// can locate the parent frame — confirm against the
			// debugger-side consumer of f.CloSlot.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// VarLive keeps the store above from being eliminated as dead.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// If n is a small variable captured by value whose address
			// was never taken, promote it to PAUTO so it can live in SSA
			// registers rather than in the closure record.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// Captured by reference: the closure slot holds a pointer to
			// the variable; load it before recording the heap address.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fall through to the exit if control reaches the end of the body.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	// Safe even when HTMLWriter is nil — WritePhase is presumably a
	// no-op then (confirm in ssa.HTMLWriter).
	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call into the ssa package to compile the function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information for morestack calls
	// emitted by the assembler. All parameters (used or not) are spilled,
	// mirroring the stack-based ABI's behavior.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
625
626 func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
627 typs, offs := paramAssignment.RegisterTypesAndOffsets()
628 for i, t := range typs {
629 if pointersOnly && !t.IsPtrShaped() {
630 continue
631 }
632 r := paramAssignment.Registers[i]
633 o := offs[i]
634 op, reg := ssa.ArgOpAndRegisterFor(r, abi)
635 aux := &ssa.AuxNameOffset{Name: n, Offset: o}
636 v := s.newValue0I(op, t, reg)
637 v.Aux = aux
638 p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
639 s.store(t, p, v)
640 }
641 }
642
643
644
645
646
647
648
649 func (s *state) zeroResults() {
650 for _, f := range s.curfn.Type().Results() {
651 n := f.Nname.(*ir.Name)
652 if !n.OnStack() {
653
654
655
656 continue
657 }
658
659 if typ := n.Type(); ssa.CanSSA(typ) {
660 s.assign(n, s.zeroVal(typ), false, 0)
661 } else {
662 if typ.HasPointers() || ssa.IsMergeCandidate(n) {
663 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
664 }
665 s.zero(n.Type(), s.decladdrs[n])
666 }
667 }
668 }
669
670
671
672 func (s *state) paramsToHeap() {
673 do := func(params []*types.Field) {
674 for _, f := range params {
675 if f.Nname == nil {
676 continue
677 }
678 n := f.Nname.(*ir.Name)
679 if ir.IsBlank(n) || n.OnStack() {
680 continue
681 }
682 s.newHeapaddr(n)
683 if n.Class == ir.PPARAM {
684 s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
685 }
686 }
687 }
688
689 typ := s.curfn.Type()
690 do(typ.Recvs())
691 do(typ.Params())
692 do(typ.Results())
693 }
694
695
// newHeapaddr allocates heap memory for n and sets n.Heapaddr to point at it.
func (s *state) newHeapaddr(n *ir.Name) {
	s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
}
699
700
701
702 func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
703 if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
704 base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
705 }
706
707
708 sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
709 addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
710 addr.SetUsed(true)
711 types.CalcSize(addr.Type())
712
713 if n.Class == ir.PPARAMOUT {
714 addr.SetIsOutputParamHeapAddr(true)
715 }
716
717 n.Heapaddr = addr
718 s.assign(addr, ptr, false, 0)
719 }
720
721
722 func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
723 if typ.Size() == 0 {
724 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
725 }
726 if rtype == nil {
727 rtype = s.reflectType(typ)
728 }
729 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
730 }
731
// checkPtrAlignment emits a checkptr alignment check for the unsafe
// pointer conversion n, whose converted value is v. count, if non-nil,
// holds the element count for a slice-style conversion (n's element type
// must then be an array). Statically safe cases are skipped.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// An element with alignment 1 can never be misaligned; skip the
	// runtime check when no overflow-prone count multiplication is
	// involved (size <= 1 or no count).
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	// Use the statically known rtype expression when available, otherwise
	// materialize the element type descriptor.
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
762
763
764
// reflectType returns an SSA value representing a pointer to typ's
// runtime type descriptor (placed in the entry block).
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
771
772 func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
773
774 fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
775 targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
776 if err != nil {
777 writer.Logf("cannot read sources for function %v: %v", fn, err)
778 }
779
780
781 var inlFns []*ssa.FuncLines
782 for _, fi := range ssaDumpInlined {
783 elno := fi.Endlineno
784 fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
785 fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
786 if err != nil {
787 writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
788 continue
789 }
790 inlFns = append(inlFns, fnLines)
791 }
792
793 slices.SortFunc(inlFns, ssa.ByTopoCmp)
794 if targetFn != nil {
795 inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
796 }
797
798 writer.WriteSources("sources", inlFns)
799 }
800
801 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
802 f, err := os.Open(os.ExpandEnv(file))
803 if err != nil {
804 return nil, err
805 }
806 defer f.Close()
807 var lines []string
808 ln := uint(1)
809 scanner := bufio.NewScanner(f)
810 for scanner.Scan() && ln <= end {
811 if ln >= start {
812 lines = append(lines, scanner.Text())
813 }
814 ln++
815 }
816 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
817 }
818
819
820
821
// updateUnsetPredPos propagates b's position information to any of b's
// predecessors that lack a position of their own, recursing on each
// predecessor it updates. b must itself have a position.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		// Compute the best position lazily, only once, when the first
		// position-lacking predecessor is found.
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Prefer the first positioned value in b, on the
					// assumption values are in roughly source order.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// Recursion is expected to stay shallow: chains of position-less
		// blocks are short in practice.
		s.updateUnsetPredPos(p)
	}
}
850
851
// An openDeferInfo records the details of a single open-coded defer in
// the current function (see s.openDefers and emitOpenDeferInfo).
type openDeferInfo struct {
	// The node representing the call of the defer.
	n *ir.CallExpr

	// The SSA value of the stored closure, if the deferred call required
	// one (nil otherwise — confirm against the defer-lowering code).
	closure *ssa.Value

	// The stack temporary holding the closure; its frame offset is what
	// emitOpenDeferInfo records for panic processing.
	closureNode *ir.Name
}
863
// A state holds all the working data for converting one function's
// AST-based IR into SSA form.
type state struct {
	// configuration (lowering) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current block we're generating code in
	curBlock *ssa.Block

	// variable assignments in the current block
	// (map from variable marker node to ssa value);
	// reset for every block by startBlock
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the
	// current block. This map exists just to coalesce multiple references
	// into a single FwdRef op; cleared on every startBlock.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by block ID;
	// filled in by endBlock, consumed by insertPhis
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables (computed in buildssa)
	decladdrs map[*ir.Name]*ssa.Value

	// starting values: memory, stack pointer, and static base pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// address and stack temporary of the deferBits mask for open-coded defers
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack; the current line number is the top of the stack
	line []src.XPos

	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number, keyed by
	// funcLine; used to deduplicate panic branch targets
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are lowering defers with the open-coded scheme
	checkPtrEnabled     bool
	instrumentEnterExit bool // race-instrument function entry/exit
	instrumentMemory    bool // instrument memory accesses (race/msan/asan)

	// information about each open-coded defer, in the order the defers
	// appear; consumed by emitOpenDeferInfo
	openDefers []*openDeferInfo

	// blocks tracking the shared defer-exit code path; presumably used to
	// reuse one exit sequence across multiple returns — confirm in exit()
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	prevCall *ssa.Value // the most recently emitted call value
}
939
// funcLine identifies a panic-helper call site: the target function plus
// the source position base and line. Used as the key of s.panics to share
// one panic block per distinct (function, line).
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
945
// ssaLabel carries the blocks associated with a Go label: the jump
// target, and the break/continue targets when the label names a loop
// or switch.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
951
952
953 func (s *state) label(sym *types.Sym) *ssaLabel {
954 lab := s.labels[sym.Name]
955 if lab == nil {
956 lab = new(ssaLabel)
957 s.labels[sym.Name] = lab
958 }
959 return lab
960 }
961
// Logf logs a message via the SSA function's logger.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }

// Log reports whether logging is enabled for this function.
func (s *state) Log() bool { return s.f.Log() }

// Fatalf reports a compiler error at the current (peeked) source position.
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}

// Warnl emits a warning message at pos.
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }

// Debug_checknil reports whether nil-check debugging output is enabled.
func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
969
// ssaMarker returns a synthetic *ir.Name used purely as a map key for
// pseudo-variables (memory, slice parts, etc.) in s.vars/s.fwdVars.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
973
var (
	// marker node for the memory pseudo-variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary pseudo-variables used while building
	// compound operations (slice parts, type-assert results, etc.)
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
987
988
// startBlock sets the current block we're generating code in to b.
// The previous current block must have been ended (endBlock) first.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	// Forward references are only meaningful within one block; discard
	// any left over from the previous block.
	clear(s.fwdVars)
}
997
998
999
1000
// endBlock marks the end of generating code for the current block and
// returns the (former) current block. Returns nil if there is no current
// block (i.e. if the current code is unreachable).
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	// Record this block's final variable bindings for later phi insertion.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// A block with no position-bearing content keeps no position;
		// updateUnsetPredPos may fill it in later from a successor.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}
1022
1023
1024 func (s *state) pushLine(line src.XPos) {
1025 if !line.IsKnown() {
1026
1027
1028 line = s.peekPos()
1029 if base.Flag.K != 0 {
1030 base.Warn("buildssa: unknown position (line 0)")
1031 }
1032 } else {
1033 s.lastPos = line
1034 }
1035
1036 s.line = append(s.line, line)
1037 }
1038
1039
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos returns the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1048
1049
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary
// (false means the position is marked NotStmt).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1148
1149 func (s *state) entryBlock() *ssa.Block {
1150 b := s.f.Entry
1151 if base.Flag.N > 0 && s.curBlock != nil {
1152
1153
1154
1155
1156 b = s.curBlock
1157 }
1158 return b
1159 }
1160
1161
1162 func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
1163 return s.entryBlock().NewValue0(src.NoXPos, op, t)
1164 }
1165
1166
1167 func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
1168 return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
1169 }
1170
1171
1172 func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
1173 return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
1174 }
1175
1176
1177 func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
1178 return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
1179 }
1180
1181
1182 func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
1183 return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
1184 }
1185
1186
1187 func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
1188 return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
1189 }
1190
1191
1192 func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
1193 return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
1194 }
1195
1196
1197 func (s *state) constSlice(t *types.Type) *ssa.Value {
1198 return s.f.ConstSlice(t)
1199 }
1200 func (s *state) constInterface(t *types.Type) *ssa.Value {
1201 return s.f.ConstInterface(t)
1202 }
1203 func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
1204 func (s *state) constEmptyString(t *types.Type) *ssa.Value {
1205 return s.f.ConstEmptyString(t)
1206 }
1207 func (s *state) constBool(c bool) *ssa.Value {
1208 return s.f.ConstBool(types.Types[types.TBOOL], c)
1209 }
1210 func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
1211 return s.f.ConstInt8(t, c)
1212 }
1213 func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
1214 return s.f.ConstInt16(t, c)
1215 }
1216 func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
1217 return s.f.ConstInt32(t, c)
1218 }
1219 func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
1220 return s.f.ConstInt64(t, c)
1221 }
1222 func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
1223 return s.f.ConstFloat32(t, c)
1224 }
1225 func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
1226 return s.f.ConstFloat64(t, c)
1227 }
1228 func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
1229 if s.config.PtrSize == 8 {
1230 return s.constInt64(t, c)
1231 }
1232 if int64(int32(c)) != c {
1233 s.Fatalf("integer constant too big %d", c)
1234 }
1235 return s.constInt32(t, int32(c))
1236 }
1237 func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
1238 return s.f.ConstOffPtrSP(t, c, s.sp)
1239 }
1240
1241
1242
1243 func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
1244 if s.softFloat {
1245 if c, ok := s.sfcall(op, arg); ok {
1246 return c
1247 }
1248 }
1249 return s.newValue1(op, t, arg)
1250 }
1251 func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
1252 if s.softFloat {
1253 if c, ok := s.sfcall(op, arg0, arg1); ok {
1254 return c
1255 }
1256 }
1257 return s.newValue2(op, t, arg0, arg1)
1258 }
1259
// instrumentKind indicates the kind of memory operation being instrumented
// by the sanitizer helpers (instrument, instrument2, instrumentFields).
type instrumentKind uint8

const (
	// Give the constants the declared type so accidental use of a plain
	// int as a kind is caught at compile time.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1267
1268 func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1269 s.instrument2(t, addr, nil, kind)
1270 }
1271
1272
1273
1274
1275 func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1276 if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
1277 s.instrument(t, addr, kind)
1278 return
1279 }
1280 for _, f := range t.Fields() {
1281 if f.Sym.IsBlank() {
1282 continue
1283 }
1284 offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
1285 s.instrumentFields(f.Type, offptr, kind)
1286 }
1287 }
1288
1289 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1290 if base.Flag.MSan {
1291 s.instrument2(t, dst, src, instrumentMove)
1292 } else {
1293 s.instrument(t, src, instrumentRead)
1294 s.instrument(t, dst, instrumentWrite)
1295 }
1296 }
1297
// instrument2 emits sanitizer instrumentation (race, msan, or asan) for a
// memory operation of kind on an object of type t at addr. For
// instrumentMove, addr2 is the source address; for every other kind addr2
// must be nil. The instrumentation is a runtime call inserted before the
// operation itself.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	// Zero-sized objects carry no data; nothing to report.
	w := t.Size()
	if w == 0 {
		return
	}

	// Addresses the sanitizers do not need to track are skipped entirely.
	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	// Select the runtime hook. Exactly one sanitizer mode is expected to
	// be active here; the trailing panics guard impossible combinations.
	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// Composite objects (more than one component) use the ranged race
		// hooks, which take an explicit width, so that an access to any
		// sub-object is covered.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component objects use the plain race hooks, which take
		// only the address.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	// Arguments: addr [, addr2] [, width] — matching the selected hook.
	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1378
1379 func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
1380 s.instrumentFields(t, src, instrumentRead)
1381 return s.rawLoad(t, src)
1382 }
1383
1384 func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
1385 return s.newValue2(ssa.OpLoad, t, src, s.mem())
1386 }
1387
1388 func (s *state) store(t *types.Type, dst, val *ssa.Value) {
1389 s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
1390 }
1391
1392 func (s *state) zero(t *types.Type, dst *ssa.Value) {
1393 s.instrument(t, dst, instrumentWrite)
1394 store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
1395 store.Aux = t
1396 s.vars[memVar] = store
1397 }
1398
1399 func (s *state) move(t *types.Type, dst, src *ssa.Value) {
1400 s.moveWhichMayOverlap(t, dst, src, false)
1401 }
// moveWhichMayOverlap copies a value of type t from src to dst.
// If mayOverlap is true, the source and destination regions may partially
// overlap, so a plain OpMove (whose semantics for overlapping copies are
// not guaranteed here — see the fallback below) cannot always be used.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Possibly-overlapping multi-element array copy that would not be
		// expanded as an inlinable memmove: fall back to a runtime call,
		// since the runtime copy routines handle overlapping regions.
		// (Single-element arrays and non-arrays are copied as one unit,
		// so partial overlap is not an issue for them.)
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// typedmemmove may invoke write barriers; record the position
			// for write-barrier diagnostics.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			// No pointers: plain memmove with an explicit byte count.
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	// Common case: emit an SSA OpMove and thread the memory state.
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1445
1446
1447 func (s *state) stmtList(l ir.Nodes) {
1448 for _, n := range l {
1449 s.stmt(n)
1450 }
1451 }
1452
1453
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If the current block is nil, this code is dead — except for labels,
	// which may be jump targets and thus restart code generation.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL:
		// Fallthrough markers need no code of their own.

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		// Calls to known never-returning runtime functions (throw, gopanic,
		// the panic* helpers, ...) terminate the block with BlockExit.
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer strategy was chosen (debug aid).
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				// Non-escaping defer with no explicit frame: the record
				// can live on the stack.
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSAable result: the dottype must have produced a load;
			// assign from its address instead of its value.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// Only intrinsic calls returning two results reach here.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			// Heap-escaping declarations need a heap allocation now.
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// A blank label can never be a jump target; nothing to do.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block from a forward goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Fall into the label's block from the current block, if the
		// current block is still live.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt()
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment of a name is a no-op.
			return
		}

		// An assignment *p = *q may copy between overlapping regions;
		// track that so the move can be lowered safely. Copies whose
		// source is derived from a string pointer are exempt (strings
		// are immutable, so the source cannot alias the destination).
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				mayOverlap = false
			}
		}

		// Detect special assignment forms on the right-hand side.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// Composite literals reaching SSA must be all-zero;
				// treat them as assignment of the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// x = append(x, ...) may be handled with the in-place
				// append path, but only when the destination is the same
				// expression as the appendee and optimization is on.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// SSA-able destinations go through the generic path.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate rhs for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		// Non-SSAable types are assigned by address (deref); SSAable
		// types by value, with nil rhs meaning the zero value.
		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		// For x = x[i:j:k], parts of the slice header that provably do
		// not change can skip their store (skipPtr/skipLen/skipCap).
		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// A constant-zero low bound is the same as no low bound.
				i = nil
			}
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty then/else bodies branch straight to the merge block.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// Unlabeled: target the innermost loop/switch.
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// Labeled: target the labeled construct.
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt()
		b.AddEdgeTo(to)

	case ir.OFOR:
		// Layout: goto cond; cond -> body|end; body -> incr; incr -> cond.
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars)
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		bBody.Pos = n.Pos()

		// Enter the loop through the condition block.
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			// for {}: unconditionally continue into the body.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// Set break/continue targets for this loop, saving the outer ones.
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		s.startBlock(bBody)
		s.stmtList(n.Body)

		// Restore the outer break/continue targets.
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// Give the back-edge a position if the post statement did not.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been rewritten into compiled bodies; only the break
		// target needs handling here.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// If the body falls off the end, control cannot reach here;
		// terminate the block as an exit.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// jt dispatches on the index; bEnd is the out-of-range fallthrough.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Normalize the index to uintptr width.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Cases are sorted, so the first and last give the table bounds.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		// Range check: idx-min <= max-min (unsigned) covers both bounds.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely

		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Build the table; unmentioned slots fall through to bEnd.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// With optimization on and a supported architecture, probe the
		// interface-switch cache inline before calling the runtime.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
			// The inline probe requires an atomic pointer load intrinsic.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pointer-width arithmetic ops for the probe loop.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Atomically load the cache pointer from the descriptor.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// hashVar is the probe cursor, advanced on each iteration.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// First word of the cache is the index mask.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// loopHead: compute the address of the probed entry
			// (entries start one pointer past the mask; each entry is
			// 3 pointer-sized words).
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance the cursor for the next probe (linear probing).
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Entry's first word is the cached type; match -> hit.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// loopBody: nil type marks an empty slot -> miss; otherwise
			// keep probing.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// cacheHit: read the cached case index and itab from the
			// entry's second and third words.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// cacheMiss falls through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Join the cache-hit and runtime-call paths.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		// Emit the nil check for its side effect only.
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2148
2149
2150
// shareDeferExits, if true, lets exit() reuse a previously generated
// open-coded defer exit sequence when the set of pending open defers is
// unchanged (see exit). It is disabled — presumably because sharing would
// merge distinct return positions; confirm before enabling.
const shareDeferExits = false
2152
2153
2154
2155
// exit processes pending defers, materializes the function results, and
// ends the current block with a BlockRet returning them. It returns the
// final block of the exit sequence.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Optional fast path: reuse the previously generated
				// open-coded defer exit (disabled; see shareDeferExits).
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers run via the runtime.
			s.rtcall(ir.Syms.Deferreturn, true, nil)
		}
	}

	// Gather the result values; the extra final slot carries the memory
	// state into OpMakeResult.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)

	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// Mark the stack slot as defined before the return so
				// liveness treats it as initialized.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Heap-escaped result: read it back through its heap address.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Non-SSAable stack-resident result: dereference its address.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Race-mode function exit hook runs after results are read.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		// Remember the final block for the (disabled) sharing fast path.
		s.lastDeferFinalBlock = b
	}
	return b
}
2221
// opAndType is the lookup key for opToSSA: a generic IR operator paired
// with the kind of its operands.
type opAndType struct {
	op ir.Op
	etype types.Kind
}
2226
// opToSSA maps a generic IR operator applied to operands of a given kind
// to the corresponding typed SSA op (e.g. OADD on TINT8 -> OpAdd8).
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}: ssa.OpAdd8,
	{ir.OADD, types.TUINT8}: ssa.OpAdd8,
	{ir.OADD, types.TINT16}: ssa.OpAdd16,
	{ir.OADD, types.TUINT16}: ssa.OpAdd16,
	{ir.OADD, types.TINT32}: ssa.OpAdd32,
	{ir.OADD, types.TUINT32}: ssa.OpAdd32,
	{ir.OADD, types.TINT64}: ssa.OpAdd64,
	{ir.OADD, types.TUINT64}: ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}: ssa.OpSub8,
	{ir.OSUB, types.TUINT8}: ssa.OpSub8,
	{ir.OSUB, types.TINT16}: ssa.OpSub16,
	{ir.OSUB, types.TUINT16}: ssa.OpSub16,
	{ir.OSUB, types.TINT32}: ssa.OpSub32,
	{ir.OSUB, types.TUINT32}: ssa.OpSub32,
	{ir.OSUB, types.TINT64}: ssa.OpSub64,
	{ir.OSUB, types.TUINT64}: ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}: ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}: ssa.OpMul8,
	{ir.OMUL, types.TUINT8}: ssa.OpMul8,
	{ir.OMUL, types.TINT16}: ssa.OpMul16,
	{ir.OMUL, types.TUINT16}: ssa.OpMul16,
	{ir.OMUL, types.TINT32}: ssa.OpMul32,
	{ir.OMUL, types.TUINT32}: ssa.OpMul32,
	{ir.OMUL, types.TINT64}: ssa.OpMul64,
	{ir.OMUL, types.TUINT64}: ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division distinguishes signed (OpDivN) from unsigned (OpDivNu).
	{ir.ODIV, types.TINT8}: ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}: ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}: ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}: ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}: ssa.OpMod8,
	{ir.OMOD, types.TUINT8}: ssa.OpMod8u,
	{ir.OMOD, types.TINT16}: ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}: ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}: ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}: ssa.OpAnd8,
	{ir.OAND, types.TUINT8}: ssa.OpAnd8,
	{ir.OAND, types.TINT16}: ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}: ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}: ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}: ssa.OpOr8,
	{ir.OOR, types.TUINT8}: ssa.OpOr8,
	{ir.OOR, types.TINT16}: ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}: ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}: ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}: ssa.OpXor8,
	{ir.OXOR, types.TUINT8}: ssa.OpXor8,
	{ir.OXOR, types.TINT16}: ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}: ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}: ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality on pointer-shaped kinds (func, map, chan, ptr, uintptr,
	// unsafe.Pointer) lowers to pointer comparison.
	{ir.OEQ, types.TBOOL}: ssa.OpEqB,
	{ir.OEQ, types.TINT8}: ssa.OpEq8,
	{ir.OEQ, types.TUINT8}: ssa.OpEq8,
	{ir.OEQ, types.TINT16}: ssa.OpEq16,
	{ir.OEQ, types.TUINT16}: ssa.OpEq16,
	{ir.OEQ, types.TINT32}: ssa.OpEq32,
	{ir.OEQ, types.TUINT32}: ssa.OpEq32,
	{ir.OEQ, types.TINT64}: ssa.OpEq64,
	{ir.OEQ, types.TUINT64}: ssa.OpEq64,
	{ir.OEQ, types.TINTER}: ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}: ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,

	{ir.ONE, types.TBOOL}: ssa.OpNeqB,
	{ir.ONE, types.TINT8}: ssa.OpNeq8,
	{ir.ONE, types.TUINT8}: ssa.OpNeq8,
	{ir.ONE, types.TINT16}: ssa.OpNeq16,
	{ir.ONE, types.TUINT16}: ssa.OpNeq16,
	{ir.ONE, types.TINT32}: ssa.OpNeq32,
	{ir.ONE, types.TUINT32}: ssa.OpNeq32,
	{ir.ONE, types.TINT64}: ssa.OpNeq64,
	{ir.ONE, types.TUINT64}: ssa.OpNeq64,
	{ir.ONE, types.TINTER}: ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}: ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,

	// Ordered comparisons distinguish signed (OpLessN/OpLeqN) from
	// unsigned (OpLessNU/OpLeqNU) forms.
	{ir.OLT, types.TINT8}: ssa.OpLess8,
	{ir.OLT, types.TUINT8}: ssa.OpLess8U,
	{ir.OLT, types.TINT16}: ssa.OpLess16,
	{ir.OLT, types.TUINT16}: ssa.OpLess16U,
	{ir.OLT, types.TINT32}: ssa.OpLess32,
	{ir.OLT, types.TUINT32}: ssa.OpLess32U,
	{ir.OLT, types.TINT64}: ssa.OpLess64,
	{ir.OLT, types.TUINT64}: ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}: ssa.OpLeq8,
	{ir.OLE, types.TUINT8}: ssa.OpLeq8U,
	{ir.OLE, types.TINT16}: ssa.OpLeq16,
	{ir.OLE, types.TUINT16}: ssa.OpLeq16U,
	{ir.OLE, types.TINT32}: ssa.OpLeq32,
	{ir.OLE, types.TUINT32}: ssa.OpLeq32U,
	{ir.OLE, types.TINT64}: ssa.OpLeq64,
	{ir.OLE, types.TUINT64}: ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2398
2399 func (s *state) concreteEtype(t *types.Type) types.Kind {
2400 e := t.Kind()
2401 switch e {
2402 default:
2403 return e
2404 case types.TINT:
2405 if s.config.PtrSize == 8 {
2406 return types.TINT64
2407 }
2408 return types.TINT32
2409 case types.TUINT:
2410 if s.config.PtrSize == 8 {
2411 return types.TUINT64
2412 }
2413 return types.TUINT32
2414 case types.TUINTPTR:
2415 if s.config.PtrSize == 8 {
2416 return types.TUINT64
2417 }
2418 return types.TUINT32
2419 }
2420 }
2421
2422 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2423 etype := s.concreteEtype(t)
2424 x, ok := opToSSA[opAndType{op, etype}]
2425 if !ok {
2426 s.Fatalf("unhandled binary op %v %s", op, etype)
2427 }
2428 return x
2429 }
2430
// opAndTwoTypes is the key type for shiftOpToSSA: an IR shift operator
// together with the kind of the shifted operand (etype1) and the kind of
// the shift count (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2436
// twoTypes is the key type for the float-conversion tables below: the
// source kind (etype1) and destination kind (etype2) of a conversion.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2441
// twoOpsAndType describes how conv lowers a conversion: apply op1 to
// produce a value of intermediateType, then apply op2 to produce the final
// type. Either op may be ssa.OpCopy (that step is a no-op) or
// ssa.OpInvalid (the conversion needs special handling; see conv).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2447
// fpConvOpToSSA maps a (from, to) kind pair for float<->int and
// float<->float conversions to the pair of SSA ops implementing it: op1 is
// applied first, producing the intermediate type, then op2. ssa.OpCopy
// entries mean that step does nothing; ssa.OpInvalid marks the unsigned
// 64-bit cases, which conv lowers via runtime helper calls instead —
// unless an architecture-specific table below overrides the entry.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32: widen small ints to 32 bits, then convert.
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},
	// signed int -> float64
	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},
	// float32 -> signed int: convert to a 32/64-bit int, then truncate.
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},
	// float64 -> signed int
	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},
	// unsigned int -> float32: zero-extend (uint32 goes through int64 so the
	// signed convert is exact); uint64 needs special handling (OpInvalid).
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
	// unsigned int -> float64
	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
	// float32 -> unsigned int: go through a wide-enough signed int, then
	// truncate; float -> uint64 needs special handling (OpInvalid).
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},
	// float64 -> unsigned int
	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float; same-size conversions round (for constant-folding
	// consistency) rather than copy.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2496
2497
2498
// fpConvOpToSSA32 overrides fpConvOpToSSA entries on 32-bit targets
// (except MIPS, and except in soft-float mode — see conv), which have
// dedicated unsigned 32-bit conversion ops instead of widening through a
// 64-bit integer.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2505
2506
// uint64fpConvOpToSSA overrides the ssa.OpInvalid entries of fpConvOpToSSA
// on targets with direct uint64 <-> float conversion ops (arm64, wasm,
// s390x, and soft-float mode — see conv), avoiding the runtime helper
// fallback.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2513
// shiftOpToSSA maps (shift op, operand kind, shift-count kind) triples to
// the SSA opcode implementing that shift. Left shifts are identical for
// signed and unsigned operands; right shifts distinguish arithmetic
// (signed operand, Rsh*x*) from logical (unsigned operand, Rsh*Ux*).
// Shift counts are always one of the unsigned kinds (signed counts are
// range-checked and converted by the caller; see the OLSH/ORSH case in
// exprCheckPtr).
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2587
2588 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2589 etype1 := s.concreteEtype(t)
2590 etype2 := s.concreteEtype(u)
2591 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2592 if !ok {
2593 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2594 }
2595 return x
2596 }
2597
2598 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2599 if s.config.PtrSize == 4 {
2600 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2601 }
2602 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2603 }
2604
// conv converts the SSA value v, which has Go type ft, to Go type tt and
// returns the converted value. n supplies the IR node for the runtime-call
// fallbacks (used for position/error reporting by those helpers).
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// Bool-to-uint8 has its own op; it is not in the conversion tables.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		// Integer <-> integer: copy, truncate, or extend based on sizes.
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// Truncation. The switch key encodes (from size, to size) as
			// 10*from+to, e.g. 84 = 8-byte source to 4-byte destination.
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// Widening a signed source: sign extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// Widening an unsigned source: zero extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex <-> complex: convert real and imaginary parts separately,
		// then reassemble. Sizes are total complex sizes (8 = complex64).
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Non-complex -> complex: convert to the element float type, then
		// pair with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
		// On 32-bit targets (other than MIPS, and not in soft-float mode),
		// prefer the dedicated uint32 conversion ops.
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		// Targets with direct uint64 <-> float ops override the OpInvalid
		// table entries, avoiding the runtime helper fallback below.
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// MIPS (hardware float) routes uint32 <-> float through generated
		// helper code rather than table ops.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float32/float64
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float32/float64 -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 then op2, eliding OpCopy steps.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}
		// One op is OpInvalid: a uint64 <-> float conversion that this
		// target cannot do directly; expand to helper code.
		if ft.IsInteger() {
			// uint64 -> float32/float64
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}
		// float32/float64 -> uint64
		if ft.Size() == 4 {
			return s.float32ToUint64(n, v, ft, tt)
		}
		if ft.Size() == 8 {
			return s.float64ToUint64(n, v, ft, tt)
		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
2780
2781
// expr converts the expression n to SSA, adds it to s, and returns the
// resulting SSA value. It is shorthand for exprCheckPtr with checkptr
// instrumentation permitted.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
2785
2786 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2787 if ir.HasUniquePos(n) {
2788
2789
2790 s.pushLine(n.Pos())
2791 defer s.popLine()
2792 }
2793
2794 s.stmtList(n.Init())
2795 switch n.Op() {
2796 case ir.OBYTES2STRTMP:
2797 n := n.(*ir.ConvExpr)
2798 slice := s.expr(n.X)
2799 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2800 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2801 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2802 case ir.OSTR2BYTESTMP:
2803 n := n.(*ir.ConvExpr)
2804 str := s.expr(n.X)
2805 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2806 if !n.NonNil() {
2807
2808
2809
2810 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2811 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2812 ptr = s.ternary(cond, ptr, zerobase)
2813 }
2814 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2815 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2816 case ir.OCFUNC:
2817 n := n.(*ir.UnaryExpr)
2818 aux := n.X.(*ir.Name).Linksym()
2819
2820
2821 if aux.ABI() != obj.ABIInternal {
2822 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2823 }
2824 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2825 case ir.ONAME:
2826 n := n.(*ir.Name)
2827 if n.Class == ir.PFUNC {
2828
2829 sym := staticdata.FuncLinksym(n)
2830 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2831 }
2832 if s.canSSA(n) {
2833 return s.variable(n, n.Type())
2834 }
2835 return s.load(n.Type(), s.addr(n))
2836 case ir.OLINKSYMOFFSET:
2837 n := n.(*ir.LinksymOffsetExpr)
2838 return s.load(n.Type(), s.addr(n))
2839 case ir.ONIL:
2840 n := n.(*ir.NilExpr)
2841 t := n.Type()
2842 switch {
2843 case t.IsSlice():
2844 return s.constSlice(t)
2845 case t.IsInterface():
2846 return s.constInterface(t)
2847 default:
2848 return s.constNil(t)
2849 }
2850 case ir.OLITERAL:
2851 switch u := n.Val(); u.Kind() {
2852 case constant.Int:
2853 i := ir.IntVal(n.Type(), u)
2854 switch n.Type().Size() {
2855 case 1:
2856 return s.constInt8(n.Type(), int8(i))
2857 case 2:
2858 return s.constInt16(n.Type(), int16(i))
2859 case 4:
2860 return s.constInt32(n.Type(), int32(i))
2861 case 8:
2862 return s.constInt64(n.Type(), i)
2863 default:
2864 s.Fatalf("bad integer size %d", n.Type().Size())
2865 return nil
2866 }
2867 case constant.String:
2868 i := constant.StringVal(u)
2869 if i == "" {
2870 return s.constEmptyString(n.Type())
2871 }
2872 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
2873 case constant.Bool:
2874 return s.constBool(constant.BoolVal(u))
2875 case constant.Float:
2876 f, _ := constant.Float64Val(u)
2877 switch n.Type().Size() {
2878 case 4:
2879 return s.constFloat32(n.Type(), f)
2880 case 8:
2881 return s.constFloat64(n.Type(), f)
2882 default:
2883 s.Fatalf("bad float size %d", n.Type().Size())
2884 return nil
2885 }
2886 case constant.Complex:
2887 re, _ := constant.Float64Val(constant.Real(u))
2888 im, _ := constant.Float64Val(constant.Imag(u))
2889 switch n.Type().Size() {
2890 case 8:
2891 pt := types.Types[types.TFLOAT32]
2892 return s.newValue2(ssa.OpComplexMake, n.Type(),
2893 s.constFloat32(pt, re),
2894 s.constFloat32(pt, im))
2895 case 16:
2896 pt := types.Types[types.TFLOAT64]
2897 return s.newValue2(ssa.OpComplexMake, n.Type(),
2898 s.constFloat64(pt, re),
2899 s.constFloat64(pt, im))
2900 default:
2901 s.Fatalf("bad complex size %d", n.Type().Size())
2902 return nil
2903 }
2904 default:
2905 s.Fatalf("unhandled OLITERAL %v", u.Kind())
2906 return nil
2907 }
2908 case ir.OCONVNOP:
2909 n := n.(*ir.ConvExpr)
2910 to := n.Type()
2911 from := n.X.Type()
2912
2913
2914
2915 x := s.expr(n.X)
2916 if to == from {
2917 return x
2918 }
2919
2920
2921
2922
2923
2924 if to.IsPtrShaped() != from.IsPtrShaped() {
2925 return s.newValue2(ssa.OpConvert, to, x, s.mem())
2926 }
2927
2928 v := s.newValue1(ssa.OpCopy, to, x)
2929
2930
2931 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
2932 return v
2933 }
2934
2935
2936 if from.Kind() == to.Kind() {
2937 return v
2938 }
2939
2940
2941 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
2942 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
2943 s.checkPtrAlignment(n, v, nil)
2944 }
2945 return v
2946 }
2947
2948
2949 var mt *types.Type
2950 if buildcfg.Experiment.SwissMap {
2951 mt = types.NewPtr(reflectdata.SwissMapType())
2952 } else {
2953 mt = types.NewPtr(reflectdata.OldMapType())
2954 }
2955 if to.Kind() == types.TMAP && from == mt {
2956 return v
2957 }
2958
2959 types.CalcSize(from)
2960 types.CalcSize(to)
2961 if from.Size() != to.Size() {
2962 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
2963 return nil
2964 }
2965 if etypesign(from.Kind()) != etypesign(to.Kind()) {
2966 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
2967 return nil
2968 }
2969
2970 if base.Flag.Cfg.Instrumenting {
2971
2972
2973
2974 return v
2975 }
2976
2977 if etypesign(from.Kind()) == 0 {
2978 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
2979 return nil
2980 }
2981
2982
2983 return v
2984
2985 case ir.OCONV:
2986 n := n.(*ir.ConvExpr)
2987 x := s.expr(n.X)
2988 return s.conv(n, x, n.X.Type(), n.Type())
2989
2990 case ir.ODOTTYPE:
2991 n := n.(*ir.TypeAssertExpr)
2992 res, _ := s.dottype(n, false)
2993 return res
2994
2995 case ir.ODYNAMICDOTTYPE:
2996 n := n.(*ir.DynamicTypeAssertExpr)
2997 res, _ := s.dynamicDottype(n, false)
2998 return res
2999
3000
3001 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3002 n := n.(*ir.BinaryExpr)
3003 a := s.expr(n.X)
3004 b := s.expr(n.Y)
3005 if n.X.Type().IsComplex() {
3006 pt := types.FloatForComplex(n.X.Type())
3007 op := s.ssaOp(ir.OEQ, pt)
3008 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3009 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3010 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3011 switch n.Op() {
3012 case ir.OEQ:
3013 return c
3014 case ir.ONE:
3015 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3016 default:
3017 s.Fatalf("ordered complex compare %v", n.Op())
3018 }
3019 }
3020
3021
3022 op := n.Op()
3023 switch op {
3024 case ir.OGE:
3025 op, a, b = ir.OLE, b, a
3026 case ir.OGT:
3027 op, a, b = ir.OLT, b, a
3028 }
3029 if n.X.Type().IsFloat() {
3030
3031 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3032 }
3033
3034 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3035 case ir.OMUL:
3036 n := n.(*ir.BinaryExpr)
3037 a := s.expr(n.X)
3038 b := s.expr(n.Y)
3039 if n.Type().IsComplex() {
3040 mulop := ssa.OpMul64F
3041 addop := ssa.OpAdd64F
3042 subop := ssa.OpSub64F
3043 pt := types.FloatForComplex(n.Type())
3044 wt := types.Types[types.TFLOAT64]
3045
3046 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3047 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3048 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3049 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3050
3051 if pt != wt {
3052 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3053 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3054 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3055 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3056 }
3057
3058 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3059 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3060
3061 if pt != wt {
3062 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3063 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3064 }
3065
3066 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3067 }
3068
3069 if n.Type().IsFloat() {
3070 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3071 }
3072
3073 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3074
3075 case ir.ODIV:
3076 n := n.(*ir.BinaryExpr)
3077 a := s.expr(n.X)
3078 b := s.expr(n.Y)
3079 if n.Type().IsComplex() {
3080
3081
3082
3083 mulop := ssa.OpMul64F
3084 addop := ssa.OpAdd64F
3085 subop := ssa.OpSub64F
3086 divop := ssa.OpDiv64F
3087 pt := types.FloatForComplex(n.Type())
3088 wt := types.Types[types.TFLOAT64]
3089
3090 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3091 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3092 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3093 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3094
3095 if pt != wt {
3096 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3097 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3098 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3099 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3100 }
3101
3102 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3103 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3104 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3105
3106
3107
3108
3109
3110 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3111 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3112
3113 if pt != wt {
3114 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3115 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3116 }
3117 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3118 }
3119 if n.Type().IsFloat() {
3120 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3121 }
3122 return s.intDivide(n, a, b)
3123 case ir.OMOD:
3124 n := n.(*ir.BinaryExpr)
3125 a := s.expr(n.X)
3126 b := s.expr(n.Y)
3127 return s.intDivide(n, a, b)
3128 case ir.OADD, ir.OSUB:
3129 n := n.(*ir.BinaryExpr)
3130 a := s.expr(n.X)
3131 b := s.expr(n.Y)
3132 if n.Type().IsComplex() {
3133 pt := types.FloatForComplex(n.Type())
3134 op := s.ssaOp(n.Op(), pt)
3135 return s.newValue2(ssa.OpComplexMake, n.Type(),
3136 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3137 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3138 }
3139 if n.Type().IsFloat() {
3140 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3141 }
3142 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3143 case ir.OAND, ir.OOR, ir.OXOR:
3144 n := n.(*ir.BinaryExpr)
3145 a := s.expr(n.X)
3146 b := s.expr(n.Y)
3147 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3148 case ir.OANDNOT:
3149 n := n.(*ir.BinaryExpr)
3150 a := s.expr(n.X)
3151 b := s.expr(n.Y)
3152 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3153 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3154 case ir.OLSH, ir.ORSH:
3155 n := n.(*ir.BinaryExpr)
3156 a := s.expr(n.X)
3157 b := s.expr(n.Y)
3158 bt := b.Type
3159 if bt.IsSigned() {
3160 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3161 s.check(cmp, ir.Syms.Panicshift)
3162 bt = bt.ToUnsigned()
3163 }
3164 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3165 case ir.OANDAND, ir.OOROR:
3166
3167
3168
3169
3170
3171
3172
3173
3174
3175
3176
3177
3178
3179 n := n.(*ir.LogicalExpr)
3180 el := s.expr(n.X)
3181 s.vars[n] = el
3182
3183 b := s.endBlock()
3184 b.Kind = ssa.BlockIf
3185 b.SetControl(el)
3186
3187
3188
3189
3190
3191 bRight := s.f.NewBlock(ssa.BlockPlain)
3192 bResult := s.f.NewBlock(ssa.BlockPlain)
3193 if n.Op() == ir.OANDAND {
3194 b.AddEdgeTo(bRight)
3195 b.AddEdgeTo(bResult)
3196 } else if n.Op() == ir.OOROR {
3197 b.AddEdgeTo(bResult)
3198 b.AddEdgeTo(bRight)
3199 }
3200
3201 s.startBlock(bRight)
3202 er := s.expr(n.Y)
3203 s.vars[n] = er
3204
3205 b = s.endBlock()
3206 b.AddEdgeTo(bResult)
3207
3208 s.startBlock(bResult)
3209 return s.variable(n, types.Types[types.TBOOL])
3210 case ir.OCOMPLEX:
3211 n := n.(*ir.BinaryExpr)
3212 r := s.expr(n.X)
3213 i := s.expr(n.Y)
3214 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3215
3216
3217 case ir.ONEG:
3218 n := n.(*ir.UnaryExpr)
3219 a := s.expr(n.X)
3220 if n.Type().IsComplex() {
3221 tp := types.FloatForComplex(n.Type())
3222 negop := s.ssaOp(n.Op(), tp)
3223 return s.newValue2(ssa.OpComplexMake, n.Type(),
3224 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3225 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3226 }
3227 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3228 case ir.ONOT, ir.OBITNOT:
3229 n := n.(*ir.UnaryExpr)
3230 a := s.expr(n.X)
3231 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3232 case ir.OIMAG, ir.OREAL:
3233 n := n.(*ir.UnaryExpr)
3234 a := s.expr(n.X)
3235 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3236 case ir.OPLUS:
3237 n := n.(*ir.UnaryExpr)
3238 return s.expr(n.X)
3239
3240 case ir.OADDR:
3241 n := n.(*ir.AddrExpr)
3242 return s.addr(n.X)
3243
3244 case ir.ORESULT:
3245 n := n.(*ir.ResultExpr)
3246 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3247 panic("Expected to see a previous call")
3248 }
3249 which := n.Index
3250 if which == -1 {
3251 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3252 }
3253 return s.resultOfCall(s.prevCall, which, n.Type())
3254
3255 case ir.ODEREF:
3256 n := n.(*ir.StarExpr)
3257 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3258 return s.load(n.Type(), p)
3259
3260 case ir.ODOT:
3261 n := n.(*ir.SelectorExpr)
3262 if n.X.Op() == ir.OSTRUCTLIT {
3263
3264
3265
3266 if !ir.IsZero(n.X) {
3267 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3268 }
3269 return s.zeroVal(n.Type())
3270 }
3271
3272
3273
3274
3275 if ir.IsAddressable(n) && !s.canSSA(n) {
3276 p := s.addr(n)
3277 return s.load(n.Type(), p)
3278 }
3279 v := s.expr(n.X)
3280 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3281
3282 case ir.ODOTPTR:
3283 n := n.(*ir.SelectorExpr)
3284 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3285 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3286 return s.load(n.Type(), p)
3287
3288 case ir.OINDEX:
3289 n := n.(*ir.IndexExpr)
3290 switch {
3291 case n.X.Type().IsString():
3292 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3293
3294
3295
3296 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3297 }
3298 a := s.expr(n.X)
3299 i := s.expr(n.Index)
3300 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3301 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3302 ptrtyp := s.f.Config.Types.BytePtr
3303 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3304 if ir.IsConst(n.Index, constant.Int) {
3305 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3306 } else {
3307 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3308 }
3309 return s.load(types.Types[types.TUINT8], ptr)
3310 case n.X.Type().IsSlice():
3311 p := s.addr(n)
3312 return s.load(n.X.Type().Elem(), p)
3313 case n.X.Type().IsArray():
3314 if ssa.CanSSA(n.X.Type()) {
3315
3316 bound := n.X.Type().NumElem()
3317 a := s.expr(n.X)
3318 i := s.expr(n.Index)
3319 if bound == 0 {
3320
3321
3322 z := s.constInt(types.Types[types.TINT], 0)
3323 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3324
3325
3326 return s.zeroVal(n.Type())
3327 }
3328 len := s.constInt(types.Types[types.TINT], bound)
3329 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3330 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3331 }
3332 p := s.addr(n)
3333 return s.load(n.X.Type().Elem(), p)
3334 default:
3335 s.Fatalf("bad type for index %v", n.X.Type())
3336 return nil
3337 }
3338
3339 case ir.OLEN, ir.OCAP:
3340 n := n.(*ir.UnaryExpr)
3341 switch {
3342 case n.X.Type().IsSlice():
3343 op := ssa.OpSliceLen
3344 if n.Op() == ir.OCAP {
3345 op = ssa.OpSliceCap
3346 }
3347 return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
3348 case n.X.Type().IsString():
3349 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
3350 case n.X.Type().IsMap(), n.X.Type().IsChan():
3351 return s.referenceTypeBuiltin(n, s.expr(n.X))
3352 default:
3353 return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
3354 }
3355
3356 case ir.OSPTR:
3357 n := n.(*ir.UnaryExpr)
3358 a := s.expr(n.X)
3359 if n.X.Type().IsSlice() {
3360 if n.Bounded() {
3361 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3362 }
3363 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3364 } else {
3365 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3366 }
3367
3368 case ir.OITAB:
3369 n := n.(*ir.UnaryExpr)
3370 a := s.expr(n.X)
3371 return s.newValue1(ssa.OpITab, n.Type(), a)
3372
3373 case ir.OIDATA:
3374 n := n.(*ir.UnaryExpr)
3375 a := s.expr(n.X)
3376 return s.newValue1(ssa.OpIData, n.Type(), a)
3377
3378 case ir.OMAKEFACE:
3379 n := n.(*ir.BinaryExpr)
3380 tab := s.expr(n.X)
3381 data := s.expr(n.Y)
3382 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3383
3384 case ir.OSLICEHEADER:
3385 n := n.(*ir.SliceHeaderExpr)
3386 p := s.expr(n.Ptr)
3387 l := s.expr(n.Len)
3388 c := s.expr(n.Cap)
3389 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3390
3391 case ir.OSTRINGHEADER:
3392 n := n.(*ir.StringHeaderExpr)
3393 p := s.expr(n.Ptr)
3394 l := s.expr(n.Len)
3395 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3396
3397 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3398 n := n.(*ir.SliceExpr)
3399 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3400 v := s.exprCheckPtr(n.X, !check)
3401 var i, j, k *ssa.Value
3402 if n.Low != nil {
3403 i = s.expr(n.Low)
3404 }
3405 if n.High != nil {
3406 j = s.expr(n.High)
3407 }
3408 if n.Max != nil {
3409 k = s.expr(n.Max)
3410 }
3411 p, l, c := s.slice(v, i, j, k, n.Bounded())
3412 if check {
3413
3414 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3415 }
3416 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3417
3418 case ir.OSLICESTR:
3419 n := n.(*ir.SliceExpr)
3420 v := s.expr(n.X)
3421 var i, j *ssa.Value
3422 if n.Low != nil {
3423 i = s.expr(n.Low)
3424 }
3425 if n.High != nil {
3426 j = s.expr(n.High)
3427 }
3428 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3429 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3430
3431 case ir.OSLICE2ARRPTR:
3432
3433
3434
3435
3436 n := n.(*ir.ConvExpr)
3437 v := s.expr(n.X)
3438 nelem := n.Type().Elem().NumElem()
3439 arrlen := s.constInt(types.Types[types.TINT], nelem)
3440 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3441 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3442 op := ssa.OpSlicePtr
3443 if nelem == 0 {
3444 op = ssa.OpSlicePtrUnchecked
3445 }
3446 return s.newValue1(op, n.Type(), v)
3447
3448 case ir.OCALLFUNC:
3449 n := n.(*ir.CallExpr)
3450 if ir.IsIntrinsicCall(n) {
3451 return s.intrinsicCall(n)
3452 }
3453 fallthrough
3454
3455 case ir.OCALLINTER:
3456 n := n.(*ir.CallExpr)
3457 return s.callResult(n, callNormal)
3458
3459 case ir.OGETG:
3460 n := n.(*ir.CallExpr)
3461 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3462
3463 case ir.OGETCALLERSP:
3464 n := n.(*ir.CallExpr)
3465 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3466
3467 case ir.OAPPEND:
3468 return s.append(n.(*ir.CallExpr), false)
3469
3470 case ir.OMIN, ir.OMAX:
3471 return s.minMax(n.(*ir.CallExpr))
3472
3473 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3474
3475
3476
3477 n := n.(*ir.CompLitExpr)
3478 if !ir.IsZero(n) {
3479 s.Fatalf("literal with nonzero value in SSA: %v", n)
3480 }
3481 return s.zeroVal(n.Type())
3482
3483 case ir.ONEW:
3484 n := n.(*ir.UnaryExpr)
3485 var rtype *ssa.Value
3486 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3487 rtype = s.expr(x.RType)
3488 }
3489 return s.newObject(n.Type().Elem(), rtype)
3490
3491 case ir.OUNSAFEADD:
3492 n := n.(*ir.BinaryExpr)
3493 ptr := s.expr(n.X)
3494 len := s.expr(n.Y)
3495
3496
3497
3498 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3499
3500 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3501
3502 default:
3503 s.Fatalf("unhandled expr %v", n.Op())
3504 return nil
3505 }
3506 }
3507
3508 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3509 aux := c.Aux.(*ssa.AuxCall)
3510 pa := aux.ParamAssignmentForResult(which)
3511
3512
3513 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3514 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3515 return s.rawLoad(t, addr)
3516 }
3517 return s.newValue1I(ssa.OpSelectN, t, which, c)
3518 }
3519
3520 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3521 aux := c.Aux.(*ssa.AuxCall)
3522 pa := aux.ParamAssignmentForResult(which)
3523 if len(pa.Registers) == 0 {
3524 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3525 }
3526 _, addr := s.temp(c.Pos, t)
3527 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3528 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3529 return addr
3530 }
3531
3532
3533
3534
3535
3536
3537
3538
3539
// append converts an OAPPEND node to SSA.
// If inplace is false, it converts the OAPPEND expression n to an ssa.Value,
// adds it to s, and returns the Value.
// If inplace is true, it writes the result of the OAPPEND expression n
// back to the slice being appended to, and returns nil.
// inplace MUST be set to false if the slice can be SSA'd.
//
// If inplace is false, the generated code is roughly (for append(s, e1, e2, e3)):
//
//	ptr, len, cap := s
//	len += 3
//	if uint(cap) < uint(len) {
//	    ptr, len, cap = growslice(ptr, len, cap, 3, typ)
//	}
//	*(ptr+(len-3)) = e1
//	*(ptr+(len-2)) = e2
//	*(ptr+(len-1)) = e3
//	return makeslice(ptr, len, cap)
//
// If inplace is true (s = append(s, e1, e2, e3)):
//
//	a := &s
//	ptr, len, cap := s
//	len += 3
//	if uint(cap) < uint(len) {
//	    ptr, len, cap = growslice(ptr, len, cap, 3, typ)
//	    vardef(a)    // if necessary, advise liveness we are writing a new a
//	    *a.cap = cap // write before ptr to avoid a spill
//	    *a.ptr = ptr // with write barrier
//	}
//	*a.len = len
//	*(ptr+(len-3)) = e1
//	*(ptr+(len-2)) = e2
//	*(ptr+(len-1)) = e3
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		// In-place: remember the slice's address so ptr/len/cap can
		// be written back through it below.
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// Allocate new blocks: one for the growslice call, one for the
	// common "store the new elements" tail.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the input slice.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newlen = len + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Decide if we need to grow: cap < newlen, using an unsigned
	// comparison (the TUINT variant of OLT).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record values of ptr/len/cap before the branch so the assign
	// block can merge them with the post-growslice values via phis.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the rare path
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Grow path: call growslice.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

	// Decompose the output slice.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to build a new slice.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Write cap before ptr (see the pseudocode above).
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Common tail: assign new elements to slots.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Update the length in place. This must wait until here so
		// it happens only after growslice (if any) has succeeded.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments being appended.
	type argRec struct {
		// if store is true, v is the value to store; otherwise v is
		// the address of a non-SSA-able argument to copy from.
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the arguments into the slice, starting at the old length.
	oldLen := s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// These variables are no longer needed in the current scope.
	// (The preceding startBlock already reset s.vars, so the deletes
	// are mostly for documentation.)
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// Make the result slice (expression form only).
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
3705
3706
// minMax converts an OMIN/OMAX builtin call into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// min/max is variadic; fold applies a binary op across the
	// operands left-to-right, which is equivalent to the n-ary form.
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings don't reduce to a simple compare-and-pick
		// (floats because of NaN and signed-zero handling). Some
		// architectures have min/max ops with the right float
		// semantics (OpMin/Max*F below); otherwise, and always for
		// strings, call runtime helpers.
		if typ.IsFloat() {
			// Determine whether the target has usable float min/max
			// instructions.
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// Runtime-helper fallback: pick the helper by type and op.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// NOTE(review): GORISCV64 >= 22 presumably selects a profile
		// with hardware integer min/max (Zbb) — confirm against the
		// riscv64 backend.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic integer case: lower to compare + select (ternary).
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
3820
3821
// ternary emits SSA implementing cond ? x : y.
// cond, x, and y have already been converted to SSA values.
func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
	// Use a temporary "variable" so the two assignments below are
	// merged into a phi at the join block by the usual variable
	// machinery.
	ternaryVar := ssaMarker("ternary")

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bEnd := s.f.NewBlock(ssa.BlockPlain)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cond)
	b.AddEdgeTo(bThen)
	b.AddEdgeTo(bElse)

	s.startBlock(bThen)
	s.vars[ternaryVar] = x
	s.endBlock().AddEdgeTo(bEnd)

	s.startBlock(bElse)
	s.vars[ternaryVar] = y
	s.endBlock().AddEdgeTo(bEnd)

	s.startBlock(bEnd)
	r := s.variable(ternaryVar, x.Type)
	delete(s.vars, ternaryVar) // the marker is dead past the join
	return r
}
3850
3851
3852
3853
3854
// condBranch evaluates the boolean expression cond and branches to yes
// if cond is true and to no if cond is false.
// It handles && and || by branching through intermediate blocks instead
// of materializing the boolean with s.expr, which gives short-circuit
// control flow directly.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: if likely==1, then both recursive calls pass 1.
		// If likely==-1, then we don't have enough information to decide
		// whether the first branch is likely or not. So we pass 0 for
		// the likeliness of the first branch.
		// TODO: have the frontend give us branch prediction hints for
		// OANDAND and OOROR nodes (if it ever has such info).
	case ir.OOROR:
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: if likely==-1, then both recursive calls pass -1.
		// If likely==1, then we don't have enough info to decide
		// the likelihood of the first branch.
	case ir.ONOT:
		// !x: swap the targets and negate the prediction.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// A no-op conversion is transparent for branching.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate the condition to a value and branch on it.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely) // -1/0/+1 likeliness
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
3901
// skipMask is a bit set of slice components (pointer, length, capacity)
// whose stores an assignment may omit; it is passed through assign to
// storeType as the skip parameter.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // the slice pointer need not be stored
	skipLen                      // the slice length need not be stored
	skipCap                      // the slice capacity need not be stored
)
3909
3910
3911
3912
3913
3914
3915
// assign does left = right.
// right has already been evaluated to SSA, left has not.
// If deref is true, then left = *right instead (right is an address);
// if in addition right == nil, left is zeroed.
// skip marks slice components whose stores can be omitted.
// Overlap between left and right is assumed impossible here; use
// assignWhichMayOverlap directly when it is possible.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap is like assign, but mayOverlap indicates whether
// left and right might partially overlap in memory (relevant only for the
// deref, i.e. mem->mem move, case).
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		// Assignment to the blank identifier: nothing to do.
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// We're assigning to a field of an ssa-able value.
			// We need to build a new structure with the new value for the
			// field we're assigning and the old values for the other fields.
			// For instance:
			//   type T struct {a, b, c int}
			//   var x T
			//   x.b = 5
			// For the x.b = 5 assignment we generate x = T{x.a, 5, x.c}.

			// Grab information about the structure type.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Grab old value of structure.
			old := s.expr(left.X)

			// Make new structure.
			new := s.newValue0(ssa.OpStructMake, t)

			// Add fields as args, substituting right for the assigned field.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the new value to the base of the dot op.
			s.assign(left.X, new, false, 0)
			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()
			// We're assigning to an element of an ssa-able array:
			// a[i] = v.
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index) // index
			if n == 0 {
				// The bounds check must fail. Might as well
				// ignore the actual index and just use zeros.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Rewrite to a = [1]{v}.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false) // checks i == 0
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain SSA-able variable: update the variable assignment.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire stack-allocated variable,
	// emit OpVarDef so liveness analysis knows the variable is redefined.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Left is not ssa-able. Compute its address.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Storing pointers into reflect.SliceHeader/StringHeader's
		// Data field is valid even though the field has type uintptr.
		// Treat the store as pointer-typed so the writebarrier pass
		// inserts a write barrier.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Treat as a mem->mem move.
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Treat as a store.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4027
4028
4029 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4030 switch {
4031 case t.IsInteger():
4032 switch t.Size() {
4033 case 1:
4034 return s.constInt8(t, 0)
4035 case 2:
4036 return s.constInt16(t, 0)
4037 case 4:
4038 return s.constInt32(t, 0)
4039 case 8:
4040 return s.constInt64(t, 0)
4041 default:
4042 s.Fatalf("bad sized integer type %v", t)
4043 }
4044 case t.IsFloat():
4045 switch t.Size() {
4046 case 4:
4047 return s.constFloat32(t, 0)
4048 case 8:
4049 return s.constFloat64(t, 0)
4050 default:
4051 s.Fatalf("bad sized float type %v", t)
4052 }
4053 case t.IsComplex():
4054 switch t.Size() {
4055 case 8:
4056 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4057 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4058 case 16:
4059 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4060 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4061 default:
4062 s.Fatalf("bad sized complex type %v", t)
4063 }
4064
4065 case t.IsString():
4066 return s.constEmptyString(t)
4067 case t.IsPtrShaped():
4068 return s.constNil(t)
4069 case t.IsBoolean():
4070 return s.constBool(false)
4071 case t.IsInterface():
4072 return s.constInterface(t)
4073 case t.IsSlice():
4074 return s.constSlice(t)
4075 case t.IsStruct():
4076 n := t.NumFields()
4077 v := s.entryNewValue0(ssa.OpStructMake, t)
4078 for i := 0; i < n; i++ {
4079 v.AddArg(s.zeroVal(t.FieldType(i)))
4080 }
4081 return v
4082 case t.IsArray():
4083 switch t.NumElem() {
4084 case 0:
4085 return s.entryNewValue0(ssa.OpArrayMake0, t)
4086 case 1:
4087 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4088 }
4089 }
4090 s.Fatalf("zero for type %v not implemented", t)
4091 return nil
4092 }
4093
// callKind identifies how a call expression is lowered.
type callKind int8

const (
	callNormal     callKind = iota // ordinary call
	callDefer                      // deferred call (via deferproc)
	callDeferStack                 // deferred call with a stack-allocated defer record (via deferprocStack)
	callGo                         // goroutine start (via newproc)
	callTail                       // tail call (OpTailLECall)
)
4103
// sfRtCallDef describes the runtime helper implementing a soft-float
// operation: the function to call and the Go kind of its result (also
// used to type intermediate operands in sfcall).
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime helper symbol
	rtype types.Kind // kind of the helper's result
}

// softFloatOps maps a floating-point SSA op to the runtime call that
// emulates it on soft-float targets. Populated by softfloatInit.
var softFloatOps map[ssa.Op]sfRtCallDef
4110
// softfloatInit populates softFloatOps, the table mapping floating-point
// SSA ops to runtime helpers for soft-float targets.
// Several ops are deliberately mapped to a different helper and fixed up
// in sfcall:
//   - Sub maps to fadd: sfcall negates the second operand (a-b == a+(-b)).
//   - Neq maps to feq: sfcall inverts the result.
//   - Less/Leq map to fgt/fge: sfcall swaps the operands.
func softfloatInit() {
	softFloatOps = map[ssa.Op]sfRtCallDef{
		// Arithmetic.
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32}, // see sfcall: second operand negated
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64}, // see sfcall: second operand negated
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		// Comparisons.
		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL}, // see sfcall: result inverted
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL}, // see sfcall: result inverted
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL}, // see sfcall: operands swapped
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL}, // see sfcall: operands swapped
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL}, // see sfcall: operands swapped
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL}, // see sfcall: operands swapped

		// Conversions.
		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4148
4149
4150
// sfcall lowers a floating-point SSA op to a call to its soft-float
// runtime helper (from softFloatOps). It reports whether op was handled;
// false means there is no soft-float mapping and the caller should emit
// the op normally.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the same-width unsigned integer type;
	// the helpers traffic in raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// The table maps Less/Leq to fgt/fge, so swap the
			// operands: a < b == b > a.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// The table maps Sub to fadd, so negate the second
			// operand: a - b == a + (-b).
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Reinterpret float arguments as integer bit patterns for
		// the call.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Reinterpret the returned bits back as a float.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// The table maps Neq to feq; invert the result.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4194
4195
4196 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4197 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4198 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4199 return p0, p1
4200 }
4201
4202
4203 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4204 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4205 if ssa.IntrinsicsDebug > 0 {
4206 x := v
4207 if x == nil {
4208 x = s.mem()
4209 }
4210 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4211 x = x.Args[0]
4212 }
4213 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4214 }
4215 return v
4216 }
4217
4218
4219 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4220 args := make([]*ssa.Value, len(n.Args))
4221 for i, n := range n.Args {
4222 args[i] = s.expr(n)
4223 }
4224 return args
4225 }
4226
4227
4228
4229
4230
4231
4232
// openDeferRecord adds code to evaluate and store the function for an
// open-coded defer call, and records info about the defer so proper code
// can be generated on the exit paths (see openDeferExit). n is the actual
// function call being deferred; open-coded defers take no arguments and
// return no results.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun
	// Always store the function value in a stack slot (openDeferSave),
	// so the runtime's panic code can find it. In the defer exit code,
	// a static function (ONAME/PFUNC) is called directly instead, so
	// opendefer.closure is only recorded for non-static functions.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Mark this defer as active by setting bit 'index' in deferBits,
	// only after evaluation and storage of the function succeeded.
	// The updated bits are stored back to the stack slot so they are
	// visible to the runtime during a panic.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4261
4262
4263
4264
4265
4266
// openDeferSave generates SSA nodes to store a value val (of type t, the
// deferred function's type) into an explicit autotmp stack slot, so it can
// be reloaded for the call on exit paths and found by the runtime during a
// panic. t must be SSA-able and contain pointers. It returns an SSA value
// holding the address of the autotmp slot.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// NOTE(review): the frame offset holds this defer's index here —
	// presumably used to order defer slots during stack frame layout;
	// confirm against the frame layout code.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// The temp's VarDef/VarLive and its address must be in the entry
	// block, so the slot is live for the whole function (panic/exit
	// paths may access it at any point).
	if s.curBlock.ID != s.f.Entry.ID {
		// Not currently in the entry block: splice the VarDef/VarLive
		// and address computation into the entry block's memory state.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Already in the entry block: emit through the normal
		// current-block value builders.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot is live from function entry but only written when the
	// defer statement executes, so it must be zeroed on entry lest the
	// GC follow an uninitialized pointer.
	temp.SetNeedzero(true)

	// Store the deferred function value into the slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
4311
4312
4313
4314
4315
// openDeferExit generates SSA for the defer exit code path: it runs the
// activated open-coded defers in reverse creation order, testing deferBits
// to see which are pending. The generated blocks and counts are recorded
// in lastDeferExit/lastDeferCount.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Run the defers in reverse order, each guarded by its bit in
	// deferBits.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain) // run the i'th defer
		bEnd := s.f.NewBlock(ssa.BlockPlain)  // continue with the next one

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Skip this defer if its bit is clear:
		// deferBits & (1<<i) == 0.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this bit in deferBits and store it back to the stack
		// slot, so the runtime will not re-run this defer if the
		// defer call itself panics.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)
		// Use the masked value for subsequent tests, keeping earlier
		// bits cleared.
		s.vars[deferBitsVar] = maskedval

		// Emit the call to the deferred function, loading its value
		// from the stack slot saved at the defer statement if needed.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Indirect call through the saved closure pointer.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Static function: call it directly.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)
		// Keep the closure's stack slot live through the call, so the
		// runtime can still find it if the deferred call panics.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4385
// callResult lowers call n (of kind k) to SSA and returns the value of
// the call's result.
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4389
// callAddr lowers call n (of kind k) to SSA and returns the address of
// the call's result.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4393
4394
4395
4396 func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
4397 s.prevCall = nil
4398 var calleeLSym *obj.LSym
4399 var closure *ssa.Value
4400 var codeptr *ssa.Value
4401 var dextra *ssa.Value
4402 var rcvr *ssa.Value
4403 fn := n.Fun
4404 var ACArgs []*types.Type
4405 var ACResults []*types.Type
4406 var callArgs []*ssa.Value
4407
4408 callABI := s.f.ABIDefault
4409
4410 if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
4411 s.Fatalf("go/defer call with arguments: %v", n)
4412 }
4413
4414 switch n.Op() {
4415 case ir.OCALLFUNC:
4416 if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
4417 fn := fn.(*ir.Name)
4418 calleeLSym = callTargetLSym(fn)
4419 if buildcfg.Experiment.RegabiArgs {
4420
4421
4422
4423
4424
4425 if fn.Func != nil {
4426 callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
4427 }
4428 } else {
4429
4430 inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
4431 inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
4432 if inRegistersImported || inRegistersSamePackage {
4433 callABI = s.f.ABI1
4434 }
4435 }
4436 if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
4437 s.f.HasDeferRangeFunc = true
4438 }
4439 break
4440 }
4441 closure = s.expr(fn)
4442 if k != callDefer && k != callDeferStack {
4443
4444
4445 s.maybeNilCheckClosure(closure, k)
4446 }
4447 case ir.OCALLINTER:
4448 if fn.Op() != ir.ODOTINTER {
4449 s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
4450 }
4451 fn := fn.(*ir.SelectorExpr)
4452 var iclosure *ssa.Value
4453 iclosure, rcvr = s.getClosureAndRcvr(fn)
4454 if k == callNormal {
4455 codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
4456 } else {
4457 closure = iclosure
4458 }
4459 }
4460 if deferExtra != nil {
4461 dextra = s.expr(deferExtra)
4462 }
4463
4464 params := callABI.ABIAnalyze(n.Fun.Type(), false )
4465 types.CalcSize(fn.Type())
4466 stksize := params.ArgWidth()
4467
4468 res := n.Fun.Type().Results()
4469 if k == callNormal || k == callTail {
4470 for _, p := range params.OutParams() {
4471 ACResults = append(ACResults, p.Type)
4472 }
4473 }
4474
4475 var call *ssa.Value
4476 if k == callDeferStack {
4477 if stksize != 0 {
4478 s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
4479 }
4480
4481 t := deferstruct()
4482 n, addr := s.temp(n.Pos(), t)
4483 n.SetNonMergeable(true)
4484 s.store(closure.Type,
4485 s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
4486 closure)
4487
4488
4489 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
4490 aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
4491 callArgs = append(callArgs, addr, s.mem())
4492 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4493 call.AddArgs(callArgs...)
4494 call.AuxInt = int64(types.PtrSize)
4495 } else {
4496
4497
4498 argStart := base.Ctxt.Arch.FixedFrameSize
4499
4500 if k != callNormal && k != callTail {
4501
4502 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
4503 callArgs = append(callArgs, closure)
4504 stksize += int64(types.PtrSize)
4505 argStart += int64(types.PtrSize)
4506 if dextra != nil {
4507
4508 ACArgs = append(ACArgs, types.Types[types.TINTER])
4509 callArgs = append(callArgs, dextra)
4510 stksize += 2 * int64(types.PtrSize)
4511 argStart += 2 * int64(types.PtrSize)
4512 }
4513 }
4514
4515
4516 if rcvr != nil {
4517 callArgs = append(callArgs, rcvr)
4518 }
4519
4520
4521 t := n.Fun.Type()
4522 args := n.Args
4523
4524 for _, p := range params.InParams() {
4525 ACArgs = append(ACArgs, p.Type)
4526 }
4527
4528
4529
4530
4531 if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
4532 b := s.endBlock()
4533 b.Kind = ssa.BlockPlain
4534 curb := s.f.NewBlock(ssa.BlockPlain)
4535 b.AddEdgeTo(curb)
4536 s.startBlock(curb)
4537 }
4538
4539 for i, n := range args {
4540 callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
4541 }
4542
4543 callArgs = append(callArgs, s.mem())
4544
4545
4546 switch {
4547 case k == callDefer:
4548 sym := ir.Syms.Deferproc
4549 if dextra != nil {
4550 sym = ir.Syms.Deferprocat
4551 }
4552 aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
4553 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4554 case k == callGo:
4555 aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
4556 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4557 case closure != nil:
4558
4559
4560
4561
4562
4563 codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
4564 aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
4565 call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
4566 case codeptr != nil:
4567
4568 aux := ssa.InterfaceAuxCall(params)
4569 call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
4570 case calleeLSym != nil:
4571 aux := ssa.StaticAuxCall(calleeLSym, params)
4572 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4573 if k == callTail {
4574 call.Op = ssa.OpTailLECall
4575 stksize = 0
4576 }
4577 default:
4578 s.Fatalf("bad call type %v %v", n.Op(), n)
4579 }
4580 call.AddArgs(callArgs...)
4581 call.AuxInt = stksize
4582 }
4583 s.prevCall = call
4584 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
4585
4586 for _, v := range n.KeepAlive {
4587 if !v.Addrtaken() {
4588 s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
4589 }
4590 switch v.Class {
4591 case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
4592 default:
4593 s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
4594 }
4595 s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
4596 }
4597
4598
4599 if k == callDefer || k == callDeferStack {
4600 b := s.endBlock()
4601 b.Kind = ssa.BlockDefer
4602 b.SetControl(call)
4603 bNext := s.f.NewBlock(ssa.BlockPlain)
4604 b.AddEdgeTo(bNext)
4605
4606 r := s.f.NewBlock(ssa.BlockPlain)
4607 s.startBlock(r)
4608 s.exit()
4609 b.AddEdgeTo(r)
4610 b.Likely = ssa.BranchLikely
4611 s.startBlock(bNext)
4612 }
4613
4614 if len(res) == 0 || k != callNormal {
4615
4616 return nil
4617 }
4618 fp := res[0]
4619 if returnResultAddr {
4620 return s.resultAddrOfCall(call, 0, fp.Type)
4621 }
4622 return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
4623 }
4624
4625
4626
4627 func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
4628 if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
4629
4630
4631 s.nilCheck(closure)
4632 }
4633 }
4634
4635
4636
4637 func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
4638 i := s.expr(fn.X)
4639 itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
4640 s.nilCheck(itab)
4641 itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
4642 closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
4643 rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
4644 return closure, rcvr
4645 }
4646
4647
4648
4649 func etypesign(e types.Kind) int8 {
4650 switch e {
4651 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
4652 return -1
4653 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
4654 return +1
4655 }
4656 return 0
4657 }
4658
4659
4660
// addr converts the address of the expression n to SSA and returns it.
// It fatals if n is not addressable here (e.g. it is an SSA-able value).
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		// Track source position for compound expressions.
		// NOTE(review): bare names skip the push — presumably to avoid
		// attributing positions to simple variable references; confirm.
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset computes the address of lsym plus a constant offset.
	// Values are created in the entry block so they can be reused.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// Fold a nonzero offset into an OffPtr on top of the symbol address.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address is stored in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// Global variable: address of its link symbol.
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// Parameter slot; addresses were precomputed in s.decladdrs.
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			// Local variable on the stack. Non-temporaries are flagged
			// (last argument) via !ir.IsAutoTmp.
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT:
			// Result slot on the stack; always flagged (last argument true).
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// Result of the most recently generated call (s.prevCall).
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			// Slice element: bounds-check the index, then offset from
			// the slice's data pointer.
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else {
			// Array element: take the array's address, bounds-check
			// against the (constant) element count, then index.
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		// Field of an addressable value: address of base plus field offset.
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		// Field through a pointer: nil-checked base plus field offset.
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure the result has type t
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		// Addressable type assertion: the result must be a fresh load
		// whose address we can hand back.
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
4772
4773
4774
4775 func (s *state) canSSA(n ir.Node) bool {
4776 if base.Flag.N != 0 {
4777 return false
4778 }
4779 for {
4780 nn := n
4781 if nn.Op() == ir.ODOT {
4782 nn := nn.(*ir.SelectorExpr)
4783 n = nn.X
4784 continue
4785 }
4786 if nn.Op() == ir.OINDEX {
4787 nn := nn.(*ir.IndexExpr)
4788 if nn.X.Type().IsArray() {
4789 n = nn.X
4790 continue
4791 }
4792 }
4793 break
4794 }
4795 if n.Op() != ir.ONAME {
4796 return false
4797 }
4798 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
4799 }
4800
4801 func (s *state) canSSAName(name *ir.Name) bool {
4802 if name.Addrtaken() || !name.OnStack() {
4803 return false
4804 }
4805 switch name.Class {
4806 case ir.PPARAMOUT:
4807 if s.hasdefer {
4808
4809
4810
4811
4812
4813 return false
4814 }
4815 if s.cgoUnsafeArgs {
4816
4817
4818 return false
4819 }
4820 }
4821 return true
4822
4823 }
4824
4825
4826 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
4827 p := s.expr(n)
4828 if bounded || n.NonNil() {
4829 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
4830 s.f.Warnl(lineno, "removed nil check")
4831 }
4832 return p
4833 }
4834 p = s.nilCheck(p)
4835 return p
4836 }
4837
4838
4839
4840
4841
4842
4843 func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
4844 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
4845 return ptr
4846 }
4847 return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
4848 }
4849
4850
4851
4852
4853
4854
4855
// boundsCheck generates bounds checking code: it branches to a panic
// block unless idx is in range with respect to len (strictly less for
// indexing kinds, less-or-equal for slicing kinds), and returns the
// extended (and, under Spectre mitigation, masked) index. bounded means
// the caller guarantees the index is in range; -B disables all checks.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// Caller guarantees the bound, or checks are globally disabled.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Index is unsigned: switch to the unsigned flavor of the
		// bounds kind.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// IsInBounds (idx < len) for indexing; IsSliceInBounds (idx <= len)
	// for all slicing kinds.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm lowers the bounds panic through a regular runtime call
		// rather than the PanicBounds op.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// Under -spectre=index, mask the index so it cannot be used to
	// speculatively read out of bounds.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
4942
4943
// check generates code that calls the no-return runtime helper fn if
// cmp (a boolean SSA value) is false. Panic blocks are memoized per
// (fn, file, line) in s.panics, so repeated checks at the same source
// position share one call site.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		// First check at this position: build the shared panic block.
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// rtcall with returns=false ends the panic block itself.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
4966
4967 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
4968 needcheck := true
4969 switch b.Op {
4970 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
4971 if b.AuxInt != 0 {
4972 needcheck = false
4973 }
4974 }
4975 if needcheck {
4976
4977 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
4978 s.check(cmp, ir.Syms.Panicdivide)
4979 }
4980 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
4981 }
4982
4983
4984
4985
4986
// rtcall issues a call to the runtime function fn with the given args,
// returning one SSA value per entry in results. If returns is false,
// the call never returns (a panic helper) and the current block becomes
// an exit block; results must be empty in that case.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Accumulate the stack-frame footprint of the arguments; off is used
	// only to size the call's AuxInt (the call itself is late-expanded).
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call as a late-expanded static call under the default ABI.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish the current block as an exit controlled by the call.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Extract the results and fold their sizes into the stack footprint.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Total arg+result stack size for the call.
	call.AuxInt = off

	return res
}
5038
5039
// storeType stores the value right of type t to the location left.
// skip selects components (len/cap/ptr) that may be omitted; leftIsStmt
// is forwarded to the store's position handling.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// No write barrier can be needed (no pointers, or a stack
		// destination): store the whole value at once.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, then pointer fields (storeTypePtrs).
	// NOTE(review): splitting presumably lets the pointer stores — the
	// ones that may need write barriers — be grouped; confirm.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5059
5060
// storeTypeScalars stores the non-pointer parts of right, of type t, to
// the location left, honoring the skip mask. Pointer parts are handled
// separately by storeTypePtrs.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap objects are treated as scalars
			// and stored here.
			s.store(t, left, right)
		}
		// Otherwise the value is all pointer: nothing scalar to store.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store the length word; the data pointer is the pointer part.
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// Store the len and cap words unless skipped.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// Store the itab word (treated as a uintptr here); the data
		// word is the pointer part.
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		// Recurse field by field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// Nothing to store.
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5108
5109
// storeTypePtrs stores the pointer parts of right, of type t, to the
// location left. Scalar parts are handled by storeTypeScalars.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Not-in-heap pointers are stored as scalars
			// (see storeTypeScalars).
			break
		}
		s.store(t, left, right)
	case t.IsString():
		// Store the data pointer word.
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		// Store the element pointer word.
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// Store the data word; the itab word is handled as a scalar.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		// Recurse into fields that contain pointers.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// Nothing to store.
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5148
5149
5150 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5151 var a *ssa.Value
5152 if !ssa.CanSSA(t) {
5153 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5154 } else {
5155 a = s.expr(n)
5156 }
5157 return a
5158 }
5159
5160 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
5161 pt := types.NewPtr(t)
5162 var addr *ssa.Value
5163 if base == s.sp {
5164
5165 addr = s.constOffPtrSP(pt, off)
5166 } else {
5167 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
5168 }
5169
5170 if !ssa.CanSSA(t) {
5171 a := s.addr(n)
5172 s.move(t, addr, a)
5173 return
5174 }
5175
5176 a := s.expr(n)
5177 s.storeType(t, addr, a, 0, false)
5178 }
5179
5180
5181
5182
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of the
// result. v may be a slice, string, or pointer to an array. A nil i, j,
// or k stands for the corresponding default (0, len, cap). bounded means
// the indices are known to be in range, eliding bounds checks.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	// Decompose the operand into ptr/len/cap according to its type.
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		// Pointer to array: nil-check it, then use the constant element
		// count for both len and cap.
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Fill in missing indices with their defaults.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Bounds-check the indices against each other (k <= cap, i <= j <= k),
	// skipping checks that are trivially true because an index defaulted
	// to the bound it is compared against.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized arithmetic ops for the result computations below.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// rlen = j - i; rcap = k - i (except strings, which have no cap).
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic needed when slicing from index 0.
		return ptr, rlen, rcap
	}

	// rptr = ptr + (i * elemsize), but masked by Slicemask(rcap) so the
	// delta is forced to zero when the result has zero capacity.
	// NOTE(review): masking presumably keeps rptr inside the original
	// allocation for empty results — confirm against the ssa docs.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta = i * elemsize
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// Apply the capacity mask.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// rptr = ptr + delta
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5295
// u642fcvtTab holds the SSA ops needed to convert a uint64 to a float
// of a particular width (see uint64Tofloat).
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 converts uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 converts uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5320
// uint64Tofloat64 converts x (a uint64 of type ft) to a float64 (tt).
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

// uint64Tofloat32 converts x (a uint64 of type ft) to a float32 (tt).
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5328
// uint64Tofloat converts the uint64 x (type ft) to a float (type tt)
// using the ops in cvttab. Generated code is equivalent to:
//
//	if x >= 0 {                    // signed compare: high bit clear
//		result = floatY(x)     // direct signed conversion is exact
//	} else {
//		y = x & 1
//		z = x >> 1
//		z = z | y              // keep the dropped low bit
//		result = floatY(z)
//		result = result + result
//	}
//
// NOTE(review): z |= y presumably preserves correct rounding after the
// halving ("round to odd") — confirm.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: high bit clear, convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: halve (keeping the low bit), convert, then double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5386
// u322fcvtTab holds the SSA ops needed to convert a uint32 to a float
// of a particular width (see uint32Tofloat).
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 converts uint32 -> float64.
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 converts uint32 -> float32 (via float64 on the negative path).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5400
// uint32Tofloat64 converts x (a uint32 of type ft) to a float64 (tt).
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

// uint32Tofloat32 converts x (a uint32 of type ft) to a float32 (tt).
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5408
// uint32Tofloat converts the uint32 x (type ft) to a float (type tt)
// using the ops in cvttab. Generated code is equivalent to:
//
//	if x >= 0 {                    // signed compare: high bit clear
//		result = floatY(x)
//	} else {
//		result = floatY(float64(int32(x)) + (1 << 32))
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: high bit clear, convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: convert as signed into float64 (yielding a negative value),
	// add 2^32 to recover the unsigned value, then narrow if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5446
5447
// referenceTypeBuiltin generates code for len/cap of a map or channel
// header pointer x: nil yields 0, otherwise the result is loaded from
// the header (len at offset 0, cap at the following word).
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)")
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)")
	}
	// Generated code is equivalent to:
	//	if x == nil {
	//		result = 0
	//	} else {
	//		result = *x          // len: first word of the header
	//		                     // cap: the word after it
	//	}
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: the operand is nil, so the result is zero.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if buildcfg.Experiment.SwissMap && n.X.Type().IsMap() {
			// SwissMap: the length field (field 0 of the map header)
			// may not be int-typed; load it and convert.
			loadType := reflectdata.SwissMapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// Length is the first word of the header.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// Capacity is the word after the length.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
5514
// f2uCvtTab holds the SSA ops and constants needed to convert a float to
// an unsigned integer (see floatToUint). cutoff is 2^(width-1): values
// at or above it need the subtract-and-set-high-bit path.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue func(*state, *types.Type, float64) *ssa.Value
	intValue func(*state, *types.Type, int64) *ssa.Value
	cutoff uint64
}

// f32_u64 converts float32 -> uint64.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 converts float64 -> uint64.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 converts float32 -> uint32.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 converts float64 -> uint32.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
5561
// float32ToUint64 converts x (a float32 of type ft) to a uint64 (tt).
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts x (a float64 of type ft) to a uint64 (tt).
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts x (a float32 of type ft) to a uint32 (tt).
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts x (a float64 of type ft) to a uint32 (tt).
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
5576
// floatToUint converts the float x (type ft) to an unsigned integer
// (type tt) using the ops in cvttab. Generated code is equivalent to:
//
//	cutoff := 1 << (intWidth - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)              // fits in the signed range
//	} else {
//		y = x - floatX(cutoff)
//		z = uintY(y)
//		result = z | -cutoff           // set the high bit back
//	}
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: below the cutoff, a direct conversion is correct.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: subtract the cutoff, convert, then OR the high bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5617
5618
5619
5620
// dottype generates SSA for the type assertion n. commaok is forwarded
// to dottype1 and selects the two-result (value, ok) form over the
// panicking form.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	target := s.reflectType(n.Type())
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
5630
// dynamicDottype generates SSA for the dynamic (generic) type assertion
// n, where the target type is known only at run time. commaok selects
// the two-result (value, ok) form over the panicking form.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		// Non-empty interface to concrete type: the itab is supplied;
		// recover the target type from the itab's Type field.
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
5648
5649
5650
5651
5652
5653
5654
5655
5656
5657 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
5658 typs := s.f.Config.Types
5659 byteptr := typs.BytePtr
5660 if dst.IsInterface() {
5661 if dst.IsEmptyInterface() {
5662
5663
5664 if base.Debug.TypeAssert > 0 {
5665 base.WarnfAt(pos, "type assertion inlined")
5666 }
5667
5668
5669 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5670
5671 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5672
5673 if src.IsEmptyInterface() && commaok {
5674
5675 return iface, cond
5676 }
5677
5678
5679 b := s.endBlock()
5680 b.Kind = ssa.BlockIf
5681 b.SetControl(cond)
5682 b.Likely = ssa.BranchLikely
5683 bOk := s.f.NewBlock(ssa.BlockPlain)
5684 bFail := s.f.NewBlock(ssa.BlockPlain)
5685 b.AddEdgeTo(bOk)
5686 b.AddEdgeTo(bFail)
5687
5688 if !commaok {
5689
5690 s.startBlock(bFail)
5691 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5692
5693
5694 s.startBlock(bOk)
5695 if src.IsEmptyInterface() {
5696 res = iface
5697 return
5698 }
5699
5700 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5701 typ := s.load(byteptr, off)
5702 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5703 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
5704 return
5705 }
5706
5707 s.startBlock(bOk)
5708
5709
5710 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5711 s.vars[typVar] = s.load(byteptr, off)
5712 s.endBlock()
5713
5714
5715 s.startBlock(bFail)
5716 s.vars[typVar] = itab
5717 s.endBlock()
5718
5719
5720 bEnd := s.f.NewBlock(ssa.BlockPlain)
5721 bOk.AddEdgeTo(bEnd)
5722 bFail.AddEdgeTo(bEnd)
5723 s.startBlock(bEnd)
5724 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5725 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
5726 resok = cond
5727 delete(s.vars, typVar)
5728 return
5729 }
5730
5731 if base.Debug.TypeAssert > 0 {
5732 base.WarnfAt(pos, "type assertion not inlined")
5733 }
5734
5735 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5736 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
5737
5738
5739 bNil := s.f.NewBlock(ssa.BlockPlain)
5740 bNonNil := s.f.NewBlock(ssa.BlockPlain)
5741 bMerge := s.f.NewBlock(ssa.BlockPlain)
5742 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5743 b := s.endBlock()
5744 b.Kind = ssa.BlockIf
5745 b.SetControl(cond)
5746 b.Likely = ssa.BranchLikely
5747 b.AddEdgeTo(bNonNil)
5748 b.AddEdgeTo(bNil)
5749
5750 s.startBlock(bNil)
5751 if commaok {
5752 s.vars[typVar] = itab
5753 b := s.endBlock()
5754 b.AddEdgeTo(bMerge)
5755 } else {
5756
5757 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5758 }
5759
5760
5761 s.startBlock(bNonNil)
5762 typ := itab
5763 if !src.IsEmptyInterface() {
5764 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
5765 }
5766
5767
5768 var d *ssa.Value
5769 if descriptor != nil {
5770 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
5771 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
5772
5773
5774 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
5775 s.Fatalf("atomic load not available")
5776 }
5777
5778 var mul, and, add, zext ssa.Op
5779 if s.config.PtrSize == 4 {
5780 mul = ssa.OpMul32
5781 and = ssa.OpAnd32
5782 add = ssa.OpAdd32
5783 zext = ssa.OpCopy
5784 } else {
5785 mul = ssa.OpMul64
5786 and = ssa.OpAnd64
5787 add = ssa.OpAdd64
5788 zext = ssa.OpZeroExt32to64
5789 }
5790
5791 loopHead := s.f.NewBlock(ssa.BlockPlain)
5792 loopBody := s.f.NewBlock(ssa.BlockPlain)
5793 cacheHit := s.f.NewBlock(ssa.BlockPlain)
5794 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
5795
5796
5797
5798 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
5799 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
5800 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
5801
5802
5803 var hash *ssa.Value
5804 if src.IsEmptyInterface() {
5805 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
5806 } else {
5807 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
5808 }
5809 hash = s.newValue1(zext, typs.Uintptr, hash)
5810 s.vars[hashVar] = hash
5811
5812 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
5813
5814 b := s.endBlock()
5815 b.AddEdgeTo(loopHead)
5816
5817
5818
5819 s.startBlock(loopHead)
5820 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
5821 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
5822 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
5823 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
5824
5825 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
5826
5827
5828
5829 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
5830 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
5831 b = s.endBlock()
5832 b.Kind = ssa.BlockIf
5833 b.SetControl(cmp1)
5834 b.AddEdgeTo(cacheHit)
5835 b.AddEdgeTo(loopBody)
5836
5837
5838
5839 s.startBlock(loopBody)
5840 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
5841 b = s.endBlock()
5842 b.Kind = ssa.BlockIf
5843 b.SetControl(cmp2)
5844 b.AddEdgeTo(cacheMiss)
5845 b.AddEdgeTo(loopHead)
5846
5847
5848
5849 s.startBlock(cacheHit)
5850 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
5851 s.vars[typVar] = eItab
5852 b = s.endBlock()
5853 b.AddEdgeTo(bMerge)
5854
5855
5856 s.startBlock(cacheMiss)
5857 }
5858 }
5859
5860
5861 if descriptor != nil {
5862 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
5863 } else {
5864 var fn *obj.LSym
5865 if commaok {
5866 fn = ir.Syms.AssertE2I2
5867 } else {
5868 fn = ir.Syms.AssertE2I
5869 }
5870 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
5871 }
5872 s.vars[typVar] = itab
5873 b = s.endBlock()
5874 b.AddEdgeTo(bMerge)
5875
5876
5877 s.startBlock(bMerge)
5878 itab = s.variable(typVar, byteptr)
5879 var ok *ssa.Value
5880 if commaok {
5881 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5882 }
5883 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
5884 }
5885
5886 if base.Debug.TypeAssert > 0 {
5887 base.WarnfAt(pos, "type assertion inlined")
5888 }
5889
5890
5891 direct := types.IsDirectIface(dst)
5892 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5893 if base.Debug.TypeAssert > 0 {
5894 base.WarnfAt(pos, "type assertion inlined")
5895 }
5896 var wantedFirstWord *ssa.Value
5897 if src.IsEmptyInterface() {
5898
5899 wantedFirstWord = target
5900 } else {
5901
5902 wantedFirstWord = targetItab
5903 }
5904
5905 var tmp ir.Node
5906 var addr *ssa.Value
5907 if commaok && !ssa.CanSSA(dst) {
5908
5909
5910 tmp, addr = s.temp(pos, dst)
5911 }
5912
5913 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
5914 b := s.endBlock()
5915 b.Kind = ssa.BlockIf
5916 b.SetControl(cond)
5917 b.Likely = ssa.BranchLikely
5918
5919 bOk := s.f.NewBlock(ssa.BlockPlain)
5920 bFail := s.f.NewBlock(ssa.BlockPlain)
5921 b.AddEdgeTo(bOk)
5922 b.AddEdgeTo(bFail)
5923
5924 if !commaok {
5925
5926 s.startBlock(bFail)
5927 taddr := source
5928 if taddr == nil {
5929 taddr = s.reflectType(src)
5930 }
5931 if src.IsEmptyInterface() {
5932 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
5933 } else {
5934 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
5935 }
5936
5937
5938 s.startBlock(bOk)
5939 if direct {
5940 return s.newValue1(ssa.OpIData, dst, iface), nil
5941 }
5942 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
5943 return s.load(dst, p), nil
5944 }
5945
5946
5947
5948 bEnd := s.f.NewBlock(ssa.BlockPlain)
5949
5950
5951 valVar := ssaMarker("val")
5952
5953
5954 s.startBlock(bOk)
5955 if tmp == nil {
5956 if direct {
5957 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
5958 } else {
5959 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
5960 s.vars[valVar] = s.load(dst, p)
5961 }
5962 } else {
5963 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
5964 s.move(dst, addr, p)
5965 }
5966 s.vars[okVar] = s.constBool(true)
5967 s.endBlock()
5968 bOk.AddEdgeTo(bEnd)
5969
5970
5971 s.startBlock(bFail)
5972 if tmp == nil {
5973 s.vars[valVar] = s.zeroVal(dst)
5974 } else {
5975 s.zero(dst, addr)
5976 }
5977 s.vars[okVar] = s.constBool(false)
5978 s.endBlock()
5979 bFail.AddEdgeTo(bEnd)
5980
5981
5982 s.startBlock(bEnd)
5983 if tmp == nil {
5984 res = s.variable(valVar, dst)
5985 delete(s.vars, valVar)
5986 } else {
5987 res = s.load(dst, addr)
5988 }
5989 resok = s.variable(okVar, types.Types[types.TBOOL])
5990 delete(s.vars, okVar)
5991 return res, resok
5992 }
5993
5994
5995 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
5996 tmp := typecheck.TempAt(pos, s.curfn, t)
5997 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
5998 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
5999 }
6000 addr := s.addr(tmp)
6001 return tmp, addr
6002 }
6003
6004
6005 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6006 v := s.vars[n]
6007 if v != nil {
6008 return v
6009 }
6010 v = s.fwdVars[n]
6011 if v != nil {
6012 return v
6013 }
6014
6015 if s.curBlock == s.f.Entry {
6016
6017 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6018 }
6019
6020
6021 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6022 s.fwdVars[n] = v
6023 if n.Op() == ir.ONAME {
6024 s.addNamedValue(n.(*ir.Name), v)
6025 }
6026 return v
6027 }
6028
// mem returns the current memory state, as tracked via the memVar pseudo-variable.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6032
6033 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6034 if n.Class == ir.Pxxx {
6035
6036 return
6037 }
6038 if ir.IsAutoTmp(n) {
6039
6040 return
6041 }
6042 if n.Class == ir.PPARAMOUT {
6043
6044
6045 return
6046 }
6047 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6048 values, ok := s.f.NamedValues[loc]
6049 if !ok {
6050 s.f.Names = append(s.f.Names, &loc)
6051 s.f.CanonicalLocalSlots[loc] = &loc
6052 }
6053 s.f.NamedValues[loc] = append(values, v)
6054 }
6055
6056
// Branch is an unresolved branch: a Prog whose jump target is the start
// of an SSA block, to be patched once that block's first Prog is known.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6061
6062
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump tables we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID)
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// Map from GC safe points to liveness index, generated by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for which we
	// need to generate instructions that spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of instructions
	// within a single block sharing the same line number.
	// Used to move statement marks to the beginning of such runs.
	lineRunStart *obj.Prog

	// wasm: The number of values on the WebAssembly stack. This is only used as a safeguard.
	OnWasmStackSkipped int
}
6096
// FuncInfo returns the obj.FuncInfo for the function being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6100
6101
// Prog appends a new Prog with the given opcode, maintaining the
// statement-mark bookkeeping: within a run of instructions on the same
// line, at most the first one keeps the is-statement mark.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float a statement start to the beginning of any same-line run of instructions.
	// lineRunStart is reset at block boundaries, which partitions runs of instructions.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		// Later instruction on the same line claims the statement mark:
		// move the mark to the run's first instruction and clear it here.
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6117
6118
// Pc returns the current Prog (the next instruction slot to be filled).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6122
6123
// SetPos sets the current source position, to be attached to subsequently emitted Progs.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6127
6128
6129
6130
// Br emits a single branch instruction and returns the instruction.
// The branch target (the start of the SSA block) is recorded in
// s.Branches and patched in later, once block start Progs are known.
func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
	p := s.Prog(op)
	p.To.Type = obj.TYPE_BRANCH
	s.Branches = append(s.Branches, Branch{P: p, B: target})
	return p
}
6137
6138
6139
6140
6141
6142
// DebugFriendlySetPosFrom adjusts Pos.IsStmt subject to heuristics
// that reduce "jumpy" line number churn when debugging.
// Spill/fill/copy instructions from the register allocator,
// phi functions, and instructions with a no-pos position
// are examples of instructions that can cause churn.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements; they never get a statement mark.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also convert default IsStmt to NotStmt; only
			// explicit statement boundaries should appear
			// in the generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// If s.pp.Pos already is a statement and the
					// incoming position matches file and line, keep
					// the statement mark by leaving s.pp.Pos alone —
					// otherwise the mark would be lost for this line.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v, but copy the statement mark from State
			}
			s.SetPos(p)
		} else {
			// Position unknown: keep the current line but drop any statement mark.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6181
6182
6183 func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
6184 ft := e.curfn.Type()
6185 if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
6186 return
6187 }
6188
6189 x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
6190 x.Set(obj.AttrContentAddressable, true)
6191 e.curfn.LSym.Func().ArgInfo = x
6192
6193
6194 p := pp.Prog(obj.AFUNCDATA)
6195 p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
6196 p.To.Type = obj.TYPE_MEM
6197 p.To.Name = obj.NAME_EXTERN
6198 p.To.Sym = x
6199 }
6200
6201
// EmitArgInfo emits the argument-info symbol for f: a compact byte
// encoding of the (offset, size) layout of f's input parameters, used by
// the runtime to print argument values in tracebacks. The encoding uses
// the rtabi.TraceArgs* byte codes, with nesting markers for aggregates
// and dotdotdot/offset-too-large escapes for data the runtime cannot print.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is printed component-by-component.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into x's data
	n := 0    // number of components emitted so far (bounded by TraceArgsLimit)
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate arg/field/element as (offset, size),
	// or the offset-too-large escape if it doesn't fit in a byte.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// Emit the types of an argument or field, as a sequence of components.
	// Returns whether to continue emitting (false once the limit is hit).
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// ptr/len/cap, three pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Real and imaginary halves.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty array has a component, for the aggregate marker pair
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty struct has a component, for the aggregate marker pair
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the dictionary argument of instantiated generic functions.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6305
6306
6307 func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
6308 if base.Ctxt.Flag_linkshared {
6309
6310
6311 return
6312 }
6313
6314 wfn := e.curfn.WrappedFunc
6315 if wfn == nil {
6316 return
6317 }
6318
6319 wsym := wfn.Linksym()
6320 x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
6321 objw.SymPtrOff(x, 0, wsym)
6322 x.Set(obj.AttrContentAddressable, true)
6323 })
6324 e.curfn.LSym.Func().WrapInfo = x
6325
6326
6327 p := pp.Prog(obj.AFUNCDATA)
6328 p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
6329 p.To.Type = obj.TYPE_MEM
6330 p.To.Name = obj.NAME_EXTERN
6331 p.To.Sym = x
6332 }
6333
6334
// genssa appends entries to pp for each instruction in f.
// It lowers the fully-scheduled SSA form to obj.Progs: liveness/funcdata
// emission, per-block and per-value code generation, inline-mark fixup,
// location-list debug info, branch and jump-table resolution, and
// optional logging/HTML dumps.
func genssa(f *ssa.Func, pp *objw.Progs) {
	var s State
	s.ABI = f.OwnAux.Fn.ABI()

	e := f.Frontend().(*ssafn)

	// Compute stack-pointer liveness and argument liveness maps.
	s.livenessMap, s.partLiveArgs = liveness.Compute(e.curfn, f, e.stkptrsize, pp)
	emitArgInfo(e, f, pp)
	argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)

	openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
	if openDeferInfo != nil {
		// This function uses open-coded defers -- write out the funcdata
		// info that we computed at the end of genssa.
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = openDeferInfo
	}

	emitWrappedFuncInfo(e, pp)

	// Remember where each block starts.
	s.bstart = make([]*obj.Prog, f.NumBlocks())
	s.pp = pp
	var progToValue map[*obj.Prog]*ssa.Value
	var progToBlock map[*obj.Prog]*ssa.Block
	var valueToProgAfter []*obj.Prog // The first Prog following computation of a value v; v is visible at this point.
	gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
	if gatherPrintInfo {
		progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
		progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
		f.Logf("genssa %s\n", f.Name)
		progToBlock[s.pp.Next] = f.Blocks[0]
	}

	if base.Ctxt.Flag_locationlists {
		// Reuse (and clear) the cached per-value slice to avoid reallocation.
		if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
			f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
		}
		valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
		for i := range valueToProgAfter {
			valueToProgAfter[i] = nil
		}
	}

	// If the very first instruction is not tagged as a statement,
	// debuggers may attribute it to previous function in program.
	firstPos := src.NoXPos
	for _, v := range f.Entry.Values {
		if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
			firstPos = v.Pos
			v.Pos = firstPos.WithDefaultStmt()
			break
		}
	}

	// inlMarks has an entry for each Prog that implements an inline mark.
	// It maps from that Prog to the global inlining id of the inlined body
	// which should unwind to this Prog's location.
	var inlMarks map[*obj.Prog]int32
	var inlMarkList []*obj.Prog

	// inlMarksByPos maps from a (column 1) source position to the set of
	// Progs that are in the set above and have that source position.
	var inlMarksByPos map[src.XPos][]*obj.Prog

	var argLiveIdx int = -1 // argument liveness info index

	// These control cache line alignment; if the required portion of
	// a cache line is not available, then pad to obtain cache line
	// alignment.  Not implemented on all architectures, may not be
	// useful on all architectures.
	var hotAlign, hotRequire int64

	if base.Debug.AlignHot > 0 {
		switch base.Ctxt.Arch.Name {
		// enable this on a case-by-case basis, with benchmarking.
		// currently shown:
		//   good for amd64
		//   not helpful for Apple Silicon
		//
		case "amd64", "386":
			// Align to 64 if 31 or fewer bytes remain in a cache line
			// benchmarks a little better than always aligning, and also
			// adds slightly less to the (PGO-compiled) binary size.
			hotAlign = 64
			hotRequire = 31
		}
	}

	// Emit basic blocks
	for i, b := range f.Blocks {

		s.lineRunStart = nil
		s.SetPos(s.pp.Pos.WithNotStmt()) // It needs a non-empty Pos, but cannot be a statement boundary (yet).

		if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
			// So far this has only been shown profitable for PGO-hot loop headers.
			// The Hotness values allows distinctions between initial blocks that
			// are "hot", and blocks that are both hot and flow-in or hot and
			// looping, but the benchmarking was flat.
			p := s.pp.Prog(obj.APCALIGNMAX)
			p.From.SetConst(hotAlign)
			p.To.SetConst(hotRequire)
		}

		s.bstart[b.ID] = s.pp.Next

		if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
			argLiveIdx = idx
			p := s.pp.Prog(obj.APCDATA)
			p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
			p.To.SetConst(int64(idx))
		}

		// Emit values in block
		Arch.SSAMarkMoves(&s, b)
		for _, v := range b.Values {
			x := s.pp.Next
			s.DebugFriendlySetPosFrom(v)

			if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
				v.Fatalf("input[0] and output not in same register %s", v.LongString())
			}

			switch v.Op {
			case ssa.OpInitMem:
				// memory arg needs no code
			case ssa.OpArg:
				// input args need no code
			case ssa.OpSP, ssa.OpSB:
				// nothing to do
			case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
				// nothing to do
			case ssa.OpGetG:
				// nothing to do when there's a g register,
				// and checkLower complains if there's not
			case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
				// nothing to do; already used by liveness
			case ssa.OpPhi:
				CheckLoweredPhi(v)
			case ssa.OpConvert:
				// nothing to do; no-op conversion for liveness
				if v.Args[0].Reg() != v.Reg() {
					v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
				}
			case ssa.OpInlMark:
				p := Arch.Ginsnop(s.pp)
				if inlMarks == nil {
					inlMarks = map[*obj.Prog]int32{}
					inlMarksByPos = map[src.XPos][]*obj.Prog{}
				}
				inlMarks[p] = v.AuxInt32()
				inlMarkList = append(inlMarkList, p)
				pos := v.Pos.AtColumn1()
				inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
				firstPos = src.NoXPos

			default:
				// Special case for first line in function; move it to the start (which cannot be a register-valued instruction).
				if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
					s.SetPos(firstPos)
					firstPos = src.NoXPos
				}
				// Attach this safe point to the next
				// instruction.
				s.pp.NextLive = s.livenessMap.Get(v)
				s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)

				// let the backend handle it
				Arch.SSAGenValue(&s, v)
			}

			if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
				argLiveIdx = idx
				p := s.pp.Prog(obj.APCDATA)
				p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
				p.To.SetConst(int64(idx))
			}

			if base.Ctxt.Flag_locationlists {
				valueToProgAfter[v.ID] = s.pp.Next
			}

			if gatherPrintInfo {
				for ; x != s.pp.Next; x = x.Link {
					progToValue[x] = v
				}
			}
		}
		// If this is an empty infinite loop, stick a hardware NOP in there so that debuggers are less confused.
		if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
			p := Arch.Ginsnop(s.pp)
			p.Pos = p.Pos.WithIsStmt()
			if b.Pos == src.NoXPos {
				b.Pos = p.Pos // It needs a file, otherwise a no-file non-zero line causes confusion.  See #35652.
				if b.Pos == src.NoXPos {
					b.Pos = s.pp.Text.Pos // Sometimes p.Pos is empty.  See #35695.
				}
			}
			b.Pos = b.Pos.WithBogusLine() // Debuggers are not good about infinite loops, force a change in line number
		}

		// Set unsafe mark for any end-of-block generated instructions
		// (normally, conditional or unconditional branches).
		// This is particularly important for empty blocks, as there
		// are no values to inherit the unsafe mark from.
		s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)

		// Emit control flow instructions for block
		var next *ssa.Block
		if i < len(f.Blocks)-1 && base.Flag.N == 0 {
			// If -N, leave next==nil so every block with successors
			// ends in a JMP (except call blocks - plive doesn't like
			// select{send,recv} followed by a JMP call).  Helps keep
			// line numbers for otherwise empty blocks.
			next = f.Blocks[i+1]
		}
		x := s.pp.Next
		s.SetPos(b.Pos)
		Arch.SSAGenBlock(&s, b, next)
		if gatherPrintInfo {
			for ; x != s.pp.Next; x = x.Link {
				progToBlock[x] = b
			}
		}
	}
	if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
		// We need the return address of a panic call to
		// still be inside the function in question. So if
		// it ends in a call which doesn't return, add a
		// nop (which will never execute) after the call.
		Arch.Ginsnop(s.pp)
	}
	if openDeferInfo != nil || f.HasDeferRangeFunc {
		// When doing open-coded defers, generate a disconnected call to
		// deferreturn and a return. This will be used to during panic
		// recovery to unwind the stack and return back to the runtime.
		//
		// deferrangefunc needs to be sure that at least one of these exists;
		// if all returns are dead-code eliminated, there might not be.
		s.pp.NextLive = s.livenessMap.DeferReturn
		p := s.pp.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Deferreturn

		// Load results into registers. So when a deferred function
		// recovers a panic, it will return to caller with right results.
		// The results are already in memory, because they are not SSA'd
		// when the function has defers (see canSSAName).
		for _, o := range f.OwnAux.ABIInfo().OutParams() {
			n := o.Name
			rts, offs := o.RegisterTypesAndOffsets()
			for i := range o.Registers {
				Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
			}
		}

		s.pp.Prog(obj.ARET)
	}

	if inlMarks != nil {
		hasCall := false

		// We have some inline marks. Try to find other instructions we're
		// going to emit anyway, and use those instructions instead of the
		// inline marks.
		for p := s.pp.Text; p != nil; p = p.Link {
			if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
				p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
				// Don't use 0-sized instructions as inline marks, because we need
				// to identify inline mark instructions by pc offset.
				// (Some of these instructions are sometimes zero-sized, sometimes not.
				// We must not use anything that even might be zero-sized.)
				// TODO: are there others?
				continue
			}
			if _, ok := inlMarks[p]; ok {
				// Don't use inline marks themselves. We don't know
				// whether they will be zero-sized or not yet.
				continue
			}
			if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
				hasCall = true
			}
			pos := p.Pos.AtColumn1()
			marks := inlMarksByPos[pos]
			if len(marks) == 0 {
				continue
			}
			for _, m := range marks {
				// We found an instruction with the same source position as
				// some of the inline marks.
				// Use this instruction instead.
				p.Pos = p.Pos.WithIsStmt() // promote position to a statement
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
				// Make the inline mark a real nop, so it doesn't generate any code.
				m.As = obj.ANOP
				m.Pos = src.NoXPos
				m.From = obj.Addr{}
				m.To = obj.Addr{}
			}
			delete(inlMarksByPos, pos)
		}
		// Any unmatched inline marks now need to be added to the inlining tree (and will generate a nop instruction).
		for _, p := range inlMarkList {
			if p.As != obj.ANOP {
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
			}
		}

		if e.stksize == 0 && !hasCall {
			// Frameless leaf function. It doesn't need any preamble,
			// so make sure its first instruction isn't from an inlined callee.
			// If it is, add a nop at the start of the function with a position
			// equal to the start of the function.
			// This ensures that runtime.FuncForPC(uintptr(reflect.ValueOf(fn).Pointer())).Name()
			// returns the right answer. See issue 58300.
			for p := s.pp.Text; p != nil; p = p.Link {
				if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
					continue
				}
				if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
					// Make a real (not 0-sized) nop.
					nop := Arch.Ginsnop(s.pp)
					nop.Pos = e.curfn.Pos().WithIsStmt()

					// Unfortunately, Ginsnop puts the instruction at the
					// end of the list. Move it up to just before p.

					// Unlink from the current list.
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == nop {
							x.Link = nop.Link
							break
						}
					}
					// Splice in right before p.
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == p {
							nop.Link = p
							x.Link = nop
							break
						}
					}
				}
				break
			}
		}
	}

	if base.Ctxt.Flag_locationlists {
		var debugInfo *ssa.FuncDebug
		debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
		if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
			ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
		} else {
			ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
		}
		bstart := s.bstart
		idToIdx := make([]int, f.NumBlocks())
		for i, b := range f.Blocks {
			idToIdx[b.ID] = i
		}
		// Register a callback that will be used later to fill in PCs into location
		// lists. At the moment, Prog.Pc is a sequence number; it's not a real PC
		// until after assembly, so the translation can't take place yet.
		debugInfo.GetPC = func(b, v ssa.ID) int64 {
			switch v {
			case ssa.BlockStart.ID:
				if b == f.Entry.ID {
					return 0 // Start at the very beginning, at the assembler-generated prologue.
					// this should only happen for function args (ssa.OpArg)
				}
				return bstart[b].Pc
			case ssa.BlockEnd.ID:
				blk := f.Blocks[idToIdx[b]]
				nv := len(blk.Values)
				return valueToProgAfter[blk.Values[nv-1].ID].Pc
			case ssa.FuncEnd.ID:
				return e.curfn.LSym.Size
			default:
				return valueToProgAfter[v].Pc
			}
		}
	}

	// Resolve branches, and relax DefaultStmt into NotStmt
	for _, br := range s.Branches {
		br.P.To.SetTarget(s.bstart[br.B.ID])
		if br.P.Pos.IsStmt() != src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		} else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		}

	}

	// Resolve jump table destinations.
	for _, jt := range s.JumpTables {
		// Convert from *Block targets to *Prog targets.
		targets := make([]*obj.Prog, len(jt.Succs))
		for i, e := range jt.Succs {
			targets[i] = s.bstart[e.Block().ID]
		}
		// Add to list of jump tables to be resolved at assembly time.
		// The assembler converts from *Prog entries to absolute addresses
		// once it knows instruction byte offsets.
		fi := s.pp.CurFunc.LSym.Func()
		fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
	}

	if e.log { // spew to stdout
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				f.Logf("# %s\n", filename)
			}

			var s string
			if v, ok := progToValue[p]; ok {
				s = v.String()
			} else if b, ok := progToBlock[p]; ok {
				s = b.String()
			} else {
				s = "   " // most value and branch strings are 2-3 characters long
			}
			f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
		}
	}
	if f.HTMLWriter != nil { // spew to ssa.html
		var buf strings.Builder
		buf.WriteString("<code>")
		buf.WriteString("<dl class=\"ssa-gen\">")
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			// Don't spam every line with the file name, which is often huge.
			// Only print changes, and "unknown" is not a change.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
				buf.WriteString(html.EscapeString("# " + filename))
				buf.WriteString("</dd>")
			}

			buf.WriteString("<dt class=\"ssa-prog-src\">")
			if v, ok := progToValue[p]; ok {
				buf.WriteString(v.HTML())
			} else if b, ok := progToBlock[p]; ok {
				buf.WriteString("<b>" + b.HTML() + "</b>")
			}
			buf.WriteString("</dt>")
			buf.WriteString("<dd class=\"ssa-prog\">")
			fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
			buf.WriteString("</dd>")
		}
		buf.WriteString("</dl>")
		buf.WriteString("</code>")
		f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
	}
	if ssa.GenssaDump[f.Name] {
		fi := f.DumpFileForPhase("genssa")
		if fi != nil {

			// inliningDiffers if any filename changes or if any line number except the innermost (last index) changes.
			inliningDiffers := func(a, b []src.Pos) bool {
				if len(a) != len(b) {
					return true
				}
				for i := range a {
					if a[i].Filename() != b[i].Filename() {
						return true
					}
					if i != len(a)-1 && a[i].Line() != b[i].Line() {
						return true
					}
				}
				return false
			}

			var allPosOld []src.Pos
			var allPos []src.Pos

			for p := s.pp.Text; p != nil; p = p.Link {
				if p.Pos.IsKnown() {
					allPos = allPos[:0]
					p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
					if inliningDiffers(allPos, allPosOld) {
						for _, pos := range allPos {
							fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
						}
						allPos, allPosOld = allPosOld, allPos // swap, not copy, so that they do not share slice storage.
					}
				}

				var s string
				if v, ok := progToValue[p]; ok {
					s = v.String()
				} else if b, ok := progToBlock[p]; ok {
					s = b.String()
				} else {
					s = "   " // most value and branch strings are 2-3 characters long
				}
				fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
			}
			fi.Close()
		}
	}

	defframe(&s, e, f)

	f.HTMLWriter.Close()
	f.HTMLWriter = nil
}
6854
// defframe finalizes the function's frame: it fills in the TEXT
// instruction's frame/argument sizes, spills pointer-carrying halves of
// partially-live register arguments, and zeroes ambiguously-live
// pointer-containing locals (those marked Needzero).
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be partially
	// live. That is, the named slot is considered live by liveness analysis,
	// (because a part of it is live), but we may not spill all parts into the
	// slot. This can only happen with aggregate-typed arguments that are SSA-able
	// and not address-taken (for non-SSA-able or address-taken arguments we always
	// spill upfront).
	// Note: spilling is unnecessary in the -N/no-optimize case, since all values
	// will be considered non-SSAable and spilled up front.
	// TODO(register args) Make liveness more fine-grained to that partial spilling is okay.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, see if it is already spilled before it may be live. Look for a spill
		// in the entry block up to the first safepoint.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then, insert code to spill registers if not already.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so that the
	// garbage collector only sees initialized values when it
	// looks for pointers.
	var lo, hi int64

	// Opaque state for backend to use. Current backends use it to
	// keep track of which helper registers have been zeroed.
	var state uint32

	// Iterate through declarations. Autos are sorted in decreasing
	// frame offset order.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with range we already have.
			lo = n.FrameOffset()
			continue
		}

		// Zero old range
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Set new range.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
6961
6962
// IndexJump describes one branch in a combined two-instruction jump:
// the jump opcode and the index of the successor block it targets.
type IndexJump struct {
	Jump  obj.As
	Index int
}
6967
// oneJump emits a single conditional branch from block b to the
// successor selected by jump.Index, using jump.Jump as the opcode.
func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
	p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
	p.Pos = b.Pos
}
6972
6973
6974
// CombJump generates combinational instructions (2 at present) for a block jump,
// thereby the behaviors of non-standard condition codes could be simulated
// (e.g. unordered comparisons on some architectures).
func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
	switch next {
	case b.Succs[0].Block():
		// Fallthrough to successor 0; branch to successor 1.
		s.oneJump(b, &jumps[0][0])
		s.oneJump(b, &jumps[0][1])
	case b.Succs[1].Block():
		// Fallthrough to successor 1; branch to successor 0.
		s.oneJump(b, &jumps[1][0])
		s.oneJump(b, &jumps[1][1])
	default:
		// Neither successor follows: branch one way, then jump
		// unconditionally the other, preferring the likely path.
		var q *obj.Prog
		if b.Likely != ssa.BranchUnlikely {
			s.oneJump(b, &jumps[1][0])
			s.oneJump(b, &jumps[1][1])
			q = s.Br(obj.AJMP, b.Succs[1].Block())
		} else {
			s.oneJump(b, &jumps[0][0])
			s.oneJump(b, &jumps[0][1])
			q = s.Br(obj.AJMP, b.Succs[0].Block())
		}
		q.Pos = b.Pos
	}
}
6997
6998
// AddAux adds the offset and symbol information from v's Aux/AuxInt
// fields to the address a. See AddAux2.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
// AddAux2 adds offset and the symbol denoted by v.Aux (if any) to the
// address a, which must be a memory or address operand.
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// add integer offset
	a.Offset += offset

	// If no additional symbol offset, we're done.
	if v.Aux == nil {
		return
	}
	// Add symbol's offset from its base register.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
		} else {
			a.Name = obj.NAME_AUTO
		}
		a.Sym = n.Linksym()
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}
7033
7034
7035
// extendIndex extends the index value idx to the target's pointer width
// so it can be used to index memory. kind selects the bounds-check panic
// to raise when a 64-bit index on a 32-bit target has a nonzero high
// word; len is passed to that panic as the length operand. bounded means
// the caller has already proven the index in range, so no check is emitted.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer width; use as is.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit target. The usable index is the low
		// word; the high word must be zero or the index is out of range
		// (or negative, for a signed index).
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Proven in range, or bounds checks disabled by flag.
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Use the unsigned variant of the panic kind.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// if hi == 0 { goto bNext } else { goto bPanic }
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		// Panic block: raise the extended-index panic, passing both
		// halves of the index plus the length.
		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: sign- or zero-extend it.
	// The switch key encodes (index size, pointer size) as 10*size+PtrSize.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14: // 1-byte index, 4-byte pointer
			op = ssa.OpSignExt8to32
		case 18: // 1-byte index, 8-byte pointer
			op = ssa.OpSignExt8to64
		case 24: // 2-byte index, 4-byte pointer
			op = ssa.OpSignExt16to32
		case 28: // 2-byte index, 8-byte pointer
			op = ssa.OpSignExt16to64
		case 48: // 4-byte index, 8-byte pointer
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14: // 1-byte index, 4-byte pointer
			op = ssa.OpZeroExt8to32
		case 18: // 1-byte index, 8-byte pointer
			op = ssa.OpZeroExt8to64
		case 24: // 2-byte index, 4-byte pointer
			op = ssa.OpZeroExt16to32
		case 28: // 2-byte index, 8-byte pointer
			op = ssa.OpZeroExt16to64
		case 48: // 4-byte index, 8-byte pointer
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7127
7128
7129
7130 func CheckLoweredPhi(v *ssa.Value) {
7131 if v.Op != ssa.OpPhi {
7132 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7133 }
7134 if v.Type.IsMemory() {
7135 return
7136 }
7137 f := v.Block.Func
7138 loc := f.RegAlloc[v.ID]
7139 for _, a := range v.Args {
7140 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7141 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7142 }
7143 }
7144 }
7145
7146
7147
7148
7149
7150 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7151 entry := v.Block.Func.Entry
7152 if entry != v.Block {
7153 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7154 }
7155 for _, w := range entry.Values {
7156 if w == v {
7157 break
7158 }
7159 switch w.Op {
7160 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7161
7162 default:
7163 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7164 }
7165 }
7166 }
7167
7168
// CheckArgReg checks that v, an ArgIntReg/ArgFloatReg value, is placed
// in its function's entry block; any other block is a fatal error.
func CheckArgReg(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
	}
}
7175
7176 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7177 n, off := ssa.AutoVar(v)
7178 a.Type = obj.TYPE_MEM
7179 a.Sym = n.Linksym()
7180 a.Reg = int16(Arch.REGSP)
7181 a.Offset = n.FrameOffset() + off
7182 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7183 a.Name = obj.NAME_PARAM
7184 } else {
7185 a.Name = obj.NAME_AUTO
7186 }
7187 }
7188
7189
7190
// Call emits the call instruction for the SSA value v and returns the
// resulting Prog. Calls whose aux is an *ssa.AuxCall with a known symbol
// become direct calls; everything else becomes an indirect call through
// the register holding v.Args[0], with an operand type chosen per the
// target architecture's convention.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	// Capture the current statement-ness before PrepareCall so the CALL
	// prog below keeps it.
	pPosIsStmt := s.pp.Pos.IsStmt()
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Direct call to a known symbol.
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call through the register holding the target address.
		// Whether the operand is written as a register or a memory
		// operand is architecture-specific.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
7219
7220
7221
7222 func (s *State) TailCall(v *ssa.Value) *obj.Prog {
7223 p := s.Call(v)
7224 p.As = obj.ARET
7225 return p
7226 }
7227
7228
7229
7230
// PrepareCall performs the bookkeeping required immediately before
// emitting the call instruction for v: it checks that a stack map index
// exists for the call site, records the call edge for the write-barrier
// recursion checker when that checker is active, and grows the tracked
// maximum outgoing-argument size.
func (s *State) PrepareCall(v *ssa.Value) {
	idx := s.livenessMap.Get(v)
	if !idx.StackMapValid() {
		// Only calls to the runtime's wbZero/wbMove are permitted to
		// lack a stack map index; anything else is a compiler bug.
		if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
			base.Fatalf("missing stack map index for %v", v.LongString())
		}
	}

	call, ok := v.Aux.(*ssa.AuxCall)

	if ok {
		// Record the call edge for //go:nowritebarrierrec checking,
		// if that analysis is enabled.
		if nowritebarrierrecCheck != nil {
			nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
		}
	}

	// v.AuxInt carries the call's outgoing argument size; track the max
	// (same accounting as UseArgs).
	if s.maxarg < v.AuxInt {
		s.maxarg = v.AuxInt
	}
}
7254
7255
7256
7257 func (s *State) UseArgs(n int64) {
7258 if s.maxarg < n {
7259 s.maxarg = n
7260 }
7261 }
7262
7263
7264 func fieldIdx(n *ir.SelectorExpr) int {
7265 t := n.X.Type()
7266 if !t.IsStruct() {
7267 panic("ODOT's LHS is not a struct")
7268 }
7269
7270 for i, f := range t.Fields() {
7271 if f.Sym == n.Sel {
7272 if f.Offset != n.Offset() {
7273 panic("field offset doesn't match")
7274 }
7275 return i
7276 }
7277 }
7278 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7279
7280
7281
7282 }
7283
7284
7285
// ssafn holds per-function state handed to the SSA backend; it also
// implements the compiler-services interface the backend calls
// (StringData, SplitSlot, Logf, Fatalf, Syslook, ...).
type ssafn struct {
	// curfn is the function being compiled.
	curfn *ir.Func
	// strings caches data symbols for constant strings; see StringData.
	strings map[string]*obj.LSym
	// stksize: stack frame size — not read in this chunk; presumably
	// maintained by frame layout elsewhere in the file (TODO confirm).
	stksize int64
	// stkptrsize: pointer-containing prefix of the frame — same caveat
	// as stksize.
	stkptrsize int64

	// stkalign: required stack alignment — same caveat as stksize.
	stkalign int64

	// log enables Logf output; see Log/Logf.
	log bool
}
7300
7301
7302
7303 func (e *ssafn) StringData(s string) *obj.LSym {
7304 if aux, ok := e.strings[s]; ok {
7305 return aux
7306 }
7307 if e.strings == nil {
7308 e.strings = make(map[string]*obj.LSym)
7309 }
7310 data := staticdata.StringSym(e.curfn.Pos(), s)
7311 e.strings[s] = data
7312 return data
7313 }
7314
7315
7316 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
7317 node := parent.N
7318
7319 if node.Class != ir.PAUTO || node.Addrtaken() {
7320
7321 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
7322 }
7323
7324 sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
7325 n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
7326 n.SetUsed(true)
7327 n.SetEsc(ir.EscNever)
7328 types.CalcSize(t)
7329 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
7330 }
7331
7332
7333 func (e *ssafn) Logf(msg string, args ...interface{}) {
7334 if e.log {
7335 fmt.Printf(msg, args...)
7336 }
7337 }
7338
// Log reports whether logging (Logf output) is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7342
7343
7344 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
7345 base.Pos = pos
7346 nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
7347 base.Fatalf("'%s': "+msg, nargs...)
7348 }
7349
7350
7351
// Warnl reports a compiler warning at the given position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7355
// Debug_checknil reports whether nil-check debugging is enabled
// (base.Debug.Nil nonzero).
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7359
// UseWriteBarrier reports whether write barriers are enabled
// (base.Flag.WB).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7363
7364 func (e *ssafn) Syslook(name string) *obj.LSym {
7365 switch name {
7366 case "goschedguarded":
7367 return ir.Syms.Goschedguarded
7368 case "writeBarrier":
7369 return ir.Syms.WriteBarrier
7370 case "wbZero":
7371 return ir.Syms.WBZero
7372 case "wbMove":
7373 return ir.Syms.WBMove
7374 case "cgoCheckMemmove":
7375 return ir.Syms.CgoCheckMemmove
7376 case "cgoCheckPtrWrite":
7377 return ir.Syms.CgoCheckPtrWrite
7378 }
7379 e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
7380 return nil
7381 }
7382
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7386
7387 func clobberBase(n ir.Node) ir.Node {
7388 if n.Op() == ir.ODOT {
7389 n := n.(*ir.SelectorExpr)
7390 if n.X.Type().NumFields() == 1 {
7391 return clobberBase(n.X)
7392 }
7393 }
7394 if n.Op() == ir.OINDEX {
7395 n := n.(*ir.IndexExpr)
7396 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7397 return clobberBase(n.X)
7398 }
7399 }
7400 return n
7401 }
7402
7403
// callTargetLSym returns the linker symbol to use when calling callee.
func callTargetLSym(callee *ir.Name) *obj.LSym {
	if callee.Func == nil {
		// No IR function is attached to this name, so its ABI is not
		// known here; fall back to the default linker symbol.
		// (Presumably a function declared without a visible body —
		// confirm against callers.)
		return callee.Linksym()
	}

	// Use the symbol for the ABI the callee is compiled with.
	return callee.LinksymABI(callee.Func.ABI)
}
7414
7415
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct verifies it at build time.
const deferStructFnField = 4

// deferType caches the type built by deferstruct.
var deferType *types.Type
7419
7420
7421
// deferstruct returns the compiler's model of the runtime's _defer
// record, building it on first use and caching it in deferType.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield creates an unexported (nil-package) field of type t.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	// NOTE(review): this layout presumably must stay in sync with the
	// runtime's _defer struct — confirm against the runtime sources
	// before changing field order or types. Pointer-valued fields (fn,
	// link, head) are modeled here as uintptr; presumably only size and
	// alignment matter for this model — confirm.
	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Keep deferStructFnField pointing at the "fn" field.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type under the runtime package's _defer symbol.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
7460
7461
7462
7463
7464
7465 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
7466 return obj.Addr{
7467 Name: obj.NAME_NONE,
7468 Type: obj.TYPE_MEM,
7469 Reg: baseReg,
7470 Offset: spill.Offset + extraOffset,
7471 }
7472 }
7473
// Per-BoundsKind linker symbol tables used when lowering bounds checks
// (populated elsewhere; not written in this chunk).
var (
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)
7478