package walk

import (
	"fmt"
	"go/constant"
	"internal/abi"
	"internal/buildcfg"
	"strings"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/rttype"
	"cmd/compile/internal/staticdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/objabi"
)

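// walkExpr walks the expression n, appending any statements it generates to init.
// The result MUST be assigned back to n, e.g.
//
//	n.X = walkExpr(n.X, init)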
func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return n
	}

	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
		base.Fatalf("walkExpr init == &n->ninit")
	}

	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	lno := ir.SetPos(n)

	if base.Flag.LowerW > 1 {
		ir.Dump("before walk expr", n)
	}

	if n.Typecheck() != 1 {
		base.Fatalf("missed typecheck: %+v", n)
	}

	if n.Type().IsUntyped() {
		base.Fatalf("expression has untyped type: %+v", n)
	}

	n = walkExpr1(n, init)

	// Eagerly compute sizes of all expressions for the back end.
	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
		types.CheckSize(typ)
	}
	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
		types.CheckSize(n.Heapaddr.Type())
	}
	if ir.IsConst(n, constant.String) {
		// Emit string symbol now to avoid emitting
		// any concurrently during the backend.
		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
	}

	if base.Flag.LowerW != 0 && n != nil {
		ir.Dump("after walk expr", n)
	}

	base.Pos = lno
	return n
}

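// walkExpr1 dispatches on n.Op() and performs the op-specific rewriting for walkExpr.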
func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	default:
		ir.Dump("walk", n)
		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
		panic("unreachable")

	case ir.OGETG, ir.OGETCALLERSP:
		return n

	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		return n

	case ir.OMETHEXPR:
		n := n.(*ir.SelectorExpr)
		return n.FuncName()

	case ir.OMIN, ir.OMAX:
		n := n.(*ir.CallExpr)
		return walkMinMax(n, init)

	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
		n := n.(*ir.UnaryExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODOTMETH, ir.ODOTINTER:
		n := n.(*ir.SelectorExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OADDR:
		n := n.(*ir.AddrExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
		ir.OUNSAFEADD:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OUNSAFESLICE:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeSlice(n, init)

	case ir.OUNSAFESTRING:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeString(n, init)

	case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
		n := n.(*ir.UnaryExpr)
		return walkUnsafeData(n, init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		return walkDot(n, init)

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		return walkDotType(n, init)

	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
		n := n.(*ir.DynamicTypeAssertExpr)
		return walkDynamicDotType(n, init)

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		return walkLenCap(n, init)

	case ir.OCOMPLEX:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		return walkCompare(n, init)

	case ir.OANDAND, ir.OOROR:
		n := n.(*ir.LogicalExpr)
		return walkLogical(n, init)

	case ir.OPRINT, ir.OPRINTLN:
		return walkPrint(n.(*ir.CallExpr), init)

	case ir.OPANIC:
		n := n.(*ir.UnaryExpr)
		return mkcall("gopanic", nil, init, n.X)

	case ir.ORECOVERFP:
		return walkRecoverFP(n.(*ir.CallExpr), init)

	case ir.OCFUNC:
		return n

	case ir.OCALLINTER, ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		return walkCall(n, init)

	case ir.OAS, ir.OASOP:
		return walkAssign(init, n)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		return walkAssignList(init, n)

	case ir.OAS2FUNC:
		// a, b = f()
		n := n.(*ir.AssignListStmt)
		return walkAssignFunc(init, n)

	case ir.OAS2RECV:
		// x, ok = <-c
		n := n.(*ir.AssignListStmt)
		return walkAssignRecv(init, n)

	case ir.OAS2MAPR:
		// a, b = m[i]
		n := n.(*ir.AssignListStmt)
		return walkAssignMapRead(init, n)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		return walkDelete(init, n)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		return walkAssignDotType(n, init)

	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		return walkConvInterface(n, init)

	case ir.OCONV, ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		return walkConv(n, init)

	case ir.OSLICE2ARR:
		n := n.(*ir.ConvExpr)
		return walkSliceToArray(n, init)

	case ir.OSLICE2ARRPTR:
		n := n.(*ir.ConvExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODIV, ir.OMOD:
		n := n.(*ir.BinaryExpr)
		return walkDivMod(n, init)

	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		return walkIndex(n, init)

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		return walkIndexMap(n, init)

	case ir.ORECV:
		base.Fatalf("walkExpr ORECV")
		panic("unreachable")

	case ir.OSLICEHEADER:
		n := n.(*ir.SliceHeaderExpr)
		return walkSliceHeader(n, init)

	case ir.OSTRINGHEADER:
		n := n.(*ir.StringHeaderExpr)
		return walkStringHeader(n, init)

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		return walkSlice(n, init)

	case ir.ONEW:
		n := n.(*ir.UnaryExpr)
		return walkNew(n, init)

	case ir.OADDSTR:
		return walkAddString(n.Type(), n.(*ir.AddStringExpr), init)

	case ir.OAPPEND:
		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
		base.Fatalf("append outside assignment")
		panic("unreachable")

	case ir.OCOPY:
		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)

	case ir.OCLEAR:
		n := n.(*ir.UnaryExpr)
		return walkClear(n)

	case ir.OCLOSE:
		n := n.(*ir.UnaryExpr)
		return walkClose(n, init)

	case ir.OMAKECHAN:
		n := n.(*ir.MakeExpr)
		return walkMakeChan(n, init)

	case ir.OMAKEMAP:
		n := n.(*ir.MakeExpr)
		return walkMakeMap(n, init)

	case ir.OMAKESLICE:
		n := n.(*ir.MakeExpr)
		return walkMakeSlice(n, init)

	case ir.OMAKESLICECOPY:
		n := n.(*ir.MakeExpr)
		return walkMakeSliceCopy(n, init)

	case ir.ORUNESTR:
		n := n.(*ir.ConvExpr)
		return walkRuneToString(n, init)

	case ir.OBYTES2STR, ir.ORUNES2STR:
		n := n.(*ir.ConvExpr)
		return walkBytesRunesToString(n, init)

	case ir.OBYTES2STRTMP:
		n := n.(*ir.ConvExpr)
		return walkBytesToStringTemp(n, init)

	case ir.OSTR2BYTES:
		n := n.(*ir.ConvExpr)
		return walkStringToBytes(n, init)

	case ir.OSTR2BYTESTMP:
		n := n.(*ir.ConvExpr)
		return walkStringToBytesTemp(n, init)

	case ir.OSTR2RUNES:
		n := n.(*ir.ConvExpr)
		return walkStringToRunes(n, init)

	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
		return walkCompLit(n, init)

	case ir.OSEND:
		n := n.(*ir.SendStmt)
		return walkSend(n, init)

	case ir.OCLOSURE:
		return walkClosure(n.(*ir.ClosureExpr), init)

	case ir.OMETHVALUE:
		return walkMethodValue(n.(*ir.SelectorExpr), init)
	}

	// No return here: every case above returns (or calls Fatalf and panics),
	// so there is no ambiguity about what gets returned.
}

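// walkExprList walks each expression in the list s in place.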
func walkExprList(s []ir.Node, init *ir.Nodes) {
	for i := range s {
		s[i] = walkExpr(s[i], init)
	}
}

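// walkExprListCheap first replaces each expression with a cheap copy (see cheapExpr), then walks it.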
func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
	for i, n := range s {
		s[i] = cheapExpr(n, init)
		s[i] = walkExpr(s[i], init)
	}
}

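// walkExprListSafe first replaces each expression with a safe version (see safeExpr), then walks it.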
func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
	for i, n := range s {
		s[i] = safeExpr(n, init)
		s[i] = walkExpr(s[i], init)
	}
}

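// cheapExpr returns a cheap version of n. The definition of cheap is that n
// is a variable or constant. If not, cheapExpr allocates a new tmp, emits
// tmp = n, and then returns tmp.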
func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n
	}

	return copyExpr(n, n.Type(), init)
}

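// safeExpr returns a safe version of n. The definition of safe is that n can
// appear multiple times without violating the semantics of the original
// program, and that assigning to the safe version has the same effect as
// assigning to the original n.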
func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return nil
	}

	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		l := safeExpr(n.X, init)
		r := safeExpr(n.Index, init)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return walkExpr(typecheck.Expr(a), init)

	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		if isStaticCompositeLiteral(n) {
			return n
		}
	}

	// make a copy; must not be used as an lvalue
	if ir.IsAddressable(n) {
		base.Fatalf("missing lvalue case in safeExpr: %v", n)
	}
	return cheapExpr(n, init)
}

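// copyExpr copies n into a fresh temporary of type t and returns the temporary.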
func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
	l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
	return l
}

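// walkAddString walks an OADDSTR node, rewriting string or byte-slice
// concatenation into a call to the appropriate runtime concat function.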
func walkAddString(typ *types.Type, n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
	c := len(n.List)

	if c < 2 {
		base.Fatalf("walkAddString count %d too small", c)
	}

	var args []ir.Node

	var fn, fnsmall, fnbig string

	switch {
	default:
		base.FatalfAt(n.Pos(), "unexpected type: %v", typ)
	case typ.IsString():
		buf := typecheck.NodNil()
		if n.Esc() == ir.EscNone {
			sz := int64(0)
			for _, n1 := range n.List {
				if n1.Op() == ir.OLITERAL {
					sz += int64(len(ir.StringVal(n1)))
				}
			}

			// Don't allocate the buffer if the result won't fit.
			if sz < tmpstringbufsize {
				// Create temporary buffer for result string on stack.
				buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
			}
		}

		args = []ir.Node{buf}
		fnsmall, fnbig = "concatstring%d", "concatstrings"
	case typ.IsSlice() && typ.Elem().IsKind(types.TUINT8):
		fnsmall, fnbig = "concatbyte%d", "concatbytes"
	}

	if c <= 5 {
		// small numbers of strings use direct runtime helpers.
		fn = fmt.Sprintf(fnsmall, c)

		for _, n2 := range n.List {
			args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
		}
	} else {
		// large numbers of strings are passed to the runtime as a slice.
		fn = fnbig
		t := types.NewSlice(types.Types[types.TSTRING])

		slargs := make([]ir.Node, len(n.List))
		for i, n2 := range n.List {
			slargs[i] = typecheck.Conv(n2, types.Types[types.TSTRING])
		}
		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, slargs)
		slice.Prealloc = n.Prealloc
		args = append(args, slice)
		slice.SetEsc(ir.EscNone)
	}

	cat := typecheck.LookupRuntime(fn)
	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
	r.Args = args
	r1 := typecheck.Expr(r)
	r1 = walkExpr(r1, init)
	r1.SetType(typ)

	return r1
}

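// hookInfo describes a runtime function used to hook certain library calls
// when libfuzzer instrumentation is enabled (base.Debug.Libfuzzer != 0).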
type hookInfo struct {
	paramType   types.Kind
	argsNum     int
	runtimeFunc string
}

var hooks = map[string]hookInfo{
	"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
}

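// walkCall walks an OCALLFUNC or OCALLINTER node.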
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}
	if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
		// We expect both interface calls (reflect.Type.Method) and
		// concrete calls (reflect.(*rtype).Method) here.
		usemethod(n)
	}
	if n.Op() == ir.OCALLINTER {
		reflectdata.MarkUsedIfaceMethod(n)
	}

	if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
		directClosureCall(n)
	}

	if ir.IsFuncPCIntrinsic(n) {
		// Rewrite internal/abi.FuncPCABIxxx(fn) to the PC of fn
		// under the requested ABI.
		name := n.Fun.(*ir.Name).Sym().Name
		arg := n.Args[0]
		var wantABI obj.ABI
		switch name {
		case "FuncPCABI0":
			wantABI = obj.ABI0
		case "FuncPCABIInternal":
			wantABI = obj.ABIInternal
		}
		if n.Type() != types.Types[types.TUINTPTR] {
			base.FatalfAt(n.Pos(), "FuncPC intrinsic should return uintptr, got %v", n.Type())
		}
		n := ir.FuncPC(n.Pos(), arg, wantABI)
		return walkExpr(n, init)
	}

	if n.Op() == ir.OCALLFUNC {
		fn := ir.StaticCalleeName(n.Fun)
		if fn != nil && fn.Sym().Pkg.Path == "hash/maphash" && strings.HasPrefix(fn.Sym().Name, "escapeForHash[") {
			// hash/maphash.escapeForHash[T] is a compiler intrinsic that
			// exists only so escape analysis can force its argument to
			// escape based on its type. The call itself is a no-op;
			// just walk the argument.
			ps := fn.Type().Params()
			if len(ps) == 2 && ps[1].Type.IsShape() {
				return walkExpr(n.Args[1], init)
			}
		}
	}

	if name, ok := n.Fun.(*ir.Name); ok {
		sym := name.Sym()
		if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
			// A call to runtime.deferrangefunc (range-over-func lowering)
			// means this function has defers and cannot use open-coded defers.
			ir.CurFunc.SetHasDefer(true)
			ir.CurFunc.SetOpenCodedDeferDisallowed(true)
		}
	}

	walkCall1(n, init)
	return n
}

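// walkCall1 walks the callee and arguments of a call; arguments whose
// evaluation may itself require a function call are saved into temporaries
// first, and libfuzzer hooks are inserted when enabled.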
func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
	if n.Walked() {
		return // already walked
	}
	n.SetWalked(true)

	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}

	args := n.Args
	params := n.Fun.Type().Params()

	n.Fun = walkExpr(n.Fun, init)
	walkExprList(args, init)

	for i, arg := range args {
		// Validate argument and parameter types match.
		param := params[i]
		if !types.Identical(arg.Type(), param.Type) {
			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
		}

		// For any argument whose evaluation might require a function call,
		// store that argument into a temporary variable,
		// to prevent that call from clobbering arguments already on the stack.
		if mayCall(arg) {
			// assignment of arg to temp
			tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
			// replace arg with temp
			args[i] = tmp
		}
	}

	funSym := n.Fun.Sym()
	if base.Debug.Libfuzzer != 0 && funSym != nil {
		if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
			if len(args) != hook.argsNum {
				panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
			}
			var hookArgs []ir.Node
			for _, arg := range args {
				hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
			}
			hookArgs = append(hookArgs, fakePC(n))
			init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
		}
	}
}

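// walkDivMod walks an ODIV or OMOD node.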
func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// rewrite complex div into function call.
	et := n.X.Type().Kind()

	if types.IsComplex[et] && n.Op() == ir.ODIV {
		t := n.Type()
		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
		return typecheck.Conv(call, t)
	}

	// Nothing to do for float divisions.
	if types.IsFloat[et] {
		return n
	}

	// rewrite 64-bit div and mod on 32-bit architectures into runtime calls.
	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
		if n.Y.Op() == ir.OLITERAL {
			// Leave div/mod by constant powers of 2 or small 16-bit constants.
			// The SSA backend will handle those.
			switch et {
			case types.TINT64:
				c := ir.Int64Val(n.Y)
				if c < 0 {
					c = -c
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			case types.TUINT64:
				c := ir.Uint64Val(n.Y)
				if c < 1<<16 {
					return n
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			}
		}
		var fn string
		if et == types.TINT64 {
			fn = "int64"
		} else {
			fn = "uint64"
		}
		if n.Op() == ir.ODIV {
			fn += "div"
		} else {
			fn += "mod"
		}
		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
	}
	return n
}

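// walkDot walks an ODOT or ODOTPTR node.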
func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
	usefield(n)
	n.X = walkExpr(n.X, init)
	return n
}

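// walkDotType walks an ODOTTYPE or ODOTTYPE2 node.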
func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	// Set up interface type addresses for back end.
	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
		n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
	}
	if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
		// This kind of type assertion needs a runtime call;
		// build the descriptor that call will use.
		n.Descriptor = makeTypeAssertDescriptor(n.Type(), n.Op() == ir.ODOTTYPE2)
	}
	return n
}

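// makeTypeAssertDescriptor allocates and fills an internal/abi.TypeAssert
// descriptor, used by the runtime call for an interface-to-interface type
// assertion to target.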
func makeTypeAssertDescriptor(target *types.Type, canFail bool) *obj.LSym {
	lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
	typeAssertGen++
	c := rttype.NewCursor(lsym, 0, rttype.TypeAssert)
	c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyTypeAssertCache"))
	c.Field("Inter").WritePtr(reflectdata.TypeLinksym(target))
	c.Field("CanFail").WriteBool(canFail)
	objw.Global(lsym, int32(rttype.TypeAssert.Size()), obj.LOCAL)
	lsym.Gotype = reflectdata.TypeLinksym(rttype.TypeAssert)
	return lsym
}

var typeAssertGen int

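// walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.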
func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.RType = walkExpr(n.RType, init)
	n.ITab = walkExpr(n.ITab, init)

	// If the RType operand turns out to be a statically known type
	// descriptor, convert to a non-dynamic type assertion.
	if n.RType != nil && n.RType.Op() == ir.OADDR {
		addr := n.RType.(*ir.AddrExpr)
		if addr.X.Op() == ir.OLINKSYMOFFSET {
			r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
			if n.Op() == ir.ODYNAMICDOTTYPE2 {
				r.SetOp(ir.ODOTTYPE2)
			}
			r.SetType(n.Type())
			r.SetTypecheck(1)
			return walkExpr(r, init)
		}
	}
	return n
}

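// walkIndex walks an OINDEX node.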
func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// save the original node for bounds checking elision.
	// If it was an ODIV/OMOD, walk might rewrite it.
	r := n.Index

	n.Index = walkExpr(n.Index, init)

	// if the range of the index type cannot exceed the static array bound,
	// disable the bounds check.
	if n.Bounded() {
		return n
	}
	t := n.X.Type()
	if t != nil && t.IsPtr() {
		t = t.Elem()
	}
	if t.IsArray() {
		n.SetBounded(bounded(r, t.NumElem()))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	} else if ir.IsConst(n.X, constant.String) {
		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	}
	return n
}

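// mapKeyArg returns an expression for key that is suitable to be passed as the
// key argument to the runtime map* function selected by fast. n is the map
// indexing or delete node (used for its position).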
func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
	if fast == mapslow {
		// standard version takes key by reference;
		// order made sure key is addressable.
		return typecheck.NodAddr(key)
	}
	if assigned {
		return key
	}

	switch fast {
	case mapfast32ptr:
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
	case mapfast64ptr:
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
	default:
		// fast version takes key by value.
		return key
	}
}

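// walkIndexMap walks an OINDEXMAP node, rewriting the map access into a call
// to the appropriate runtime map function and dereferencing its result.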
func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Index = walkExpr(n.Index, init)
	map_ := n.X
	t := map_.Type()
	fast := mapfast(t)
	key := mapKeyArg(fast, n, n.Index, n.Assigned)
	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}

	var mapFn ir.Node
	switch {
	case n.Assigned:
		mapFn = mapfn(mapassign[fast], t, false)
	case t.Elem().Size() > abi.ZeroValSize:
		// Values too large for the runtime's shared zero value
		// use mapaccess1_fat with an explicit zero address.
		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
		mapFn = mapfn("mapaccess1_fat", t, true)
	default:
		mapFn = mapfn(mapaccess1[fast], t, false)
	}
	call := mkcall1(mapFn, nil, init, args...)
	call.SetType(types.NewPtr(t.Elem()))
	call.MarkNonNil()
	star := ir.NewStarExpr(base.Pos, call)
	star.SetType(t.Elem())
	star.SetTypecheck(1)
	return star
}

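// walkLogical walks an OANDAND or OOROR node.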
func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// cannot put side effects from n.Y on init,
	// because they cannot run before n.X is checked.
	// save elsewhere and store on the eventual n.Y.
	var ll ir.Nodes

	n.Y = walkExpr(n.Y, &ll)
	n.Y = ir.InitExpr(ll, n.Y)
	return n
}

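// walkSend walks an OSEND node.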
func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
	n1 := n.Value
	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
	n1 = walkExpr(n1, init)
	n1 = typecheck.NodAddr(n1)
	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
}

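// walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.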
func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Low = walkExpr(n.Low, init)
	if n.Low != nil && ir.IsZero(n.Low) {
		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
		n.Low = nil
	}
	n.High = walkExpr(n.High, init)
	n.Max = walkExpr(n.Max, init)

	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
		// Reduce x[:] to x.
		if base.Debug.Slice > 0 {
			base.Warn("slice: omit slice operation")
		}
		return n.X
	}
	return n
}

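// walkSliceHeader walks an OSLICEHEADER node.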
func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
	n.Ptr = walkExpr(n.Ptr, init)
	n.Len = walkExpr(n.Len, init)
	n.Cap = walkExpr(n.Cap, init)
	return n
}

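// walkStringHeader walks an OSTRINGHEADER node.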
func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
	n.Ptr = walkExpr(n.Ptr, init)
	n.Len = walkExpr(n.Len, init)
	return n
}

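// bounded reports whether the integer expression n is provably in the range [0, max).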
func bounded(n ir.Node, max int64) bool {
	if n.Type() == nil || !n.Type().IsInteger() {
		return false
	}

	sign := n.Type().IsSigned()
	bits := int32(8 * n.Type().Size())

	if ir.IsSmallIntConst(n) {
		v := ir.Int64Val(n)
		return 0 <= v && v < max
	}

	switch n.Op() {
	case ir.OAND, ir.OANDNOT:
		n := n.(*ir.BinaryExpr)
		v := int64(-1)
		switch {
		case ir.IsSmallIntConst(n.X):
			v = ir.Int64Val(n.X)
		case ir.IsSmallIntConst(n.Y):
			v = ir.Int64Val(n.Y)
			if n.Op() == ir.OANDNOT {
				v = ^v
				if !sign {
					v &= 1<<uint(bits) - 1
				}
			}
		}
		if 0 <= v && v < max {
			return true
		}

	case ir.OMOD:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if 0 <= v && v <= max {
				return true
			}
		}

	case ir.ODIV:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ir.ORSH:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}

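// usemethod checks calls for uses of reflect's Method and MethodByName and,
// when found, marks the calling function (or records the specific method name,
// if it is a string constant) so the linker keeps methods that may be reached
// via reflection.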
func usemethod(n *ir.CallExpr) {
	// Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
	// We only want to mark their callers.
	if base.Ctxt.Pkgpath == "reflect" {
		switch fn := ir.CurFunc.Nname.Sym().Name; {
		case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
			return
		case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
			return
		case fn == "Value.Method", fn == "Value.MethodByName":
			return
		}
	}

	dot, ok := n.Fun.(*ir.SelectorExpr)
	if !ok {
		return
	}

	// Looking for either direct method calls or interface method calls of:
	//	reflect.Type.Method        - func(int) reflect.Method
	//	reflect.Type.MethodByName  - func(string) (reflect.Method, bool)
	//
	//	reflect.Value.Method       - func(int) reflect.Value
	//	reflect.Value.MethodByName - func(string) reflect.Value
	methodName := dot.Sel.Name
	t := dot.Selection.Type

	// Check the number of arguments and return values.
	if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
		return
	}

	// Check the type of the argument.
	switch pKind := t.Param(0).Type.Kind(); {
	case methodName == "Method" && pKind == types.TINT,
		methodName == "MethodByName" && pKind == types.TSTRING:

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	// Check that the first result type is "reflect.Method" or "reflect.Value".
	switch s := t.Result(0).Type.Sym(); {
	case s != nil && types.ReflectSymName(s) == "Method",
		s != nil && types.ReflectSymName(s) == "Value":

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	var targetName ir.Node
	switch dot.Op() {
	case ir.ODOTINTER:
		if methodName == "MethodByName" {
			targetName = n.Args[0]
		}
	case ir.OMETHEXPR:
		if methodName == "MethodByName" {
			targetName = n.Args[1]
		}
	default:
		base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
	}

	if ir.IsConst(targetName, constant.String) {
		name := constant.StringVal(targetName.Val())
		ir.CurFunc.LSym.AddRel(base.Ctxt, obj.Reloc{
			Type: objabi.R_USENAMEDMETHOD,
			Sym:  staticdata.StringSymNoCommon(name),
		})
	} else {
		// We can't retrieve the method name, so fall back to
		// marking the whole function as using reflect methods.
		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
	}
}

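// usefield records the use of a tracked struct field (one whose tag contains
// go:"track") when the fieldtrack experiment is enabled.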
func usefield(n *ir.SelectorExpr) {
	if !buildcfg.Experiment.FieldTrack {
		return
	}

	switch n.Op() {
	default:
		base.Fatalf("usefield %v", n.Op())

	case ir.ODOT, ir.ODOTPTR:
		break
	}

	field := n.Selection
	if field == nil {
		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
	}
	if field.Sym != n.Sel {
		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
	}
	if !strings.Contains(field.Note, "go:\"track\"") {
		return
	}

	outer := n.X.Type()
	if outer.IsPtr() {
		outer = outer.Elem()
	}
	if outer.Sym() == nil {
		base.Errorf("tracked field must be in named struct type")
	}

	sym := reflectdata.TrackSym(outer, field)
	if ir.CurFunc.FieldTrack == nil {
		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
	}
	ir.CurFunc.FieldTrack[sym] = struct{}{}
}