package walk

import (
	"fmt"
	"go/constant"
	"internal/buildcfg"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/staticinit"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/objabi"
	"cmd/internal/src"
)

// This file implements the "order" phase of walk: it rewrites each
// function body so that, by the time the walk rewrites run, every
// expression whose evaluation order matters has been captured in a
// compiler-generated temporary. The pass introduces temporaries for
// values that must be computed exactly once or that must be
// addressable when passed to runtime routines, hoists side effects
// out of complex expressions, and recursively orders nested blocks
// (loop bodies, select and switch cases) with their own pools of
// reusable temporaries.
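//
// For example, in an assignment such as
//
//	m[k] = append(m[k], v)
//
// the map and key expressions must be evaluated only once. The OAS
// case in (*orderState).stmt below detects this shape and copies m and
// k into cheap expressions that are shared by the index on the
// left-hand side and by the append argument on the right-hand side.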

// An orderState holds the working state of the ordering pass for one
// block of statements.
type orderState struct {
	out  []ir.Node             // list of generated statements
	temp []*ir.Name            // stack of temporary variables currently in use
	free map[string][]*ir.Name // free temporaries, keyed by type LinkString
	edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
}

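// order rewrites fn.Body to apply the ordering constraints described
// in the comment at the top of this file. With -W 2 or higher it first
// dumps the body for debugging.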
func order(fn *ir.Func) {
	if base.Flag.W > 1 {
		s := fmt.Sprintf("\nbefore order %v", fn.Sym())
		ir.DumpList(s, fn.Body)
	}
	ir.SetPos(fn)
	orderBlock(&fn.Body, map[string][]*ir.Name{})
}

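// append typechecks stmt and appends it to o.out.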
func (o *orderState) append(stmt ir.Node) {
	o.out = append(o.out, typecheck.Stmt(stmt))
}

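// newTemp allocates a new temporary with the given type, pushes it
// onto the temp stack, and returns it. A free temporary of the same
// type is reused when one is available. If clear is true, newTemp
// emits code to zero the temporary.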
func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
	var v *ir.Name
	key := t.LinkString()
	if a := o.free[key]; len(a) > 0 {
		v = a[len(a)-1]
		if !types.Identical(t, v.Type()) {
			base.Fatalf("expected %L to have type %v", v, t)
		}
		o.free[key] = a[:len(a)-1]
	} else {
		v = typecheck.TempAt(base.Pos, ir.CurFunc, t)
	}
	if clear {
		o.append(ir.NewAssignStmt(base.Pos, v, nil))
	}

	o.temp = append(o.temp, v)
	return v
}

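// copyExpr behaves like newTemp but also emits
// code to initialize the temporary to the value n.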
func (o *orderState) copyExpr(n ir.Node) *ir.Name {
	return o.copyExpr1(n, false)
}

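// copyExprClear is like copyExpr but clears the temporary before the
// assignment. It is used when evaluating "tmp = n" may turn into a
// runtime call that is passed a pointer to the temporary as the output
// space: if that call blocks before tmp has been written, the garbage
// collector still treats the temporary as live, so it must be zeroed
// first. In this file that applies to channel receives and to type
// assertion results that are not direct-interface values.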
func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
	return o.copyExpr1(n, true)
}

func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
	t := n.Type()
	v := o.newTemp(t, clear)
	o.append(ir.NewAssignStmt(base.Pos, v, n))
	return v
}

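// cheapExpr returns a cheap version of n. The definition of cheap is
// that n is a variable or constant, or a len or cap applied to one.
// If n is not already cheap, cheapExpr allocates a new temporary,
// emits tmp = n, and then returns tmp.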
func (o *orderState) cheapExpr(n ir.Node) ir.Node {
	if n == nil {
		return nil
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n
	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)
	}

	return o.copyExpr(n)
}

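// safeExpr returns a safe version of n. The definition of safe is that
// n can appear multiple times without violating the semantics of the
// original program, and that assigning to the safe version has the
// same effect as assigning to the original n.
//
// The intended use is to apply safeExpr to x when rewriting
// x op= y into x = x op y.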
func (o *orderState) safeExpr(n ir.Node) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		var l ir.Node
		if n.X.Type().IsArray() {
			l = o.safeExpr(n.X)
		} else {
			l = o.cheapExpr(n.X)
		}
		r := o.cheapExpr(n.Index)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return typecheck.Expr(a)

	default:
		base.Fatalf("order.safeExpr %v", n.Op())
		return nil
	}
}

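// addrTemp ensures that n is okay to pass by address to runtime
// routines. If the original argument n is not okay, addrTemp creates
// a tmp, emits tmp = n, and then returns tmp. The result of addrTemp
// MUST be assigned back into the original expression, for example
//
//	n.X = o.addrTemp(n.X)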
func (o *orderState) addrTemp(n ir.Node) ir.Node {
	// Nil values, and constant values outside of FIPS builds, can be
	// materialized as read-only static data, whose address is safe to
	// take without a per-call temporary.
	if n.Op() == ir.ONIL || (n.Op() == ir.OLITERAL && !base.Ctxt.IsFIPS()) {
		n = typecheck.DefaultLit(n, nil)
		types.CalcSize(n.Type())
		vstat := readonlystaticname(n.Type())
		var s staticinit.Schedule
		s.StaticAssign(vstat, 0, n, n.Type())
		if s.Out != nil {
			base.Fatalf("staticassign of const generated code: %+v", n)
		}
		vstat = typecheck.Expr(vstat).(*ir.Name)
		return vstat
	}

	// If n is already addressable, it can usually be passed directly.
	// The exception is an SSA-able local variable: taking its address
	// would force it to the stack, so copy it into a temporary instead.
	if ir.IsAddressable(n) {
		if name, ok := ir.OuterValue(n).(*ir.Name); ok && name.Op() == ir.ONAME {
			if name.Class == ir.PAUTO && !name.Addrtaken() && ssa.CanSSA(name.Type()) {
				goto Copy
			}
		}

		return n
	}

Copy:
	return o.copyExpr(n)
}

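// mapKeyTemp prepares n to be used as a key in a map runtime call and
// returns the prepared expression. Keys for the generic ("slow") map
// routines are passed by address via addrTemp; keys for the *_fast*
// routines are converted to the exact parameter type the runtime
// expects (uint32, uint64, unsafe.Pointer, or string). outerPos is
// used when n itself has no unique source position.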
func (o *orderState) mapKeyTemp(outerPos src.XPos, t *types.Type, n ir.Node) ir.Node {
	pos := outerPos
	if ir.HasUniquePos(n) {
		pos = n.Pos()
	}
	// Most map calls need to take the address of the key.
	// Exception: the map*_fast* runtime routines, which take the key by value.
	alg := mapfast(t)
	if alg == mapslow {
		return o.addrTemp(n)
	}
	var kt *types.Type
	switch alg {
	case mapfast32:
		kt = types.Types[types.TUINT32]
	case mapfast64:
		kt = types.Types[types.TUINT64]
	case mapfast32ptr, mapfast64ptr:
		kt = types.Types[types.TUNSAFEPTR]
	case mapfaststr:
		kt = types.Types[types.TSTRING]
	}
	nt := n.Type()
	switch {
	case nt == kt:
		return n
	case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
		// can directly convert (e.g. named types)
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONVNOP, kt, n))
	case nt.IsInteger() && kt.IsInteger():
		// can directly convert (e.g. int32 to uint32)
		if n.Op() == ir.OLITERAL && nt.IsSigned() {
			// avoid constant overflow error
			n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
			n.SetType(kt)
			return n
		}
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, kt, n))
	default:
		// Unsafe cast through memory: store n into a temporary of the
		// runtime's key type and reinterpret it. The temporary must be
		// at least as aligned as n's type for the store to be valid.
		if uint8(kt.Alignment()) < uint8(nt.Alignment()) {
			base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
		}
		tmp := o.newTemp(kt, true)
		// *(*nt)(&tmp) = n
		var e ir.Node = typecheck.NodAddr(tmp)
		e = ir.NewConvExpr(pos, ir.OCONVNOP, nt.PtrTo(), e)
		e = ir.NewStarExpr(pos, e)
		o.append(ir.NewAssignStmt(pos, e, n))
		return tmp
	}
}

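// mapKeyReplaceStrConv replaces OBYTES2STR with OBYTES2STRTMP in n to
// avoid allocating a string when it is used only as a map key, and
// reports whether it made a replacement. It also looks inside struct
// and array literals used as keys, so lookups such as
//
//	x = m[string(k)]
//	x = m[T1{a: T2{b: string(k)}}]
//
// (where k is a []byte) can reuse k's backing array instead of copying
// it. The resulting temporary string is only valid for the duration of
// the map operation.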
func mapKeyReplaceStrConv(n ir.Node) bool {
	var replaced bool
	switch n.Op() {
	case ir.OBYTES2STR:
		n := n.(*ir.ConvExpr)
		n.SetOp(ir.OBYTES2STRTMP)
		replaced = true
	case ir.OSTRUCTLIT:
		n := n.(*ir.CompLitExpr)
		for _, elem := range n.List {
			elem := elem.(*ir.StructKeyExpr)
			if mapKeyReplaceStrConv(elem.Value) {
				replaced = true
			}
		}
	case ir.OARRAYLIT:
		n := n.(*ir.CompLitExpr)
		for _, elem := range n.List {
			if elem.Op() == ir.OKEY {
				elem = elem.(*ir.KeyExpr).Value
			}
			if mapKeyReplaceStrConv(elem) {
				replaced = true
			}
		}
	}
	return replaced
}

// An ordermarker marks a position on the temporary-variable stack.
type ordermarker int

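// markTemp returns the top of the temporary variable stack.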
func (o *orderState) markTemp() ordermarker {
	return ordermarker(len(o.temp))
}

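// popTemp pops temporaries off the stack until reaching the mark
// point, then adds them to the list of free temporaries so they can
// be reused by later statements.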
func (o *orderState) popTemp(mark ordermarker) {
	for _, n := range o.temp[mark:] {
		key := n.Type().LinkString()
		o.free[key] = append(o.free[key], n)
	}
	o.temp = o.temp[:mark]
}

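// stmtList orders each of the statements in the list, giving
// orderMakeSliceCopy a chance to fuse a make/copy pair first.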
func (o *orderState) stmtList(l ir.Nodes) {
	s := l
	for i := range s {
		orderMakeSliceCopy(s[i:])
		o.stmt(s[i])
	}
}

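// orderMakeSliceCopy matches a pair of adjacent statements of the form
//
//	m = make([]T, n)
//	copy(m, s)
//
// and fuses them into a single OMAKESLICECOPY node, clearing the copy
// statement, so that walk can allocate and copy in one operation. The
// rewrite applies only to heap-allocated slices with no cap argument
// whose copy source is a variable distinct from the destination, and
// it is disabled by -N and under instrumentation.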
func orderMakeSliceCopy(s []ir.Node) {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return
	}
	if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
		return
	}

	as := s[0].(*ir.AssignStmt)
	cp := s[1].(*ir.BinaryExpr)
	if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
		as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
		as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
		// The copy destination must be the variable just created by the
		// make, and the copy source must be a different variable, so
		// that the source data exists before the rewritten statement runs.
		return
	}

	mk := as.Y.(*ir.MakeExpr)
	if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
		return
	}
	mk.SetOp(ir.OMAKESLICECOPY)
	mk.Cap = cp.Y
	// Mark the copy as bounded when the make length is exactly len(source).
	mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
	as.Y = typecheck.Expr(mk)
	s[1] = nil
}

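// edge emits one libfuzzer coverage-counter update at the start of the
// current block. It is a no-op unless libfuzzer instrumentation
// (base.Debug.Libfuzzer) is enabled.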
func (o *orderState) edge() {
	if base.Debug.Libfuzzer == 0 {
		return
	}

	// Create a new uint8 counter to be allocated in the libfuzzer
	// 8-bit counters section.
	counter := staticinit.StaticName(types.Types[types.TUINT8])
	counter.SetLibfuzzer8BitCounter(true)
	// Also set the linker symbol type up front so the counter is placed
	// in the SLIBFUZZER_8BIT_COUNTER section.
	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER

	// The counter is incremented each time the block executes, but it
	// never wraps back to zero: once it reaches 0xff it restarts at 1,
	// so a block that has executed at least once always has a nonzero
	// counter. That is:
	//
	//	if counter == 0xff {
	//		counter = 1
	//	} else {
	//		counter += 1
	//	}
	o.append(ir.NewIfStmt(base.Pos,
		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
}

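// orderBlock orders the block of statements in n into a new slice,
// and then replaces the old slice in n with the new slice.
// free is a map that can be used to obtain temporary variables by type.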
func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
	if len(*n) != 0 {
		// Set a reasonable position for any instrumentation emitted at
		// the start of the block (for example, the libfuzzer counter
		// update from o.edge below).
		ir.SetPos((*n)[0])
	}
	var order orderState
	order.free = free
	mark := order.markTemp()
	order.edge()
	order.stmtList(*n)
	order.popTemp(mark)
	*n = order.out
}

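// exprInPlace orders the side effects of n and leaves them as the init
// list of the returned expression, rather than appending them to o.out.
// The result of exprInPlace MUST be assigned back into the original
// expression, for example
//
//	n.Cond = o.exprInPlace(n.Cond)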
func (o *orderState) exprInPlace(n ir.Node) ir.Node {
	var order orderState
	order.free = o.free
	n = order.expr(n, nil)
	n = ir.InitExpr(order.out, n)

	// Transfer the temporaries generated while ordering n to the outer
	// state, so they are eventually freed by the outer popTemp.
	o.temp = append(o.temp, order.temp...)
	return n
}

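// orderStmtInPlace orders the side effects of the single statement n
// and returns the resulting block of statements in its place.
// free is a map that can be used to obtain temporary variables by type.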
func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
	var order orderState
	order.free = free
	mark := order.markTemp()
	order.stmt(n)
	order.popTemp(mark)
	return ir.NewBlockStmt(src.NoXPos, order.out)
}

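// init moves n's init list to o.out.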
func (o *orderState) init(n ir.Node) {
	if ir.MayBeShared(n) {
		// For concurrency safety, don't mutate potentially shared nodes.
		// First, ensure that no work is required here.
		if len(n.Init()) > 0 {
			base.Fatalf("order.init shared node with ninit")
		}
		return
	}
	o.stmtList(ir.TakeInit(n))
}

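// call orders the call expression nn, which is either an
// OCALLFUNC/OCALLINTER call or a builtin that has its own op
// (append, copy, make, and so on). The caller must already have
// moved nn's init list with o.init(nn).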
func (o *orderState) call(nn ir.Node) {
	if len(nn.Init()) > 0 {
		// Caller should have already called o.init(nn).
		base.Fatalf("%v with unexpected ninit", nn.Op())
	}
	if nn.Op() == ir.OCALLMETH {
		base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
	}

	// Builtin functions.
	if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
		switch n := nn.(type) {
		default:
			base.Fatalf("unexpected call: %+v", n)
		case *ir.UnaryExpr:
			n.X = o.expr(n.X, nil)
		case *ir.ConvExpr:
			n.X = o.expr(n.X, nil)
		case *ir.BinaryExpr:
			n.X = o.expr(n.X, nil)
			n.Y = o.expr(n.Y, nil)
		case *ir.MakeExpr:
			n.Len = o.expr(n.Len, nil)
			n.Cap = o.expr(n.Cap, nil)
		case *ir.CallExpr:
			o.exprList(n.Args)
		}
		return
	}

	n := nn.(*ir.CallExpr)
	typecheck.AssertFixedCall(n)

	if ir.IsFuncPCIntrinsic(n) && ir.IsIfaceOfFunc(n.Args[0]) != nil {
		// internal/abi.FuncPCABIxxx applied to a direct reference to a
		// defined function: do not introduce temporaries for the
		// argument, so walk can rewrite the call to a static symbol
		// address reference.
		return
	}

	n.Fun = o.expr(n.Fun, nil)
	o.exprList(n.Args)
}

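// mapAssign appends the assignment n to o.out, first making the
// right-hand side cheap when the left-hand side is a map index, so
// that the RHS is fully evaluated before the map insert begins.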
func (o *orderState) mapAssign(n ir.Node) {
	switch n.Op() {
	default:
		base.Fatalf("order.mapAssign %v", n.Op())

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X.Op() == ir.OINDEXMAP {
			n.Y = o.safeMapRHS(n.Y)
		}
		o.out = append(o.out, n)
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		if n.X.Op() == ir.OINDEXMAP {
			n.Y = o.safeMapRHS(n.Y)
		}
		o.out = append(o.out, n)
	}
}

func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
	// Make sure we evaluate the RHS before starting the map insert:
	// a panic while evaluating the RHS must happen before the map is
	// written.
	if r.Op() == ir.OAPPEND {
		r := r.(*ir.CallExpr)
		s := r.Args[1:]
		for i, n := range s {
			s[i] = o.cheapExpr(n)
		}
		return r
	}
	return o.cheapExpr(r)
}

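// stmt orders the statement n, appending to o.out.
// Temporaries created during the statement are cleaned
// up before the next statement.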
func (o *orderState) stmt(n ir.Node) {
	if n == nil {
		return
	}

	lno := ir.SetPos(n)
	o.init(n)

	switch n.Op() {
	default:
		base.Fatalf("order.stmt %v", n.Op())

	case ir.OINLMARK:
		o.out = append(o.out, n)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		t := o.markTemp()

		// Special case: m[k] = append(m[k], ...). The map and key
		// expressions must be evaluated only once, and both sides must
		// keep referring to the same expressions, so that walk can
		// recognize the pattern and issue a single map assignment.
		mapAppend := n.X.Op() == ir.OINDEXMAP && n.Y.Op() == ir.OAPPEND &&
			ir.SameSafeExpr(n.X, n.Y.(*ir.CallExpr).Args[0])

		n.X = o.expr(n.X, nil)
		if mapAppend {
			indexLHS := n.X.(*ir.IndexExpr)
			indexLHS.X = o.cheapExpr(indexLHS.X)
			indexLHS.Index = o.cheapExpr(indexLHS.Index)

			call := n.Y.(*ir.CallExpr)
			arg0 := call.Args[0]
			// Peel away any implicit conversions wrapping the map index.
			for arg0.Op() == ir.OCONVNOP {
				arg0 = arg0.(*ir.ConvExpr).X
			}
			indexRHS := arg0.(*ir.IndexExpr)
			indexRHS.X = indexLHS.X
			indexRHS.Index = indexLHS.Index

			o.exprList(call.Args[1:])
		} else {
			n.Y = o.expr(n.Y, n.X)
		}
		o.mapAssign(n)
		o.popTemp(t)

	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
			// Rewrite m[k] op= r into m[k] = m[k] op r so that we can
			// ensure that if op panics (for example, a division by
			// zero), the panic happens before the map assignment.
			// Instrumented builds always take this path.
			l1 := o.safeExpr(n.X)
			l2 := ir.DeepCopy(src.NoXPos, l1)
			if l2.Op() == ir.OINDEXMAP {
				l2 := l2.(*ir.IndexExpr)
				l2.Assigned = false
			}
			l2 = o.copyExpr(l2)
			r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
			as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
			o.mapAssign(as)
			o.popTemp(t)
			return
		}

		o.mapAssign(n)
		o.popTemp(t)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)
		o.exprList(n.Rhs)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: the multi-value function call on the right-hand side is
	// not copied; as2func assigns its results through temporaries to
	// preserve left-to-right assignment semantics.
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)
		call := n.Rhs[0]
		o.init(call)
		if ic, ok := call.(*ir.InlinedCallExpr); ok {
			o.stmtList(ic.Body)

			// The call was inlined: treat this as a plain OAS2 from the
			// inlined call's result variables.
			n.SetOp(ir.OAS2)
			n.Rhs = ic.ReturnVars

			o.exprList(n.Rhs)
			o.out = append(o.out, n)
		} else {
			o.call(call)
			o.as2func(n)
		}
		o.popTemp(t)

	// Special: x, ok = <-ch, x, ok = m[k], x, ok = v.(T). Use temporary
	// variables to hold the results, so that the runtime can take the
	// address of the temporaries; as2ok then assigns the temporaries to
	// the real destinations.
	case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)

		switch r := n.Rhs[0]; r.Op() {
		case ir.ODOTTYPE2:
			r := r.(*ir.TypeAssertExpr)
			r.X = o.expr(r.X, nil)
		case ir.ODYNAMICDOTTYPE2:
			r := r.(*ir.DynamicTypeAssertExpr)
			r.X = o.expr(r.X, nil)
			r.RType = o.expr(r.RType, nil)
			r.ITab = o.expr(r.ITab, nil)
		case ir.ORECV:
			r := r.(*ir.UnaryExpr)
			r.X = o.expr(r.X, nil)
		case ir.OINDEXMAP:
			r := r.(*ir.IndexExpr)
			r.X = o.expr(r.X, nil)
			r.Index = o.expr(r.Index, nil)
			// See the similar conversion in the OINDEXMAP expression case below.
			_ = mapKeyReplaceStrConv(r.Index)
			r.Index = o.mapKeyTemp(r.Pos(), r.X.Type(), r.Index)
		default:
			base.Fatalf("order.stmt: %v", r.Op())
		}

		o.as2ok(n)
		o.popTemp(t)

	// Special: the block is flattened into o.out, so the OBLOCK node
	// itself is not appended.
	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		o.stmtList(n.List)

	// Special: these statements carry no expressions; append as is.
	case ir.OBREAK,
		ir.OCONTINUE,
		ir.ODCL,
		ir.OFALL,
		ir.OGOTO,
		ir.OLABEL,
		ir.OTAILCALL:
		o.out = append(o.out, n)

	// Special: order the call's arguments; the statement itself is
	// appended unchanged.
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		o.call(n)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)

		// The result variables of a statement-level inlined call are
		// unused; just check that referencing them has no side effects.
		for _, result := range n.ReturnVars {
			if staticinit.AnySideEffects(result) {
				base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
			}
		}

	case ir.OCHECKNIL, ir.OCLEAR, ir.OCLOSE, ir.OPANIC, ir.ORECV:
		n := n.(*ir.UnaryExpr)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OCOPY:
		n := n.(*ir.BinaryExpr)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OPRINT, ir.OPRINTLN, ir.ORECOVERFP:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		o.call(n)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: order the arguments of the deferred or go'd call,
	// but not the call itself.
	case ir.ODEFER, ir.OGO:
		n := n.(*ir.GoDeferStmt)
		t := o.markTemp()
		o.init(n.Call)
		o.call(n.Call)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		n.Args[0] = o.expr(n.Args[0], nil)
		n.Args[1] = o.expr(n.Args[1], nil)
		n.Args[1] = o.mapKeyTemp(n.Pos(), n.Args[0].Type(), n.Args[1])
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: the loop condition is ordered in place so that its side
	// effects are re-evaluated on every iteration; the body and post
	// statement are ordered as their own blocks.
	case ir.OFOR:
		n := n.(*ir.ForStmt)
		t := o.markTemp()
		n.Cond = o.exprInPlace(n.Cond)
		orderBlock(&n.Body, o.free)
		n.Post = orderStmtInPlace(n.Post, o.free)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: the condition's temporaries are released before the
	// branches, and each branch is ordered as its own block.
	case ir.OIF:
		n := n.(*ir.IfStmt)
		t := o.markTemp()
		n.Cond = o.exprInPlace(n.Cond)
		o.popTemp(t)
		orderBlock(&n.Body, o.free)
		orderBlock(&n.Else, o.free)
		o.out = append(o.out, n)

	case ir.ORANGE:
		// n.X is the expression being ranged over. After ordering it,
		// it is usually copied into a temporary so that changes made
		// during the loop cannot affect the iteration. The copy is
		// normally cheap (a slice header, channel pointer, string
		// header, or integer); the expensive case is ranging over an
		// array value, which is uncommon.
		//
		// Mark a []byte(str) range expression to reuse the string's
		// backing storage: this is safe because the storage is not
		// mutated during the loop.
		n := n.(*ir.RangeStmt)
		if x, ok := n.X.(*ir.ConvExpr); ok {
			switch x.Op() {
			case ir.OSTR2BYTES:
				x.SetOp(ir.OSTR2BYTESTMP)
				fallthrough
			case ir.OSTR2BYTESTMP:
				x.MarkNonNil()
			}
		}

		t := o.markTemp()
		n.X = o.expr(n.X, nil)

		orderBody := true
		xt := typecheck.RangeExprType(n.X.Type())
		switch k := xt.Kind(); {
		default:
			base.Fatalf("order.stmt range %v", n.Type())

		case types.IsInt[k]:
			// Range over an integer: the count was ordered above and is
			// used only once, so no copy is needed.

		case k == types.TARRAY, k == types.TSLICE:
			if n.Value == nil || ir.IsBlank(n.Value) {
				// for i := range x only uses x once, to compute len(x);
				// no need to copy it.
				break
			}
			fallthrough

		case k == types.TCHAN, k == types.TSTRING:
			// Chan, string, slice, and array ranges use the value
			// multiple times, so make a copy.
			r := n.X

			if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
				r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
				r.SetType(types.Types[types.TSTRING])
				r = typecheck.Expr(r)
			}

			n.X = o.copyExpr(r)

		case k == types.TMAP:
			if isMapClear(n) {
				// Preserve the body of the map range so that the map
				// clear pattern can still be recognized during walk.
				orderBody = false
				break
			}

			// Copy the map expression in case it is a map literal or
			// otherwise has side effects; the copy is only one word.
			r := n.X
			n.X = o.copyExpr(r)

			// Preallocate the hidden map iterator. The iterator type
			// contains pointers, so the temporary must be zeroed.
			if buildcfg.Experiment.SwissMap {
				n.Prealloc = o.newTemp(reflectdata.SwissMapIterType(), true)
			} else {
				n.Prealloc = o.newTemp(reflectdata.OldMapIterType(), true)
			}
		}
		n.Key = o.exprInPlace(n.Key)
		n.Value = o.exprInPlace(n.Value)
		if orderBody {
			orderBlock(&n.Body, o.free)
		}
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		o.exprList(n.Results)
		o.out = append(o.out, n)

	// Special: order the communication clauses of a select statement.
	// Evaluation order for select is stricter than for ordinary
	// expressions: every channel operand and send value must be hoisted
	// into a temporary before the select runs, because the runtime may
	// not evaluate the cases in source order.
	case ir.OSELECT:
		n := n.(*ir.SelectStmt)
		t := o.markTemp()
		for _, ncas := range n.Cases {
			r := ncas.Comm
			ir.SetPos(ncas)

			// A select case should have no init statements of its own
			// at this point; anything that needs ordering is handled
			// explicitly below.
			if len(ncas.Init()) != 0 {
				base.Fatalf("order select ninit")
			}
			if r == nil {
				continue
			}
			switch r.Op() {
			default:
				ir.Dump("select case", r)
				base.Fatalf("unknown op in select %v", r.Op())

			case ir.OSELRECV2:
				// case x, ok = <-ch
				r := r.(*ir.AssignListStmt)
				recv := r.Rhs[0].(*ir.UnaryExpr)
				recv.X = o.expr(recv.X, nil)
				if !ir.IsAutoTmp(recv.X) {
					recv.X = o.copyExpr(recv.X)
				}
				init := ir.TakeInit(r)

				colas := r.Def
				do := func(i int, t *types.Type) {
					n := r.Lhs[i]
					if ir.IsBlank(n) {
						return
					}
					// If this is case x := <-ch or case x, ok := <-ch, the
					// case carries ODCL nodes declaring x and ok. Delay the
					// declarations until the case body by removing them
					// from init here and recreating them in the case's
					// init list below.
					if colas {
						if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
							init = init[1:]

							// Skip the default initialization assignment
							// that may follow the declaration.
							if len(init) > 0 && init[0].Op() == ir.OAS && init[0].(*ir.AssignStmt).X == n {
								init = init[1:]
							}
						}
						dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
						ncas.PtrInit().Append(dcl)
					}
					tmp := o.newTemp(t, t.HasPointers())
					as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
					ncas.PtrInit().Append(as)
					r.Lhs[i] = tmp
				}
				do(0, recv.X.Type().Elem())
				do(1, types.Types[types.TBOOL])
				if len(init) != 0 {
					ir.DumpList("ninit", init)
					base.Fatalf("ninit on select recv")
				}
				orderBlock(ncas.PtrInit(), o.free)

			case ir.OSEND:
				r := r.(*ir.SendStmt)
				if len(r.Init()) != 0 {
					ir.DumpList("ninit", r.Init())
					base.Fatalf("ninit on select send")
				}

				// case ch <- x
				// Both the channel and the value must be temporaries so
				// the runtime sees fully evaluated operands.
				r.Chan = o.expr(r.Chan, nil)

				if !ir.IsAutoTmp(r.Chan) {
					r.Chan = o.copyExpr(r.Chan)
				}
				r.Value = o.expr(r.Value, nil)
				if !ir.IsAutoTmp(r.Value) {
					r.Value = o.copyExpr(r.Value)
				}
			}
		}

		// Now that all the cases' temporaries exist, order each case
		// body and move the assignments generated above from the case's
		// init list to the front of its body.
		for _, cas := range n.Cases {
			orderBlock(&cas.Body, o.free)

			cas.Body.Prepend(ir.TakeInit(cas)...)
		}

		o.out = append(o.out, n)
		o.popTemp(t)

	// Special: the value being sent is passed to the runtime by
	// address, so it must be addressable.
	case ir.OSEND:
		n := n.(*ir.SendStmt)
		t := o.markTemp()
		n.Chan = o.expr(n.Chan, nil)
		n.Value = o.expr(n.Value, nil)
		if base.Flag.Cfg.Instrumenting {
			// Force copying to the stack so that (chan T)(nil) <- x
			// is still instrumented as a read of x.
			n.Value = o.copyExpr(n.Value)
		} else {
			n.Value = o.addrTemp(n.Value)
		}
		o.out = append(o.out, n)
		o.popTemp(t)

	// Note: temporaries are not cleaned per case here. Because
	// walkSwitch may rewrite the switch into a binary search,
	// per-case cleanup is not straightforward, so all temporaries
	// are simply released at the end of the statement.
	case ir.OSWITCH:
		n := n.(*ir.SwitchStmt)
		if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
			// Add empty "default:" case for instrumentation.
			n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
		}

		t := o.markTemp()
		n.Tag = o.expr(n.Tag, nil)
		for _, ncas := range n.Cases {
			o.exprListInPlace(ncas.List)
			orderBlock(&ncas.Body, o.free)
		}

		o.out = append(o.out, n)
		o.popTemp(t)
	}

	base.Pos = lno
}

// hasDefaultCase reports whether the switch statement n has a default case.
func hasDefaultCase(n *ir.SwitchStmt) bool {
	for _, ncas := range n.Cases {
		if len(ncas.List) == 0 {
			return true
		}
	}
	return false
}

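// exprList orders the expression list l into o.out.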
func (o *orderState) exprList(l ir.Nodes) {
	s := l
	for i := range s {
		s[i] = o.expr(s[i], nil)
	}
}

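// exprListInPlace orders the expression list l but saves
// the side effects on the individual expression init lists.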
func (o *orderState) exprListInPlace(l ir.Nodes) {
	s := l
	for i := range s {
		s[i] = o.exprInPlace(s[i])
	}
}

// exprNoLHS is like expr, but assumes that lhs == nil; having a method
// with this signature lets it be cached in o.edit.
func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
	return o.expr(n, nil)
}

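// expr orders a single expression, appending side effects to o.out as
// needed. If this is part of an assignment lhs = n, lhs is given;
// otherwise lhs is nil. (When lhs is non-nil it may be possible to
// avoid copying the result of the expression to a temporary.)
// The result of expr MUST be assigned back into the original
// expression, for example
//
//	n.X = o.expr(n.X, lhs)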
func (o *orderState) expr(n, lhs ir.Node) ir.Node {
	if n == nil {
		return n
	}
	lno := ir.SetPos(n)
	n = o.expr1(n, lhs)
	base.Pos = lno
	return n
}

func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
	o.init(n)

	switch n.Op() {
	default:
		if o.edit == nil {
			o.edit = o.exprNoLHS
		}
		ir.EditChildren(n, o.edit)
		return n

	// Addition of strings turns into a function call, so the operands
	// are ordered here and, for long concatenations, a backing array
	// for the argument slice is preallocated.
	case ir.OADDSTR:
		n := n.(*ir.AddStringExpr)
		o.exprList(n.List)

		if len(n.List) > 5 {
			t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
			n.Prealloc = o.newTemp(t, false)
		}

		// Mark string(byteSlice) arguments to reuse the byte slice's
		// backing memory, but only when the concatenation also contains
		// a nonempty string literal, so that the result of the
		// concatenation cannot simply alias the converted operand.
		hasbyte := false

		haslit := false
		for _, n1 := range n.List {
			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
		}

		if haslit && hasbyte {
			for _, n2 := range n.List {
				if n2.Op() == ir.OBYTES2STR {
					n2 := n2.(*ir.ConvExpr)
					n2.SetOp(ir.OBYTES2STRTMP)
				}
			}
		}
		return n

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		n.X = o.expr(n.X, nil)
		n.Index = o.expr(n.Index, nil)
		needCopy := false

		if !n.Assigned {
			// Enforce that any []byte slices we are not copying
			// can not be changed before the map index by forcing
			// the map index to happen immediately following the
			// conversions. See copyExpr a few lines below.
			needCopy = mapKeyReplaceStrConv(n.Index)

			if base.Flag.Cfg.Instrumenting {
				// Race detector needs the copy.
				needCopy = true
			}
		}

		// The key may need to be placed in a temporary of the exact
		// type the map runtime routines expect.
		n.Index = o.mapKeyTemp(n.Pos(), n.X.Type(), n.Index)
		if needCopy {
			return o.copyExpr(n)
		}
		return n

	// Concrete-type to interface conversion: the value may need to be
	// addressable so its address can be passed to the conversion's
	// runtime helper.
	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		n.X = o.expr(n.X, nil)
		if n.X.Type().IsInterface() {
			return n
		}
		if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
			// Need a temp if we need to pass the address to the
			// conversion function. Static composite literals are also
			// handled here, producing a named static value whose address
			// can be stored directly in the interface (see the
			// OCONVIFACE case in walk).
			n.X = o.addrTemp(n.X)
		}
		return n

	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.X.Op() == ir.OCALLMETH {
			base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
		}
		if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
			call := n.X.(*ir.CallExpr)
			// When reordering unsafe.Pointer(f()) into a separate
			// statement, the conversion and function call must stay
			// together.
			o.init(call)
			o.call(call)
			if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
				return o.copyExpr(n)
			}
		} else {
			n.X = o.expr(n.X, nil)
		}
		return n

	case ir.OANDAND, ir.OOROR:
		// ... = LHS && RHS
		//
		// is rewritten as
		//
		//	var r bool
		//	r = LHS
		//	if r {       // or !r, for ||
		//		r = RHS
		//	}
		//	... = r
		n := n.(*ir.LogicalExpr)
		r := o.newTemp(n.Type(), false)

		// Evaluate left-hand side.
		lhs := o.expr(n.X, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))

		// Evaluate right-hand side into a separate statement list so it
		// can be placed under the conditional.
		saveout := o.out
		o.out = nil
		t := o.markTemp()
		o.edge()
		rhs := o.expr(n.Y, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
		o.popTemp(t)
		gen := o.out
		o.out = saveout

		// If the left-hand side does not short-circuit, run the
		// right-hand side.
		nif := ir.NewIfStmt(base.Pos, r, nil, nil)
		if n.Op() == ir.OANDAND {
			nif.Body = gen
		} else {
			nif.Else = gen
		}
		o.out = append(o.out, nif)
		return r

	case ir.OCALLMETH:
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
		panic("unreachable")

	case ir.OCALLFUNC,
		ir.OCALLINTER,
		ir.OCAP,
		ir.OCOMPLEX,
		ir.OCOPY,
		ir.OIMAG,
		ir.OLEN,
		ir.OMAKECHAN,
		ir.OMAKEMAP,
		ir.OMAKESLICE,
		ir.OMAKESLICECOPY,
		ir.OMAX,
		ir.OMIN,
		ir.ONEW,
		ir.OREAL,
		ir.ORECOVERFP,
		ir.OSTR2BYTES,
		ir.OSTR2BYTESTMP,
		ir.OSTR2RUNES:

		if isRuneCount(n) {
			// len([]rune(s)) is special-cased in walk to count runes
			// without materializing the []rune slice, so only the
			// string operand needs ordering.
			conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
			conv.X = o.expr(conv.X, nil)
		} else {
			o.call(n)
		}

		if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
			return o.copyExpr(n)
		}
		return n

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)
		return n.SingleResult()

	case ir.OAPPEND:
		// Check for append(x, make([]T, y)...).
		n := n.(*ir.CallExpr)
		if isAppendOfMake(n) {
			n.Args[0] = o.expr(n.Args[0], nil)
			mk := n.Args[1].(*ir.MakeExpr)
			mk.Len = o.expr(mk.Len, nil)
		} else {
			o.exprList(n.Args)
		}

		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
			return o.copyExpr(n)
		}
		return n

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		n.X = o.expr(n.X, nil)
		n.Low = o.cheapExpr(o.expr(n.Low, nil))
		n.High = o.cheapExpr(o.expr(n.High, nil))
		n.Max = o.cheapExpr(o.expr(n.Max, nil))
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
			return o.copyExpr(n)
		}
		return n

	case ir.OCLOSURE:
		n := n.(*ir.ClosureExpr)
		if n.Transient() && len(n.Func.ClosureVars) > 0 {
			n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
		}
		return n

	case ir.OMETHVALUE:
		n := n.(*ir.SelectorExpr)
		n.X = o.expr(n.X, nil)
		if n.Transient() {
			t := typecheck.MethodValueType(n)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		o.exprList(n.List)
		if n.Transient() {
			t := types.NewArray(n.Type().Elem(), n.Len)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		n.X = o.expr(n.X, nil)
		if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
			return o.copyExprClear(n)
		}
		return n

	case ir.ORECV:
		n := n.(*ir.UnaryExpr)
		n.X = o.expr(n.X, nil)
		return o.copyExprClear(n)

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		t := n.X.Type()
		switch {
		case t.IsString():
			// Mark string(byteSlice) arguments to reuse the byte slice's
			// backing memory. This is safe because the comparison does
			// not retain its operands.
			if n.X.Op() == ir.OBYTES2STR {
				n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}
			if n.Y.Op() == ir.OBYTES2STR {
				n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}

		case t.IsStruct() || t.IsArray():
			// Struct and array comparisons may be lowered to runtime
			// calls that take pointers, so both operands must be
			// addressable.
			n.X = o.addrTemp(n.X)
			n.Y = o.addrTemp(n.Y)
		}
		return n

	case ir.OMAPLIT:
		// Order a map literal by converting
		//
		//	map[int]int{
		//		a(): b(),
		//		c(): d(),
		//		e(): f(),
		//	}
		//
		// into
		//
		//	m := map[int]int{}
		//	m[a()] = b()
		//	m[c()] = d()
		//	m[e()] = f()
		//
		// and then ordering the result, so that each key and value is
		// evaluated and inserted one pair at a time rather than all of
		// them being computed before any of the stores. Entries that
		// are fully static stay in the literal itself.
		n := n.(*ir.CompLitExpr)
		entries := n.List
		statics := entries[:0]
		var dynamics []*ir.KeyExpr
		for _, r := range entries {
			r := r.(*ir.KeyExpr)

			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			// Recursively ordering some static entries can change them
			// into dynamic entries (for example, OCONVIFACE nodes), so
			// check again after ordering.
			r = o.expr(r, nil).(*ir.KeyExpr)
			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			statics = append(statics, r)
		}
		n.List = statics

		if len(dynamics) == 0 {
			return n
		}

		// Emit the creation of the map containing all the static entries.
		m := o.newTemp(n.Type(), false)
		as := ir.NewAssignStmt(base.Pos, m, n)
		typecheck.Stmt(as)
		o.stmt(as)

		// Emit eval+insert of dynamic entries, one at a time.
		for _, r := range dynamics {
			lhs := typecheck.AssignExpr(ir.NewIndexExpr(base.Pos, m, r.Key)).(*ir.IndexExpr)
			base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
			lhs.RType = n.RType

			as := ir.NewAssignStmt(base.Pos, lhs, r.Value)
			typecheck.Stmt(as)
			o.stmt(as)
		}

		// Remember that we issued these assignments so we can include
		// their count in the map allocation hint. This assumes the keys
		// are distinct; duplicates only make the hint an overcount,
		// which at worst wastes a little space.
		n.Len += int64(len(dynamics))

		return m
	}

	// No return - type-assertions above. Each case must return for itself.
}

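// as2func orders OAS2FUNC nodes. It creates temporaries to ensure
// left-to-right assignment: the results of the call are assigned to
// the temporaries first, and the temporaries are then assigned to the
// real destinations, so that
//
//	a, b, a2 = fn()
//
// behaves as if it were
//
//	t1, t2, t3 = fn()
//	a, b, a2 = t1, t2, t3
//
// The caller should have ordered the right-hand side of the assignment
// before calling as2func.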
func (o *orderState) as2func(n *ir.AssignListStmt) {
	results := n.Rhs[0].Type()
	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
	for i, nl := range n.Lhs {
		if !ir.IsBlank(nl) {
			typ := results.Field(i).Type
			tmp := o.newTemp(typ, typ.HasPointers())
			n.Lhs[i] = tmp
			as.Lhs = append(as.Lhs, nl)
			as.Rhs = append(as.Rhs, tmp)
		}
	}

	o.out = append(o.out, n)
	o.stmt(typecheck.Stmt(as))
}

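// as2ok orders OAS2XXX with ok. Just like as2func, it introduces
// temporaries to ensure left-to-right assignment.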
func (o *orderState) as2ok(n *ir.AssignListStmt) {
	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)

	do := func(i int, typ *types.Type) {
		if nl := n.Lhs[i]; !ir.IsBlank(nl) {
			var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
			n.Lhs[i] = tmp
			as.Lhs = append(as.Lhs, nl)
			if i == 1 {
				// The "ok" result is an untyped boolean according to the
				// Go spec; convert it explicitly to the LHS type in case
				// the latter is a named boolean type.
				tmp = typecheck.Conv(tmp, nl.Type())
			}
			as.Rhs = append(as.Rhs, tmp)
		}
	}

	do(0, n.Rhs[0].Type())
	do(1, types.Types[types.TBOOL])

	o.out = append(o.out, n)
	o.stmt(typecheck.Stmt(as))
}