1
2
3
4
5 package ssa
6
7 import (
8 "cmd/compile/internal/reflectdata"
9 "cmd/compile/internal/types"
10 "cmd/internal/obj"
11 "cmd/internal/objabi"
12 "cmd/internal/src"
13 "fmt"
14 "internal/buildcfg"
15 )
16
17
18
19
20
21
22
// A ZeroRegion records parts of an object which are known to be zero.
// A ZeroRegion only applies to a single memory state (it is keyed by the
// ID of a memory value in the zeroes map; see computeZeroMap).
// Each bit in mask is set if the corresponding pointer-sized word of
// the base object is known to be zero; only the first 64 words of the
// object are tracked.
type ZeroRegion struct {
	base *Value // pointer to the object this region describes
	mask uint64 // bit i set => word i of *base is known zero
}
27
28
29
30 func mightBeHeapPointer(v *Value) bool {
31 if IsGlobalAddr(v) {
32 return false
33 }
34 return true
35 }
36
37
38
39
// mightContainHeapPointer reports whether the data currently at addresses
// [ptr, ptr+size) might contain heap pointers. "Currently" means at the
// memory state mem. zeroes maps memory-value IDs to known-zero regions
// (see computeZeroMap) and is used to prove the region is still all zero.
func mightContainHeapPointer(ptr *Value, size int64, mem *Value, zeroes map[ID]ZeroRegion) bool {
	if IsReadOnlyGlobalAddr(ptr) {
		// The read-only data section cannot contain heap pointers.
		return false
	}

	// Find the base pointer and total offset by stripping OpOffPtr chains.
	// Ideally the base is the result of a new(T) (see IsNewObject).
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}

	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		// Offset is not word-aligned; be conservative.
		return true
	}
	if size%ptrSize != 0 {
		ptr.Fatalf("unaligned pointer write")
	}
	if off < 0 || off+size > 64*ptrSize {
		// Range goes outside the first 64 words we track.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		// Not the object we have zero information about at this memory state.
		return true
	}
	// Mask covering the words [off, off+size) being asked about.
	m := (uint64(1)<<(size/ptrSize) - 1) << (off / ptrSize)

	if z.mask&m == m {
		// All queried words are known zero, so there are no heap pointers.
		return false
	}
	return true
}
80
81
82
83
// needwb reports whether store op v needs a write barrier.
// v must be a Store, Move, or Zero.
// zeroes provides known-zero information keyed by memory-value ID.
func needwb(v *Value, zeroes map[ID]ZeroRegion) bool {
	t, ok := v.Aux.(*types.Type)
	if !ok {
		v.Fatalf("store aux is not a type: %s", v.LongString())
	}
	if !t.HasPointers() {
		// No pointers being stored, so no barrier needed.
		return false
	}
	dst := v.Args[0]
	if IsStackAddr(dst) {
		// Writes to the stack never need a write barrier.
		return false
	}
	// If the destination might already contain heap pointers,
	// the barrier is needed to record the overwritten values.
	if mightContainHeapPointer(dst, t.Size(), v.MemoryArg(), zeroes) {
		return true
	}
	// Otherwise, check whether the values being written might be
	// heap pointers. If they can't be, no barrier is needed.
	switch v.Op {
	case OpStore:
		if !mightBeHeapPointer(v.Args[1]) {
			return false
		}
	case OpZero:
		// Zeroing writes nothing but zeros — never a heap pointer.
		return false
	case OpMove:
		if !mightContainHeapPointer(v.Args[1], t.Size(), v.Args[2], zeroes) {
			return false
		}
	default:
		v.Fatalf("store op unknown: %s", v.LongString())
	}
	return true
}
119
120
121 func needWBsrc(v *Value) bool {
122 return !IsGlobalAddr(v)
123 }
124
125
126
// needWBdst reports whether the GC needs to see what is currently stored
// at *ptr when ptr is the destination of a pointer store at memory state
// mem. If the destination word is known to be zero (per zeroes), the old
// value need not be recorded.
func needWBdst(ptr, mem *Value, zeroes map[ID]ZeroRegion) bool {
	// Strip OpOffPtr chains to find the base object and total offset.
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}
	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		// Unaligned offset; be conservative.
		return true
	}
	if off < 0 || off >= 64*ptrSize {
		// Write is outside the 64 words we track.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		// Not the object we have zero information about.
		return true
	}
	// The old value only needs reporting if its word is NOT known zero.
	return z.mask>>uint(off/ptrSize)&1 == 0
}
150
151
152
153
154
155
156
157
158
159
160
161
162
163
// writebarrier inserts write barriers for store ops (Store, Move, Zero)
// when needwb says they are necessary. It rewrites each eligible store
// into a flag test branching to a "then" block that fills GC write-barrier
// buffer entries (via OpWB) and/or calls wbZero/wbMove, followed by the
// original raw store in a merge block. A contiguous sequence of barrier
// stores in one block shares a single branch.
func writebarrier(f *Func) {
	if !f.fe.UseWriteBarrier() {
		return
	}

	// Number of write-barrier buffer entries we may fill per OpWB call
	// (curCall.AuxInt counts entries used; a new OpWB is issued when full).
	const maxEntries = 8

	var sb, sp, wbaddr, const0 *Value
	var cgoCheckPtrWrite, cgoCheckMemmove *obj.LSym
	var wbZero, wbMove *obj.LSym
	var stores, after []*Value
	var sset, sset2 *sparseSet
	var storeNumber []int32

	// Map from a call value's ID to the SelectN [1] (memory) value using it.
	select1 := f.Cache.allocValueSlice(f.NumValues())
	defer func() { f.Cache.freeValueSlice(select1) }()
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if v.Op != OpSelectN {
				continue
			}
			if v.AuxInt != 1 {
				continue
			}
			select1[v.Args[0].ID] = v
		}
	}

	zeroes := f.computeZeroMap(select1)
	for _, b := range f.Blocks {
		// First, identify all the stores that need a write barrier and
		// mark them with temporary WB ops; count them in nWBops.
		nWBops := 0 // count of WB ops in this block still to be rewritten
		for _, v := range b.Values {
			switch v.Op {
			case OpStore, OpMove, OpZero:
				if needwb(v, zeroes) {
					switch v.Op {
					case OpStore:
						v.Op = OpStoreWB
					case OpMove:
						v.Op = OpMoveWB
					case OpZero:
						v.Op = OpZeroWB
					}
					nWBops++
				}
			}
		}
		if nWBops == 0 {
			continue
		}

		if wbaddr == nil {
			// Lazily initialize values shared by all barriers:
			// SP/SB, the address of runtime.writeBarrier, the runtime
			// symbols we call, and the constant 0 for the flag compare.
			initpos := f.Entry.Pos
			sp, sb = f.spSb()
			wbsym := f.fe.Syslook("writeBarrier")
			wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.Config.Types.UInt32Ptr, wbsym, sb)
			wbZero = f.fe.Syslook("wbZero")
			wbMove = f.fe.Syslook("wbMove")
			if buildcfg.Experiment.CgoCheck2 {
				cgoCheckPtrWrite = f.fe.Syslook("cgoCheckPtrWrite")
				cgoCheckMemmove = f.fe.Syslook("cgoCheckMemmove")
			}
			const0 = f.ConstInt32(f.Config.Types.UInt32, 0)

			// Allocate auxiliary structures for store ordering.
			sset = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset)
			sset2 = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset2)
			storeNumber = f.Cache.allocInt32Slice(f.NumValues())
			defer f.Cache.freeInt32Slice(storeNumber)
		}

		// Put this block's values in store order.
		b.Values = storeOrder(b.Values, sset, storeNumber)
	again:
		// Find the last contiguous sequence of WB stores (scanning
		// backwards). A branch will be inserted before it; values after
		// it move to a new block. Up to two plain non-pointer OpStores
		// may be absorbed into the sequence, but not when the sequence
		// contains a MoveWB (the plain store could feed the move's
		// source, so it must stay ordered before the wbMove call).
		var last *Value
		var start, end int
		var nonPtrStores int
		values := b.Values
		hasMove := false
	FindSeq:
		for i := len(values) - 1; i >= 0; i-- {
			w := values[i]
			switch w.Op {
			case OpStoreWB, OpMoveWB, OpZeroWB:
				start = i
				if last == nil {
					last = w
					end = i + 1
				}
				nonPtrStores = 0
				if w.Op == OpMoveWB {
					hasMove = true
				}
			case OpVarDef, OpVarLive:
				continue
			case OpStore:
				if last == nil {
					continue
				}
				nonPtrStores++
				if nonPtrStores > 2 {
					break FindSeq
				}
				if hasMove {
					// This store must happen before any wbMove we
					// issue: even though it is not barrier-eligible
					// itself, it might store (e.g. to the stack) the
					// very data a later MoveWB reads as its source.
					break FindSeq
				}
			default:
				if last == nil {
					continue
				}
				break FindSeq
			}
		}
		stores = append(stores[:0], b.Values[start:end]...) // copy to avoid aliasing with b.Values
		after = append(after[:0], b.Values[end:]...)
		b.Values = b.Values[:start]

		// Memory state and position just before the WB store sequence.
		mem := stores[0].MemoryArg()
		pos := stores[0].Pos

		// If the source of a MoveWB is "volatile" (stack argument space
		// that a call would clobber — see isVolatile), copy it to a
		// temporary before the barrier branch, because marshaling the
		// wbMove call's own arguments could overwrite it.
		// More than one such source is unlikely, so a linear scan of
		// volatiles suffices.
		type volatileCopy struct {
			src *Value // original volatile source address
			tmp *Value // address of the temporary holding the copy
		}
		var volatiles []volatileCopy

		if !(f.ABIDefault == f.ABI1 && len(f.Config.intParamRegs) >= 3) {
			// Only needed when the wb calls pass arguments on the stack
			// (3 registers cover the largest call: fn(typ, dst, src)).
		copyLoop:
			for _, w := range stores {
				if w.Op == OpMoveWB {
					val := w.Args[1]
					if isVolatile(val) {
						for _, c := range volatiles {
							if val == c.src {
								continue copyLoop // already copied
							}
						}

						t := val.Type.Elem()
						tmp := f.NewLocal(w.Pos, t)
						mem = b.NewValue1A(w.Pos, OpVarDef, types.TypeMem, tmp, mem)
						tmpaddr := b.NewValue2A(w.Pos, OpLocalAddr, t.PtrTo(), tmp, sp, mem)
						siz := t.Size()
						mem = b.NewValue3I(w.Pos, OpMove, types.TypeMem, siz, tmpaddr, val, mem)
						mem.Aux = t
						volatiles = append(volatiles, volatileCopy{val, tmpaddr})
					}
				}
			}
		}

		// Build the branch point: b -> {bThen, bEnd}, bThen -> bEnd.
		bThen := f.NewBlock(BlockPlain)
		bEnd := f.NewBlock(b.Kind)
		bThen.Pos = pos
		bEnd.Pos = b.Pos
		b.Pos = pos

		// bEnd inherits b's original control values and successors.
		bEnd.CopyControls(b)
		bEnd.Likely = b.Likely
		for _, e := range b.Succs {
			bEnd.Succs = append(bEnd.Succs, e)
			e.b.Preds[e.i].b = bEnd
		}

		// Test the runtime.writeBarrier flag: load the uint32 word and
		// compare against 0. b becomes an If block branching to bThen
		// (barrier enabled, unlikely) or straight to bEnd.
		cfgtypes := &f.Config.Types
		flag := b.NewValue2(pos, OpLoad, cfgtypes.UInt32, wbaddr, mem)
		flag = b.NewValue2(pos, OpNeq32, cfgtypes.Bool, flag, const0)
		b.Kind = BlockIf
		b.SetControl(flag)
		b.Likely = BranchUnlikely
		b.Succs = b.Succs[:0]
		b.AddEdgeTo(bThen)
		b.AddEdgeTo(bEnd)
		bThen.AddEdgeTo(bEnd)

		// Emit write-barrier code into bThen.
		// addEntry appends one pointer value v to the current write-barrier
		// buffer, issuing a fresh OpWB (which yields a buffer pointer and
		// a new memory) whenever the current buffer is full.
		memThen := mem
		var curCall *Value
		var curPtr *Value
		addEntry := func(pos src.XPos, v *Value) {
			if curCall == nil || curCall.AuxInt == maxEntries {
				t := types.NewTuple(types.Types[types.TUINTPTR].PtrTo(), types.TypeMem)
				curCall = bThen.NewValue1(pos, OpWB, t, memThen)
				curPtr = bThen.NewValue1(pos, OpSelect0, types.Types[types.TUINTPTR].PtrTo(), curCall)
				memThen = bThen.NewValue1(pos, OpSelect1, types.TypeMem, curCall)
			}
			// Store v into the next free slot of the buffer.
			num := curCall.AuxInt
			curCall.AuxInt = num + 1
			wbuf := bThen.NewValue1I(pos, OpOffPtr, types.Types[types.TUINTPTR].PtrTo(), num*f.Config.PtrSize, curPtr)
			memThen = bThen.NewValue3A(pos, OpStore, types.TypeMem, types.Types[types.TUINTPTR], wbuf, v, memThen)
		}

		// For each StoreWB, record in the buffer (deduplicated via the
		// sparse sets) the new value being written and the old value
		// being overwritten, when the GC needs to see them.

		// srcs: IDs of values already placed in the write-barrier buffer.
		srcs := sset
		srcs.clear()

		// dsts: IDs of addresses whose old value was already recorded.
		dsts := sset2
		dsts.clear()

		for _, w := range stores {
			if w.Op != OpStoreWB {
				continue
			}
			pos := w.Pos
			ptr := w.Args[0]
			val := w.Args[1]
			if !srcs.contains(val.ID) && needWBsrc(val) {
				srcs.add(val.ID)
				addEntry(pos, val)
			}
			if !dsts.contains(ptr.ID) && needWBdst(ptr, w.Args[2], zeroes) {
				dsts.add(ptr.ID)
				// Load the old value from the store target so it can be
				// recorded in the buffer before being overwritten.
				oldVal := bThen.NewValue2(pos, OpLoad, types.Types[types.TUINTPTR], ptr, memThen)
				// Save the old value to the write-barrier buffer.
				addEntry(pos, oldVal)
			}
			f.fe.Func().SetWBPos(pos)
			nWBops--
		}

		// ZeroWB and MoveWB are handled by runtime calls rather than
		// buffer entries: wbZero(&typ, dst) / wbMove(&typ, dst, src).
		for _, w := range stores {
			pos := w.Pos
			switch w.Op {
			case OpZeroWB:
				dst := w.Args[0]
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				// zeroWB(&typ, dst)
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbZero, sp, memThen, taddr, dst)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			case OpMoveWB:
				dst := w.Args[0]
				src := w.Args[1]
				if isVolatile(src) {
					// Use the non-volatile copy made earlier.
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				// moveWB(&typ, dst, src)
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbMove, sp, memThen, taddr, dst, src)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			}
		}

		// Merge the two memory states (barrier-off and barrier-on paths).
		mem = bEnd.NewValue2(pos, OpPhi, types.TypeMem, mem, memThen)

		// Issue the actual raw stores after the merge point.
		for _, w := range stores {
			pos := w.Pos
			switch w.Op {
			case OpStoreWB:
				ptr := w.Args[0]
				val := w.Args[1]
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo pointer-write checking code.
					mem = wbcall(pos, bEnd, cgoCheckPtrWrite, sp, mem, ptr, val)
				}
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, ptr, val, mem)
			case OpZeroWB:
				dst := w.Args[0]
				mem = bEnd.NewValue2I(pos, OpZero, types.TypeMem, w.AuxInt, dst, mem)
				mem.Aux = w.Aux
			case OpMoveWB:
				dst := w.Args[0]
				src := w.Args[1]
				if isVolatile(src) {
					// Use the non-volatile copy made earlier.
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo memmove checking code.
					typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
					taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
					mem = wbcall(pos, bEnd, cgoCheckMemmove, sp, mem, taddr, dst, src)
				}
				mem = bEnd.NewValue3I(pos, OpMove, types.TypeMem, w.AuxInt, dst, src, mem)
				mem.Aux = w.Aux
			case OpVarDef, OpVarLive:
				mem = bEnd.NewValue1A(pos, w.Op, types.TypeMem, w.Aux, mem)
			case OpStore:
				ptr := w.Args[0]
				val := w.Args[1]
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, ptr, val, mem)
			}
		}

		// Reuse the last store value as an OpWBend marker terminating the
		// barrier sequence; it takes the final memory as its argument so
		// all subsequent memory operations thread through it.
		bEnd.Values = append(bEnd.Values, last)
		last.Block = bEnd
		last.reset(OpWBend)
		last.Pos = last.Pos.WithNotStmt()
		last.Type = types.TypeMem
		last.AddArg(mem)

		// Free all the old store values except last (now the WBend marker).
		// Two passes: drop argument references first, then free.
		for _, w := range stores {
			if w != last {
				w.resetArgs()
			}
		}
		for _, w := range stores {
			if w != last {
				f.freeValue(w)
			}
		}

		// Move the values that followed the sequence into the end block.
		bEnd.Values = append(bEnd.Values, after...)
		for _, w := range after {
			w.Block = bEnd
		}

		// If this block still has WB ops (earlier sequences), repeat.
		if nWBops > 0 {
			goto again
		}
	}
}
554
555
556
// computeZeroMap returns a map from the ID of a memory value to the set of
// locations (as a ZeroRegion) known to be zero at that memory state.
// select1 maps a call's ID to its SelectN [1] (memory) result.
func (f *Func) computeZeroMap(select1 []*Value) map[ID]ZeroRegion {

	ptrSize := f.Config.PtrSize
	// This analysis is conservative: for each memory state it tracks at
	// most one object (the base) and only which of that object's first
	// 64 pointer-sized words are known zero. This is enough to remove
	// write barriers for common initialization patterns of new objects.
	zeroes := map[ID]ZeroRegion{}
	// Find newly allocated objects: immediately after runtime.newobject
	// the whole object is zero.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if mem, ok := IsNewObject(v, select1); ok {
				// Skip unsafe.Pointer results inside package runtime —
				// presumably such pointers can alias in ways this
				// analysis must not trust (NOTE(review): exact rationale
				// not visible here; confirm against upstream history).
				if types.LocalPkg.Path == "runtime" && v.Type.IsUnsafePtr() {
					continue
				}

				nptr := v.Type.Elem().Size() / ptrSize
				if nptr > 64 {
					nptr = 64 // only the first 64 words are tracked
				}
				zeroes[mem.ID] = ZeroRegion{base: v, mask: 1<<uint(nptr) - 1}
			}
		}
	}
	// Propagate zero information through stores until a fixed point:
	// a store to a tracked object clears the bits for the words it writes
	// and attaches the surviving region to the store's output memory.
	for {
		changed := false
		for _, b := range f.Blocks {
			for _, v := range b.Values {
				if v.Op != OpStore {
					continue
				}
				z, ok := zeroes[v.MemoryArg().ID]
				if !ok {
					continue
				}
				ptr := v.Args[0]
				var off int64
				size := v.Aux.(*types.Type).Size()
				for ptr.Op == OpOffPtr {
					off += ptr.AuxInt
					ptr = ptr.Args[0]
				}
				if ptr != z.base {
					// Different base object: we can't tell whether this
					// store aliases z.base, so don't carry the region
					// forward past this store.
					continue
				}

				// Round the written range outward to whole words so a
				// partial (non-pointer) write clears its whole word.
				if d := off % ptrSize; d != 0 {
					off -= d
					size += d
				}
				if d := size % ptrSize; d != 0 {
					size += ptrSize - d
				}
				// Clip to the 64 words we track.
				min := off
				max := off + size
				if min < 0 {
					min = 0
				}
				if max > 64*ptrSize {
					max = 64 * ptrSize
				}
				// Clear bits for the words being written (no longer
				// necessarily zero after this store).
				for i := min; i < max; i += ptrSize {
					bit := i / ptrSize
					z.mask &^= 1 << uint(bit)
				}
				if z.mask == 0 {
					// Nothing known zero any more; don't bother recording.
					continue
				}
				// Record the surviving region at this store's output memory.
				if zeroes[v.ID] != z {
					zeroes[v.ID] = z
					changed = true
				}
			}
		}
		if !changed {
			break
		}
	}
	if f.pass.debug > 0 {
		fmt.Printf("func %s\n", f.Name)
		for mem, z := range zeroes {
			fmt.Printf(" memory=v%d ptr=%v zeromask=%b\n", mem, z.base, z.mask)
		}
	}
	return zeroes
}
661
662
// wbcall emits a write-barrier-related runtime call to fn in block b and
// returns the resulting memory value. All arguments are uintptr-sized.
func wbcall(pos src.XPos, b *Block, fn *obj.LSym, sp, mem *Value, args ...*Value) *Value {
	config := b.Func.Config
	typ := config.Types.Uintptr // type of all argument values
	nargs := len(args)

	// Arguments go in registers only if the default ABI is the register
	// ABI and there are enough integer parameter registers for the
	// largest call (3 args).
	inRegs := b.Func.ABIDefault == b.Func.ABI1 && len(config.intParamRegs) >= 3

	if !inRegs {
		// Store the arguments to their stack slots in the outgoing
		// argument area; the call then takes only the memory.
		off := config.ctxt.Arch.FixedFrameSize
		for _, arg := range args {
			stkaddr := b.NewValue1I(pos, OpOffPtr, typ.PtrTo(), off, sp)
			mem = b.NewValue3A(pos, OpStore, types.TypeMem, typ, stkaddr, arg, mem)
			off += typ.Size()
		}
		args = args[:0]
	}

	args = append(args, mem)

	// Issue the call; AuxInt is the size of the stack argument area.
	argTypes := make([]*types.Type, nargs, 3) // capacity 3: max arg count here
	for i := 0; i < nargs; i++ {
		argTypes[i] = typ
	}
	call := b.NewValue0A(pos, OpStaticCall, types.TypeResultMem, StaticAuxCall(fn, b.Func.ABIDefault.ABIAnalyzeTypes(argTypes, nil)))
	call.AddArgs(args...)
	call.AuxInt = int64(nargs) * typ.Size()
	return b.NewValue1I(pos, OpSelectN, types.TypeMem, 0, call)
}
694
695
// round rounds o up to a multiple of r; r must be a power of 2.
func round(o int64, r int64) int64 {
	mask := r - 1
	return (o + mask) &^ mask
}
699
700
701 func IsStackAddr(v *Value) bool {
702 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
703 v = v.Args[0]
704 }
705 switch v.Op {
706 case OpSP, OpLocalAddr, OpSelectNAddr, OpGetCallerSP:
707 return true
708 }
709 return false
710 }
711
712
713 func IsGlobalAddr(v *Value) bool {
714 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
715 v = v.Args[0]
716 }
717 if v.Op == OpAddr && v.Args[0].Op == OpSB {
718 return true
719 }
720 if v.Op == OpConstNil {
721 return true
722 }
723 if v.Op == OpLoad && IsReadOnlyGlobalAddr(v.Args[0]) {
724 return true
725 }
726 return false
727 }
728
729
730 func IsReadOnlyGlobalAddr(v *Value) bool {
731 if v.Op == OpConstNil {
732
733 return true
734 }
735 if v.Op == OpAddr && v.Aux != nil && v.Aux.(*obj.LSym).Type == objabi.SRODATA {
736 return true
737 }
738 return false
739 }
740
741
742
// IsNewObject reports whether v is a pointer to a freshly allocated (and
// hence zeroed) object from runtime.newobject. If so, it also returns the
// memory state mem at which v is all zero. select1 maps a call's ID to its
// SelectN [1] (memory) result.
func IsNewObject(v *Value, select1 []*Value) (mem *Value, ok bool) {
	f := v.Block.Func
	c := f.Config
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		// Register ABI: the result is SelectN [0] of the call, and the
		// post-call memory is the SelectN [1] of the same call.
		if v.Op != OpSelectN || v.AuxInt != 0 {
			return nil, false
		}
		mem = select1[v.Args[0].ID]
		if mem == nil {
			return nil, false
		}
	} else {
		// Stack ABI: the result is a Load from the call's result slot,
		// with the call's memory (SelectN) as the load's memory arg.
		if v.Op != OpLoad {
			return nil, false
		}
		mem = v.MemoryArg()
		if mem.Op != OpSelectN {
			return nil, false
		}
		if mem.Type != types.TypeMem {
			return nil, false
		}
	}
	call := mem.Args[0]
	if call.Op != OpStaticCall {
		return nil, false
	}
	if !isSameCall(call.Aux, "runtime.newobject") {
		return nil, false
	}
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		// Register ABI: v must be a result of this very call.
		if v.Args[0] == call {
			return mem, true
		}
		return nil, false
	}
	// Stack ABI: v must load from SP + FixedFrameSize + RegSize, i.e.
	// the first result slot just past the single pointer-sized argument.
	if v.Args[0].Op != OpOffPtr {
		return nil, false
	}
	if v.Args[0].Args[0].Op != OpSP {
		return nil, false
	}
	if v.Args[0].AuxInt != c.ctxt.Arch.FixedFrameSize+c.RegSize {
		return nil, false
	}
	return mem, true
}
790
791
792
793 func IsSanitizerSafeAddr(v *Value) bool {
794 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
795 v = v.Args[0]
796 }
797 switch v.Op {
798 case OpSP, OpLocalAddr, OpSelectNAddr:
799
800 return true
801 case OpITab, OpStringPtr, OpGetClosurePtr:
802
803
804 return true
805 case OpAddr:
806 vt := v.Aux.(*obj.LSym).Type
807 return vt == objabi.SRODATA || vt == objabi.SLIBFUZZER_8BIT_COUNTER || vt == objabi.SCOVERAGE_COUNTER || vt == objabi.SCOVERAGE_AUXVAR
808 }
809 return false
810 }
811
812
813
814 func isVolatile(v *Value) bool {
815 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy || v.Op == OpSelectNAddr {
816 v = v.Args[0]
817 }
818 return v.Op == OpSP
819 }
820
View as plain text