package walk

import (
	"encoding/binary"
	"fmt"
	"go/constant"
	"hash/fnv"
	"io"

	"cmd/compile/internal/base"
	"cmd/compile/internal/compare"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
)

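// fakePC returns a synthetic program-counter value for n, derived from the
// package path, source position, and node text, for use as the fake PC
// argument of the libFuzzer trace hooks.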
func fakePC(n ir.Node) ir.Node {
	// Hash the package path, absolute filename, line, and column so the
	// resulting value is deterministic across builds.
	hash := fnv.New32()
	// The hash.Hash writer never returns an error, so the errors from
	// io.WriteString and binary.Write can be safely ignored.
	io.WriteString(hash, base.Ctxt.Pkgpath)
	io.WriteString(hash, base.Ctxt.PosTable.Pos(n.Pos()).AbsFilename())
	binary.Write(hash, binary.LittleEndian, int64(n.Pos().Line()))
	binary.Write(hash, binary.LittleEndian, int64(n.Pos().Col()))
	// Include the node's string representation to distinguish nodes that
	// share the same source position.
	io.WriteString(hash, fmt.Sprintf("%v", n))

	return ir.NewInt(base.Pos, int64(hash.Sum32()))
}

// walkCompare walks a comparison expression n (n.Op is OEQ, ONE, OLT, etc.).
// The result must be assigned back in place of n, e.g.
//
//	n.Left = walkCompare(n.Left, init)
func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	if n.X.Type().IsInterface() && n.Y.Type().IsInterface() && n.X.Op() != ir.ONIL && n.Y.Op() != ir.ONIL {
		return walkCompareInterface(n, init)
	}

	if n.X.Type().IsString() && n.Y.Type().IsString() {
		return walkCompareString(n, init)
	}

	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// Given a mixed interface/concrete comparison, rewrite it into a
	// type-equality test combined with a data-equality test. This avoids
	// allocating and avoids a runtime call.
	if n.X.Type().IsInterface() != n.Y.Type().IsInterface() {
		// Preserve any side effects of the operands in case of short-circuiting.
		l := cheapExpr(n.X, init)
		r := cheapExpr(n.Y, init)
		// Swap so that l is the interface operand and r is the concrete operand.
		if n.Y.Type().IsInterface() {
			l, r = r, l
		}

		// Handle both == and !=.
		eq := n.Op()
		andor := ir.OOROR
		if eq == ir.OEQ {
			andor = ir.OANDAND
		}

		// Build the type-equality check. For ==, this is
		//	l.tab == type(r)
		// for an empty interface, and
		//	l.tab != nil && l.tab._type == type(r)
		// for a non-empty interface (with the comparisons and the
		// connective flipped for !=).
		var eqtype ir.Node
		tab := ir.NewUnaryExpr(base.Pos, ir.OITAB, l)
		rtyp := reflectdata.CompareRType(base.Pos, n)
		if l.Type().IsEmptyInterface() {
			tab.SetType(types.NewPtr(types.Types[types.TUINT8]))
			tab.SetTypecheck(1)
			eqtype = ir.NewBinaryExpr(base.Pos, eq, tab, rtyp)
		} else {
			nonnil := ir.NewBinaryExpr(base.Pos, brcom(eq), typecheck.NodNil(), tab)
			match := ir.NewBinaryExpr(base.Pos, eq, itabType(tab), rtyp)
			eqtype = ir.NewLogicalExpr(base.Pos, andor, nonnil, match)
		}

		// Check that the data words are equal.
		eqdata := ir.NewBinaryExpr(base.Pos, eq, ifaceData(n.Pos(), l, r.Type()), r)

		// Combine the type and data comparisons.
		expr := ir.NewLogicalExpr(base.Pos, andor, eqtype, eqdata)
		return finishCompare(n, expr, init)
	}

	// At this point the comparison must be of an array or struct;
	// other cases are handled by the back end. While we're here,
	// decide whether to expand the comparison inline or to call an
	// equality function.
	t := n.X.Type()
	var inline bool

	maxcmpsize := int64(4)
	unalignedLoad := ssagen.Arch.LinkArch.CanMergeLoads
	if unalignedLoad {
		// Keep this low enough to generate less code than a function call.
		maxcmpsize = 2 * int64(ssagen.Arch.LinkArch.RegSize)
	}

	switch t.Kind() {
	default:
		if base.Debug.Libfuzzer != 0 && t.IsInteger() && (n.X.Name() == nil || !n.X.Name().Libfuzzer8BitCounter()) {
			n.X = cheapExpr(n.X, init)
			n.Y = cheapExpr(n.Y, init)

			// If exactly one comparison operand is
			// constant, invoke the constcmp functions
			// instead, and arrange for the constant
			// operand to be the first argument.
			l, r := n.X, n.Y
			if r.Op() == ir.OLITERAL {
				l, r = r, l
			}
			constcmp := l.Op() == ir.OLITERAL && r.Op() != ir.OLITERAL

			var fn string
			var paramType *types.Type
			switch t.Size() {
			case 1:
				fn = "libfuzzerTraceCmp1"
				if constcmp {
					fn = "libfuzzerTraceConstCmp1"
				}
				paramType = types.Types[types.TUINT8]
			case 2:
				fn = "libfuzzerTraceCmp2"
				if constcmp {
					fn = "libfuzzerTraceConstCmp2"
				}
				paramType = types.Types[types.TUINT16]
			case 4:
				fn = "libfuzzerTraceCmp4"
				if constcmp {
					fn = "libfuzzerTraceConstCmp4"
				}
				paramType = types.Types[types.TUINT32]
			case 8:
				fn = "libfuzzerTraceCmp8"
				if constcmp {
					fn = "libfuzzerTraceConstCmp8"
				}
				paramType = types.Types[types.TUINT64]
			default:
				base.Fatalf("unexpected integer size %d for %v", t.Size(), t)
			}
			init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init), fakePC(n)))
		}
		return n
	case types.TARRAY:
		// We can compare several elements at once with 2/4/8 byte integer compares.
		inline = t.NumElem() <= 1 || (types.IsSimple[t.Elem().Kind()] && (t.NumElem() <= 4 || t.Elem().Size()*t.NumElem() <= maxcmpsize))
	case types.TSTRUCT:
		inline = compare.EqStructCost(t) <= 4
	}

	cmpl := n.X
	for cmpl != nil && cmpl.Op() == ir.OCONVNOP {
		cmpl = cmpl.(*ir.ConvExpr).X
	}
	cmpr := n.Y
	for cmpr != nil && cmpr.Op() == ir.OCONVNOP {
		cmpr = cmpr.(*ir.ConvExpr).X
	}

	// We chose not to inline; call the equality function directly.
	if !inline {
		// Equality functions take pointers; cmpl and cmpr must be addressable.
		if !ir.IsAddressable(cmpl) || !ir.IsAddressable(cmpr) {
			base.Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
		}

		// Pass the addresses of both operands to the equality function,
		// adding a length argument when the function requires one.
		// When race instrumentation is enabled for this package, record
		// the reads of both operands explicitly, since the equality
		// function reads them directly.
		fn, needsLength := reflectdata.EqFor(t)
		call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
		addrCmpl := typecheck.NodAddr(cmpl)
		addrCmpR := typecheck.NodAddr(cmpr)
		if !types.IsNoRacePkg(types.LocalPkg) && base.Flag.Race {
			ptrL := typecheck.Conv(typecheck.Conv(addrCmpl, types.Types[types.TUNSAFEPTR]), types.Types[types.TUINTPTR])
			ptrR := typecheck.Conv(typecheck.Conv(addrCmpR, types.Types[types.TUNSAFEPTR]), types.Types[types.TUINTPTR])
			raceFn := typecheck.LookupRuntime("racereadrange")
			size := ir.NewInt(base.Pos, t.Size())
			call.PtrInit().Append(mkcall1(raceFn, nil, init, ptrL, size))
			call.PtrInit().Append(mkcall1(raceFn, nil, init, ptrR, size))
		}
		call.Args.Append(addrCmpl)
		call.Args.Append(addrCmpR)
		if needsLength {
			call.Args.Append(ir.NewInt(base.Pos, t.Size()))
		}
		res := ir.Node(call)
		if n.Op() != ir.OEQ {
			res = ir.NewUnaryExpr(base.Pos, ir.ONOT, res)
		}
		return finishCompare(n, res, init)
	}

	// Expand the comparison inline: build a boolean expression that
	// compares the operands field by field or element by element.
	andor := ir.OANDAND
	if n.Op() == ir.ONE {
		andor = ir.OOROR
	}
	var expr ir.Node
	comp := func(el, er ir.Node) {
		a := ir.NewBinaryExpr(base.Pos, n.Op(), el, er)
		if expr == nil {
			expr = a
		} else {
			expr = ir.NewLogicalExpr(base.Pos, andor, expr, a)
		}
	}
	and := func(cond ir.Node) {
		if expr == nil {
			expr = cond
		} else {
			expr = ir.NewLogicalExpr(base.Pos, andor, expr, cond)
		}
	}
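	// Evaluate side effects of the operands up front so they can be
	// referenced multiple times below.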
	cmpl = safeExpr(cmpl, init)
	cmpr = safeExpr(cmpr, init)
	if t.IsStruct() {
		conds, _ := compare.EqStruct(t, cmpl, cmpr)
		if n.Op() == ir.OEQ {
			for _, cond := range conds {
				and(cond)
			}
		} else {
			for _, cond := range conds {
				notCond := ir.NewUnaryExpr(base.Pos, ir.ONOT, cond)
				and(notCond)
			}
		}
	} else {
		step := int64(1)
		remains := t.NumElem() * t.Elem().Size()
		combine64bit := unalignedLoad && types.RegSize == 8 && t.Elem().Size() <= 4 && t.Elem().IsInteger()
		combine32bit := unalignedLoad && t.Elem().Size() <= 2 && t.Elem().IsInteger()
		combine16bit := unalignedLoad && t.Elem().Size() == 1 && t.Elem().IsInteger()
		for i := int64(0); remains > 0; {
			var convType *types.Type
			switch {
			case remains >= 8 && combine64bit:
				convType = types.Types[types.TINT64]
				step = 8 / t.Elem().Size()
			case remains >= 4 && combine32bit:
				convType = types.Types[types.TUINT32]
				step = 4 / t.Elem().Size()
			case remains >= 2 && combine16bit:
				convType = types.Types[types.TUINT16]
				step = 2 / t.Elem().Size()
			default:
				step = 1
			}
			if step == 1 {
				comp(
					ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)),
					ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)),
				)
				i++
				remains -= t.Elem().Size()
			} else {
				elemType := t.Elem().ToUnsigned()
				cmplw := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)))
				cmplw = typecheck.Conv(cmplw, elemType) // convert to unsigned
				cmplw = typecheck.Conv(cmplw, convType) // widen
				cmprw := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)))
				cmprw = typecheck.Conv(cmprw, elemType)
				cmprw = typecheck.Conv(cmprw, convType)
				// Build an expression of the form
				//	uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
				// which SSA can turn into a single wide load.
				for offset := int64(1); offset < step; offset++ {
					lb := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i+offset)))
					lb = typecheck.Conv(lb, elemType)
					lb = typecheck.Conv(lb, convType)
					lb = ir.NewBinaryExpr(base.Pos, ir.OLSH, lb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
					cmplw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmplw, lb)
					rb := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i+offset)))
					rb = typecheck.Conv(rb, elemType)
					rb = typecheck.Conv(rb, convType)
					rb = ir.NewBinaryExpr(base.Pos, ir.OLSH, rb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
					cmprw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmprw, rb)
				}
				comp(cmplw, cmprw)
				i += step
				remains -= step * t.Elem().Size()
			}
		}
	}
	if expr == nil {
		expr = ir.NewBool(base.Pos, n.Op() == ir.OEQ)
		// We still need to use cmpl and cmpr, in case they contain
		// an expression that might panic.
		a1 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.BlankNode, cmpl))
		a2 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.BlankNode, cmpr))
		init.Append(a1, a2)
	}
	return finishCompare(n, expr, init)
}

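// walkCompareInterface walks an interface-to-interface comparison,
// rewriting it into a tab/type comparison combined with a data comparison.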
func walkCompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	swap := n.X.Op() != ir.OCONVIFACE && n.Y.Op() == ir.OCONVIFACE
	n.Y = cheapExpr(n.Y, init)
	n.X = cheapExpr(n.X, init)
	if swap {
		// If exactly one operand is produced by an interface conversion
		// (OCONVIFACE), move it to the left-hand side, so the operand
		// with a statically known dynamic type sits in a consistent
		// position for the generated comparison.
		n.X, n.Y = n.Y, n.X
	}

	eqtab, eqdata := compare.EqInterface(n.X, n.Y)
	var cmp ir.Node
	if n.Op() == ir.OEQ {
		cmp = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqtab, eqdata)
	} else {
		eqtab.SetOp(ir.ONE)
		cmp = ir.NewLogicalExpr(base.Pos, ir.OOROR, eqtab, ir.NewUnaryExpr(base.Pos, ir.ONOT, eqdata))
	}
	return finishCompare(n, cmp, init)
}

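// walkCompareString walks a string comparison, expanding comparisons
// against short constant strings inline and lowering the rest to the
// runtime string comparison helpers.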
func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	if base.Debug.Libfuzzer != 0 {
		if !ir.IsConst(n.X, constant.String) || !ir.IsConst(n.Y, constant.String) {
			fn := "libfuzzerHookStrCmp"
			n.X = cheapExpr(n.X, init)
			n.Y = cheapExpr(n.Y, init)
			paramType := types.Types[types.TSTRING]
			init.Append(mkcall(fn, nil, init, tracecmpArg(n.X, paramType, init), tracecmpArg(n.Y, paramType, init), fakePC(n)))
		}
	}

	// Rewrite comparisons to short constant strings as length+byte-at-a-time comparisons.
	var cs, ncs ir.Node // const string, non-const string
	switch {
	case ir.IsConst(n.X, constant.String) && ir.IsConst(n.Y, constant.String):
		// Ignore; the comparison will be constant evaluated.
	case ir.IsConst(n.X, constant.String):
		cs = n.X
		ncs = n.Y
	case ir.IsConst(n.Y, constant.String):
		cs = n.Y
		ncs = n.X
	}
	if cs != nil {
		cmp := n.Op()
		// The comparison below assumes that the non-constant string
		// is on the left-hand side, so rewrite "" cmp x to x cmp "".
		if ir.IsConst(n.X, constant.String) {
			cmp = brrev(cmp)
		}

		// maxRewriteLen was chosen empirically to keep the expanded
		// comparison smaller than a call to the runtime.
		maxRewriteLen := 6

		// Some architectures can load an unaligned byte sequence as one
		// word, which lets a single comparison cover several bytes.
		canCombineLoads := ssagen.Arch.LinkArch.CanMergeLoads
		combine64bit := false
		if canCombineLoads {
			// Keep this low enough to generate less code than a function call.
			maxRewriteLen = 2 * ssagen.Arch.LinkArch.RegSize
			combine64bit = ssagen.Arch.LinkArch.RegSize >= 8
		}

		var and ir.Op
		switch cmp {
		case ir.OEQ:
			and = ir.OANDAND
		case ir.ONE:
			and = ir.OOROR
		default:
			// Don't do byte-wise comparisons for <, <=, and so on;
			// they're fairly complicated. Length-only checks are
			// fine, though.
			maxRewriteLen = 0
		}
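		// For a short enough constant, expand into a length check plus
		// per-byte (or per-word, when loads can be merged) comparisons.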
		if s := ir.StringVal(cs); len(s) <= maxRewriteLen {
			if len(s) > 0 {
				ncs = safeExpr(ncs, init)
			}
			r := ir.Node(ir.NewBinaryExpr(base.Pos, cmp, ir.NewUnaryExpr(base.Pos, ir.OLEN, ncs), ir.NewInt(base.Pos, int64(len(s)))))
			remains := len(s)
			for i := 0; remains > 0; {
				if remains == 1 || !canCombineLoads {
					cb := ir.NewInt(base.Pos, int64(s[i]))
					ncb := ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i)))
					r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, ncb, cb))
					remains--
					i++
					continue
				}
				var step int
				var convType *types.Type
				switch {
				case remains >= 8 && combine64bit:
					convType = types.Types[types.TINT64]
					step = 8
				case remains >= 4:
					convType = types.Types[types.TUINT32]
					step = 4
				case remains >= 2:
					convType = types.Types[types.TUINT16]
					step = 2
				}
				ncsubstr := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i))), convType)
				csubstr := int64(s[i])
				// Calculate the large constant from bytes as a sequence
				// of shifts and ors, e.g.
				//	uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
				// SSA will combine the non-constant side into a single wide load.
				for offset := 1; offset < step; offset++ {
					b := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i+offset))), convType)
					b = ir.NewBinaryExpr(base.Pos, ir.OLSH, b, ir.NewInt(base.Pos, int64(8*offset)))
					ncsubstr = ir.NewBinaryExpr(base.Pos, ir.OOR, ncsubstr, b)
					csubstr |= int64(s[i+offset]) << uint8(8*offset)
				}
				csubstrPart := ir.NewInt(base.Pos, csubstr)
				// Compare "step" bytes at once.
				r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, csubstrPart, ncsubstr))
				remains -= step
				i += step
			}
			return finishCompare(n, r, init)
		}
	}

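	// General case: for == and != compare lengths and memory; for ordered
	// comparisons call the runtime's cmpstring and compare its result to zero.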
	var r ir.Node
	if n.Op() == ir.OEQ || n.Op() == ir.ONE {
		// Prepare operands for the rewrite below.
		n.X = cheapExpr(n.X, init)
		n.Y = cheapExpr(n.Y, init)
		eqlen, eqmem := compare.EqString(n.X, n.Y)
		// Check the lengths first; the memory comparison only runs when
		// the lengths are equal.
		if n.Op() == ir.OEQ {
			// len(left) == len(right) && memequal(left, right, len)
			r = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqlen, eqmem)
		} else {
			// len(left) != len(right) || !memequal(left, right, len)
			eqlen.SetOp(ir.ONE)
			r = ir.NewLogicalExpr(base.Pos, ir.OOROR, eqlen, ir.NewUnaryExpr(base.Pos, ir.ONOT, eqmem))
		}
	} else {
		// cmpstring(left, right) OP 0
		r = mkcall("cmpstring", types.Types[types.TINT], init, typecheck.Conv(n.X, types.Types[types.TSTRING]), typecheck.Conv(n.Y, types.Types[types.TSTRING]))
		r = ir.NewBinaryExpr(base.Pos, n.Op(), r, ir.NewInt(base.Pos, 0))
	}

	return finishCompare(n, r, init)
}

// finishCompare typechecks the rewritten comparison r, converts it to
// n's type, and walks it. The result of finishCompare MUST be assigned
// back in place of n.
func finishCompare(n *ir.BinaryExpr, r ir.Node, init *ir.Nodes) ir.Node {
	r = typecheck.Expr(r)
	r = typecheck.Conv(r, n.Type())
	r = walkExpr(r, init)
	return r
}

// brcom returns the negated comparison operator,
// e.g. brcom(==) is !=.
func brcom(op ir.Op) ir.Op {
	switch op {
	case ir.OEQ:
		return ir.ONE
	case ir.ONE:
		return ir.OEQ
	case ir.OLT:
		return ir.OGE
	case ir.OGT:
		return ir.OLE
	case ir.OLE:
		return ir.OGT
	case ir.OGE:
		return ir.OLT
	}
	base.Fatalf("brcom: no com for %v\n", op)
	return op
}

// brrev returns the comparison operator with its operands reversed,
// e.g. brrev(<) is >.
func brrev(op ir.Op) ir.Op {
	switch op {
	case ir.OEQ:
		return ir.OEQ
	case ir.ONE:
		return ir.ONE
	case ir.OLT:
		return ir.OGT
	case ir.OGT:
		return ir.OLT
	case ir.OLE:
		return ir.OGE
	case ir.OGE:
		return ir.OLE
	}
	base.Fatalf("brrev: no rev for %v\n", op)
	return op
}

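// tracecmpArg converts a libFuzzer trace-hook argument n to type t.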
func tracecmpArg(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
	// Copy negative constants into a temporary first, to avoid
	// "constant -1 overflows" errors when converting to an unsigned type.
	if n.Op() == ir.OLITERAL && n.Type().IsSigned() && ir.Int64Val(n) < 0 {
		n = copyExpr(n, n.Type(), init)
	}

	return typecheck.Conv(n, t)
}