package walk

import (
	"encoding/binary"
	"fmt"
	"go/constant"
	"hash/fnv"
	"io"

	"cmd/compile/internal/base"
	"cmd/compile/internal/compare"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
)

func fakePC(n ir.Node) ir.Node {
	// Hash the package path and the node's source position so that the
	// resulting ID is deterministic across builds.
	hash := fnv.New32()
	// Writes to a hash.Hash never return an error, so the results of
	// io.WriteString and binary.Write can be ignored.
	io.WriteString(hash, base.Ctxt.Pkgpath)
	io.WriteString(hash, base.Ctxt.PosTable.Pos(n.Pos()).AbsFilename())
	binary.Write(hash, binary.LittleEndian, int64(n.Pos().Line()))
	binary.Write(hash, binary.LittleEndian, int64(n.Pos().Col()))
	// Also hash the node's string representation, so that autogenerated
	// expressions sharing the same position still get distinct IDs.
	io.WriteString(hash, fmt.Sprintf("%v", n))

	return ir.NewInt(base.Pos, int64(hash.Sum32()))
}

// The result of walkCompare MUST be assigned back to n, e.g.
//
//	n.Left = walkCompare(n.Left, init)
func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	if n.X.Type().IsInterface() && n.Y.Type().IsInterface() && n.X.Op() != ir.ONIL && n.Y.Op() != ir.ONIL {
		return walkCompareInterface(n, init)
	}

	if n.X.Type().IsString() && n.Y.Type().IsString() {
		return walkCompareString(n, init)
	}

	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// Given a mixed interface/concrete comparison,
	// rewrite it into types-equal && data-equal.
	// This is efficient, avoids allocations, and avoids runtime calls.
	if n.X.Type().IsInterface() != n.Y.Type().IsInterface() {
		// Preserve side effects in case of short-circuiting.
		l := cheapExpr(n.X, init)
		r := cheapExpr(n.Y, init)
		// Swap so that l is the interface value and r is the concrete value.
		if n.Y.Type().IsInterface() {
			l, r = r, l
		}

		// Handle both == and !=.
		eq := n.Op()
		andor := ir.OOROR
		if eq == ir.OEQ {
			andor = ir.OANDAND
		}

		// Check for types equal.
		// For an empty interface, this is:
		//	l.tab == type(r)
		// For a non-empty interface, this is:
		//	l.tab != nil && l.tab._type == type(r)
		var eqtype ir.Node
		tab := ir.NewUnaryExpr(base.Pos, ir.OITAB, l)
		rtyp := reflectdata.CompareRType(base.Pos, n)
		if l.Type().IsEmptyInterface() {
			tab.SetType(types.NewPtr(types.Types[types.TUINT8]))
			tab.SetTypecheck(1)
			eqtype = ir.NewBinaryExpr(base.Pos, eq, tab, rtyp)
		} else {
			nonnil := ir.NewBinaryExpr(base.Pos, brcom(eq), typecheck.NodNil(), tab)
			match := ir.NewBinaryExpr(base.Pos, eq, itabType(tab), rtyp)
			eqtype = ir.NewLogicalExpr(base.Pos, andor, nonnil, match)
		}
		// Check for data equal.
		eqdata := ir.NewBinaryExpr(base.Pos, eq, ifaceData(n.Pos(), l, r.Type()), r)
		// Put it all together.
		expr := ir.NewLogicalExpr(base.Pos, andor, eqtype, eqdata)
		return finishCompare(n, expr, init)
	}

	// Must be a comparison of an array or struct.
	// Otherwise the back end handles it.
	// While we're here, decide whether to
	// inline the comparison or call an eq alg.
	t := n.X.Type()
	var inline bool

	maxcmpsize := int64(4)
	unalignedLoad := ssagen.Arch.LinkArch.CanMergeLoads
	if unalignedLoad {
		// Keep this low enough to generate less code than a function call.
		maxcmpsize = 2 * int64(ssagen.Arch.LinkArch.RegSize)
	}

	switch t.Kind() {
	default:
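		// With -d=libfuzzer, instrument integer comparisons by reporting the
		// compared values to the fuzzer hooks, unless the operand is already
		// an 8-bit counter maintained by the instrumentation itself.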
		if base.Debug.Libfuzzer != 0 && t.IsInteger() && (n.X.Name() == nil || !n.X.Name().Libfuzzer8BitCounter()) {
			n.X = cheapExpr(n.X, init)
			n.Y = cheapExpr(n.Y, init)

			// If exactly one comparison operand is
			// constant, invoke the constcmp functions
			// instead, and arrange for the constant
			// operand to be the first argument.
			l, r := n.X, n.Y
			if r.Op() == ir.OLITERAL {
				l, r = r, l
			}
			constcmp := l.Op() == ir.OLITERAL && r.Op() != ir.OLITERAL

			var fn string
			var paramType *types.Type
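			// Pick the hook and unsigned parameter type that match the
			// operand size; the ConstCmp variants take the constant operand
			// as their first argument.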
			switch t.Size() {
			case 1:
				fn = "libfuzzerTraceCmp1"
				if constcmp {
					fn = "libfuzzerTraceConstCmp1"
				}
				paramType = types.Types[types.TUINT8]
			case 2:
				fn = "libfuzzerTraceCmp2"
				if constcmp {
					fn = "libfuzzerTraceConstCmp2"
				}
				paramType = types.Types[types.TUINT16]
			case 4:
				fn = "libfuzzerTraceCmp4"
				if constcmp {
					fn = "libfuzzerTraceConstCmp4"
				}
				paramType = types.Types[types.TUINT32]
			case 8:
				fn = "libfuzzerTraceCmp8"
				if constcmp {
					fn = "libfuzzerTraceConstCmp8"
				}
				paramType = types.Types[types.TUINT64]
			default:
				base.Fatalf("unexpected integer size %d for %v", t.Size(), t)
			}
			init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init), fakePC(n)))
		}
		return n
	case types.TARRAY:
		// We can compare several elements at once with 2/4/8 byte integer compares.
		inline = t.NumElem() <= 1 || (types.IsSimple[t.Elem().Kind()] && (t.NumElem() <= 4 || t.Elem().Size()*t.NumElem() <= maxcmpsize))
	case types.TSTRUCT:
		inline = compare.EqStructCost(t) <= 4
	}

	// Strip any OCONVNOP wrappers so we compare the underlying values.
	cmpl := n.X
	for cmpl != nil && cmpl.Op() == ir.OCONVNOP {
		cmpl = cmpl.(*ir.ConvExpr).X
	}
	cmpr := n.Y
	for cmpr != nil && cmpr.Op() == ir.OCONVNOP {
		cmpr = cmpr.(*ir.ConvExpr).X
	}

	// Chose not to inline. Call an equality function directly.
	if !inline {
		// eq algs take pointers; cmpl and cmpr must be addressable.
		if !ir.IsAddressable(cmpl) || !ir.IsAddressable(cmpr) {
			base.Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
		}

		// needsLength reports whether the eq function also takes the memory
		// size as an argument (appended below).
		fn, needsLength := reflectdata.EqFor(t)
		call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
		addrCmpl := typecheck.NodAddr(cmpl)
		addrCmpR := typecheck.NodAddr(cmpr)
		if !types.IsNoRacePkg(types.LocalPkg) && base.Flag.Race {
			// Under -race, the eq function will read both operands' memory,
			// so report those reads to the race detector before the call.
			ptrL := typecheck.Conv(typecheck.Conv(addrCmpl, types.Types[types.TUNSAFEPTR]), types.Types[types.TUINTPTR])
			ptrR := typecheck.Conv(typecheck.Conv(addrCmpR, types.Types[types.TUNSAFEPTR]), types.Types[types.TUINTPTR])
			raceFn := typecheck.LookupRuntime("racereadrange")
			size := ir.NewInt(base.Pos, t.Size())
			call.PtrInit().Append(mkcall1(raceFn, nil, init, ptrL, size))
			call.PtrInit().Append(mkcall1(raceFn, nil, init, ptrR, size))
		}
		call.Args.Append(addrCmpl)
		call.Args.Append(addrCmpR)
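		// memequal-style eq functions take the size to compare as a third
		// argument; generated type-specific eq functions do not.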
		if needsLength {
			call.Args.Append(ir.NewInt(base.Pos, t.Size()))
		}
		res := ir.Node(call)
		if n.Op() != ir.OEQ {
			res = ir.NewUnaryExpr(base.Pos, ir.ONOT, res)
		}
		return finishCompare(n, res, init)
	}

	// Inline: build a boolean expression comparing element by element.
	andor := ir.OANDAND
	if n.Op() == ir.ONE {
		andor = ir.OOROR
	}
	var expr ir.Node
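	// comp compares one element pair and chains the result onto expr
	// with && (for ==) or || (for !=).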
	comp := func(el, er ir.Node) {
		a := ir.NewBinaryExpr(base.Pos, n.Op(), el, er)
		if expr == nil {
			expr = a
		} else {
			expr = ir.NewLogicalExpr(base.Pos, andor, expr, a)
		}
	}
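	// and chains an already-built condition onto expr in the same way.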
	and := func(cond ir.Node) {
		if expr == nil {
			expr = cond
		} else {
			expr = ir.NewLogicalExpr(base.Pos, andor, expr, cond)
		}
	}
	cmpl = safeExpr(cmpl, init)
	cmpr = safeExpr(cmpr, init)
	if t.IsStruct() {
		conds, _ := compare.EqStruct(t, cmpl, cmpr)
		if n.Op() == ir.OEQ {
			for _, cond := range conds {
				and(cond)
			}
		} else {
			for _, cond := range conds {
				notCond := ir.NewUnaryExpr(base.Pos, ir.ONOT, cond)
				and(notCond)
			}
		}
	} else {
		step := int64(1)
		remains := t.NumElem() * t.Elem().Size()
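		// When the target supports merged unaligned loads, compare several
		// small elements at a time by packing them into 2-, 4-, or 8-byte
		// unsigned integers.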
		combine64bit := unalignedLoad && types.RegSize == 8 && t.Elem().Size() <= 4 && t.Elem().IsInteger()
		combine32bit := unalignedLoad && t.Elem().Size() <= 2 && t.Elem().IsInteger()
		combine16bit := unalignedLoad && t.Elem().Size() == 1 && t.Elem().IsInteger()
		for i := int64(0); remains > 0; {
			var convType *types.Type
			switch {
			case remains >= 8 && combine64bit:
				convType = types.Types[types.TINT64]
				step = 8 / t.Elem().Size()
			case remains >= 4 && combine32bit:
				convType = types.Types[types.TUINT32]
				step = 4 / t.Elem().Size()
			case remains >= 2 && combine16bit:
				convType = types.Types[types.TUINT16]
				step = 2 / t.Elem().Size()
			default:
				step = 1
			}
			if step == 1 {
				comp(
					ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)),
					ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)),
				)
				i++
				remains -= t.Elem().Size()
			} else {
				elemType := t.Elem().ToUnsigned()
				cmplw := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)))
				cmplw = typecheck.Conv(cmplw, elemType) // convert to unsigned
				cmplw = typecheck.Conv(cmplw, convType) // widen
				cmprw := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)))
				cmprw = typecheck.Conv(cmprw, elemType)
				cmprw = typecheck.Conv(cmprw, convType)
				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
				// ssa will generate a single large load.
				for offset := int64(1); offset < step; offset++ {
					lb := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i+offset)))
					lb = typecheck.Conv(lb, elemType)
					lb = typecheck.Conv(lb, convType)
					lb = ir.NewBinaryExpr(base.Pos, ir.OLSH, lb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
					cmplw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmplw, lb)
					rb := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i+offset)))
					rb = typecheck.Conv(rb, elemType)
					rb = typecheck.Conv(rb, convType)
					rb = ir.NewBinaryExpr(base.Pos, ir.OLSH, rb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
					cmprw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmprw, rb)
				}
				comp(cmplw, cmprw)
				i += step
				remains -= step * t.Elem().Size()
			}
		}
	}
	if expr == nil {
		expr = ir.NewBool(base.Pos, n.Op() == ir.OEQ)
		// We still need to use cmpl and cmpr, in case they contain
		// an expression which might panic.
		a1 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.BlankNode, cmpl))
		a2 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.BlankNode, cmpr))
		init.Append(a1, a2)
	}
	return finishCompare(n, expr, init)
}

func walkCompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	n.Y = cheapExpr(n.Y, init)
	n.X = cheapExpr(n.X, init)
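	// EqInterface returns one condition comparing the type words and a
	// runtime call comparing the data, which are combined below.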
	eqtab, eqdata := compare.EqInterface(n.X, n.Y)
	var cmp ir.Node
	if n.Op() == ir.OEQ {
		cmp = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqtab, eqdata)
	} else {
		eqtab.SetOp(ir.ONE)
		cmp = ir.NewLogicalExpr(base.Pos, ir.OOROR, eqtab, ir.NewUnaryExpr(base.Pos, ir.ONOT, eqdata))
	}
	return finishCompare(n, cmp, init)
}

func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
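	// With -d=libfuzzer, report non-constant string comparisons to the fuzzer
	// hook so the fuzzer can steer inputs toward matching strings.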
	if base.Debug.Libfuzzer != 0 {
		if !ir.IsConst(n.X, constant.String) || !ir.IsConst(n.Y, constant.String) {
			fn := "libfuzzerHookStrCmp"
			n.X = cheapExpr(n.X, init)
			n.Y = cheapExpr(n.Y, init)
			paramType := types.Types[types.TSTRING]
			init.Append(mkcall(fn, nil, init, tracecmpArg(n.X, paramType, init), tracecmpArg(n.Y, paramType, init), fakePC(n)))
		}
	}

	var cs, ncs ir.Node // const string, non-const string
	switch {
	case ir.IsConst(n.X, constant.String) && ir.IsConst(n.Y, constant.String):
		// ignore; will be constant evaluated
	case ir.IsConst(n.X, constant.String):
		cs = n.X
		ncs = n.Y
	case ir.IsConst(n.Y, constant.String):
		cs = n.Y
		ncs = n.X
	}
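	// If exactly one side is a short constant string, rewrite the comparison
	// into a length check plus byte-wise (or word-wise) comparisons.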
	if cs != nil {
		cmp := n.Op()
		// The rewrite below assumes that the non-constant string
		// is on the left hand side, so rewrite "" cmp x to x cmp "".
		// The comparison direction must be reversed when the operands swap.
		if ir.IsConst(n.X, constant.String) {
			cmp = brrev(cmp)
		}

		// maxRewriteLen was chosen empirically: it is the constant-string
		// length below which the inline rewrite tends to generate less code
		// than a call to the runtime string comparison.
		maxRewriteLen := 6
		// Some architectures can load an unaligned byte sequence as one word,
		// so they can cover longer strings with the same rewrite.
		canCombineLoads := ssagen.Arch.LinkArch.CanMergeLoads
		combine64bit := false
		if canCombineLoads {
			// Keep this low enough to generate less code than a function call.
			maxRewriteLen = 2 * ssagen.Arch.LinkArch.RegSize
			combine64bit = ssagen.Arch.LinkArch.RegSize >= 8
		}

		var and ir.Op
		switch cmp {
		case ir.OEQ:
			and = ir.OANDAND
		case ir.ONE:
			and = ir.OOROR
		default:
			// Don't do byte-wise comparisons for <, <=, etc.
			// They're fairly complicated; a length-only check is still ok.
			maxRewriteLen = 0
		}
		if s := ir.StringVal(cs); len(s) <= maxRewriteLen {
			if len(s) > 0 {
				ncs = safeExpr(ncs, init)
			}
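			// Start with a length comparison (len(ncs) cmp len(s)), then
			// check the bytes.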
			r := ir.Node(ir.NewBinaryExpr(base.Pos, cmp, ir.NewUnaryExpr(base.Pos, ir.OLEN, ncs), ir.NewInt(base.Pos, int64(len(s)))))
			remains := len(s)
			for i := 0; remains > 0; {
				if remains == 1 || !canCombineLoads {
					cb := ir.NewInt(base.Pos, int64(s[i]))
					ncb := ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i)))
					r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, ncb, cb))
					remains--
					i++
					continue
				}
				var step int
				var convType *types.Type
				switch {
				case remains >= 8 && combine64bit:
					convType = types.Types[types.TINT64]
					step = 8
				case remains >= 4:
					convType = types.Types[types.TUINT32]
					step = 4
				case remains >= 2:
					convType = types.Types[types.TUINT16]
					step = 2
				}
				ncsubstr := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i))), convType)
				csubstr := int64(s[i])
				// Calculate the large constant from bytes as a sequence of shifts and ors.
				// Like this: uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
				// ssa will combine this into a single large load.
				for offset := 1; offset < step; offset++ {
					b := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i+offset))), convType)
					b = ir.NewBinaryExpr(base.Pos, ir.OLSH, b, ir.NewInt(base.Pos, int64(8*offset)))
					ncsubstr = ir.NewBinaryExpr(base.Pos, ir.OOR, ncsubstr, b)
					csubstr |= int64(s[i+offset]) << uint8(8*offset)
				}
				csubstrPart := ir.NewInt(base.Pos, csubstr)
				// Compare "step" bytes at once.
				r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, csubstrPart, ncsubstr))
				remains -= step
				i += step
			}
			return finishCompare(n, r, init)
		}
	}

	var r ir.Node
	if n.Op() == ir.OEQ || n.Op() == ir.ONE {
		// Prepare for the rewrite below.
		n.X = cheapExpr(n.X, init)
		n.Y = cheapExpr(n.Y, init)
		eqlen, eqmem := compare.EqString(n.X, n.Y)
		// Quick length check before the full memory comparison:
		// memequal only runs when the lengths are equal.
		if n.Op() == ir.OEQ {
			// len(left) == len(right) && memequal(left, right, len)
			r = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqlen, eqmem)
		} else {
			// len(left) != len(right) || !memequal(left, right, len)
			eqlen.SetOp(ir.ONE)
			r = ir.NewLogicalExpr(base.Pos, ir.OOROR, eqlen, ir.NewUnaryExpr(base.Pos, ir.ONOT, eqmem))
		}
	} else {
		// Ordered comparison (<, <=, >, >=): call runtime cmpstring and
		// compare its result against 0.
		r = mkcall("cmpstring", types.Types[types.TINT], init, typecheck.Conv(n.X, types.Types[types.TSTRING]), typecheck.Conv(n.Y, types.Types[types.TSTRING]))
		r = ir.NewBinaryExpr(base.Pos, n.Op(), r, ir.NewInt(base.Pos, 0))
	}

	return finishCompare(n, r, init)
}

// finishCompare typechecks the rewritten comparison r, converts it to n's
// type, and walks it. The result MUST be assigned back in place of n.
func finishCompare(n *ir.BinaryExpr, r ir.Node, init *ir.Nodes) ir.Node {
	r = typecheck.Expr(r)
	r = typecheck.Conv(r, n.Type())
	r = walkExpr(r, init)
	return r
}

// brcom returns the complement of op, e.g. brcom(==) is !=.
func brcom(op ir.Op) ir.Op {
	switch op {
	case ir.OEQ:
		return ir.ONE
	case ir.ONE:
		return ir.OEQ
	case ir.OLT:
		return ir.OGE
	case ir.OGT:
		return ir.OLE
	case ir.OLE:
		return ir.OGT
	case ir.OGE:
		return ir.OLT
	}
	base.Fatalf("brcom: no com for %v\n", op)
	return op
}

// brrev returns the reversed comparison for op, e.g. brrev(<) is >.
func brrev(op ir.Op) ir.Op {
	switch op {
	case ir.OEQ:
		return ir.OEQ
	case ir.ONE:
		return ir.ONE
	case ir.OLT:
		return ir.OGT
	case ir.OGT:
		return ir.OLT
	case ir.OLE:
		return ir.OGE
	case ir.OGE:
		return ir.OLE
	}
	base.Fatalf("brrev: no rev for %v\n", op)
	return op
}

func tracecmpArg(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
	// Copy negative signed constants to a temporary first; converting them
	// directly to an unsigned parameter type would trigger
	// "constant ... overflows" errors.
	if n.Op() == ir.OLITERAL && n.Type().IsSigned() && ir.Int64Val(n) < 0 {
		n = copyExpr(n, n.Type(), init)
	}

	return typecheck.Conv(n, t)
}