1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "internal/goexperiment"
16 "internal/runtime/gc"
17 "os"
18 "path/filepath"
19 "slices"
20 "strings"
21
22 "cmd/compile/internal/abi"
23 "cmd/compile/internal/base"
24 "cmd/compile/internal/ir"
25 "cmd/compile/internal/liveness"
26 "cmd/compile/internal/objw"
27 "cmd/compile/internal/reflectdata"
28 "cmd/compile/internal/rttype"
29 "cmd/compile/internal/ssa"
30 "cmd/compile/internal/staticdata"
31 "cmd/compile/internal/typecheck"
32 "cmd/compile/internal/types"
33 "cmd/internal/obj"
34 "cmd/internal/objabi"
35 "cmd/internal/src"
36 "cmd/internal/sys"
37
38 rtabi "internal/abi"
39 )
40
// Shared SSA backend configuration and per-worker value/block caches,
// initialized once by InitConfig.
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

// Debug-dump controls, populated from the environment by InitEnv.
var ssaDump string     // function name to dump SSA for (GOSSAFUNC)
var ssaDumpStdout bool // dump to stdout instead of the HTML file (GOSSAFUNC ends in "+")
var ssaDir string      // directory to place dump files in (GOSSADIR)
var ssaDumpCFG string  // CFG dump spec after ":" in GOSSAFUNC, passed to the HTML writer

const ssaDumpFile = "ssa.html"

// ssaDumpInlined records functions that were inlined into the function
// being dumped, so their sources can be shown too (see DumpInline).
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation is the byte threshold used by newHeapaddr:
// pointer-free heap allocations smaller than this may be merged into a
// single combined allocation (see flushPendingHeapAllocations).
const maxAggregatedHeapAllocation = 16
57
58 func DumpInline(fn *ir.Func) {
59 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
60 ssaDumpInlined = append(ssaDumpInlined, fn)
61 }
62 }
63
64 func InitEnv() {
65 ssaDump = os.Getenv("GOSSAFUNC")
66 ssaDir = os.Getenv("GOSSADIR")
67 if ssaDump != "" {
68 if strings.HasSuffix(ssaDump, "+") {
69 ssaDump = ssaDump[:len(ssaDump)-1]
70 ssaDumpStdout = true
71 }
72 spl := strings.Split(ssaDump, ":")
73 if len(spl) > 1 {
74 ssaDump = spl[0]
75 ssaDumpCFG = spl[1]
76 }
77 }
78 }
79
// InitConfig builds the shared ssa.Config, allocates the per-worker SSA
// caches, and resolves the runtime function/variable symbols the backend
// emits calls and references to.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Instantiate pointer types that the backend will need but the
	// frontend may not have created yet. These must be created before
	// pointer-type caching is disabled below.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // *internal/runtime/maps.Map
	_ = types.NewPtr(deferstruct())                                         // *runtime._defer
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Resolve the symbols of runtime functions and variables referenced
	// by generated code.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.GrowsliceBuf = typecheck.LookupRuntimeFunc("growsliceBuf")
	ir.Syms.GrowsliceBufNoAlias = typecheck.LookupRuntimeFunc("growsliceBufNoAlias")
	ir.Syms.GrowsliceNoAlias = typecheck.LookupRuntimeFunc("growsliceNoAlias")
	ir.Syms.MoveSlice = typecheck.LookupRuntimeFunc("moveSlice")
	ir.Syms.MoveSliceNoScan = typecheck.LookupRuntimeFunc("moveSliceNoScan")
	ir.Syms.MoveSliceNoCap = typecheck.LookupRuntimeFunc("moveSliceNoCap")
	ir.Syms.MoveSliceNoCapNoScan = typecheck.LookupRuntimeFunc("moveSliceNoCapNoScan")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	// Size-specialized malloc entry points, indexed by size class
	// (index 0 is intentionally unused; see specializedMallocSym).
	for i := 1; i < len(ir.Syms.MallocGCSmallNoScan); i++ {
		ir.Syms.MallocGCSmallNoScan[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallNoScanSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCSmallScanNoHeader); i++ {
		ir.Syms.MallocGCSmallScanNoHeader[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallScanNoHeaderSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCTiny); i++ {
		ir.Syms.MallocGCTiny[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcTinySize%d", i))
	}
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Memequal = typecheck.LookupRuntimeFunc("memequal")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.PanicSimdImm = typecheck.LookupRuntimeFunc("panicSimdImm")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	// CPU-feature flags consulted by architecture-specific lowering.
	ir.Syms.X86HasAVX = typecheck.LookupRuntimeVar("x86HasAVX")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// Wasm has no hardware bounds traps, so bounds failures call these
	// Go-implemented panic helpers directly; other architectures use the
	// panicBounds/panicExtend symbols looked up above.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	}

	// Wasm-specific assembly helpers (resolved unconditionally).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
216
// InitTables initializes the intrinsic-call lowering tables.
func InitTables() {
	initIntrinsics(nil)
}
220
221
222
223
224
225
226
227
// AbiForBodylessFuncStackMap returns the ABI configuration to use when
// computing the stack map for a bodyless (assembly) function. A copy of
// the shared ABI0 config is returned so callers may modify it freely.
// Note: fn is currently unused.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
231
232
233
234 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
235 if buildcfg.Experiment.RegabiArgs {
236
237 if fn == nil {
238 return abi1
239 }
240 switch fn.ABI {
241 case obj.ABI0:
242 return abi0
243 case obj.ABIInternal:
244
245
246 return abi1
247 }
248 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
249 panic("not reachable")
250 }
251
252 a := abi0
253 if fn != nil {
254 if fn.Pragma&ir.RegisterParams != 0 {
255 a = abi1
256 }
257 }
258 return a
259 }
260
261
262
263
264
265
266
267
268
269
270
271
// emitOpenDeferInfo emits the FUNCDATA record describing this function's
// open-coded defers, so the runtime can locate the deferBits byte and the
// deferred-closure slots when unwinding a panic.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the closure slots were laid out contiguously:
	// slot i must sit at firstOffset + i*PtrSize.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	// Create the content-addressable ".opendefer" symbol and attach it to
	// the function's metadata.
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	x.Align = 1
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// The record is two uvarints: the negated frame offset of the
	// deferBits temporary, then the negated offset of the first slot.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
293
294
295
// buildssa builds an SSA function for fn and is the entry point for SSA
// construction. worker selects the per-worker cache to reuse; isPgoHot
// marks the function as hot according to PGO profile data.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// Match GOSSAFUNC against "name", "pkg.name", or "path/pkg.name",
	// with optional ABI qualification ("name," or "name<abi>" forms).
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				// Rewrite "<N>" to ",N" so subsequent matching is uniform.
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump ||
			pkgDotName == ssaDump ||
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump)
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the IR dump so it can be echoed and/or embedded in the
		// HTML output below.
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Decide what race/msan/asan instrumentation this function gets.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
			if base.Flag.Race {
				s.instrumentEnterExit = true
			}
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate the entry block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// Write sources and the captured AST as the first HTML columns.
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether defers in this function can be open-coded
	// (inlined defer bookkeeping rather than runtime deferproc calls).
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// NOTE(review): open-coded defers are disabled for shared/dynlink
		// builds on 386 — presumably an interaction with that platform's
		// PIC code generation; confirm before relying on this.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Race enter/exit instrumentation does not mix with open-coded
		// defer exit sequences, so fall back to runtime defers.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Open-coded defers require all result parameters on the stack.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Each return duplicates the defer-exit code; cap the product of
		// returns and defers to bound code growth.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits temporary: one bit per open-coded defer,
		// stored both as an SSA variable and in an addressable stack slot
		// that the runtime can read during panic unwinding.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// OpConst8 with default AuxInt 0: all defer bits start cleared.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Mark the slot live so the store above is not treated as dead.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record output parameters that live in registers, for debug info.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of parameter declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// Locals get their addresses at each use site.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate incoming parameters as SSA values or stack spills.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v)
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// SSA-able type but address-taken variable:
						// materialize the arg and store it once.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Register-passed aggregate: spill each piece
						// to its stack slot.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables from the closure context pointer.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// Unoptimized range-over-func body: spill the closure pointer
			// into a named, address-taken slot so debuggers can find it.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep the store from being eliminated as dead.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// SSA-able by-value captures become ordinary locals loaded
			// once from the closure struct.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// By-reference captures hold a pointer in the closure struct;
			// load it, then record the variable's heap address.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to SSA.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Function falls off the end without an explicit return: emit the exit.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call into the ssa package to compile the function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record spill locations for register-passed input parameters, so the
	// backend can spill them (e.g. at morestack) to their frame slots.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		if len(offs) < len(typs) {
			s.Fatalf("len(offs)=%d < len(typs)=%d, params=\n%s", len(offs), len(typs), params)
		}
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
623
624 func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
625 typs, offs := paramAssignment.RegisterTypesAndOffsets()
626 for i, t := range typs {
627 if pointersOnly && !t.IsPtrShaped() {
628 continue
629 }
630 r := paramAssignment.Registers[i]
631 o := offs[i]
632 op, reg := ssa.ArgOpAndRegisterFor(r, abi)
633 aux := &ssa.AuxNameOffset{Name: n, Offset: o}
634 v := s.newValue0I(op, t, reg)
635 v.Aux = aux
636 p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
637 s.store(t, p, v)
638 }
639 }
640
641
642
643
644
645
646
647 func (s *state) zeroResults() {
648 for _, f := range s.curfn.Type().Results() {
649 n := f.Nname.(*ir.Name)
650 if !n.OnStack() {
651
652
653
654 continue
655 }
656
657 if typ := n.Type(); ssa.CanSSA(typ) {
658 s.assign(n, s.zeroVal(typ), false, 0)
659 } else {
660 if typ.HasPointers() || ssa.IsMergeCandidate(n) {
661 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
662 }
663 s.zero(n.Type(), s.decladdrs[n])
664 }
665 }
666 }
667
668
669
// paramsToHeap produces code to allocate memory for heap-escaping
// parameters and to copy incoming stack parameters into that memory.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				// anonymous or blank parameter field
				continue
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Input parameter: copy the incoming stack value into
				// the freshly allocated heap storage. (Results have no
				// incoming value to copy.)
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
692
693
694
695
696 func allocSizeAndAlign(t *types.Type) (int64, int64) {
697 size, align := t.Size(), t.Alignment()
698 if types.PtrSize == 4 && align == 4 && size >= 8 {
699
700 size = types.RoundUp(size, 8)
701 align = 8
702 }
703 return size, align
704 }
// allocSize returns just the allocated size of t (see allocSizeAndAlign).
func allocSize(t *types.Type) int64 {
	size, _ := allocSizeAndAlign(t)
	return size
}
// allocAlign returns just the allocation alignment of t (see allocSizeAndAlign).
func allocAlign(t *types.Type) int64 {
	_, align := allocSizeAndAlign(t)
	return align
}
713
714
// newHeapaddr allocates heap storage for n and records its heap address.
// Small pointer-free allocations are batched: they share one allocation
// call that is patched to the combined size in flushPendingHeapAllocations.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		// Not eligible for batching: pointerful, too large, or zero-sized.
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type()))
		return
	}

	// Compute how much of the current batch is already used; flush it if
	// adding this object would exceed the aggregation limit.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value
	if len(s.pendingHeapAllocations) == 0 {
		// Start a new batch with a plain (non-size-specialized) allocation;
		// its size/arguments will be rewritten at flush time.
		allocCall = s.newObjectNonSpecialized(n.Type(), nil)
	} else {
		// Join the existing batch: reuse its underlying allocation call.
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}

	// v = OffPtr into the shared allocation; the final offset (AuxInt) is
	// assigned in flushPendingHeapAllocations once the batch is complete.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	// Remember this allocation so the flush can patch it.
	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	// Record v as the heap address of n.
	s.setHeapaddr(n.Pos(), n, v)
}
750
// flushPendingHeapAllocations finalizes the current batch of small
// pointer-free heap allocations started by newHeapaddr: it assigns each
// object an offset within one shared allocation and rewrites the shared
// call to allocate the combined size via mallocgc.
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return
	}
	s.pendingHeapAllocations = nil
	ptr := pending[0].Args[0] // the result pointer of the shared call
	call := ptr.Args[0]       // the shared allocation call itself

	if len(pending) == 1 {
		// Single object: keep the original newobject call and turn the
		// placeholder OffPtr into a plain copy of its result.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort by decreasing alignment so packing objects back-to-back keeps
	// every object properly aligned within the combined block.
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign each object its offset (OffPtr AuxInt) and total the size.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the shared call in place into a mallocgc(size, nil, true)
	// call (nil type, needzero=true), keeping the original memory arg.
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type),
		s.constBool(true),
		call.Args[1],
	}
	mallocSym := ir.Syms.MallocGC
	if specialMallocSym := s.specializedMallocSym(size, false); specialMallocSym != nil {
		mallocSym = specialMallocSym
	}
	call.Aux = ssa.StaticAuxCall(mallocSym, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // arg+result stack space
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// Retype the call and its result pointer to match the new callee.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
805
// specializedMallocSym returns the symbol of a size-specialized malloc
// entry point for an allocation of the given size and pointerness, or nil
// if no specialization applies and the generic path must be used.
func (s *state) specializedMallocSym(size int64, hasPointers bool) *obj.LSym {
	if !s.sizeSpecializedMallocEnabled() {
		return nil
	}
	ptrSize := s.config.PtrSize
	ptrBits := ptrSize * 8
	// Mirrors the runtime's threshold: objects up to this size keep their
	// heap bits in the span, which the specialized paths rely on.
	minSizeForMallocHeader := ptrSize * ptrBits
	heapBitsInSpan := size <= minSizeForMallocHeader
	if !heapBitsInSpan {
		return nil
	}
	divRoundUp := func(n, a uintptr) uintptr { return (n + a - 1) / a }
	sizeClass := gc.SizeToSizeClass8[divRoundUp(uintptr(size), gc.SmallSizeDiv)]
	if hasPointers {
		return ir.Syms.MallocGCSmallScanNoHeader[sizeClass]
	}
	if size < gc.TinySize {
		// Tiny (noscan) entry points are indexed by exact size, not class.
		return ir.Syms.MallocGCTiny[size]
	}
	return ir.Syms.MallocGCSmallNoScan[sizeClass]
}
827
828 func (s *state) sizeSpecializedMallocEnabled() bool {
829 if base.Flag.CompilingRuntime {
830
831
832
833
834
835
836
837 return false
838 }
839
840 return buildcfg.Experiment.SizeSpecializedMalloc && !base.Flag.Cfg.Instrumenting
841 }
842
843
844
// setHeapaddr allocates a new PAUTO variable to store ptr (which must be
// non-nil) and then sets it as the heap address (n.Heapaddr) used to
// access the heap-escaped variable n.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the "&n" variable that holds the heap address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
863
864
// newObject returns an SSA value denoting a newly heap-allocated object
// of the given type, using a size-specialized malloc entry point when one
// is available and falling back to runtime.newobject otherwise.
func (s *state) newObject(typ *types.Type) *ssa.Value {
	if typ.Size() == 0 {
		// Zero-sized allocations all share the runtime's zerobase symbol.
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	rtype := s.reflectType(typ)
	if specialMallocSym := s.specializedMallocSym(typ.Size(), typ.HasPointers()); specialMallocSym != nil {
		// Specialized path takes (size, type, needzero=true) directly.
		return s.rtcall(specialMallocSym, true, []*types.Type{types.NewPtr(typ)},
			s.constInt(types.Types[types.TUINTPTR], typ.Size()),
			rtype,
			s.constBool(true),
		)[0]
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
879
880
881
882 func (s *state) newObjectNonSpecialized(typ *types.Type, rtype *ssa.Value) *ssa.Value {
883 if typ.Size() == 0 {
884 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
885 }
886 if rtype == nil {
887 rtype = s.reflectType(typ)
888 }
889 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
890 }
891
// checkPtrAlignment emits a checkptrAlignment runtime call validating the
// unsafe.Pointer conversion n, whose converted value is v. count, if
// non-nil, is the element count for a pointer-to-array conversion.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		// Slice-from-pointer form: the checked element is the array's
		// element type, repeated count times.
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()

	// Alignment-1 elements can never be misaligned; without a count there
	// is also no overflow to check for size<=1 elements.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	// Prefer the frontend-provided runtime type expression; otherwise
	// materialize the type descriptor address directly.
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
922
923
924
// reflectType returns an SSA value representing a pointer to typ's
// runtime type descriptor (*runtime._type, typed here as *uint8).
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
931
// dumpSourcesColumn writes the "sources" column of the GOSSAFUNC HTML
// output: the target function's source lines followed by the sources of
// every function inlined into it.
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read the target function's source lines.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		// Best effort: log and continue without the target's sources.
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read the sources of inlined functions recorded by DumpInline.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	// Sort inlined functions, then place the target function first.
	slices.SortFunc(inlFns, ssa.ByTopoCmp)
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}
960
961 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
962 f, err := os.Open(os.ExpandEnv(file))
963 if err != nil {
964 return nil, err
965 }
966 defer f.Close()
967 var lines []string
968 ln := uint(1)
969 scanner := bufio.NewScanner(f)
970 for scanner.Scan() && ln <= end {
971 if ln >= start {
972 lines = append(lines, scanner.Text())
973 }
974 ln++
975 }
976 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
977 }
978
979
980
981
// updateUnsetPredPos propagates b's position backward to predecessors of b
// that are lacking a position, recursively, so every block gets some
// plausible source position for debug info.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			// Predecessor already has a meaningful position; leave it.
			continue
		}
		if bestPos == src.NoXPos {
			// Compute the position to propagate lazily, once: prefer the
			// first positioned value in b, falling back to b's own position.
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// Recurse: p's predecessors may now be fixable too.
		s.updateUnsetPredPos(p)
	}
}
1010
1011
// openDeferInfo describes one open-coded defer in the current function.
type openDeferInfo struct {
	// n is the defer's call expression.
	n *ir.CallExpr

	// closure is the SSA value of the deferred function/closure.
	closure *ssa.Value

	// closureNode is the stack slot holding the deferred closure; its
	// frame offset is encoded in the opendefer FUNCDATA (see
	// emitOpenDeferInfo, which requires these slots to be contiguous).
	closureNode *ir.Name
}
1023
// state holds all the working state of the AST-to-SSA conversion for a
// single function (see buildssa).
type state struct {
	// configuration (arch) information, shared across functions
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// IR node for the function being built
	curfn *ir.Func

	// labels in f, created on demand by (*state).label
	labels map[string]*ssaLabel

	// NOTE(review): presumably the targets of unlabeled break/continue
	// statements — set outside this chunk; confirm against stmt lowering.
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// block currently being filled in; nil between endBlock and startBlock
	curBlock *ssa.Block

	// variable assignments in the current block; reset by startBlock and
	// captured into defvars by endBlock
	vars map[ir.Node]*ssa.Value

	// NOTE(review): cleared at each startBlock; appears to hold values for
	// variables referenced before definition, resolved later (likely by
	// insertPhis) — confirm.
	fwdVars map[ir.Node]*ssa.Value

	// final variable assignments of each finished block, indexed by block ID
	// (set in endBlock)
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM/PPARAMOUT variables (populated in buildssa)
	decladdrs map[*ir.Name]*ssa.Value

	// starting values: memory, stack pointer, and static base pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// open-coded defer bookkeeping: the address and the stack temp holding
	// the deferBits byte (see buildssa and emitOpenDeferInfo)
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// stack of source positions; managed by pushLine/popLine/peekPos
	line []src.XPos

	// last known source position (updated by pushLine; used by endBlock)
	lastPos src.XPos

	// cache of shared panic-call blocks, keyed by function symbol and line
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool // function has the cgo_unsafe_args pragma
	hasdefer            bool // function contains a defer statement
	softFloat           bool // soft-float mode (from config)
	hasOpenDefers       bool // defers will be open-coded (see buildssa)
	checkPtrEnabled     bool // emit checkptr instrumentation
	instrumentEnterExit bool // emit racefuncenter/racefuncexit
	instrumentMemory    bool // emit race/msan/asan memory instrumentation

	// information about each open-coded defer, in the order the defers
	// appear; consumed by emitOpenDeferInfo
	openDefers []*openDeferInfo

	// NOTE(review): caching for the shared defer-exit code path — set and
	// used outside this chunk; confirm semantics there.
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	// NOTE(review): most recently emitted call value — used outside this
	// chunk; confirm.
	prevCall *ssa.Value

	// small pointer-free heap allocations awaiting merging into a single
	// allocation (see newHeapaddr / flushPendingHeapAllocations); flushed
	// at the latest by endBlock
	pendingHeapAllocations []*ssa.Value

	// NOTE(review): not referenced in this chunk; presumably tracks
	// targets of append optimizations — confirm.
	appendTargets map[ir.Node]bool

	// first source position recorded in each block, indexed by block ID
	// (maintained by startBlock/pushLine/endBlock)
	blockStarts []src.XPos

	// NOTE(review): not referenced in this chunk; per-node backing-store
	// tracking (see backingStoreInfo) — confirm usage elsewhere.
	backingStores map[ir.Node]*backingStoreInfo
}
1114
// backingStoreInfo describes a stack backing store for a variable-sized
// allocation. NOTE(review): none of its fields are used within this chunk;
// the field meanings below are inferred from names — confirm at use sites.
type backingStoreInfo struct {
	// K: presumably the constant capacity of the backing store.
	K int64

	// store: the stack object providing the backing storage.
	store *ir.Name

	// used: presumably a variable tracking whether the store is in use.
	used *ir.Name

	// usedStatic: presumably whether usage is known statically.
	usedStatic bool
}
1127
// funcLine keys the state.panics cache: a runtime panic function plus the
// source position (base and line) it is called from.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1133
// ssaLabel records the blocks associated with a Go label: the label's own
// target plus the targets for labeled break and continue statements.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to jump to in a break statement
	continueTarget *ssa.Block // block to jump to in a continue statement
}
1139
1140
1141 func (s *state) label(sym *types.Sym) *ssaLabel {
1142 lab := s.labels[sym.Name]
1143 if lab == nil {
1144 lab = new(ssaLabel)
1145 s.labels[sym.Name] = lab
1146 }
1147 return lab
1148 }
1149
// Thin delegation wrappers: logging and diagnostics are routed through the
// ssa.Func and its frontend; Fatalf reports at the current source position.
func (s *state) Logf(msg string, args ...any) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                    { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...any) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...any) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                        { return s.f.Frontend().Debug_checknil() }
1157
// ssaMarker returns a synthetic *ir.Name used only as a unique key in the
// state.vars/defvars maps (see the marker variables below); it never
// corresponds to a real declared variable.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1161
1162 var (
1163
1164 memVar = ssaMarker("mem")
1165
1166
1167 ptrVar = ssaMarker("ptr")
1168 lenVar = ssaMarker("len")
1169 capVar = ssaMarker("cap")
1170 typVar = ssaMarker("typ")
1171 okVar = ssaMarker("ok")
1172 deferBitsVar = ssaMarker("deferBits")
1173 hashVar = ssaMarker("hash")
1174 )
1175
1176
1177 func (s *state) startBlock(b *ssa.Block) {
1178 if s.curBlock != nil {
1179 s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
1180 }
1181 s.curBlock = b
1182 s.vars = map[ir.Node]*ssa.Value{}
1183 clear(s.fwdVars)
1184 for len(s.blockStarts) <= int(b.ID) {
1185 s.blockStarts = append(s.blockStarts, src.NoXPos)
1186 }
1187 }
1188
1189
1190
1191
// endBlock marks the end of generating code for the current block.
// It returns the (former) current block, or nil if there is no current
// block (i.e. no code flows to the current execution point).
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	// Emit any pending heap allocations before the block's final variable
	// state is captured below.
	s.flushPendingHeapAllocations()

	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// A block with no position-bearing content gets no position of its
		// own rather than inheriting s.lastPos.
		// NOTE(review): presumably a later pass fills in a position from a
		// successor — confirm against the rest of the file.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
		// Record the block's starting position if pushLine never did.
		if s.blockStarts[b.ID] == src.NoXPos {
			s.blockStarts[b.ID] = s.lastPos
		}
	}
	return b
}
1219
1220
// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// Unknown position: fall back to the current top of the stack
		// rather than recording a bogus position; warn under -K.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	// Record the first known position seen in the current block.
	if b := s.curBlock; b != nil && s.blockStarts[b.ID] == src.NoXPos {
		s.blockStarts[b.ID] = line
	}

	s.line = append(s.line, line)
}
1240
1241
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1250
1251
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary
// (true) or whether it must never be a statement boundary (false).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary
// (true) or whether it must never be a statement boundary (false).
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary
// (true) or whether it must never be a statement boundary (false).
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4A adds a new value with four arguments and an aux value to the current block.
func (s *state) newValue4A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4A(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1355
// entryBlock returns the block that entry-pinned values should be placed in.
// Normally that is the function's entry block; under -N (base.Flag.N > 0)
// the current block is used instead when one is active.
// NOTE(review): presumably this keeps such values live/attributed near their
// use for debugging — confirm upstream rationale.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		b = s.curBlock
	}
	return b
}
1367
1368
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1402
1403
// const* routines add a new const value to the function, forwarding to the
// corresponding cached-constant constructors on ssa.Func.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}
1435 func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
1436 if s.config.PtrSize == 8 {
1437 return s.constInt64(t, c)
1438 }
1439 if int64(int32(c)) != c {
1440 s.Fatalf("integer constant too big %d", c)
1441 }
1442 return s.constInt32(t, int32(c))
1443 }
1444
1445
1446
// newValueOrSfCall* are wrappers around newValue*, which may instead create
// a call to a runtime soft-float function when soft-float mode is enabled
// and sfcall can handle the op.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1463
// instrumentKind selects which kind of memory-sanitizer instrumentation
// call (read, write, or combined move) is emitted by instrument2.
type instrumentKind uint8

const (
	// Declare the constants as typed instrumentKind values rather than
	// untyped ints, so misuse as plain integers is caught at compile time.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1471
// instrument emits an instrumentation call for a single access of kind
// (read/write) to a value of type t at addr. See instrument2.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1475
1476
1477
1478
1479 func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1480 if !(base.Flag.MSan || base.Flag.ASan) || !isStructNotSIMD(t) {
1481 s.instrument(t, addr, kind)
1482 return
1483 }
1484 for _, f := range t.Fields() {
1485 if f.Sym.IsBlank() {
1486 continue
1487 }
1488 offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
1489 s.instrumentFields(f.Type, offptr, kind)
1490 }
1491 }
1492
1493 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1494 if base.Flag.MSan {
1495 s.instrument2(t, dst, src, instrumentMove)
1496 } else {
1497 s.instrument(t, src, instrumentRead)
1498 s.instrument(t, dst, instrumentWrite)
1499 }
1500 }
1501
// instrument2 emits the runtime sanitizer call for an access of kind to a
// t-typed value at addr (and, for moves, the source address addr2).
// It is a no-op when instrumentation is off, the type is zero-sized, or the
// address is known to be sanitizer-safe.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false // whether the runtime helper takes an explicit byte width

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// A multi-component access uses the race "range" helpers, which
		// take an explicit width; moves are not expected here (no
		// instrumentMove case), only read/write.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component access: the plain race helpers infer the width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	// Assemble the argument list: address, optional source address, optional width.
	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1582
// load reads a t-typed value from src, emitting sanitizer instrumentation
// for the read first.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad reads a t-typed value from src without instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store writes val (of type t) to dst and threads the new memory state
// through s.vars[memVar].
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

// zero zeroes t.Size() bytes at dst, instrumenting the write.
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t // the zeroed type rides along as the Aux
	s.vars[memVar] = store
}
1602
// move copies a t-typed value from src to dst, assuming the two regions
// do not overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}

// moveWhichMayOverlap copies a t-typed value from src to dst. If mayOverlap
// is true the regions may partially overlap, in which case OpMove (which may
// assume non-overlapping or fully-aliased operands) cannot always be used.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Overlapping multi-element array copy that won't be lowered to an
		// inlinable memmove: fall back to a runtime call, which handles
		// overlapping ranges correctly.
		// NOTE(review): single-element arrays and non-arrays presumably copy
		// all-at-once and so tolerate overlap via OpMove — confirm with the
		// OpMove lowering rules.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// Record a write-barrier position: typedmemmove performs
			// pointer writes on our behalf.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1649
1650
1651 func (s *state) stmtList(l ir.Nodes) {
1652 for _, n := range l {
1653 s.stmt(n)
1654 }
1655 }
1656
1657 func peelConvNop(n ir.Node) ir.Node {
1658 if n == nil {
1659 return n
1660 }
1661 for n.Op() == ir.OCONVNOP {
1662 n = n.(*ir.ConvExpr).X
1663 }
1664 return n
1665 }
1666
1667
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil, then we're about to generate dead code.
	// We still generate OLABEL (it may be a jump target), but everything
	// else is skipped.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	// No-op: handled by the enclosing switch lowering.
	case ir.OFALL:

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			// Calls to runtime functions that never return terminate the
			// block with BlockExit, so no spurious fallthrough edge is built.
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer strategy was chosen, for -d=defer.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		// v, ok := x.(T) — static or dynamic type assertion with comma-ok.
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSA-able result: dottype produced a load; unwrap it to its
			// address so the assignment can be done by dereference, and
			// require that memory has not advanced since the load.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// Two-result intrinsic call: split with Select0/Select1.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing to do because the label isn't targetable. See issue 52278.
			break
		}
		lab := s.label(sym)

		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to that label.
		// (We pretend "label:" is preceded by "goto label", unless the
		// preceding statement does not flow here, i.e. curBlock is nil.)
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the jump is a statement boundary
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment of a variable to itself: a no-op.
			// NOTE(review): presumably emitted by the frontend in some
			// desugarings — confirm which ones.
			return
		}

		// mayOverlap tracks whether dst and src might partially overlap:
		// only possible when both sides are dereferences of arbitrary
		// pointers. A string's data is immutable/read-only, so overlap with
		// a write destination is not a concern there.
		ny := peelConvNop(n.Y)
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && ny.Op() == ir.ODEREF)
		if ny != nil && ny.Op() == ir.ODEREF {
			p := peelConvNop(ny.(*ir.StarExpr).X)
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				mayOverlap = false
			}
		}

		// Evaluate RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk; any remaining composite literal
				// must be all-zero, which we treat as a zeroing assignment.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an append back
				// to the same slice (x = append(x, ...)); if so we can use
				// the in-place append lowering. Skip under -N.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice is SSA-able, the normal assignment path
				// below handles it.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		// deref: the value doesn't fit in SSA registers, so we work with
		// its address; r is then an address (or nil, meaning zero).
		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source
			// (x = x[i:j]). Some components of the assignment don't
			// actually change and can be skipped.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// With a zero (or omitted) low bound, the pointer is unchanged;
			// with the high/max bounds also omitted, len/cap are unchanged.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch (but still run
			// the condition's init statements).
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty then/else bodies branch straight to bEnd.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp // could use BlockExit. BlockRetJmp is mostly for clarity.
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue: innermost enclosing loop/switch/select
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the target via its label
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the jump is a statement boundary
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Ninit; Cond; Post { Body }
		// cond (Cond); body (Body); incr (Post)
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have correct position; issue #30167
		bBody.Pos = n.Pos()

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			// no condition: infinite loop
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// It can happen that bIncr ends in a block containing only VARKILL,
			// and that muddles the debugging experience.
			// NOTE(review): hedged — position fixup rationale inferred.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by the front end into their
		// Compiled form. The statements only remain to update break targets.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled switch/select
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// walk adds explicit OBREAK nodes to the end of all reachable code paths.
		// If we still have a current block here, then mark it unreachable.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Cases are sorted, so min/max are the first and last case values.
		// Subtracting min and comparing unsigned against (max-min) performs
		// both range checks at once.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		// Compare idx-min with max-min, to see if we can use the jump table.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)   // in range - use jump table
		b.AddEdgeTo(bEnd) // out of range - no case in the jump table will trigger
		b.Likely = ssa.BranchLikely

		// Build jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Figure out where we should go for each index in the jump table.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd // default target
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target with the case's label block.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Inline a cache lookup before the runtime call when optimizing and
		// the target supports the interface-switch cache.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// Note: we can only use the cache if we have the right atomic load instruction.
			// Double-check that here.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-width arithmetic ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load the cache pointer out of the descriptor, with an atomic
			// load so we can't see a partially initialized cache.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize hash variable (zero-extended to uintptr width).
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load mask from cache (its first word).
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Open-coded probing loop over the cache's entry array
			// (entries start one pointer past the mask; each entry is 3
			// pointers: type, case index, itab).
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance hash for the next probe (linear probing).
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Does this entry's stored type match t?
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// A nil entry type marks an empty slot: cache miss.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Cache hit: assign the cached case index and itab.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Cache miss: fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Both the cache-hit path and the runtime-call path join here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		_ = s.nilCheck(p)
		// TODO: check that throwing away the nilcheck result is ok.

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2360
2361
2362
// shareDeferExits, if true, lets function exits with open-coded defers share
// a single defer-exit sequence when the set of recorded defers is unchanged
// (see exit, lastDeferExit, lastDeferCount). Currently disabled.
// NOTE(review): the reason it is disabled is not visible in this chunk.
const shareDeferExits = false
2364
2365
2366
2367
// exit processes any code that needs to be generated just before returning
// from the function: running deferred calls, storing/collecting result
// values, and ending the current block with a BlockRet. It returns the
// final block of the function.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated open-coded defer exit.
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers: call runtime.deferreturn. Use the
			// function's end line so the call isn't attributed to a random
			// statement's position.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Collect the result values (plus the final memory state) for the
	// MakeResult value that controls the return block.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)

	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// VarDef marks the (stack) result slot as about to be fully
				// defined, for liveness purposes.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Result escaped to the heap: read it back through Heapaddr.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Non-SSA-able result on the stack: dereference its address.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Race-exit instrumentation comes after results are read but before the
	// return itself.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2443
// opAndType is the key type of opToSSA: a generic IR operator paired with an
// operand kind, selecting the concrete width/signedness-specific SSA op.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2448
// opToSSA maps an (IR operator, concrete operand kind) pair to the generic
// SSA opcode that implements it. Operand kinds here are the fixed-size kinds
// produced by (*state).concreteEtype; platform-dependent TINT/TUINT/TUINTPTR
// never appear as keys. Lookups happen via (*state).ssaOp, which reports a
// fatal error for any missing combination.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}:    ssa.OpAdd8,
	{ir.OADD, types.TUINT8}:   ssa.OpAdd8,
	{ir.OADD, types.TINT16}:   ssa.OpAdd16,
	{ir.OADD, types.TUINT16}:  ssa.OpAdd16,
	{ir.OADD, types.TINT32}:   ssa.OpAdd32,
	{ir.OADD, types.TUINT32}:  ssa.OpAdd32,
	{ir.OADD, types.TINT64}:   ssa.OpAdd64,
	{ir.OADD, types.TUINT64}:  ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}:    ssa.OpSub8,
	{ir.OSUB, types.TUINT8}:   ssa.OpSub8,
	{ir.OSUB, types.TINT16}:   ssa.OpSub16,
	{ir.OSUB, types.TUINT16}:  ssa.OpSub16,
	{ir.OSUB, types.TINT32}:   ssa.OpSub32,
	{ir.OSUB, types.TUINT32}:  ssa.OpSub32,
	{ir.OSUB, types.TINT64}:   ssa.OpSub64,
	{ir.OSUB, types.TUINT64}:  ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}:    ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}:   ssa.OpNeg8,
	{ir.ONEG, types.TINT16}:   ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}:  ssa.OpNeg16,
	{ir.ONEG, types.TINT32}:   ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}:  ssa.OpNeg32,
	{ir.ONEG, types.TINT64}:   ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}:  ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}:   ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}:  ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}:  ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}:  ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}:  ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}:  ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}:  ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}:    ssa.OpMul8,
	{ir.OMUL, types.TUINT8}:   ssa.OpMul8,
	{ir.OMUL, types.TINT16}:   ssa.OpMul16,
	{ir.OMUL, types.TUINT16}:  ssa.OpMul16,
	{ir.OMUL, types.TINT32}:   ssa.OpMul32,
	{ir.OMUL, types.TUINT32}:  ssa.OpMul32,
	{ir.OMUL, types.TINT64}:   ssa.OpMul64,
	{ir.OMUL, types.TUINT64}:  ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division and modulus use distinct signed/unsigned opcodes,
	// unlike the sign-agnostic ops above.
	{ir.ODIV, types.TINT8}:   ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}:  ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}:  ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}:  ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}:  ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}:   ssa.OpMod8,
	{ir.OMOD, types.TUINT8}:  ssa.OpMod8u,
	{ir.OMOD, types.TINT16}:  ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}:  ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}:  ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}:   ssa.OpAnd8,
	{ir.OAND, types.TUINT8}:  ssa.OpAnd8,
	{ir.OAND, types.TINT16}:  ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}:  ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}:  ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}:   ssa.OpOr8,
	{ir.OOR, types.TUINT8}:  ssa.OpOr8,
	{ir.OOR, types.TINT16}:  ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}:  ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}:  ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}:   ssa.OpXor8,
	{ir.OXOR, types.TUINT8}:  ssa.OpXor8,
	{ir.OXOR, types.TINT16}:  ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}:  ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}:  ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality on func/map/chan values and pointers all compare a single
	// pointer word, hence OpEqPtr/OpNeqPtr below.
	{ir.OEQ, types.TBOOL}:      ssa.OpEqB,
	{ir.OEQ, types.TINT8}:      ssa.OpEq8,
	{ir.OEQ, types.TUINT8}:     ssa.OpEq8,
	{ir.OEQ, types.TINT16}:     ssa.OpEq16,
	{ir.OEQ, types.TUINT16}:    ssa.OpEq16,
	{ir.OEQ, types.TINT32}:     ssa.OpEq32,
	{ir.OEQ, types.TUINT32}:    ssa.OpEq32,
	{ir.OEQ, types.TINT64}:     ssa.OpEq64,
	{ir.OEQ, types.TUINT64}:    ssa.OpEq64,
	{ir.OEQ, types.TINTER}:     ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}:     ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}:      ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}:       ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}:      ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}:       ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}:   ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}:   ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}:   ssa.OpEq32F,

	{ir.ONE, types.TBOOL}:      ssa.OpNeqB,
	{ir.ONE, types.TINT8}:      ssa.OpNeq8,
	{ir.ONE, types.TUINT8}:     ssa.OpNeq8,
	{ir.ONE, types.TINT16}:     ssa.OpNeq16,
	{ir.ONE, types.TUINT16}:    ssa.OpNeq16,
	{ir.ONE, types.TINT32}:     ssa.OpNeq32,
	{ir.ONE, types.TUINT32}:    ssa.OpNeq32,
	{ir.ONE, types.TINT64}:     ssa.OpNeq64,
	{ir.ONE, types.TUINT64}:    ssa.OpNeq64,
	{ir.ONE, types.TINTER}:     ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}:     ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}:      ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}:       ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}:      ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}:       ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}:   ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}:   ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}:   ssa.OpNeq32F,

	// Ordered comparisons: unsigned kinds use the *U variants.
	// OGE/OGT never appear as keys; the caller canonicalizes them to
	// OLE/OLT with swapped operands before looking up.
	{ir.OLT, types.TINT8}:    ssa.OpLess8,
	{ir.OLT, types.TUINT8}:   ssa.OpLess8U,
	{ir.OLT, types.TINT16}:   ssa.OpLess16,
	{ir.OLT, types.TUINT16}:  ssa.OpLess16U,
	{ir.OLT, types.TINT32}:   ssa.OpLess32,
	{ir.OLT, types.TUINT32}:  ssa.OpLess32U,
	{ir.OLT, types.TINT64}:   ssa.OpLess64,
	{ir.OLT, types.TUINT64}:  ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}:    ssa.OpLeq8,
	{ir.OLE, types.TUINT8}:   ssa.OpLeq8U,
	{ir.OLE, types.TINT16}:   ssa.OpLeq16,
	{ir.OLE, types.TUINT16}:  ssa.OpLeq16U,
	{ir.OLE, types.TINT32}:   ssa.OpLeq32,
	{ir.OLE, types.TUINT32}:  ssa.OpLeq32U,
	{ir.OLE, types.TINT64}:   ssa.OpLeq64,
	{ir.OLE, types.TUINT64}:  ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2620
2621 func (s *state) concreteEtype(t *types.Type) types.Kind {
2622 e := t.Kind()
2623 switch e {
2624 default:
2625 return e
2626 case types.TINT:
2627 if s.config.PtrSize == 8 {
2628 return types.TINT64
2629 }
2630 return types.TINT32
2631 case types.TUINT:
2632 if s.config.PtrSize == 8 {
2633 return types.TUINT64
2634 }
2635 return types.TUINT32
2636 case types.TUINTPTR:
2637 if s.config.PtrSize == 8 {
2638 return types.TUINT64
2639 }
2640 return types.TUINT32
2641 }
2642 }
2643
2644 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2645 etype := s.concreteEtype(t)
2646 x, ok := opToSSA[opAndType{op, etype}]
2647 if !ok {
2648 s.Fatalf("unhandled binary op %v %s", op, etype)
2649 }
2650 return x
2651 }
2652
// opAndTwoTypes is the lookup key of shiftOpToSSA: an IR shift operator
// together with the concrete kind of the shifted value (etype1) and the
// concrete kind of the shift amount (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2658
// twoTypes is the lookup key of the float-conversion tables: the concrete
// kind converted from (etype1) and the concrete kind converted to (etype2).
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2663
// twoOpsAndType describes how to perform a conversion as a pair of SSA ops:
// op1 converts the source into intermediateType, then op2 converts that
// intermediate value into the destination type. Either op may be ssa.OpCopy
// (a no-op step) or ssa.OpInvalid (the step needs special expansion in conv).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2669
// fpConvOpToSSA is the default table of floating-point conversions, mapping a
// (from, to) pair of concrete kinds to the two-op sequence that performs the
// conversion through an intermediate type. Entries containing ssa.OpInvalid
// mark conversions (uint32/uint64 <-> float) that cannot be done with a
// single op pair on all targets; conv detects those and either substitutes an
// architecture-specific table or expands them into helper-call sequences.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32: widen to a native int size, then convert.
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float -> signed int: convert to a native int size, then truncate.
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float: zero-extend into a wider signed value first;
	// uint64 has no wider type, so it needs special handling (OpInvalid).
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float -> unsigned int: small targets go through a signed conversion
	// plus truncation; uint32/uint64 need special handling (OpInvalid).
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float. Same-size conversions still round (OpRound*F),
	// which matters for values held in higher-precision registers.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2718
2719
2720
// fpConvOpToSSA32 overrides fpConvOpToSSA entries for uint32<->float
// conversions on targets that convert them directly (used by conv when
// RegSize == 4, excluding MIPS and softfloat — see conv).
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2727
2728
// uint64fpConvOpToSSA overrides fpConvOpToSSA entries for uint64<->float
// conversions on targets with direct instructions for them (ARM64, Wasm,
// S390X) and under softfloat — see conv.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2735
// shiftOpToSSA maps a shift operator plus the concrete kinds of the shifted
// value and the shift amount to the corresponding SSA opcode. The opcode
// encodes the value width, the amount width, and — for right shifts — the
// signedness of the value (arithmetic Rsh*x* vs. logical Rsh*Ux*).
// Shift amounts are always looked up as unsigned kinds; signed amounts are
// range-checked and converted by the caller before lookup (see ssaShiftOp's
// caller in expr).
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2809
2810 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2811 etype1 := s.concreteEtype(t)
2812 etype2 := s.concreteEtype(u)
2813 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2814 if !ok {
2815 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2816 }
2817 return x
2818 }
2819
2820 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2821 if s.config.PtrSize == 4 {
2822 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2823 }
2824 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2825 }
2826
// conv converts the SSA value v from type ft to type tt, emitting whatever
// op sequence the conversion requires. n is the IR expression being
// converted; it supplies source position information for runtime-call
// expansions and debug hashing. conv reports a fatal compiler error for any
// conversion it does not recognize.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// Bool-to-uint8 has its own op rather than a generic int conversion.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// Truncation. The switch key encodes (from-size, to-size) in
			// bytes as a two-digit number, e.g. 84 = 8-byte -> 4-byte.
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// Widening of a signed source: sign extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// Widening of an unsigned source: zero extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	// Complex-to-complex conversions convert the real and imaginary parts
	// independently with the chosen float op.
	if ft.IsComplex() && tt.IsComplex() {
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Non-complex -> complex: convert to the element float type and
		// pair with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		cft, ctt := s.concreteEtype(ft), s.concreteEtype(tt)
		conv, ok := fpConvOpToSSA[twoTypes{cft, ctt}]

		// float -> uint32, optionally gated by the ConvertHash debug hash:
		// when the hash says "old behavior", go through a signed 64-bit
		// conversion and truncate, instead of whatever the tables below
		// would pick.
		if ctt == types.TUINT32 && ft.IsFloat() && !base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil) {
			conv.op1 = ssa.OpCvt64Fto64
			if cft == types.TFLOAT32 {
				conv.op1 = ssa.OpCvt32Fto64
			}
			conv.op2 = ssa.OpTrunc64to32
		}
		// 32-bit targets (except MIPS, and not under softfloat) have direct
		// uint32<->float conversions; prefer that table where it applies.
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		// Targets with direct uint64<->float instructions (and softfloat,
		// which has helper routines) override with their own table.
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// MIPS (hardfloat) handles uint32<->float via expanded branch
		// sequences rather than table entries.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 then op2, skipping OpCopy steps.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}

		// Remaining cases have an OpInvalid step: unsigned 64-bit
		// conversions that need expanded sequences.
		if ft.IsInteger() {
			// uint64 -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}

		// float -> unsigned integer.
		if ft.Size() == 4 {
			switch tt.Size() {
			case 8:
				return s.float32ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Narrower unsigned results go through the uint32 path.
				return s.float32ToUint32(n, v, ft, tt)
			}
		}
		if ft.Size() == 8 {
			switch tt.Size() {
			case 8:
				return s.float64ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Narrower unsigned results go through the uint32 path.
				return s.float64ToUint32(n, v, ft, tt)
			}

		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
3027
3028
// expr converts the expression n to SSA, adds it to s, and returns the SSA
// result. It is exprCheckPtr with checkptr instrumentation enabled.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
3032
3033 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
3034 if ir.HasUniquePos(n) {
3035
3036
3037 s.pushLine(n.Pos())
3038 defer s.popLine()
3039 }
3040
3041 s.stmtList(n.Init())
3042 switch n.Op() {
3043 case ir.OBYTES2STRTMP:
3044 n := n.(*ir.ConvExpr)
3045 slice := s.expr(n.X)
3046 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
3047 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3048 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
3049 case ir.OSTR2BYTESTMP:
3050 n := n.(*ir.ConvExpr)
3051 str := s.expr(n.X)
3052 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
3053 if !n.NonNil() {
3054
3055
3056
3057 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
3058 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
3059 ptr = s.ternary(cond, ptr, zerobase)
3060 }
3061 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
3062 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
3063 case ir.OCFUNC:
3064 n := n.(*ir.UnaryExpr)
3065 aux := n.X.(*ir.Name).Linksym()
3066
3067
3068 if aux.ABI() != obj.ABIInternal {
3069 s.Fatalf("expected ABIInternal: %v", aux.ABI())
3070 }
3071 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
3072 case ir.ONAME:
3073 n := n.(*ir.Name)
3074 if n.Class == ir.PFUNC {
3075
3076 sym := staticdata.FuncLinksym(n)
3077 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
3078 }
3079 if s.canSSA(n) {
3080 return s.variable(n, n.Type())
3081 }
3082 return s.load(n.Type(), s.addr(n))
3083 case ir.OLINKSYMOFFSET:
3084 n := n.(*ir.LinksymOffsetExpr)
3085 return s.load(n.Type(), s.addr(n))
3086 case ir.ONIL:
3087 n := n.(*ir.NilExpr)
3088 t := n.Type()
3089 switch {
3090 case t.IsSlice():
3091 return s.constSlice(t)
3092 case t.IsInterface():
3093 return s.constInterface(t)
3094 default:
3095 return s.constNil(t)
3096 }
3097 case ir.OLITERAL:
3098 switch u := n.Val(); u.Kind() {
3099 case constant.Int:
3100 i := ir.IntVal(n.Type(), u)
3101 switch n.Type().Size() {
3102 case 1:
3103 return s.constInt8(n.Type(), int8(i))
3104 case 2:
3105 return s.constInt16(n.Type(), int16(i))
3106 case 4:
3107 return s.constInt32(n.Type(), int32(i))
3108 case 8:
3109 return s.constInt64(n.Type(), i)
3110 default:
3111 s.Fatalf("bad integer size %d", n.Type().Size())
3112 return nil
3113 }
3114 case constant.String:
3115 i := constant.StringVal(u)
3116 if i == "" {
3117 return s.constEmptyString(n.Type())
3118 }
3119 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3120 case constant.Bool:
3121 return s.constBool(constant.BoolVal(u))
3122 case constant.Float:
3123 f, _ := constant.Float64Val(u)
3124 switch n.Type().Size() {
3125 case 4:
3126 return s.constFloat32(n.Type(), f)
3127 case 8:
3128 return s.constFloat64(n.Type(), f)
3129 default:
3130 s.Fatalf("bad float size %d", n.Type().Size())
3131 return nil
3132 }
3133 case constant.Complex:
3134 re, _ := constant.Float64Val(constant.Real(u))
3135 im, _ := constant.Float64Val(constant.Imag(u))
3136 switch n.Type().Size() {
3137 case 8:
3138 pt := types.Types[types.TFLOAT32]
3139 return s.newValue2(ssa.OpComplexMake, n.Type(),
3140 s.constFloat32(pt, re),
3141 s.constFloat32(pt, im))
3142 case 16:
3143 pt := types.Types[types.TFLOAT64]
3144 return s.newValue2(ssa.OpComplexMake, n.Type(),
3145 s.constFloat64(pt, re),
3146 s.constFloat64(pt, im))
3147 default:
3148 s.Fatalf("bad complex size %d", n.Type().Size())
3149 return nil
3150 }
3151 default:
3152 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3153 return nil
3154 }
3155 case ir.OCONVNOP:
3156 n := n.(*ir.ConvExpr)
3157 to := n.Type()
3158 from := n.X.Type()
3159
3160
3161
3162 x := s.expr(n.X)
3163 if to == from {
3164 return x
3165 }
3166
3167
3168
3169
3170
3171 if to.IsPtrShaped() != from.IsPtrShaped() {
3172 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3173 }
3174
3175 v := s.newValue1(ssa.OpCopy, to, x)
3176
3177
3178 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3179 return v
3180 }
3181
3182
3183 if from.Kind() == to.Kind() {
3184 return v
3185 }
3186
3187
3188 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3189 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3190 s.checkPtrAlignment(n, v, nil)
3191 }
3192 return v
3193 }
3194
3195
3196 mt := types.NewPtr(reflectdata.MapType())
3197 if to.Kind() == types.TMAP && from == mt {
3198 return v
3199 }
3200
3201 types.CalcSize(from)
3202 types.CalcSize(to)
3203 if from.Size() != to.Size() {
3204 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3205 return nil
3206 }
3207 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3208 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3209 return nil
3210 }
3211
3212 if base.Flag.Cfg.Instrumenting {
3213
3214
3215
3216 return v
3217 }
3218
3219 if etypesign(from.Kind()) == 0 {
3220 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3221 return nil
3222 }
3223
3224
3225 return v
3226
3227 case ir.OCONV:
3228 n := n.(*ir.ConvExpr)
3229 x := s.expr(n.X)
3230 return s.conv(n, x, n.X.Type(), n.Type())
3231
3232 case ir.ODOTTYPE:
3233 n := n.(*ir.TypeAssertExpr)
3234 res, _ := s.dottype(n, false)
3235 return res
3236
3237 case ir.ODYNAMICDOTTYPE:
3238 n := n.(*ir.DynamicTypeAssertExpr)
3239 res, _ := s.dynamicDottype(n, false)
3240 return res
3241
3242
3243 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3244 n := n.(*ir.BinaryExpr)
3245 a := s.expr(n.X)
3246 b := s.expr(n.Y)
3247 if n.X.Type().IsComplex() {
3248 pt := types.FloatForComplex(n.X.Type())
3249 op := s.ssaOp(ir.OEQ, pt)
3250 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3251 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3252 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3253 switch n.Op() {
3254 case ir.OEQ:
3255 return c
3256 case ir.ONE:
3257 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3258 default:
3259 s.Fatalf("ordered complex compare %v", n.Op())
3260 }
3261 }
3262
3263
3264 op := n.Op()
3265 switch op {
3266 case ir.OGE:
3267 op, a, b = ir.OLE, b, a
3268 case ir.OGT:
3269 op, a, b = ir.OLT, b, a
3270 }
3271 if n.X.Type().IsFloat() {
3272
3273 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3274 }
3275
3276 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3277 case ir.OMUL:
3278 n := n.(*ir.BinaryExpr)
3279 a := s.expr(n.X)
3280 b := s.expr(n.Y)
3281 if n.Type().IsComplex() {
3282 mulop := ssa.OpMul64F
3283 addop := ssa.OpAdd64F
3284 subop := ssa.OpSub64F
3285 pt := types.FloatForComplex(n.Type())
3286 wt := types.Types[types.TFLOAT64]
3287
3288 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3289 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3290 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3291 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3292
3293 if pt != wt {
3294 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3295 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3296 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3297 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3298 }
3299
3300 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3301 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3302
3303 if pt != wt {
3304 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3305 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3306 }
3307
3308 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3309 }
3310
3311 if n.Type().IsFloat() {
3312 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3313 }
3314
3315 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3316
3317 case ir.ODIV:
3318 n := n.(*ir.BinaryExpr)
3319 a := s.expr(n.X)
3320 b := s.expr(n.Y)
3321 if n.Type().IsComplex() {
3322
3323
3324
3325 mulop := ssa.OpMul64F
3326 addop := ssa.OpAdd64F
3327 subop := ssa.OpSub64F
3328 divop := ssa.OpDiv64F
3329 pt := types.FloatForComplex(n.Type())
3330 wt := types.Types[types.TFLOAT64]
3331
3332 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3333 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3334 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3335 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3336
3337 if pt != wt {
3338 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3339 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3340 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3341 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3342 }
3343
3344 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3345 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3346 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3347
3348
3349
3350
3351
3352 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3353 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3354
3355 if pt != wt {
3356 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3357 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3358 }
3359 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3360 }
3361 if n.Type().IsFloat() {
3362 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3363 }
3364 return s.intDivide(n, a, b)
3365 case ir.OMOD:
3366 n := n.(*ir.BinaryExpr)
3367 a := s.expr(n.X)
3368 b := s.expr(n.Y)
3369 return s.intDivide(n, a, b)
3370 case ir.OADD, ir.OSUB:
3371 n := n.(*ir.BinaryExpr)
3372 a := s.expr(n.X)
3373 b := s.expr(n.Y)
3374 if n.Type().IsComplex() {
3375 pt := types.FloatForComplex(n.Type())
3376 op := s.ssaOp(n.Op(), pt)
3377 return s.newValue2(ssa.OpComplexMake, n.Type(),
3378 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3379 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3380 }
3381 if n.Type().IsFloat() {
3382 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3383 }
3384 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3385 case ir.OAND, ir.OOR, ir.OXOR:
3386 n := n.(*ir.BinaryExpr)
3387 a := s.expr(n.X)
3388 b := s.expr(n.Y)
3389 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3390 case ir.OANDNOT:
3391 n := n.(*ir.BinaryExpr)
3392 a := s.expr(n.X)
3393 b := s.expr(n.Y)
3394 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3395 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3396 case ir.OLSH, ir.ORSH:
3397 n := n.(*ir.BinaryExpr)
3398 a := s.expr(n.X)
3399 b := s.expr(n.Y)
3400 bt := b.Type
3401 if bt.IsSigned() {
3402 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3403 s.check(cmp, ir.Syms.Panicshift)
3404 bt = bt.ToUnsigned()
3405 }
3406 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3407 case ir.OANDAND, ir.OOROR:
3408
3409
3410
3411
3412
3413
3414
3415
3416
3417
3418
3419
3420
3421 n := n.(*ir.LogicalExpr)
3422 el := s.expr(n.X)
3423 s.vars[n] = el
3424
3425 b := s.endBlock()
3426 b.Kind = ssa.BlockIf
3427 b.SetControl(el)
3428
3429
3430
3431
3432
3433 bRight := s.f.NewBlock(ssa.BlockPlain)
3434 bResult := s.f.NewBlock(ssa.BlockPlain)
3435 if n.Op() == ir.OANDAND {
3436 b.AddEdgeTo(bRight)
3437 b.AddEdgeTo(bResult)
3438 } else if n.Op() == ir.OOROR {
3439 b.AddEdgeTo(bResult)
3440 b.AddEdgeTo(bRight)
3441 }
3442
3443 s.startBlock(bRight)
3444 er := s.expr(n.Y)
3445 s.vars[n] = er
3446
3447 b = s.endBlock()
3448 b.AddEdgeTo(bResult)
3449
3450 s.startBlock(bResult)
3451 return s.variable(n, types.Types[types.TBOOL])
3452 case ir.OCOMPLEX:
3453 n := n.(*ir.BinaryExpr)
3454 r := s.expr(n.X)
3455 i := s.expr(n.Y)
3456 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3457
3458
3459 case ir.ONEG:
3460 n := n.(*ir.UnaryExpr)
3461 a := s.expr(n.X)
3462 if n.Type().IsComplex() {
3463 tp := types.FloatForComplex(n.Type())
3464 negop := s.ssaOp(n.Op(), tp)
3465 return s.newValue2(ssa.OpComplexMake, n.Type(),
3466 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3467 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3468 }
3469 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3470 case ir.ONOT, ir.OBITNOT:
3471 n := n.(*ir.UnaryExpr)
3472 a := s.expr(n.X)
3473 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3474 case ir.OIMAG, ir.OREAL:
3475 n := n.(*ir.UnaryExpr)
3476 a := s.expr(n.X)
3477 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3478 case ir.OPLUS:
3479 n := n.(*ir.UnaryExpr)
3480 return s.expr(n.X)
3481
3482 case ir.OADDR:
3483 n := n.(*ir.AddrExpr)
3484 return s.addr(n.X)
3485
3486 case ir.ORESULT:
3487 n := n.(*ir.ResultExpr)
3488 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3489 panic("Expected to see a previous call")
3490 }
3491 which := n.Index
3492 if which == -1 {
3493 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3494 }
3495 return s.resultOfCall(s.prevCall, which, n.Type())
3496
3497 case ir.ODEREF:
3498 n := n.(*ir.StarExpr)
3499 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3500 return s.load(n.Type(), p)
3501
3502 case ir.ODOT:
3503 n := n.(*ir.SelectorExpr)
3504 if n.X.Op() == ir.OSTRUCTLIT {
3505
3506
3507
3508 if !ir.IsZero(n.X) {
3509 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3510 }
3511 return s.zeroVal(n.Type())
3512 }
3513
3514
3515
3516
3517 if ir.IsAddressable(n) && !s.canSSA(n) {
3518 p := s.addr(n)
3519 return s.load(n.Type(), p)
3520 }
3521 v := s.expr(n.X)
3522 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3523
3524 case ir.ODOTPTR:
3525 n := n.(*ir.SelectorExpr)
3526 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3527 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3528 return s.load(n.Type(), p)
3529
3530 case ir.OINDEX:
3531 n := n.(*ir.IndexExpr)
3532 switch {
3533 case n.X.Type().IsString():
3534 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3535
3536
3537
3538 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3539 }
3540 a := s.expr(n.X)
3541 i := s.expr(n.Index)
3542 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3543 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3544 ptrtyp := s.f.Config.Types.BytePtr
3545 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3546 if ir.IsConst(n.Index, constant.Int) {
3547 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3548 } else {
3549 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3550 }
3551 return s.load(types.Types[types.TUINT8], ptr)
3552 case n.X.Type().IsSlice():
3553 p := s.addr(n)
3554 return s.load(n.X.Type().Elem(), p)
3555 case n.X.Type().IsArray():
3556 if ssa.CanSSA(n.X.Type()) {
3557
3558 bound := n.X.Type().NumElem()
3559 a := s.expr(n.X)
3560 i := s.expr(n.Index)
3561 if bound == 0 {
3562
3563
3564 z := s.constInt(types.Types[types.TINT], 0)
3565 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3566
3567
3568 return s.load(n.Type(), s.constNil(n.Type().PtrTo()))
3569 }
3570 len := s.constInt(types.Types[types.TINT], bound)
3571 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3572 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3573 }
3574 p := s.addr(n)
3575 return s.load(n.X.Type().Elem(), p)
3576 default:
3577 s.Fatalf("bad type for index %v", n.X.Type())
3578 return nil
3579 }
3580
3581 case ir.OLEN, ir.OCAP:
3582 n := n.(*ir.UnaryExpr)
3583
3584
3585 a := s.expr(n.X)
3586 t := n.X.Type()
3587 switch {
3588 case t.IsSlice():
3589 op := ssa.OpSliceLen
3590 if n.Op() == ir.OCAP {
3591 op = ssa.OpSliceCap
3592 }
3593 return s.newValue1(op, types.Types[types.TINT], a)
3594 case t.IsString():
3595 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3596 case t.IsMap(), t.IsChan():
3597 return s.referenceTypeBuiltin(n, a)
3598 case t.IsArray():
3599 return s.constInt(types.Types[types.TINT], t.NumElem())
3600 case t.IsPtr() && t.Elem().IsArray():
3601 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3602 default:
3603 s.Fatalf("bad type in len/cap: %v", t)
3604 return nil
3605 }
3606
3607 case ir.OSPTR:
3608 n := n.(*ir.UnaryExpr)
3609 a := s.expr(n.X)
3610 if n.X.Type().IsSlice() {
3611 if n.Bounded() {
3612 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3613 }
3614 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3615 } else {
3616 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3617 }
3618
3619 case ir.OITAB:
3620 n := n.(*ir.UnaryExpr)
3621 a := s.expr(n.X)
3622 return s.newValue1(ssa.OpITab, n.Type(), a)
3623
3624 case ir.OIDATA:
3625 n := n.(*ir.UnaryExpr)
3626 a := s.expr(n.X)
3627 return s.newValue1(ssa.OpIData, n.Type(), a)
3628
3629 case ir.OMAKEFACE:
3630 n := n.(*ir.BinaryExpr)
3631 tab := s.expr(n.X)
3632 data := s.expr(n.Y)
3633 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3634
3635 case ir.OSLICEHEADER:
3636 n := n.(*ir.SliceHeaderExpr)
3637 p := s.expr(n.Ptr)
3638 l := s.expr(n.Len)
3639 c := s.expr(n.Cap)
3640 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3641
3642 case ir.OSTRINGHEADER:
3643 n := n.(*ir.StringHeaderExpr)
3644 p := s.expr(n.Ptr)
3645 l := s.expr(n.Len)
3646 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3647
3648 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3649 n := n.(*ir.SliceExpr)
3650 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3651 v := s.exprCheckPtr(n.X, !check)
3652 var i, j, k *ssa.Value
3653 if n.Low != nil {
3654 i = s.expr(n.Low)
3655 }
3656 if n.High != nil {
3657 j = s.expr(n.High)
3658 }
3659 if n.Max != nil {
3660 k = s.expr(n.Max)
3661 }
3662 p, l, c := s.slice(v, i, j, k, n.Bounded())
3663 if check {
3664
3665 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3666 }
3667 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3668
3669 case ir.OSLICESTR:
3670 n := n.(*ir.SliceExpr)
3671 v := s.expr(n.X)
3672 var i, j *ssa.Value
3673 if n.Low != nil {
3674 i = s.expr(n.Low)
3675 }
3676 if n.High != nil {
3677 j = s.expr(n.High)
3678 }
3679 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3680 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3681
3682 case ir.OSLICE2ARRPTR:
3683
3684
3685
3686
3687 n := n.(*ir.ConvExpr)
3688 v := s.expr(n.X)
3689 nelem := n.Type().Elem().NumElem()
3690 arrlen := s.constInt(types.Types[types.TINT], nelem)
3691 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3692 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3693 op := ssa.OpSlicePtr
3694 if nelem == 0 {
3695 op = ssa.OpSlicePtrUnchecked
3696 }
3697 return s.newValue1(op, n.Type(), v)
3698
3699 case ir.OCALLFUNC:
3700 n := n.(*ir.CallExpr)
3701 if ir.IsIntrinsicCall(n) {
3702 return s.intrinsicCall(n)
3703 }
3704 fallthrough
3705
3706 case ir.OCALLINTER:
3707 n := n.(*ir.CallExpr)
3708 return s.callResult(n, callNormal)
3709
3710 case ir.OGETG:
3711 n := n.(*ir.CallExpr)
3712 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3713
3714 case ir.OGETCALLERSP:
3715 n := n.(*ir.CallExpr)
3716 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3717
3718 case ir.OAPPEND:
3719 return s.append(n.(*ir.CallExpr), false)
3720
3721 case ir.OMOVE2HEAP:
3722 return s.move2heap(n.(*ir.MoveToHeapExpr))
3723
3724 case ir.OMIN, ir.OMAX:
3725 return s.minMax(n.(*ir.CallExpr))
3726
3727 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3728
3729
3730
3731 n := n.(*ir.CompLitExpr)
3732 if !ir.IsZero(n) {
3733 s.Fatalf("literal with nonzero value in SSA: %v", n)
3734 }
3735 return s.zeroVal(n.Type())
3736
3737 case ir.ONEW:
3738 n := n.(*ir.UnaryExpr)
3739 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3740 return s.newObjectNonSpecialized(n.Type().Elem(), s.expr(x.RType))
3741 }
3742 return s.newObject(n.Type().Elem())
3743
3744 case ir.OUNSAFEADD:
3745 n := n.(*ir.BinaryExpr)
3746 ptr := s.expr(n.X)
3747 len := s.expr(n.Y)
3748
3749
3750
3751 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3752
3753 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3754
3755 default:
3756 s.Fatalf("unhandled expr %v", n.Op())
3757 return nil
3758 }
3759 }
3760
3761 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3762 aux := c.Aux.(*ssa.AuxCall)
3763 pa := aux.ParamAssignmentForResult(which)
3764
3765
3766 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3767 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3768 return s.rawLoad(t, addr)
3769 }
3770 return s.newValue1I(ssa.OpSelectN, t, which, c)
3771 }
3772
3773 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3774 aux := c.Aux.(*ssa.AuxCall)
3775 pa := aux.ParamAssignmentForResult(which)
3776 if len(pa.Registers) == 0 {
3777 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3778 }
3779 _, addr := s.temp(c.Pos, t)
3780 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3781 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3782 return addr
3783 }
3784
3785
3786 func (s *state) getBackingStoreInfoForAppend(n *ir.CallExpr) *backingStoreInfo {
3787 if n.Esc() != ir.EscNone {
3788 return nil
3789 }
3790 return s.getBackingStoreInfo(n.Args[0])
3791 }
// getBackingStoreInfo returns (allocating on first use, then memoized per
// node) a fixed-size stack backing store that can hold the contents of the
// slice-typed expression n, or nil if no stack backing store may be used.
func (s *state) getBackingStoreInfo(n ir.Node) *backingStoreInfo {
	t := n.Type()
	et := t.Elem()
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	// Zero-size elements need no storage; elements larger than the budget
	// would not fit even once.
	if et.Size() == 0 || et.Size() > maxStackSize {
		return nil
	}
	if base.Flag.N != 0 {
		// Optimizations are disabled (-N); don't do this transformation.
		return nil
	}
	if !base.VariableMakeHash.MatchPos(n.Pos(), nil) {
		// Debug hash excludes this position (used for bisecting).
		return nil
	}
	// Reuse the backing store already set up for this node, if any.
	i := s.backingStores[n]
	if i != nil {
		return i
	}

	// Build the backing store's type: a [K]et array filling the stack
	// budget, ...
	K := maxStackSize / et.Size()
	KT := types.NewArray(et, K)
	KT.SetNoalg(true)
	types.CalcArraySize(KT)
	// ... preceded by a zero-length [0]uintptr field — presumably to force
	// pointer alignment of the whole store (TODO confirm).
	align := types.NewArray(types.Types[types.TUINTPTR], 0)
	types.CalcArraySize(align)
	storeTyp := types.NewStruct([]*types.Field{
		{Sym: types.BlankSym, Type: align},
		{Sym: types.BlankSym, Type: KT},
	})
	storeTyp.SetNoalg(true)
	types.CalcStructSize(storeTyp)

	// The stack slot itself; its address is taken (handed around at
	// runtime), so mark it as such.
	backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
	backingStore.SetAddrtaken(true)

	// Boolean temp tracking whether the store has been claimed.
	// It must read as false on function entry.
	used := typecheck.TempAt(n.Pos(), s.curfn, types.Types[types.TBOOL])
	if s.curBlock == s.f.Entry {
		s.vars[used] = s.constBool(false)
	} else {
		// Not currently in the entry block: seed the initial value
		// directly into the entry block's variable definitions.
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)
	}

	// Memoize so later uses of n share the same store and flag.
	if s.backingStores == nil {
		s.backingStores = map[ir.Node]*backingStoreInfo{}
	}
	i = &backingStoreInfo{K: K, store: backingStore, used: used, usedStatic: false}
	s.backingStores[n] = i
	return i
}
3846
3847
3848
3849
3850
3851
3852
3853
3854
// append lowers an OAPPEND call n to SSA.
//
// If inplace is false, it lowers the expression form a = append(s, e1, ...):
// it evaluates the slice operand, grows the backing store if needed, writes
// the new elements, and returns the resulting slice value.
//
// If inplace is true, it lowers the statement form s = append(s, e1, ...):
// the slice's pointer/len/cap are written back through the address of s and
// nil is returned. (The inplace form must only be used when the slice
// cannot be SSA'd — it goes through memory.)
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice we are appending to.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		// Keep the address so we can store len/cap/ptr back through it.
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// Control flow: the current block tests whether we must grow;
	// "grow" obtains a bigger backing store, "assign" stores the
	// appended elements.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the slice into pointer / length / capacity.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newLen = oldLen + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow if uint(cap) < uint(newLen).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record values for the no-grow fallthrough path.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// If the append result does not escape, we may be able to use a
	// fixed-size stack buffer as the new backing store instead of
	// letting the runtime allocate one.
	var info *backingStoreInfo
	if !inplace {
		info = s.getBackingStoreInfoForAppend(n)
	}

	if !inplace && info != nil && !n.UseBuf && !info.usedStatic {
		// First static use of the stack backing store at this site.
		// On the grow path the generated code is roughly:
		//   if newLen <= K && !used && oldLen == 0 {
		//       zero the store; ptr, len, cap = &store, newLen, K; used = true
		//   } else {
		//       ... call growslice ...
		//   }
		// NOTE(review): the oldLen == 0 test restricts the buffer to
		// appends onto an empty slice — confirm that is the intent.
		info.usedStatic = true

		// Blocks for the chain of eligibility tests.
		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)
		tInt := types.Types[types.TINT]
		tBool := types.Types[types.TBOOL]

		// Does the new length fit in the buffer? newLen <= K.
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, info.K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Is the buffer still unclaimed?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(info.used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Is the existing slice empty?
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// All tests passed: adopt the stack buffer as the backing store.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			// Elements contain pointers: announce the store to liveness
			// before initializing it.
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, info.store, s.mem())
		}
		addr := s.addr(info.store)
		s.zero(info.store.Type(), addr)

		// Result slice on this path is &store / newLen / K.
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, info.K)

		// Claim the buffer so other append sites won't reuse it.
		s.assign(info.used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// The runtime-grow path continues in growSlice.
		grow = growSlice
	}

	// Call the runtime to obtain a larger backing store.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	var r []*ssa.Value
	if info != nil && n.UseBuf {
		// Pass the stack buffer to the runtime, which may use it as the
		// new backing store.
		if et.HasPointers() && !info.usedStatic {
			// The buffer was never zeroed on a static path. Emit a
			// VarDef+Zero for it in the entry block so it never holds
			// junk pointer words when the runtime looks at it.
			mem := s.defvars[s.f.Entry.ID][memVar]
			mem = s.f.Entry.NewValue1A(n.Pos(), ssa.OpVarDef, types.TypeMem, info.store, mem)
			addr := s.f.Entry.NewValue2A(n.Pos(), ssa.OpLocalAddr, types.NewPtr(info.store.Type()), info.store, s.sp, mem)
			mem = s.f.Entry.NewValue2I(n.Pos(), ssa.OpZero, types.TypeMem, info.store.Type().Size(), addr, mem)
			mem.Aux = info.store.Type()
			s.defvars[s.f.Entry.ID][memVar] = mem
			info.usedStatic = true
		}
		fn := ir.Syms.GrowsliceBuf
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// Variant for appends whose result provably does not alias
			// the source; only used for pointer-free elements.
			fn = ir.Syms.GrowsliceBufNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr, s.addr(info.store), s.constInt(types.Types[types.TINT], info.K))
	} else {
		fn := ir.Syms.Growslice
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// See the NoAlias note above.
			fn = ir.Syms.GrowsliceNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)
	}

	// Decompose the runtime call's result slice.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to build a new slice.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Store the new cap and pointer back through the slice's address.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Merge point for the grow and no-grow paths.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// The length changes on every append (unlike ptr/cap, which were
		// only written on the grow path), so store it here.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments being appended.
	type argRec struct {
		// If store is true, v is the value to store;
		// otherwise v is the address of the value to copy from.
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the new elements into slots [newLen-nargs, newLen).
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// The marker variables are no longer needed past this point;
	// drop them from the variable map.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// The in-place form already stored everything back through addr.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
4168
// move2heap lowers an OMOVE2HEAP expression: if the backing store of the
// slice n.Slice currently lives in this function's stack frame (it may be
// one of the stack backing stores from getBackingStoreInfo), call the
// runtime to move the data to the heap and return the updated slice;
// otherwise return the slice unchanged.
func (s *state) move2heap(n *ir.MoveToHeapExpr) *ssa.Value {
	slice := s.expr(n.Slice)
	et := slice.Type.Elem()
	pt := types.NewPtr(et)

	info := s.getBackingStoreInfo(n)
	if info == nil {
		// No stack backing store was ever eligible for this expression,
		// so the data cannot be in the frame; nothing to do.
		return slice
	}

	// Decompose the slice into pointer / length / capacity.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	moveBlock := s.f.NewBlock(ssa.BlockPlain)
	mergeBlock := s.f.NewBlock(ssa.BlockPlain)

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c

	// Test whether the data pointer points into the current frame:
	// unsigned (uintptr(p) - sp) < (callerSP - sp), i.e. sp <= p < callerSP.
	sub := ssa.OpSub64
	less := ssa.OpLess64U
	if s.config.PtrSize == 4 {
		sub = ssa.OpSub32
		less = ssa.OpLess32U
	}
	callerSP := s.newValue1(ssa.OpGetCallerSP, types.Types[types.TUINTPTR], s.mem())
	frameSize := s.newValue2(sub, types.Types[types.TUINTPTR], callerSP, s.sp)
	pInt := s.newValue2(ssa.OpConvert, types.Types[types.TUINTPTR], p, s.mem())
	off := s.newValue2(sub, types.Types[types.TUINTPTR], pInt, s.sp)
	cond := s.newValue2(less, types.Types[types.TBOOL], off, frameSize)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cond)
	b.AddEdgeTo(moveBlock)
	b.AddEdgeTo(mergeBlock)

	// Slow path: the data is on the stack. Pick the runtime helper based
	// on whether elements contain pointers (needs the type for GC scanning)
	// and whether the capacity must be preserved.
	s.startBlock(moveBlock)
	var newSlice *ssa.Value
	if et.HasPointers() {
		typ := s.expr(n.RType)
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSlice, true, []*types.Type{slice.Type}, typ, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCap, true, []*types.Type{slice.Type}, typ, p, l)[0]
		}
	} else {
		elemSize := s.constInt(types.Types[types.TUINTPTR], et.Size())
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoScan, true, []*types.Type{slice.Type}, elemSize, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCapNoScan, true, []*types.Type{slice.Type}, elemSize, p, l)[0]
		}
	}

	// Replace ptr/len/cap with the moved slice's components.
	s.vars[ptrVar] = s.newValue1(ssa.OpSlicePtr, pt, newSlice)
	s.vars[lenVar] = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], newSlice)
	s.vars[capVar] = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], newSlice)
	b = s.endBlock()
	b.AddEdgeTo(mergeBlock)

	// Merge the moved and unmoved paths and rebuild the slice.
	s.startBlock(mergeBlock)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	c = s.variable(capVar, types.Types[types.TINT])
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	delete(s.vars, capVar)
	return s.newValue3(ssa.OpSliceMake, slice.Type, p, l, c)
}
4257
4258
4259 func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
4260
4261
4262
4263 fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
4264 x := s.expr(n.Args[0])
4265 for _, arg := range n.Args[1:] {
4266 x = op(x, s.expr(arg))
4267 }
4268 return x
4269 }
4270
4271 typ := n.Type()
4272
4273 if typ.IsFloat() || typ.IsString() {
4274
4275
4276
4277
4278
4279
4280
4281
4282 if typ.IsFloat() {
4283 hasIntrinsic := false
4284 switch Arch.LinkArch.Family {
4285 case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64, sys.S390X:
4286 hasIntrinsic = true
4287 case sys.PPC64:
4288 hasIntrinsic = buildcfg.GOPPC64 >= 9
4289 }
4290
4291 if hasIntrinsic {
4292 var op ssa.Op
4293 switch {
4294 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
4295 op = ssa.OpMin64F
4296 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
4297 op = ssa.OpMax64F
4298 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
4299 op = ssa.OpMin32F
4300 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
4301 op = ssa.OpMax32F
4302 }
4303 return fold(func(x, a *ssa.Value) *ssa.Value {
4304 return s.newValue2(op, typ, x, a)
4305 })
4306 }
4307 }
4308 var name string
4309 switch typ.Kind() {
4310 case types.TFLOAT32:
4311 switch n.Op() {
4312 case ir.OMIN:
4313 name = "fmin32"
4314 case ir.OMAX:
4315 name = "fmax32"
4316 }
4317 case types.TFLOAT64:
4318 switch n.Op() {
4319 case ir.OMIN:
4320 name = "fmin64"
4321 case ir.OMAX:
4322 name = "fmax64"
4323 }
4324 case types.TSTRING:
4325 switch n.Op() {
4326 case ir.OMIN:
4327 name = "strmin"
4328 case ir.OMAX:
4329 name = "strmax"
4330 }
4331 }
4332 fn := typecheck.LookupRuntimeFunc(name)
4333
4334 return fold(func(x, a *ssa.Value) *ssa.Value {
4335 return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
4336 })
4337 }
4338
4339 if typ.IsInteger() {
4340 if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
4341 var op ssa.Op
4342 switch {
4343 case typ.IsSigned() && n.Op() == ir.OMIN:
4344 op = ssa.OpMin64
4345 case typ.IsSigned() && n.Op() == ir.OMAX:
4346 op = ssa.OpMax64
4347 case typ.IsUnsigned() && n.Op() == ir.OMIN:
4348 op = ssa.OpMin64u
4349 case typ.IsUnsigned() && n.Op() == ir.OMAX:
4350 op = ssa.OpMax64u
4351 }
4352 return fold(func(x, a *ssa.Value) *ssa.Value {
4353 return s.newValue2(op, typ, x, a)
4354 })
4355 }
4356 }
4357
4358 lt := s.ssaOp(ir.OLT, typ)
4359
4360 return fold(func(x, a *ssa.Value) *ssa.Value {
4361 switch n.Op() {
4362 case ir.OMIN:
4363
4364 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
4365 case ir.OMAX:
4366
4367 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
4368 }
4369 panic("unreachable")
4370 })
4371 }
4372
4373
4374 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4375
4376
4377 ternaryVar := ssaMarker("ternary")
4378
4379 bThen := s.f.NewBlock(ssa.BlockPlain)
4380 bElse := s.f.NewBlock(ssa.BlockPlain)
4381 bEnd := s.f.NewBlock(ssa.BlockPlain)
4382
4383 b := s.endBlock()
4384 b.Kind = ssa.BlockIf
4385 b.SetControl(cond)
4386 b.AddEdgeTo(bThen)
4387 b.AddEdgeTo(bElse)
4388
4389 s.startBlock(bThen)
4390 s.vars[ternaryVar] = x
4391 s.endBlock().AddEdgeTo(bEnd)
4392
4393 s.startBlock(bElse)
4394 s.vars[ternaryVar] = y
4395 s.endBlock().AddEdgeTo(bEnd)
4396
4397 s.startBlock(bEnd)
4398 r := s.variable(ternaryVar, x.Type)
4399 delete(s.vars, ternaryVar)
4400 return r
4401 }
4402
4403
4404
4405
4406
// condBranch evaluates the boolean expression cond and branches to yes
// if cond is true and to no if cond is false.
// likely is a branch prediction hint (negative: unlikely, 0: unknown,
// positive: likely) for the yes edge.
// Handling &&, || and ! here (instead of s.expr(cond) plus a branch)
// produces short-circuit control flow directly.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// cond.X && cond.Y: if X is false we're done (go to no);
		// otherwise fall into mid and test Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// A likely hint propagates to both tests; an unlikely hint tells
		// us nothing about which operand fails, so the first test gets
		// "unknown" (max(likely, 0)).
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
	case ir.OOROR:
		// cond.X || cond.Y: if X is true we're done (go to yes);
		// otherwise test Y in mid. Symmetric to &&: an unlikely hint
		// propagates to both, a likely hint becomes "unknown"
		// (min(likely, 0)) for the first test.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
	case ir.ONOT:
		// !x: swap the branch targets and negate the hint.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// No-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate the condition and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely)
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
4453
// skipMask is a bitmask recording which top-level parts of a slice
// assignment (pointer, length, capacity) can be skipped.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // don't store the pointer word
	skipLen                      // don't store the length word
	skipCap                      // don't store the capacity word
)
4461
4462
4463
4464
4465
4466
4467
// assign does left = right, where right has already been lowered to SSA
// and left has not. It is assignWhichMayOverlap with the source and
// destination assumed not to overlap; see that function for the meaning
// of deref and skip.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
4471 func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
4472 if left.Op() == ir.ONAME && ir.IsBlank(left) {
4473 return
4474 }
4475 t := left.Type()
4476 types.CalcSize(t)
4477 if s.canSSA(left) {
4478 if deref {
4479 s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
4480 }
4481 if left.Op() == ir.ODOT {
4482
4483
4484
4485
4486
4487
4488
4489
4490
4491
4492 left := left.(*ir.SelectorExpr)
4493 t := left.X.Type()
4494 nf := t.NumFields()
4495 idx := fieldIdx(left)
4496
4497
4498 old := s.expr(left.X)
4499
4500 if left.Type().Size() == 0 {
4501
4502 return
4503 }
4504
4505
4506 new := s.newValue0(ssa.OpStructMake, t)
4507
4508
4509 for i := 0; i < nf; i++ {
4510 if i == idx {
4511 new.AddArg(right)
4512 } else {
4513 new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
4514 }
4515 }
4516
4517
4518 s.assign(left.X, new, false, 0)
4519
4520 return
4521 }
4522 if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
4523 left := left.(*ir.IndexExpr)
4524 s.pushLine(left.Pos())
4525 defer s.popLine()
4526
4527
4528 t := left.X.Type()
4529 n := t.NumElem()
4530
4531 i := s.expr(left.Index)
4532 if n == 0 {
4533
4534
4535 z := s.constInt(types.Types[types.TINT], 0)
4536 s.boundsCheck(z, z, ssa.BoundsIndex, false)
4537 return
4538 }
4539 if n != 1 {
4540
4541
4542
4543
4544
4545
4546
4547
4548 return
4549 }
4550 if t.Size() == 0 {
4551 return
4552 }
4553
4554
4555 len := s.constInt(types.Types[types.TINT], 1)
4556 s.boundsCheck(i, len, ssa.BoundsIndex, false)
4557 v := s.newValue1(ssa.OpArrayMake1, t, right)
4558 s.assign(left.X, v, false, 0)
4559 return
4560 }
4561 left := left.(*ir.Name)
4562
4563 s.vars[left] = right
4564 s.addNamedValue(left, right)
4565 return
4566 }
4567
4568
4569
4570 if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
4571 s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
4572 }
4573
4574
4575 addr := s.addr(left)
4576 if ir.IsReflectHeaderDataField(left) {
4577
4578
4579
4580
4581
4582 t = types.Types[types.TUNSAFEPTR]
4583 }
4584 if deref {
4585
4586 if right == nil {
4587 s.zero(t, addr)
4588 } else {
4589 s.moveWhichMayOverlap(t, addr, right, mayOverlap)
4590 }
4591 return
4592 }
4593
4594 s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
4595 }
4596
4597
4598 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4599 if t.Size() == 0 {
4600 return s.entryNewValue0(ssa.OpEmpty, t)
4601 }
4602 switch {
4603 case t.IsInteger():
4604 switch t.Size() {
4605 case 1:
4606 return s.constInt8(t, 0)
4607 case 2:
4608 return s.constInt16(t, 0)
4609 case 4:
4610 return s.constInt32(t, 0)
4611 case 8:
4612 return s.constInt64(t, 0)
4613 default:
4614 s.Fatalf("bad sized integer type %v", t)
4615 }
4616 case t.IsFloat():
4617 switch t.Size() {
4618 case 4:
4619 return s.constFloat32(t, 0)
4620 case 8:
4621 return s.constFloat64(t, 0)
4622 default:
4623 s.Fatalf("bad sized float type %v", t)
4624 }
4625 case t.IsComplex():
4626 switch t.Size() {
4627 case 8:
4628 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4629 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4630 case 16:
4631 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4632 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4633 default:
4634 s.Fatalf("bad sized complex type %v", t)
4635 }
4636
4637 case t.IsString():
4638 return s.constEmptyString(t)
4639 case t.IsPtrShaped():
4640 return s.constNil(t)
4641 case t.IsBoolean():
4642 return s.constBool(false)
4643 case t.IsInterface():
4644 return s.constInterface(t)
4645 case t.IsSlice():
4646 return s.constSlice(t)
4647 case isStructNotSIMD(t):
4648 n := t.NumFields()
4649 v := s.entryNewValue0(ssa.OpStructMake, t)
4650 for i := 0; i < n; i++ {
4651 v.AddArg(s.zeroVal(t.FieldType(i)))
4652 }
4653 return v
4654 case t.IsArray() && t.NumElem() == 1:
4655 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4656 case t.IsSIMD():
4657 return s.newValue0(ssa.OpZeroSIMD, t)
4658 }
4659 s.Fatalf("zero for type %v not implemented", t)
4660 return nil
4661 }
4662
// callKind distinguishes the flavors of call lowering handled by the
// call-generation code.
type callKind int8

const (
	callNormal     callKind = iota // ordinary function/method call
	callDefer                      // deferred call
	callDeferStack                 // deferred call with stack-allocated record — TODO confirm
	callGo                         // call started by a go statement
	callTail                       // tail call
)
4672
// sfRtCallDef describes a runtime routine implementing a floating-point
// operation in software (soft-float mode).
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime function to call
	rtype types.Kind // kind of the call's result
}

// softFloatOps maps floating-point SSA ops to their soft-float runtime
// implementations; populated by softfloatInit.
var softFloatOps map[ssa.Op]sfRtCallDef
4679
// softfloatInit populates softFloatOps, mapping floating-point SSA ops to the
// runtime calls that implement them on targets without hardware floating point.
//
// Several entries deliberately reuse another op's helper; sfcall compensates:
//   - Sub maps to fadd: sfcall negates the second operand first.
//   - Neq maps to feq: sfcall inverts the result.
//   - Less/Leq map to fgt/fge: sfcall swaps the operands.
func softfloatInit() {
	// Some of these operations are transformed by sfcall.
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32}, // a-b == a+(-b); see sfcall
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64}, // a-b == a+(-b); see sfcall
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL}, // result inverted by sfcall
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL}, // result inverted by sfcall
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL}, // operands swapped by sfcall
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL}, // operands swapped by sfcall
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL}, // operands swapped by sfcall
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL}, // operands swapped by sfcall

		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4717
4718
4719
// sfcall lowers floating-point op to a call of its soft-float runtime helper,
// if one is registered in softFloatOps. It returns the call result and true,
// or (nil, false) when op has no soft-float implementation.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the same-width unsigned integer type:
	// the runtime helpers take and return raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// Less/Leq are registered with the swapped-operand
			// fgt/fge helpers (see softfloatInit), so swap args.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// Sub is implemented as a + (-b) using the fadd helper.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// The runtime helpers take uints for float arguments and return
		// uints; convert so the right calling convention is used.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Reinterpret the returned bits as the float type.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// Neq is registered as feq (see softfloatInit); invert.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4763
4764
4765 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4766 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4767 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4768 return p0, p1
4769 }
4770
4771
// intrinsicCall converts a call to a recognized intrinsic function into the
// intrinsic SSA operation by looking up and invoking the intrinsic builder
// registered for the callee's symbol.
func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
	v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
	if ssa.IntrinsicsDebug > 0 {
		x := v
		if x == nil {
			// Intrinsic produced no value; report its memory effect.
			x = s.mem()
		}
		if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
			// Report the underlying tuple-producing op, not the selector.
			x = x.Args[0]
		}
		base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
	}
	return v
}
4786
4787
4788 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4789 args := make([]*ssa.Value, len(n.Args))
4790 for i, n := range n.Args {
4791 args[i] = s.expr(n)
4792 }
4793 return args
4794 }
4795
4796
4797
4798
4799
4800
4801
// openDeferRecord adds code at the point of a defer statement to record an
// open-coded defer: it saves the deferred function value into a stack slot
// and sets this defer's bit in the deferBits bookkeeping variable. The
// deferred call itself is emitted at function exit by openDeferExit.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	// Open-coded defers are restricted to calls with no arguments or results.
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun

	// Always store the function value in a stack slot (so the runtime panic
	// code can find it), but record opendefer.closure only for dynamic
	// callees; static PFUNC callees are called directly in openDeferExit.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Update deferBits (both the SSA variable and its stack home) only after
	// the function value has been successfully evaluated and saved.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4830
4831
4832
4833
4834
4835
// openDeferSave generates SSA nodes to store val (of SSA-able, pointer-holding
// type t) into a dedicated open-defer stack slot so it can be used when the
// deferred call runs (including during panic processing). It returns the
// address of the slot. The VarDef/VarLive/LocalAddr for the slot are placed in
// the entry block so the slot is considered live for the whole function.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// FrameOffset temporarily records the defer index; presumably resolved
	// to a real offset later in compilation — TODO(review): confirm.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	if s.curBlock.ID != s.f.Entry.ID {
		// We are not at the entry block: splice the VarDef/VarLive/LocalAddr
		// into the entry block's memory chain so the slot's lifetime covers
		// the entire function.
		if t.HasPointers() { // always true here (checked above); kept for safety
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Already in the entry block: emit through the normal current-block
		// memory state.
		if t.HasPointers() { // always true here (checked above); kept for safety
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot must be zeroed on entry: it holds pointers and is live before
	// this store executes (it may be scanned during an earlier panic).
	temp.SetNeedzero(true)

	// Store the closure into the slot at the point of the defer statement.
	s.store(t, addrTemp, val)
	return addrTemp
}
4880
4881
4882
4883
4884
// openDeferExit generates SSA to run the open-coded defers at a function exit:
// it tests each bit of deferBits (in reverse registration order, matching
// normal defer LIFO semantics) and, for each set bit, clears the bit and calls
// the saved deferred function.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Run defers in reverse order of registration.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain) // defer i was registered: make the call
		bEnd := s.f.NewBlock(ssa.BlockPlain)  // continue with next defer

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])

		// Skip the call if bit i of deferBits is clear.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit (in memory and in the SSA variable) before
		// making the call, so a panic during the call does not re-run it.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		s.vars[deferBitsVar] = maskedval

		// Make the deferred call: through the saved closure when the callee
		// is dynamic, or directly when it is a static function.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the closure slot alive across the call so stack scanning
		// still sees it if the deferred call panics.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4954
// callResult lowers call n of kind k and returns the value of its first
// result (or nil if there is none).
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4958
// callAddr lowers call n of kind k and returns the address of its first
// result rather than the result value itself.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4962
4963
4964
// call lowers a function/method/interface call n of kind k (normal, defer,
// deferred-on-stack, go, or tail) to SSA. If returnResultAddr is true, it
// returns the address of the first result instead of its value. deferExtra,
// if non-nil, is evaluated and passed as the extra argument to deferprocat.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target function (if static)
	var closure *ssa.Value   // ptr to closure to run (if dynamic)
	var codeptr *ssa.Value   // ptr to target code (if dynamic)
	var dextra *ssa.Value    // defer extra arg (for deferprocat)
	var rcvr *ssa.Value      // receiver to set (interface calls)
	fn := n.Fun
	var ACArgs []*types.Type    // AuxCall args
	var ACResults []*types.Type // AuxCall results
	var callArgs []*ssa.Value   // For late-expansion, the args themselves (not stored, args to the call instead)

	callABI := s.f.ABIDefault

	// go/defer calls were rewritten earlier to take no args and return nothing.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be a direct call to a
				// non-ABIInternal function. fn.Func may be nil for some
				// compiler-generated functions, in which case the default
				// ABI is used.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// Pragma-based opt-in to the register ABI.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// A deferred nil function must panic when the deferred call
			// runs, not at the point of the defer statement, so skip the
			// nil check here for defers.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal || k == callTail {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set names from caller side */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a _defer struct on the stack and store the function value
		// into its fn field.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // deferprocStack takes one pointer arg
	} else {
		// Assemble the argument list in SP-offset order: go/defer runtime
		// args first, then the receiver (for interface calls), then the
		// regular args, then memory.
		argStart := base.Ctxt.Arch.FixedFrameSize

		if k != callNormal && k != callTail {
			// Closure argument for newproc/deferproc.
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface-typed argument for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Receiver for interface calls.
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Regular arguments.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// Split the entry block if there are open defers, because later calls
		// to openDeferSave splice values into the entry block's memory chain,
		// which could otherwise end up interleaved with this call's argument
		// evaluation.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Emit the call itself, dispatching on the call target kind.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a closure is
			// always safe, and we must not emit instrumentation that could
			// clobber arguments already stored for the call.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call: the receiver is already the first element of callArgs.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
			if k == callTail {
				call.Op = ssa.OpTailLECallInter
				stksize = 0 // tail call reuses the caller's frame
			}
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call reuses the caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // call operations carry the callee's argsize
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
	// Keep the n.KeepAlive variables alive across the call.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Extract the result, if any.
	var result *ssa.Value
	if len(res) == 0 || k != callNormal {
		// No return value for defer/go/tail or zero-result calls.
		result = nil
	} else {
		fp := res[0]
		if returnResultAddr {
			result = s.resultAddrOfCall(call, 0, fp.Type)
		} else {
			result = s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
		}
	}

	// Defer-like calls end the block with BlockDefer, adding an edge to the
	// shared deferreturn exit block (taken when a recover unwinds to here).
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn // shared exit block for all defers in the function
		if r == nil {
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r)              // recover edge
		b.Likely = ssa.BranchLikely // common case: the call does not panic
		s.startBlock(bNext)
	}

	return result
}
5207
5208
5209
// maybeNilCheckClosure emits an explicit nil check of a closure on the
// architectures that need one.
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	// Note: && binds tighter than ||, so this is Wasm || (aix && k != callGo).
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		// On these targets a nil closure is not caught by the call
		// sequence itself, so check explicitly here.
		s.nilCheck(closure)
	}
}
5217
5218
5219
// getClosureAndRcvr returns, for the interface method selector fn, the address
// of the method's entry in the itab's Fun table (usable as a closure pointer)
// and the receiver (the interface's data word). The interface value is
// nil-checked via its itab word.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	s.nilCheck(itab)
	// Offset of this method's slot within the itab's Fun array.
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
5229
5230
5231
5232 func etypesign(e types.Kind) int8 {
5233 switch e {
5234 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5235 return -1
5236 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5237 return +1
5238 }
5239 return 0
5240 }
5241
5242
5243
// addr converts the address of the expression n to SSA and returns it as a
// pointer value. It dispatches on n's op; unsupported ops are fatal errors.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		// n is SSA-able and so has no stable home in memory. Return a
		// pointer to the runtime zerobase symbol instead — presumably this
		// path is only reached when the pointer is never actually
		// dereferenced (TODO(review): confirm against callers).
		return s.newValue1A(ssa.OpAddr, n.Type().PtrTo(), ir.Syms.Zerobase, s.sb)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset returns the address of the given linker symbol plus a
	// constant offset, built in the entry block.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// TODO: Make OpAddr use AuxInt as well as Aux.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO — cannot generate LEA early.
			// ensure that we reuse symbols for out parameters so
			// that cse works on their addresses
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		// Return the address the dottype value was loaded from.
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5363
5364
5365
// canSSA reports whether n is SSA-able. It strips trivial selectors off n
// (fields of structs, elements of arrays) until the base is reached, then
// requires the base to be an SSA-able named variable of SSA-able type.
func (s *state) canSSA(n ir.Node) bool {
	if base.Flag.N != 0 {
		// Optimizations disabled (-N): keep values in memory.
		return false
	}
	for {
		nn := n
		if nn.Op() == ir.ODOT {
			nn := nn.(*ir.SelectorExpr)
			n = nn.X
			continue
		}
		if nn.Op() == ir.OINDEX {
			nn := nn.(*ir.IndexExpr)
			if nn.X.Type().IsArray() {
				n = nn.X
				continue
			}
		}
		break
	}
	if n.Op() != ir.ONAME {
		return false
	}
	return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
}
5391
5392 func (s *state) canSSAName(name *ir.Name) bool {
5393 if name.Addrtaken() || !name.OnStack() {
5394 return false
5395 }
5396 switch name.Class {
5397 case ir.PPARAMOUT:
5398 if s.hasdefer {
5399
5400
5401
5402
5403
5404 return false
5405 }
5406 if s.cgoUnsafeArgs {
5407
5408
5409 return false
5410 }
5411 }
5412 return true
5413
5414 }
5415
5416
// exprPtr evaluates n to an SSA pointer value and nil-checks it, unless the
// check can be elided because the access is bounded or n is statically known
// to be non-nil.
func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
	p := s.expr(n)
	if bounded || n.NonNil() {
		if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
			// -d=checknil diagnostics: report the elided check.
			s.f.Warnl(lineno, "removed nil check")
		}
		return p
	}
	p = s.nilCheck(p)
	return p
}
5428
5429
5430
5431
5432
5433
// nilCheck generates nil-pointer-checking code for ptr and returns a
// "known non-nil" copy of it, so that uses of the checked pointer are
// ordered after the check. Used only for compiler-inserted nil checks,
// not for user code like "x != nil".
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		// Nil checks disabled globally or for this function.
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
5440
5441
5442
5443
5444
5445
5446
// boundsCheck generates bounds checking code for idx against len: it extends
// idx to full int width, branches to a panic block when out of bounds, and
// returns the (possibly Spectre-masked) index for use on the in-bounds path.
// kind selects which runtime bounds-panic is raised; bounded indicates the
// caller has already established the index is in range.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If bounded or bounds checking is flag-disabled, no check (and no
		// Spectre masking) is needed: bounded==true means the compiler
		// generated this index itself, so it cannot be attacker-controlled.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Use the unsigned variant of the panic kind so the runtime
		// reports the index with the right signedness.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Indexing requires idx < len; slicing allows idx == len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm has no PanicBounds pseudo-op; call the runtime bounds
		// function directly (it does not return).
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply an appropriate bounds check mask
	// to convert an out-of-bounds index to zero on the in-bounds path.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5533
5534
// check emits checking code: if cmp is false, control transfers to a block
// that calls the panic function fn. Panic blocks are shared per (fn, line)
// via s.panics so repeated checks on the same line reuse one block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the check is expected to pass
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		// First check at this (fn, line): build the shared panic block.
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call does not return (returns==false marks the
		// block as an exit block).
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5557
// intDivide emits the integer division/modulus op for n with operands a and b,
// inserting a divide-by-zero check unless b is a provably non-zero constant.
func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
	needcheck := true
	switch b.Op {
	case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
		if b.AuxInt != 0 {
			// Constant non-zero divisor: check can be elided.
			needcheck = false
		}
	}
	if needcheck {
		// Emit a size-appropriate b != 0 check that panics with
		// "divide by zero" on failure.
		cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
		s.check(cmp, ir.Syms.Panicdivide)
	}
	return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
}
5573
5574
5575
5576
5577
// rtcall issues a call to the given runtime function fn with the listed
// arguments and returns one SSA value per entry in results. The call is
// appended to the current block. If returns is false, the function is a
// no-return panic helper: the block is turned into an exit block and nil is
// returned (results must then be empty).
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Compute the stack frame size consumed by the arguments, honoring
	// each argument's alignment.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call as a late-expanded static call under the default ABI.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block for a no-return (panic) call.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5629
5630
// storeType stores value right of type t into pointer left, emitting write
// barriers as needed. skip selects components (len/cap/ptr) that may be
// omitted; leftIsStmt marks the store as a statement boundary.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not need a write barrier. Store the whole type in one go.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so the write-barrier stores for pointer
	// fields can be grouped together, and scalar values don't need to be
	// live across the write barrier call.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5650
5651
// storeTypeScalars stores the non-pointer components of a value of type t
// (pointer components are handled separately by storeTypePtrs).
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex() || t.IsSIMD():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap objects need no write barrier,
			// so they count as scalars here.
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case isStructNotSIMD(t):
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.Size() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5699
5700
// storeTypePtrs stores the pointer components of a value of type t
// (the scalar components must have been stored by storeTypeScalars first).
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Treated as a scalar (stored by storeTypeScalars).
			break
		}
		s.store(t, left, right)
	case t.IsString():
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// itab field is treated as a scalar; only the data word is a pointer.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case isStructNotSIMD(t):
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.Size() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5739
5740
5741 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5742 var a *ssa.Value
5743 if !ssa.CanSSA(t) {
5744 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5745 } else {
5746 a = s.expr(n)
5747 }
5748 return a
5749 }
5750
5751
5752
5753
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of the
// result. v may be a slice, string, or pointer to an array. Nil index
// arguments mean their defaults (i=0, j=len, k=cap); bounded elides bounds
// checks the caller has already established.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	// Decompose the operand into ptr/len/cap.
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Perform bounds checks, establishing 0 <= i <= j <= k <= cap.
	// Checks are skipped where the two operands are the same value
	// (the comparison would be trivially true).
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations for the arithmetic below.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Compute the new length and capacity:
	//   rlen = j - i
	//   rcap = k - i (strings have no cap; reuse rlen)
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Compute the new base pointer:
	//
	//	rptr = ptr + (mask(rcap) & (i * stride))
	//
	// where mask(x) is 0 if x==0 and -1 if x>0, and stride is the element
	// width. The masking keeps a zero-capacity result from pointing past
	// the end of the original object (into the next object in memory),
	// while still producing a non-nil pointer.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta = i * stride (element offset of the new base)
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// Force delta to zero when rcap is zero.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5866
// u642fcvtTab is the opcode table used to expand a uint64-to-float
// conversion; see uint64Tofloat for the algorithm that consumes it.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value // builds the integer constant 1 of the source type
}

// u64_f64 expands uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 expands uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5891
// uint64Tofloat64 converts the uint64 value x (of type ft) to float64 (tt).
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

// uint64Tofloat32 converts the uint64 value x (of type ft) to float32 (tt).
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5899
// uint64Tofloat converts the uint64 value x to a float, using the opcode
// table cvttab. n is the IR node the result belongs to (used as the phi
// variable key). The generated code is:
//
//	if x >= 0 {
//		result = (floatY) x
//	} else {
//		y = uintX(x) ; y = x & 1
//		z = uintX(x) ; z = z >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result
//	}
//
// What's going on: a large 64-bit "unsigned" value looks like a
// negative number to the hardware's signed integer-to-float
// conversion. Because the float mantissa cannot hold all 64 bits
// anyway, we halve the value with an unsigned right shift, convert,
// and double the result. To avoid losing a "1" in the low bit that
// could matter for rounding, the LSB is OR'ed back in before the
// shift (sticky-bit trick).
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on the sign bit: 0 <= x (interpreted signed)?
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Easy case: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Hard case: halve (keeping the sticky LSB), convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5959
// u322fcvtTab is the opcode table used to expand a uint32-to-float
// conversion; see uint32Tofloat.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 expands uint32 -> float64 (no final narrowing needed).
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 expands uint32 -> float32 (computed in float64, then narrowed).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5973
// uint32Tofloat64 converts the uint32 value x (of type ft) to float64 (tt).
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

// uint32Tofloat32 converts the uint32 value x (of type ft) to float32 (tt).
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5981
// uint32Tofloat converts the uint32 value x to a float using table cvttab:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1<<32))
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on the sign bit: 0 <= x (interpreted signed)?
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Easy case: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Sign bit set: compute in float64 with a 2^32 bias, then narrow if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6019
6020
6021 func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
6022 if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
6023 s.Fatalf("node must be a map or a channel")
6024 }
6025 if n.X.Type().IsChan() && n.Op() == ir.OLEN {
6026 s.Fatalf("cannot inline len(chan)")
6027 }
6028 if n.X.Type().IsChan() && n.Op() == ir.OCAP {
6029 s.Fatalf("cannot inline cap(chan)")
6030 }
6031 if n.X.Type().IsMap() && n.Op() == ir.OCAP {
6032 s.Fatalf("cannot inline cap(map)")
6033 }
6034
6035
6036
6037
6038
6039
6040
6041
6042 lenType := n.Type()
6043 nilValue := s.constNil(types.Types[types.TUINTPTR])
6044 cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
6045 b := s.endBlock()
6046 b.Kind = ssa.BlockIf
6047 b.SetControl(cmp)
6048 b.Likely = ssa.BranchUnlikely
6049
6050 bThen := s.f.NewBlock(ssa.BlockPlain)
6051 bElse := s.f.NewBlock(ssa.BlockPlain)
6052 bAfter := s.f.NewBlock(ssa.BlockPlain)
6053
6054
6055 b.AddEdgeTo(bThen)
6056 s.startBlock(bThen)
6057 s.vars[n] = s.zeroVal(lenType)
6058 s.endBlock()
6059 bThen.AddEdgeTo(bAfter)
6060
6061 b.AddEdgeTo(bElse)
6062 s.startBlock(bElse)
6063 switch n.Op() {
6064 case ir.OLEN:
6065 if n.X.Type().IsMap() {
6066
6067 loadType := reflectdata.MapType().Field(0).Type
6068 load := s.load(loadType, x)
6069 s.vars[n] = s.conv(nil, load, loadType, lenType)
6070 } else {
6071
6072 s.vars[n] = s.load(lenType, x)
6073 }
6074 case ir.OCAP:
6075
6076 sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
6077 s.vars[n] = s.load(lenType, sw)
6078 default:
6079 s.Fatalf("op must be OLEN or OCAP")
6080 }
6081 s.endBlock()
6082 bElse.AddEdgeTo(bAfter)
6083
6084 s.startBlock(bAfter)
6085 return s.variable(n, lenType)
6086 }
6087
// f2uCvtTab is the opcode table used to expand a float-to-unsigned-integer
// conversion; see floatToUint for the algorithm that consumes it.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue           func(*state, *types.Type, float64) *ssa.Value // builds a float constant of the source type
	intValue             func(*state, *types.Type, int64) *ssa.Value   // builds an int constant of the result type
	cutoff               uint64                                        // 1 << (result width - 1)
}

// f32_u64 expands float32 -> uint64.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 expands float64 -> uint64.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 expands float32 -> uint32.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 expands float64 -> uint32.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
6134
// float32ToUint64 converts the float32 value x (of type ft) to uint64 (tt).
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts the float64 value x (of type ft) to uint64 (tt).
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts the float32 value x (of type ft) to uint32 (tt).
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts the float64 value x (of type ft) to uint32 (tt).
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
6149
// floatToUint converts the float value x to an unsigned integer using the
// opcode table cvttab.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Generated code:
	//
	//	cutoff := 1 << (intY_Size - 1)
	//	if x < floatX(cutoff) {
	//		result = uintY(x)
	//		if x < 0 {        // only under the "new conversion" hash gate
	//			result = 0
	//		}
	//	} else {
	//		y = x - floatX(cutoff)
	//		z = uintY(y)
	//		result = z | -(cutoff)
	//	}
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	var bThen, bZero *ssa.Block
	// ConvertHash gates the extra negative-input -> 0 clamping behavior
	// (bisectable via the "U" debug hash).
	newConversion := base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil)
	if newConversion {
		bZero = s.f.NewBlock(ssa.BlockPlain)
		bThen = s.f.NewBlock(ssa.BlockIf)
	} else {
		bThen = s.f.NewBlock(ssa.BlockPlain)
	}

	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValueOrSfCall1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	// With the new semantics, a negative input produces 0 instead of a
	// wrapped conversion result.
	if newConversion {
		cmpz := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cvttab.floatValue(s, ft, 0.0))
		s.endBlock()
		bThen.SetControl(cmpz)
		bThen.AddEdgeTo(bZero)
		bThen.Likely = ssa.BranchUnlikely
		bThen.AddEdgeTo(bAfter)

		s.startBlock(bZero)
		s.vars[n] = cvttab.intValue(s, tt, 0)
		s.endBlock()
		bZero.AddEdgeTo(bAfter)
	} else {
		s.endBlock()
		bThen.AddEdgeTo(bAfter)
	}

	// x >= cutoff: subtract the cutoff, convert, then set the high bit.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValueOrSfCall2(cvttab.subf, ft, x, cutoff)
	y = s.newValueOrSfCall1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6219
6220
6221
6222
6223 func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
6224 iface := s.expr(n.X)
6225 target := s.reflectType(n.Type())
6226 var targetItab *ssa.Value
6227 if n.ITab != nil {
6228 targetItab = s.expr(n.ITab)
6229 }
6230
6231 if n.UseNilPanic {
6232 if commaok {
6233 base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && commaok == true")
6234 }
6235 if n.Type().IsInterface() {
6236
6237
6238 base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && Type().IsInterface() == true")
6239 }
6240 typs := s.f.Config.Types
6241 iface = s.newValue2(
6242 ssa.OpIMake,
6243 iface.Type,
6244 s.nilCheck(s.newValue1(ssa.OpITab, typs.BytePtr, iface)),
6245 s.newValue1(ssa.OpIData, typs.BytePtr, iface),
6246 )
6247 }
6248
6249 return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
6250 }
6251
// dynamicDottype generates SSA for a dynamic type assertion node, where
// the target type is not known at compile time (e.g. under generics).
// commaok indicates whether to panic (false) or return a bool (true).
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// nonempty interface -> concrete type: derive the target *_type
		// from the itab by loading its Type field.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
6269
6270
6271
6272
6273
6274
6275
6276
6277
6278 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
6279 typs := s.f.Config.Types
6280 byteptr := typs.BytePtr
6281 if dst.IsInterface() {
6282 if dst.IsEmptyInterface() {
6283
6284
6285 if base.Debug.TypeAssert > 0 {
6286 base.WarnfAt(pos, "type assertion inlined")
6287 }
6288
6289
6290 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6291
6292 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6293
6294 if src.IsEmptyInterface() && commaok {
6295
6296 return iface, cond
6297 }
6298
6299
6300 b := s.endBlock()
6301 b.Kind = ssa.BlockIf
6302 b.SetControl(cond)
6303 b.Likely = ssa.BranchLikely
6304 bOk := s.f.NewBlock(ssa.BlockPlain)
6305 bFail := s.f.NewBlock(ssa.BlockPlain)
6306 b.AddEdgeTo(bOk)
6307 b.AddEdgeTo(bFail)
6308
6309 if !commaok {
6310
6311 s.startBlock(bFail)
6312 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6313
6314
6315 s.startBlock(bOk)
6316 if src.IsEmptyInterface() {
6317 res = iface
6318 return
6319 }
6320
6321 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6322 typ := s.load(byteptr, off)
6323 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6324 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6325 return
6326 }
6327
6328 s.startBlock(bOk)
6329
6330
6331 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6332 s.vars[typVar] = s.load(byteptr, off)
6333 s.endBlock()
6334
6335
6336 s.startBlock(bFail)
6337 s.vars[typVar] = itab
6338 s.endBlock()
6339
6340
6341 bEnd := s.f.NewBlock(ssa.BlockPlain)
6342 bOk.AddEdgeTo(bEnd)
6343 bFail.AddEdgeTo(bEnd)
6344 s.startBlock(bEnd)
6345 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6346 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6347 resok = cond
6348 delete(s.vars, typVar)
6349 return
6350 }
6351
6352 if base.Debug.TypeAssert > 0 {
6353 base.WarnfAt(pos, "type assertion not inlined")
6354 }
6355
6356 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6357 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6358
6359
6360 bNil := s.f.NewBlock(ssa.BlockPlain)
6361 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6362 bMerge := s.f.NewBlock(ssa.BlockPlain)
6363 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6364 b := s.endBlock()
6365 b.Kind = ssa.BlockIf
6366 b.SetControl(cond)
6367 b.Likely = ssa.BranchLikely
6368 b.AddEdgeTo(bNonNil)
6369 b.AddEdgeTo(bNil)
6370
6371 s.startBlock(bNil)
6372 if commaok {
6373 s.vars[typVar] = itab
6374 b := s.endBlock()
6375 b.AddEdgeTo(bMerge)
6376 } else {
6377
6378 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6379 }
6380
6381
6382 s.startBlock(bNonNil)
6383 typ := itab
6384 if !src.IsEmptyInterface() {
6385 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6386 }
6387
6388
6389 var d *ssa.Value
6390 if descriptor != nil {
6391 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6392 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
6393
6394
6395 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
6396 s.Fatalf("atomic load not available")
6397 }
6398
6399 var mul, and, add, zext ssa.Op
6400 if s.config.PtrSize == 4 {
6401 mul = ssa.OpMul32
6402 and = ssa.OpAnd32
6403 add = ssa.OpAdd32
6404 zext = ssa.OpCopy
6405 } else {
6406 mul = ssa.OpMul64
6407 and = ssa.OpAnd64
6408 add = ssa.OpAdd64
6409 zext = ssa.OpZeroExt32to64
6410 }
6411
6412 loopHead := s.f.NewBlock(ssa.BlockPlain)
6413 loopBody := s.f.NewBlock(ssa.BlockPlain)
6414 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6415 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6416
6417
6418
6419 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6420 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6421 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6422
6423
6424 var hash *ssa.Value
6425 if src.IsEmptyInterface() {
6426 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6427 } else {
6428 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6429 }
6430 hash = s.newValue1(zext, typs.Uintptr, hash)
6431 s.vars[hashVar] = hash
6432
6433 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6434
6435 b := s.endBlock()
6436 b.AddEdgeTo(loopHead)
6437
6438
6439
6440 s.startBlock(loopHead)
6441 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6442 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6443 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6444 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6445
6446 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6447
6448
6449
6450 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6451 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6452 b = s.endBlock()
6453 b.Kind = ssa.BlockIf
6454 b.SetControl(cmp1)
6455 b.AddEdgeTo(cacheHit)
6456 b.AddEdgeTo(loopBody)
6457
6458
6459
6460 s.startBlock(loopBody)
6461 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6462 b = s.endBlock()
6463 b.Kind = ssa.BlockIf
6464 b.SetControl(cmp2)
6465 b.AddEdgeTo(cacheMiss)
6466 b.AddEdgeTo(loopHead)
6467
6468
6469
6470 s.startBlock(cacheHit)
6471 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6472 s.vars[typVar] = eItab
6473 b = s.endBlock()
6474 b.AddEdgeTo(bMerge)
6475
6476
6477 s.startBlock(cacheMiss)
6478 }
6479 }
6480
6481
6482 if descriptor != nil {
6483 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6484 } else {
6485 var fn *obj.LSym
6486 if commaok {
6487 fn = ir.Syms.AssertE2I2
6488 } else {
6489 fn = ir.Syms.AssertE2I
6490 }
6491 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6492 }
6493 s.vars[typVar] = itab
6494 b = s.endBlock()
6495 b.AddEdgeTo(bMerge)
6496
6497
6498 s.startBlock(bMerge)
6499 itab = s.variable(typVar, byteptr)
6500 var ok *ssa.Value
6501 if commaok {
6502 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6503 }
6504 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6505 }
6506
6507 if base.Debug.TypeAssert > 0 {
6508 base.WarnfAt(pos, "type assertion inlined")
6509 }
6510
6511
6512 direct := types.IsDirectIface(dst)
6513 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6514 if base.Debug.TypeAssert > 0 {
6515 base.WarnfAt(pos, "type assertion inlined")
6516 }
6517 var wantedFirstWord *ssa.Value
6518 if src.IsEmptyInterface() {
6519
6520 wantedFirstWord = target
6521 } else {
6522
6523 wantedFirstWord = targetItab
6524 }
6525
6526 var tmp ir.Node
6527 var addr *ssa.Value
6528 if commaok && !ssa.CanSSA(dst) {
6529
6530
6531 tmp, addr = s.temp(pos, dst)
6532 }
6533
6534 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6535 b := s.endBlock()
6536 b.Kind = ssa.BlockIf
6537 b.SetControl(cond)
6538 b.Likely = ssa.BranchLikely
6539
6540 bOk := s.f.NewBlock(ssa.BlockPlain)
6541 bFail := s.f.NewBlock(ssa.BlockPlain)
6542 b.AddEdgeTo(bOk)
6543 b.AddEdgeTo(bFail)
6544
6545 if !commaok {
6546
6547 s.startBlock(bFail)
6548 taddr := source
6549 if taddr == nil {
6550 taddr = s.reflectType(src)
6551 }
6552 if src.IsEmptyInterface() {
6553 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6554 } else {
6555 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6556 }
6557
6558
6559 s.startBlock(bOk)
6560 if direct {
6561 return s.newValue1(ssa.OpIData, dst, iface), nil
6562 }
6563 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6564 return s.load(dst, p), nil
6565 }
6566
6567
6568
6569 bEnd := s.f.NewBlock(ssa.BlockPlain)
6570
6571
6572 valVar := ssaMarker("val")
6573
6574
6575 s.startBlock(bOk)
6576 if tmp == nil {
6577 if direct {
6578 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6579 } else {
6580 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6581 s.vars[valVar] = s.load(dst, p)
6582 }
6583 } else {
6584 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6585 s.move(dst, addr, p)
6586 }
6587 s.vars[okVar] = s.constBool(true)
6588 s.endBlock()
6589 bOk.AddEdgeTo(bEnd)
6590
6591
6592 s.startBlock(bFail)
6593 if tmp == nil {
6594 s.vars[valVar] = s.zeroVal(dst)
6595 } else {
6596 s.zero(dst, addr)
6597 }
6598 s.vars[okVar] = s.constBool(false)
6599 s.endBlock()
6600 bFail.AddEdgeTo(bEnd)
6601
6602
6603 s.startBlock(bEnd)
6604 if tmp == nil {
6605 res = s.variable(valVar, dst)
6606 delete(s.vars, valVar)
6607 } else {
6608 res = s.load(dst, addr)
6609 }
6610 resok = s.variable(okVar, types.Types[types.TBOOL])
6611 delete(s.vars, okVar)
6612 return res, resok
6613 }
6614
6615
6616 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6617 tmp := typecheck.TempAt(pos, s.curfn, t)
6618 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
6619 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6620 }
6621 addr := s.addr(tmp)
6622 return tmp, addr
6623 }
6624
6625
6626 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6627 v := s.vars[n]
6628 if v != nil {
6629 return v
6630 }
6631 v = s.fwdVars[n]
6632 if v != nil {
6633 return v
6634 }
6635
6636 if s.curBlock == s.f.Entry {
6637
6638 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6639 }
6640
6641
6642 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6643 s.fwdVars[n] = v
6644 if n.Op() == ir.ONAME {
6645 s.addNamedValue(n.(*ir.Name), v)
6646 }
6647 return v
6648 }
6649
6650 func (s *state) mem() *ssa.Value {
6651 return s.variable(memVar, types.TypeMem)
6652 }
6653
6654 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6655 if n.Class == ir.Pxxx {
6656
6657 return
6658 }
6659 if ir.IsAutoTmp(n) {
6660
6661 return
6662 }
6663 if n.Class == ir.PPARAMOUT {
6664
6665
6666 return
6667 }
6668 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6669 values, ok := s.f.NamedValues[loc]
6670 if !ok {
6671 s.f.Names = append(s.f.Names, &loc)
6672 s.f.CanonicalLocalSlots[loc] = &loc
6673 }
6674 s.f.NamedValues[loc] = append(values, v)
6675 }
6676
6677
6678 type Branch struct {
6679 P *obj.Prog
6680 B *ssa.Block
6681 }
6682
6683
6684 type State struct {
6685 ABI obj.ABI
6686
6687 pp *objw.Progs
6688
6689
6690
6691 Branches []Branch
6692
6693
6694 JumpTables []*ssa.Block
6695
6696
6697 bstart []*obj.Prog
6698
6699 maxarg int64
6700
6701
6702
6703 livenessMap liveness.Map
6704
6705
6706
6707 partLiveArgs map[*ir.Name]bool
6708
6709
6710
6711
6712 lineRunStart *obj.Prog
6713
6714
6715 OnWasmStackSkipped int
6716 }
6717
6718 func (s *State) FuncInfo() *obj.FuncInfo {
6719 return s.pp.CurFunc.LSym.Func()
6720 }
6721
6722
6723 func (s *State) Prog(as obj.As) *obj.Prog {
6724 p := s.pp.Prog(as)
6725 if objw.LosesStmtMark(as) {
6726 return p
6727 }
6728
6729
6730 if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
6731 s.lineRunStart = p
6732 } else if p.Pos.IsStmt() == src.PosIsStmt {
6733 s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
6734 p.Pos = p.Pos.WithNotStmt()
6735 }
6736 return p
6737 }
6738
6739
6740 func (s *State) Pc() *obj.Prog {
6741 return s.pp.Next
6742 }
6743
6744
6745 func (s *State) SetPos(pos src.XPos) {
6746 s.pp.Pos = pos
6747 }
6748
6749
6750
6751
6752 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
6753 p := s.Prog(op)
6754 p.To.Type = obj.TYPE_BRANCH
6755 s.Branches = append(s.Branches, Branch{P: p, B: target})
6756 return p
6757 }
6758
6759
6760
6761
6762
6763
6764 func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
6765 switch v.Op {
6766 case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
6767
6768 s.SetPos(v.Pos.WithNotStmt())
6769 default:
6770 p := v.Pos
6771 if p != src.NoXPos {
6772
6773
6774
6775
6776 if p.IsStmt() != src.PosIsStmt {
6777 if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
6778
6779
6780
6781
6782
6783
6784
6785
6786
6787
6788
6789
6790
6791 return
6792 }
6793 p = p.WithNotStmt()
6794
6795 }
6796 s.SetPos(p)
6797 } else {
6798 s.SetPos(s.pp.Pos.WithNotStmt())
6799 }
6800 }
6801 }
6802
6803
6804 func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
6805 ft := e.curfn.Type()
6806 if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
6807 return
6808 }
6809
6810 x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
6811 x.Set(obj.AttrContentAddressable, true)
6812 e.curfn.LSym.Func().ArgInfo = x
6813
6814
6815 p := pp.Prog(obj.AFUNCDATA)
6816 p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
6817 p.To.Type = obj.TYPE_MEM
6818 p.To.Name = obj.NAME_EXTERN
6819 p.To.Sym = x
6820 }
6821
6822
6823 func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
6824 x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
6825 x.Align = 1
6826
6827
6828
6829
6830 PtrSize := int64(types.PtrSize)
6831 uintptrTyp := types.Types[types.TUINTPTR]
6832
6833 isAggregate := func(t *types.Type) bool {
6834 return isStructNotSIMD(t) || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
6835 }
6836
6837 wOff := 0
6838 n := 0
6839 writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }
6840
6841
6842 write1 := func(sz, offset int64) {
6843 if offset >= rtabi.TraceArgsSpecial {
6844 writebyte(rtabi.TraceArgsOffsetTooLarge)
6845 } else {
6846 writebyte(uint8(offset))
6847 writebyte(uint8(sz))
6848 }
6849 n++
6850 }
6851
6852
6853
6854 var visitType func(baseOffset int64, t *types.Type, depth int) bool
6855 visitType = func(baseOffset int64, t *types.Type, depth int) bool {
6856 if n >= rtabi.TraceArgsLimit {
6857 writebyte(rtabi.TraceArgsDotdotdot)
6858 return false
6859 }
6860 if !isAggregate(t) {
6861 write1(t.Size(), baseOffset)
6862 return true
6863 }
6864 writebyte(rtabi.TraceArgsStartAgg)
6865 depth++
6866 if depth >= rtabi.TraceArgsMaxDepth {
6867 writebyte(rtabi.TraceArgsDotdotdot)
6868 writebyte(rtabi.TraceArgsEndAgg)
6869 n++
6870 return true
6871 }
6872 switch {
6873 case t.IsInterface(), t.IsString():
6874 _ = visitType(baseOffset, uintptrTyp, depth) &&
6875 visitType(baseOffset+PtrSize, uintptrTyp, depth)
6876 case t.IsSlice():
6877 _ = visitType(baseOffset, uintptrTyp, depth) &&
6878 visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
6879 visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
6880 case t.IsComplex():
6881 _ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
6882 visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
6883 case t.IsArray():
6884 if t.NumElem() == 0 {
6885 n++
6886 break
6887 }
6888 for i := int64(0); i < t.NumElem(); i++ {
6889 if !visitType(baseOffset, t.Elem(), depth) {
6890 break
6891 }
6892 baseOffset += t.Elem().Size()
6893 }
6894 case isStructNotSIMD(t):
6895 if t.NumFields() == 0 {
6896 n++
6897 break
6898 }
6899 for _, field := range t.Fields() {
6900 if !visitType(baseOffset+field.Offset, field.Type, depth) {
6901 break
6902 }
6903 }
6904 }
6905 writebyte(rtabi.TraceArgsEndAgg)
6906 return true
6907 }
6908
6909 start := 0
6910 if strings.Contains(f.LSym.Name, "[") {
6911
6912 start = 1
6913 }
6914
6915 for _, a := range abiInfo.InParams()[start:] {
6916 if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
6917 break
6918 }
6919 }
6920 writebyte(rtabi.TraceArgsEndSeq)
6921 if wOff > rtabi.TraceArgsMaxLen {
6922 base.Fatalf("ArgInfo too large")
6923 }
6924
6925 return x
6926 }
6927
6928
6929 func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
6930 if base.Ctxt.Flag_linkshared {
6931
6932
6933 return
6934 }
6935
6936 wfn := e.curfn.WrappedFunc
6937 if wfn == nil {
6938 return
6939 }
6940
6941 wsym := wfn.Linksym()
6942 x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
6943 objw.SymPtrOff(x, 0, wsym)
6944 x.Set(obj.AttrContentAddressable, true)
6945 x.Align = 4
6946 })
6947 e.curfn.LSym.Func().WrapInfo = x
6948
6949
6950 p := pp.Prog(obj.AFUNCDATA)
6951 p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
6952 p.To.Type = obj.TYPE_MEM
6953 p.To.Name = obj.NAME_EXTERN
6954 p.To.Sym = x
6955 }
6956
6957
6958 func genssa(f *ssa.Func, pp *objw.Progs) {
6959 var s State
6960 s.ABI = f.OwnAux.Fn.ABI()
6961
6962 e := f.Frontend().(*ssafn)
6963
6964 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6965
6966 var lv *liveness.Liveness
6967 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6968 emitArgInfo(e, f, pp)
6969 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6970
6971 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6972 if openDeferInfo != nil {
6973
6974
6975 p := pp.Prog(obj.AFUNCDATA)
6976 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6977 p.To.Type = obj.TYPE_MEM
6978 p.To.Name = obj.NAME_EXTERN
6979 p.To.Sym = openDeferInfo
6980 }
6981
6982 emitWrappedFuncInfo(e, pp)
6983
6984
6985 s.bstart = make([]*obj.Prog, f.NumBlocks())
6986 s.pp = pp
6987 var progToValue map[*obj.Prog]*ssa.Value
6988 var progToBlock map[*obj.Prog]*ssa.Block
6989 var valueToProgAfter []*obj.Prog
6990 if gatherPrintInfo {
6991 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6992 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6993 f.Logf("genssa %s\n", f.Name)
6994 progToBlock[s.pp.Next] = f.Blocks[0]
6995 }
6996
6997 if base.Ctxt.Flag_locationlists {
6998 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6999 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
7000 }
7001 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
7002 clear(valueToProgAfter)
7003 }
7004
7005
7006
7007 firstPos := src.NoXPos
7008 for _, v := range f.Entry.Values {
7009 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7010 firstPos = v.Pos
7011 v.Pos = firstPos.WithDefaultStmt()
7012 break
7013 }
7014 }
7015
7016
7017
7018
7019 var inlMarks map[*obj.Prog]int32
7020 var inlMarkList []*obj.Prog
7021
7022
7023
7024 var inlMarksByPos map[src.XPos][]*obj.Prog
7025
7026 var argLiveIdx int = -1
7027
7028
7029
7030
7031
7032 var hotAlign, hotRequire int64
7033
7034 if base.Debug.AlignHot > 0 {
7035 switch base.Ctxt.Arch.Name {
7036
7037
7038
7039
7040
7041 case "amd64", "386":
7042
7043
7044
7045 hotAlign = 64
7046 hotRequire = 31
7047 }
7048 }
7049
7050
7051 for i, b := range f.Blocks {
7052
7053 s.lineRunStart = nil
7054 s.SetPos(s.pp.Pos.WithNotStmt())
7055
7056 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
7057
7058
7059
7060
7061
7062 p := s.pp.Prog(obj.APCALIGNMAX)
7063 p.From.SetConst(hotAlign)
7064 p.To.SetConst(hotRequire)
7065 }
7066
7067 s.bstart[b.ID] = s.pp.Next
7068
7069 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7070 argLiveIdx = idx
7071 p := s.pp.Prog(obj.APCDATA)
7072 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7073 p.To.SetConst(int64(idx))
7074 }
7075
7076
7077 Arch.SSAMarkMoves(&s, b)
7078 for _, v := range b.Values {
7079 x := s.pp.Next
7080 s.DebugFriendlySetPosFrom(v)
7081
7082 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7083 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7084 }
7085
7086 switch v.Op {
7087 case ssa.OpInitMem:
7088
7089 case ssa.OpArg:
7090
7091 case ssa.OpSP, ssa.OpSB:
7092
7093 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7094
7095 case ssa.OpGetG:
7096
7097
7098 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7099
7100 case ssa.OpPhi:
7101 CheckLoweredPhi(v)
7102 case ssa.OpConvert:
7103
7104 if v.Args[0].Reg() != v.Reg() {
7105 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7106 }
7107 case ssa.OpInlMark:
7108 p := Arch.Ginsnop(s.pp)
7109 if inlMarks == nil {
7110 inlMarks = map[*obj.Prog]int32{}
7111 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7112 }
7113 inlMarks[p] = v.AuxInt32()
7114 inlMarkList = append(inlMarkList, p)
7115 pos := v.Pos.AtColumn1()
7116 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7117 firstPos = src.NoXPos
7118
7119 default:
7120
7121 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7122 s.SetPos(firstPos)
7123 firstPos = src.NoXPos
7124 }
7125
7126
7127 s.pp.NextLive = s.livenessMap.Get(v)
7128 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7129
7130
7131 Arch.SSAGenValue(&s, v)
7132 }
7133
7134 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7135 argLiveIdx = idx
7136 p := s.pp.Prog(obj.APCDATA)
7137 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7138 p.To.SetConst(int64(idx))
7139 }
7140
7141 if base.Ctxt.Flag_locationlists {
7142 valueToProgAfter[v.ID] = s.pp.Next
7143 }
7144
7145 if gatherPrintInfo {
7146 for ; x != s.pp.Next; x = x.Link {
7147 progToValue[x] = v
7148 }
7149 }
7150 }
7151
7152 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7153 p := Arch.Ginsnop(s.pp)
7154 p.Pos = p.Pos.WithIsStmt()
7155 if b.Pos == src.NoXPos {
7156 b.Pos = p.Pos
7157 if b.Pos == src.NoXPos {
7158 b.Pos = s.pp.Text.Pos
7159 }
7160 }
7161 b.Pos = b.Pos.WithBogusLine()
7162 }
7163
7164
7165
7166
7167
7168 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7169
7170
7171 var next *ssa.Block
7172 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
7173
7174
7175
7176
7177 next = f.Blocks[i+1]
7178 }
7179 x := s.pp.Next
7180 s.SetPos(b.Pos)
7181 Arch.SSAGenBlock(&s, b, next)
7182 if gatherPrintInfo {
7183 for ; x != s.pp.Next; x = x.Link {
7184 progToBlock[x] = b
7185 }
7186 }
7187 }
7188 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
7189
7190
7191
7192
7193 Arch.Ginsnop(s.pp)
7194 }
7195 if openDeferInfo != nil {
7196
7197
7198
7199
7200
7201
7202
7203
7204 s.pp.NextLive = s.livenessMap.DeferReturn
7205 p := s.pp.Prog(obj.ACALL)
7206 p.To.Type = obj.TYPE_MEM
7207 p.To.Name = obj.NAME_EXTERN
7208 p.To.Sym = ir.Syms.Deferreturn
7209
7210
7211
7212
7213
7214 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7215 n := o.Name
7216 rts, offs := o.RegisterTypesAndOffsets()
7217 for i := range o.Registers {
7218 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7219 }
7220 }
7221
7222 s.pp.Prog(obj.ARET)
7223 }
7224
7225 if inlMarks != nil {
7226 hasCall := false
7227
7228
7229
7230
7231 for p := s.pp.Text; p != nil; p = p.Link {
7232 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
7233 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
7234
7235
7236
7237
7238
7239 continue
7240 }
7241 if _, ok := inlMarks[p]; ok {
7242
7243
7244 continue
7245 }
7246 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7247 hasCall = true
7248 }
7249 pos := p.Pos.AtColumn1()
7250 marks := inlMarksByPos[pos]
7251 if len(marks) == 0 {
7252 continue
7253 }
7254 for _, m := range marks {
7255
7256
7257
7258 p.Pos = p.Pos.WithIsStmt()
7259 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
7260
7261 m.As = obj.ANOP
7262 m.Pos = src.NoXPos
7263 m.From = obj.Addr{}
7264 m.To = obj.Addr{}
7265 }
7266 delete(inlMarksByPos, pos)
7267 }
7268
7269 for _, p := range inlMarkList {
7270 if p.As != obj.ANOP {
7271 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7272 }
7273 }
7274
7275 if e.stksize == 0 && !hasCall {
7276
7277
7278
7279
7280
7281
7282 for p := s.pp.Text; p != nil; p = p.Link {
7283 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7284 continue
7285 }
7286 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7287
7288 nop := Arch.Ginsnop(s.pp)
7289 nop.Pos = e.curfn.Pos().WithIsStmt()
7290
7291
7292
7293
7294
7295 for x := s.pp.Text; x != nil; x = x.Link {
7296 if x.Link == nop {
7297 x.Link = nop.Link
7298 break
7299 }
7300 }
7301
7302 for x := s.pp.Text; x != nil; x = x.Link {
7303 if x.Link == p {
7304 nop.Link = p
7305 x.Link = nop
7306 break
7307 }
7308 }
7309 }
7310 break
7311 }
7312 }
7313 }
7314
7315 if base.Ctxt.Flag_locationlists {
7316 var debugInfo *ssa.FuncDebug
7317 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
7318
7319
7320 debugInfo.EntryID = f.Entry.ID
7321 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7322 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7323 } else {
7324 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7325 }
7326 bstart := s.bstart
7327 idToIdx := make([]int, f.NumBlocks())
7328 for i, b := range f.Blocks {
7329 idToIdx[b.ID] = i
7330 }
7331
7332
7333
7334 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7335 switch v {
7336 case ssa.BlockStart.ID:
7337 if b == f.Entry.ID {
7338 return 0
7339
7340 }
7341 return bstart[b].Pc
7342 case ssa.BlockEnd.ID:
7343 blk := f.Blocks[idToIdx[b]]
7344 nv := len(blk.Values)
7345 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7346 case ssa.FuncEnd.ID:
7347 return e.curfn.LSym.Size
7348 default:
7349 return valueToProgAfter[v].Pc
7350 }
7351 }
7352 }
7353
7354
7355 for _, br := range s.Branches {
7356 br.P.To.SetTarget(s.bstart[br.B.ID])
7357 if br.P.Pos.IsStmt() != src.PosIsStmt {
7358 br.P.Pos = br.P.Pos.WithNotStmt()
7359 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7360 br.P.Pos = br.P.Pos.WithNotStmt()
7361 }
7362
7363 }
7364
7365
7366 for _, jt := range s.JumpTables {
7367
7368 targets := make([]*obj.Prog, len(jt.Succs))
7369 for i, e := range jt.Succs {
7370 targets[i] = s.bstart[e.Block().ID]
7371 }
7372
7373
7374
7375 fi := s.pp.CurFunc.LSym.Func()
7376 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7377 }
7378
7379 if e.log {
7380 filename := ""
7381 for p := s.pp.Text; p != nil; p = p.Link {
7382 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7383 filename = p.InnermostFilename()
7384 f.Logf("# %s\n", filename)
7385 }
7386
7387 var s string
7388 if v, ok := progToValue[p]; ok {
7389 s = v.String()
7390 } else if b, ok := progToBlock[p]; ok {
7391 s = b.String()
7392 } else {
7393 s = " "
7394 }
7395 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7396 }
7397 }
7398 if f.HTMLWriter != nil {
7399 var buf strings.Builder
7400 buf.WriteString("<code>")
7401 buf.WriteString("<dl class=\"ssa-gen\">")
7402 filename := ""
7403
7404 liveness := lv.Format(nil)
7405 if liveness != "" {
7406 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7407 buf.WriteString(html.EscapeString("# " + liveness))
7408 buf.WriteString("</dd>")
7409 }
7410
7411 for p := s.pp.Text; p != nil; p = p.Link {
7412
7413
7414 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7415 filename = p.InnermostFilename()
7416 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7417 buf.WriteString(html.EscapeString("# " + filename))
7418 buf.WriteString("</dd>")
7419 }
7420
7421 buf.WriteString("<dt class=\"ssa-prog-src\">")
7422 if v, ok := progToValue[p]; ok {
7423
7424
7425 if p.As != obj.APCDATA {
7426 if liveness := lv.Format(v); liveness != "" {
7427
7428 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7429 buf.WriteString(html.EscapeString("# " + liveness))
7430 buf.WriteString("</dd>")
7431
7432 buf.WriteString("<dt class=\"ssa-prog-src\">")
7433 }
7434 }
7435
7436 buf.WriteString(v.HTML())
7437 } else if b, ok := progToBlock[p]; ok {
7438 buf.WriteString("<b>" + b.HTML() + "</b>")
7439 }
7440 buf.WriteString("</dt>")
7441 buf.WriteString("<dd class=\"ssa-prog\">")
7442 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7443 buf.WriteString("</dd>")
7444 }
7445 buf.WriteString("</dl>")
7446 buf.WriteString("</code>")
7447 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7448 }
7449 if ssa.GenssaDump[f.Name] {
7450 fi := f.DumpFileForPhase("genssa")
7451 if fi != nil {
7452
7453
7454 inliningDiffers := func(a, b []src.Pos) bool {
7455 if len(a) != len(b) {
7456 return true
7457 }
7458 for i := range a {
7459 if a[i].Filename() != b[i].Filename() {
7460 return true
7461 }
7462 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7463 return true
7464 }
7465 }
7466 return false
7467 }
7468
7469 var allPosOld []src.Pos
7470 var allPos []src.Pos
7471
7472 for p := s.pp.Text; p != nil; p = p.Link {
7473 if p.Pos.IsKnown() {
7474 allPos = allPos[:0]
7475 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7476 if inliningDiffers(allPos, allPosOld) {
7477 for _, pos := range allPos {
7478 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7479 }
7480 allPos, allPosOld = allPosOld, allPos
7481 }
7482 }
7483
7484 var s string
7485 if v, ok := progToValue[p]; ok {
7486 s = v.String()
7487 } else if b, ok := progToBlock[p]; ok {
7488 s = b.String()
7489 } else {
7490 s = " "
7491 }
7492 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7493 }
7494 fi.Close()
7495 }
7496 }
7497
7498 defframe(&s, e, f)
7499
7500 f.HTMLWriter.Close()
7501 f.HTMLWriter = nil
7502 }
7503
// defframe finalizes the function's frame: it stamps the argument and
// frame sizes onto the TEXT pseudo-instruction, spills pointer-carrying
// register arguments whose stack slots are only partially live (so the
// slot is fully initialized before the GC can observe it), and zeroes
// any stack slots marked Needzero.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Frame size is the aligned outgoing-argument area plus the local
	// stack slots, optionally padded by the architecture.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size on the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	// p tracks the insertion point: new prologue instructions are
	// appended after it.
	p := pp.Text

	// Spill pointer-holding argument registers for partially-live
	// in-register params, unless the entry block already stores them.
	// Skipped with -N (no optimization) or when no args arrive in
	// registers.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// Record which (name, offset) pieces the entry block already
		// spills before any call can observe the frame.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				// Stop at the first call; later spills don't help.
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Insert spills for any pointer-carrying register pieces that
		// were not already stored by the entry block.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					// Non-pointer pieces need no spill for GC.
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					// Already spilled by the entry block.
					continue
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Zero ambiguously-live (Needzero) stack variables.
	// lo/hi delimit the current pending range of frame offsets to zero;
	// the range is empty while lo == hi.
	var lo, hi int64

	// Opaque scratch state threaded through the arch's ZeroRange.
	var state uint32

	// Accumulate nearby Needzero slots into as few ZeroRange calls as
	// possible.
	// NOTE(review): the merge test below only moves lo downward, which
	// assumes Dcl visits autos in decreasing frame-offset order — confirm.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// This slot is close enough to the pending range that
			// extending the range is cheaper than a separate call.
			lo = n.FrameOffset()
			continue
		}

		// Flush the pending range (no-op when empty).
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new range at this variable.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Flush whatever range remains.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7610
7611
// IndexJump describes a conditional branch: the jump opcode to emit and
// the index of the block successor it should target (see State.oneJump).
type IndexJump struct {
	Jump  obj.As
	Index int
}
7616
7617 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7618 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7619 p.Pos = b.Pos
7620 }
7621
7622
7623
7624 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7625 switch next {
7626 case b.Succs[0].Block():
7627 s.oneJump(b, &jumps[0][0])
7628 s.oneJump(b, &jumps[0][1])
7629 case b.Succs[1].Block():
7630 s.oneJump(b, &jumps[1][0])
7631 s.oneJump(b, &jumps[1][1])
7632 default:
7633 var q *obj.Prog
7634 if b.Likely != ssa.BranchUnlikely {
7635 s.oneJump(b, &jumps[1][0])
7636 s.oneJump(b, &jumps[1][1])
7637 q = s.Br(obj.AJMP, b.Succs[1].Block())
7638 } else {
7639 s.oneJump(b, &jumps[0][0])
7640 s.oneJump(b, &jumps[0][1])
7641 q = s.Br(obj.AJMP, b.Succs[0].Block())
7642 }
7643 q.Pos = b.Pos
7644 }
7645 }
7646
7647
// AddAux adds the offset and symbol information held in v.Aux (plus
// v.AuxInt) to the address a. See AddAux2.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7651 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7652 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7653 v.Fatalf("bad AddAux addr %v", a)
7654 }
7655
7656 a.Offset += offset
7657
7658
7659 if v.Aux == nil {
7660 return
7661 }
7662
7663 switch n := v.Aux.(type) {
7664 case *ssa.AuxCall:
7665 a.Name = obj.NAME_EXTERN
7666 a.Sym = n.Fn
7667 case *obj.LSym:
7668 a.Name = obj.NAME_EXTERN
7669 a.Sym = n
7670 case *ir.Name:
7671 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7672 a.Name = obj.NAME_PARAM
7673 } else {
7674 a.Name = obj.NAME_AUTO
7675 }
7676 a.Sym = n.Linksym()
7677 a.Offset += n.FrameOffset()
7678 default:
7679 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7680 }
7681 }
7682
7683
7684
// extendIndex extends the index value idx to the machine's pointer
// width. On 32-bit-pointer targets a 64-bit index is truncated to its
// low word, with (unless bounded) a branch to a panic when the high
// word is nonzero. len is used only by that panic path; kind selects
// which bounds error is reported; bounded means the caller has already
// proven the index in range.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit-pointer machine: keep the low word.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Index proven in range, or bounds checks disabled (-B):
			// no high-word check needed.
			return lo
		}
		// Branch to a panic block when the high word is nonzero (such
		// an index can never be in range on this machine).
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Switch to the unsigned variant of the bounds-check kind,
			// since the index type is unsigned.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// hi == 0 is the likely (in-range) direction.
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Extend a narrower-than-pointer index. The switch key encodes the
	// (index size, pointer size) pair as 10*size + PtrSize.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7776
7777
7778
7779 func CheckLoweredPhi(v *ssa.Value) {
7780 if v.Op != ssa.OpPhi {
7781 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7782 }
7783 if v.Type.IsMemory() {
7784 return
7785 }
7786 f := v.Block.Func
7787 loc := f.RegAlloc[v.ID]
7788 for _, a := range v.Args {
7789 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7790 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7791 }
7792 }
7793 }
7794
7795
7796
7797
7798
7799 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7800 entry := v.Block.Func.Entry
7801 if entry != v.Block {
7802 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7803 }
7804 for _, w := range entry.Values {
7805 if w == v {
7806 break
7807 }
7808 switch w.Op {
7809 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7810
7811 default:
7812 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7813 }
7814 }
7815 }
7816
7817
7818 func CheckArgReg(v *ssa.Value) {
7819 entry := v.Block.Func.Entry
7820 if entry != v.Block {
7821 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7822 }
7823 }
7824
7825 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7826 n, off := ssa.AutoVar(v)
7827 a.Type = obj.TYPE_MEM
7828 a.Sym = n.Linksym()
7829 a.Reg = int16(Arch.REGSP)
7830 a.Offset = n.FrameOffset() + off
7831 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7832 a.Name = obj.NAME_PARAM
7833 } else {
7834 a.Name = obj.NAME_AUTO
7835 }
7836 }
7837
7838
7839
7840 func (s *State) Call(v *ssa.Value) *obj.Prog {
7841 pPosIsStmt := s.pp.Pos.IsStmt()
7842 s.PrepareCall(v)
7843
7844 p := s.Prog(obj.ACALL)
7845 if pPosIsStmt == src.PosIsStmt {
7846 p.Pos = v.Pos.WithIsStmt()
7847 } else {
7848 p.Pos = v.Pos.WithNotStmt()
7849 }
7850 if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
7851 p.To.Type = obj.TYPE_MEM
7852 p.To.Name = obj.NAME_EXTERN
7853 p.To.Sym = sym.Fn
7854 } else {
7855
7856 switch Arch.LinkArch.Family {
7857 case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
7858 p.To.Type = obj.TYPE_REG
7859 case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
7860 p.To.Type = obj.TYPE_MEM
7861 default:
7862 base.Fatalf("unknown indirect call family")
7863 }
7864 p.To.Reg = v.Args[0].Reg()
7865 }
7866 return p
7867 }
7868
7869
7870
7871 func (s *State) TailCall(v *ssa.Value) *obj.Prog {
7872 p := s.Call(v)
7873 p.As = obj.ARET
7874 return p
7875 }
7876
7877
7878
7879
7880 func (s *State) PrepareCall(v *ssa.Value) {
7881 idx := s.livenessMap.Get(v)
7882 if !idx.StackMapValid() {
7883
7884 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7885 base.Fatalf("missing stack map index for %v", v.LongString())
7886 }
7887 }
7888
7889 call, ok := v.Aux.(*ssa.AuxCall)
7890
7891 if ok {
7892
7893
7894 if nowritebarrierrecCheck != nil {
7895 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7896 }
7897 }
7898
7899 if s.maxarg < v.AuxInt {
7900 s.maxarg = v.AuxInt
7901 }
7902 }
7903
7904
7905
7906 func (s *State) UseArgs(n int64) {
7907 if s.maxarg < n {
7908 s.maxarg = n
7909 }
7910 }
7911
7912
7913 func fieldIdx(n *ir.SelectorExpr) int {
7914 t := n.X.Type()
7915 if !isStructNotSIMD(t) {
7916 panic("ODOT's LHS is not a struct")
7917 }
7918
7919 for i, f := range t.Fields() {
7920 if f.Sym == n.Sel {
7921 if f.Offset != n.Offset() {
7922 panic("field offset doesn't match")
7923 }
7924 return i
7925 }
7926 }
7927 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7928
7929
7930
7931 }
7932
7933
7934
// ssafn carries per-function state shared between the frontend and the
// SSA backend; the e.* methods below provide compiler services
// (logging, errors, string data, slot splitting) on top of it.
type ssafn struct {
	curfn      *ir.Func
	strings    map[string]*obj.LSym // constant string -> data symbol; built lazily by StringData
	stksize    int64                // local frame size; added to maxarg in defframe
	stkptrsize int64                // presumably the pointer-containing prefix of the frame — not used in this chunk; confirm

	// stkalign is the frame alignment; defframe rounds the
	// outgoing-argument area up to it.
	// NOTE(review): original had surrounding blank lines, suggesting a
	// longer explanation elsewhere.
	stkalign int64

	log bool // enable Logf output (see Logf/Log)
}
7949
7950
7951
7952 func (e *ssafn) StringData(s string) *obj.LSym {
7953 if aux, ok := e.strings[s]; ok {
7954 return aux
7955 }
7956 if e.strings == nil {
7957 e.strings = make(map[string]*obj.LSym)
7958 }
7959 data := staticdata.StringSym(e.curfn.Pos(), s)
7960 e.strings[s] = data
7961 return data
7962 }
7963
7964
7965 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
7966 node := parent.N
7967
7968 if node.Class != ir.PAUTO || node.Addrtaken() {
7969
7970 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
7971 }
7972
7973 sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
7974 n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
7975 n.SetUsed(true)
7976 n.SetEsc(ir.EscNever)
7977 types.CalcSize(t)
7978 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
7979 }
7980
7981
7982 func (e *ssafn) Logf(msg string, args ...any) {
7983 if e.log {
7984 fmt.Printf(msg, args...)
7985 }
7986 }
7987
// Log reports whether logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7991
7992
7993 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...any) {
7994 base.Pos = pos
7995 nargs := append([]any{ir.FuncName(e.curfn)}, args...)
7996 base.Fatalf("'%s': "+msg, nargs...)
7997 }
7998
7999
8000
8001 func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...any) {
8002 base.WarnfAt(pos, fmt_, args...)
8003 }
8004
// Debug_checknil reports whether nil-check debugging (-d nil) is on.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
8008
// UseWriteBarrier reports whether write barriers are enabled (-wb flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
8012
8013 func (e *ssafn) Syslook(name string) *obj.LSym {
8014 switch name {
8015 case "goschedguarded":
8016 return ir.Syms.Goschedguarded
8017 case "writeBarrier":
8018 return ir.Syms.WriteBarrier
8019 case "wbZero":
8020 return ir.Syms.WBZero
8021 case "wbMove":
8022 return ir.Syms.WBMove
8023 case "cgoCheckMemmove":
8024 return ir.Syms.CgoCheckMemmove
8025 case "cgoCheckPtrWrite":
8026 return ir.Syms.CgoCheckPtrWrite
8027 }
8028 e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
8029 return nil
8030 }
8031
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
8035
8036 func clobberBase(n ir.Node) ir.Node {
8037 if n.Op() == ir.ODOT {
8038 n := n.(*ir.SelectorExpr)
8039 if n.X.Type().NumFields() == 1 {
8040 return clobberBase(n.X)
8041 }
8042 }
8043 if n.Op() == ir.OINDEX {
8044 n := n.(*ir.IndexExpr)
8045 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
8046 return clobberBase(n.X)
8047 }
8048 }
8049 return n
8050 }
8051
8052
8053 func callTargetLSym(callee *ir.Name) *obj.LSym {
8054 if callee.Func == nil {
8055
8056
8057
8058 return callee.Linksym()
8059 }
8060
8061 return callee.LinksymABI(callee.Func.ABI)
8062 }
8063
8064
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct, which verifies this invariant at build time.
const deferStructFnField = 4

// deferType caches the type constructed by deferstruct.
var deferType *types.Type
8068
8069
8070
// deferstruct builds (once, caching in deferType) a named struct type
// standing in for the runtime's _defer record.
// NOTE(review): field order matters — deferStructFnField indexes into
// it — and the layout presumably mirrors runtime._defer; confirm before
// editing the field list.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield creates an unexported field with the given name/type.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// Pointer-valued runtime fields are declared here as uintptr;
		// the reason is not visible in this chunk (presumably to keep
		// this type pointer-free for the GC — confirm).
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Guard the deferStructFnField index against field-list edits.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer wrapping the struct.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Attach the underlying struct and compute its layout.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
8109
8110
8111
8112
8113
8114 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
8115 return obj.Addr{
8116 Name: obj.NAME_NONE,
8117 Type: obj.TYPE_MEM,
8118 Reg: baseReg,
8119 Offset: spill.Offset + extraOffset,
8120 }
8121 }
8122
8123 func isStructNotSIMD(t *types.Type) bool {
8124 return t.IsStruct() && !t.IsSIMD()
8125 }
8126
// BoundsCheckFunc is indexed by ssa.BoundsKind; presumably it holds the
// runtime symbol for each bounds-failure kind — it is populated outside
// this chunk; confirm at the initialization site.
var BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
8128
// (removed non-code residue: "View as plain text" — an HTML source-listing artifact, not Go)