1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "internal/runtime/gc"
16 "os"
17 "path/filepath"
18 "slices"
19 "strings"
20
21 "cmd/compile/internal/abi"
22 "cmd/compile/internal/base"
23 "cmd/compile/internal/ir"
24 "cmd/compile/internal/liveness"
25 "cmd/compile/internal/objw"
26 "cmd/compile/internal/reflectdata"
27 "cmd/compile/internal/rttype"
28 "cmd/compile/internal/ssa"
29 "cmd/compile/internal/staticdata"
30 "cmd/compile/internal/typecheck"
31 "cmd/compile/internal/types"
32 "cmd/internal/obj"
33 "cmd/internal/objabi"
34 "cmd/internal/src"
35 "cmd/internal/sys"
36
37 rtabi "internal/abi"
38 )
39
// ssaConfig is the shared, per-process SSA backend configuration.
var ssaConfig *ssa.Config

// ssaCaches holds one reusable ssa.Cache per concurrent backend worker
// (sized by the -c flag; see InitConfig).
var ssaCaches []ssa.Cache

var ssaDump string     // name of function to dump SSA for, from $GOSSAFUNC
var ssaDir string      // optional destination directory for ssa.html, from $GOSSADIR
var ssaDumpStdout bool // whether to also dump to stdout ($GOSSAFUNC ends in "+")
var ssaDumpCFG string  // phases for which to draw CFGs ($GOSSAFUNC suffix after ":")

// ssaDumpFile is the default output filename for the SSA HTML dump.
const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds functions inlined into the function being dumped,
// so their sources can be shown alongside it (see DumpInline).
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation is the size threshold below which several
// small pointer-free heap allocations in a block may be merged into one
// allocation (see newHeapaddr / flushPendingHeapAllocations).
// NOTE(review): unit is assumed to be bytes — confirm against allocSize.
const maxAggregatedHeapAllocation = 16
56
57 func DumpInline(fn *ir.Func) {
58 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
59 ssaDumpInlined = append(ssaDumpInlined, fn)
60 }
61 }
62
63 func InitEnv() {
64 ssaDump = os.Getenv("GOSSAFUNC")
65 ssaDir = os.Getenv("GOSSADIR")
66 if ssaDump != "" {
67 if strings.HasSuffix(ssaDump, "+") {
68 ssaDump = ssaDump[:len(ssaDump)-1]
69 ssaDumpStdout = true
70 }
71 spl := strings.Split(ssaDump, ":")
72 if len(spl) > 1 {
73 ssaDump = spl[0]
74 ssaDumpCFG = spl[1]
75 }
76 }
77 }
78
// InitConfig initializes the package-global SSA configuration: the shared
// type cache, the runtime symbols the backend emits calls to, and the
// per-worker SSA caches.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that are uncommon in the frontend but
	// common in the backend. Pointer-type caching is disabled below
	// (NewPtrCacheEnabled = false), so creating these now avoids
	// re-creating them per function later.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // *map internal representation
	_ = types.NewPtr(deferstruct())                                         // *runtime defer record
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	// One SSA cache per backend worker (-c flag).
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up runtime functions and variables the backend will reference.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	// Write-barrier helpers, indexed by the number of pointers written
	// minus one (gcWriteBarrier1 .. gcWriteBarrier8).
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	// Size-specialized malloc entry points; index 0 of each table is unused
	// (the loops start at 1). The NoScan/ScanNoHeader tables are indexed by
	// size class, the Tiny table by allocation size (see specializedMallocSym).
	for i := 1; i < len(ir.Syms.MallocGCSmallNoScan); i++ {
		ir.Syms.MallocGCSmallNoScan[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallNoScanSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCSmallScanNoHeader); i++ {
		ir.Syms.MallocGCSmallScanNoHeader[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallScanNoHeaderSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCTiny); i++ {
		ir.Syms.MallocGCTiny[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocTiny%d", i))
	}
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// The Go-based bounds-check helpers are looked up only on Wasm.
	// NOTE(review): other architectures presumably reach bounds panics via
	// PanicBounds/PanicExtend above — confirm against the per-arch lowering.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	}

	// Wasm helpers and sigpanic are looked up unconditionally.
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
205
// InitTables initializes the intrinsic tables used to lower calls to
// special runtime/intrinsic functions.
// NOTE(review): the nil argument presumably selects the current target's
// default configuration — confirm against initIntrinsics (not in view).
func InitTables() {
	initIntrinsics(nil)
}
209
210
211
212
213
214
215
216
// AbiForBodylessFuncStackMap returns the ABI configuration to use when
// generating the stack map for a function with no body. It always answers
// ABI0, regardless of fn, and returns a fresh copy each time.
// NOTE(review): fn is currently unused; the Copy presumably avoids sharing
// a mutable config across concurrent backend workers — confirm.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
220
221
222
223 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
224 if buildcfg.Experiment.RegabiArgs {
225
226 if fn == nil {
227 return abi1
228 }
229 switch fn.ABI {
230 case obj.ABI0:
231 return abi0
232 case obj.ABIInternal:
233
234
235 return abi1
236 }
237 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
238 panic("not reachable")
239 }
240
241 a := abi0
242 if fn != nil {
243 if fn.Pragma&ir.RegisterParams != 0 {
244 a = abi1
245 }
246 }
247 return a
248 }
249
250
251
252
253
254
255
256
257
258
259
260
// emitOpenDeferInfo emits the per-function funcdata describing the
// open-coded defers, for use by panic processing at run time. It records
// (as negated uvarints) the frame offset of the deferBits temporary and the
// frame offset of the first defer-closure slot; the remaining slots are
// required to be laid out contiguously, one pointer apart.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the closure slots really are contiguous, in defer order,
	// spaced one pointer-size apart — the funcdata format depends on it.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	// The info lives in a content-addressable symbol hung off the function.
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative here; negate so they encode compactly.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
281
282
283
// buildssa builds an SSA function for fn.
// worker indicates which backend worker (and hence which ssaCaches entry)
// is doing the processing; isPgoHot marks the function as hot per PGO.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	// Decide whether this function matches the $GOSSAFUNC dump spec.
	// The spec may be a bare name, a package-qualified name, or a
	// subpackage-qualified name, optionally with an ABI selector.
	printssa := false
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			// Spec ends in ",<digit>": compare against the ABI-qualified name.
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				// Spec ends in "<digit>": rewrite it to the "," form and
				// compare against the ABI-qualified name.
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // bare name
			pkgDotName == ssaDump || // package-qualified name
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // subpackage-qualified
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the body's AST dump for the HTML writer (and stdout).
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Decide which instrumentation applies: memory-op instrumentation and,
	// for the race detector, function enter/exit instrumentation.
	// ABI wrappers and //go:norace functions are never instrumented.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	// Reuse this worker's SSA cache across functions.
	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			// Write the dump under $GOSSADIR, named by package and function.
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)

		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether to use open-coded defers. Start from the cheap
	// criteria, then progressively disable for the cases below.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Disabled on 386 when building shared/dynlink code.
		// NOTE(review): presumably because the PIC prologue rewriting on
		// 386 interferes with the deferreturn/ret code sequence — confirm.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Race-detector enter/exit instrumentation is incompatible with the
		// open-coded defer exit sequence, so fall back to regular defers.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Disable open-coded defers if any result parameter lives on the
		// heap: those need to be copied back at exit, which the open-coded
		// defer exit path does not handle.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Open-coded defers duplicate the defer calls at every exit, so cap
		// the code-size blowup for functions with many returns and defers.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and its stack slot. deferBits is a
		// bitmask recording which open-coded defers have been activated.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// OpConst8 with default (zero) AuxInt: all defers start inactive.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Keep the deferBits slot live (panic processing reads it) so its
		// stores are not dead-store eliminated.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Capture PPARAMOUT nodes passed in registers to a side list now, so
	// they remain available for debug info generation even after later
	// passes prune fn.Dcl.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// Processed at each use, to avoid emitting the address before
			// the variable's declaration.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate incoming parameters.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				// SSA-able parameter: bind it to an OpArg value.
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // for debug information
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// Type is SSA-able but the variable is not (e.g.
						// address-taken): receive via OpArg and store to
						// its stack home immediately.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Too big for SSA: spill the assigned registers
						// directly to the parameter's stack slot.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// With optimizations disabled, keep a copy of the closure
			// pointer in a named slot (".closureptr") for range-func
			// bodies, so tooling can find it on the stack.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Mark it live so the store is not dead-store eliminated.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// A small, non-address-taken by-value capture can be promoted
			// to a PAUTO and handled as an SSA variable.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// By-reference captures store a pointer to the variable; load
			// it to get the variable's address.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to SSA.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fall through to exit if control reaches the end of the body.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call into the ssa package to compile the function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming register-parameter spill locations (used by the
	// morestack path emitted in the assembler). All register pieces of all
	// input parameters are recorded, used or not.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
608
609 func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
610 typs, offs := paramAssignment.RegisterTypesAndOffsets()
611 for i, t := range typs {
612 if pointersOnly && !t.IsPtrShaped() {
613 continue
614 }
615 r := paramAssignment.Registers[i]
616 o := offs[i]
617 op, reg := ssa.ArgOpAndRegisterFor(r, abi)
618 aux := &ssa.AuxNameOffset{Name: n, Offset: o}
619 v := s.newValue0I(op, t, reg)
620 v.Aux = aux
621 p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
622 s.store(t, p, v)
623 }
624 }
625
626
627
628
629
630
631
// zeroResults zeros the return values at the start of the function.
// This must happen very early: a recovered panic may expose the results as
// they exist at panic time, so they need defined (zero) values before any
// defer can run.
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// Heap-escaped result: only its stack-resident pointer needs
			// zeroing, which is handled separately.
			// NOTE(review): presumably the heap allocation (paramsToHeap /
			// newObject) yields zeroed memory — confirm.
			continue
		}
		// Zero the stack slot containing the result.
		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			// Emit a VarDef only when liveness cares about this variable
			// (it has pointers or may be stack-slot-merged).
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
652
653
654
// paramsToHeap produces code to allocate memory for heap-escaped parameters
// and results, and to copy incoming parameter values from their stack slots
// into the new heap locations.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Only incoming parameters carry a value to copy;
				// results have no initial value.
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
677
678
679
680
681 func allocSizeAndAlign(t *types.Type) (int64, int64) {
682 size, align := t.Size(), t.Alignment()
683 if types.PtrSize == 4 && align == 4 && size >= 8 {
684
685 size = types.RoundUp(size, 8)
686 align = 8
687 }
688 return size, align
689 }
690 func allocSize(t *types.Type) int64 {
691 size, _ := allocSizeAndAlign(t)
692 return size
693 }
694 func allocAlign(t *types.Type) int64 {
695 _, align := allocSizeAndAlign(t)
696 return align
697 }
698
699
// newHeapaddr allocates heap memory for n and records its heap address.
// Small pointer-free allocations within a block are aggregated into a single
// shared allocation (finalized in flushPendingHeapAllocations).
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		// Not a candidate for aggregation: has pointers, too big, or
		// zero-sized (which uses the shared zerobase address).
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type()))
		return
	}

	// If this allocation doesn't fit with the current pending batch,
	// flush the batch and start a new one.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value
	if len(s.pendingHeapAllocations) == 0 {
		// Start a new batch with a single allocation of n's type; if more
		// allocations join the batch, flushPendingHeapAllocations rewrites
		// this call into a raw mallocgc of the combined size. The
		// non-specialized entry point is used so that rewrite stays legal.
		allocCall = s.newObjectNonSpecialized(n.Type(), nil)
	} else {
		// Join the existing batch: share its allocation call.
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}

	// v addresses n's slice of the shared allocation. The offset is a
	// placeholder 0 for now; flushPendingHeapAllocations assigns real
	// offsets once the batch is complete.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	// Record v so the flush can fix it up later.
	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	s.setHeapaddr(n.Pos(), n, v)
}
735
// flushPendingHeapAllocations finalizes the current batch of aggregated
// small heap allocations: it assigns each pending OffPtr its real offset
// and, when the batch has more than one member, rewrites the shared
// allocation call into a raw mallocgc of the combined size.
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return // nothing pending
	}
	s.pendingHeapAllocations = nil // reset batch state
	ptr := pending[0].Args[0]      // the result of the allocation call
	call := ptr.Args[0]            // the allocation call itself

	if len(pending) == 1 {
		// Only one object joined the batch: the original typed allocation
		// already has the right type and size, so the placeholder
		// OffPtr[0] just becomes a copy of the allocation result.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort by decreasing alignment so no padding is ever needed between
	// members. The stable sort keeps program order among equal alignments.
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign each member its offset and accumulate the total size.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	// Round the total up to the strictest (first) member's alignment.
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the typed allocation call into a raw malloc of the combined
	// size: (size, nil type pointer, needzero=true, incoming memory).
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type),
		s.constBool(true),
		call.Args[1],
	}
	mallocSym := ir.Syms.MallocGC
	if specialMallocSym := s.specializedMallocSym(size, false); specialMallocSym != nil {
		// The combined allocation is pointer-free, so a size-specialized
		// no-scan malloc can be used when available.
		mallocSym = specialMallocSym
	}
	call.Aux = ssa.StaticAuxCall(mallocSym, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // NOTE(review): presumably args+results stack size — confirm
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// The call now returns an untyped pointer.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
790
// specializedMallocSym returns the symbol of a size-specialized runtime
// malloc entry point for an allocation of the given size and pointerness,
// or nil when no specialization applies (feature disabled, or the object
// is large enough to need a malloc header).
func (s *state) specializedMallocSym(size int64, hasPointers bool) *obj.LSym {
	if !s.sizeSpecializedMallocEnabled() {
		return nil
	}
	ptrSize := s.config.PtrSize
	ptrBits := ptrSize * 8
	// Objects up to this size keep their heap bitmap inside the span and
	// need no malloc header. NOTE(review): mirrors the runtime's
	// minSizeForMallocHeader — keep in sync with the runtime.
	minSizeForMallocHeader := ptrSize * ptrBits
	heapBitsInSpan := size <= minSizeForMallocHeader
	if !heapBitsInSpan {
		return nil
	}
	// Map the size to its small-object size class.
	divRoundUp := func(n, a uintptr) uintptr { return (n + a - 1) / a }
	sizeClass := gc.SizeToSizeClass8[divRoundUp(uintptr(size), gc.SmallSizeDiv)]
	if hasPointers {
		return ir.Syms.MallocGCSmallScanNoHeader[sizeClass]
	}
	if size < gc.TinySize {
		// Tiny pointer-free allocations have per-size (not per-class)
		// entry points; the table is indexed by the size itself.
		return ir.Syms.MallocGCTiny[size]
	}
	return ir.Syms.MallocGCSmallNoScan[sizeClass]
}
812
813 func (s *state) sizeSpecializedMallocEnabled() bool {
814 if base.Flag.CompilingRuntime {
815
816
817
818
819
820
821
822 return false
823 }
824
825 return buildcfg.Experiment.SizeSpecializedMalloc && !base.Flag.Cfg.Instrumenting
826 }
827
828
829
// setHeapaddr allocates a new PAUTO pointer variable ("&name") to hold ptr,
// the heap address of variable n, and records it as n.Heapaddr.
// ptr must be a pointer to n's type.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare a local to hold the heap address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		// Mark so the exit path knows to copy the result back from the heap.
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
848
849
// newObject returns an SSA value denoting new(typ), i.e. a pointer to a
// freshly heap-allocated, zeroed value of typ.
func (s *state) newObject(typ *types.Type) *ssa.Value {
	if typ.Size() == 0 {
		// All zero-sized allocations share the runtime's zerobase symbol.
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	rtype := s.reflectType(typ)
	if specialMallocSym := s.specializedMallocSym(typ.Size(), typ.HasPointers()); specialMallocSym != nil {
		// Size-specialized mallocs take the same (size, type, needzero)
		// arguments as mallocgc; needzero is always true here.
		return s.rtcall(specialMallocSym, true, []*types.Type{types.NewPtr(typ)},
			s.constInt(types.Types[types.TUINTPTR], typ.Size()),
			rtype,
			s.constBool(true),
		)[0]
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
864
865
866
867 func (s *state) newObjectNonSpecialized(typ *types.Type, rtype *ssa.Value) *ssa.Value {
868 if typ.Size() == 0 {
869 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
870 }
871 if rtype == nil {
872 rtype = s.reflectType(typ)
873 }
874 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
875 }
876
// checkPtrAlignment emits checkptr instrumentation for the unsafe.Pointer
// conversion n, validating that pointer v is suitably aligned for the target
// element type. count is non-nil for conversions to a pointer-to-array used
// as a slice, in which case the element count is also checked.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		// With a count, the conversion target must be *[N]T; the check
		// applies to the array's element type.
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()

	// No check needed when alignment is trivially satisfied and overflow
	// of count*size is impossible.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	// Use the dynamic type descriptor expression when the frontend
	// provided one; otherwise materialize the static descriptor.
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
907
908
909
// reflectType returns an SSA value (a *byte) holding the address of typ's
// type descriptor symbol. The value is created in the entry block
// (entryNewValue1A) so it dominates every use in the function.
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
916
917 func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
918
919 fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
920 targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
921 if err != nil {
922 writer.Logf("cannot read sources for function %v: %v", fn, err)
923 }
924
925
926 var inlFns []*ssa.FuncLines
927 for _, fi := range ssaDumpInlined {
928 elno := fi.Endlineno
929 fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
930 fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
931 if err != nil {
932 writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
933 continue
934 }
935 inlFns = append(inlFns, fnLines)
936 }
937
938 slices.SortFunc(inlFns, ssa.ByTopoCmp)
939 if targetFn != nil {
940 inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
941 }
942
943 writer.WriteSources("sources", inlFns)
944 }
945
946 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
947 f, err := os.Open(os.ExpandEnv(file))
948 if err != nil {
949 return nil, err
950 }
951 defer f.Close()
952 var lines []string
953 ln := uint(1)
954 scanner := bufio.NewScanner(f)
955 for scanner.Scan() && ln <= end {
956 if ln >= start {
957 lines = append(lines, scanner.Text())
958 }
959 ln++
960 }
961 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
962 }
963
964
965
966
// updateUnsetPredPos propagates position information from b to each of b's
// predecessors that lacks a position of its own, recursing on every
// predecessor it updates. b must have a position.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			// Lazily compute the position to propagate: b's own position,
			// or the first value in b with a real position.
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Values are assumed to be in roughly source order, so
					// the first positioned value is a good representative.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// Recursion depth is bounded by chains of position-less blocks,
		// which are expected to be short.
		s.updateUnsetPredPos(p)
	}
}
995
996
// openDeferInfo describes one open-coded defer in the current function.
type openDeferInfo struct {
	// The call expression of the deferred call.
	n *ir.CallExpr
	// The value holding the deferred function/closure, stored into
	// closureNode; used when emitting the defer exit code.
	closure *ssa.Value
	// The stack slot (autotmp) where the closure is stored, so panic
	// processing can locate and invoke this defer.
	closureNode *ir.Name
}
1008
// state holds the working state for SSA construction of a single function.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// IR node for the function being built
	curfn *ir.Func

	// labels in f, keyed by label name
	labels map[string]*ssaLabel

	// targets for unlabeled break and continue statements
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// block we're currently generating code into (nil if unreachable)
	curBlock *ssa.Block

	// variable assignments in the current block, keyed by the variable's
	// unique ir.Node
	vars map[ir.Node]*ssa.Value

	// variables used before they are defined in the current block;
	// coalesces multiple references into a single forward-reference op
	fwdVars map[ir.Node]*ssa.Value

	// defined variables at the end of each block, indexed by block ID
	defvars []map[ir.Node]*ssa.Value

	// stack addresses of PPARAM and PPARAMOUT variables
	decladdrs map[*ir.Name]*ssa.Value

	// starting values: initial memory, stack pointer, and static base
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// address of the deferBits autotmp, and the autotmp itself
	// (only set when using open-coded defers)
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// source position stack; the current position is the top
	line []src.XPos

	// last known (non-missing) position processed; may have been popped
	lastPos src.XPos

	// blocks containing panic calls, keyed by function and line,
	// used to deduplicate identical panic calls
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are using open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit (race)
	instrumentMemory    bool // whether to instrument memory operations

	// info about open-coded defers, in source order; at exit they are run
	// in reverse order of this list
	openDefers []*openDeferInfo

	// state for sharing the most recently generated defer exit code
	lastDeferExit       *ssa.Block // entry block of last defer exit generated
	lastDeferFinalBlock *ssa.Block // final block of last defer exit generated
	lastDeferCount      int        // number of defers at that point

	// most recent call value, used to tie result ops to their call
	prevCall *ssa.Value

	// pending small heap allocations in the current block, each an OffPtr
	// into a shared allocation call; offsets are placeholders until
	// flushPendingHeapAllocations assigns them
	pendingHeapAllocations []*ssa.Value

	// append call targets of interest
	// NOTE(review): exact use not visible in this chunk — presumably
	// tracks appends whose target may be stack allocated; confirm.
	appendTargets map[ir.Node]bool

	// first recorded statement position for each block, indexed by block ID
	// (src.NoXPos until a position is seen; see pushLine/endBlock)
	blockStarts []src.XPos
}
1095
// funcLine identifies a call site for panic-call deduplication:
// the callee symbol plus the source position (file base and line).
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1101
// ssaLabel holds the jump targets associated with a label in the
// function being compiled.
type ssaLabel struct {
	target         *ssa.Block // block a goto to this label jumps to
	breakTarget    *ssa.Block // block a labeled break jumps to
	continueTarget *ssa.Block // block a labeled continue jumps to
}
1107
1108
1109 func (s *state) label(sym *types.Sym) *ssaLabel {
1110 lab := s.labels[sym.Name]
1111 if lab == nil {
1112 lab = new(ssaLabel)
1113 s.labels[sym.Name] = lab
1114 }
1115 return lab
1116 }
1117
// Logf and Log delegate logging to the underlying ssa.Func.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }

// Fatalf reports a compiler error at the current source position.
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}

// Warnl and Debug_checknil delegate to the ssa.Func and its frontend.
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }
1125
// ssaMarker returns a dummy, distinct ir.Name used as a key in the s.vars
// map for pseudo-variables (memory state, scratch values, etc.).
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1129
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary pseudo-variables used during SSA
	// construction (slice parts, type-assert results, etc.)
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1143
1144
// startBlock sets the current block we're generating code in to b.
// The previous block must have been ended (endBlock) first.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	clear(s.fwdVars)
	// Grow blockStarts so it has a slot for b; new entries are src.NoXPos,
	// meaning "no statement position recorded yet".
	for len(s.blockStarts) <= int(b.ID) {
		s.blockStarts = append(s.blockStarts, src.NoXPos)
	}
}
1156
1157
1158
1159
// endBlock marks the end of generating code for the current block and
// returns it. Returns nil if there is no current block, i.e. if no code
// flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	// Aggregated heap allocations are only combined within a block, so any
	// pending batch must be finalized before the block ends.
	s.flushPendingHeapAllocations()

	// Record this block's end-of-block variable state, indexed by block ID.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Blocks with no inherent position get none here; a position may be
		// propagated from a successor later (see updateUnsetPredPos).
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
		if s.blockStarts[b.ID] == src.NoXPos {
			s.blockStarts[b.ID] = s.lastPos
		}
	}
	return b
}
1187
1188
// pushLine pushes a line number on the line number stack, making it the
// position used by subsequently generated values (see peekPos).
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// Unknown positions inherit the enclosing position rather than
		// poisoning generated values with line 0.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	// Record the first known position seen inside the current block.
	if b := s.curBlock; b != nil && s.blockStarts[b.ID] == src.NoXPos {
		s.blockStarts[b.ID] = line
	}

	s.line = append(s.line, line)
}
1208
1209
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos returns the top of the line number stack: the position
// currently in effect for newly generated values.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1218
1219
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created value may keep its statement-boundary
// marking; if false, the position is explicitly demoted with WithNotStmt.
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created value may keep its statement-boundary
// marking; if false, the position is explicitly demoted with WithNotStmt.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created value may keep its statement-boundary
// marking; if false, the position is explicitly demoted with WithNotStmt.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1318
// entryBlock returns the block that entry-pinned values (constants,
// address-of computations) should be placed in. Normally that is the
// function's entry block, but with optimization disabled (-N) values
// are kept in the current block instead — presumably so they stay near
// their uses for debugging (TODO confirm rationale).
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		b = s.curBlock
	}
	return b
}
1330
1331
// entryNewValue0 adds a new value with no arguments to the entry block.
// Entry-block values carry no source position (src.NoXPos).
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1365
1366
// const* routines add a new constant of the given type to the function.
// They delegate to the function-level constant cache, so repeated
// requests for the same constant share a single value.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}
1398 func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
1399 if s.config.PtrSize == 8 {
1400 return s.constInt64(t, c)
1401 }
1402 if int64(int32(c)) != c {
1403 s.Fatalf("integer constant too big %d", c)
1404 }
1405 return s.constInt32(t, int32(c))
1406 }
1407
1408
1409
// newValueOrSfCall1 is like newValue1, but in soft-float mode it first
// tries to replace the floating-point op with a runtime call (sfcall);
// the plain op is only emitted if no soft-float substitution applies.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}

// newValueOrSfCall2 is the two-argument analogue of newValueOrSfCall1.
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1426
// instrumentKind describes the kind of memory access being instrumented
// for the race/msan/asan sanitizers.
type instrumentKind uint8

const (
	instrumentRead = iota
	instrumentWrite
	instrumentMove
)
1434
// instrument emits sanitizer instrumentation for a single-address
// access (read or write) of size t at addr.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1438
1439
1440
1441
1442 func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1443 if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
1444 s.instrument(t, addr, kind)
1445 return
1446 }
1447 for _, f := range t.Fields() {
1448 if f.Sym.IsBlank() {
1449 continue
1450 }
1451 offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
1452 s.instrumentFields(f.Type, offptr, kind)
1453 }
1454 }
1455
1456 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1457 if base.Flag.MSan {
1458 s.instrument2(t, dst, src, instrumentMove)
1459 } else {
1460 s.instrument(t, src, instrumentRead)
1461 s.instrument(t, dst, instrumentWrite)
1462 }
1463 }
1464
// instrument2 emits a sanitizer runtime call for an access of size t at
// addr (and, for moves under msan, a second address addr2). The choice
// of runtime routine depends on which sanitizer is active; the order of
// the flag checks below (msan, race-range, race, asan) is significant.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false // whether the runtime routine takes an explicit byte count

	// Only msan's move instrumentation uses a second address.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// For composite objects (with more than one component) the race
		// detector is told the whole address range rather than a single
		// word-sized access.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component access: the plain race hooks take only the
		// address, no width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1545
// load emits an instrumented load of a value of type t from address src.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad emits a load without sanitizer instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store emits a store of val (of type t) to address dst, threading the
// resulting memory through s.vars[memVar].
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}
1558
1559 func (s *state) zero(t *types.Type, dst *ssa.Value) {
1560 s.instrument(t, dst, instrumentWrite)
1561 store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
1562 store.Aux = t
1563 s.vars[memVar] = store
1564 }
1565
// move generates code to copy a non-overlapping value of type t from
// src to dst.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
// moveWhichMayOverlap copies a value of type t from src to dst.
// If mayOverlap is true, the source and destination may partially
// overlap, and the copy must still behave like a memmove.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// A multi-element array copy that might overlap and is too big
		// to be an inlined (front-to-back or single-op) memmove cannot
		// be lowered to OpMove, since OpMove presumably does not
		// guarantee memmove semantics for partially overlapping ranges
		// (TODO confirm). Fall back to a runtime call instead:
		// typedmemmove when pointers must be tracked by the write
		// barrier, plain memmove otherwise.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// Typedmemmove performs write-barrier work on our behalf;
			// record the position for write-barrier diagnostics.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1612
1613
1614 func (s *state) stmtList(l ir.Nodes) {
1615 for _, n := range l {
1616 s.stmt(n)
1617 }
1618 }
1619
1620
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If the current block is nil we are in unreachable code; skip the
	// statement. Labels are the exception: a label can start a new block
	// that is reachable via goto.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op; fallthrough was handled during switch lowering

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			// Calls to runtime routines that never return (throw, the
			// panic helpers, block, ...) terminate the current block
			// with an Exit: everything after them is dead code.
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Diagnostic mode: report which defer strategy was chosen.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		// v, ok = x.(T)
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSAable result: dottype produced a load; assign by
			// dereferencing its address instead of by value. The load's
			// memory argument must still be the live memory state.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// Only intrinsic calls survive to here in this form; ordinary
		// multi-result calls were rewritten earlier.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			// Heap-escaping variable: materialize its heap address now.
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Blank labels cannot be targets of goto/break/continue;
			// nothing to do.
			break
		}
		lab := s.label(sym)

		// The label's target block may already exist if a goto to it
		// was seen first.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// If the current block is still open, fall through into the
		// label's block.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment of a variable: a no-op. (Presumably left
			// in the IR by earlier rewrites — TODO confirm origin.)
			return
		}

		// Determine whether the source and destination might overlap in
		// memory, which matters for the copy strategy below. Both sides
		// being pointer dereferences is the conservative signal.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// The source is the backing store of a string. String
				// backing memory is immutable, so an overlapping copy
				// out of it is still safe.
				mayOverlap = false
			}
		}

		// Evaluate the right-hand side.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk; only zero literals remain, and
				// zeroing is handled below by treating rhs as absent.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Special case: slice = append(slice, ...) can update
				// the slice in place; only valid when the destination
				// and first argument are the same expression and
				// optimization is on.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice is SSA-able, the normal assignment path
				// below handles it.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		// deref: assign by copying from an address rather than by an
		// SSA value, for types SSA cannot represent directly.
		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil // Signal assign to use OpZero.
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// Self-slicing: x = x[i:j:k]. Components of x that the
			// reslice leaves unchanged need not be stored back.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}

			// With a nil low bound the pointer is unchanged; with nil
			// high/max bounds the length/capacity are unchanged too.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty then/else bodies branch straight to the join block.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp // could use BlockExit. BlockRetJmp is mostly for clarity.
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// Unlabeled break/continue: innermost enclosing loop/switch.
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// Labeled break/continue: look the target up by label name.
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(to)

	case ir.OFOR:
		// for Ninit; Cond; Post { Body }
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The body block gets the loop's position.
		bBody.Pos = n.Pos()

		// Jump from the current block to the condition check.
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// Generate code to test the condition.
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			// No condition: loop unconditionally into the body.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// Set up break/continue targets for the loop body, saving the
		// enclosing targets so they can be restored afterwards.
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// Labeled for loop: record targets for labeled break/continue.
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// Generate the loop body.
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// Tear down the break/continue targets.
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// The body (if it didn't end in a jump/return) falls through to
		// the increment block.
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// Generate the post statement, then jump back to the condition.
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// A position-less increment block inherits the condition's
			// position so the back edge is attributable.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// Both have been lowered during walk into a Compiled statement
		// list plus an optional label; here we only set up break targets.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// Labeled switch/select: record target for labeled break.
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// Generate the (already lowered) body.
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// If the body falls through (no break taken on the final path),
		// the lowered code guarantees this is unreachable; close the
		// block with an Exit.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks for the jump table itself and for the fallthrough.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The index expression being switched on.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Compute min/max of the case values. Cases are sorted, so the
		// extremes are the first and last entries. Note: for a signed
		// type the values are reinterpreted as uint64 bit patterns; the
		// subtraction below makes the range check work regardless.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		// Range check: 0 <= idx-min <= max-min, as a single unsigned compare.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely

		// Build the jump table block, controlled by the adjusted index.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			// Mask the index to defeat speculative out-of-range table reads.
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Fill the table: entries without an explicit case fall through
		// to bEnd; cased entries jump to the case's label block.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}

			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// On supported architectures (and with optimization on), probe a
		// per-descriptor cache before falling back to the runtime call.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// The cache pointer is read with an atomic load, so the
			// Loadp intrinsic must exist on this architecture.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-width arithmetic ops for the probe loop.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load the cache pointer out of the descriptor, with an
			// atomic load so we can use a cache writable by other threads.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize the hash variable (zero-extended to uintptr width).
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load the mask from the first word of the cache.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Loop head: compute the entry address for the current probe:
			// e := &cache.Entries[hash&mask], then advance the hash for
			// the next (linear) probe.
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// hash++
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Check for a hit: the entry's type word matches the
			// runtime type we're switching on.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// Loop body: a nil type word marks an empty slot — a miss;
			// otherwise keep probing.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Cache hit: extract the case index and itab from the entry
			// (at offsets one and two pointer words) and assign them.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Cache miss: fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Rejoin the cache-hit fast path with the runtime-call path.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		_ = s.nilCheck(p)
		// TODO: check that throwing away the nilcheck result is ok.

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2315
2316
2317
2318 const shareDeferExits = false
2319
2320
2321
2322
// exit processes any pending deferred calls, materializes the result
// values, and finishes the current block with a BlockRet. It returns
// the finished block.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated open-defer exit
				// sequence instead of emitting it again (disabled while
				// shareDeferExits is false).
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers run via the runtime's deferreturn.
			// Attribute the call to the function's end line rather than
			// the return statement's line.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Collect the final values of the result parameters; the extra
	// trailing slot holds the memory state.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)

	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// Mark the stack slot as defined before the return, so
				// liveness treats it as fully initialized.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Heap-allocated result: copy it back from its heap home.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Result lives in its stack slot; return its current
			// in-memory contents by dereferencing its address.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Race-mode function exit hook must run after results are read but
	// before the return.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2398
// opAndType is the key for the opToSSA table: an IR operator combined
// with the Go type kind it operates on.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2403
2404 var opToSSA = map[opAndType]ssa.Op{
2405 {ir.OADD, types.TINT8}: ssa.OpAdd8,
2406 {ir.OADD, types.TUINT8}: ssa.OpAdd8,
2407 {ir.OADD, types.TINT16}: ssa.OpAdd16,
2408 {ir.OADD, types.TUINT16}: ssa.OpAdd16,
2409 {ir.OADD, types.TINT32}: ssa.OpAdd32,
2410 {ir.OADD, types.TUINT32}: ssa.OpAdd32,
2411 {ir.OADD, types.TINT64}: ssa.OpAdd64,
2412 {ir.OADD, types.TUINT64}: ssa.OpAdd64,
2413 {ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
2414 {ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,
2415
2416 {ir.OSUB, types.TINT8}: ssa.OpSub8,
2417 {ir.OSUB, types.TUINT8}: ssa.OpSub8,
2418 {ir.OSUB, types.TINT16}: ssa.OpSub16,
2419 {ir.OSUB, types.TUINT16}: ssa.OpSub16,
2420 {ir.OSUB, types.TINT32}: ssa.OpSub32,
2421 {ir.OSUB, types.TUINT32}: ssa.OpSub32,
2422 {ir.OSUB, types.TINT64}: ssa.OpSub64,
2423 {ir.OSUB, types.TUINT64}: ssa.OpSub64,
2424 {ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
2425 {ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,
2426
2427 {ir.ONOT, types.TBOOL}: ssa.OpNot,
2428
2429 {ir.ONEG, types.TINT8}: ssa.OpNeg8,
2430 {ir.ONEG, types.TUINT8}: ssa.OpNeg8,
2431 {ir.ONEG, types.TINT16}: ssa.OpNeg16,
2432 {ir.ONEG, types.TUINT16}: ssa.OpNeg16,
2433 {ir.ONEG, types.TINT32}: ssa.OpNeg32,
2434 {ir.ONEG, types.TUINT32}: ssa.OpNeg32,
2435 {ir.ONEG, types.TINT64}: ssa.OpNeg64,
2436 {ir.ONEG, types.TUINT64}: ssa.OpNeg64,
2437 {ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
2438 {ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,
2439
2440 {ir.OBITNOT, types.TINT8}: ssa.OpCom8,
2441 {ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
2442 {ir.OBITNOT, types.TINT16}: ssa.OpCom16,
2443 {ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
2444 {ir.OBITNOT, types.TINT32}: ssa.OpCom32,
2445 {ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
2446 {ir.OBITNOT, types.TINT64}: ssa.OpCom64,
2447 {ir.OBITNOT, types.TUINT64}: ssa.OpCom64,
2448
2449 {ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
2450 {ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
2451 {ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
2452 {ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,
2453
2454 {ir.OMUL, types.TINT8}: ssa.OpMul8,
2455 {ir.OMUL, types.TUINT8}: ssa.OpMul8,
2456 {ir.OMUL, types.TINT16}: ssa.OpMul16,
2457 {ir.OMUL, types.TUINT16}: ssa.OpMul16,
2458 {ir.OMUL, types.TINT32}: ssa.OpMul32,
2459 {ir.OMUL, types.TUINT32}: ssa.OpMul32,
2460 {ir.OMUL, types.TINT64}: ssa.OpMul64,
2461 {ir.OMUL, types.TUINT64}: ssa.OpMul64,
2462 {ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
2463 {ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,
2464
2465 {ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
2466 {ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,
2467
2468 {ir.ODIV, types.TINT8}: ssa.OpDiv8,
2469 {ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
2470 {ir.ODIV, types.TINT16}: ssa.OpDiv16,
2471 {ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
2472 {ir.ODIV, types.TINT32}: ssa.OpDiv32,
2473 {ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
2474 {ir.ODIV, types.TINT64}: ssa.OpDiv64,
2475 {ir.ODIV, types.TUINT64}: ssa.OpDiv64u,
2476
2477 {ir.OMOD, types.TINT8}: ssa.OpMod8,
2478 {ir.OMOD, types.TUINT8}: ssa.OpMod8u,
2479 {ir.OMOD, types.TINT16}: ssa.OpMod16,
2480 {ir.OMOD, types.TUINT16}: ssa.OpMod16u,
2481 {ir.OMOD, types.TINT32}: ssa.OpMod32,
2482 {ir.OMOD, types.TUINT32}: ssa.OpMod32u,
2483 {ir.OMOD, types.TINT64}: ssa.OpMod64,
2484 {ir.OMOD, types.TUINT64}: ssa.OpMod64u,
2485
2486 {ir.OAND, types.TINT8}: ssa.OpAnd8,
2487 {ir.OAND, types.TUINT8}: ssa.OpAnd8,
2488 {ir.OAND, types.TINT16}: ssa.OpAnd16,
2489 {ir.OAND, types.TUINT16}: ssa.OpAnd16,
2490 {ir.OAND, types.TINT32}: ssa.OpAnd32,
2491 {ir.OAND, types.TUINT32}: ssa.OpAnd32,
2492 {ir.OAND, types.TINT64}: ssa.OpAnd64,
2493 {ir.OAND, types.TUINT64}: ssa.OpAnd64,
2494
2495 {ir.OOR, types.TINT8}: ssa.OpOr8,
2496 {ir.OOR, types.TUINT8}: ssa.OpOr8,
2497 {ir.OOR, types.TINT16}: ssa.OpOr16,
2498 {ir.OOR, types.TUINT16}: ssa.OpOr16,
2499 {ir.OOR, types.TINT32}: ssa.OpOr32,
2500 {ir.OOR, types.TUINT32}: ssa.OpOr32,
2501 {ir.OOR, types.TINT64}: ssa.OpOr64,
2502 {ir.OOR, types.TUINT64}: ssa.OpOr64,
2503
2504 {ir.OXOR, types.TINT8}: ssa.OpXor8,
2505 {ir.OXOR, types.TUINT8}: ssa.OpXor8,
2506 {ir.OXOR, types.TINT16}: ssa.OpXor16,
2507 {ir.OXOR, types.TUINT16}: ssa.OpXor16,
2508 {ir.OXOR, types.TINT32}: ssa.OpXor32,
2509 {ir.OXOR, types.TUINT32}: ssa.OpXor32,
2510 {ir.OXOR, types.TINT64}: ssa.OpXor64,
2511 {ir.OXOR, types.TUINT64}: ssa.OpXor64,
2512
2513 {ir.OEQ, types.TBOOL}: ssa.OpEqB,
2514 {ir.OEQ, types.TINT8}: ssa.OpEq8,
2515 {ir.OEQ, types.TUINT8}: ssa.OpEq8,
2516 {ir.OEQ, types.TINT16}: ssa.OpEq16,
2517 {ir.OEQ, types.TUINT16}: ssa.OpEq16,
2518 {ir.OEQ, types.TINT32}: ssa.OpEq32,
2519 {ir.OEQ, types.TUINT32}: ssa.OpEq32,
2520 {ir.OEQ, types.TINT64}: ssa.OpEq64,
2521 {ir.OEQ, types.TUINT64}: ssa.OpEq64,
2522 {ir.OEQ, types.TINTER}: ssa.OpEqInter,
2523 {ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
2524 {ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
2525 {ir.OEQ, types.TMAP}: ssa.OpEqPtr,
2526 {ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
2527 {ir.OEQ, types.TPTR}: ssa.OpEqPtr,
2528 {ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
2529 {ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
2530 {ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
2531 {ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,
2532
2533 {ir.ONE, types.TBOOL}: ssa.OpNeqB,
2534 {ir.ONE, types.TINT8}: ssa.OpNeq8,
2535 {ir.ONE, types.TUINT8}: ssa.OpNeq8,
2536 {ir.ONE, types.TINT16}: ssa.OpNeq16,
2537 {ir.ONE, types.TUINT16}: ssa.OpNeq16,
2538 {ir.ONE, types.TINT32}: ssa.OpNeq32,
2539 {ir.ONE, types.TUINT32}: ssa.OpNeq32,
2540 {ir.ONE, types.TINT64}: ssa.OpNeq64,
2541 {ir.ONE, types.TUINT64}: ssa.OpNeq64,
2542 {ir.ONE, types.TINTER}: ssa.OpNeqInter,
2543 {ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
2544 {ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
2545 {ir.ONE, types.TMAP}: ssa.OpNeqPtr,
2546 {ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
2547 {ir.ONE, types.TPTR}: ssa.OpNeqPtr,
2548 {ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
2549 {ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
2550 {ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
2551 {ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,
2552
2553 {ir.OLT, types.TINT8}: ssa.OpLess8,
2554 {ir.OLT, types.TUINT8}: ssa.OpLess8U,
2555 {ir.OLT, types.TINT16}: ssa.OpLess16,
2556 {ir.OLT, types.TUINT16}: ssa.OpLess16U,
2557 {ir.OLT, types.TINT32}: ssa.OpLess32,
2558 {ir.OLT, types.TUINT32}: ssa.OpLess32U,
2559 {ir.OLT, types.TINT64}: ssa.OpLess64,
2560 {ir.OLT, types.TUINT64}: ssa.OpLess64U,
2561 {ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
2562 {ir.OLT, types.TFLOAT32}: ssa.OpLess32F,
2563
2564 {ir.OLE, types.TINT8}: ssa.OpLeq8,
2565 {ir.OLE, types.TUINT8}: ssa.OpLeq8U,
2566 {ir.OLE, types.TINT16}: ssa.OpLeq16,
2567 {ir.OLE, types.TUINT16}: ssa.OpLeq16U,
2568 {ir.OLE, types.TINT32}: ssa.OpLeq32,
2569 {ir.OLE, types.TUINT32}: ssa.OpLeq32U,
2570 {ir.OLE, types.TINT64}: ssa.OpLeq64,
2571 {ir.OLE, types.TUINT64}: ssa.OpLeq64U,
2572 {ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
2573 {ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
2574 }
2575
2576 func (s *state) concreteEtype(t *types.Type) types.Kind {
2577 e := t.Kind()
2578 switch e {
2579 default:
2580 return e
2581 case types.TINT:
2582 if s.config.PtrSize == 8 {
2583 return types.TINT64
2584 }
2585 return types.TINT32
2586 case types.TUINT:
2587 if s.config.PtrSize == 8 {
2588 return types.TUINT64
2589 }
2590 return types.TUINT32
2591 case types.TUINTPTR:
2592 if s.config.PtrSize == 8 {
2593 return types.TUINT64
2594 }
2595 return types.TUINT32
2596 }
2597 }
2598
2599 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2600 etype := s.concreteEtype(t)
2601 x, ok := opToSSA[opAndType{op, etype}]
2602 if !ok {
2603 s.Fatalf("unhandled binary op %v %s", op, etype)
2604 }
2605 return x
2606 }
2607
// opAndTwoTypes is the key type for shiftOpToSSA: an IR shift operator
// together with the kind of the shifted value (etype1) and the kind of
// the shift amount (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2613
// twoTypes is the key type for the float-conversion tables below: the
// source kind (etype1) and destination kind (etype2) of a conversion.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2618
// twoOpsAndType describes a conversion implemented as two SSA ops in
// sequence: op1 produces a value of intermediateType, and op2 converts
// that to the final type. Either op may be ssa.OpCopy (a no-op step)
// or ssa.OpInvalid (the case needs special helper code instead).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2624
// fpConvOpToSSA maps a (from, to) kind pair to the op pair implementing
// the conversion when at least one side is a floating-point type.
// Entries containing ssa.OpInvalid mark the hard unsigned-64-bit cases
// that conv handles with helper code instead of a simple op sequence.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32: sign-extend to 32 (or keep 64) bits, then convert.
	{types.TINT8, types.TFLOAT32}: {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	// signed int -> float64.
	{types.TINT8, types.TFLOAT64}: {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed int: convert to a 32/64-bit int, then truncate if needed.
	{types.TFLOAT32, types.TINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	// float64 -> signed int.
	{types.TFLOAT64, types.TINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float32: zero-extend so the wider signed convert is exact;
	// uint64 has no safe widening and is marked invalid (handled in conv).
	{types.TUINT8, types.TFLOAT32}: {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// unsigned int -> float64.
	{types.TUINT8, types.TFLOAT64}: {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float32 -> unsigned int: small targets truncate through int32;
	// uint32/uint64 destinations are marked invalid (handled in conv).
	{types.TFLOAT32, types.TUINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float64 -> unsigned int.
	{types.TFLOAT64, types.TUINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float: same-size entries round (used for explicit
	// conversions); cross-size entries widen or narrow.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2673
2674
2675
// fpConvOpToSSA32 overrides fpConvOpToSSA for uint32<->float conversions
// on targets that select it in conv (32-bit register size, non-MIPS,
// hard float), which have direct unsigned-32-bit conversion ops.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2682
2683
// uint64fpConvOpToSSA overrides fpConvOpToSSA for uint64<->float
// conversions on targets that select it in conv (ARM64, Wasm, S390X,
// or softfloat), which have direct unsigned-64-bit conversion ops.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2690
// shiftOpToSSA maps an IR shift operator, the kind of the shifted
// value, and the kind of the shift amount to the corresponding SSA
// opcode. Right shifts distinguish signed (arithmetic) from unsigned
// (logical, the "Ux" ops); left shifts do not.
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	// left shift of an 8-bit value.
	{ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	// left shift of a 16-bit value.
	{ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	// left shift of a 32-bit value.
	{ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	// left shift of a 64-bit value.
	{ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	// right shift of an 8-bit value (signed vs. unsigned differ).
	{ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	// right shift of a 16-bit value.
	{ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	// right shift of a 32-bit value.
	{ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	// right shift of a 64-bit value.
	{ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2764
2765 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2766 etype1 := s.concreteEtype(t)
2767 etype2 := s.concreteEtype(u)
2768 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2769 if !ok {
2770 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2771 }
2772 return x
2773 }
2774
2775 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2776 if s.config.PtrSize == 4 {
2777 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2778 }
2779 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2780 }
2781
// conv converts the value v, which has type ft, to a value of type tt,
// emitting whatever SSA operations the conversion requires. n is the
// IR node being converted; it supplies position information for
// diagnostics and debug-hash matching.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// bool -> uint8 has a dedicated op.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		// Integer <-> integer: copy, truncate, or extend based on sizes.
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// truncation; the switch key encodes (from-size, to-size) in bytes.
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// sign extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// zero extension (source is unsigned)
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex <-> complex: convert real and imaginary parts
		// separately, then reassemble.
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() { // ft is not complex (handled above)
		// Convert to the element float type and pair with a zero
		// imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		cft, ctt := s.concreteEtype(ft), s.concreteEtype(tt)
		conv, ok := fpConvOpToSSA[twoTypes{cft, ctt}]
		// NOTE(review): debug-hash gate — when ConvertHash does not match,
		// float -> uint32 is lowered via a signed 64-bit convert plus a
		// truncation instead of the table entry. Presumably this selects
		// between old and new lowering for debugging; confirm against
		// base.ConvertHash's documentation.
		if ctt == types.TUINT32 && ft.IsFloat() && !base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil) {
			conv.op1 = ssa.OpCvt64Fto64
			if cft == types.TFLOAT32 {
				conv.op1 = ssa.OpCvt32Fto64
			}
			conv.op2 = ssa.OpTrunc64to32
		}
		// On 32-bit-register targets (other than MIPS, and not softfloat),
		// prefer the direct uint32 conversion ops where available.
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		// ARM64, Wasm, S390X, and softfloat have direct uint64 conversion ops.
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// Hard-float MIPS routes unsigned-32-bit conversions through
		// runtime-style helper sequences.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 then op2, skipping copies.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}

		// A table entry was ssa.OpInvalid: one of the tricky unsigned
		// 64-bit (or unsigned 32-bit destination) cases handled by helpers.
		if ft.IsInteger() {
			// unsigned integer -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}

		// float -> unsigned integer
		if ft.Size() == 4 {
			switch tt.Size() {
			case 8:
				return s.float32ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Smaller unsigned destinations go through the uint32 helper.
				return s.float32ToUint32(n, v, ft, tt)
			}
		}
		if ft.Size() == 8 {
			switch tt.Size() {
			case 8:
				return s.float64ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Smaller unsigned destinations go through the uint32 helper.
				return s.float64ToUint32(n, v, ft, tt)
			}

		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
2982
2983
2984 func (s *state) expr(n ir.Node) *ssa.Value {
2985 return s.exprCheckPtr(n, true)
2986 }
2987
2988 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2989 if ir.HasUniquePos(n) {
2990
2991
2992 s.pushLine(n.Pos())
2993 defer s.popLine()
2994 }
2995
2996 s.stmtList(n.Init())
2997 switch n.Op() {
2998 case ir.OBYTES2STRTMP:
2999 n := n.(*ir.ConvExpr)
3000 slice := s.expr(n.X)
3001 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
3002 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3003 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
3004 case ir.OSTR2BYTESTMP:
3005 n := n.(*ir.ConvExpr)
3006 str := s.expr(n.X)
3007 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
3008 if !n.NonNil() {
3009
3010
3011
3012 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
3013 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
3014 ptr = s.ternary(cond, ptr, zerobase)
3015 }
3016 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
3017 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
3018 case ir.OCFUNC:
3019 n := n.(*ir.UnaryExpr)
3020 aux := n.X.(*ir.Name).Linksym()
3021
3022
3023 if aux.ABI() != obj.ABIInternal {
3024 s.Fatalf("expected ABIInternal: %v", aux.ABI())
3025 }
3026 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
3027 case ir.ONAME:
3028 n := n.(*ir.Name)
3029 if n.Class == ir.PFUNC {
3030
3031 sym := staticdata.FuncLinksym(n)
3032 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
3033 }
3034 if s.canSSA(n) {
3035 return s.variable(n, n.Type())
3036 }
3037 return s.load(n.Type(), s.addr(n))
3038 case ir.OLINKSYMOFFSET:
3039 n := n.(*ir.LinksymOffsetExpr)
3040 return s.load(n.Type(), s.addr(n))
3041 case ir.ONIL:
3042 n := n.(*ir.NilExpr)
3043 t := n.Type()
3044 switch {
3045 case t.IsSlice():
3046 return s.constSlice(t)
3047 case t.IsInterface():
3048 return s.constInterface(t)
3049 default:
3050 return s.constNil(t)
3051 }
3052 case ir.OLITERAL:
3053 switch u := n.Val(); u.Kind() {
3054 case constant.Int:
3055 i := ir.IntVal(n.Type(), u)
3056 switch n.Type().Size() {
3057 case 1:
3058 return s.constInt8(n.Type(), int8(i))
3059 case 2:
3060 return s.constInt16(n.Type(), int16(i))
3061 case 4:
3062 return s.constInt32(n.Type(), int32(i))
3063 case 8:
3064 return s.constInt64(n.Type(), i)
3065 default:
3066 s.Fatalf("bad integer size %d", n.Type().Size())
3067 return nil
3068 }
3069 case constant.String:
3070 i := constant.StringVal(u)
3071 if i == "" {
3072 return s.constEmptyString(n.Type())
3073 }
3074 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3075 case constant.Bool:
3076 return s.constBool(constant.BoolVal(u))
3077 case constant.Float:
3078 f, _ := constant.Float64Val(u)
3079 switch n.Type().Size() {
3080 case 4:
3081 return s.constFloat32(n.Type(), f)
3082 case 8:
3083 return s.constFloat64(n.Type(), f)
3084 default:
3085 s.Fatalf("bad float size %d", n.Type().Size())
3086 return nil
3087 }
3088 case constant.Complex:
3089 re, _ := constant.Float64Val(constant.Real(u))
3090 im, _ := constant.Float64Val(constant.Imag(u))
3091 switch n.Type().Size() {
3092 case 8:
3093 pt := types.Types[types.TFLOAT32]
3094 return s.newValue2(ssa.OpComplexMake, n.Type(),
3095 s.constFloat32(pt, re),
3096 s.constFloat32(pt, im))
3097 case 16:
3098 pt := types.Types[types.TFLOAT64]
3099 return s.newValue2(ssa.OpComplexMake, n.Type(),
3100 s.constFloat64(pt, re),
3101 s.constFloat64(pt, im))
3102 default:
3103 s.Fatalf("bad complex size %d", n.Type().Size())
3104 return nil
3105 }
3106 default:
3107 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3108 return nil
3109 }
3110 case ir.OCONVNOP:
3111 n := n.(*ir.ConvExpr)
3112 to := n.Type()
3113 from := n.X.Type()
3114
3115
3116
3117 x := s.expr(n.X)
3118 if to == from {
3119 return x
3120 }
3121
3122
3123
3124
3125
3126 if to.IsPtrShaped() != from.IsPtrShaped() {
3127 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3128 }
3129
3130 v := s.newValue1(ssa.OpCopy, to, x)
3131
3132
3133 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3134 return v
3135 }
3136
3137
3138 if from.Kind() == to.Kind() {
3139 return v
3140 }
3141
3142
3143 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3144 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3145 s.checkPtrAlignment(n, v, nil)
3146 }
3147 return v
3148 }
3149
3150
3151 mt := types.NewPtr(reflectdata.MapType())
3152 if to.Kind() == types.TMAP && from == mt {
3153 return v
3154 }
3155
3156 types.CalcSize(from)
3157 types.CalcSize(to)
3158 if from.Size() != to.Size() {
3159 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3160 return nil
3161 }
3162 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3163 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3164 return nil
3165 }
3166
3167 if base.Flag.Cfg.Instrumenting {
3168
3169
3170
3171 return v
3172 }
3173
3174 if etypesign(from.Kind()) == 0 {
3175 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3176 return nil
3177 }
3178
3179
3180 return v
3181
3182 case ir.OCONV:
3183 n := n.(*ir.ConvExpr)
3184 x := s.expr(n.X)
3185 return s.conv(n, x, n.X.Type(), n.Type())
3186
3187 case ir.ODOTTYPE:
3188 n := n.(*ir.TypeAssertExpr)
3189 res, _ := s.dottype(n, false)
3190 return res
3191
3192 case ir.ODYNAMICDOTTYPE:
3193 n := n.(*ir.DynamicTypeAssertExpr)
3194 res, _ := s.dynamicDottype(n, false)
3195 return res
3196
3197
3198 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3199 n := n.(*ir.BinaryExpr)
3200 a := s.expr(n.X)
3201 b := s.expr(n.Y)
3202 if n.X.Type().IsComplex() {
3203 pt := types.FloatForComplex(n.X.Type())
3204 op := s.ssaOp(ir.OEQ, pt)
3205 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3206 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3207 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3208 switch n.Op() {
3209 case ir.OEQ:
3210 return c
3211 case ir.ONE:
3212 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3213 default:
3214 s.Fatalf("ordered complex compare %v", n.Op())
3215 }
3216 }
3217
3218
3219 op := n.Op()
3220 switch op {
3221 case ir.OGE:
3222 op, a, b = ir.OLE, b, a
3223 case ir.OGT:
3224 op, a, b = ir.OLT, b, a
3225 }
3226 if n.X.Type().IsFloat() {
3227
3228 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3229 }
3230
3231 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3232 case ir.OMUL:
3233 n := n.(*ir.BinaryExpr)
3234 a := s.expr(n.X)
3235 b := s.expr(n.Y)
3236 if n.Type().IsComplex() {
3237 mulop := ssa.OpMul64F
3238 addop := ssa.OpAdd64F
3239 subop := ssa.OpSub64F
3240 pt := types.FloatForComplex(n.Type())
3241 wt := types.Types[types.TFLOAT64]
3242
3243 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3244 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3245 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3246 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3247
3248 if pt != wt {
3249 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3250 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3251 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3252 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3253 }
3254
3255 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3256 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3257
3258 if pt != wt {
3259 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3260 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3261 }
3262
3263 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3264 }
3265
3266 if n.Type().IsFloat() {
3267 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3268 }
3269
3270 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3271
3272 case ir.ODIV:
3273 n := n.(*ir.BinaryExpr)
3274 a := s.expr(n.X)
3275 b := s.expr(n.Y)
3276 if n.Type().IsComplex() {
3277
3278
3279
3280 mulop := ssa.OpMul64F
3281 addop := ssa.OpAdd64F
3282 subop := ssa.OpSub64F
3283 divop := ssa.OpDiv64F
3284 pt := types.FloatForComplex(n.Type())
3285 wt := types.Types[types.TFLOAT64]
3286
3287 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3288 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3289 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3290 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3291
3292 if pt != wt {
3293 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3294 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3295 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3296 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3297 }
3298
3299 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3300 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3301 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3302
3303
3304
3305
3306
3307 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3308 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3309
3310 if pt != wt {
3311 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3312 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3313 }
3314 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3315 }
3316 if n.Type().IsFloat() {
3317 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3318 }
3319 return s.intDivide(n, a, b)
3320 case ir.OMOD:
3321 n := n.(*ir.BinaryExpr)
3322 a := s.expr(n.X)
3323 b := s.expr(n.Y)
3324 return s.intDivide(n, a, b)
3325 case ir.OADD, ir.OSUB:
3326 n := n.(*ir.BinaryExpr)
3327 a := s.expr(n.X)
3328 b := s.expr(n.Y)
3329 if n.Type().IsComplex() {
3330 pt := types.FloatForComplex(n.Type())
3331 op := s.ssaOp(n.Op(), pt)
3332 return s.newValue2(ssa.OpComplexMake, n.Type(),
3333 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3334 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3335 }
3336 if n.Type().IsFloat() {
3337 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3338 }
3339 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3340 case ir.OAND, ir.OOR, ir.OXOR:
3341 n := n.(*ir.BinaryExpr)
3342 a := s.expr(n.X)
3343 b := s.expr(n.Y)
3344 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3345 case ir.OANDNOT:
3346 n := n.(*ir.BinaryExpr)
3347 a := s.expr(n.X)
3348 b := s.expr(n.Y)
3349 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3350 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3351 case ir.OLSH, ir.ORSH:
3352 n := n.(*ir.BinaryExpr)
3353 a := s.expr(n.X)
3354 b := s.expr(n.Y)
3355 bt := b.Type
3356 if bt.IsSigned() {
3357 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3358 s.check(cmp, ir.Syms.Panicshift)
3359 bt = bt.ToUnsigned()
3360 }
3361 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3362 case ir.OANDAND, ir.OOROR:
3363
3364
3365
3366
3367
3368
3369
3370
3371
3372
3373
3374
3375
3376 n := n.(*ir.LogicalExpr)
3377 el := s.expr(n.X)
3378 s.vars[n] = el
3379
3380 b := s.endBlock()
3381 b.Kind = ssa.BlockIf
3382 b.SetControl(el)
3383
3384
3385
3386
3387
3388 bRight := s.f.NewBlock(ssa.BlockPlain)
3389 bResult := s.f.NewBlock(ssa.BlockPlain)
3390 if n.Op() == ir.OANDAND {
3391 b.AddEdgeTo(bRight)
3392 b.AddEdgeTo(bResult)
3393 } else if n.Op() == ir.OOROR {
3394 b.AddEdgeTo(bResult)
3395 b.AddEdgeTo(bRight)
3396 }
3397
3398 s.startBlock(bRight)
3399 er := s.expr(n.Y)
3400 s.vars[n] = er
3401
3402 b = s.endBlock()
3403 b.AddEdgeTo(bResult)
3404
3405 s.startBlock(bResult)
3406 return s.variable(n, types.Types[types.TBOOL])
3407 case ir.OCOMPLEX:
3408 n := n.(*ir.BinaryExpr)
3409 r := s.expr(n.X)
3410 i := s.expr(n.Y)
3411 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3412
3413
3414 case ir.ONEG:
3415 n := n.(*ir.UnaryExpr)
3416 a := s.expr(n.X)
3417 if n.Type().IsComplex() {
3418 tp := types.FloatForComplex(n.Type())
3419 negop := s.ssaOp(n.Op(), tp)
3420 return s.newValue2(ssa.OpComplexMake, n.Type(),
3421 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3422 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3423 }
3424 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3425 case ir.ONOT, ir.OBITNOT:
3426 n := n.(*ir.UnaryExpr)
3427 a := s.expr(n.X)
3428 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3429 case ir.OIMAG, ir.OREAL:
3430 n := n.(*ir.UnaryExpr)
3431 a := s.expr(n.X)
3432 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3433 case ir.OPLUS:
3434 n := n.(*ir.UnaryExpr)
3435 return s.expr(n.X)
3436
3437 case ir.OADDR:
3438 n := n.(*ir.AddrExpr)
3439 return s.addr(n.X)
3440
3441 case ir.ORESULT:
3442 n := n.(*ir.ResultExpr)
3443 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3444 panic("Expected to see a previous call")
3445 }
3446 which := n.Index
3447 if which == -1 {
3448 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3449 }
3450 return s.resultOfCall(s.prevCall, which, n.Type())
3451
3452 case ir.ODEREF:
3453 n := n.(*ir.StarExpr)
3454 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3455 return s.load(n.Type(), p)
3456
3457 case ir.ODOT:
3458 n := n.(*ir.SelectorExpr)
3459 if n.X.Op() == ir.OSTRUCTLIT {
3460
3461
3462
3463 if !ir.IsZero(n.X) {
3464 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3465 }
3466 return s.zeroVal(n.Type())
3467 }
3468
3469
3470
3471
3472 if ir.IsAddressable(n) && !s.canSSA(n) {
3473 p := s.addr(n)
3474 return s.load(n.Type(), p)
3475 }
3476 v := s.expr(n.X)
3477 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3478
3479 case ir.ODOTPTR:
3480 n := n.(*ir.SelectorExpr)
3481 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3482 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3483 return s.load(n.Type(), p)
3484
3485 case ir.OINDEX:
3486 n := n.(*ir.IndexExpr)
3487 switch {
3488 case n.X.Type().IsString():
3489 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3490
3491
3492
3493 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3494 }
3495 a := s.expr(n.X)
3496 i := s.expr(n.Index)
3497 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3498 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3499 ptrtyp := s.f.Config.Types.BytePtr
3500 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3501 if ir.IsConst(n.Index, constant.Int) {
3502 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3503 } else {
3504 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3505 }
3506 return s.load(types.Types[types.TUINT8], ptr)
3507 case n.X.Type().IsSlice():
3508 p := s.addr(n)
3509 return s.load(n.X.Type().Elem(), p)
3510 case n.X.Type().IsArray():
3511 if ssa.CanSSA(n.X.Type()) {
3512
3513 bound := n.X.Type().NumElem()
3514 a := s.expr(n.X)
3515 i := s.expr(n.Index)
3516 if bound == 0 {
3517
3518
3519 z := s.constInt(types.Types[types.TINT], 0)
3520 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3521
3522
3523 return s.zeroVal(n.Type())
3524 }
3525 len := s.constInt(types.Types[types.TINT], bound)
3526 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3527 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3528 }
3529 p := s.addr(n)
3530 return s.load(n.X.Type().Elem(), p)
3531 default:
3532 s.Fatalf("bad type for index %v", n.X.Type())
3533 return nil
3534 }
3535
3536 case ir.OLEN, ir.OCAP:
3537 n := n.(*ir.UnaryExpr)
3538
3539
3540 a := s.expr(n.X)
3541 t := n.X.Type()
3542 switch {
3543 case t.IsSlice():
3544 op := ssa.OpSliceLen
3545 if n.Op() == ir.OCAP {
3546 op = ssa.OpSliceCap
3547 }
3548 return s.newValue1(op, types.Types[types.TINT], a)
3549 case t.IsString():
3550 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3551 case t.IsMap(), t.IsChan():
3552 return s.referenceTypeBuiltin(n, a)
3553 case t.IsArray():
3554 return s.constInt(types.Types[types.TINT], t.NumElem())
3555 case t.IsPtr() && t.Elem().IsArray():
3556 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3557 default:
3558 s.Fatalf("bad type in len/cap: %v", t)
3559 return nil
3560 }
3561
3562 case ir.OSPTR:
3563 n := n.(*ir.UnaryExpr)
3564 a := s.expr(n.X)
3565 if n.X.Type().IsSlice() {
3566 if n.Bounded() {
3567 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3568 }
3569 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3570 } else {
3571 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3572 }
3573
3574 case ir.OITAB:
3575 n := n.(*ir.UnaryExpr)
3576 a := s.expr(n.X)
3577 return s.newValue1(ssa.OpITab, n.Type(), a)
3578
3579 case ir.OIDATA:
3580 n := n.(*ir.UnaryExpr)
3581 a := s.expr(n.X)
3582 return s.newValue1(ssa.OpIData, n.Type(), a)
3583
3584 case ir.OMAKEFACE:
3585 n := n.(*ir.BinaryExpr)
3586 tab := s.expr(n.X)
3587 data := s.expr(n.Y)
3588 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3589
3590 case ir.OSLICEHEADER:
3591 n := n.(*ir.SliceHeaderExpr)
3592 p := s.expr(n.Ptr)
3593 l := s.expr(n.Len)
3594 c := s.expr(n.Cap)
3595 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3596
3597 case ir.OSTRINGHEADER:
3598 n := n.(*ir.StringHeaderExpr)
3599 p := s.expr(n.Ptr)
3600 l := s.expr(n.Len)
3601 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3602
3603 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3604 n := n.(*ir.SliceExpr)
3605 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3606 v := s.exprCheckPtr(n.X, !check)
3607 var i, j, k *ssa.Value
3608 if n.Low != nil {
3609 i = s.expr(n.Low)
3610 }
3611 if n.High != nil {
3612 j = s.expr(n.High)
3613 }
3614 if n.Max != nil {
3615 k = s.expr(n.Max)
3616 }
3617 p, l, c := s.slice(v, i, j, k, n.Bounded())
3618 if check {
3619
3620 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3621 }
3622 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3623
3624 case ir.OSLICESTR:
3625 n := n.(*ir.SliceExpr)
3626 v := s.expr(n.X)
3627 var i, j *ssa.Value
3628 if n.Low != nil {
3629 i = s.expr(n.Low)
3630 }
3631 if n.High != nil {
3632 j = s.expr(n.High)
3633 }
3634 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3635 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3636
3637 case ir.OSLICE2ARRPTR:
3638
3639
3640
3641
3642 n := n.(*ir.ConvExpr)
3643 v := s.expr(n.X)
3644 nelem := n.Type().Elem().NumElem()
3645 arrlen := s.constInt(types.Types[types.TINT], nelem)
3646 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3647 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3648 op := ssa.OpSlicePtr
3649 if nelem == 0 {
3650 op = ssa.OpSlicePtrUnchecked
3651 }
3652 return s.newValue1(op, n.Type(), v)
3653
3654 case ir.OCALLFUNC:
3655 n := n.(*ir.CallExpr)
3656 if ir.IsIntrinsicCall(n) {
3657 return s.intrinsicCall(n)
3658 }
3659 fallthrough
3660
3661 case ir.OCALLINTER:
3662 n := n.(*ir.CallExpr)
3663 return s.callResult(n, callNormal)
3664
3665 case ir.OGETG:
3666 n := n.(*ir.CallExpr)
3667 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3668
3669 case ir.OGETCALLERSP:
3670 n := n.(*ir.CallExpr)
3671 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3672
3673 case ir.OAPPEND:
3674 return s.append(n.(*ir.CallExpr), false)
3675
3676 case ir.OMIN, ir.OMAX:
3677 return s.minMax(n.(*ir.CallExpr))
3678
3679 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3680
3681
3682
3683 n := n.(*ir.CompLitExpr)
3684 if !ir.IsZero(n) {
3685 s.Fatalf("literal with nonzero value in SSA: %v", n)
3686 }
3687 return s.zeroVal(n.Type())
3688
3689 case ir.ONEW:
3690 n := n.(*ir.UnaryExpr)
3691 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3692 return s.newObjectNonSpecialized(n.Type().Elem(), s.expr(x.RType))
3693 }
3694 return s.newObject(n.Type().Elem())
3695
3696 case ir.OUNSAFEADD:
3697 n := n.(*ir.BinaryExpr)
3698 ptr := s.expr(n.X)
3699 len := s.expr(n.Y)
3700
3701
3702
3703 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3704
3705 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3706
3707 default:
3708 s.Fatalf("unhandled expr %v", n.Op())
3709 return nil
3710 }
3711 }
3712
3713 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3714 aux := c.Aux.(*ssa.AuxCall)
3715 pa := aux.ParamAssignmentForResult(which)
3716
3717
3718 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3719 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3720 return s.rawLoad(t, addr)
3721 }
3722 return s.newValue1I(ssa.OpSelectN, t, which, c)
3723 }
3724
3725 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3726 aux := c.Aux.(*ssa.AuxCall)
3727 pa := aux.ParamAssignmentForResult(which)
3728 if len(pa.Registers) == 0 {
3729 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3730 }
3731 _, addr := s.temp(c.Pos, t)
3732 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3733 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3734 return addr
3735 }
3736
3737
3738
3739
3740
3741
3742
3743
3744
// append converts an OAPPEND node into SSA.
// If inplace is false, it converts the OAPPEND expression n to an ssa.Value,
// adds it to s, and returns the Value (the non-assignment form, e.g.
// y = append(x, e)).
// If inplace is true, it writes the result of the OAPPEND expression n back
// to the slice being appended to, and returns nil (the s = append(s, e) form,
// where only the updated pointer/len/cap need to be stored back).
//
// Control-flow shape: a capacity test branches either to the "grow" block
// (call runtime growslice) or straight to "assign" (room available), after
// which the new elements are written at the old length.
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to. In the inplace case we need its
	// address too, so we can store the updated header back.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// grow: capacity insufficient; assign: write the appended elements.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the slice header.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newlen = len + number of appended arguments.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow if cap < newlen. NOTE: compared as TUINT — presumably so that a
	// (theoretical) overflowed newlen still forces the grow path; confirm
	// against upstream commentary.
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Seed the phi variables that merge at the assign block.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the rare case
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Optional optimization: for a non-escaping, non-inplace append of small
	// elements, try to serve the first growth from a fixed-size stack buffer
	// instead of calling growslice. Guarded by the VariableMakeThreshold debug
	// knob, -N, and a hash for bisecting; applied at most once per appended-to
	// expression (s.appendTargets).
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	if !inplace && n.Esc() == ir.EscNone && et.Size() > 0 && et.Size() <= maxStackSize && base.Flag.N == 0 && base.VariableMakeHash.MatchPos(n.Pos(), nil) && !s.appendTargets[sn] {
		// Remember we handled this target so we don't stack-allocate twice.
		if s.appendTargets == nil {
			s.appendTargets = map[ir.Node]bool{}
		}
		s.appendTargets[sn] = true

		// K = number of elements that fit in the stack buffer; KT = [K]et.
		K := maxStackSize / et.Size()
		KT := types.NewArray(et, K)
		KT.SetNoalg(true)
		types.CalcArraySize(KT)

		// Wrap the array in a struct with a zero-length uintptr array first,
		// presumably to force pointer alignment of the buffer — confirm.
		align := types.NewArray(types.Types[types.TUINTPTR], 0)
		types.CalcArraySize(align)
		storeTyp := types.NewStruct([]*types.Field{
			{Sym: types.BlankSym, Type: align},
			{Sym: types.BlankSym, Type: KT},
		})
		storeTyp.SetNoalg(true)
		types.CalcStructSize(storeTyp)

		// The stack-buffer fast path is a chain of three tests, any failure of
		// which falls through to the real growslice call.
		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)

		// used: has the stack buffer already been handed out? Initialized to
		// false in the entry block.
		tBool := types.Types[types.TBOOL]
		used := typecheck.TempAt(n.Pos(), s.curfn, tBool)
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)

		// The buffer itself, address-taken so it has a stack slot.
		tInt := types.Types[types.TINT]
		backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
		backingStore.SetAddrtaken(true)

		// grow: does the new length fit in the buffer (newlen <= K)?
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Is the buffer still unused?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Only take the buffer when appending to an empty slice (oldLen == 0),
		// so no existing elements need copying.
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// All tests passed: zero the buffer and use it as the backing store.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, backingStore, s.mem())
		}
		addr := s.addr(backingStore)
		s.zero(storeTyp, addr)

		// The resulting slice is (buffer, newlen, K).
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, K)

		// Mark the buffer as consumed.
		s.assign(used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// The slow path now starts at growSlice instead of the original grow.
		grow = growSlice
	}

	// Call growslice to get a bigger backing array.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

	// Re-decompose the returned slice header.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// The whole slice variable is about to be overwritten;
				// tell liveness its old contents are dead.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Store the new cap and ptr back into the slice header in memory.
		// (len is stored later, on the common path.)
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// assign: merge point. Read the phi'd ptr/len(/cap).
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Update the stored length (needed on both the grown and ungrown paths).
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the appended arguments. SSA-able values are stored directly;
	// non-SSA-able ones are kept as addresses and copied with move.
	type argRec struct {
		// v is the value (store==true) or the address (store==false) of the arg.
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the new elements starting at index oldLen = newlen - nargs.
	// (Recomputed from the phi'd len so it is correct on all paths.)
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// The fake phi variables are no longer needed.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// Inplace append stored everything back already; otherwise build the
	// resulting slice value.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
4028
4029
// minMax converts an OMIN/OMAX builtin call into SSA.
// Floats and strings go through either a hardware intrinsic (where the
// target has one) or a runtime helper; integers use either a 64-bit
// min/max instruction (RISCV64 with Zbb) or a compare-and-select.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// fold left-folds a binary op over all the call's arguments:
	// op(op(op(a0, a1), a2), ...).
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings cannot use the generic compare-and-select
		// fallback below: float min/max has special NaN/signed-zero
		// semantics, and strings need a runtime comparison — so these
		// always go through an intrinsic or a runtime call.
		if typ.IsFloat() {
			// Targets with a native float min/max instruction.
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64, sys.S390X:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// No intrinsic: pick the matching runtime helper by type and op.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// RISCV64 with the Zbb extension (GORISCV64 >= 22) has 64-bit
		// integer min/max instructions.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic integer fallback: compare and select.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
4143
4144
4145 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4146
4147
4148 ternaryVar := ssaMarker("ternary")
4149
4150 bThen := s.f.NewBlock(ssa.BlockPlain)
4151 bElse := s.f.NewBlock(ssa.BlockPlain)
4152 bEnd := s.f.NewBlock(ssa.BlockPlain)
4153
4154 b := s.endBlock()
4155 b.Kind = ssa.BlockIf
4156 b.SetControl(cond)
4157 b.AddEdgeTo(bThen)
4158 b.AddEdgeTo(bElse)
4159
4160 s.startBlock(bThen)
4161 s.vars[ternaryVar] = x
4162 s.endBlock().AddEdgeTo(bEnd)
4163
4164 s.startBlock(bElse)
4165 s.vars[ternaryVar] = y
4166 s.endBlock().AddEdgeTo(bEnd)
4167
4168 s.startBlock(bEnd)
4169 r := s.variable(ternaryVar, x.Type)
4170 delete(s.vars, ternaryVar)
4171 return r
4172 }
4173
4174
4175
4176
4177
// condBranch evaluates the boolean expression cond and branches to yes
// if cond is true and no if cond is false.
// This function is intended to handle && and || better than just calling
// s.expr(cond) and branching on the result: short-circuit operators become
// CFG structure directly, and likeliness hints are propagated.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// cond.X && cond.Y: branch on X first; only if it is true does
		// control reach mid, where Y decides.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// For the X branch, clamp "unlikely" to neutral (max(likely, 0)):
		// even if the whole && is unlikely, X alone may well be true —
		// only the combination is unlikely. A "likely" hint is kept, since
		// both X and Y must then be likely.
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.OOROR:
		// cond.X || cond.Y: mirror image of &&. Clamp a "likely" hint to
		// neutral for the X branch (min(likely, 0)); keep "unlikely".
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.ONOT:
		// !cond.X: swap the targets and invert the prediction.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// No-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate the condition and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely)
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
4224
// skipMask is a bitmask of slice-header fields whose assignment may be
// skipped (because the caller knows they are unchanged).
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // pointer field need not be stored
	skipLen                      // length field need not be stored
	skipCap                      // capacity field need not be stored
)
4232
4233
4234
4235
4236
4237
4238
// assign does left = right, with no possibility that the source and
// destination overlap (it simply delegates to assignWhichMayOverlap
// with mayOverlap=false). If deref is true, it does left = *right
// (right is an address); with deref and right == nil it zeroes left.
// skip marks slice-header fields that need not be written.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap does left = right, allowing the source and the
// destination memory (in the deref case) to overlap when mayOverlap is true.
// SSA-able destinations are updated in the variable map; everything else
// goes through a store (or move/zero for deref assignments).
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	// Assigning to the blank identifier is a no-op.
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an SSA-able struct: rebuild the whole
			// struct value with the one field replaced, then assign the new
			// struct to the base expression. This keeps the struct in SSA form.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Current value of the whole struct.
			old := s.expr(left.X)

			// A fresh struct value...
			new := s.newValue0(ssa.OpStructMake, t)

			// ...with field idx replaced by right, all others copied from old.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct (the base may itself be
			// a field of another SSA-able struct).
			s.assign(left.X, new, false, 0)
			// TODO: do we need to update named values here?
			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			// Assigning to an element of an SSA-able array. Only arrays of
			// length 0 or 1 can be SSA'd (see canSSA/CanSSA).
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index)
			if n == 0 {
				// Zero-length array: the assignment can never succeed;
				// just emit the (always-failing) bounds check.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Length-1 array: bounds-check the index, then rebuild the array
			// value around the single new element.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain SSA-able variable: just update the map (and record the value
		// for debug info).
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// Not SSA-able: the assignment goes through memory.
	// If this assignment clobbers an entire on-stack variable, emit a VarDef
	// so liveness knows the old contents are dead before the store.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Compute the destination address.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Storing to the Data field of a reflect.SliceHeader/StringHeader:
		// treat the stored value as an unsafe pointer so the write barrier
		// machinery sees a pointer store, not a uintptr store.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// left = *right (or zeroing when right is nil).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Ordinary store of an SSA value to memory.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4350
4351
4352 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4353 switch {
4354 case t.IsInteger():
4355 switch t.Size() {
4356 case 1:
4357 return s.constInt8(t, 0)
4358 case 2:
4359 return s.constInt16(t, 0)
4360 case 4:
4361 return s.constInt32(t, 0)
4362 case 8:
4363 return s.constInt64(t, 0)
4364 default:
4365 s.Fatalf("bad sized integer type %v", t)
4366 }
4367 case t.IsFloat():
4368 switch t.Size() {
4369 case 4:
4370 return s.constFloat32(t, 0)
4371 case 8:
4372 return s.constFloat64(t, 0)
4373 default:
4374 s.Fatalf("bad sized float type %v", t)
4375 }
4376 case t.IsComplex():
4377 switch t.Size() {
4378 case 8:
4379 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4380 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4381 case 16:
4382 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4383 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4384 default:
4385 s.Fatalf("bad sized complex type %v", t)
4386 }
4387
4388 case t.IsString():
4389 return s.constEmptyString(t)
4390 case t.IsPtrShaped():
4391 return s.constNil(t)
4392 case t.IsBoolean():
4393 return s.constBool(false)
4394 case t.IsInterface():
4395 return s.constInterface(t)
4396 case t.IsSlice():
4397 return s.constSlice(t)
4398 case t.IsStruct():
4399 n := t.NumFields()
4400 v := s.entryNewValue0(ssa.OpStructMake, t)
4401 for i := 0; i < n; i++ {
4402 v.AddArg(s.zeroVal(t.FieldType(i)))
4403 }
4404 return v
4405 case t.IsArray():
4406 switch t.NumElem() {
4407 case 0:
4408 return s.entryNewValue0(ssa.OpArrayMake0, t)
4409 case 1:
4410 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4411 }
4412 }
4413 s.Fatalf("zero for type %v not implemented", t)
4414 return nil
4415 }
4416
// callKind distinguishes the flavors of function call the SSA builder
// can emit.
type callKind int8

const (
	callNormal     callKind = iota // ordinary call
	callDefer                      // deferred call
	callDeferStack                 // deferred call with stack-allocated defer record
	callGo                         // goroutine start
	callTail                       // tail call
)
4426
// sfRtCallDef describes the runtime routine that implements a floating-point
// SSA op in softfloat mode.
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime function implementing the op
	rtype types.Kind // result kind of the op (used for conversions/negation in sfcall)
}

// softFloatOps maps float SSA ops to their softfloat runtime replacements.
// Populated by softfloatInit; nil unless the target uses softfloat.
var softFloatOps map[ssa.Op]sfRtCallDef
4433
// softfloatInit populates softFloatOps, mapping each floating-point SSA op
// to the runtime routine used to emulate it on softfloat targets.
//
// Several entries deliberately reference a "different" routine than the op
// name suggests; sfcall rewrites the operands to compensate:
//   - Sub uses fadd after negating the second operand,
//   - Less/Leq use fgt/fge after swapping the operands,
//   - Neq uses feq and negates the result.
func softfloatInit() {
	// Some of these operations are not directly supported by the runtime.
	// Instead, sfcall massages the arguments/result (see above).
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		// Sub is implemented as x + (-y); sfcall negates the second argument.
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		ssa.OpEq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		// Neq is feq with the result inverted by sfcall.
		ssa.OpNeq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		// Less/Leq are fgt/fge with the operands swapped by sfcall
		// (x < y  <=>  y > x;  x <= y  <=>  y >= x).
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		// Conversions between floats and integers of various widths/signs.
		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4471
4472
4473
// sfcall converts a float SSA operation into a runtime call on softfloat
// targets. It reports (call result, true) if op has a softfloat
// replacement, and (nil, false) otherwise.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the same-width unsigned integer type;
	// softfloat runtime routines take and return raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		// Compensate for the op->routine mappings in softfloatInit:
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// Less/Leq are implemented with fgt/fge, so swap the operands.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// Sub is implemented with fadd, so negate the second operand.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Reinterpret float arguments as same-width unsigned integers
		// (bit-pattern copy, not a value conversion).
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		// Make the call; reinterpret an integer result back to float if needed.
		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		// Neq is implemented with feq, so invert the result.
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4517
4518
4519 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4520 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4521 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4522 return p0, p1
4523 }
4524
4525
4526 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4527 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4528 if ssa.IntrinsicsDebug > 0 {
4529 x := v
4530 if x == nil {
4531 x = s.mem()
4532 }
4533 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4534 x = x.Args[0]
4535 }
4536 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4537 }
4538 return v
4539 }
4540
4541
4542 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4543 args := make([]*ssa.Value, len(n.Args))
4544 for i, n := range n.Args {
4545 args[i] = s.expr(n)
4546 }
4547 return args
4548 }
4549
4550
4551
4552
4553
4554
4555
// openDeferRecord adds code to evaluate and store the function for an
// open-coded defer at the point of the defer statement, and records the
// info in s.openDefers so the call can be emitted at function exit.
// Open-coded defers only support argument-less, result-less direct
// function calls.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun
	// Evaluate the deferred function value now (defer semantics) and spill
	// it to a dedicated stack slot so it survives until function exit.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	// A direct reference to a declared function needs no stored closure;
	// the exit code can call it statically.
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Set bit `index` in deferBits to mark this defer as active, both in
	// the SSA variable and in its stack slot (the slot is what panic
	// recovery reads).
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4584
4585
4586
4587
4588
4589
// openDeferSave spills the value val (the function of an open-coded defer)
// of type t to a dedicated stack temp and returns the temp's address.
// The temp is marked as an open-defer slot and kept live (and zeroed at
// entry) so the runtime's panic machinery can find the deferred function.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// NOTE(review): FrameOffset is (ab)used here to record the defer index;
	// presumably consumed later when laying out defer slots — confirm.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// The VarDef/VarLive/LocalAddr for the slot are emitted in the entry
	// block so the slot is considered live from function entry, even when
	// the defer statement itself is in a later block.
	if s.curBlock.ID != s.f.Entry.ID {
		// Not in the entry block: splice the ops into the entry block's
		// memory chain directly.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Still in the entry block: emit through the normal current-block
		// helpers.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot must be zeroed at entry: a panic before the defer statement
	// executes must not see stale garbage in the slot.
	temp.SetNeedzero(true)

	// Finally, store the deferred function value into the slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
4634
4635
4636
4637
4638
// openDeferExit generates SSA for processing all the open-coded defers at
// function exit. The code loads deferBits and checks each bit to see if the
// corresponding defer statement was executed; for each bit that is set, the
// associated deferred call is made.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Test for and run defers in reverse order (last defer runs first).
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Generate code to check if the bit associated with the current
		// defer is set.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this bit in deferBits and force the store back to the
		// stack, so we will not try to re-run this defer call if this
		// defer call panics.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)
		// Use this value for following tests, so we keep previous
		// bits cleared.
		s.vars[deferBitsVar] = maskedval

		// Generate code to call the deferred function, using the closure
		// that was stored to its stack slot at the point of the defer
		// statement (r.closure), or a direct static call when there is no
		// stored closure.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)
		// Make sure that the stack slot with the closure pointer is kept
		// live throughout the function. Otherwise, it could be garbage
		// collected even before the function call returns.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4708
// callResult lowers the call n (of kind k) to SSA and returns the value of
// its result (nil if the call produces no usable result).
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4712
// callAddr lowers the call n (of kind k) to SSA and returns the address of
// its result rather than the result value itself.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4716
4717
4718
// call lowers the function call n to SSA and returns its result value (or
// the address of the result if returnResultAddr is true; nil if there are no
// results or the call is a go/defer). k selects the call flavor (normal,
// tail, go, defer, or stack-allocated defer record); deferExtra, if non-nil,
// is the extra argument passed to deferprocat.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target function (if static)
	var closure *ssa.Value   // ptr to closure to run (if dynamic)
	var codeptr *ssa.Value   // ptr to target code (if dynamic)
	var dextra *ssa.Value    // defer extra arg
	var rcvr *ssa.Value      // receiver to set
	fn := n.Fun
	var ACArgs []*types.Type    // AuxCall args
	var ACResults []*types.Type // AuxCall results
	var callArgs []*ssa.Value   // For late-expansion, the args themselves (not stored, args to the call instead).

	callABI := s.f.ABIDefault

	// go/defer statements are lowered so the deferred/spawned call itself
	// takes no arguments and returns no results; anything else is a bug.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be
				// a direct call to a non-ABIInternal
				// function. fn.Func may be nil for
				// some compiler-generated functions,
				// but those are all ABIInternal.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// TODO(register args): remove this legacy pragma path
				// once the register ABI is always on.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// Deferred nil function needs to panic when the function is
			// invoked, not at the point of the defer statement.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set (register) nil closure */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a defer record on the stack and store the closure in it.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize)
	} else {
		// Store arguments to stack, including defer/go arguments and
		// receiver for method calls. These are written in SP-offset order.
		argStart := base.Ctxt.Arch.FixedFrameSize
		// Defer/go args.
		if k != callNormal && k != callTail {
			// Write closure (the argument to newproc/deferproc).
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface-typed argument for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// Split the entry block if there are open defers, because later calls to
		// openDeferSave may cause a mismatch between the mem for an OpDereference
		// and the call site which uses it. See #49282.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Build the call, picking the target form by call kind.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a
			// closure is always safe, but IsSanitizerSafeAddr
			// can't always figure that out currently, and it's
			// critical that we not clobber any arguments already
			// stored onto the stack.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call: codeptr was loaded from the itab above.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call does not use caller's frame for arguments
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)

	// Insert VarLive opcodes to keep the listed variables live across the call.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Finish the block for defers: a BlockDefer has an extra edge to the
	// (shared) deferreturn exit block, taken when a recover happens.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn // Share a single deferreturn exit block.
		if r == nil {
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // recover edge to the exit code
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// call has no return value. Continue with the next statement.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
4952
4953
4954
// maybeNilCheckClosure inserts a nil check of the closure pointer where the
// architecture/OS requires one before calling through it.
// Note the precedence: Wasm always checks; AIX checks unless k == callGo
// (the "go of nil func value" case is presumably handled by the runtime
// there — TODO confirm).
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		// On other architectures the nil dereference at call time
		// produces the panic, so no explicit check is emitted.
		s.nilCheck(closure)
	}
}
4962
4963
4964
// getClosureAndRcvr returns values for the appropriate closure (a pointer
// into the itab's method table) and receiver of an interface call.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	s.nilCheck(itab)
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun") // offset of this method in the itab's Fun array
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
4974
4975
4976
4977 func etypesign(e types.Kind) int8 {
4978 switch e {
4979 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
4980 return -1
4981 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
4982 return +1
4983 }
4984 return 0
4985 }
4986
4987
4988
// addr converts the address of the expression n to SSA, adds it to s and
// returns the SSA result.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset computes the address of offset bytes into the global
	// identified by lsym.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// TODO: Make OpAddr use AuxInt as well as Aux.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Variable was moved to the heap; its address is in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot, address recorded at function setup
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			// ensure that we reuse symbols for out parameters so
			// that cse works on their addresses
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		// Return the address the dottype value was loaded from.
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5100
5101
5102
5103 func (s *state) canSSA(n ir.Node) bool {
5104 if base.Flag.N != 0 {
5105 return false
5106 }
5107 for {
5108 nn := n
5109 if nn.Op() == ir.ODOT {
5110 nn := nn.(*ir.SelectorExpr)
5111 n = nn.X
5112 continue
5113 }
5114 if nn.Op() == ir.OINDEX {
5115 nn := nn.(*ir.IndexExpr)
5116 if nn.X.Type().IsArray() {
5117 n = nn.X
5118 continue
5119 }
5120 }
5121 break
5122 }
5123 if n.Op() != ir.ONAME {
5124 return false
5125 }
5126 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5127 }
5128
5129 func (s *state) canSSAName(name *ir.Name) bool {
5130 if name.Addrtaken() || !name.OnStack() {
5131 return false
5132 }
5133 switch name.Class {
5134 case ir.PPARAMOUT:
5135 if s.hasdefer {
5136
5137
5138
5139
5140
5141 return false
5142 }
5143 if s.cgoUnsafeArgs {
5144
5145
5146 return false
5147 }
5148 }
5149 return true
5150
5151 }
5152
5153
5154 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5155 p := s.expr(n)
5156 if bounded || n.NonNil() {
5157 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5158 s.f.Warnl(lineno, "removed nil check")
5159 }
5160 return p
5161 }
5162 p = s.nilCheck(p)
5163 return p
5164 }
5165
5166
5167
5168
5169
5170
// nilCheck generates nil pointer checking code.
// Used only for automatically inserted nil checks,
// not for user code like 'x != nil'.
// Returns a "definitely not nil" copy of ptr so later uses
// can be ordered after the check.
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
5177
5178
5179
5180
5181
5182
5183
// boundsCheck generates bounds checking code. Checks if 0 <= idx <[=] len,
// branching to a panic exit if not. Starts a new block on return.
// On input, len must be converted to full int width and be nonnegative.
// Returns idx converted to full int width.
// If bounded is true then caller guarantees the index is not out of bounds
// (but boundsCheck will still extend the index to full int width).
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If bounded or bounds checking is flag-disabled, then no check necessary,
		// just return the extended index.
		//
		// Here, bounded == true if the compiler generated the index itself,
		// such as in the expansion of a slice initializer. These indexes are
		// compiler-generated, not Go program variables, so they cannot be
		// attacker-controlled, so we can omit Spectre masking as well.
		//
		// Note that we do not want to omit Spectre masking in code like:
		//
		//	if 0 <= i && i < len(x) {
		//		use(x[i])
		//	}
		//
		// Lucky for us, bounded==false for that code.
		// In that case (handled below), we emit a bound check (and Spectre mask)
		// and then the prove pass will remove the bounds check.
		// In theory the prove pass could potentially remove certain
		// Spectre masks, but it's very delicate and probably better
		// to be conservative and leave them all in.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	// Pick the unsigned variant of the bounds kind when the index type is
	// unsigned (affects the panic message / runtime call selected).
	if !idx.Type.IsSigned() {
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Index checks use idx < len; slice checks use idx <= len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// TODO(khr): figure out how to do "register" based calling convention for bounds checks.
		// Should be similar to gcWriteBarrier, but that code requires an experiment.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply an appropriate mask to avoid speculative
	// out-of-bounds accesses.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5270
5271
// check generates a branch to a panic block calling fn if cmp (a boolean SSA
// value) is false. Panic blocks are shared per (runtime function, position)
// pair via s.panics.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call (returns == false) ends the block, so no
		// explicit exit is needed here.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5294
5295 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5296 needcheck := true
5297 switch b.Op {
5298 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5299 if b.AuxInt != 0 {
5300 needcheck = false
5301 }
5302 }
5303 if needcheck {
5304
5305 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5306 s.check(cmp, ir.Syms.Panicdivide)
5307 }
5308 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5309 }
5310
5311
5312
5313
5314
// rtcall issues a call to the given runtime function fn with the listed args,
// returning a list of results of the given result types.
// The call is added to the end of the current block.
// If returns is false, the block is marked as an exit block.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Compute the stack-argument layout (offsets are only used to size the
	// call's frame via AuxInt below).
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish the current block as an exit block (the runtime call
		// does not return, e.g. a panic function).
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5366
5367
// storeType stores the value right of type t to the location left, inserting
// write barriers where needed. skip indicates parts of t that have already
// been stored; leftIsStmt marks the store as a statement boundary position.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not have write barrier. Store the whole type.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so write barrier stores for
	// pointer fields can be grouped together, and scalar values
	// don't need to be live across the write barrier call.
	// TODO: if the writebarrier pass knows how to reorder stores,
	// we can do a single store here as long as skip==0.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5387
5388
// storeTypeScalars does the scalar parts of storing the value right of type t
// to the location left (the stores that never need a write barrier).
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap types don't need a write
			// barrier, so they count as scalars here.
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field is stored as a scalar (uintptr) here; the data
		// pointer is handled by storeTypePtrs.
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5436
5437
// storeTypePtrs does the pointer parts of storing the value right of type t
// to the location left (the stores that may require a write barrier).
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Stored as a scalar by storeTypeScalars.
			break
		}
		s.store(t, left, right)
	case t.IsString():
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// Only the data word needs a write barrier; the itab was
		// stored by storeTypeScalars.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5476
5477
5478 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5479 var a *ssa.Value
5480 if !ssa.CanSSA(t) {
5481 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5482 } else {
5483 a = s.expr(n)
5484 }
5485 return a
5486 }
5487
5488
5489
5490
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Check the outermost index first, so each inner index can use the
	// already-checked outer index as its bound.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice.
	// For strings the capacity of the result is unimportant. However,
	// we use rcap to test if we've generated a zero-length slice.
	// Use length of strings for that.
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice.
	//
	// Generate the following code assuming that indexes are in bounds.
	// The masking is to make sure that we don't generate a slice
	// that points to the next object in memory. We cannot just set
	// the pointer to nil because then we would create a nil slice or
	// string.
	//
	//     rcap = k - i
	//     rlen = j - i
	//     rptr = ptr + (mask(rcap) & (i * stride))
	//
	// Where mask(x) is 0 if x==0 and -1 if x>0 and stride is the width
	// of the element type.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// The delta is the number of bytes to offset ptr by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// If we're slicing to the point where the capacity is zero,
	// zero out the delta.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5603
// u642fcvtTab parameterizes uint64Tofloat with the comparison, conversion,
// and integer ops appropriate for a given target float width.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 describes uint64 -> float64 conversion.
var u64_f64 = u642fcvtTab{
	leq: ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and: ssa.OpAnd64,
	rsh: ssa.OpRsh64Ux64,
	or: ssa.OpOr64,
	add: ssa.OpAdd64F,
	one: (*state).constInt64,
}

// u64_f32 describes uint64 -> float32 conversion.
var u64_f32 = u642fcvtTab{
	leq: ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and: ssa.OpAnd64,
	rsh: ssa.OpRsh64Ux64,
	or: ssa.OpOr64,
	add: ssa.OpAdd32F,
	one: (*state).constInt64,
}
5628
// uint64Tofloat64 converts x (a uint64 of type ft) to a float64 of type tt.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

// uint64Tofloat32 converts x (a uint64 of type ft) to a float32 of type tt.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5636
// uint64Tofloat converts an unsigned 64-bit integer x (type ft) to the float
// type tt described by cvttab, using n as the variable that merges the two
// branch results.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// if x >= 0 {
	//    result = (floatY) x
	// } else {
	// 	  y = uintX(x) ; y = x & 1
	// 	  z = uintX(x) ; z = z >> 1
	// 	  z = z | y
	// 	  result = floatY(z)
	// 	  result = result + result
	// }
	//
	// A large uint64 looks negative to the hardware's signed
	// integer-to-float conversion. Since the float mantissa cannot hold
	// all 64 bits anyway, we halve the value first (shift right one, with
	// the low bit OR'ed back in to preserve rounding behavior), convert
	// the now-nonnegative value, and double the result.
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: value fits in the signed range, convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: halve with sticky low bit, convert, then double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5696
// u322fcvtTab parameterizes uint32Tofloat with the int-to-float conversion
// op and the float64-to-target narrowing op for a given target float width.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 describes uint32 -> float64 conversion (no narrowing, so cvtF2F
// is a plain copy).
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 describes uint32 -> float32 conversion (the negative path computes
// in float64, then narrows).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5710
// uint32Tofloat64 converts x (a uint32 of type ft) to a float64 of type tt.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

// uint32Tofloat32 converts x (a uint32 of type ft) to a float32 of type tt.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5718
// uint32Tofloat converts an unsigned 32-bit integer x (type ft) to the float
// type tt described by cvttab.
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// if x >= 0 {
	// 	result = floatY(x)
	// } else {
	// 	result = floatY(float64(x) + (1<<32))
	// }
	// A value with the high bit set looks negative to the signed
	// conversion; adding 2^32 in float64 (which is exact) recovers the
	// unsigned value.
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: fits in signed range, convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: convert as signed, add 2^32, narrow if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5756
5757
// referenceTypeBuiltin generates code for the len/cap builtins for maps and
// channels by loading the appropriate word from the (non-nil) header.
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)")
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)")
	}
	// if n == nil {
	//   return 0
	// } else {
	//   // len
	//   return *((*int)n)
	//   // cap
	//   return *(((*int)n)+1)
	// }
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if n.X.Type().IsMap() {
			// The map count is the first field of the map header.
			loadType := reflectdata.MapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// Channel length is stored in the first word.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// Capacity is stored in the second word.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
5824
// f2uCvtTab parameterizes floatToUint so a single implementation covers all
// float-to-unsigned-integer conversions (float32/float64 → uint32/uint64).
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op // float less-than, float→int convert, float subtract, integer OR
	floatValue func(*state, *types.Type, float64) *ssa.Value // builds a float constant of the source type
	intValue func(*state, *types.Type, int64) *ssa.Value // builds an integer constant of the result type
	cutoff uint64 // 1<<(result width - 1): smallest value needing the subtract-and-OR path
}
5831
// f32_u64 describes conversion from float32 to uint64.
var f32_u64 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto64,
	subf: ssa.OpSub32F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}
5841
// f64_u64 describes conversion from float64 to uint64.
var f64_u64 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto64,
	subf: ssa.OpSub64F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}
5851
// f32_u32 describes conversion from float32 to uint32.
var f32_u32 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto32,
	subf: ssa.OpSub32F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}
5861
// f64_u32 describes conversion from float64 to uint32.
var f64_u32 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto32,
	subf: ssa.OpSub64F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}
5871
// float32ToUint64 converts x (a float32 value of type ft) to uint64 (tt).
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}
// float64ToUint64 converts x (a float64 value of type ft) to uint64 (tt).
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}
5878
// float32ToUint32 converts x (a float32 value of type ft) to uint32 (tt).
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}
5882
// float64ToUint32 converts x (a float64 value of type ft) to uint32 (tt).
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
5886
// floatToUint emits SSA converting float value x (type ft) to an unsigned
// integer (type tt), parameterized by cvttab. The generated code is:
//
//	cutoff := 1 << (intWidth - 1)
//	if x < float(cutoff) {
//		result = uint(x)
//		if x < 0 { result = 0 }   // only on the hash-gated "new" path
//	} else {
//		y := x - float(cutoff)    // bring into signed-convertible range
//		z := uint(y)
//		result = z | -cutoff      // restore the high bit
//	}
//
// The extra x<0 clamp is gated by base.ConvertHash so it can be bisected;
// presumably it fixes results for negative inputs on some targets — confirm
// against the ConvertHash change that introduced it.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on x < cutoff.
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	var bThen, bZero *ssa.Block
	// On the new conversion path, bThen itself branches again (on x < 0).
	newConversion := base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil)
	if newConversion {
		bZero = s.f.NewBlock(ssa.BlockPlain)
		bThen = s.f.NewBlock(ssa.BlockIf)
	} else {
		bThen = s.f.NewBlock(ssa.BlockPlain)
	}

	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then-branch: value fits; direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValueOrSfCall1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0

	if newConversion {
		// Clamp negative inputs to zero.
		cmpz := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cvttab.floatValue(s, ft, 0.0))
		s.endBlock()
		bThen.SetControl(cmpz)
		bThen.AddEdgeTo(bZero)
		bThen.Likely = ssa.BranchUnlikely
		bThen.AddEdgeTo(bAfter)

		s.startBlock(bZero)
		s.vars[n] = cvttab.intValue(s, tt, 0)
		s.endBlock()
		bZero.AddEdgeTo(bAfter)
	} else {
		s.endBlock()
		bThen.AddEdgeTo(bAfter)
	}

	// Else-branch: subtract cutoff, convert, then OR the high bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValueOrSfCall2(cvttab.subf, ft, x, cutoff)
	y = s.newValueOrSfCall1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5956
5957
5958
5959
// dottype generates SSA for a type assertion node n = x.(T).
// commaok indicates whether the caller wants the two-result ",ok" form;
// when false the generated code panics on failure and resok is nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}

	if n.UseNilPanic {
		if commaok {
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && commaok == true")
		}
		if n.Type().IsInterface() {
			// Asserts to interface types are not expected to carry
			// UseNilPanic (the front end apparently never produces this
			// combination — confirm with the IR construction).
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && Type().IsInterface() == true")
		}
		typs := s.f.Config.Types
		// Rebuild the interface with an explicit nil check on its type
		// word, so a nil interface faults here with a nil-pointer panic
		// rather than a type-assertion panic inside dottype1.
		iface = s.newValue2(
			ssa.OpIMake,
			iface.Type,
			s.nilCheck(s.newValue1(ssa.OpITab, typs.BytePtr, iface)),
			s.newValue1(ssa.OpIData, typs.BytePtr, iface),
		)
	}

	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
5988
// dynamicDottype generates SSA for a type assertion whose target type is
// only known dynamically (e.g. instantiated generic code carrying runtime
// type/itab values). commaok selects the two-result ",ok" form.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// Nonempty interface → concrete type: recover the target type
		// descriptor by loading the itab's Type field.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
6006
6007
6008
6009
6010
6011
6012
6013
6014
// dottype1 implements the x.(T) operation. iface is the argument (x),
// src is the type being asserted from and dst the type being asserted to (T).
// source is the runtime type descriptor of src (may be nil; recomputed when
// needed for panics). target is the runtime type descriptor of dst.
// targetItab, when src is a nonempty interface and dst is concrete, is the
// itab for (dst, src). commaok selects the two-result ",ok" form; otherwise
// the generated code panics on failure. descriptor, if non-nil, is a
// compiler-allocated cache descriptor whose address is passed to the
// runtime's type-assert helper (and consulted inline when the target
// supports the interface-switch cache).
func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
	typs := s.f.Config.Types
	byteptr := typs.BytePtr
	if dst.IsInterface() {
		if dst.IsEmptyInterface() {
			// Converting to an empty interface.
			// Input could be an empty or nonempty interface.
			if base.Debug.TypeAssert > 0 {
				base.WarnfAt(pos, "type assertion inlined")
			}

			// Get itab/type word from the input interface.
			itab := s.newValue1(ssa.OpITab, byteptr, iface)
			// Conversion succeeds iff that word is non-nil.
			cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))

			if src.IsEmptyInterface() && commaok {
				// Empty→empty with ,ok is just a nil check.
				return iface, cond
			}

			// Branch on nilness.
			b := s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cond)
			b.Likely = ssa.BranchLikely
			bOk := s.f.NewBlock(ssa.BlockPlain)
			bFail := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bOk)
			b.AddEdgeTo(bFail)

			if !commaok {
				// On failure, panic by calling panicnildottype.
				s.startBlock(bFail)
				s.rtcall(ir.Syms.Panicnildottype, false, nil, target)

				// On success, return the (possibly rewrapped) input.
				s.startBlock(bOk)
				if src.IsEmptyInterface() {
					res = iface
					return
				}
				// Nonempty → empty: load the type out of the itab.
				off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
				typ := s.load(byteptr, off)
				idata := s.newValue1(ssa.OpIData, byteptr, iface)
				res = s.newValue2(ssa.OpIMake, dst, typ, idata)
				return
			}

			s.startBlock(bOk)
			// Nonempty → empty with ,ok:
			// on success, the result's type word comes from the itab.
			off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
			s.vars[typVar] = s.load(byteptr, off)
			s.endBlock()

			// On failure, itab is nil; use it as the nil type word.
			s.startBlock(bFail)
			s.vars[typVar] = itab
			s.endBlock()

			// Merge point.
			bEnd := s.f.NewBlock(ssa.BlockPlain)
			bOk.AddEdgeTo(bEnd)
			bFail.AddEdgeTo(bEnd)
			s.startBlock(bEnd)
			idata := s.newValue1(ssa.OpIData, byteptr, iface)
			res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
			resok = cond
			delete(s.vars, typVar)
			return
		}
		// Converting to a nonempty interface needs a runtime call.
		if base.Debug.TypeAssert > 0 {
			base.WarnfAt(pos, "type assertion not inlined")
		}

		itab := s.newValue1(ssa.OpITab, byteptr, iface)
		data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)

		// First, check for nil.
		bNil := s.f.NewBlock(ssa.BlockPlain)
		bNonNil := s.f.NewBlock(ssa.BlockPlain)
		bMerge := s.f.NewBlock(ssa.BlockPlain)
		cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cond)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNonNil)
		b.AddEdgeTo(bNil)

		s.startBlock(bNil)
		if commaok {
			s.vars[typVar] = itab // which is nil on this path
			b := s.endBlock()
			b.AddEdgeTo(bMerge)
		} else {
			// Panic if input is nil.
			s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
		}

		// Get typ, possibly by loading it out of the itab.
		s.startBlock(bNonNil)
		typ := itab
		if !src.IsEmptyInterface() {
			typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
		}

		// Check the inline assert cache first.
		var d *ssa.Value
		if descriptor != nil {
			d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
			if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
				// The cache needs an atomic pointer load;
				// verify the intrinsic exists on this target.
				if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
					s.Fatalf("atomic load not available")
				}
				// Pick pointer-width arithmetic ops.
				var mul, and, add, zext ssa.Op
				if s.config.PtrSize == 4 {
					mul = ssa.OpMul32
					and = ssa.OpAnd32
					add = ssa.OpAdd32
					zext = ssa.OpCopy
				} else {
					mul = ssa.OpMul64
					and = ssa.OpAnd64
					add = ssa.OpAdd64
					zext = ssa.OpZeroExt32to64
				}

				loopHead := s.f.NewBlock(ssa.BlockPlain)
				loopBody := s.f.NewBlock(ssa.BlockPlain)
				cacheHit := s.f.NewBlock(ssa.BlockPlain)
				cacheMiss := s.f.NewBlock(ssa.BlockPlain)

				// Load the cache pointer out of the descriptor, atomically
				// so we see a fully-written cache.
				atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
				cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
				s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

				// Load the type hash (from the type or the itab).
				var hash *ssa.Value
				if src.IsEmptyInterface() {
					hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
				} else {
					hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
				}
				hash = s.newValue1(zext, typs.Uintptr, hash)
				s.vars[hashVar] = hash
				// The mask is the first word of the cache.
				mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
				// Jump to loop head.
				b := s.endBlock()
				b.AddEdgeTo(loopHead)

				// Loop head: compute the address of the entry for the
				// current probe: entries are 2 pointer-words each, after
				// the one-word mask header (hence the +PtrSize).
				s.startBlock(loopHead)
				idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
				idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
				idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
				e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
				// Advance hash for the next probe (linear probing).
				s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

				// Hit if the entry's first word matches typ.
				eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
				cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
				b = s.endBlock()
				b.Kind = ssa.BlockIf
				b.SetControl(cmp1)
				b.AddEdgeTo(cacheHit)
				b.AddEdgeTo(loopBody)

				// An empty (nil) entry means the type is not cached: miss.
				// Otherwise keep probing.
				s.startBlock(loopBody)
				cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
				b = s.endBlock()
				b.Kind = ssa.BlockIf
				b.SetControl(cmp2)
				b.AddEdgeTo(cacheMiss)
				b.AddEdgeTo(loopHead)

				// Hit: the cached itab is the entry's second word.
				s.startBlock(cacheHit)
				eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
				s.vars[typVar] = eItab
				b = s.endBlock()
				b.AddEdgeTo(bMerge)

				// Miss: fall through to the runtime call below.
				s.startBlock(cacheMiss)
			}
		}

		// Resolve the assertion with a runtime call.
		if descriptor != nil {
			itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
		} else {
			var fn *obj.LSym
			if commaok {
				fn = ir.Syms.AssertE2I2 // returns nil on failure
			} else {
				fn = ir.Syms.AssertE2I // panics on failure
			}
			itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
		}
		s.vars[typVar] = itab
		b = s.endBlock()
		b.AddEdgeTo(bMerge)

		// Build the resulting interface.
		s.startBlock(bMerge)
		itab = s.variable(typVar, byteptr)
		var ok *ssa.Value
		if commaok {
			ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
		}
		return s.newValue2(ssa.OpIMake, dst, itab, data), ok
	}

	if base.Debug.TypeAssert > 0 {
		base.WarnfAt(pos, "type assertion inlined")
	}

	// Converting to a concrete type: compare the interface's first word
	// against the wanted type/itab pointer.
	direct := types.IsDirectIface(dst)
	itab := s.newValue1(ssa.OpITab, byteptr, iface) // type word of interface
	if base.Debug.TypeAssert > 0 {
		// NOTE(review): this repeats the warning emitted a few lines above,
		// so -d=typeassert prints "type assertion inlined" twice for this
		// assertion — confirm whether the duplicate is intentional.
		base.WarnfAt(pos, "type assertion inlined")
	}
	var wantedFirstWord *ssa.Value
	if src.IsEmptyInterface() {
		// Looking for a pointer to the target type.
		wantedFirstWord = target
	} else {
		// Looking for a pointer to the itab for (target type, src interface).
		wantedFirstWord = targetItab
	}

	var tmp ir.Node     // temporary used when dst is not SSA-able
	var addr *ssa.Value // address of tmp
	if commaok && !ssa.CanSSA(dst) {
		// unSSAable type; write the result through a temporary
		// instead of carrying it in SSA variables.
		tmp, addr = s.temp(pos, dst)
	}

	cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cond)
	b.Likely = ssa.BranchLikely

	bOk := s.f.NewBlock(ssa.BlockPlain)
	bFail := s.f.NewBlock(ssa.BlockPlain)
	b.AddEdgeTo(bOk)
	b.AddEdgeTo(bFail)

	if !commaok {
		// On failure, panic by calling panicdottype.
		s.startBlock(bFail)
		taddr := source
		if taddr == nil {
			taddr = s.reflectType(src)
		}
		if src.IsEmptyInterface() {
			s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
		} else {
			s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
		}

		// On success, return the data from the interface.
		s.startBlock(bOk)
		if direct {
			return s.newValue1(ssa.OpIData, dst, iface), nil
		}
		p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
		return s.load(dst, p), nil
	}

	// commaok: we have a control-flow merge point, so route both the value
	// and the ok flag through SSA variables (or through tmp for unSSAable dst).
	bEnd := s.f.NewBlock(ssa.BlockPlain)
	// A fresh marker per call, since the value's type differs per assertion.
	valVar := ssaMarker("val")

	// Assertion succeeded.
	s.startBlock(bOk)
	if tmp == nil {
		if direct {
			s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
		} else {
			p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
			s.vars[valVar] = s.load(dst, p)
		}
	} else {
		p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
		s.move(dst, addr, p)
	}
	s.vars[okVar] = s.constBool(true)
	s.endBlock()
	bOk.AddEdgeTo(bEnd)

	// Assertion failed: zero value, ok=false.
	s.startBlock(bFail)
	if tmp == nil {
		s.vars[valVar] = s.zeroVal(dst)
	} else {
		s.zero(dst, addr)
	}
	s.vars[okVar] = s.constBool(false)
	s.endBlock()
	bFail.AddEdgeTo(bEnd)

	// Merge point.
	s.startBlock(bEnd)
	if tmp == nil {
		res = s.variable(valVar, dst)
		delete(s.vars, valVar)
	} else {
		res = s.load(dst, addr)
	}
	resok = s.variable(okVar, types.Types[types.TBOOL])
	delete(s.vars, okVar)
	return res, resok
}
6351
6352
6353 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6354 tmp := typecheck.TempAt(pos, s.curfn, t)
6355 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
6356 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6357 }
6358 addr := s.addr(tmp)
6359 return tmp, addr
6360 }
6361
6362
6363 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6364 v := s.vars[n]
6365 if v != nil {
6366 return v
6367 }
6368 v = s.fwdVars[n]
6369 if v != nil {
6370 return v
6371 }
6372
6373 if s.curBlock == s.f.Entry {
6374
6375 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6376 }
6377
6378
6379 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6380 s.fwdVars[n] = v
6381 if n.Op() == ir.ONAME {
6382 s.addNamedValue(n.(*ir.Name), v)
6383 }
6384 return v
6385 }
6386
// mem returns the current memory state (the memory pseudo-variable)
// in the block being built.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6390
6391 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6392 if n.Class == ir.Pxxx {
6393
6394 return
6395 }
6396 if ir.IsAutoTmp(n) {
6397
6398 return
6399 }
6400 if n.Class == ir.PPARAMOUT {
6401
6402
6403 return
6404 }
6405 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6406 values, ok := s.f.NamedValues[loc]
6407 if !ok {
6408 s.f.Names = append(s.f.Names, &loc)
6409 s.f.CanonicalLocalSlots[loc] = &loc
6410 }
6411 s.f.NamedValues[loc] = append(values, v)
6412 }
6413
6414
// Branch is an unresolved branch: P is the branch instruction and B is the
// SSA block it should jump to. Targets are patched in after all blocks'
// start Progs are known.
type Branch struct {
	P *obj.Prog  // branch instruction awaiting its target
	B *ssa.Block // destination block
}
6419
6420
// State contains state needed during Prog generation for a single function.
type State struct {
	ABI obj.ABI // calling convention of the function being compiled

	pp *objw.Progs // Prog list under construction

	// Branches remembers all branch instructions emitted so far and the
	// blocks they should jump to; targets are resolved once every block's
	// starting Prog is known.
	Branches []Branch

	// JumpTables remembers all jump-table blocks; their target Progs are
	// filled in after block starts are known.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	maxarg int64 // presumably the largest outgoing-argument frame size seen — confirm with backend users

	// livenessMap maps values (GC safe points) to liveness indices,
	// produced by liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs records arguments that may be partially live,
	// per liveness.Compute.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart marks the first Prog of the current run of instructions
	// sharing one line number; Prog uses it to float statement marks to
	// the start of such a run. Reset at block boundaries.
	lineRunStart *obj.Prog

	// OnWasmStackSkipped is wasm-specific bookkeeping — not used in this
	// file; see the wasm backend for its meaning.
	OnWasmStackSkipped int
}
6454
// FuncInfo returns the FuncInfo of the linker symbol for the function
// currently being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6458
6459
// Prog appends a new Prog with opcode as and returns it, maintaining
// statement-mark bookkeeping: within a run of instructions on the same
// source line, the is-statement mark is floated to the first instruction
// of the run.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		// This opcode cannot carry a statement mark; leave marks alone.
		return p
	}
	// New line (or first instruction): p starts a new run.
	// Same line: move any statement mark from p to the run's start.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6475
6476
// Pc returns the Prog that will be emitted next (useful as a forward label).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6480
6481
// SetPos sets the source position that subsequently emitted Progs will carry.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6485
6486
6487
6488
6489 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
6490 p := s.Prog(op)
6491 p.To.Type = obj.TYPE_BRANCH
6492 s.Branches = append(s.Branches, Branch{P: p, B: target})
6493 return p
6494 }
6495
6496
6497
6498
6499
6500
// DebugFriendlySetPosFrom sets the current emission position from v,
// adjusting the is-statement flag with heuristics that reduce "jumpy"
// line-number churn while debugging: register-allocator artifacts (copies,
// spills, fills) and phis never get statement marks, and a repeated
// same-file-and-line position keeps the existing (statement-marked)
// position instead of overwriting it with a non-statement one.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements; strip any statement mark.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the new position isn't itself a statement,
			// avoid downgrading an existing statement mark for
			// the same file and line: keep the current position.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// Current position already carries the
					// statement mark for this line; keep it.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v, but copy the statement mark from State
			}
			s.SetPos(p)
		} else {
			// No position on v: reuse the current one, unmarked.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6539
6540
// emitArgInfo emits the FUNCDATA symbol describing the function's arguments
// for runtime traceback, attaching it to the function's FuncInfo. Functions
// with no receiver and no parameters get no arg info.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Reference the symbol from the function body via FUNCDATA.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6558
6559
// EmitArgInfo generates and returns the argument-info symbol
// ("<fn>.arginfo<abi>") consumed by runtime traceback to print argument
// values. The content is a byte stream of (offset, size) pairs for scalar
// components plus structural markers (rtabi.TraceArgs*), with limits on
// component count, nesting depth, and total length.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// Aggregates are decomposed into scalar components below.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into x's data
	n := 0    // components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate component: (offset, size), or the
	// offset-too-large marker when the frame offset doesn't fit.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType emits the encoding of t at frame offset baseOffset.
	// It returns false when the component limit is hit and emission
	// should stop.
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Pointer, length, capacity.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Real and imaginary float halves.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty aggregate still counts as one component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty aggregate still counts as one component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Instantiated generic function: skip the first (dictionary)
		// parameter — presumably implicit and not useful in tracebacks.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6663
6664
// emitWrappedFuncInfo emits the FUNCDATA symbol recording, for a wrapper
// function, which function it wraps ("<wrapped>.wrapinfo"). Skipped when
// linkshared, and a no-op for functions that wrap nothing.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Relative reference to the wrapped symbol is not valid across
		// shared libraries — skip under -linkshared.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Reference the symbol from the function body via FUNCDATA.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6691
6692
6693 func genssa(f *ssa.Func, pp *objw.Progs) {
6694 var s State
6695 s.ABI = f.OwnAux.Fn.ABI()
6696
6697 e := f.Frontend().(*ssafn)
6698
6699 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6700
6701 var lv *liveness.Liveness
6702 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6703 emitArgInfo(e, f, pp)
6704 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6705
6706 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6707 if openDeferInfo != nil {
6708
6709
6710 p := pp.Prog(obj.AFUNCDATA)
6711 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6712 p.To.Type = obj.TYPE_MEM
6713 p.To.Name = obj.NAME_EXTERN
6714 p.To.Sym = openDeferInfo
6715 }
6716
6717 emitWrappedFuncInfo(e, pp)
6718
6719
6720 s.bstart = make([]*obj.Prog, f.NumBlocks())
6721 s.pp = pp
6722 var progToValue map[*obj.Prog]*ssa.Value
6723 var progToBlock map[*obj.Prog]*ssa.Block
6724 var valueToProgAfter []*obj.Prog
6725 if gatherPrintInfo {
6726 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6727 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6728 f.Logf("genssa %s\n", f.Name)
6729 progToBlock[s.pp.Next] = f.Blocks[0]
6730 }
6731
6732 if base.Ctxt.Flag_locationlists {
6733 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6734 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
6735 }
6736 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
6737 clear(valueToProgAfter)
6738 }
6739
6740
6741
6742 firstPos := src.NoXPos
6743 for _, v := range f.Entry.Values {
6744 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6745 firstPos = v.Pos
6746 v.Pos = firstPos.WithDefaultStmt()
6747 break
6748 }
6749 }
6750
6751
6752
6753
6754 var inlMarks map[*obj.Prog]int32
6755 var inlMarkList []*obj.Prog
6756
6757
6758
6759 var inlMarksByPos map[src.XPos][]*obj.Prog
6760
6761 var argLiveIdx int = -1
6762
6763
6764
6765
6766
6767 var hotAlign, hotRequire int64
6768
6769 if base.Debug.AlignHot > 0 {
6770 switch base.Ctxt.Arch.Name {
6771
6772
6773
6774
6775
6776 case "amd64", "386":
6777
6778
6779
6780 hotAlign = 64
6781 hotRequire = 31
6782 }
6783 }
6784
6785
6786 for i, b := range f.Blocks {
6787
6788 s.lineRunStart = nil
6789 s.SetPos(s.pp.Pos.WithNotStmt())
6790
6791 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
6792
6793
6794
6795
6796
6797 p := s.pp.Prog(obj.APCALIGNMAX)
6798 p.From.SetConst(hotAlign)
6799 p.To.SetConst(hotRequire)
6800 }
6801
6802 s.bstart[b.ID] = s.pp.Next
6803
6804 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
6805 argLiveIdx = idx
6806 p := s.pp.Prog(obj.APCDATA)
6807 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
6808 p.To.SetConst(int64(idx))
6809 }
6810
6811
6812 Arch.SSAMarkMoves(&s, b)
6813 for _, v := range b.Values {
6814 x := s.pp.Next
6815 s.DebugFriendlySetPosFrom(v)
6816
6817 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
6818 v.Fatalf("input[0] and output not in same register %s", v.LongString())
6819 }
6820
6821 switch v.Op {
6822 case ssa.OpInitMem:
6823
6824 case ssa.OpArg:
6825
6826 case ssa.OpSP, ssa.OpSB:
6827
6828 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
6829
6830 case ssa.OpGetG:
6831
6832
6833 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
6834
6835 case ssa.OpPhi:
6836 CheckLoweredPhi(v)
6837 case ssa.OpConvert:
6838
6839 if v.Args[0].Reg() != v.Reg() {
6840 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
6841 }
6842 case ssa.OpInlMark:
6843 p := Arch.Ginsnop(s.pp)
6844 if inlMarks == nil {
6845 inlMarks = map[*obj.Prog]int32{}
6846 inlMarksByPos = map[src.XPos][]*obj.Prog{}
6847 }
6848 inlMarks[p] = v.AuxInt32()
6849 inlMarkList = append(inlMarkList, p)
6850 pos := v.Pos.AtColumn1()
6851 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
6852 firstPos = src.NoXPos
6853
6854 default:
6855
6856 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6857 s.SetPos(firstPos)
6858 firstPos = src.NoXPos
6859 }
6860
6861
6862 s.pp.NextLive = s.livenessMap.Get(v)
6863 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
6864
6865
6866 Arch.SSAGenValue(&s, v)
6867 }
6868
6869 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
6870 argLiveIdx = idx
6871 p := s.pp.Prog(obj.APCDATA)
6872 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
6873 p.To.SetConst(int64(idx))
6874 }
6875
6876 if base.Ctxt.Flag_locationlists {
6877 valueToProgAfter[v.ID] = s.pp.Next
6878 }
6879
6880 if gatherPrintInfo {
6881 for ; x != s.pp.Next; x = x.Link {
6882 progToValue[x] = v
6883 }
6884 }
6885 }
6886
6887 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
6888 p := Arch.Ginsnop(s.pp)
6889 p.Pos = p.Pos.WithIsStmt()
6890 if b.Pos == src.NoXPos {
6891 b.Pos = p.Pos
6892 if b.Pos == src.NoXPos {
6893 b.Pos = s.pp.Text.Pos
6894 }
6895 }
6896 b.Pos = b.Pos.WithBogusLine()
6897 }
6898
6899
6900
6901
6902
6903 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
6904
6905
6906 var next *ssa.Block
6907 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
6908
6909
6910
6911
6912 next = f.Blocks[i+1]
6913 }
6914 x := s.pp.Next
6915 s.SetPos(b.Pos)
6916 Arch.SSAGenBlock(&s, b, next)
6917 if gatherPrintInfo {
6918 for ; x != s.pp.Next; x = x.Link {
6919 progToBlock[x] = b
6920 }
6921 }
6922 }
6923 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
6924
6925
6926
6927
6928 Arch.Ginsnop(s.pp)
6929 }
6930 if openDeferInfo != nil {
6931
6932
6933
6934
6935
6936
6937
6938
6939 s.pp.NextLive = s.livenessMap.DeferReturn
6940 p := s.pp.Prog(obj.ACALL)
6941 p.To.Type = obj.TYPE_MEM
6942 p.To.Name = obj.NAME_EXTERN
6943 p.To.Sym = ir.Syms.Deferreturn
6944
6945
6946
6947
6948
6949 for _, o := range f.OwnAux.ABIInfo().OutParams() {
6950 n := o.Name
6951 rts, offs := o.RegisterTypesAndOffsets()
6952 for i := range o.Registers {
6953 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
6954 }
6955 }
6956
6957 s.pp.Prog(obj.ARET)
6958 }
6959
6960 if inlMarks != nil {
6961 hasCall := false
6962
6963
6964
6965
6966 for p := s.pp.Text; p != nil; p = p.Link {
6967 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
6968 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
6969
6970
6971
6972
6973
6974 continue
6975 }
6976 if _, ok := inlMarks[p]; ok {
6977
6978
6979 continue
6980 }
6981 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
6982 hasCall = true
6983 }
6984 pos := p.Pos.AtColumn1()
6985 marks := inlMarksByPos[pos]
6986 if len(marks) == 0 {
6987 continue
6988 }
6989 for _, m := range marks {
6990
6991
6992
6993 p.Pos = p.Pos.WithIsStmt()
6994 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
6995
6996 m.As = obj.ANOP
6997 m.Pos = src.NoXPos
6998 m.From = obj.Addr{}
6999 m.To = obj.Addr{}
7000 }
7001 delete(inlMarksByPos, pos)
7002 }
7003
7004 for _, p := range inlMarkList {
7005 if p.As != obj.ANOP {
7006 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7007 }
7008 }
7009
7010 if e.stksize == 0 && !hasCall {
7011
7012
7013
7014
7015
7016
7017 for p := s.pp.Text; p != nil; p = p.Link {
7018 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7019 continue
7020 }
7021 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7022
7023 nop := Arch.Ginsnop(s.pp)
7024 nop.Pos = e.curfn.Pos().WithIsStmt()
7025
7026
7027
7028
7029
7030 for x := s.pp.Text; x != nil; x = x.Link {
7031 if x.Link == nop {
7032 x.Link = nop.Link
7033 break
7034 }
7035 }
7036
7037 for x := s.pp.Text; x != nil; x = x.Link {
7038 if x.Link == p {
7039 nop.Link = p
7040 x.Link = nop
7041 break
7042 }
7043 }
7044 }
7045 break
7046 }
7047 }
7048 }
7049
7050 if base.Ctxt.Flag_locationlists {
7051 var debugInfo *ssa.FuncDebug
7052 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
7053
7054
7055 debugInfo.EntryID = f.Entry.ID
7056 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7057 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7058 } else {
7059 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7060 }
7061 bstart := s.bstart
7062 idToIdx := make([]int, f.NumBlocks())
7063 for i, b := range f.Blocks {
7064 idToIdx[b.ID] = i
7065 }
7066
7067
7068
7069 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7070 switch v {
7071 case ssa.BlockStart.ID:
7072 if b == f.Entry.ID {
7073 return 0
7074
7075 }
7076 return bstart[b].Pc
7077 case ssa.BlockEnd.ID:
7078 blk := f.Blocks[idToIdx[b]]
7079 nv := len(blk.Values)
7080 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7081 case ssa.FuncEnd.ID:
7082 return e.curfn.LSym.Size
7083 default:
7084 return valueToProgAfter[v].Pc
7085 }
7086 }
7087 }
7088
7089
7090 for _, br := range s.Branches {
7091 br.P.To.SetTarget(s.bstart[br.B.ID])
7092 if br.P.Pos.IsStmt() != src.PosIsStmt {
7093 br.P.Pos = br.P.Pos.WithNotStmt()
7094 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7095 br.P.Pos = br.P.Pos.WithNotStmt()
7096 }
7097
7098 }
7099
7100
7101 for _, jt := range s.JumpTables {
7102
7103 targets := make([]*obj.Prog, len(jt.Succs))
7104 for i, e := range jt.Succs {
7105 targets[i] = s.bstart[e.Block().ID]
7106 }
7107
7108
7109
7110 fi := s.pp.CurFunc.LSym.Func()
7111 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7112 }
7113
7114 if e.log {
7115 filename := ""
7116 for p := s.pp.Text; p != nil; p = p.Link {
7117 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7118 filename = p.InnermostFilename()
7119 f.Logf("# %s\n", filename)
7120 }
7121
7122 var s string
7123 if v, ok := progToValue[p]; ok {
7124 s = v.String()
7125 } else if b, ok := progToBlock[p]; ok {
7126 s = b.String()
7127 } else {
7128 s = " "
7129 }
7130 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7131 }
7132 }
7133 if f.HTMLWriter != nil {
7134 var buf strings.Builder
7135 buf.WriteString("<code>")
7136 buf.WriteString("<dl class=\"ssa-gen\">")
7137 filename := ""
7138
7139 liveness := lv.Format(nil)
7140 if liveness != "" {
7141 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7142 buf.WriteString(html.EscapeString("# " + liveness))
7143 buf.WriteString("</dd>")
7144 }
7145
7146 for p := s.pp.Text; p != nil; p = p.Link {
7147
7148
7149 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7150 filename = p.InnermostFilename()
7151 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7152 buf.WriteString(html.EscapeString("# " + filename))
7153 buf.WriteString("</dd>")
7154 }
7155
7156 buf.WriteString("<dt class=\"ssa-prog-src\">")
7157 if v, ok := progToValue[p]; ok {
7158
7159
7160 if p.As != obj.APCDATA {
7161 if liveness := lv.Format(v); liveness != "" {
7162
7163 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7164 buf.WriteString(html.EscapeString("# " + liveness))
7165 buf.WriteString("</dd>")
7166
7167 buf.WriteString("<dt class=\"ssa-prog-src\">")
7168 }
7169 }
7170
7171 buf.WriteString(v.HTML())
7172 } else if b, ok := progToBlock[p]; ok {
7173 buf.WriteString("<b>" + b.HTML() + "</b>")
7174 }
7175 buf.WriteString("</dt>")
7176 buf.WriteString("<dd class=\"ssa-prog\">")
7177 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7178 buf.WriteString("</dd>")
7179 }
7180 buf.WriteString("</dl>")
7181 buf.WriteString("</code>")
7182 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7183 }
7184 if ssa.GenssaDump[f.Name] {
7185 fi := f.DumpFileForPhase("genssa")
7186 if fi != nil {
7187
7188
7189 inliningDiffers := func(a, b []src.Pos) bool {
7190 if len(a) != len(b) {
7191 return true
7192 }
7193 for i := range a {
7194 if a[i].Filename() != b[i].Filename() {
7195 return true
7196 }
7197 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7198 return true
7199 }
7200 }
7201 return false
7202 }
7203
7204 var allPosOld []src.Pos
7205 var allPos []src.Pos
7206
7207 for p := s.pp.Text; p != nil; p = p.Link {
7208 if p.Pos.IsKnown() {
7209 allPos = allPos[:0]
7210 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7211 if inliningDiffers(allPos, allPosOld) {
7212 for _, pos := range allPos {
7213 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7214 }
7215 allPos, allPosOld = allPosOld, allPos
7216 }
7217 }
7218
7219 var s string
7220 if v, ok := progToValue[p]; ok {
7221 s = v.String()
7222 } else if b, ok := progToBlock[p]; ok {
7223 s = b.String()
7224 } else {
7225 s = " "
7226 }
7227 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7228 }
7229 fi.Close()
7230 }
7231 }
7232
7233 defframe(&s, e, f)
7234
7235 f.HTMLWriter.Close()
7236 f.HTMLWriter = nil
7237 }
7238
// defframe finishes the function prologue bookkeeping: it fills in the
// TEXT instruction's argument/frame sizes, spills any pointer-carrying
// register arguments that were not already spilled in the entry block,
// and zero-initializes stack slots marked Needzero.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Compute the total frame size: outgoing-argument area (aligned to
	// e.stkalign) plus the local-variable area, with optional
	// architecture-specific padding.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Record argument and frame sizes on the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	// p tracks the insertion point for prologue instructions.
	p := pp.Text

	// If the function takes register arguments and optimization is on,
	// spill the pointer-containing pieces of partially-live arguments
	// (s.partLiveArgs) that the entry block did not already spill.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// nameOff identifies one spilled piece of an argument:
		// the variable plus the byte offset within it.
		type nameOff struct {
			n *ir.Name
			off int64
		}
		// Collect the (name, offset) pairs the entry block already
		// spills via OpStoreReg of an OpArgIntReg, stopping at the
		// first call.
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// For each multi-register, partially-live input parameter,
		// spill any pointer-typed register piece not already spilled.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// [lo, hi) is the current pending range (frame offsets) to zero.
	var lo, hi int64

	// state is scratch shared across ZeroRange calls; its meaning is
	// architecture-specific.
	var state uint32

	// Walk the declared locals, coalescing nearby Needzero ranges so a
	// single ZeroRange call can cover small gaps.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		// Merge this variable into the pending range if it is close
		// enough (within 2*RegSize below lo). Note: locals sit at
		// negative-growing offsets, so lo moves downward.
		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			lo = n.FrameOffset()
			continue
		}

		// Flush the pending range (no-op when hi-lo == 0).
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new range at this variable.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Flush the final pending range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7345
7346
// An IndexJump describes one half of a combined conditional branch:
// the branch instruction to emit (Jump) and which successor of the
// block it targets (Index into b.Succs).
type IndexJump struct {
	Jump obj.As
	Index int
}
7351
7352 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7353 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7354 p.Pos = b.Pos
7355 }
7356
7357
7358
7359 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7360 switch next {
7361 case b.Succs[0].Block():
7362 s.oneJump(b, &jumps[0][0])
7363 s.oneJump(b, &jumps[0][1])
7364 case b.Succs[1].Block():
7365 s.oneJump(b, &jumps[1][0])
7366 s.oneJump(b, &jumps[1][1])
7367 default:
7368 var q *obj.Prog
7369 if b.Likely != ssa.BranchUnlikely {
7370 s.oneJump(b, &jumps[1][0])
7371 s.oneJump(b, &jumps[1][1])
7372 q = s.Br(obj.AJMP, b.Succs[1].Block())
7373 } else {
7374 s.oneJump(b, &jumps[0][0])
7375 s.oneJump(b, &jumps[0][1])
7376 q = s.Br(obj.AJMP, b.Succs[0].Block())
7377 }
7378 q.Pos = b.Pos
7379 }
7380 }
7381
7382
// AddAux adds the offset and symbol information carried by v (AuxInt and
// Aux) to the address a. Shorthand for AddAux2 with offset v.AuxInt.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
// AddAux2 adds the given integer offset plus the symbol information from
// v.Aux (if any) to the address a, which must be a memory or address
// operand.
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// Add the integer offset.
	a.Offset += offset

	// If there is no symbol attached, we're done.
	if v.Aux == nil {
		return
	}

	// Attach the symbol, classifying the address by the aux's type.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		// Stack-resident parameters use NAME_PARAM; everything else
		// (including register-passed results) is NAME_AUTO.
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
		} else {
			a.Name = obj.NAME_AUTO
		}
		a.Sym = n.Linksym()
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}
7417
7418
7419
// extendIndex extends the index value idx to pointer width.
// kind is the bounds-check kind to use if a range check must be emitted;
// bounded indicates the access is already known to be in range.
// len is the length value used in the panic call's arguments.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit target: keep the low 32 bits and,
		// unless checks are elided, verify the high 32 bits are zero
		// (for a signed in-range index the high word is zero too;
		// negative values have a nonzero high word and are caught).
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		// Skip the check when the access is bounded or bounds checks
		// are disabled (Flag.B).
		if bounded || base.Flag.B != 0 {
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Switch to the unsigned flavor of the check kind so the
			// panic message matches the index's signedness.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// Branch: hi == 0 falls through to bNext (likely); otherwise
		// bPanic raises the extension panic.
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: sign- or zero-extend it.
	// The switch key packs (index size, pointer size) as 10*size+ptrSize.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14: // 1-byte index, 4-byte pointer
			op = ssa.OpSignExt8to32
		case 18: // 1-byte index, 8-byte pointer
			op = ssa.OpSignExt8to64
		case 24: // 2-byte index, 4-byte pointer
			op = ssa.OpSignExt16to32
		case 28: // 2-byte index, 8-byte pointer
			op = ssa.OpSignExt16to64
		case 48: // 4-byte index, 8-byte pointer
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7511
7512
7513
7514 func CheckLoweredPhi(v *ssa.Value) {
7515 if v.Op != ssa.OpPhi {
7516 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7517 }
7518 if v.Type.IsMemory() {
7519 return
7520 }
7521 f := v.Block.Func
7522 loc := f.RegAlloc[v.ID]
7523 for _, a := range v.Args {
7524 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7525 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7526 }
7527 }
7528 }
7529
7530
7531
7532
7533
7534 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7535 entry := v.Block.Func.Entry
7536 if entry != v.Block {
7537 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7538 }
7539 for _, w := range entry.Values {
7540 if w == v {
7541 break
7542 }
7543 switch w.Op {
7544 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7545
7546 default:
7547 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7548 }
7549 }
7550 }
7551
7552
7553 func CheckArgReg(v *ssa.Value) {
7554 entry := v.Block.Func.Entry
7555 if entry != v.Block {
7556 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7557 }
7558 }
7559
7560 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7561 n, off := ssa.AutoVar(v)
7562 a.Type = obj.TYPE_MEM
7563 a.Sym = n.Linksym()
7564 a.Reg = int16(Arch.REGSP)
7565 a.Offset = n.FrameOffset() + off
7566 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7567 a.Name = obj.NAME_PARAM
7568 } else {
7569 a.Name = obj.NAME_AUTO
7570 }
7571 }
7572
7573
7574
7575 func (s *State) Call(v *ssa.Value) *obj.Prog {
7576 pPosIsStmt := s.pp.Pos.IsStmt()
7577 s.PrepareCall(v)
7578
7579 p := s.Prog(obj.ACALL)
7580 if pPosIsStmt == src.PosIsStmt {
7581 p.Pos = v.Pos.WithIsStmt()
7582 } else {
7583 p.Pos = v.Pos.WithNotStmt()
7584 }
7585 if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
7586 p.To.Type = obj.TYPE_MEM
7587 p.To.Name = obj.NAME_EXTERN
7588 p.To.Sym = sym.Fn
7589 } else {
7590
7591 switch Arch.LinkArch.Family {
7592 case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
7593 p.To.Type = obj.TYPE_REG
7594 case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
7595 p.To.Type = obj.TYPE_MEM
7596 default:
7597 base.Fatalf("unknown indirect call family")
7598 }
7599 p.To.Reg = v.Args[0].Reg()
7600 }
7601 return p
7602 }
7603
7604
7605
// TailCall emits a tail call for the SSA value v: a Call whose opcode is
// rewritten to RET so the callee returns directly to this function's caller.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7611
7612
7613
7614
7615 func (s *State) PrepareCall(v *ssa.Value) {
7616 idx := s.livenessMap.Get(v)
7617 if !idx.StackMapValid() {
7618
7619 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7620 base.Fatalf("missing stack map index for %v", v.LongString())
7621 }
7622 }
7623
7624 call, ok := v.Aux.(*ssa.AuxCall)
7625
7626 if ok {
7627
7628
7629 if nowritebarrierrecCheck != nil {
7630 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7631 }
7632 }
7633
7634 if s.maxarg < v.AuxInt {
7635 s.maxarg = v.AuxInt
7636 }
7637 }
7638
7639
7640
7641 func (s *State) UseArgs(n int64) {
7642 if s.maxarg < n {
7643 s.maxarg = n
7644 }
7645 }
7646
7647
7648 func fieldIdx(n *ir.SelectorExpr) int {
7649 t := n.X.Type()
7650 if !t.IsStruct() {
7651 panic("ODOT's LHS is not a struct")
7652 }
7653
7654 for i, f := range t.Fields() {
7655 if f.Sym == n.Sel {
7656 if f.Offset != n.Offset() {
7657 panic("field offset doesn't match")
7658 }
7659 return i
7660 }
7661 }
7662 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7663
7664
7665
7666 }
7667
7668
7669
// ssafn carries per-function frontend state for the SSA backend and
// implements the compiler-services interface the backend calls into
// (StringData, Fatalf, Syslook, ...).
type ssafn struct {
	curfn *ir.Func // the function being compiled
	strings map[string]*obj.LSym // cache: string constant -> data symbol (see StringData)
	stksize int64 // local-variable area size; added to the frame in defframe
	stkptrsize int64 // NOTE(review): presumably the pointer-containing prefix of the frame — uses not visible here, confirm at call sites

	// stkalign is the alignment applied to the outgoing-argument area
	// when computing the frame size (see defframe).
	stkalign int64

	log bool // enables debug output via Logf/Log
}
7684
7685
7686
7687 func (e *ssafn) StringData(s string) *obj.LSym {
7688 if aux, ok := e.strings[s]; ok {
7689 return aux
7690 }
7691 if e.strings == nil {
7692 e.strings = make(map[string]*obj.LSym)
7693 }
7694 data := staticdata.StringSym(e.curfn.Pos(), s)
7695 e.strings[s] = data
7696 return data
7697 }
7698
7699
// SplitSlot returns a slot representing the data of parent starting at
// offset, with type t. For splittable autos it creates a fresh local
// variable named after the parent plus suffix; otherwise it returns a
// view into the parent's storage.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	if node.Class != ir.PAUTO || node.Addrtaken() {
		// Non-autos and address-taken variables cannot truly be
		// split; return a sub-slot that keeps the parent's storage.
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	// Create a new local for the split piece, named "<parent><suffix>".
	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever)
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}
7715
7716
7717 func (e *ssafn) Logf(msg string, args ...interface{}) {
7718 if e.log {
7719 fmt.Printf(msg, args...)
7720 }
7721 }
7722
// Log reports whether SSA logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7726
7727
7728 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
7729 base.Pos = pos
7730 nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
7731 base.Fatalf("'%s': "+msg, nargs...)
7732 }
7733
7734
7735
// Warnl reports a compiler warning at the given position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7739
// Debug_checknil reports whether the nil-check debug flag
// (base.Debug.Nil) is set.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7743
// UseWriteBarrier reports whether write barriers are enabled
// (the compiler's WB flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7747
// Syslook returns the link symbol for the named runtime support routine.
// Only the names the SSA backend asks for are recognized; any other name
// is a fatal compiler error.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil
}
7766
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7770
7771 func clobberBase(n ir.Node) ir.Node {
7772 if n.Op() == ir.ODOT {
7773 n := n.(*ir.SelectorExpr)
7774 if n.X.Type().NumFields() == 1 {
7775 return clobberBase(n.X)
7776 }
7777 }
7778 if n.Op() == ir.OINDEX {
7779 n := n.(*ir.IndexExpr)
7780 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7781 return clobberBase(n.X)
7782 }
7783 }
7784 return n
7785 }
7786
7787
7788 func callTargetLSym(callee *ir.Name) *obj.LSym {
7789 if callee.Func == nil {
7790
7791
7792
7793 return callee.Linksym()
7794 }
7795
7796 return callee.LinksymABI(callee.Func.ABI)
7797 }
7798
7799
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct verifies it at build time.
const deferStructFnField = 4

// deferType caches the type synthesized by deferstruct.
var deferType *types.Type
7803
7804
7805
// deferstruct builds, caches in deferType, and returns the compiler's
// view of the runtime's _defer struct type.
func deferstruct() *types.Type {
	if deferType != nil {
		// Already built.
		return deferType
	}

	// makefield creates a struct field with the given name in no package.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	// Field list for the synthesized runtime._defer.
	// NOTE(review): fn, link, and head are declared as TUINTPTR here
	// rather than pointer types — presumably to keep this view
	// pointer-free; confirm against the runtime's _defer definition,
	// which this layout must match.
	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),

		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Verify the index constant used elsewhere to address the "fn" field.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer with the above underlying
	// struct and compute its size/layout.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
7844
7845
7846
7847
7848
7849 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
7850 return obj.Addr{
7851 Name: obj.NAME_NONE,
7852 Type: obj.TYPE_MEM,
7853 Reg: baseReg,
7854 Offset: spill.Offset + extraOffset,
7855 }
7856 }
7857
// BoundsCheckFunc holds one link symbol per ssa.BoundsKind — presumably
// the routine invoked when that bounds check fails; populated elsewhere.
var BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
7859
View as plain text