// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "internal/goexperiment"
16 "internal/runtime/gc"
17 "os"
18 "path/filepath"
19 "slices"
20 "strings"
21
22 "cmd/compile/internal/abi"
23 "cmd/compile/internal/base"
24 "cmd/compile/internal/ir"
25 "cmd/compile/internal/liveness"
26 "cmd/compile/internal/objw"
27 "cmd/compile/internal/reflectdata"
28 "cmd/compile/internal/rttype"
29 "cmd/compile/internal/ssa"
30 "cmd/compile/internal/staticdata"
31 "cmd/compile/internal/typecheck"
32 "cmd/compile/internal/types"
33 "cmd/internal/obj"
34 "cmd/internal/objabi"
35 "cmd/internal/src"
36 "cmd/internal/sys"
37
38 rtabi "internal/abi"
39 )
40
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache // one cache per concurrent backend worker (sized by -c in InitConfig)

var ssaDump string     // early copy of $GOSSAFUNC; the func name to dump output for
var ssaDir string      // optional destination for ssa dump file ($GOSSADIR)
var ssaDumpStdout bool // whether to dump to stdout as well ("+"-suffixed $GOSSAFUNC)
var ssaDumpCFG string  // generate CFGs for these phases (":"-suffix of $GOSSAFUNC)
const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds the functions that were inlined into the function
// being dumped, so their sources can be shown in the HTML output.
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation is the size bound for packing several small
// pointer-free heap-escaping locals into one shared allocation (see
// newHeapaddr); a local at or above this size gets its own allocation.
const maxAggregatedHeapAllocation = 16
57
58 func DumpInline(fn *ir.Func) {
59 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
60 ssaDumpInlined = append(ssaDumpInlined, fn)
61 }
62 }
63
64 func InitEnv() {
65 ssaDump = os.Getenv("GOSSAFUNC")
66 ssaDir = os.Getenv("GOSSADIR")
67 if ssaDump != "" {
68 if strings.HasSuffix(ssaDump, "+") {
69 ssaDump = ssaDump[:len(ssaDump)-1]
70 ssaDumpStdout = true
71 }
72 spl := strings.Split(ssaDump, ":")
73 if len(spl) > 1 {
74 ssaDump = spl[0]
75 ssaDumpCFG = spl[1]
76 }
77 }
78 }
79
// InitConfig builds the package-level SSA state: the shared type cache, the
// ssa.Config, the per-worker SSA caches, and the runtime symbols (ir.Syms)
// that SSA construction and lowering refer to. It must run before buildssa.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate implicit pointer types that the backend needs but the
	// frontend may not have created, so they land in the shared pointer
	// cache before it is frozen below.
	_ = types.NewPtr(types.Types[types.TINTER])
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))
	_ = types.NewPtr(types.NewPtr(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8])))
	_ = types.NewPtr(types.Types[types.TINT16])
	_ = types.NewPtr(types.Types[types.TINT64])
	_ = types.NewPtr(types.ErrorType)
	_ = types.NewPtr(reflectdata.MapType())
	_ = types.NewPtr(deferstruct())
	// Freeze the pointer cache: concurrent backend workers must not
	// mutate it from here on.
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	// One SSA cache per concurrent compile worker (-c).
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up the runtime functions and variables the generated code calls.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.GrowsliceBuf = typecheck.LookupRuntimeFunc("growsliceBuf")
	ir.Syms.GrowsliceBufNoAlias = typecheck.LookupRuntimeFunc("growsliceBufNoAlias")
	ir.Syms.GrowsliceNoAlias = typecheck.LookupRuntimeFunc("growsliceNoAlias")
	ir.Syms.MoveSlice = typecheck.LookupRuntimeFunc("moveSlice")
	ir.Syms.MoveSliceNoScan = typecheck.LookupRuntimeFunc("moveSliceNoScan")
	ir.Syms.MoveSliceNoCap = typecheck.LookupRuntimeFunc("moveSliceNoCap")
	ir.Syms.MoveSliceNoCapNoScan = typecheck.LookupRuntimeFunc("moveSliceNoCapNoScan")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	// Size-class-specialized malloc entry points, indexed by size class
	// (index 0 intentionally left nil).
	for i := 1; i < len(ir.Syms.MallocGCSmallNoScan); i++ {
		ir.Syms.MallocGCSmallNoScan[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallNoScanSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCSmallScanNoHeader); i++ {
		ir.Syms.MallocGCSmallScanNoHeader[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallScanNoHeaderSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCTiny); i++ {
		ir.Syms.MallocGCTiny[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocTiny%d", i))
	}
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Memequal = typecheck.LookupRuntimeFunc("memequal")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.PanicSimdImm = typecheck.LookupRuntimeFunc("panicSimdImm")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	// CPU-feature flags consulted by the lowering rules.
	ir.Syms.X86HasAVX = typecheck.LookupRuntimeVar("x86HasAVX")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// Wasm has no assembly bounds-check stubs; it calls the Go-level
	// goPanic* helpers directly.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	}

	// Wasm-only helper symbols; looked up unconditionally (harmless on
	// other targets).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
216
// InitTables initializes the intrinsic-function tables (nil means use the
// default configuration rather than a test override).
func InitTables() {
	initIntrinsics(nil)
}
220
221
222
223
224
225
226
227
// AbiForBodylessFuncStackMap returns the ABI for a bodyless function's stack map.
// This is not necessarily the ABI used to call it; currently it is always ABI0.
// It returns a fresh copy so that concurrent users cannot race on the config.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy() // No idea what races will result, be safe
}
231
232
233
234 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
235 if buildcfg.Experiment.RegabiArgs {
236
237 if fn == nil {
238 return abi1
239 }
240 switch fn.ABI {
241 case obj.ABI0:
242 return abi0
243 case obj.ABIInternal:
244
245
246 return abi1
247 }
248 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
249 panic("not reachable")
250 }
251
252 a := abi0
253 if fn != nil {
254 if fn.Pragma&ir.RegisterParams != 0 {
255 a = abi1
256 }
257 }
258 return a
259 }
260
261
262
263
264
265
266
267
268
269
270
271
// emitOpenDeferInfo emits FUNCDATA information about the defers in a function
// that is using open-coded defers. The funcdata records (as negated uvarints)
// the frame offset of the deferBits temporary and the frame offset of the
// first defer-closure slot; panic processing uses these to find and run the
// active defers.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Sanity check: frame layout must have placed the closure slots at
	// consecutive pointer-sized offsets starting at firstOffset.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	// The info lives in a content-addressable companion symbol.
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
292
293
294
// buildssa builds an SSA function for fn. worker indicates which of the
// backend workers is processing the function and selects the ssa.Cache to
// use; isPgoHot marks the function as hot according to PGO.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// GOSSAFUNC may name the function as "name", "pkgpath.name", or a
	// path suffix of pkgpath.name, optionally carrying an ABI written as
	// "name<N>" (rewritten below to the canonical "name,N" form).
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			// Already in "name,N" form: compare against the ABI-qualified name.
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			// "name<N>" form: rewrite to "name,N".
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // bare name match
			pkgDotName == ssaDump || // full package path match
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // path-suffix match
	}

	// Capture the pre-SSA AST dump for the HTML writer (and stdout).
	var astBuf *bytes.Buffer
	if printssa {
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Decide which sanitizer instrumentation this function gets.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	// Reuse this worker's scratch cache.
	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate the entry block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// Write the sources and the AST as the first HTML columns.
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether this function's defers can be open-coded.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Disabled on 386 with shared libraries/dynamic linking —
		// presumably the extra PIC code interferes with the
		// deferreturn sequence; TODO confirm the exact reason.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Race-detector enter/exit instrumentation is incompatible with
		// open-coded defer exits, so fall back to deferred-record defers.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Disable open-coded defers if any result lives on the heap:
		// post-defer result handling would be required otherwise.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Open-coded defers duplicate the defer calls at every exit, so
		// cap the code-size cost: skip them when returns×defers is large.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and its stack slot. deferBits is
		// a bitmask recording which open-coded defers are active; it is
		// address-taken so panic processing can read it from the frame.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// OpConst8 with default zero AuxInt: deferBits starts at 0.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Mark the slot live so stores to it are never eliminated: a
		// panic may need to read it even if normal control flow never
		// does.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Capture PPARAMOUT nodes passed in registers to a side list now,
	// before later passes prune them from fn.Dcl, so DWARF generation
	// still sees them.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent the address appearing
			// before the declaration
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // for debug information
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// SSA-able type but this name isn't (e.g. its
						// address is taken): receive via OpArg and
						// store straight to its stack slot.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Too big for SSA: spill each register piece
						// to the stack slot up front.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// Unoptimized range-body closure: spill the closure pointer
			// to a specially named stack slot (".closureptr") and keep
			// it live, presumably for the debugger — TODO confirm the
			// consumer of CloSlot.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep the store from being dead-store eliminated.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// If a captured-by-value variable is small enough and its
			// address is never taken, promote it to a PAUTO local and
			// load its value so it can live in SSA form.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// Captured by reference: the closure slot holds a pointer
			// to the variable; load it.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fall off the end of the function: synthesize the exit.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to the ssa package to compile the function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information for morestack calls
	// emitted by the assembler. All parameter pieces are recorded (used
	// or not); finer-grained liveness is a TODO.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		if len(offs) < len(typs) {
			s.Fatalf("len(offs)=%d < len(typs)=%d, params=\n%s", len(offs), len(typs), params)
		}
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
622
// storeParameterRegsToStack stores each register-resident piece of parameter n
// to its place in the stack slot at addr. If pointersOnly is set, only the
// pointer-shaped pieces are stored.
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		// Aux names the (parameter, offset) pair for debug info.
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}
639
640
641
642
643
644
645
// zeroResults zeros the return values at the start of the function.
// This must happen very early: a recovered panic can expose the results as
// they were at panic time, and for precise stack maps the garbage collector
// treats results as always live, so they must hold valid (zero) values
// before any defer can be registered.
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// Heap-allocated result: its stack slot just holds a
			// pointer, which is handled by the heap-address machinery.
			continue
		}
		// Zero the stack location containing f.
		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
666
667
668
// paramsToHeap produces code to allocate memory for heap-escaping parameters
// and to copy non-result parameters' values from the stack into the heap.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Input parameter: copy its incoming stack value to
				// the new heap copy. (Results have nothing to copy.)
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
691
692
693
694
695 func allocSizeAndAlign(t *types.Type) (int64, int64) {
696 size, align := t.Size(), t.Alignment()
697 if types.PtrSize == 4 && align == 4 && size >= 8 {
698
699 size = types.RoundUp(size, 8)
700 align = 8
701 }
702 return size, align
703 }
// allocSize returns just the size component of allocSizeAndAlign.
func allocSize(t *types.Type) int64 {
	size, _ := allocSizeAndAlign(t)
	return size
}
// allocAlign returns just the alignment component of allocSizeAndAlign.
func allocAlign(t *types.Type) int64 {
	_, align := allocSizeAndAlign(t)
	return align
}
712
713
// newHeapaddr allocates heap memory for n and sets its heap address.
// Small pointer-free locals are aggregated into a single shared allocation
// (up to maxAggregatedHeapAllocation bytes) to cut allocation count.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	// Pointer-ful, zero-sized, or large objects get their own allocation.
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type()))
		return
	}

	// Do we have room in the current aggregated allocation? If not,
	// flush the pending ones first.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value
	if len(s.pendingHeapAllocations) == 0 {
		// Start a new shared allocation. Its type/size are provisional;
		// flushPendingHeapAllocations rewrites the call if more objects
		// join it.
		allocCall = s.newObjectNonSpecialized(n.Type(), nil)
	} else {
		// Join the existing shared allocation.
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}
	// v is an offset into the shared allocation; the offset is a
	// placeholder 0 until the flush assigns real offsets.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	// Add to the list of pending allocations.
	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	// Record the heap address for n.
	s.setHeapaddr(n.Pos(), n, v)
}
749
// flushPendingHeapAllocations combines all pending heap allocations into a
// single allocation: it rewrites the shared newobject call into a mallocgc
// call of the combined size and assigns each pending OffPtr its final offset.
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return // nothing pending
	}
	s.pendingHeapAllocations = nil
	ptr := pending[0].Args[0] // the result value of the allocation call
	call := ptr.Args[0]       // the allocation call itself

	if len(pending) == 1 {
		// Just one object: keep the typed newobject call and turn the
		// OffPtr [0] into a plain copy of its result.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort by decreasing alignment, so the first object's alignment
	// covers the whole combined allocation and no padding is needed
	// between objects.
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign offsets and compute the total size.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the call to runtime.mallocgc(size, nil, true) —
	// nil type pointer (the aggregate has no single type) and
	// needzero=true.
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type), // nil type pointer
		s.constBool(true),             // needzero
		call.Args[1],                  // memory argument
	}
	mallocSym := ir.Syms.MallocGC
	if specialMallocSym := s.specializedMallocSym(size, false); specialMallocSym != nil {
		mallocSym = specialMallocSym
	}
	call.Aux = ssa.StaticAuxCall(mallocSym, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // 3 args + 1 result, each pointer-sized
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// The result is now an untyped unsafe.Pointer; the OffPtrs above
	// re-type the individual objects.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
804
// specializedMallocSym returns the size-class-specialized malloc entry point
// for an allocation of the given size and pointerness, or nil if no
// specialization applies (feature disabled, or size too large for in-span
// heap bits).
func (s *state) specializedMallocSym(size int64, hasPointers bool) *obj.LSym {
	if !s.sizeSpecializedMallocEnabled() {
		return nil
	}
	ptrSize := s.config.PtrSize
	ptrBits := ptrSize * 8
	minSizeForMallocHeader := ptrSize * ptrBits
	heapBitsInSpan := size <= minSizeForMallocHeader
	if !heapBitsInSpan {
		// Allocation would need a malloc header; no specialized variant.
		return nil
	}
	divRoundUp := func(n, a uintptr) uintptr { return (n + a - 1) / a }
	sizeClass := gc.SizeToSizeClass8[divRoundUp(uintptr(size), gc.SmallSizeDiv)]
	if hasPointers {
		return ir.Syms.MallocGCSmallScanNoHeader[sizeClass]
	}
	if size < gc.TinySize {
		// Tiny allocator path: indexed by exact size, not size class.
		return ir.Syms.MallocGCTiny[size]
	}
	return ir.Syms.MallocGCSmallNoScan[sizeClass]
}
826
// sizeSpecializedMallocEnabled reports whether calls to the size-specialized
// malloc entry points may be generated for this compilation.
func (s *state) sizeSpecializedMallocEnabled() bool {
	if base.Flag.CompilingRuntime {
		// Disabled while compiling the runtime itself — presumably the
		// specialized entry points cannot be safely self-hosted there;
		// TODO(review): confirm exact rationale.
		return false
	}
	// Requires the SizeSpecializedMalloc experiment, and is incompatible
	// with sanitizer instrumentation.
	return buildcfg.Experiment.SizeSpecializedMalloc && !base.Flag.Cfg.Instrumenting
}
841
842
843
// setHeapaddr allocates a new PAUTO variable to store ptr (which must be of
// type *n.Type()) and sets it as the .Heapaddr field of n, so subsequent uses
// of n access the heap copy through that pointer.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare a local ("&name") to hold the address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
862
863
// newObject returns an SSA value denoting new(typ), using a size-specialized
// malloc entry point when one applies.
func (s *state) newObject(typ *types.Type) *ssa.Value {
	if typ.Size() == 0 {
		// Zero-sized objects all share runtime.zerobase.
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	rtype := s.reflectType(typ)
	if specialMallocSym := s.specializedMallocSym(typ.Size(), typ.HasPointers()); specialMallocSym != nil {
		// Specialized path takes (size, type, needzero) explicitly.
		return s.rtcall(specialMallocSym, true, []*types.Type{types.NewPtr(typ)},
			s.constInt(types.Types[types.TUINTPTR], typ.Size()),
			rtype,
			s.constBool(true), // needzero — TODO(review): confirm false is never safe here
		)[0]
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
878
879
880
// newObjectNonSpecialized returns an SSA value denoting new(typ), always via
// the generic runtime.newobject (never a size-specialized entry point); used
// when the call may later be rewritten (see flushPendingHeapAllocations).
func (s *state) newObjectNonSpecialized(typ *types.Type, rtype *ssa.Value) *ssa.Value {
	if typ.Size() == 0 {
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	if rtype == nil {
		rtype = s.reflectType(typ)
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
890
// checkPtrAlignment inserts a call to runtime.checkptrAlignment to validate
// the unsafe.Pointer conversion n whose converted value is v. count, if
// non-nil, is the element count for a conversion to a pointer-to-array.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		// Check alignment of the array's element type.
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// Alignment-1 element with no length concern: nothing to check.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
921
922
923
// reflectType returns an SSA value representing a pointer to typ's
// reflection type descriptor (the *runtime._type symbol address).
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	// The address is loop-invariant, so materialize it in the entry block.
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
930
// dumpSourcesColumn writes the "sources" column of the GOSSAFUNC HTML dump:
// the source lines of fn followed by those of every function inlined into it.
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read the sources of the target function fn.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		// Best effort: log and continue without the target's sources.
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read the sources of the inlined functions.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	// Sort the inlined functions and prepend the target function.
	slices.SortFunc(inlFns, ssa.ByTopoCmp)
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}
959
960 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
961 f, err := os.Open(os.ExpandEnv(file))
962 if err != nil {
963 return nil, err
964 }
965 defer f.Close()
966 var lines []string
967 ln := uint(1)
968 scanner := bufio.NewScanner(f)
969 for scanner.Scan() && ln <= end {
970 if ln >= start {
971 lines = append(lines, scanner.Text())
972 }
973 ln++
974 }
975 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
976 }
977
978
979
980
// updateUnsetPredPos propagates position information from b (which must have
// a position) to any of b's predecessors that lack one, recurring on each
// predecessor it updates. The position chosen is b's own, or the position of
// b's earliest value that has one.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			// Lazily compute the best position the first time we need it.
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Values are in block order, so this is the
					// earliest positioned value in b.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		s.updateUnsetPredPos(p) // keep walking up the CFG
	}
}
1009
1010
// openDeferInfo describes a single defer call in a function using
// open-coded defers.
type openDeferInfo struct {
	// The node representing the defer call.
	n *ir.CallExpr

	// The SSA value of the deferred function/closure to call.
	closure *ssa.Value

	// The stack slot holding the deferred closure; its frame offset is
	// recorded in the open-defer funcdata (see emitOpenDeferInfo).
	closureNode *ir.Name
}
1022
// state holds the per-function state used while converting a function's IR
// to SSA form.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement tracking
	breakTo *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current block we're generating code in
	curBlock *ssa.Block

	// variable assignments in the current block
	// (map from variable node to its current SSA value)
	vars map[ir.Node]*ssa.Value

	// fwdVars holds values for variables used before they are defined in
	// the current block; cleared at each startBlock.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables (see buildssa).
	decladdrs map[*ir.Name]*ssa.Value

	// starting values: memory, stack pointer, and static base pointer
	startmem *ssa.Value
	sp *ssa.Value
	sb *ssa.Value
	// open-coded defer bookkeeping: the address of the deferBits slot
	// and the temp itself (set in buildssa when open defers are used)
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack; the current position is the top of stack
	line []src.XPos
	// the last known-position pushed (may since have been popped)
	lastPos src.XPos

	// shared panic-call blocks, keyed by (runtime function, call site),
	// used to deduplicate panic calls
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs bool
	hasdefer bool // whether the function contains a defer statement
	softFloat bool
	hasOpenDefers bool // whether we are doing open-coded defers
	checkPtrEnabled bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit
	instrumentMemory bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order (so exits must run them in reverse order of this
	// list).
	openDefers []*openDeferInfo

	// Blocks of the last defer-exit code generated so far, used to share
	// exit code between multiple returns.
	lastDeferExit *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount int

	// most recently emitted call value — TODO(review): confirm from uses
	// elsewhere in this file
	prevCall *ssa.Value

	// pending OffPtr values into a shared heap allocation, awaiting
	// aggregation (see newHeapaddr / flushPendingHeapAllocations)
	pendingHeapAllocations []*ssa.Value

	// appendTargets — set of nodes involved in append handling;
	// semantics not visible in this chunk, see uses elsewhere in the file
	appendTargets map[ir.Node]bool

	// per-block-ID position at which each block's code begins
	// (maintained by startBlock/pushLine/endBlock)
	blockStarts []src.XPos

	// per-node backing-store info for stack-allocated backing stores;
	// see backingStoreInfo
	backingStores map[ir.Node]*backingStoreInfo
}
1113
// backingStoreInfo records a stack-allocated backing store and its usage
// tracking. (Field semantics are not fully visible in this chunk — see the
// uses elsewhere in this file.)
type backingStoreInfo struct {
	// K — presumably the capacity (element count) of the backing store;
	// TODO(review): confirm from uses.
	K int64

	// the variable holding the backing store itself
	store *ir.Name

	// the variable tracking whether the store is in use;
	// TODO(review): confirm from uses
	used *ir.Name

	// whether the used state is known statically rather than tracked at
	// run time; TODO(review): confirm from uses
	usedStatic bool
}
1126
// funcLine identifies a runtime panic call site: the runtime function called
// plus the source position of the call. Used as the key of state.panics to
// deduplicate panic blocks.
type funcLine struct {
	f *obj.LSym
	base *src.PosBase
	line uint
}
1132
// ssaLabel carries the control-flow targets associated with a Go label.
type ssaLabel struct {
	target *ssa.Block // block identified by this label
	breakTarget *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1138
1139
1140 func (s *state) label(sym *types.Sym) *ssaLabel {
1141 lab := s.labels[sym.Name]
1142 if lab == nil {
1143 lab = new(ssaLabel)
1144 s.labels[sym.Name] = lab
1145 }
1146 return lab
1147 }
1148
// Thin forwarding wrappers to the underlying ssa.Func / frontend logging.
func (s *state) Logf(msg string, args ...any) { s.f.Logf(msg, args...) }
func (s *state) Log() bool { return s.f.Log() }

// Fatalf reports a compiler bug at the current source position.
func (s *state) Fatalf(msg string, args ...any) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...any) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
1156
// ssaMarker returns a unique marker node with the given name, used as a
// pseudo-variable key in s.vars (see memVar and friends below).
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1160
var (
	// memVar is the marker node for the fake "memory" variable:
	// s.vars[memVar] holds the current memory state during SSA construction.
	memVar = ssaMarker("mem")

	// marker nodes for temporary pseudo-variables used while lowering
	// individual constructs (slices, type asserts, defers, the interface
	// switch cache loop, ...). They never appear in generated code.
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1174
1175
// startBlock sets the current block we're generating code in to b.
// It is a fatal error to start a block while another is still open.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	clear(s.fwdVars)
	// Grow blockStarts so there is a slot for b; pushLine/endBlock record
	// the position of the block's first statement there.
	for len(s.blockStarts) <= int(b.ID) {
		s.blockStarts = append(s.blockStarts, src.NoXPos)
	}
}
1187
1188
1189
1190
// endBlock marks the end of generating code for the current block and
// returns that block. It saves the block's variable state into s.defvars,
// flushes pending heap allocations, and clears curBlock. Returns nil if
// there is no current block.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	s.flushPendingHeapAllocations()

	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// The block has no position of its own; leave it unset.
		// NOTE(review): presumably filled in later from a successor during
		// position assignment — confirm in the ssa package.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
		if s.blockStarts[b.ID] == src.NoXPos {
			s.blockStarts[b.ID] = s.lastPos
		}
	}
	return b
}
1218
1219
// pushLine pushes a line number on the line number stack; values and blocks
// created afterwards use it (see peekPos). Balanced by popLine.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// Unknown position: reuse the current top-of-stack position rather
		// than propagating line 0.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	// Record the position of the first statement in the current block.
	if b := s.curBlock; b != nil && s.blockStarts[b.ID] == src.NoXPos {
		s.blockStarts[b.ID] = line
	}

	s.line = append(s.line, line)
}
1239
1240
// popLine pops the top of the line number stack (inverse of pushLine).
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}
1244
1245
// peekPos returns the top of the line number stack: the position to use for
// newly created values and blocks.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1249
1250
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the
// current block. isStmt determines whether the created value may be marked
// as a statement boundary; if false the position is forced to "not a statement".
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the
// current block. isStmt is interpreted as in newValue1Apos.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the
// current block. isStmt is interpreted as in newValue1Apos.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4A adds a new value with four arguments and an aux value to the current block.
func (s *state) newValue4A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4A(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1354
// entryBlock returns the block that entry-style values (see entryNewValue*)
// should be placed in: normally the function's entry block.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// With optimization disabled (-N), place such values in the current
		// block instead of hoisting them to the entry block.
		// NOTE(review): rationale inferred — with no later optimization
		// passes, keeping values near their use presumably gives better
		// debugging/statement ordering; confirm against upstream comment.
		b = s.curBlock
	}
	return b
}
1366
1367
// entryNewValue0 adds a new value with no arguments to the entry block.
// Entry-block values carry no position (src.NoXPos).
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1401
1402
// const* routines add a new constant value to the function (constants are
// cached per function, not per block).
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}

// constInt adds a pointer-sized integer constant: 64-bit on 64-bit targets,
// otherwise 32-bit (it is a fatal error if c does not fit in 32 bits).
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}
1443
1444
1445
// newValueOrSfCall1 is like newValue1, except that on soft-float targets it
// first tries to lower the float op to a runtime call via sfcall.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}

// newValueOrSfCall2 is the two-argument analogue of newValueOrSfCall1.
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1462
// instrumentKind selects which flavor of sanitizer instrumentation a memory
// access receives (see instrument2).
type instrumentKind uint8

// Type the constants explicitly so they are instrumentKind values rather
// than untyped ints (callers all pass them as instrumentKind anyway).
const (
	instrumentRead  instrumentKind = iota // a load from the address
	instrumentWrite                       // a store to the address
	instrumentMove                        // a two-address copy (msan only; see instrument2)
)
1470
// instrument emits sanitizer instrumentation for a single-address access
// (read or write) of type t at addr. See instrument2.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1474
1475
1476
1477
// instrumentFields instruments a read/write operation on addr.
// If it is instrumenting for MSAN or ASAN and t is a struct type, it
// instruments the operation for each (non-blank) field individually,
// instead of for the whole struct.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !isStructNotSIMD(t) {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			continue
		}
		// Recurse on the field at its offset within the struct.
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}
1491
1492 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1493 if base.Flag.MSan {
1494 s.instrument2(t, dst, src, instrumentMove)
1495 } else {
1496 s.instrument(t, src, instrumentRead)
1497 s.instrument(t, dst, instrumentWrite)
1498 }
1499 }
1500
// instrument2 emits the sanitizer runtime call for an access of
// t.Size() bytes at addr. addr2 must be non-nil only for instrumentMove
// (msan's move hook takes both addresses). The call emitted depends on
// which sanitizer flag is active (msan, race, or asan).
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // zero-sized accesses need no instrumentation
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false // whether the chosen runtime helper takes a size argument

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// Composite objects (more than one component) use the range
		// variants, which cover the whole [addr, addr+w) span; an access
		// could touch any subobject.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component objects only need the start address reported;
		// the width argument is omitted.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	// Assemble the argument list: addr [, addr2] [, width].
	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1581
// load emits an instrumented load of a t-typed value from src.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}
1586
// rawLoad emits a load without sanitizer instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}
1590
// store emits a store of val (of type t) to dst, threading the memory state
// through s.vars[memVar].
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}
1594
// zero emits an instrumented zeroing of t.Size() bytes at dst.
// The zeroed type is recorded in the OpZero value's Aux.
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1601
// move copies a t-typed value from src to dst, assuming the two regions do
// not partially overlap (see moveWhichMayOverlap for the general case).
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
// moveWhichMayOverlap copies a t-typed value from src to dst. If mayOverlap
// is true, the source and destination regions may partially overlap, in
// which case an OpMove is not safe and the copy must go through the
// runtime's memmove-style helpers.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Multi-element arrays are the case where the two regions can
		// partially overlap. OpMove does not guarantee memmove semantics
		// for partial overlap, so call the runtime instead (unless the
		// backend can emit an inlinable, overlap-safe memmove).
		// NOTE(review): the single-element/non-array exemption presumably
		// relies on such copies being all-or-nothing — confirm against the
		// original commentary for the full argument.
		if t.HasPointers() {
			// Pointer-carrying memory needs the write-barrier-aware copy.
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// Record the write-barrier position on the function.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1648
1649
1650 func (s *state) stmtList(l ir.Nodes) {
1651 for _, n := range l {
1652 s.stmt(n)
1653 }
1654 }
1655
1656
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil and n isn't a label (which might be the target
	// of a goto we haven't seen yet), there is no way to reach this
	// statement: it is dead code, skip it.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				// These runtime functions never return; end the current
				// block as an exit block so nothing is generated after
				// the call.
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer implementation strategy was chosen.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				// Non-escaping defer with no explicit frame: the defer
				// record can live on the stack.
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		// v, ok := x.(T), with either a static or dynamic asserted type.
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSA-able result: dottype produced a load; strip it and
			// assign by address (deref), after checking that the load's
			// memory argument is still the live memory state.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// Only intrinsic calls returning two results reach here.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			// Heap-escaping variable: allocate its heap copy now.
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// A blank label can never be targeted; nothing to do.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block from an earlier goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// If the current block is still live, fall through into the
		// label's block; then continue generating there.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the goto itself is a statement
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment (x = x) of a variable: a no-op; skip it.
			// NOTE(review): skipping also avoids emitting a VarDef/copy
			// pair for x — confirm the exact rationale upstream.
			return
		}

		// mayOverlap: the LHS and RHS might refer to partially overlapping
		// memory. That can only happen for *p = *q style assignments.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// The source is a string's data pointer. Strings are
				// immutable, so the write side can't overlap it.
				// NOTE(review): rationale inferred — confirm upstream.
				mayOverlap = false
			}
		}

		// Evaluate the RHS, handling special forms first.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// Composite literals with nonzero fields were rewritten
				// earlier; any literal remaining here must be the zero
				// value, which we represent as rhs == nil.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// x = append(x, ...): only handled specially when the
				// destination is the same slice as the first argument and
				// optimization is enabled.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd, it lives on the stack and
				// needs no write barriers, so the special in-place update
				// is unnecessary.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				// In-place append: update the slice header directly.
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate rhs for its side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			// Assign by address. r == nil signals assign to zero the
			// destination instead of copying.
			if rhs == nil {
				r = nil
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source
			// (x = x[i:j:k]). Skip writing back slice-header components
			// that cannot have changed.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// With a zero/omitted low bound the pointer is unchanged;
			// omitted high/max bounds leave len/cap unchanged too.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty then/else bodies branch straight to bEnd.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp // jump to the target, do not return
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue: use the innermost targets
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue: look the label up
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the branch itself is a statement
		b.AddEdgeTo(to)

	case ir.OFOR:
		// Lower to: goto cond; cond: if Cond goto body else end;
		// body: Body; goto incr; incr: Post; goto cond; end:
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // distinct-vars loops are rewritten earlier
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// Ensure empty loop bodies still get a sensible position.
		bBody.Pos = n.Pos()

		// First, jump to the condition test.
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// Generate code to test the condition.
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			// No condition: unconditional loop.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// Set up continue/break targets for the body.
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// Generate the body.
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// Tear down continue/break targets.
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// Done with body; fall through to the increment.
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// Generate the post statement and loop back to the condition.
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// If the increment block has no position of its own, borrow
			// the condition block's so debug info stays sensible.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have mostly been rewritten by the front end into their
		// Compiled bodies; our main task is hooking up break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled switch/select
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// Generate the compiled body.
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// Every reachable path through the compiled body ends in an
		// explicit branch; if a current block remains, it is unreachable —
		// mark it as an exit.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make the blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing to evaluate is the index being looked up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Cases are taken to be sorted: min and max come from the first
		// and last entries. Subtracting min and doing one unsigned
		// compare against max-min range-checks both signed and unsigned
		// indices in a single branch.
		// NOTE(review): sortedness is assumed from the earlier lowering —
		// confirm where Cases is built.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		// Branch to the jump table when idx-min <= max-min, else to bEnd.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)   // in range: dispatch via the table
		b.AddEdgeTo(bEnd) // out of range: no case matches
		b.Likely = ssa.BranchLikely

		// Build the jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			// Clamp the index to block speculative out-of-range dispatch.
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Figure out where each index in [min, max] should go; slots with
		// no case default to bEnd.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default with the case's target block.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Try the per-descriptor cache first, if the target supports it.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// The cache lookup needs an atomic pointer load; make sure the
			// intrinsic exists on this architecture.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-size-appropriate arithmetic ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Atomically load the cache pointer from the descriptor so a
			// concurrently published cache is seen fully written.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize the probe hash (zero-extended to uintptr width).
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// The first word of the cache is the bucket mask.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Loop head: compute the address of the cache entry,
			// e = &entries[hash&mask]. Entries start one pointer past the
			// mask; each entry is 3 pointer-sized words (Typ, Case, Itab).
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// hash++ for the next probe (linear probing).
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// If the entry's type matches t, it's a hit.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// A nil type marks an empty slot: the probe missed.
			// Otherwise keep probing.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Hit: load Case (offset PtrSize) and Itab (offset 2*PtrSize)
			// from the entry and assign them to the statement's results.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Miss: fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		// Slow path: ask the runtime, then assign its results.
		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Cache hits merge back in here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		// Only the check's side effect (panic on nil) matters here;
		// the returned value is deliberately discarded.
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2351
2352
2353
2354 const shareDeferExits = false
2355
2356
2357
2358
// exit generates any code needed just before returning (defer processing,
// result materialization, race exit instrumentation) and returns the
// BlockRet block that ends the control flow. Its control value is the
// OpMakeResult carrying the results and final memory state.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated open-coded defer exit
				// (only when shareDeferExits is enabled).
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers: call the runtime's deferreturn,
			// positioned at the function end rather than the last
			// statement.
			// NOTE(review): rationale for using Endlineno inferred —
			// confirm against upstream comment.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Materialize each result value so it is live at the return.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// About to store to the result slot: mark the variable
				// defined for liveness.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Heap-escaped result: read it back from its heap copy.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Result already lives in its stack slot; dereference its
			// address to produce the value for MakeResult.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Race exit instrumentation comes after any heap result loads so the
	// loads above are attributed to this function, not the caller.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	// Final slot of results is the memory state.
	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2434
// opAndType is the key of the opToSSA table below: a generic IR operator
// paired with an operand kind, mapping to the concrete, sized SSA op.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2439
// opToSSA maps an (IR operator, concrete operand kind) pair to the
// generic SSA opcode implementing it. Signed and unsigned variants
// share an opcode where the bit pattern is identical (add, sub, mul,
// and, or, xor, comparisons for equality) and differ where semantics
// diverge (div, mod, ordered comparisons). Lookups go through
// (*state).ssaOp, which is a fatal error on a missing entry.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}: ssa.OpAdd8,
	{ir.OADD, types.TUINT8}: ssa.OpAdd8,
	{ir.OADD, types.TINT16}: ssa.OpAdd16,
	{ir.OADD, types.TUINT16}: ssa.OpAdd16,
	{ir.OADD, types.TINT32}: ssa.OpAdd32,
	{ir.OADD, types.TUINT32}: ssa.OpAdd32,
	{ir.OADD, types.TINT64}: ssa.OpAdd64,
	{ir.OADD, types.TUINT64}: ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}: ssa.OpSub8,
	{ir.OSUB, types.TUINT8}: ssa.OpSub8,
	{ir.OSUB, types.TINT16}: ssa.OpSub16,
	{ir.OSUB, types.TUINT16}: ssa.OpSub16,
	{ir.OSUB, types.TINT32}: ssa.OpSub32,
	{ir.OSUB, types.TUINT32}: ssa.OpSub32,
	{ir.OSUB, types.TINT64}: ssa.OpSub64,
	{ir.OSUB, types.TUINT64}: ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}: ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}: ssa.OpMul8,
	{ir.OMUL, types.TUINT8}: ssa.OpMul8,
	{ir.OMUL, types.TINT16}: ssa.OpMul16,
	{ir.OMUL, types.TUINT16}: ssa.OpMul16,
	{ir.OMUL, types.TINT32}: ssa.OpMul32,
	{ir.OMUL, types.TUINT32}: ssa.OpMul32,
	{ir.OMUL, types.TINT64}: ssa.OpMul64,
	{ir.OMUL, types.TUINT64}: ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division distinguishes signedness (trailing "u" = unsigned).
	{ir.ODIV, types.TINT8}: ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}: ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}: ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}: ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}: ssa.OpMod8,
	{ir.OMOD, types.TUINT8}: ssa.OpMod8u,
	{ir.OMOD, types.TINT16}: ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}: ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}: ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}: ssa.OpAnd8,
	{ir.OAND, types.TUINT8}: ssa.OpAnd8,
	{ir.OAND, types.TINT16}: ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}: ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}: ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}: ssa.OpOr8,
	{ir.OOR, types.TUINT8}: ssa.OpOr8,
	{ir.OOR, types.TINT16}: ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}: ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}: ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}: ssa.OpXor8,
	{ir.OXOR, types.TUINT8}: ssa.OpXor8,
	{ir.OXOR, types.TINT16}: ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}: ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}: ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality: pointer-shaped kinds (func, map, chan, ptr, uintptr,
	// unsafe.Pointer) all compare as pointers; interfaces and slices
	// have dedicated ops.
	{ir.OEQ, types.TBOOL}: ssa.OpEqB,
	{ir.OEQ, types.TINT8}: ssa.OpEq8,
	{ir.OEQ, types.TUINT8}: ssa.OpEq8,
	{ir.OEQ, types.TINT16}: ssa.OpEq16,
	{ir.OEQ, types.TUINT16}: ssa.OpEq16,
	{ir.OEQ, types.TINT32}: ssa.OpEq32,
	{ir.OEQ, types.TUINT32}: ssa.OpEq32,
	{ir.OEQ, types.TINT64}: ssa.OpEq64,
	{ir.OEQ, types.TUINT64}: ssa.OpEq64,
	{ir.OEQ, types.TINTER}: ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}: ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,

	{ir.ONE, types.TBOOL}: ssa.OpNeqB,
	{ir.ONE, types.TINT8}: ssa.OpNeq8,
	{ir.ONE, types.TUINT8}: ssa.OpNeq8,
	{ir.ONE, types.TINT16}: ssa.OpNeq16,
	{ir.ONE, types.TUINT16}: ssa.OpNeq16,
	{ir.ONE, types.TINT32}: ssa.OpNeq32,
	{ir.ONE, types.TUINT32}: ssa.OpNeq32,
	{ir.ONE, types.TINT64}: ssa.OpNeq64,
	{ir.ONE, types.TUINT64}: ssa.OpNeq64,
	{ir.ONE, types.TINTER}: ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}: ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,

	// Ordered comparisons distinguish signedness (trailing "U" = unsigned).
	// Only OLT/OLE appear here; OGT/OGE are canonicalized by swapping
	// operands before lookup (see exprCheckPtr's comparison case).
	{ir.OLT, types.TINT8}: ssa.OpLess8,
	{ir.OLT, types.TUINT8}: ssa.OpLess8U,
	{ir.OLT, types.TINT16}: ssa.OpLess16,
	{ir.OLT, types.TUINT16}: ssa.OpLess16U,
	{ir.OLT, types.TINT32}: ssa.OpLess32,
	{ir.OLT, types.TUINT32}: ssa.OpLess32U,
	{ir.OLT, types.TINT64}: ssa.OpLess64,
	{ir.OLT, types.TUINT64}: ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}: ssa.OpLeq8,
	{ir.OLE, types.TUINT8}: ssa.OpLeq8U,
	{ir.OLE, types.TINT16}: ssa.OpLeq16,
	{ir.OLE, types.TUINT16}: ssa.OpLeq16U,
	{ir.OLE, types.TINT32}: ssa.OpLeq32,
	{ir.OLE, types.TUINT32}: ssa.OpLeq32U,
	{ir.OLE, types.TINT64}: ssa.OpLeq64,
	{ir.OLE, types.TUINT64}: ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2611
2612 func (s *state) concreteEtype(t *types.Type) types.Kind {
2613 e := t.Kind()
2614 switch e {
2615 default:
2616 return e
2617 case types.TINT:
2618 if s.config.PtrSize == 8 {
2619 return types.TINT64
2620 }
2621 return types.TINT32
2622 case types.TUINT:
2623 if s.config.PtrSize == 8 {
2624 return types.TUINT64
2625 }
2626 return types.TUINT32
2627 case types.TUINTPTR:
2628 if s.config.PtrSize == 8 {
2629 return types.TUINT64
2630 }
2631 return types.TUINT32
2632 }
2633 }
2634
2635 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2636 etype := s.concreteEtype(t)
2637 x, ok := opToSSA[opAndType{op, etype}]
2638 if !ok {
2639 s.Fatalf("unhandled binary op %v %s", op, etype)
2640 }
2641 return x
2642 }
2643
// opAndTwoTypes is the lookup key for shiftOpToSSA: a shift operator
// plus the concrete kinds of the shifted value (etype1) and the shift
// count (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2649
// twoTypes is the lookup key for the float-conversion tables: the
// concrete source kind (etype1) and destination kind (etype2).
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2654
// twoOpsAndType is a two-step conversion recipe: apply op1, yielding a
// value of intermediateType, then apply op2 to produce the final type.
// Either step may be ssa.OpCopy (no-op) or ssa.OpInvalid (must be
// lowered via a helper instead; see conv).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2660
// fpConvOpToSSA is the default (portable) recipe table for conversions
// involving a floating-point type, keyed by (from, to) concrete kinds.
// Small integers are widened to 32 bits first; ssa.OpInvalid marks
// cases (unsigned 64-bit, and float-to-unsigned-32-on-some-targets)
// that the generic recipe cannot express and that conv handles via
// helper routines or the arch-specific override tables below.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32 (sign-extend to at least 32 bits, then convert)
	{types.TINT8, types.TFLOAT32}: {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	// signed int -> float64
	{types.TINT8, types.TFLOAT64}: {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed int (convert to 32/64 bits, then truncate)
	{types.TFLOAT32, types.TINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	// float64 -> signed int
	{types.TFLOAT64, types.TINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float32 (zero-extend; uint32 goes through a 64-bit
	// signed intermediate so the value stays non-negative)
	{types.TUINT8, types.TFLOAT32}: {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64}, // no portable recipe

	// unsigned int -> float64
	{types.TUINT8, types.TFLOAT64}: {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64}, // no portable recipe

	// float32 -> unsigned int
	{types.TFLOAT32, types.TUINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},  // handled in conv
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64}, // handled in conv

	// float64 -> unsigned int
	{types.TFLOAT64, types.TUINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},  // handled in conv
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64}, // handled in conv

	// float <-> float; same-size conversions round (relevant under
	// softfloat / for spilling semantics)
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2709
2710
2711
// fpConvOpToSSA32 overrides fpConvOpToSSA on 32-bit targets (except
// MIPS, and not under softfloat): uint32 <-> float conversions use
// dedicated unsigned conversion ops instead of a 64-bit intermediate.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2718
2719
// uint64fpConvOpToSSA overrides fpConvOpToSSA on targets with native
// uint64 <-> float conversion instructions (ARM64, Wasm, S390X) and
// under softfloat, replacing the OpInvalid entries in the base table.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2726
// shiftOpToSSA maps (shift operator, value kind, shift-count kind) to
// the SSA shift opcode. The opcode name encodes value width and count
// width (e.g. Lsh8x16 = 8-bit value shifted by 16-bit count). Left
// shift ignores value signedness; right shift selects arithmetic
// (Rsh..x..) vs logical (Rsh..Ux..) by the signedness of the value.
// Shift counts are always unsigned here (signed counts are checked and
// converted by the caller; see the OLSH/ORSH case in exprCheckPtr).
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2800
2801 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2802 etype1 := s.concreteEtype(t)
2803 etype2 := s.concreteEtype(u)
2804 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2805 if !ok {
2806 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2807 }
2808 return x
2809 }
2810
2811 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2812 if s.config.PtrSize == 4 {
2813 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2814 }
2815 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2816 }
2817
// conv converts SSA value v, which has type ft, to type tt, emitting
// whatever SSA ops (or runtime-helper calls) the target requires.
// n supplies source position and the expression context for debug
// hashing and instrumentation.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// bool -> uint8 has a dedicated op.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		// Integer <-> integer: copy, truncate, or extend by size.
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// Truncation. Cases encode sizes as 10*from+to
			// (e.g. 84 = 8-byte source -> 4-byte dest).
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// Widening from a signed source: sign extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// Widening from an unsigned source: zero extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex <-> complex: convert real and imaginary parts
		// independently with the matching float conversion.
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Scalar -> complex: convert to the element float type,
		// then pair with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		// Float <-> integer (or float <-> float): look up the
		// two-step recipe, then apply arch-specific overrides.
		cft, ctt := s.concreteEtype(ft), s.concreteEtype(tt)
		conv, ok := fpConvOpToSSA[twoTypes{cft, ctt}]

		// Debug-hash gate: when the "U" conversion hash rejects this
		// position, lower float->uint32 through a signed 64-bit
		// intermediate instead of the table's recipe.
		// NOTE(review): ConvertHash appears to be a bisect/debug
		// mechanism — confirm against cmd/compile/internal/base.
		if ctt == types.TUINT32 && ft.IsFloat() && !base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil) {
			// float -> int64 -> truncate to 32 bits.
			conv.op1 = ssa.OpCvt64Fto64
			if cft == types.TFLOAT32 {
				conv.op1 = ssa.OpCvt32Fto64
			}
			conv.op2 = ssa.OpTrunc64to32

		}
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			// 32-bit targets (except MIPS) have dedicated uint32 conversion ops.
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			// These targets (and softfloat) have native uint64 conversion ops.
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// MIPS lacks unsigned 32-bit conversion instructions; lower
		// uint32 <-> float via generated compare-and-adjust sequences.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Both steps expressible directly; elide OpCopy steps.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}

		// A step was OpInvalid: the uint64 cases with no direct ops.
		if ft.IsInteger() {
			// uint64 -> float via generated helper sequence.
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}

		// float -> unsigned integer.
		if ft.Size() == 4 {
			switch tt.Size() {
			case 8:
				return s.float32ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Result is truncated anyway, so the uint32 path suffices.
				return s.float32ToUint32(n, v, ft, tt)
			}
		}
		if ft.Size() == 8 {
			switch tt.Size() {
			case 8:
				return s.float64ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Result is truncated anyway, so the uint32 path suffices.
				return s.float64ToUint32(n, v, ft, tt)
			}

		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
3018
3019
// expr converts the expression n to SSA and returns its value,
// with unsafe-pointer alignment checking enabled.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
3023
3024 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
3025 if ir.HasUniquePos(n) {
3026
3027
3028 s.pushLine(n.Pos())
3029 defer s.popLine()
3030 }
3031
3032 s.stmtList(n.Init())
3033 switch n.Op() {
3034 case ir.OBYTES2STRTMP:
3035 n := n.(*ir.ConvExpr)
3036 slice := s.expr(n.X)
3037 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
3038 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3039 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
3040 case ir.OSTR2BYTESTMP:
3041 n := n.(*ir.ConvExpr)
3042 str := s.expr(n.X)
3043 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
3044 if !n.NonNil() {
3045
3046
3047
3048 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
3049 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
3050 ptr = s.ternary(cond, ptr, zerobase)
3051 }
3052 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
3053 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
3054 case ir.OCFUNC:
3055 n := n.(*ir.UnaryExpr)
3056 aux := n.X.(*ir.Name).Linksym()
3057
3058
3059 if aux.ABI() != obj.ABIInternal {
3060 s.Fatalf("expected ABIInternal: %v", aux.ABI())
3061 }
3062 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
3063 case ir.ONAME:
3064 n := n.(*ir.Name)
3065 if n.Class == ir.PFUNC {
3066
3067 sym := staticdata.FuncLinksym(n)
3068 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
3069 }
3070 if s.canSSA(n) {
3071 return s.variable(n, n.Type())
3072 }
3073 return s.load(n.Type(), s.addr(n))
3074 case ir.OLINKSYMOFFSET:
3075 n := n.(*ir.LinksymOffsetExpr)
3076 return s.load(n.Type(), s.addr(n))
3077 case ir.ONIL:
3078 n := n.(*ir.NilExpr)
3079 t := n.Type()
3080 switch {
3081 case t.IsSlice():
3082 return s.constSlice(t)
3083 case t.IsInterface():
3084 return s.constInterface(t)
3085 default:
3086 return s.constNil(t)
3087 }
3088 case ir.OLITERAL:
3089 switch u := n.Val(); u.Kind() {
3090 case constant.Int:
3091 i := ir.IntVal(n.Type(), u)
3092 switch n.Type().Size() {
3093 case 1:
3094 return s.constInt8(n.Type(), int8(i))
3095 case 2:
3096 return s.constInt16(n.Type(), int16(i))
3097 case 4:
3098 return s.constInt32(n.Type(), int32(i))
3099 case 8:
3100 return s.constInt64(n.Type(), i)
3101 default:
3102 s.Fatalf("bad integer size %d", n.Type().Size())
3103 return nil
3104 }
3105 case constant.String:
3106 i := constant.StringVal(u)
3107 if i == "" {
3108 return s.constEmptyString(n.Type())
3109 }
3110 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3111 case constant.Bool:
3112 return s.constBool(constant.BoolVal(u))
3113 case constant.Float:
3114 f, _ := constant.Float64Val(u)
3115 switch n.Type().Size() {
3116 case 4:
3117 return s.constFloat32(n.Type(), f)
3118 case 8:
3119 return s.constFloat64(n.Type(), f)
3120 default:
3121 s.Fatalf("bad float size %d", n.Type().Size())
3122 return nil
3123 }
3124 case constant.Complex:
3125 re, _ := constant.Float64Val(constant.Real(u))
3126 im, _ := constant.Float64Val(constant.Imag(u))
3127 switch n.Type().Size() {
3128 case 8:
3129 pt := types.Types[types.TFLOAT32]
3130 return s.newValue2(ssa.OpComplexMake, n.Type(),
3131 s.constFloat32(pt, re),
3132 s.constFloat32(pt, im))
3133 case 16:
3134 pt := types.Types[types.TFLOAT64]
3135 return s.newValue2(ssa.OpComplexMake, n.Type(),
3136 s.constFloat64(pt, re),
3137 s.constFloat64(pt, im))
3138 default:
3139 s.Fatalf("bad complex size %d", n.Type().Size())
3140 return nil
3141 }
3142 default:
3143 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3144 return nil
3145 }
3146 case ir.OCONVNOP:
3147 n := n.(*ir.ConvExpr)
3148 to := n.Type()
3149 from := n.X.Type()
3150
3151
3152
3153 x := s.expr(n.X)
3154 if to == from {
3155 return x
3156 }
3157
3158
3159
3160
3161
3162 if to.IsPtrShaped() != from.IsPtrShaped() {
3163 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3164 }
3165
3166 v := s.newValue1(ssa.OpCopy, to, x)
3167
3168
3169 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3170 return v
3171 }
3172
3173
3174 if from.Kind() == to.Kind() {
3175 return v
3176 }
3177
3178
3179 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3180 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3181 s.checkPtrAlignment(n, v, nil)
3182 }
3183 return v
3184 }
3185
3186
3187 mt := types.NewPtr(reflectdata.MapType())
3188 if to.Kind() == types.TMAP && from == mt {
3189 return v
3190 }
3191
3192 types.CalcSize(from)
3193 types.CalcSize(to)
3194 if from.Size() != to.Size() {
3195 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3196 return nil
3197 }
3198 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3199 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3200 return nil
3201 }
3202
3203 if base.Flag.Cfg.Instrumenting {
3204
3205
3206
3207 return v
3208 }
3209
3210 if etypesign(from.Kind()) == 0 {
3211 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3212 return nil
3213 }
3214
3215
3216 return v
3217
3218 case ir.OCONV:
3219 n := n.(*ir.ConvExpr)
3220 x := s.expr(n.X)
3221 return s.conv(n, x, n.X.Type(), n.Type())
3222
3223 case ir.ODOTTYPE:
3224 n := n.(*ir.TypeAssertExpr)
3225 res, _ := s.dottype(n, false)
3226 return res
3227
3228 case ir.ODYNAMICDOTTYPE:
3229 n := n.(*ir.DynamicTypeAssertExpr)
3230 res, _ := s.dynamicDottype(n, false)
3231 return res
3232
3233
3234 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3235 n := n.(*ir.BinaryExpr)
3236 a := s.expr(n.X)
3237 b := s.expr(n.Y)
3238 if n.X.Type().IsComplex() {
3239 pt := types.FloatForComplex(n.X.Type())
3240 op := s.ssaOp(ir.OEQ, pt)
3241 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3242 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3243 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3244 switch n.Op() {
3245 case ir.OEQ:
3246 return c
3247 case ir.ONE:
3248 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3249 default:
3250 s.Fatalf("ordered complex compare %v", n.Op())
3251 }
3252 }
3253
3254
3255 op := n.Op()
3256 switch op {
3257 case ir.OGE:
3258 op, a, b = ir.OLE, b, a
3259 case ir.OGT:
3260 op, a, b = ir.OLT, b, a
3261 }
3262 if n.X.Type().IsFloat() {
3263
3264 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3265 }
3266
3267 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3268 case ir.OMUL:
3269 n := n.(*ir.BinaryExpr)
3270 a := s.expr(n.X)
3271 b := s.expr(n.Y)
3272 if n.Type().IsComplex() {
3273 mulop := ssa.OpMul64F
3274 addop := ssa.OpAdd64F
3275 subop := ssa.OpSub64F
3276 pt := types.FloatForComplex(n.Type())
3277 wt := types.Types[types.TFLOAT64]
3278
3279 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3280 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3281 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3282 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3283
3284 if pt != wt {
3285 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3286 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3287 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3288 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3289 }
3290
3291 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3292 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3293
3294 if pt != wt {
3295 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3296 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3297 }
3298
3299 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3300 }
3301
3302 if n.Type().IsFloat() {
3303 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3304 }
3305
3306 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3307
3308 case ir.ODIV:
3309 n := n.(*ir.BinaryExpr)
3310 a := s.expr(n.X)
3311 b := s.expr(n.Y)
3312 if n.Type().IsComplex() {
3313
3314
3315
3316 mulop := ssa.OpMul64F
3317 addop := ssa.OpAdd64F
3318 subop := ssa.OpSub64F
3319 divop := ssa.OpDiv64F
3320 pt := types.FloatForComplex(n.Type())
3321 wt := types.Types[types.TFLOAT64]
3322
3323 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3324 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3325 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3326 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3327
3328 if pt != wt {
3329 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3330 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3331 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3332 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3333 }
3334
3335 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3336 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3337 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3338
3339
3340
3341
3342
3343 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3344 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3345
3346 if pt != wt {
3347 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3348 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3349 }
3350 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3351 }
3352 if n.Type().IsFloat() {
3353 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3354 }
3355 return s.intDivide(n, a, b)
3356 case ir.OMOD:
3357 n := n.(*ir.BinaryExpr)
3358 a := s.expr(n.X)
3359 b := s.expr(n.Y)
3360 return s.intDivide(n, a, b)
3361 case ir.OADD, ir.OSUB:
3362 n := n.(*ir.BinaryExpr)
3363 a := s.expr(n.X)
3364 b := s.expr(n.Y)
3365 if n.Type().IsComplex() {
3366 pt := types.FloatForComplex(n.Type())
3367 op := s.ssaOp(n.Op(), pt)
3368 return s.newValue2(ssa.OpComplexMake, n.Type(),
3369 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3370 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3371 }
3372 if n.Type().IsFloat() {
3373 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3374 }
3375 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3376 case ir.OAND, ir.OOR, ir.OXOR:
3377 n := n.(*ir.BinaryExpr)
3378 a := s.expr(n.X)
3379 b := s.expr(n.Y)
3380 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3381 case ir.OANDNOT:
3382 n := n.(*ir.BinaryExpr)
3383 a := s.expr(n.X)
3384 b := s.expr(n.Y)
3385 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3386 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3387 case ir.OLSH, ir.ORSH:
3388 n := n.(*ir.BinaryExpr)
3389 a := s.expr(n.X)
3390 b := s.expr(n.Y)
3391 bt := b.Type
3392 if bt.IsSigned() {
3393 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3394 s.check(cmp, ir.Syms.Panicshift)
3395 bt = bt.ToUnsigned()
3396 }
3397 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3398 case ir.OANDAND, ir.OOROR:
3399
3400
3401
3402
3403
3404
3405
3406
3407
3408
3409
3410
3411
3412 n := n.(*ir.LogicalExpr)
3413 el := s.expr(n.X)
3414 s.vars[n] = el
3415
3416 b := s.endBlock()
3417 b.Kind = ssa.BlockIf
3418 b.SetControl(el)
3419
3420
3421
3422
3423
3424 bRight := s.f.NewBlock(ssa.BlockPlain)
3425 bResult := s.f.NewBlock(ssa.BlockPlain)
3426 if n.Op() == ir.OANDAND {
3427 b.AddEdgeTo(bRight)
3428 b.AddEdgeTo(bResult)
3429 } else if n.Op() == ir.OOROR {
3430 b.AddEdgeTo(bResult)
3431 b.AddEdgeTo(bRight)
3432 }
3433
3434 s.startBlock(bRight)
3435 er := s.expr(n.Y)
3436 s.vars[n] = er
3437
3438 b = s.endBlock()
3439 b.AddEdgeTo(bResult)
3440
3441 s.startBlock(bResult)
3442 return s.variable(n, types.Types[types.TBOOL])
3443 case ir.OCOMPLEX:
3444 n := n.(*ir.BinaryExpr)
3445 r := s.expr(n.X)
3446 i := s.expr(n.Y)
3447 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3448
3449
3450 case ir.ONEG:
3451 n := n.(*ir.UnaryExpr)
3452 a := s.expr(n.X)
3453 if n.Type().IsComplex() {
3454 tp := types.FloatForComplex(n.Type())
3455 negop := s.ssaOp(n.Op(), tp)
3456 return s.newValue2(ssa.OpComplexMake, n.Type(),
3457 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3458 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3459 }
3460 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3461 case ir.ONOT, ir.OBITNOT:
3462 n := n.(*ir.UnaryExpr)
3463 a := s.expr(n.X)
3464 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3465 case ir.OIMAG, ir.OREAL:
3466 n := n.(*ir.UnaryExpr)
3467 a := s.expr(n.X)
3468 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3469 case ir.OPLUS:
3470 n := n.(*ir.UnaryExpr)
3471 return s.expr(n.X)
3472
3473 case ir.OADDR:
3474 n := n.(*ir.AddrExpr)
3475 return s.addr(n.X)
3476
3477 case ir.ORESULT:
3478 n := n.(*ir.ResultExpr)
3479 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3480 panic("Expected to see a previous call")
3481 }
3482 which := n.Index
3483 if which == -1 {
3484 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3485 }
3486 return s.resultOfCall(s.prevCall, which, n.Type())
3487
3488 case ir.ODEREF:
3489 n := n.(*ir.StarExpr)
3490 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3491 return s.load(n.Type(), p)
3492
3493 case ir.ODOT:
3494 n := n.(*ir.SelectorExpr)
3495 if n.X.Op() == ir.OSTRUCTLIT {
3496
3497
3498
3499 if !ir.IsZero(n.X) {
3500 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3501 }
3502 return s.zeroVal(n.Type())
3503 }
3504
3505
3506
3507
3508 if ir.IsAddressable(n) && !s.canSSA(n) {
3509 p := s.addr(n)
3510 return s.load(n.Type(), p)
3511 }
3512 v := s.expr(n.X)
3513 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3514
3515 case ir.ODOTPTR:
3516 n := n.(*ir.SelectorExpr)
3517 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3518 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3519 return s.load(n.Type(), p)
3520
3521 case ir.OINDEX:
3522 n := n.(*ir.IndexExpr)
3523 switch {
3524 case n.X.Type().IsString():
3525 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3526
3527
3528
3529 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3530 }
3531 a := s.expr(n.X)
3532 i := s.expr(n.Index)
3533 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3534 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3535 ptrtyp := s.f.Config.Types.BytePtr
3536 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3537 if ir.IsConst(n.Index, constant.Int) {
3538 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3539 } else {
3540 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3541 }
3542 return s.load(types.Types[types.TUINT8], ptr)
3543 case n.X.Type().IsSlice():
3544 p := s.addr(n)
3545 return s.load(n.X.Type().Elem(), p)
3546 case n.X.Type().IsArray():
3547 if ssa.CanSSA(n.X.Type()) {
3548
3549 bound := n.X.Type().NumElem()
3550 a := s.expr(n.X)
3551 i := s.expr(n.Index)
3552 if bound == 0 {
3553
3554
3555 z := s.constInt(types.Types[types.TINT], 0)
3556 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3557
3558
3559 return s.zeroVal(n.Type())
3560 }
3561 len := s.constInt(types.Types[types.TINT], bound)
3562 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3563 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3564 }
3565 p := s.addr(n)
3566 return s.load(n.X.Type().Elem(), p)
3567 default:
3568 s.Fatalf("bad type for index %v", n.X.Type())
3569 return nil
3570 }
3571
3572 case ir.OLEN, ir.OCAP:
3573 n := n.(*ir.UnaryExpr)
3574
3575
3576 a := s.expr(n.X)
3577 t := n.X.Type()
3578 switch {
3579 case t.IsSlice():
3580 op := ssa.OpSliceLen
3581 if n.Op() == ir.OCAP {
3582 op = ssa.OpSliceCap
3583 }
3584 return s.newValue1(op, types.Types[types.TINT], a)
3585 case t.IsString():
3586 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3587 case t.IsMap(), t.IsChan():
3588 return s.referenceTypeBuiltin(n, a)
3589 case t.IsArray():
3590 return s.constInt(types.Types[types.TINT], t.NumElem())
3591 case t.IsPtr() && t.Elem().IsArray():
3592 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3593 default:
3594 s.Fatalf("bad type in len/cap: %v", t)
3595 return nil
3596 }
3597
3598 case ir.OSPTR:
3599 n := n.(*ir.UnaryExpr)
3600 a := s.expr(n.X)
3601 if n.X.Type().IsSlice() {
3602 if n.Bounded() {
3603 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3604 }
3605 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3606 } else {
3607 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3608 }
3609
3610 case ir.OITAB:
3611 n := n.(*ir.UnaryExpr)
3612 a := s.expr(n.X)
3613 return s.newValue1(ssa.OpITab, n.Type(), a)
3614
3615 case ir.OIDATA:
3616 n := n.(*ir.UnaryExpr)
3617 a := s.expr(n.X)
3618 return s.newValue1(ssa.OpIData, n.Type(), a)
3619
3620 case ir.OMAKEFACE:
3621 n := n.(*ir.BinaryExpr)
3622 tab := s.expr(n.X)
3623 data := s.expr(n.Y)
3624 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3625
3626 case ir.OSLICEHEADER:
3627 n := n.(*ir.SliceHeaderExpr)
3628 p := s.expr(n.Ptr)
3629 l := s.expr(n.Len)
3630 c := s.expr(n.Cap)
3631 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3632
3633 case ir.OSTRINGHEADER:
3634 n := n.(*ir.StringHeaderExpr)
3635 p := s.expr(n.Ptr)
3636 l := s.expr(n.Len)
3637 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3638
3639 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3640 n := n.(*ir.SliceExpr)
3641 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3642 v := s.exprCheckPtr(n.X, !check)
3643 var i, j, k *ssa.Value
3644 if n.Low != nil {
3645 i = s.expr(n.Low)
3646 }
3647 if n.High != nil {
3648 j = s.expr(n.High)
3649 }
3650 if n.Max != nil {
3651 k = s.expr(n.Max)
3652 }
3653 p, l, c := s.slice(v, i, j, k, n.Bounded())
3654 if check {
3655
3656 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3657 }
3658 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3659
3660 case ir.OSLICESTR:
3661 n := n.(*ir.SliceExpr)
3662 v := s.expr(n.X)
3663 var i, j *ssa.Value
3664 if n.Low != nil {
3665 i = s.expr(n.Low)
3666 }
3667 if n.High != nil {
3668 j = s.expr(n.High)
3669 }
3670 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3671 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3672
3673 case ir.OSLICE2ARRPTR:
3674
3675
3676
3677
3678 n := n.(*ir.ConvExpr)
3679 v := s.expr(n.X)
3680 nelem := n.Type().Elem().NumElem()
3681 arrlen := s.constInt(types.Types[types.TINT], nelem)
3682 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3683 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3684 op := ssa.OpSlicePtr
3685 if nelem == 0 {
3686 op = ssa.OpSlicePtrUnchecked
3687 }
3688 return s.newValue1(op, n.Type(), v)
3689
3690 case ir.OCALLFUNC:
3691 n := n.(*ir.CallExpr)
3692 if ir.IsIntrinsicCall(n) {
3693 return s.intrinsicCall(n)
3694 }
3695 fallthrough
3696
3697 case ir.OCALLINTER:
3698 n := n.(*ir.CallExpr)
3699 return s.callResult(n, callNormal)
3700
3701 case ir.OGETG:
3702 n := n.(*ir.CallExpr)
3703 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3704
3705 case ir.OGETCALLERSP:
3706 n := n.(*ir.CallExpr)
3707 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3708
3709 case ir.OAPPEND:
3710 return s.append(n.(*ir.CallExpr), false)
3711
3712 case ir.OMOVE2HEAP:
3713 return s.move2heap(n.(*ir.MoveToHeapExpr))
3714
3715 case ir.OMIN, ir.OMAX:
3716 return s.minMax(n.(*ir.CallExpr))
3717
3718 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3719
3720
3721
3722 n := n.(*ir.CompLitExpr)
3723 if !ir.IsZero(n) {
3724 s.Fatalf("literal with nonzero value in SSA: %v", n)
3725 }
3726 return s.zeroVal(n.Type())
3727
3728 case ir.ONEW:
3729 n := n.(*ir.UnaryExpr)
3730 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3731 return s.newObjectNonSpecialized(n.Type().Elem(), s.expr(x.RType))
3732 }
3733 return s.newObject(n.Type().Elem())
3734
3735 case ir.OUNSAFEADD:
3736 n := n.(*ir.BinaryExpr)
3737 ptr := s.expr(n.X)
3738 len := s.expr(n.Y)
3739
3740
3741
3742 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3743
3744 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3745
3746 default:
3747 s.Fatalf("unhandled expr %v", n.Op())
3748 return nil
3749 }
3750 }
3751
3752 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3753 aux := c.Aux.(*ssa.AuxCall)
3754 pa := aux.ParamAssignmentForResult(which)
3755
3756
3757 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3758 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3759 return s.rawLoad(t, addr)
3760 }
3761 return s.newValue1I(ssa.OpSelectN, t, which, c)
3762 }
3763
3764 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3765 aux := c.Aux.(*ssa.AuxCall)
3766 pa := aux.ParamAssignmentForResult(which)
3767 if len(pa.Registers) == 0 {
3768 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3769 }
3770 _, addr := s.temp(c.Pos, t)
3771 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3772 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3773 return addr
3774 }
3775
3776
3777 func (s *state) getBackingStoreInfoForAppend(n *ir.CallExpr) *backingStoreInfo {
3778 if n.Esc() != ir.EscNone {
3779 return nil
3780 }
3781 return s.getBackingStoreInfo(n.Args[0])
3782 }
// getBackingStoreInfo returns (allocating on first use, and caching per
// expression) the info for a fixed-size stack backing store that can hold
// the contents of the slice expression n. It returns nil when no stack
// store may be used: element size is zero or larger than the threshold,
// optimizations are off (-N), or the variable-make debug hash says no.
func (s *state) getBackingStoreInfo(n ir.Node) *backingStoreInfo {
	t := n.Type()
	et := t.Elem()
	// Only element sizes in (0, VariableMakeThreshold] qualify: zero-size
	// elements never need a store, and oversized elements won't fit.
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	if et.Size() == 0 || et.Size() > maxStackSize {
		return nil
	}
	// No stack backing stores with optimizations disabled.
	if base.Flag.N != 0 {
		return nil
	}
	// Debug hash allows bisecting this optimization by source position.
	if !base.VariableMakeHash.MatchPos(n.Pos(), nil) {
		return nil
	}
	// Reuse the store already created for this expression, if any.
	i := s.backingStores[n]
	if i != nil {
		return i
	}

	// Build the backing store type: a K-element array of et, preceded by
	// a zero-length [0]uintptr field.
	// NOTE(review): the empty uintptr array presumably exists only to raise
	// the struct's alignment to pointer alignment — confirm.
	K := maxStackSize / et.Size()
	KT := types.NewArray(et, K)
	KT.SetNoalg(true)
	types.CalcArraySize(KT)

	align := types.NewArray(types.Types[types.TUINTPTR], 0)
	types.CalcArraySize(align)
	storeTyp := types.NewStruct([]*types.Field{
		{Sym: types.BlankSym, Type: align},
		{Sym: types.BlankSym, Type: KT},
	})
	storeTyp.SetNoalg(true)
	types.CalcStructSize(storeTyp)

	// The backing store itself. Its address is handed out, so mark it
	// address-taken.
	backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
	backingStore.SetAddrtaken(true)

	// used tracks at run time whether the store has already been handed
	// out. It must start false in the entry block.
	used := typecheck.TempAt(n.Pos(), s.curfn, types.Types[types.TBOOL])
	if s.curBlock == s.f.Entry {
		s.vars[used] = s.constBool(false)
	} else {
		// We're not currently in the entry block; initialize the flag
		// in the entry block's definitions directly.
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)
	}

	// Cache the info so later appends/moves for n share the same store.
	if s.backingStores == nil {
		s.backingStores = map[ir.Node]*backingStoreInfo{}
	}
	i = &backingStoreInfo{K: K, store: backingStore, used: used, usedStatic: false}
	s.backingStores[n] = i
	return i
}
3837
3838
3839
3840
3841
3842
3843
3844
3845
// append lowers an OAPPEND expression to SSA.
// If inplace is false, it returns the resulting slice as a new ssa.Value.
// If inplace is true, it stores the result back through the address of the
// slice being appended to (updating cap/ptr only on the grow path and len
// always) and returns nil.
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	// Element type and pointer-to-element type of the result slice.
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to. For the in-place form we need
	// its address so the updated header can be written back.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// grow handles the "not enough capacity" path; assign is where both
	// paths merge to store the new elements.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the input slice into ptr/len/cap.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newLen = oldLen + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow if cap < newLen (unsigned comparison).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Seed the phi inputs for the fast (no-grow) path.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// If the append result does not escape, we may be able to grow into a
	// fixed-size stack backing store instead of calling the runtime.
	var info *backingStoreInfo
	if !inplace {
		info = s.getBackingStoreInfoForAppend(n)
	}

	if !inplace && info != nil && !n.UseBuf && !info.usedStatic {
		// Emit (at most once per backing store) the static fast path:
		// if the needed length fits in the K-element stack buffer, the
		// buffer has not been handed out before, and we are growing from
		// length 0 (so nothing needs copying), use the buffer directly.
		info.usedStatic = true

		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)
		tInt := types.Types[types.TINT]
		tBool := types.Types[types.TBOOL]

		// Test: does the new length fit in the K-element buffer?
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, info.K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Test: is the buffer still unused?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(info.used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Test: are we appending to an empty slice? Otherwise the old
		// contents would have to be copied into the buffer first.
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// All tests passed: use the stack backing store.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			// Declare the start of the buffer's lifetime so liveness
			// treats earlier contents as dead.
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, info.store, s.mem())
		}
		addr := s.addr(info.store)
		s.zero(info.store.Type(), addr)

		// Result slice points at the buffer, with capacity K.
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, info.K)

		// Mark the buffer as consumed so it is never handed out twice.
		s.assign(info.used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// The runtime grow call now lives in growSlice.
		grow = growSlice
	}

	// Slow path: call the runtime to grow the slice.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	var r []*ssa.Value
	if info != nil && n.UseBuf {
		if et.HasPointers() && !info.usedStatic {
			// The buffer is only ever passed to the runtime here. Zero it
			// once in the entry block.
			// NOTE(review): presumably this keeps the GC from scanning
			// stale pointers in the uninitialized buffer — confirm.
			mem := s.defvars[s.f.Entry.ID][memVar]
			mem = s.f.Entry.NewValue1A(n.Pos(), ssa.OpVarDef, types.TypeMem, info.store, mem)
			addr := s.f.Entry.NewValue2A(n.Pos(), ssa.OpLocalAddr, types.NewPtr(info.store.Type()), info.store, s.sp, mem)
			mem = s.f.Entry.NewValue2I(n.Pos(), ssa.OpZero, types.TypeMem, info.store.Type().Size(), addr, mem)
			mem.Aux = info.store.Type()
			s.defvars[s.f.Entry.ID][memVar] = mem
			info.usedStatic = true
		}
		fn := ir.Syms.GrowsliceBuf
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// Result is known not to alias anything; use the non-aliasing
			// variant (freegc experiment only).
			fn = ir.Syms.GrowsliceBufNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr, s.addr(info.store), s.constInt(types.Types[types.TINT], info.K))
	} else {
		fn := ir.Syms.Growslice
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// Non-aliasing variant, as above (freegc experiment only).
			fn = ir.Syms.GrowsliceNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)
	}

	// Decompose the grown slice returned by the runtime.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to overwrite the whole slice
				// header variable.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Write the new cap and ptr fields back into the slice header.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Merge point: store the appended elements.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Update the length field of the slice header in place.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments being appended.
	type argRec struct {
		// v is the value (store == true) or the address (store == false)
		// of the element being appended.
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the new elements starting at index oldLen = newLen - nargs.
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// Drop the fake variables now that their phis have been materialized.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// In-place append has already stored everything through addr.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
4159
// move2heap lowers an OMOVE2HEAP node: if the backing store of the slice
// n.Slice currently points into this function's stack frame, copy it to the
// heap via a runtime call and return the updated slice; otherwise return
// the slice unchanged.
func (s *state) move2heap(n *ir.MoveToHeapExpr) *ssa.Value {
	slice := s.expr(n.Slice)
	et := slice.Type.Elem()
	pt := types.NewPtr(et)

	// If no stack backing store is possible for this expression
	// (see getBackingStoreInfo's filters), the data cannot be in the
	// frame, so there is nothing to move.
	info := s.getBackingStoreInfo(n)
	if info == nil {
		return slice
	}

	// Decompose the slice into ptr/len/cap.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	moveBlock := s.f.NewBlock(ssa.BlockPlain)
	mergeBlock := s.f.NewBlock(ssa.BlockPlain)

	// Seed the phi inputs for the "no move needed" path.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c

	// Test whether the data pointer lies within the current frame:
	// (p - SP) < (callerSP - SP), unsigned, so pointers below SP also
	// fail the test.
	sub := ssa.OpSub64
	less := ssa.OpLess64U
	if s.config.PtrSize == 4 {
		sub = ssa.OpSub32
		less = ssa.OpLess32U
	}
	callerSP := s.newValue1(ssa.OpGetCallerSP, types.Types[types.TUINTPTR], s.mem())
	frameSize := s.newValue2(sub, types.Types[types.TUINTPTR], callerSP, s.sp)
	pInt := s.newValue2(ssa.OpConvert, types.Types[types.TUINTPTR], p, s.mem())
	off := s.newValue2(sub, types.Types[types.TUINTPTR], pInt, s.sp)
	cond := s.newValue2(less, types.Types[types.TBOOL], off, frameSize)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cond)
	b.AddEdgeTo(moveBlock)
	b.AddEdgeTo(mergeBlock)

	// Backing store is on the stack: call the runtime to copy it out.
	s.startBlock(moveBlock)
	var newSlice *ssa.Value
	if et.HasPointers() {
		// Pointer-containing elements need the element type descriptor
		// so the runtime can allocate scannable memory.
		typ := s.expr(n.RType)
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSlice, true, []*types.Type{slice.Type}, typ, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCap, true, []*types.Type{slice.Type}, typ, p, l)[0]
		}
	} else {
		// Pointer-free elements only need the element size.
		elemSize := s.constInt(types.Types[types.TUINTPTR], et.Size())
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoScan, true, []*types.Type{slice.Type}, elemSize, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCapNoScan, true, []*types.Type{slice.Type}, elemSize, p, l)[0]
		}
	}

	s.vars[ptrVar] = s.newValue1(ssa.OpSlicePtr, pt, newSlice)
	s.vars[lenVar] = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], newSlice)
	s.vars[capVar] = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], newSlice)
	b = s.endBlock()
	b.AddEdgeTo(mergeBlock)

	// Merge both paths and rebuild the slice value.
	s.startBlock(mergeBlock)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	c = s.variable(capVar, types.Types[types.TINT])
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	delete(s.vars, capVar)
	return s.newValue3(ssa.OpSliceMake, slice.Type, p, l, c)
}
4248
4249
// minMax lowers a call to the builtin min or max, folding the arguments
// pairwise left to right: min(a, b, c) == min(min(a, b), c).
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// fold folds the binary op over all of the call's arguments.
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings don't use the generic compare-and-select
		// lowering at the bottom of this function (floats because of
		// NaN/signed-zero semantics, strings because comparison is not a
		// single SSA op). Use a dedicated machine op when the target has
		// one, otherwise call a runtime helper.
		if typ.IsFloat() {
			// Architectures with native float min/max instructions.
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64, sys.S390X:
				hasIntrinsic = true
			case sys.PPC64:
				// Only POWER9 and later qualify.
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// Runtime fallback: fmin32/fmax32/fmin64/fmax64/strmin/strmax.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// riscv64 profile 22 and later have native 64-bit integer
		// min/max instructions.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic lowering: compare and select.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
4363
4364
4365 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4366
4367
4368 ternaryVar := ssaMarker("ternary")
4369
4370 bThen := s.f.NewBlock(ssa.BlockPlain)
4371 bElse := s.f.NewBlock(ssa.BlockPlain)
4372 bEnd := s.f.NewBlock(ssa.BlockPlain)
4373
4374 b := s.endBlock()
4375 b.Kind = ssa.BlockIf
4376 b.SetControl(cond)
4377 b.AddEdgeTo(bThen)
4378 b.AddEdgeTo(bElse)
4379
4380 s.startBlock(bThen)
4381 s.vars[ternaryVar] = x
4382 s.endBlock().AddEdgeTo(bEnd)
4383
4384 s.startBlock(bElse)
4385 s.vars[ternaryVar] = y
4386 s.endBlock().AddEdgeTo(bEnd)
4387
4388 s.startBlock(bEnd)
4389 r := s.variable(ternaryVar, x.Type)
4390 delete(s.vars, ternaryVar)
4391 return r
4392 }
4393
4394
4395
4396
4397
// condBranch evaluates the boolean expression cond and branches to yes if
// it is true and to no if it is false. It decomposes &&, ||, ! and no-op
// conversions into control flow rather than materializing a boolean.
// likely is the branch-prediction hint: positive means "likely true",
// negative "likely false", zero no hint.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// a && b: branch on a first; its false edge short-circuits to no.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// A likely-true hint propagates to both operands. A likely-false
		// hint tells us nothing about which operand is the unlikely one,
		// so drop it for the first branch (max(likely, 0)).
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.OOROR:
		// a || b: branch on a first; its true edge short-circuits to yes.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// Mirror of the OANDAND case: a likely-false hint propagates to
		// both operands, a likely-true hint is dropped for the first
		// branch (min(likely, 0)).
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.ONOT:
		// !a: swap the targets and invert the hint.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// No-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate cond to a value and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely)
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
4444
// skipMask is a bitmask of slice-header components whose store may be
// omitted during an assignment (see state.assign and
// state.assignWhichMayOverlap).
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // pointer field need not be stored
	skipLen                      // length field need not be stored
	skipCap                      // capacity field need not be stored
)
4452
4453
4454
4455
4456
4457
4458
// assign does left = right. right has already been evaluated to SSA;
// left has not. If deref is true, it does left = *right instead (right is
// then an address). skip marks slice-header components whose store may be
// omitted. The source and destination are assumed not to overlap; use
// assignWhichMayOverlap directly when they might.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap does left = right, or left = *right when deref is
// true (right == nil then means left = zero value). skip marks slice-header
// components whose store may be omitted. mayOverlap reports whether the
// destination and *right might partially overlap in memory.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	// Assignments to the blank identifier are discarded.
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an SSA-able struct value: build a
			// fresh struct with the new value for this field and the old
			// values for every other field, then assign the whole struct
			// to the base expression.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Current value of the whole struct.
			old := s.expr(left.X)

			// New struct under construction.
			new := s.newValue0(ssa.OpStructMake, t)

			// Copy every field, substituting right at idx.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct to the base.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			// Assigning to an element of an SSA-able array: a[i] = v.
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index)
			if n == 0 {
				// A zero-length array: any index is out of bounds, so
				// emit an always-failing bounds check (ignoring the
				// actual index value) and stop.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// n == 1: bounds-check i against length 1, then rewrite as
			// a = [1]T{v}.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain SSA-able variable: record the new value.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire stack variable, emit OpVarDef
	// so liveness knows the variable is being redefined (skip == 0 means
	// the whole value is written).
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// left is not SSA-able: compute its address and store through it.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Storing pointers into reflect.SliceHeader/StringHeader.Data is
		// permitted even though the field has type uintptr (#19168).
		// Retype as unsafe.Pointer so the writebarrier pass inserts a
		// write barrier.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Memory-to-memory move (or zeroing when right is nil).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Ordinary store of an SSA value.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4570
4571
4572 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4573 switch {
4574 case t.IsInteger():
4575 switch t.Size() {
4576 case 1:
4577 return s.constInt8(t, 0)
4578 case 2:
4579 return s.constInt16(t, 0)
4580 case 4:
4581 return s.constInt32(t, 0)
4582 case 8:
4583 return s.constInt64(t, 0)
4584 default:
4585 s.Fatalf("bad sized integer type %v", t)
4586 }
4587 case t.IsFloat():
4588 switch t.Size() {
4589 case 4:
4590 return s.constFloat32(t, 0)
4591 case 8:
4592 return s.constFloat64(t, 0)
4593 default:
4594 s.Fatalf("bad sized float type %v", t)
4595 }
4596 case t.IsComplex():
4597 switch t.Size() {
4598 case 8:
4599 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4600 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4601 case 16:
4602 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4603 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4604 default:
4605 s.Fatalf("bad sized complex type %v", t)
4606 }
4607
4608 case t.IsString():
4609 return s.constEmptyString(t)
4610 case t.IsPtrShaped():
4611 return s.constNil(t)
4612 case t.IsBoolean():
4613 return s.constBool(false)
4614 case t.IsInterface():
4615 return s.constInterface(t)
4616 case t.IsSlice():
4617 return s.constSlice(t)
4618 case isStructNotSIMD(t):
4619 n := t.NumFields()
4620 v := s.entryNewValue0(ssa.OpStructMake, t)
4621 for i := 0; i < n; i++ {
4622 v.AddArg(s.zeroVal(t.FieldType(i)))
4623 }
4624 return v
4625 case t.IsArray():
4626 switch t.NumElem() {
4627 case 0:
4628 return s.entryNewValue0(ssa.OpArrayMake0, t)
4629 case 1:
4630 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4631 }
4632 case t.IsSIMD():
4633 return s.newValue0(ssa.OpZeroSIMD, t)
4634 }
4635 s.Fatalf("zero for type %v not implemented", t)
4636 return nil
4637 }
4638
// callKind distinguishes how a call expression is emitted.
type callKind int8

const (
	callNormal callKind = iota // ordinary call
	callDefer                  // deferred call
	callDeferStack             // deferred call with a stack-allocated defer record
	callGo                     // goroutine start (go statement)
	callTail                   // tail call
)
4648
// sfRtCallDef describes a runtime soft-float helper: the function symbol
// to call and the Go kind of its result.
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime helper symbol
	rtype types.Kind // result kind of the helper
}

// softFloatOps maps floating-point SSA ops to their soft-float runtime
// implementations. Populated by softfloatInit when Arch.SoftFloat is set
// (see InitConfig).
var softFloatOps map[ssa.Op]sfRtCallDef
4655
// softfloatInit populates softFloatOps, mapping floating-point SSA ops to
// the runtime soft-float helpers used on targets without hardware floats.
//
// Several entries are deliberately "mismatched" and are adjusted by the
// caller of the helper: Sub maps to the fadd helpers, Neq maps to the feq
// helpers, and Less/Leq map to fgt/fge. NOTE(review): presumably sfcall
// negates the second operand, inverts the result, or swaps the arguments
// respectively — confirm against sfcall before relying on this.
func softfloatInit() {
	softFloatOps = map[ssa.Op]sfRtCallDef{
		// Arithmetic. Sub reuses the add helpers (see note above).
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		// Comparisons. Neq reuses feq; Less/Leq reuse fgt/fge (see note).
		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		// Conversions between floats and integers, and between widths.
		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4693
4694
4695
// sfcall, when op is a soft-float op (present in softFloatOps),
// replaces it with a call to the corresponding runtime helper and
// returns (result, true). Otherwise it returns (nil, false) and the
// caller must lower the op itself.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i returns the integer type carrying the bit pattern of a
	// float type; soft-float helpers operate on raw bits.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// a < b is b > a; a <= b is b >= a. The table maps these
			// ops to fgt/fge, so swap the operands.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// a - b is a + (-b). The table maps Sub to fadd, so
			// negate the second operand.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Reinterpret float arguments as their integer bit patterns
		// for the runtime call.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		// Convert the integer result bits back to float if needed.
		if rt.IsFloat() {
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		// Neq is mapped to feq in the table; invert the result.
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4739
4740
4741 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4742 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4743 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4744 return p0, p1
4745 }
4746
4747
4748 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4749 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4750 if ssa.IntrinsicsDebug > 0 {
4751 x := v
4752 if x == nil {
4753 x = s.mem()
4754 }
4755 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4756 x = x.Args[0]
4757 }
4758 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4759 }
4760 return v
4761 }
4762
4763
4764 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4765 args := make([]*ssa.Value, len(n.Args))
4766 for i, n := range n.Args {
4767 args[i] = s.expr(n)
4768 }
4769 return args
4770 }
4771
4772
4773
4774
4775
4776
4777
// openDeferRecord records an open-coded defer: it evaluates and saves
// the deferred function value and sets the defer's bit in deferBits.
// Open-coded defers only support calls with no arguments and no
// results; anything else is a fatal compiler error.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun

	// Save the function value to a named stack slot so the exit code
	// (and the runtime, during panic) can find it; openDeferSave
	// returns the slot's address.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	// For a direct call to a static function (ONAME/PFUNC) the saved
	// closure value is not needed to make the call, so don't record it.
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Mark this defer as active: set bit 'index' in deferBits, both in
	// the SSA variable and in its stack shadow slot.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4806
4807
4808
4809
4810
4811
// openDeferSave stores the SSA-able value val (the function value of
// an open-coded defer) into a fresh named stack slot of type t and
// returns the address of that slot. The slot is marked as an
// open-defer slot, kept live, and zeroed on entry so it can be safely
// examined before this store has executed.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// Stash the defer's index in FrameOffset; presumably used later to
	// order/identify the open-defer slots — TODO(review) confirm.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	if s.curBlock.ID != s.f.Entry.ID {
		// Not in the entry block: thread the VarDef/VarLive markers and
		// the address computation through the entry block's memory
		// state instead, so the slot is defined for the whole function.
		// NOTE(review): t.HasPointers() was already checked fatal above,
		// so this inner condition is always true here — confirm intent.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Already in the entry block: emit at the current position,
		// not marked as statements (false).
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot must be zero at function entry, since it may be
	// observed (e.g. by stack scanning or the defer exit path) before
	// the store below has executed.
	temp.SetNeedzero(true)

	// Store the deferred function value into its slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
4856
4857
4858
4859
4860
// openDeferExit generates SSA for calling the open-coded defers at a
// function exit. Defers are run in reverse (LIFO) order of their
// recording, each one guarded by a test of its bit in deferBits.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Remember this exit block and the defer count it covers so a
	// later exit in the same state can reuse it.
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain) // runs the deferred call
		bEnd := s.f.NewBlock(ssa.BlockPlain)  // continuation

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])

		// Skip the call if this defer's bit is clear:
		// if deferBits & (1<<i) == 0 goto bEnd.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit BEFORE making the call (in both the
		// stack shadow and the SSA variable), so the defer cannot run
		// a second time if something later re-enters the exit path.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		s.vars[deferBitsVar] = maskedval

		// Generate the call. Open-coded defers have no arguments and
		// no results (enforced by openDeferRecord), so the only call
		// inputs are the code/closure pointers and memory.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Indirect call: load the saved closure from its stack
			// slot and call through it.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Direct call to a static function.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the closure slot considered live across the call.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4930
// callResult converts call n (of kind k) to SSA and returns the value
// of its first result (nil if the call has no results or k is not a
// normal call).
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4934
// callAddr converts call n (of kind k) to SSA and returns the address
// of its first result rather than the result value itself.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4938
4939
4940
// call lowers a call expression n to SSA. k selects how the call is
// made (normal, defer, stack-allocated defer, go, or tail call). If
// returnResultAddr is true, call returns the address of the first
// result instead of its value. deferExtra, when non-nil, is an extra
// argument passed to runtime.deferprocat.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target of a static call
	var closure *ssa.Value   // func value to call through (dynamic calls)
	var codeptr *ssa.Value   // code pointer to call (interface / closure calls)
	var dextra *ssa.Value    // evaluated deferExtra
	var rcvr *ssa.Value      // receiver (interface calls)
	fn := n.Fun
	var ACArgs []*types.Type  // AuxCall argument types
	var ACResults []*types.Type // AuxCall result types
	var callArgs []*ssa.Value // late-expansion call inputs

	callABI := s.f.ABIDefault

	// go/defer calls must have been rewritten to take no arguments and
	// return no results; see the open-defer machinery.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			// Direct static call.
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// A static call may target a non-ABIInternal function;
				// pick the ABI recorded on the callee. fn.Func may be
				// nil for some functions; those keep ABIDefault.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// Without the RegabiArgs experiment, the
				// registerparams pragma forces the register ABI.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			// Calls to runtime.deferrangefunc get defer-style block
			// wiring below even though k is callNormal.
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		// Indirect call through a func value.
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// A deferred nil function must panic when the deferred call
			// runs, not at the defer statement itself, so skip the
			// check for defers.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		// Interface method call: fetch the code pointer out of the
		// itab and the receiver out of the interface's data word.
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* fp of method type, not value */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a _defer record on the stack and store the deferred
		// function into its fn field.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // deferprocStack takes exactly one pointer arg
	} else {
		// Prepend the extra arguments that go/defer calls pass to the
		// runtime, then the receiver, then the regular arguments.
		argStart := base.Ctxt.Arch.FixedFrameSize
		if k != callNormal && k != callTail {
			// The closure is the argument to newproc/deferproc.
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface-typed argument for deferprocat
				// (two words).
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Receiver for interface calls.
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Regular arguments.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// Split the entry block if there are open defers, because
		// later calls to openDeferSave may cause a mismatch between
		// the mem for an OpDereference and the call site which uses
		// it — TODO(review): confirm this is the #49282 fix.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Pick the call target and op.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				// Extra argument is passed to deferprocat instead.
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// Indirect call through a func value: rawLoad the code
			// pointer out of the closure (a plain load; the closure
			// pointer was already checked where needed).
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call; the receiver is already the first
			// element of callArgs.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			// Direct static call.
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call reuses the caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // call ops carry the callee's arg size
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)

	// Insert VarLive marks for variables that must stay live across
	// the call (each must be addrtaken and stack-allocated).
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Defer calls end the block with BlockDefer, which has two
	// successors: the normal continuation and the (shared) deferreturn
	// exit used when a panic is being recovered.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn // shared exit block for all defers in the function
		if r == nil {
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // recover edge to the exit code
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// No return value to produce.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
5174
5175
5176
// maybeNilCheckClosure emits a nil check of closure on platforms that
// need one. Note the operator precedence: && binds tighter than ||,
// so Wasm always checks, while AIX checks only when k != callGo
// (presumably the runtime produces the "go of nil func value" error
// itself in that case — TODO confirm against the runtime).
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		// On AIX, the closure needs to be verified as fn can be nil,
		// except in the case of a go statement.
		s.nilCheck(closure)
	}
}
5184
5185
5186
// getClosureAndRcvr returns, for an interface method call fn, the
// "closure" (the address of the method's entry in the itab's Fun
// array) and the receiver (the interface's data word). The itab
// pointer is nil-checked along the way.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	s.nilCheck(itab)
	// Offset of this method's slot within the itab's Fun array.
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
5196
5197
5198
5199 func etypesign(e types.Kind) int8 {
5200 switch e {
5201 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5202 return -1
5203 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5204 return +1
5205 }
5206 return 0
5207 }
5208
5209
5210
// addr converts the address of the expression n to SSA and returns the
// resulting SSA value. It is a fatal error to take the address of an
// expression that canSSA says should be an SSA value.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset computes the address offset bytes into lsym.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// The offset is applied as a separate OffPtr.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot; address was precomputed in decladdrs
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO, but always marked as a statement boundary.
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// result slot of the most recent call (s.prevCall)
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			// &s[i]: bounds-check i against the slice length.
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			// &a[i]: the length is a compile-time constant.
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure the address has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		// Taking the address of a type assertion's result: reuse the
		// address the assertion's load came from, provided memory has
		// not moved on since.
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5322
5323
5324
5325 func (s *state) canSSA(n ir.Node) bool {
5326 if base.Flag.N != 0 {
5327 return false
5328 }
5329 for {
5330 nn := n
5331 if nn.Op() == ir.ODOT {
5332 nn := nn.(*ir.SelectorExpr)
5333 n = nn.X
5334 continue
5335 }
5336 if nn.Op() == ir.OINDEX {
5337 nn := nn.(*ir.IndexExpr)
5338 if nn.X.Type().IsArray() {
5339 n = nn.X
5340 continue
5341 }
5342 }
5343 break
5344 }
5345 if n.Op() != ir.ONAME {
5346 return false
5347 }
5348 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5349 }
5350
5351 func (s *state) canSSAName(name *ir.Name) bool {
5352 if name.Addrtaken() || !name.OnStack() {
5353 return false
5354 }
5355 switch name.Class {
5356 case ir.PPARAMOUT:
5357 if s.hasdefer {
5358
5359
5360
5361
5362
5363 return false
5364 }
5365 if s.cgoUnsafeArgs {
5366
5367
5368 return false
5369 }
5370 }
5371 return true
5372
5373 }
5374
5375
5376 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5377 p := s.expr(n)
5378 if bounded || n.NonNil() {
5379 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5380 s.f.Warnl(lineno, "removed nil check")
5381 }
5382 return p
5383 }
5384 p = s.nilCheck(p)
5385 return p
5386 }
5387
5388
5389
5390
5391
5392
5393 func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
5394 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
5395 return ptr
5396 }
5397 return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
5398 }
5399
5400
5401
5402
5403
5404
5405
// boundsCheck generates bounds-checking code: it checks that idx is
// within [0, len] (strict for index ops, non-strict for slice ops) and
// branches to a panic block if not. It starts a new block on return
// and returns idx extended to full int width.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If the index is known bounded (compiler-generated, so not
		// attacker-controlled) or bounds checking is disabled by -B,
		// emit no check and no Spectre masking — just the extended
		// index.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Use the unsigned variant of the bounds kind for unsigned
		// index types.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// One comparison covers both bounds: IsInBounds is the strict
	// (index) form, IsSliceInBounds the non-strict (slice) form.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the in-bounds path is the common one
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm calls the bounds-check runtime function directly
		// instead of using the PanicBounds pseudo-op.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, mask the index to prevent speculative
	// out-of-bounds loads on the in-bounds path.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5492
5493
// check branches to a call of panic function fn if boolean cmp is
// false. Panic blocks are cached per (fn, position) in s.panics, so
// repeated checks at the same source line share one panic block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the non-panic path is the common one
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic function takes no arguments and does not return;
		// rtcall with returns=false marks the block as an exit block.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5516
5517 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5518 needcheck := true
5519 switch b.Op {
5520 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5521 if b.AuxInt != 0 {
5522 needcheck = false
5523 }
5524 }
5525 if needcheck {
5526
5527 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5528 s.check(cmp, ir.Syms.Panicdivide)
5529 }
5530 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5531 }
5532
5533
5534
5535
5536
// rtcall issues a call to the given runtime function fn with the
// listed args and returns a slice of results of the given result
// types. The call is added to the end of the current block. If
// returns is false, the current block is marked as an exit block (the
// callee never returns) and results must be empty.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// off accumulates the (aligned) argument/result frame size for the
	// call's AuxInt bookkeeping; the args themselves are passed
	// late-expansion style via callArgs.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// The callee never returns: finish the block as an exit block.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Extract the results, accounting for their frame space too.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5588
5589
// storeType does *left = right for type t, splitting the store into
// scalar and pointer parts when a write barrier may be needed. skip
// suppresses parts (ptr/len/cap) that need not be stored; leftIsStmt
// marks the store as a statement boundary for debug info.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to need no write barrier: store the whole value at once.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so write-barrier stores for pointer
	// fields can be grouped later.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5609
5610
// storeTypeScalars does *left = right for all scalar (non-pointer)
// parts of type t. skip can suppress the len and/or cap components of
// strings and slices.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex() || t.IsSIMD():
		s.store(t, left, right)
	case t.IsPtrShaped():
		// Not-in-heap pointers are treated as scalars; other
		// pointer-shaped types have no scalar parts.
		if t.IsPtr() && t.Elem().NotInHeap() {
			s.store(t, left, right)
		}

	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store the length word (at offset PtrSize).
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		if skip&skipLen == 0 {
			// Store the length word (at offset PtrSize).
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			// Store the capacity word (at offset 2*PtrSize).
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// The itab word of an interface is stored as a scalar
		// (uintptr) here; the data word is handled by storeTypePtrs.
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case isStructNotSIMD(t):
		// Recurse into each field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5658
5659
// storeTypePtrs does *left = right for all pointer parts of type t
// (the parts that may require a write barrier).
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		// Not-in-heap pointers were already stored as scalars by
		// storeTypeScalars.
		if t.IsPtr() && t.Elem().NotInHeap() {
			break
		}
		s.store(t, left, right)
	case t.IsString():
		// Only the data pointer needs a pointer store.
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		// Only the element pointer needs a pointer store.
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// The itab word was stored by storeTypeScalars; store the
		// data word (at offset PtrSize) here.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case isStructNotSIMD(t):
		// Recurse into each pointer-containing field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5698
5699
5700 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5701 var a *ssa.Value
5702 if !ssa.CanSSA(t) {
5703 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5704 } else {
5705 a = s.expr(n)
5706 }
5707 return a
5708 }
5709
5710
5711
5712
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of
// the result. i, j, k may be nil, in which case they take their
// default values. v may be a slice, a string, or a pointer to an
// array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values for omitted indexes.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Perform bounds checks, innermost limit first, skipping checks
	// whose operands are literally the same value.
	if three {
		// Full slice expression v[i:j:k]: check k against cap (or len
		// for strings/arrays), then j against k, then i against j.
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		// Two-index form v[i:j]: check j against cap/len, then i
		// against j.
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen = j - i) and capacity (rcap = k - i)
	// of the new slice. For strings the capacity of the result is
	// unimportant; reuse the length.
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary when slicing from index 0.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice:
	//
	//     rptr = ptr + (mask(rcap) & (i * stride))
	//
	// where mask(x) is 0 if x == 0 and -1 if x > 0, and stride is the
	// element width. The masking ensures that a zero-capacity result
	// does not end up pointing at the next object in memory (we cannot
	// simply use a nil pointer, as that would produce a nil
	// slice/string).
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta is the number of bytes to offset ptr by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// Zero the delta when the resulting capacity is zero.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5825
// u642fcvtTab holds the SSA ops used by uint64Tofloat to convert a uint64
// to a float of a particular width. one builds the integer constant 1 of
// the source type.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one                           func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 is the op table for uint64 -> float64 conversions.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 is the op table for uint64 -> float32 conversions.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5850
5851 func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5852 return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
5853 }
5854
5855 func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5856 return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
5857 }
5858
// uint64Tofloat converts x (an unsigned 64-bit SSA value of type ft) to a
// floating-point value of type tt using the ops in cvttab. n is the IR node
// being converted; it is used as the variable key that carries the result
// across the generated control flow. The generated code is equivalent to:
//
//	if x >= 0 (signed comparison) {
//		result = floatY(x)
//	} else {
//		y = x & 1
//		z = x >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result
//	}
//
// In the else branch the value is halved before conversion so it fits in a
// signed 64-bit integer; ORing the dropped low bit back in preserves the
// rounding behavior when the result is doubled.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// x >= 0 iff the sign bit is clear (signed <= against zero).
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: value fits in int64; convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: halve (keeping the low bit), convert, then double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge: read the result assigned on whichever path was taken.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5918
// u322fcvtTab holds the conversion ops used by uint32Tofloat: an
// int32-to-float conversion for the fast path and a float64-to-target
// conversion for the slow path (a no-op Copy when the target is float64).
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 is the op table for uint32 -> float64 conversions.
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 is the op table for uint32 -> float32 conversions.
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5932
5933 func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5934 return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
5935 }
5936
5937 func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5938 return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
5939 }
5940
// uint32Tofloat converts x (an unsigned 32-bit SSA value of type ft) to a
// floating-point value of type tt using the ops in cvttab:
//
//	if x >= 0 (signed comparison) {
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1<<32))
//	}
//
// In the else branch the value converted as a negative int32 is corrected
// by adding 2^32 in float64, which is exact, then narrowed to tt.
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// x >= 0 iff the sign bit is clear (signed <= against zero).
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: value fits in int32; convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: convert as signed, then add 2^32 to correct.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge: read the result assigned on whichever path was taken.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5978
5979
// referenceTypeBuiltin generates code for the len/cap builtins on maps and
// channels, where x is the (pointer-shaped) map or channel value. Only
// len(map) and len/cap on channels that pass the checks below are inlined
// here; the disallowed combinations are compiler bugs. The generated code
// is equivalent to:
//
//	if x == nil {
//		return 0
//	} else {
//		return *(*int)(x)   // or the second word, for cap(chan)
//	}
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)") // must use runtime.chanlen now
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)") // must use runtime.chancap now
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)") // cap(map) is not supported
	}
	// Branch on x == nil; the nil case yields zero without dereferencing.
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if n.X.Type().IsMap() {
			// length is stored in the first field of the map header.
			loadType := reflectdata.MapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// length is stored in the first word of the chan header.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// capacity is stored in the second word of the chan header.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
6046
// f2uCvtTab holds the SSA ops and constant builders used by floatToUint to
// convert a float to an unsigned integer. cutoff is 2^(intsize-1), the
// first value that does not fit when converted as a signed integer.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue           func(*state, *types.Type, float64) *ssa.Value
	intValue             func(*state, *types.Type, int64) *ssa.Value
	cutoff               uint64
}

// f32_u64 is the op table for float32 -> uint64 conversions.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 is the op table for float64 -> uint64 conversions.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 is the op table for float32 -> uint32 conversions.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 is the op table for float64 -> uint32 conversions.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
6093
6094 func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6095 return s.floatToUint(&f32_u64, n, x, ft, tt)
6096 }
6097 func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6098 return s.floatToUint(&f64_u64, n, x, ft, tt)
6099 }
6100
6101 func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6102 return s.floatToUint(&f32_u32, n, x, ft, tt)
6103 }
6104
6105 func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6106 return s.floatToUint(&f64_u32, n, x, ft, tt)
6107 }
6108
// floatToUint converts x (a float SSA value of type ft) to an unsigned
// integer of type tt using the ops and constants in cvttab:
//
//	cutoff := 1 << (intsize - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)        // fits in the signed range
//	} else {
//		y = x - floatX(cutoff)   // bring into signed range
//		z = uintY(y)
//		result = z | -(cutoff)   // set the high bit back
//	}
//
// When the ConvertHash debug hash selects the new conversion semantics for
// this position, the in-range path additionally clamps negative inputs to
// zero rather than producing an unspecified value.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	var bThen, bZero *ssa.Block

	// newConversion selects the clamp-negative-to-zero behavior.
	newConversion := base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil)
	if newConversion {
		bZero = s.f.NewBlock(ssa.BlockPlain)
		bThen = s.f.NewBlock(ssa.BlockIf) // re-branches on x < 0
	} else {
		bThen = s.f.NewBlock(ssa.BlockPlain)
	}

	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// In-range path: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValueOrSfCall1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0

	if newConversion {
		// If x < 0, override the result with 0 in bZero.
		cmpz := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cvttab.floatValue(s, ft, 0.0))
		s.endBlock()
		bThen.SetControl(cmpz)
		bThen.AddEdgeTo(bZero)
		bThen.Likely = ssa.BranchUnlikely
		bThen.AddEdgeTo(bAfter)

		s.startBlock(bZero)
		s.vars[n] = cvttab.intValue(s, tt, 0)
		s.endBlock()
		bZero.AddEdgeTo(bAfter)
	} else {
		s.endBlock()
		bThen.AddEdgeTo(bAfter)
	}

	// Out-of-range path: subtract cutoff, convert, then OR the high bit.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValueOrSfCall2(cvttab.subf, ft, x, cutoff)
	y = s.newValueOrSfCall1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge: read the result assigned on whichever path was taken.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6178
6179
6180
6181
// dottype generates SSA for a type assertion node n = x.(T).
// commaok indicates whether to panic (false) or return a bool (true) on
// assertion failure; the bool is returned as resok.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)            // input interface
	target := s.reflectType(n.Type()) // target type
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}

	// UseNilPanic asks for a nil check that panics instead of a normal
	// assertion failure; it is incompatible with commaok and with
	// asserting to an interface type (both rejected as compiler bugs).
	if n.UseNilPanic {
		if commaok {
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && commaok == true")
		}
		if n.Type().IsInterface() {
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && Type().IsInterface() == true")
		}
		// Rebuild the interface with a nil-checked type word so a nil
		// interface panics here rather than deeper in dottype1.
		typs := s.f.Config.Types
		iface = s.newValue2(
			ssa.OpIMake,
			iface.Type,
			s.nilCheck(s.newValue1(ssa.OpITab, typs.BytePtr, iface)),
			s.newValue1(ssa.OpIData, typs.BytePtr, iface),
		)
	}

	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
6210
// dynamicDottype generates SSA for a dynamic type assertion node, where
// the target type (and possibly the source type) is computed at runtime
// rather than known statically.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		// nonempty interface -> concrete type: the itab is the dynamic
		// value; load the target *runtime._type out of its Type field.
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
6228
6229
6230
6231
6232
6233
6234
6235
6236
6237 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
6238 typs := s.f.Config.Types
6239 byteptr := typs.BytePtr
6240 if dst.IsInterface() {
6241 if dst.IsEmptyInterface() {
6242
6243
6244 if base.Debug.TypeAssert > 0 {
6245 base.WarnfAt(pos, "type assertion inlined")
6246 }
6247
6248
6249 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6250
6251 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6252
6253 if src.IsEmptyInterface() && commaok {
6254
6255 return iface, cond
6256 }
6257
6258
6259 b := s.endBlock()
6260 b.Kind = ssa.BlockIf
6261 b.SetControl(cond)
6262 b.Likely = ssa.BranchLikely
6263 bOk := s.f.NewBlock(ssa.BlockPlain)
6264 bFail := s.f.NewBlock(ssa.BlockPlain)
6265 b.AddEdgeTo(bOk)
6266 b.AddEdgeTo(bFail)
6267
6268 if !commaok {
6269
6270 s.startBlock(bFail)
6271 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6272
6273
6274 s.startBlock(bOk)
6275 if src.IsEmptyInterface() {
6276 res = iface
6277 return
6278 }
6279
6280 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6281 typ := s.load(byteptr, off)
6282 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6283 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6284 return
6285 }
6286
6287 s.startBlock(bOk)
6288
6289
6290 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6291 s.vars[typVar] = s.load(byteptr, off)
6292 s.endBlock()
6293
6294
6295 s.startBlock(bFail)
6296 s.vars[typVar] = itab
6297 s.endBlock()
6298
6299
6300 bEnd := s.f.NewBlock(ssa.BlockPlain)
6301 bOk.AddEdgeTo(bEnd)
6302 bFail.AddEdgeTo(bEnd)
6303 s.startBlock(bEnd)
6304 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6305 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6306 resok = cond
6307 delete(s.vars, typVar)
6308 return
6309 }
6310
6311 if base.Debug.TypeAssert > 0 {
6312 base.WarnfAt(pos, "type assertion not inlined")
6313 }
6314
6315 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6316 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6317
6318
6319 bNil := s.f.NewBlock(ssa.BlockPlain)
6320 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6321 bMerge := s.f.NewBlock(ssa.BlockPlain)
6322 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6323 b := s.endBlock()
6324 b.Kind = ssa.BlockIf
6325 b.SetControl(cond)
6326 b.Likely = ssa.BranchLikely
6327 b.AddEdgeTo(bNonNil)
6328 b.AddEdgeTo(bNil)
6329
6330 s.startBlock(bNil)
6331 if commaok {
6332 s.vars[typVar] = itab
6333 b := s.endBlock()
6334 b.AddEdgeTo(bMerge)
6335 } else {
6336
6337 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6338 }
6339
6340
6341 s.startBlock(bNonNil)
6342 typ := itab
6343 if !src.IsEmptyInterface() {
6344 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6345 }
6346
6347
6348 var d *ssa.Value
6349 if descriptor != nil {
6350 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6351 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
6352
6353
6354 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
6355 s.Fatalf("atomic load not available")
6356 }
6357
6358 var mul, and, add, zext ssa.Op
6359 if s.config.PtrSize == 4 {
6360 mul = ssa.OpMul32
6361 and = ssa.OpAnd32
6362 add = ssa.OpAdd32
6363 zext = ssa.OpCopy
6364 } else {
6365 mul = ssa.OpMul64
6366 and = ssa.OpAnd64
6367 add = ssa.OpAdd64
6368 zext = ssa.OpZeroExt32to64
6369 }
6370
6371 loopHead := s.f.NewBlock(ssa.BlockPlain)
6372 loopBody := s.f.NewBlock(ssa.BlockPlain)
6373 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6374 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6375
6376
6377
6378 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6379 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6380 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6381
6382
6383 var hash *ssa.Value
6384 if src.IsEmptyInterface() {
6385 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6386 } else {
6387 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6388 }
6389 hash = s.newValue1(zext, typs.Uintptr, hash)
6390 s.vars[hashVar] = hash
6391
6392 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6393
6394 b := s.endBlock()
6395 b.AddEdgeTo(loopHead)
6396
6397
6398
6399 s.startBlock(loopHead)
6400 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6401 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6402 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6403 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6404
6405 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6406
6407
6408
6409 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6410 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6411 b = s.endBlock()
6412 b.Kind = ssa.BlockIf
6413 b.SetControl(cmp1)
6414 b.AddEdgeTo(cacheHit)
6415 b.AddEdgeTo(loopBody)
6416
6417
6418
6419 s.startBlock(loopBody)
6420 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6421 b = s.endBlock()
6422 b.Kind = ssa.BlockIf
6423 b.SetControl(cmp2)
6424 b.AddEdgeTo(cacheMiss)
6425 b.AddEdgeTo(loopHead)
6426
6427
6428
6429 s.startBlock(cacheHit)
6430 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6431 s.vars[typVar] = eItab
6432 b = s.endBlock()
6433 b.AddEdgeTo(bMerge)
6434
6435
6436 s.startBlock(cacheMiss)
6437 }
6438 }
6439
6440
6441 if descriptor != nil {
6442 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6443 } else {
6444 var fn *obj.LSym
6445 if commaok {
6446 fn = ir.Syms.AssertE2I2
6447 } else {
6448 fn = ir.Syms.AssertE2I
6449 }
6450 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6451 }
6452 s.vars[typVar] = itab
6453 b = s.endBlock()
6454 b.AddEdgeTo(bMerge)
6455
6456
6457 s.startBlock(bMerge)
6458 itab = s.variable(typVar, byteptr)
6459 var ok *ssa.Value
6460 if commaok {
6461 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6462 }
6463 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6464 }
6465
6466 if base.Debug.TypeAssert > 0 {
6467 base.WarnfAt(pos, "type assertion inlined")
6468 }
6469
6470
6471 direct := types.IsDirectIface(dst)
6472 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6473 if base.Debug.TypeAssert > 0 {
6474 base.WarnfAt(pos, "type assertion inlined")
6475 }
6476 var wantedFirstWord *ssa.Value
6477 if src.IsEmptyInterface() {
6478
6479 wantedFirstWord = target
6480 } else {
6481
6482 wantedFirstWord = targetItab
6483 }
6484
6485 var tmp ir.Node
6486 var addr *ssa.Value
6487 if commaok && !ssa.CanSSA(dst) {
6488
6489
6490 tmp, addr = s.temp(pos, dst)
6491 }
6492
6493 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6494 b := s.endBlock()
6495 b.Kind = ssa.BlockIf
6496 b.SetControl(cond)
6497 b.Likely = ssa.BranchLikely
6498
6499 bOk := s.f.NewBlock(ssa.BlockPlain)
6500 bFail := s.f.NewBlock(ssa.BlockPlain)
6501 b.AddEdgeTo(bOk)
6502 b.AddEdgeTo(bFail)
6503
6504 if !commaok {
6505
6506 s.startBlock(bFail)
6507 taddr := source
6508 if taddr == nil {
6509 taddr = s.reflectType(src)
6510 }
6511 if src.IsEmptyInterface() {
6512 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6513 } else {
6514 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6515 }
6516
6517
6518 s.startBlock(bOk)
6519 if direct {
6520 return s.newValue1(ssa.OpIData, dst, iface), nil
6521 }
6522 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6523 return s.load(dst, p), nil
6524 }
6525
6526
6527
6528 bEnd := s.f.NewBlock(ssa.BlockPlain)
6529
6530
6531 valVar := ssaMarker("val")
6532
6533
6534 s.startBlock(bOk)
6535 if tmp == nil {
6536 if direct {
6537 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6538 } else {
6539 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6540 s.vars[valVar] = s.load(dst, p)
6541 }
6542 } else {
6543 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6544 s.move(dst, addr, p)
6545 }
6546 s.vars[okVar] = s.constBool(true)
6547 s.endBlock()
6548 bOk.AddEdgeTo(bEnd)
6549
6550
6551 s.startBlock(bFail)
6552 if tmp == nil {
6553 s.vars[valVar] = s.zeroVal(dst)
6554 } else {
6555 s.zero(dst, addr)
6556 }
6557 s.vars[okVar] = s.constBool(false)
6558 s.endBlock()
6559 bFail.AddEdgeTo(bEnd)
6560
6561
6562 s.startBlock(bEnd)
6563 if tmp == nil {
6564 res = s.variable(valVar, dst)
6565 delete(s.vars, valVar)
6566 } else {
6567 res = s.load(dst, addr)
6568 }
6569 resok = s.variable(okVar, types.Types[types.TBOOL])
6570 delete(s.vars, okVar)
6571 return res, resok
6572 }
6573
6574
// temp allocates a temp of type t at position pos and returns its IR name
// and an SSA value holding its address.
func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
	tmp := typecheck.TempAt(pos, s.curfn, t)
	// Emit a VarDef for temps with pointers, and for stack-slot merge
	// candidates (the open-coded defer record is excluded — presumably
	// because its lifetime is handled specially; TODO confirm).
	if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
		s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
	}
	addr := s.addr(tmp)
	return tmp, addr
}
6583
6584
// variable returns the value of the variable n in the current block,
// creating a forward reference (OpFwdRef) to be resolved later if the
// value is not yet known in this block.
func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
	// Already defined in the current block?
	v := s.vars[n]
	if v != nil {
		return v
	}
	// Already created a forward reference in this block?
	v = s.fwdVars[n]
	if v != nil {
		return v
	}

	if s.curBlock == s.f.Entry {
		// No variable should be live at entry.
		s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
	}

	// Make a FwdRef, which records a value that's live on block input.
	// We'll find the matching definition as part of insertPhis.
	v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
	s.fwdVars[n] = v
	if n.Op() == ir.ONAME {
		s.addNamedValue(n.(*ir.Name), v)
	}
	return v
}
6608
// mem returns the current memory state of the function being compiled.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6612
// addNamedValue records that SSA value v corresponds to user variable n,
// populating f.NamedValues (used downstream, e.g. for debug info).
// Several classes of names are deliberately skipped.
func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
	if n.Class == ir.Pxxx {
		// Don't track our marker nodes (memVar etc.).
		return
	}
	if ir.IsAutoTmp(n) {
		// Don't track temporary variables.
		return
	}
	if n.Class == ir.PPARAMOUT {
		// Don't track named output values. This prevents return values
		// from being assigned too early. See #14591 and #14762. TODO: allow this.
		return
	}
	loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
	values, ok := s.f.NamedValues[loc]
	if !ok {
		// First value recorded for this slot: register the slot itself.
		s.f.Names = append(s.f.Names, &loc)
		s.f.CanonicalLocalSlots[loc] = &loc
	}
	s.f.NamedValues[loc] = append(values, v)
}
6635
6636
// Branch is an unresolved branch: an emitted branch instruction P whose
// destination is block B, to be patched once block addresses are known.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6641
6642
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump tables we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID)
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// Map from GC safe points to liveness index, generated by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for which we
	// need to generate instructions that spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of instructions
	// within a single block sharing the same line number
	// Used to move statement marks to the beginning of such runs.
	lineRunStart *obj.Prog

	// wasm: The number of values on the WebAssembly stack. This is only used as a safeguard.
	OnWasmStackSkipped int
}
6676
6677 func (s *State) FuncInfo() *obj.FuncInfo {
6678 return s.pp.CurFunc.LSym.Func()
6679 }
6680
6681
// Prog appends a new Prog with opcode as. It also maintains lineRunStart so
// that a statement mark occurring anywhere within a run of same-line
// instructions is moved to the first instruction of the run.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		// Pseudo-instructions that would lose a statement mark are left
		// untouched so the mark can land on a real instruction.
		return p
	}
	// Float up statement marks to the beginning of a run of same-line instructions.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		// Move the mark to the run's first instruction and clear it here.
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6697
6698
6699 func (s *State) Pc() *obj.Prog {
6700 return s.pp.Next
6701 }
6702
6703
6704 func (s *State) SetPos(pos src.XPos) {
6705 s.pp.Pos = pos
6706 }
6707
6708
6709
6710
6711 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
6712 p := s.Prog(op)
6713 p.To.Type = obj.TYPE_BRANCH
6714 s.Branches = append(s.Branches, Branch{P: p, B: target})
6715 return p
6716 }
6717
6718
6719
6720
6721
6722
// DebugFriendlySetPosFrom adjusts the position-setting for debugging
// friendliness: SSA bookkeeping ops (phis, spills, copies) are marked
// not-a-statement, and repeated emission at the same file/line keeps the
// statement mark on only the first instruction of the line.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements; don't let them carry a statement mark.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also convert default IsStmt to NotStmt; only
			// explicit statement boundaries should appear
			// in the generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// If s.pp.Pos already carries a statement mark for
					// this file/line, keep it: emitting p (which would
					// demote the mark to NotStmt) could cause the mark
					// to be dropped entirely if no later instruction
					// on this line claims it.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v for things like defer/panic position.
			}
			s.SetPos(p)
		} else {
			// Undefined position: keep the current line, unmarked.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6761
6762
// emitArgInfo emits the FUNCDATA symbol describing the function's
// arguments, used by the runtime for traceback argument printing.
// Functions with no receiver and no parameters get nothing.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a FUNCDATA pointing at the symbol.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6780
6781
// EmitArgInfo generates the argument-info symbol for f: a byte-encoded
// description of the stack layout of f's input parameters, consumed by the
// runtime when printing argument values in tracebacks. The encoding uses
// the rtabi.TraceArgs* markers: each scalar is (offset, size); aggregates
// are bracketed by StartAgg/EndAgg; Dotdotdot marks truncation by count,
// depth, or offset limits; EndSeq terminates the stream.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// Aggregate kinds are expanded component by component below.
	isAggregate := func(t *types.Type) bool {
		return isStructNotSIMD(t) || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into x
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate arg/field/element as (offset, size), or the
	// too-large marker when the offset does not fit in the encoding.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// Visit t recursively and write it out.
	// Returns whether to continue visiting (false once the component
	// limit has been reached and "..." emitted).
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words: ptr, len, cap.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Two float halves.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // {} counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case isStructNotSIMD(t):
			if t.NumFields() == 0 {
				n++ // {} counts as a component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Instantiated generic function: skip the first (dictionary)
		// parameter — presumably it is implicit and not user-visible;
		// TODO confirm against the ABI docs.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6885
6886
// emitWrappedFuncInfo emits the FUNCDATA symbol recording which function
// the current (wrapper) function wraps, when there is one.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Relative reference (SymPtrOff) to another shared object doesn't work.
		// Unfortunate.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	// Build (once) a content-addressable symbol holding a relative
	// reference to the wrapped function's symbol.
	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a FUNCDATA pointing at the symbol.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6913
6914
6915 func genssa(f *ssa.Func, pp *objw.Progs) {
6916 var s State
6917 s.ABI = f.OwnAux.Fn.ABI()
6918
6919 e := f.Frontend().(*ssafn)
6920
6921 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6922
6923 var lv *liveness.Liveness
6924 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6925 emitArgInfo(e, f, pp)
6926 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6927
6928 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6929 if openDeferInfo != nil {
6930
6931
6932 p := pp.Prog(obj.AFUNCDATA)
6933 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6934 p.To.Type = obj.TYPE_MEM
6935 p.To.Name = obj.NAME_EXTERN
6936 p.To.Sym = openDeferInfo
6937 }
6938
6939 emitWrappedFuncInfo(e, pp)
6940
6941
6942 s.bstart = make([]*obj.Prog, f.NumBlocks())
6943 s.pp = pp
6944 var progToValue map[*obj.Prog]*ssa.Value
6945 var progToBlock map[*obj.Prog]*ssa.Block
6946 var valueToProgAfter []*obj.Prog
6947 if gatherPrintInfo {
6948 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6949 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6950 f.Logf("genssa %s\n", f.Name)
6951 progToBlock[s.pp.Next] = f.Blocks[0]
6952 }
6953
6954 if base.Ctxt.Flag_locationlists {
6955 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6956 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
6957 }
6958 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
6959 clear(valueToProgAfter)
6960 }
6961
6962
6963
6964 firstPos := src.NoXPos
6965 for _, v := range f.Entry.Values {
6966 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6967 firstPos = v.Pos
6968 v.Pos = firstPos.WithDefaultStmt()
6969 break
6970 }
6971 }
6972
6973
6974
6975
6976 var inlMarks map[*obj.Prog]int32
6977 var inlMarkList []*obj.Prog
6978
6979
6980
6981 var inlMarksByPos map[src.XPos][]*obj.Prog
6982
6983 var argLiveIdx int = -1
6984
6985
6986
6987
6988
6989 var hotAlign, hotRequire int64
6990
6991 if base.Debug.AlignHot > 0 {
6992 switch base.Ctxt.Arch.Name {
6993
6994
6995
6996
6997
6998 case "amd64", "386":
6999
7000
7001
7002 hotAlign = 64
7003 hotRequire = 31
7004 }
7005 }
7006
7007
7008 for i, b := range f.Blocks {
7009
7010 s.lineRunStart = nil
7011 s.SetPos(s.pp.Pos.WithNotStmt())
7012
7013 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
7014
7015
7016
7017
7018
7019 p := s.pp.Prog(obj.APCALIGNMAX)
7020 p.From.SetConst(hotAlign)
7021 p.To.SetConst(hotRequire)
7022 }
7023
7024 s.bstart[b.ID] = s.pp.Next
7025
7026 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7027 argLiveIdx = idx
7028 p := s.pp.Prog(obj.APCDATA)
7029 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7030 p.To.SetConst(int64(idx))
7031 }
7032
7033
7034 Arch.SSAMarkMoves(&s, b)
7035 for _, v := range b.Values {
7036 x := s.pp.Next
7037 s.DebugFriendlySetPosFrom(v)
7038
7039 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7040 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7041 }
7042
7043 switch v.Op {
7044 case ssa.OpInitMem:
7045
7046 case ssa.OpArg:
7047
7048 case ssa.OpSP, ssa.OpSB:
7049
7050 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7051
7052 case ssa.OpGetG:
7053
7054
7055 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7056
7057 case ssa.OpPhi:
7058 CheckLoweredPhi(v)
7059 case ssa.OpConvert:
7060
7061 if v.Args[0].Reg() != v.Reg() {
7062 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7063 }
7064 case ssa.OpInlMark:
7065 p := Arch.Ginsnop(s.pp)
7066 if inlMarks == nil {
7067 inlMarks = map[*obj.Prog]int32{}
7068 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7069 }
7070 inlMarks[p] = v.AuxInt32()
7071 inlMarkList = append(inlMarkList, p)
7072 pos := v.Pos.AtColumn1()
7073 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7074 firstPos = src.NoXPos
7075
7076 default:
7077
7078 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7079 s.SetPos(firstPos)
7080 firstPos = src.NoXPos
7081 }
7082
7083
7084 s.pp.NextLive = s.livenessMap.Get(v)
7085 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7086
7087
7088 Arch.SSAGenValue(&s, v)
7089 }
7090
7091 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7092 argLiveIdx = idx
7093 p := s.pp.Prog(obj.APCDATA)
7094 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7095 p.To.SetConst(int64(idx))
7096 }
7097
7098 if base.Ctxt.Flag_locationlists {
7099 valueToProgAfter[v.ID] = s.pp.Next
7100 }
7101
7102 if gatherPrintInfo {
7103 for ; x != s.pp.Next; x = x.Link {
7104 progToValue[x] = v
7105 }
7106 }
7107 }
7108
7109 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7110 p := Arch.Ginsnop(s.pp)
7111 p.Pos = p.Pos.WithIsStmt()
7112 if b.Pos == src.NoXPos {
7113 b.Pos = p.Pos
7114 if b.Pos == src.NoXPos {
7115 b.Pos = s.pp.Text.Pos
7116 }
7117 }
7118 b.Pos = b.Pos.WithBogusLine()
7119 }
7120
7121
7122
7123
7124
7125 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7126
7127
7128 var next *ssa.Block
7129 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
7130
7131
7132
7133
7134 next = f.Blocks[i+1]
7135 }
7136 x := s.pp.Next
7137 s.SetPos(b.Pos)
7138 Arch.SSAGenBlock(&s, b, next)
7139 if gatherPrintInfo {
7140 for ; x != s.pp.Next; x = x.Link {
7141 progToBlock[x] = b
7142 }
7143 }
7144 }
7145 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
7146
7147
7148
7149
7150 Arch.Ginsnop(s.pp)
7151 }
7152 if openDeferInfo != nil {
7153
7154
7155
7156
7157
7158
7159
7160
7161 s.pp.NextLive = s.livenessMap.DeferReturn
7162 p := s.pp.Prog(obj.ACALL)
7163 p.To.Type = obj.TYPE_MEM
7164 p.To.Name = obj.NAME_EXTERN
7165 p.To.Sym = ir.Syms.Deferreturn
7166
7167
7168
7169
7170
7171 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7172 n := o.Name
7173 rts, offs := o.RegisterTypesAndOffsets()
7174 for i := range o.Registers {
7175 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7176 }
7177 }
7178
7179 s.pp.Prog(obj.ARET)
7180 }
7181
7182 if inlMarks != nil {
7183 hasCall := false
7184
7185
7186
7187
7188 for p := s.pp.Text; p != nil; p = p.Link {
7189 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
7190 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
7191
7192
7193
7194
7195
7196 continue
7197 }
7198 if _, ok := inlMarks[p]; ok {
7199
7200
7201 continue
7202 }
7203 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7204 hasCall = true
7205 }
7206 pos := p.Pos.AtColumn1()
7207 marks := inlMarksByPos[pos]
7208 if len(marks) == 0 {
7209 continue
7210 }
7211 for _, m := range marks {
7212
7213
7214
7215 p.Pos = p.Pos.WithIsStmt()
7216 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
7217
7218 m.As = obj.ANOP
7219 m.Pos = src.NoXPos
7220 m.From = obj.Addr{}
7221 m.To = obj.Addr{}
7222 }
7223 delete(inlMarksByPos, pos)
7224 }
7225
7226 for _, p := range inlMarkList {
7227 if p.As != obj.ANOP {
7228 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7229 }
7230 }
7231
7232 if e.stksize == 0 && !hasCall {
7233
7234
7235
7236
7237
7238
7239 for p := s.pp.Text; p != nil; p = p.Link {
7240 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7241 continue
7242 }
7243 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7244
7245 nop := Arch.Ginsnop(s.pp)
7246 nop.Pos = e.curfn.Pos().WithIsStmt()
7247
7248
7249
7250
7251
7252 for x := s.pp.Text; x != nil; x = x.Link {
7253 if x.Link == nop {
7254 x.Link = nop.Link
7255 break
7256 }
7257 }
7258
7259 for x := s.pp.Text; x != nil; x = x.Link {
7260 if x.Link == p {
7261 nop.Link = p
7262 x.Link = nop
7263 break
7264 }
7265 }
7266 }
7267 break
7268 }
7269 }
7270 }
7271
7272 if base.Ctxt.Flag_locationlists {
7273 var debugInfo *ssa.FuncDebug
7274 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
7275
7276
7277 debugInfo.EntryID = f.Entry.ID
7278 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7279 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7280 } else {
7281 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7282 }
7283 bstart := s.bstart
7284 idToIdx := make([]int, f.NumBlocks())
7285 for i, b := range f.Blocks {
7286 idToIdx[b.ID] = i
7287 }
7288
7289
7290
7291 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7292 switch v {
7293 case ssa.BlockStart.ID:
7294 if b == f.Entry.ID {
7295 return 0
7296
7297 }
7298 return bstart[b].Pc
7299 case ssa.BlockEnd.ID:
7300 blk := f.Blocks[idToIdx[b]]
7301 nv := len(blk.Values)
7302 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7303 case ssa.FuncEnd.ID:
7304 return e.curfn.LSym.Size
7305 default:
7306 return valueToProgAfter[v].Pc
7307 }
7308 }
7309 }
7310
7311
7312 for _, br := range s.Branches {
7313 br.P.To.SetTarget(s.bstart[br.B.ID])
7314 if br.P.Pos.IsStmt() != src.PosIsStmt {
7315 br.P.Pos = br.P.Pos.WithNotStmt()
7316 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7317 br.P.Pos = br.P.Pos.WithNotStmt()
7318 }
7319
7320 }
7321
7322
7323 for _, jt := range s.JumpTables {
7324
7325 targets := make([]*obj.Prog, len(jt.Succs))
7326 for i, e := range jt.Succs {
7327 targets[i] = s.bstart[e.Block().ID]
7328 }
7329
7330
7331
7332 fi := s.pp.CurFunc.LSym.Func()
7333 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7334 }
7335
7336 if e.log {
7337 filename := ""
7338 for p := s.pp.Text; p != nil; p = p.Link {
7339 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7340 filename = p.InnermostFilename()
7341 f.Logf("# %s\n", filename)
7342 }
7343
7344 var s string
7345 if v, ok := progToValue[p]; ok {
7346 s = v.String()
7347 } else if b, ok := progToBlock[p]; ok {
7348 s = b.String()
7349 } else {
7350 s = " "
7351 }
7352 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7353 }
7354 }
7355 if f.HTMLWriter != nil {
7356 var buf strings.Builder
7357 buf.WriteString("<code>")
7358 buf.WriteString("<dl class=\"ssa-gen\">")
7359 filename := ""
7360
7361 liveness := lv.Format(nil)
7362 if liveness != "" {
7363 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7364 buf.WriteString(html.EscapeString("# " + liveness))
7365 buf.WriteString("</dd>")
7366 }
7367
7368 for p := s.pp.Text; p != nil; p = p.Link {
7369
7370
7371 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7372 filename = p.InnermostFilename()
7373 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7374 buf.WriteString(html.EscapeString("# " + filename))
7375 buf.WriteString("</dd>")
7376 }
7377
7378 buf.WriteString("<dt class=\"ssa-prog-src\">")
7379 if v, ok := progToValue[p]; ok {
7380
7381
7382 if p.As != obj.APCDATA {
7383 if liveness := lv.Format(v); liveness != "" {
7384
7385 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7386 buf.WriteString(html.EscapeString("# " + liveness))
7387 buf.WriteString("</dd>")
7388
7389 buf.WriteString("<dt class=\"ssa-prog-src\">")
7390 }
7391 }
7392
7393 buf.WriteString(v.HTML())
7394 } else if b, ok := progToBlock[p]; ok {
7395 buf.WriteString("<b>" + b.HTML() + "</b>")
7396 }
7397 buf.WriteString("</dt>")
7398 buf.WriteString("<dd class=\"ssa-prog\">")
7399 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7400 buf.WriteString("</dd>")
7401 }
7402 buf.WriteString("</dl>")
7403 buf.WriteString("</code>")
7404 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7405 }
7406 if ssa.GenssaDump[f.Name] {
7407 fi := f.DumpFileForPhase("genssa")
7408 if fi != nil {
7409
7410
7411 inliningDiffers := func(a, b []src.Pos) bool {
7412 if len(a) != len(b) {
7413 return true
7414 }
7415 for i := range a {
7416 if a[i].Filename() != b[i].Filename() {
7417 return true
7418 }
7419 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7420 return true
7421 }
7422 }
7423 return false
7424 }
7425
7426 var allPosOld []src.Pos
7427 var allPos []src.Pos
7428
7429 for p := s.pp.Text; p != nil; p = p.Link {
7430 if p.Pos.IsKnown() {
7431 allPos = allPos[:0]
7432 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7433 if inliningDiffers(allPos, allPosOld) {
7434 for _, pos := range allPos {
7435 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7436 }
7437 allPos, allPosOld = allPosOld, allPos
7438 }
7439 }
7440
7441 var s string
7442 if v, ok := progToValue[p]; ok {
7443 s = v.String()
7444 } else if b, ok := progToBlock[p]; ok {
7445 s = b.String()
7446 } else {
7447 s = " "
7448 }
7449 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7450 }
7451 fi.Close()
7452 }
7453 }
7454
7455 defframe(&s, e, f)
7456
7457 f.HTMLWriter.Close()
7458 f.HTMLWriter = nil
7459 }
7460
// defframe finalizes the function's stack frame: it records the final
// frame and argument sizes on the TEXT pseudo-instruction, spills any
// partially-live register arguments to their stack slots, and emits
// code to zero the frame regions that were marked as needing it.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Total frame size: outgoing argument area (rounded up to the
	// frame alignment) plus local variables.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size on the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	// p is the insertion point for prologue instructions emitted below.
	p := pp.Text

	// If the function takes register arguments and a named argument is
	// only partially live (s.partLiveArgs), spill its pointer-carrying
	// register pieces to the stack so the slot is fully initialized for
	// the GC — unless the entry block already spills that piece before
	// any call could observe it.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// Record which (name, offset) argument pieces the entry block
		// already spills before the first call.
		type nameOff struct {
			n *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				// Stop at the first call: spills after it don't help.
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Spill the remaining pointer-containing pieces of partially
		// live, multi-register input parameters.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled by the entry block
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Zero the frame regions marked Needzero, merging nearby regions
	// into single ZeroRange calls.
	// [lo, hi) is the pending frame-offset range accumulated so far.
	var lo, hi int64

	// state is scratch storage threaded through Arch.ZeroRange calls.
	var state uint32

	// NOTE(review): the merge logic assumes e.curfn.Dcl visits autos in
	// decreasing frame-offset order — confirm the Dcl sort elsewhere.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with the pending range: close enough that widening
			// the zeroing beats starting a fresh instruction sequence.
			lo = n.FrameOffset()
			continue
		}

		// Flush the pending range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new range at this variable.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Flush the final pending range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7567
7568
// IndexJump describes one conditional branch to emit: the jump
// instruction to use and the index of the block successor it targets.
type IndexJump struct {
	Jump obj.As // branch instruction
	Index int   // index into b.Succs of the branch target
}
7573
7574 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7575 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7576 p.Pos = b.Pos
7577 }
7578
7579
7580
7581 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7582 switch next {
7583 case b.Succs[0].Block():
7584 s.oneJump(b, &jumps[0][0])
7585 s.oneJump(b, &jumps[0][1])
7586 case b.Succs[1].Block():
7587 s.oneJump(b, &jumps[1][0])
7588 s.oneJump(b, &jumps[1][1])
7589 default:
7590 var q *obj.Prog
7591 if b.Likely != ssa.BranchUnlikely {
7592 s.oneJump(b, &jumps[1][0])
7593 s.oneJump(b, &jumps[1][1])
7594 q = s.Br(obj.AJMP, b.Succs[1].Block())
7595 } else {
7596 s.oneJump(b, &jumps[0][0])
7597 s.oneJump(b, &jumps[0][1])
7598 q = s.Br(obj.AJMP, b.Succs[0].Block())
7599 }
7600 q.Pos = b.Pos
7601 }
7602 }
7603
7604
// AddAux adds the offset and symbol information from v's Aux and
// AuxInt fields to the address a. Shorthand for AddAux2 with
// offset v.AuxInt.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7608 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7609 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7610 v.Fatalf("bad AddAux addr %v", a)
7611 }
7612
7613 a.Offset += offset
7614
7615
7616 if v.Aux == nil {
7617 return
7618 }
7619
7620 switch n := v.Aux.(type) {
7621 case *ssa.AuxCall:
7622 a.Name = obj.NAME_EXTERN
7623 a.Sym = n.Fn
7624 case *obj.LSym:
7625 a.Name = obj.NAME_EXTERN
7626 a.Sym = n
7627 case *ir.Name:
7628 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7629 a.Name = obj.NAME_PARAM
7630 } else {
7631 a.Name = obj.NAME_AUTO
7632 }
7633 a.Sym = n.Linksym()
7634 a.Offset += n.FrameOffset()
7635 default:
7636 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7637 }
7638 }
7639
7640
7641
// extendIndex converts idx to exactly pointer width for use as an
// index. len is the length being indexed against; it is used only to
// build the panic call when a 64-bit index on a 32-bit target fails
// its range check. kind selects which bounds-failure flavor to report.
// If bounded is true (or bounds checking is disabled via -B), the
// caller guarantees the index is in range and no check is emitted.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit target: the result is the low word,
		// but we must panic if the high word is nonzero (for signed
		// indexes, a negative value also has a nonzero high word).
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Report the unsigned variant of the bounds failure so the
			// runtime message matches the index's type.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// Close the current block with the hi==0 test; the unlikely
		// edge goes to the panic block.
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: widen with the appropriate
	// sign or zero extension. 10*size+PtrSize encodes the (from, to)
	// byte-width pair, e.g. 18 = 1-byte source on an 8-byte target.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7733
7734
7735
7736 func CheckLoweredPhi(v *ssa.Value) {
7737 if v.Op != ssa.OpPhi {
7738 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7739 }
7740 if v.Type.IsMemory() {
7741 return
7742 }
7743 f := v.Block.Func
7744 loc := f.RegAlloc[v.ID]
7745 for _, a := range v.Args {
7746 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7747 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7748 }
7749 }
7750 }
7751
7752
7753
7754
7755
7756 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7757 entry := v.Block.Func.Entry
7758 if entry != v.Block {
7759 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7760 }
7761 for _, w := range entry.Values {
7762 if w == v {
7763 break
7764 }
7765 switch w.Op {
7766 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7767
7768 default:
7769 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7770 }
7771 }
7772 }
7773
7774
7775 func CheckArgReg(v *ssa.Value) {
7776 entry := v.Block.Func.Entry
7777 if entry != v.Block {
7778 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7779 }
7780 }
7781
7782 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7783 n, off := ssa.AutoVar(v)
7784 a.Type = obj.TYPE_MEM
7785 a.Sym = n.Linksym()
7786 a.Reg = int16(Arch.REGSP)
7787 a.Offset = n.FrameOffset() + off
7788 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7789 a.Name = obj.NAME_PARAM
7790 } else {
7791 a.Name = obj.NAME_AUTO
7792 }
7793 }
7794
7795
7796
7797 func (s *State) Call(v *ssa.Value) *obj.Prog {
7798 pPosIsStmt := s.pp.Pos.IsStmt()
7799 s.PrepareCall(v)
7800
7801 p := s.Prog(obj.ACALL)
7802 if pPosIsStmt == src.PosIsStmt {
7803 p.Pos = v.Pos.WithIsStmt()
7804 } else {
7805 p.Pos = v.Pos.WithNotStmt()
7806 }
7807 if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
7808 p.To.Type = obj.TYPE_MEM
7809 p.To.Name = obj.NAME_EXTERN
7810 p.To.Sym = sym.Fn
7811 } else {
7812
7813 switch Arch.LinkArch.Family {
7814 case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
7815 p.To.Type = obj.TYPE_REG
7816 case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
7817 p.To.Type = obj.TYPE_MEM
7818 default:
7819 base.Fatalf("unknown indirect call family")
7820 }
7821 p.To.Reg = v.Args[0].Reg()
7822 }
7823 return p
7824 }
7825
7826
7827
// TailCall returns a new tail-call instruction for the SSA value v.
// It is identical to Call except the opcode is obj.ARET, which the
// assembler treats as a jump-to-callee rather than a call.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7833
7834
7835
7836
7837 func (s *State) PrepareCall(v *ssa.Value) {
7838 idx := s.livenessMap.Get(v)
7839 if !idx.StackMapValid() {
7840
7841 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7842 base.Fatalf("missing stack map index for %v", v.LongString())
7843 }
7844 }
7845
7846 call, ok := v.Aux.(*ssa.AuxCall)
7847
7848 if ok {
7849
7850
7851 if nowritebarrierrecCheck != nil {
7852 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7853 }
7854 }
7855
7856 if s.maxarg < v.AuxInt {
7857 s.maxarg = v.AuxInt
7858 }
7859 }
7860
7861
7862
7863 func (s *State) UseArgs(n int64) {
7864 if s.maxarg < n {
7865 s.maxarg = n
7866 }
7867 }
7868
7869
7870 func fieldIdx(n *ir.SelectorExpr) int {
7871 t := n.X.Type()
7872 if !isStructNotSIMD(t) {
7873 panic("ODOT's LHS is not a struct")
7874 }
7875
7876 for i, f := range t.Fields() {
7877 if f.Sym == n.Sel {
7878 if f.Offset != n.Offset() {
7879 panic("field offset doesn't match")
7880 }
7881 return i
7882 }
7883 }
7884 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7885
7886
7887
7888 }
7889
7890
7891
// ssafn holds per-function state the SSA backend needs from the
// frontend. NOTE(review): its method set (StringData, SplitSlot,
// Logf, Fatalf, Syslook, ...) suggests it is the ssa.Frontend
// implementation — confirm against the interface definition.
type ssafn struct {
	curfn *ir.Func // the function being compiled
	strings map[string]*obj.LSym // interned data symbols for constant strings (see StringData)
	stksize int64 // size of local variables, added to maxarg in defframe
	stkptrsize int64 // NOTE(review): unused in this chunk; presumably the pointer-containing frame prefix — confirm
	// stkalign is the frame alignment; maxarg is rounded up to it in
	// defframe before the frame size is computed.
	stkalign int64

	log bool // print compiler messages (gates Logf)
}
7906
7907
7908
7909 func (e *ssafn) StringData(s string) *obj.LSym {
7910 if aux, ok := e.strings[s]; ok {
7911 return aux
7912 }
7913 if e.strings == nil {
7914 e.strings = make(map[string]*obj.LSym)
7915 }
7916 data := staticdata.StringSym(e.curfn.Pos(), s)
7917 e.strings[s] = data
7918 return data
7919 }
7920
7921
7922 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
7923 node := parent.N
7924
7925 if node.Class != ir.PAUTO || node.Addrtaken() {
7926
7927 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
7928 }
7929
7930 sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
7931 n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
7932 n.SetUsed(true)
7933 n.SetEsc(ir.EscNever)
7934 types.CalcSize(t)
7935 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
7936 }
7937
7938
7939 func (e *ssafn) Logf(msg string, args ...any) {
7940 if e.log {
7941 fmt.Printf(msg, args...)
7942 }
7943 }
7944
// Log reports whether compiler logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7948
7949
7950 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...any) {
7951 base.Pos = pos
7952 nargs := append([]any{ir.FuncName(e.curfn)}, args...)
7953 base.Fatalf("'%s': "+msg, nargs...)
7954 }
7955
7956
7957
// Warnl reports a compiler diagnostic at the given position.
// (The parameter is named fmt_ to avoid shadowing the fmt package.)
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...any) {
	base.WarnfAt(pos, fmt_, args...)
}
7961
// Debug_checknil reports whether nil-check debugging output is
// enabled (base.Debug.Nil).
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7965
// UseWriteBarrier reports whether write barriers are enabled for this
// compilation (base.Flag.WB).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7969
// Syslook returns the linker symbol for the named runtime support
// routine. Only the names listed here are supported; anything else is
// an internal compiler error.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil // not reached; Fatalf aborts
}
7988
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7992
7993 func clobberBase(n ir.Node) ir.Node {
7994 if n.Op() == ir.ODOT {
7995 n := n.(*ir.SelectorExpr)
7996 if n.X.Type().NumFields() == 1 {
7997 return clobberBase(n.X)
7998 }
7999 }
8000 if n.Op() == ir.OINDEX {
8001 n := n.(*ir.IndexExpr)
8002 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
8003 return clobberBase(n.X)
8004 }
8005 }
8006 return n
8007 }
8008
8009
8010 func callTargetLSym(callee *ir.Name) *obj.LSym {
8011 if callee.Func == nil {
8012
8013
8014
8015 return callee.Linksym()
8016 }
8017
8018 return callee.LinksymABI(callee.Func.ABI)
8019 }
8020
8021
// deferStructFnField is the index of the "fn" field in the struct
// built by deferstruct; deferstruct verifies it at construction time.
const deferStructFnField = 4

// deferType caches the type synthesized by deferstruct.
var deferType *types.Type
8025
8026
8027
// deferstruct returns (building and caching on first call) a struct
// type named runtime._defer describing a defer record.
// NOTE(review): the field names and order presumably mirror the
// runtime's _defer struct and must stay in sync with it — confirmed
// here only for the "fn" field via the deferStructFnField check.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield creates an anonymous-package field with the given name
	// and type.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// NOTE(review): uintptr is used even for fields that look
		// pointer-shaped in the runtime (fn, link, head) — presumably
		// their liveness is handled specially; confirm before changing.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Keep deferStructFnField consistent with the layout above.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer wrapping the struct.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Build the struct body and compute field offsets/size.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
8066
8067
8068
8069
8070
// SpillSlotAddr returns a memory operand addressing the given spill
// slot: baseReg plus the slot's offset plus extraOffset (the caller
// supplies any architecture-specific bias).
func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
	return obj.Addr{
		Name: obj.NAME_NONE,
		Type: obj.TYPE_MEM,
		Reg: baseReg,
		Offset: spill.Offset + extraOffset,
	}
}
8079
// isStructNotSIMD reports whether t is a struct type that is not a
// SIMD vector type.
func isStructNotSIMD(t *types.Type) bool {
	return t.IsStruct() && !t.IsSIMD()
}
8083
// BoundsCheckFunc holds, per ssa.BoundsKind, the runtime symbol to
// call when that bounds check fails. NOTE(review): populated outside
// this chunk — presumably during backend/arch initialization; confirm.
var BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
8085