1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "os"
16 "path/filepath"
17 "slices"
18 "strings"
19
20 "cmd/compile/internal/abi"
21 "cmd/compile/internal/base"
22 "cmd/compile/internal/ir"
23 "cmd/compile/internal/liveness"
24 "cmd/compile/internal/objw"
25 "cmd/compile/internal/reflectdata"
26 "cmd/compile/internal/rttype"
27 "cmd/compile/internal/ssa"
28 "cmd/compile/internal/staticdata"
29 "cmd/compile/internal/typecheck"
30 "cmd/compile/internal/types"
31 "cmd/internal/obj"
32 "cmd/internal/objabi"
33 "cmd/internal/src"
34 "cmd/internal/sys"
35
36 rtabi "internal/abi"
37 )
38
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

var ssaDump string     // early copy of $GOSSAFUNC; the name of the function to dump output for
var ssaDir string      // optional destination directory for ssa dump file(s) ($GOSSADIR)
var ssaDumpStdout bool // whether to dump to stdout as well ("+"-suffixed GOSSAFUNC)
var ssaDumpCFG string  // generate CFGs for these phases (":"-suffix of GOSSAFUNC)
// ssaDumpFile is the default file name for the SSA HTML dump.
const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds the inlined functions whose sources should be shown
// alongside the dumped function when GOSSAFUNC is set.
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation is the size threshold (in bytes) up to which
// several pointer-free heap allocations may be merged into a single
// allocation by newHeapaddr/flushPendingHeapAllocations.
const maxAggregatedHeapAllocation = 16
55
56 func DumpInline(fn *ir.Func) {
57 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
58 ssaDumpInlined = append(ssaDumpInlined, fn)
59 }
60 }
61
62 func InitEnv() {
63 ssaDump = os.Getenv("GOSSAFUNC")
64 ssaDir = os.Getenv("GOSSADIR")
65 if ssaDump != "" {
66 if strings.HasSuffix(ssaDump, "+") {
67 ssaDump = ssaDump[:len(ssaDump)-1]
68 ssaDumpStdout = true
69 }
70 spl := strings.Split(ssaDump, ":")
71 if len(spl) > 1 {
72 ssaDump = spl[0]
73 ssaDumpCFG = spl[1]
74 }
75 }
76 }
77
// InitConfig builds the shared SSA backend configuration: it pre-generates
// pointer types that the backend needs, creates the global *ssa.Config and
// the per-worker caches, and resolves all runtime function/variable symbols
// that SSA construction will reference.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that are uncommon in the frontend but common in the backend.
	// Caching is disabled in the backend, so generating these here avoids allocations.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	if buildcfg.Experiment.SwissMap {
		_ = types.NewPtr(reflectdata.SwissMapType()) // *runtime.hmap
	} else {
		_ = types.NewPtr(reflectdata.OldMapType()) // *runtime.hmap
	}
	_ = types.NewPtr(deferstruct()) // *runtime._defer
	// Disable the pointer-type cache from here on; the backend creates
	// pointer types concurrently, and the cache is not safe for that.
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up some runtime functions and variables.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")       // bool
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")         // bool
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")             // bool
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")         // bool
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS") // bool
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")                 // asm func with special ABI
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier") // struct { bool; ... }
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		// Non-Wasm targets use assembly stubs with a special ABI.
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	if Arch.LinkArch.PtrSize == 4 {
		// 32-bit targets need extra stubs for 64-bit index values.
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm (all asm funcs with special ABIs).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
233
// InitTables initializes the intrinsic-call lowering tables.
func InitTables() {
	initIntrinsics(nil)
}
237
238
239
240
241
242
243
244
// AbiForBodylessFuncStackMap returns the ABI for a bodyless function's stack map.
// This is not necessarily the ABI used to call it.
// Any ABI wrapper that is present is nosplit, hence a precise stack map is
// not needed there (the parameters survive only long enough to call the
// wrapped assembly function). This always returns a freshly copied ABI.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy() // copy because the result may be mutated by the caller
}
248
249
250
251 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
252 if buildcfg.Experiment.RegabiArgs {
253
254 if fn == nil {
255 return abi1
256 }
257 switch fn.ABI {
258 case obj.ABI0:
259 return abi0
260 case obj.ABIInternal:
261
262
263 return abi1
264 }
265 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
266 panic("not reachable")
267 }
268
269 a := abi0
270 if fn != nil {
271 if fn.Pragma&ir.RegisterParams != 0 {
272 a = abi1
273 }
274 }
275 return a
276 }
277
278
279
280
281
282
283
284
285
286
287
288
// emitOpenDeferInfo emits FUNCDATA information about the defers in a function
// that is using open-coded defers. This funcdata is used to determine the
// active defers in a function and execute those defers during panic
// processing. It is only read during panic handling, so it is emitted as a
// separate content-addressable symbol.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the defer closure slots were laid out consecutively,
	// in order, as the encoding below assumes.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Encode the (negated, frame-pointer-relative) offsets of the
	// deferBits slot and the first defer closure slot as uvarints.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
309
310
311
// buildssa builds an SSA function for fn.
// worker indicates which of the backend workers is doing the processing.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// GOSSAFUNC matching: accept a simple name (e.g. "(*Reader).Reset"),
	// package-qualified name, or subpackage-qualified name, with an optional
	// ABI selector suffix.
	if strings.Contains(ssaDump, name) { // in all cases the function name is entirely contained within the GOSSAFUNC string.
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' { // ABI specification
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") { // alternate (linker-style) ABI syntax "<N>"
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "(*Reader).Reset"
			pkgDotName == ssaDump || // "compress/gzip.(*Reader).Reset"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "gzip.(*Reader).Reset"
	}

	var astBuf *bytes.Buffer
	if printssa {
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// TODO: generate and print a mapping from nodes to values and blocks
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Don't support open-coded defers for 386 when using shared
		// libraries or dynamic linking; extra prologue code interferes
		// with the deferreturn/ret sequence we rely on.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Skip open-coded defers if we need to instrument function
		// enter/exit for the race detector, since that instrumentation
		// is incompatible with the extra deferreturn/ret segment.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Similarly, skip if there are any heap-allocated result
		// parameters that need to be copied back to their stack slots.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Since we generate defer calls at every exit for open-coded
		// defers, skip them if there are too many return-site/defer
		// combinations; open-coded defers matter most for small functions.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and stack slot. deferBits is a
		// bitmask showing which of the open-coded defers in this function
		// have been activated.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// For this value, AuxInt is initialized to zero by default (all defers inactive).
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Make sure that the deferBits stack slot is kept alive (for use
		// by panics) and stores to deferBits are not eliminated, even if
		// all checking code on deferBits in the function exit can be
		// eliminated, because the defer statements were all unconditional.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// The backend's stackframe pass prunes away entries from the fn's
	// Dcl list, including PPARAMOUT nodes that correspond to output
	// params passed in registers. Walk the Dcl list and capture these
	// nodes to a side list, so that we'll have them available during
	// DWARF-gen later on.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			// Note: blank and unnamed input parameters do not appear here,
			// but do appear in the function's type.
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // helps with debugging information; not needed for compilation itself
			} else { // address was taken AND/OR too large for SSA
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) { // SSA-able type, so address was taken -- receive value in OpArg, DO NOT bind to var, store immediately to memory.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else { // Too big for SSA.
						// Brute force, and early: do a bunch of stores from registers.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// Unoptimized range-body closures keep their closure pointer
			// live in a named stack slot so the debugger can find the
			// parent frame.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep it from being dead-store eliminated.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// If n is a small variable captured by value, promote
			// it to PAUTO so it can be converted to SSA.
			// Addrtaken variables are never converted to SSA, so no
			// point in promoting those.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			if !n.Byval() {
				// Captured by reference: the closure slot holds a pointer.
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fall through to exit.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to ssa package to compile function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information for morestack calls
	// emitted in the assembler. All parameters (used, partially used, and
	// unused) are recorded, mimicking the behavior of the former ABI
	// (everything stored).
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
636
// storeParameterRegsToStack stores the register pieces of parameter n
// (as assigned by paramAssignment under abi) into the stack memory starting
// at addr. If pointersOnly is set, only pointer-shaped pieces are stored.
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		// Aux records which named parameter piece this register carries.
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}
653
654
655
656
657
658
659
// zeroResults zeros the return values at the start of the function.
// We need to do this very early in the function. Defer might stop a
// panic and show the return values as they exist at the time of panic.
// For precise stacks, the garbage collector assumes results are always
// live, so we need to zero them before any allocations, even allocations
// to move params/results to the heap.
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// The local which points to the return value is the thing
			// that needs zeroing. That is handled elsewhere (liveness
			// Needzero marking), so skip heap-escaped results here.
			continue
		}
		// Zero the stack location containing f.
		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
680
681
682
// paramsToHeap produces code to allocate memory for heap-escaped parameters
// and to copy non-result parameters' values from the stack.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Inputs arrive on the stack; copy them to their heap home.
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
705
706
707
708
709 func allocSizeAndAlign(t *types.Type) (int64, int64) {
710 size, align := t.Size(), t.Alignment()
711 if types.PtrSize == 4 && align == 4 && size >= 8 {
712
713 size = types.RoundUp(size, 8)
714 align = 8
715 }
716 return size, align
717 }
718 func allocSize(t *types.Type) int64 {
719 size, _ := allocSizeAndAlign(t)
720 return size
721 }
722 func allocAlign(t *types.Type) int64 {
723 _, align := allocSizeAndAlign(t)
724 return align
725 }
726
727
// newHeapaddr allocates heap memory for n and sets its heap address.
// Small pointer-free objects are batched: they become OffPtr values off a
// single shared allocation that flushPendingHeapAllocations finalizes.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		// Not eligible for aggregation; do a plain allocation.
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
		return
	}

	// Do we have room together with our pending allocations?
	// If not, flush all the current ones.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value // the result of the shared allocation call
	if len(s.pendingHeapAllocations) == 0 {
		// Make an allocation, but the type being allocated is just
		// the first pending object. We will come back and update it
		// later if needed.
		allocCall = s.newObject(n.Type(), nil)
	} else {
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}
	// v is an offset into the shared allocation; the offset (AuxInt 0 here)
	// is fixed up during the flush.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	// Add to list of pending allocations.
	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	// Finally, record for posterity.
	s.setHeapaddr(n.Pos(), n, v)
}
763
// flushPendingHeapAllocations finalizes the aggregated heap allocation built
// up by newHeapaddr: it assigns real offsets to the pending OffPtr values
// and rewrites the shared newobject call into a single mallocgc call of the
// combined size. Must run before the current block ends (see endBlock).
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return // nothing to do
	}
	s.pendingHeapAllocations = nil // reset state
	ptr := pending[0].Args[0]      // the result value of the allocation call
	call := ptr.Args[0]            // the allocation call itself

	if len(pending) == 1 {
		// Just a single object; keep the plain allocation and turn the
		// zero-offset OffPtr into a no-op Copy.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort in decreasing alignment.
	// This way we never have to worry about padding.
	// (Stable sort keeps program order among equal alignments.)
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Figure out how much data we need to allocate,
	// assigning each object's offset while we're at it.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the allocation call into a mallocgc call of the total size.
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type), // no type pointer: pointer-free data
		s.constBool(true),             // needzero
		call.Args[1],                  // memory
	}
	call.Aux = ssa.StaticAuxCall(ir.Syms.MallocGC, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// mallocgc returns an unsafe.Pointer rather than a typed pointer.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
814
815
816
// setHeapaddr allocates a new PAUTO variable to store ptr (which must be
// non-nil) and then sets it as the .Heapaddr field of n.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare a local variable to hold the address ("&name" by convention).
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
835
836
837 func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
838 if typ.Size() == 0 {
839 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
840 }
841 if rtype == nil {
842 rtype = s.reflectType(typ)
843 }
844 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
845 }
846
// checkPtrAlignment emits checkptr instrumentation for the unsafe.Pointer
// conversion n, verifying that the value v is suitably aligned. count, if
// non-nil, is the element count for a pointer-to-array conversion.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// No check needed for byte-aligned elements when there can be no
	// size overflow (size <= 1 or no count).
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
877
878
879
880 func (s *state) reflectType(typ *types.Type) *ssa.Value {
881
882
883 lsym := reflectdata.TypeLinksym(typ)
884 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
885 }
886
887 func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
888
889 fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
890 targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
891 if err != nil {
892 writer.Logf("cannot read sources for function %v: %v", fn, err)
893 }
894
895
896 var inlFns []*ssa.FuncLines
897 for _, fi := range ssaDumpInlined {
898 elno := fi.Endlineno
899 fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
900 fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
901 if err != nil {
902 writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
903 continue
904 }
905 inlFns = append(inlFns, fnLines)
906 }
907
908 slices.SortFunc(inlFns, ssa.ByTopoCmp)
909 if targetFn != nil {
910 inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
911 }
912
913 writer.WriteSources("sources", inlFns)
914 }
915
916 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
917 f, err := os.Open(os.ExpandEnv(file))
918 if err != nil {
919 return nil, err
920 }
921 defer f.Close()
922 var lines []string
923 ln := uint(1)
924 scanner := bufio.NewScanner(f)
925 for scanner.Scan() && ln <= end {
926 if ln >= start {
927 lines = append(lines, scanner.Text())
928 }
929 ln++
930 }
931 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
932 }
933
934
935
936
// updateUnsetPredPos propagates position information from b to any of b's
// predecessors that lack a position, and recurs on each predecessor it
// updates. b must have a non-empty position.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			// Compute the position to propagate lazily, once: default to
			// b's own position, but prefer the first positioned value in b.
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Assume values are still in roughly textual order;
					// the first positioned one is the earliest.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// Chains of position-less predecessors are expected to be short,
		// so recursion is fine here.
		s.updateUnsetPredPos(p)
	}
}
965
966
// openDeferInfo describes a single open-coded defer in the function
// being compiled.
type openDeferInfo struct {
	// n is the node representing the call of the defer.
	n *ir.CallExpr
	// closure is the SSA value of the stored defer closure, if the
	// deferred call needed one.
	closure *ssa.Value
	// closureNode is the temp holding the closure on the stack; panic
	// processing uses its frame slot to invoke the defer.
	closureNode *ir.Name
}
978
// state holds the main working state while converting one function's
// body from IR to SSA.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol
	// to ssa value); the ir.Node is the unique identifier (an ONAME node)
	// for the variable.
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the
	// current block. This map exists just to coalesce multiple references
	// into a single FwdRef op.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables on the stack.
	decladdrs map[*ir.Name]*ssa.Value

	// starting values. Memory, stack pointer, and globals pointer.
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value
	// value representing the address of where the deferBits autotmp is stored
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack. The current line number is top of stack.
	line []src.XPos
	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number.
	// Used to deduplicate panic calls.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order. Hence, at exit we should run these defers in reverse
	// order of this list.
	openDefers []*openDeferInfo

	// If doing open-coded defers, blocks of the most recently generated
	// defer exit code, so that exits can be shared.
	lastDeferExit       *ssa.Block // entry block of last defer exit code we generated
	lastDeferFinalBlock *ssa.Block // final block of last defer exit code we generated
	lastDeferCount      int        // number of defers encountered at that point

	prevCall *ssa.Value // the previous call; used to tie results to the call op

	// pendingHeapAllocations is the list of small pointer-free allocations
	// in the current block awaiting aggregation into one shared allocation;
	// their offsets and the shared call's final form are fixed up in
	// flushPendingHeapAllocations.
	pendingHeapAllocations []*ssa.Value

	// appendTargets tracks nodes used as append destinations.
	appendTargets map[ir.Node]bool
}
1062
// funcLine identifies a (panic function, source position) pair; used as a
// key to deduplicate generated panic-call blocks.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1068
// ssaLabel holds the blocks associated with a user label.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1074
1075
1076 func (s *state) label(sym *types.Sym) *ssaLabel {
1077 lab := s.labels[sym.Name]
1078 if lab == nil {
1079 lab = new(ssaLabel)
1080 s.labels[sym.Name] = lab
1081 }
1082 return lab
1083 }
1084
// Logf logs a message through the function's SSA logger.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }

// Log reports whether SSA logging is enabled for this function.
func (s *state) Log() bool { return s.f.Log() }

// Fatalf reports a compiler error at the current (top-of-stack) position.
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}

// Warnl reports a compiler diagnostic at the given position.
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }

// Debug_checknil reports whether nil-check debugging is enabled.
func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
1092
// ssaMarker returns a bare name node used only as a marker key in the
// s.vars/defvars maps (e.g. for the memory pseudo-variable).
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1096
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1110
1111
// startBlock sets the current block we're generating code in to b.
// The previous current block must have been ended (s.curBlock == nil).
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	// Fresh variable map for the new block; forward references are reset.
	s.vars = map[ir.Node]*ssa.Value{}
	clear(s.fwdVars)
}
1120
1121
1122
1123
// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	// Emit any batched heap allocations before the block is sealed.
	s.flushPendingHeapAllocations()

	// Record this block's final variable definitions, growing defvars
	// as needed so it is indexable by block ID.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Block kinds that carry no position of their own get no position,
		// so a later pass can assign one sensibly.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}
1148
1149
// pushLine pushes a line number on the line number stack, which supplies
// the position for newly created SSA values (see peekPos).
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// Unknown position: reuse the current top-of-stack position
		// rather than propagating line 0.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}
1164
1165
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1174
1175
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the value may be marked as a statement boundary;
// if false its position is downgraded with WithNotStmt.
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the value may be marked as a statement boundary.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the value may be marked as a statement boundary.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1274
// entryBlock returns the block to place entry-style (position-less,
// loop-invariant) values in.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// With optimization disabled, keep values in the current block
		// instead of hoisting them to the entry block.
		// NOTE(review): presumably this keeps values near their use for
		// debugging — confirm against the stripped original comment.
		b = s.curBlock
	}
	return b
}
1286
1287
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1321
1322
// Constant-construction helpers; each delegates to the cached constant
// factory on the ssa.Func so identical constants are shared.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}
1354 func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
1355 if s.config.PtrSize == 8 {
1356 return s.constInt64(t, c)
1357 }
1358 if int64(int32(c)) != c {
1359 s.Fatalf("integer constant too big %d", c)
1360 }
1361 return s.constInt32(t, int32(c))
1362 }
// constOffPtrSP returns a pointer constant at offset c from the stack pointer.
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}
1366
1367
1368
// newValueOrSfCall* are wrappers around newValue*, which may instead
// lower the operation to a soft-float runtime call (sfcall) when the
// target has no hardware floating point.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1385
// instrumentKind describes the kind of memory access being instrumented
// by the race/msan/asan passes.
type instrumentKind uint8

const (
	// Give the constants the declared type explicitly so that vet-style
	// tooling and type switches treat them as instrumentKind, not int.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1393
// instrument emits race/msan/asan instrumentation for a single access
// of kind at addr with type t.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1397
1398
1399
1400
1401 func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1402 if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
1403 s.instrument(t, addr, kind)
1404 return
1405 }
1406 for _, f := range t.Fields() {
1407 if f.Sym.IsBlank() {
1408 continue
1409 }
1410 offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
1411 s.instrumentFields(f.Type, offptr, kind)
1412 }
1413 }
1414
1415 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1416 if base.Flag.MSan {
1417 s.instrument2(t, dst, src, instrumentMove)
1418 } else {
1419 s.instrument(t, src, instrumentRead)
1420 s.instrument(t, dst, instrumentWrite)
1421 }
1422 }
1423
// instrument2 emits the actual sanitizer runtime call for an access of
// kind at addr (and, for moves, addr2) with type t. It selects the
// runtime hook based on which sanitizer flag is active.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	// Addresses the sanitizers don't care about (e.g. stack addresses
	// proven safe) need no instrumentation.
	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym    // runtime hook to call
	needWidth := false  // whether the hook takes the access width

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// Multi-component accesses use the range variants of the race
		// hooks, which take an explicit width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component access: the plain race hooks infer the width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1504
// load loads a value of type t from src, instrumenting the access for
// the active sanitizers.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad loads without instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store stores val of type t to dst and threads the new memory state
// through s.vars[memVar].
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}
1517
// zero zeroes the memory of type t at dst, instrumenting the write.
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t // the zeroed type is carried in Aux for the write barrier pass
	s.vars[memVar] = store
}
1524
// move copies a value of type t from src to dst; src and dst are assumed
// not to overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
// moveWhichMayOverlap copies a value of type t from src to dst.
// If mayOverlap is set, the two regions may partially overlap, in which
// case a memmove-style runtime call is used for multi-element arrays
// (OpMove does not guarantee overlap-safe semantics there).
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Overlap-safe copy via the runtime.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// Typedmemmove may perform write-barrier work; record the
			// position for write-barrier diagnostics.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t // moved type carried for the write barrier pass
	s.vars[memVar] = store
}
1571
1572
1573 func (s *state) stmtList(l ir.Nodes) {
1574 for _, n := range l {
1575 s.stmt(n)
1576 }
1577 }
1578
1579
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If there is no current block we are in dead code; skip the statement.
	// Labels are still processed because a live goto may target them.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	// No-op: fallthrough was already resolved during switch lowering.
	case ir.OFALL:

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			// Calls to runtime panic/throw helpers never return; end the
			// current block as an exit block so no dead code follows.
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSA-able result: the dottype produced a load; strip it
			// and assign by dereference instead, checking that memory has
			// not moved on since the load.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// Two-result intrinsic call; split the tuple into its parts.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Blank labels cannot be jump targets; nothing to do.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block from a forward goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Fall through from the current block, if any, into the label's block.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the goto itself is a statement boundary
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment of a variable is a no-op.
			return
		}

		// An assignment through two pointer dereferences may copy between
		// overlapping regions — unless the source is the data pointer of a
		// string, whose backing memory is immutable.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				mayOverlap = false
			}
		}

		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// Composite literals reaching SSA must be all-zero;
				// treat them as a zeroing assignment (rhs = nil).
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Special-case x = append(x, ...) on a non-SSA slice:
				// update the slice in place (len-only update).
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			// Non-SSA-able type: assign by copying from an address.
			if rhs == nil {
				r = nil // zeroing assignment
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		// For x = x[i:j:k] reslicing, skip rewriting components that are
		// provably unchanged (pointer when i==0 or absent, and then len/cap
		// when j/k are absent).
		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp // could use BlockExit. BlockRetJmp is mostly for clarity.
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// Unlabeled break/continue: use the innermost targets.
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// Labeled break/continue: look up the label's targets.
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the branch is a statement boundary
		b.AddEdgeTo(to)

	case ir.OFOR:
		// Lower a for loop:
		//   for Cond; Post { Body }
		// into cond, body, incr, and end blocks.
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The body block carries the loop's position.
		bBody.Pos = n.Pos()

		// Enter the condition block from the current block.
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// Generate code to test the condition (absent condition = always true).
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// Set up break/continue targets for the loop body, saving the
		// enclosing targets to restore afterwards.
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// Generate the loop body.
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// Restore enclosing break/continue targets.
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// Body falls through to the increment block.
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// Generate the post statement and loop back to the condition.
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// If the increment block has no position of its own, borrow
			// the condition's so back edges get sensible positions.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// Switch/select bodies are already lowered to if/goto form in
		// n.Compiled; here we only set up the break target.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled switch/select
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// Generate the lowered body.
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// If control falls out of the body (no break/return took it
		// elsewhere), the code is unreachable; end with an exit block.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// jt dispatches on the index; bEnd catches out-of-range values.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// Evaluate the index expression.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Normalize the index to uintptr width.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Compute the case range [min, max]; cases are sorted, so the
		// extremes are the first and last entries. Subtracting min and
		// comparing unsigned against max-min covers both bounds at once.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely // in-range is the common case

		// Build the jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			// Clamp the index to defeat speculative out-of-range loads.
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Fill the table: entries with no explicit case go to bEnd.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target for this case value.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Fast path: probe the per-descriptor interface-switch cache
		// before falling back to the runtime call.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// The cache probe requires an atomic pointer load.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pointer-size-dependent arithmetic ops for the probe loop.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load the cache pointer atomically (it is updated concurrently
			// by the runtime).
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initial hash probe position (widened to uintptr).
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// The first word of the cache is the entry-count mask.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Open-addressing probe loop: compute the entry address for the
			// current hash and advance the hash for the next probe.
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance hash for the next probe (linear probing).
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Entry layout: [type, case, itab]. Compare the entry's type
			// against the switched-on type.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// A nil type entry terminates the probe sequence: cache miss.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Cache hit: read the case index and itab out of the entry.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Cache miss: fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Rejoin the cache-hit fast path.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		_ = s.nilCheck(p)
		// TODO: check that throwing away the nilcheck result is ok.

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2274
2275
2276
2277 const shareDeferExits = false
2278
2279
2280
2281
// exit processes any code that needs to be generated just before returning:
// running deferred calls and materializing the result values. It returns
// the terminating (BlockRet) block.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated open-defer exit sequence.
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers: call runtime.deferreturn.
			// Attribute the call to the function's end line rather than
			// the individual return statement.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Collect the result values; the final slot holds the memory state.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
	// Store SSAable and heap-escaped PPARAMOUT variables back to stack locations.
	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// Mark the stack slot as defined so liveness treats it correctly.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Heap-escaped result: read it back through its heap address.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Result already lives in its stack slot; dereference it so
			// the late-expanded return can copy it to the ABI location.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Race-exit instrumentation must come after the results are read but
	// before the return is materialized.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2357
// opAndType is the key for opToSSA: an IR operator paired with the kind
// of its operand type, mapping to the corresponding machine-independent
// SSA opcode.
type opAndType struct {
	op    ir.Op      // IR operator (OADD, OSUB, ...)
	etype types.Kind // operand type kind (TINT8, TFLOAT64, ...)
}
2362
// opToSSA maps an IR operator and the concrete kind of its operands
// (platform-dependent kinds already resolved by (*state).concreteEtype)
// to the generic SSA opcode implementing it. Looked up by (*state).ssaOp.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}:    ssa.OpAdd8,
	{ir.OADD, types.TUINT8}:   ssa.OpAdd8,
	{ir.OADD, types.TINT16}:   ssa.OpAdd16,
	{ir.OADD, types.TUINT16}:  ssa.OpAdd16,
	{ir.OADD, types.TINT32}:   ssa.OpAdd32,
	{ir.OADD, types.TUINT32}:  ssa.OpAdd32,
	{ir.OADD, types.TINT64}:   ssa.OpAdd64,
	{ir.OADD, types.TUINT64}:  ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}:    ssa.OpSub8,
	{ir.OSUB, types.TUINT8}:   ssa.OpSub8,
	{ir.OSUB, types.TINT16}:   ssa.OpSub16,
	{ir.OSUB, types.TUINT16}:  ssa.OpSub16,
	{ir.OSUB, types.TINT32}:   ssa.OpSub32,
	{ir.OSUB, types.TUINT32}:  ssa.OpSub32,
	{ir.OSUB, types.TINT64}:   ssa.OpSub64,
	{ir.OSUB, types.TUINT64}:  ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}:    ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}:   ssa.OpNeg8,
	{ir.ONEG, types.TINT16}:   ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}:  ssa.OpNeg16,
	{ir.ONEG, types.TINT32}:   ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}:  ssa.OpNeg32,
	{ir.ONEG, types.TINT64}:   ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}:  ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}:   ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}:  ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}:  ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}:  ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}:  ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}:  ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}:  ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}:    ssa.OpMul8,
	{ir.OMUL, types.TUINT8}:   ssa.OpMul8,
	{ir.OMUL, types.TINT16}:   ssa.OpMul16,
	{ir.OMUL, types.TUINT16}:  ssa.OpMul16,
	{ir.OMUL, types.TINT32}:   ssa.OpMul32,
	{ir.OMUL, types.TUINT32}:  ssa.OpMul32,
	{ir.OMUL, types.TINT64}:   ssa.OpMul64,
	{ir.OMUL, types.TUINT64}:  ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division has separate signed and unsigned opcodes.
	{ir.ODIV, types.TINT8}:   ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}:  ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}:  ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}:  ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}:  ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}:   ssa.OpMod8,
	{ir.OMOD, types.TUINT8}:  ssa.OpMod8u,
	{ir.OMOD, types.TINT16}:  ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}:  ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}:  ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}:   ssa.OpAnd8,
	{ir.OAND, types.TUINT8}:  ssa.OpAnd8,
	{ir.OAND, types.TINT16}:  ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}:  ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}:  ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}:   ssa.OpOr8,
	{ir.OOR, types.TUINT8}:  ssa.OpOr8,
	{ir.OOR, types.TINT16}:  ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}:  ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}:  ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}:   ssa.OpXor8,
	{ir.OXOR, types.TUINT8}:  ssa.OpXor8,
	{ir.OXOR, types.TINT16}:  ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}:  ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}:  ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality is also defined on pointer-shaped and composite kinds.
	{ir.OEQ, types.TBOOL}:      ssa.OpEqB,
	{ir.OEQ, types.TINT8}:      ssa.OpEq8,
	{ir.OEQ, types.TUINT8}:     ssa.OpEq8,
	{ir.OEQ, types.TINT16}:     ssa.OpEq16,
	{ir.OEQ, types.TUINT16}:    ssa.OpEq16,
	{ir.OEQ, types.TINT32}:     ssa.OpEq32,
	{ir.OEQ, types.TUINT32}:    ssa.OpEq32,
	{ir.OEQ, types.TINT64}:     ssa.OpEq64,
	{ir.OEQ, types.TUINT64}:    ssa.OpEq64,
	{ir.OEQ, types.TINTER}:     ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}:     ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}:      ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}:       ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}:      ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}:       ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}:   ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}:   ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}:   ssa.OpEq32F,

	{ir.ONE, types.TBOOL}:      ssa.OpNeqB,
	{ir.ONE, types.TINT8}:      ssa.OpNeq8,
	{ir.ONE, types.TUINT8}:     ssa.OpNeq8,
	{ir.ONE, types.TINT16}:     ssa.OpNeq16,
	{ir.ONE, types.TUINT16}:    ssa.OpNeq16,
	{ir.ONE, types.TINT32}:     ssa.OpNeq32,
	{ir.ONE, types.TUINT32}:    ssa.OpNeq32,
	{ir.ONE, types.TINT64}:     ssa.OpNeq64,
	{ir.ONE, types.TUINT64}:    ssa.OpNeq64,
	{ir.ONE, types.TINTER}:     ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}:     ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}:      ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}:       ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}:      ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}:       ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}:   ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}:   ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}:   ssa.OpNeq32F,

	// Ordered comparisons distinguish signed and unsigned operands.
	{ir.OLT, types.TINT8}:    ssa.OpLess8,
	{ir.OLT, types.TUINT8}:   ssa.OpLess8U,
	{ir.OLT, types.TINT16}:   ssa.OpLess16,
	{ir.OLT, types.TUINT16}:  ssa.OpLess16U,
	{ir.OLT, types.TINT32}:   ssa.OpLess32,
	{ir.OLT, types.TUINT32}:  ssa.OpLess32U,
	{ir.OLT, types.TINT64}:   ssa.OpLess64,
	{ir.OLT, types.TUINT64}:  ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}:    ssa.OpLeq8,
	{ir.OLE, types.TUINT8}:   ssa.OpLeq8U,
	{ir.OLE, types.TINT16}:   ssa.OpLeq16,
	{ir.OLE, types.TUINT16}:  ssa.OpLeq16U,
	{ir.OLE, types.TINT32}:   ssa.OpLeq32,
	{ir.OLE, types.TUINT32}:  ssa.OpLeq32U,
	{ir.OLE, types.TINT64}:   ssa.OpLeq64,
	{ir.OLE, types.TUINT64}:  ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2534
2535 func (s *state) concreteEtype(t *types.Type) types.Kind {
2536 e := t.Kind()
2537 switch e {
2538 default:
2539 return e
2540 case types.TINT:
2541 if s.config.PtrSize == 8 {
2542 return types.TINT64
2543 }
2544 return types.TINT32
2545 case types.TUINT:
2546 if s.config.PtrSize == 8 {
2547 return types.TUINT64
2548 }
2549 return types.TUINT32
2550 case types.TUINTPTR:
2551 if s.config.PtrSize == 8 {
2552 return types.TUINT64
2553 }
2554 return types.TUINT32
2555 }
2556 }
2557
2558 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2559 etype := s.concreteEtype(t)
2560 x, ok := opToSSA[opAndType{op, etype}]
2561 if !ok {
2562 s.Fatalf("unhandled binary op %v %s", op, etype)
2563 }
2564 return x
2565 }
2566
// opAndTwoTypes is the key type of shiftOpToSSA: a shift operator
// paired with the concrete kinds of the shifted operand (etype1) and
// of the shift amount (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2572
// twoTypes is the key type of the float-conversion tables: the source
// kind (etype1) and destination kind (etype2) of a conversion.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2577
// twoOpsAndType describes a two-step conversion: op1 takes the source
// value to intermediateType, then op2 takes it to the destination type.
// Either op may be ssa.OpCopy (step is a no-op); see (*state).conv.
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2583
// fpConvOpToSSA maps (source kind, destination kind) to the two-step
// op sequence implementing conversions involving floating point.
// ssa.OpInvalid marks the uint64<->float cases that need special
// handling in (*state).conv (unless overridden by a per-arch table).
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32: widen to at least 32 bits, then convert
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float -> signed int: convert, then truncate if needed
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float: zero-extend into a wider signed value;
	// uint32 goes through int64 so the value stays non-negative.
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float -> unsigned int: convert via a wider signed value, then truncate.
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float: size change or rounding.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2632
2633
2634
// fpConvOpToSSA32 overrides fpConvOpToSSA entries when the target has
// 4-byte registers (RegSize == 4, except MIPS and softfloat — see
// (*state).conv), using direct uint32<->float conversion ops.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2641
2642
// uint64fpConvOpToSSA overrides fpConvOpToSSA entries on targets with
// direct uint64<->float conversions (ARM64, Wasm, S390X) and under
// softfloat; see (*state).conv.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2649
// shiftOpToSSA maps a shift operator plus the concrete kinds of the
// shifted operand and the shift amount to the SSA opcode implementing
// it. Right shifts distinguish signed (arithmetic) from unsigned
// (logical) operands. Looked up by (*state).ssaShiftOp.
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2723
2724 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2725 etype1 := s.concreteEtype(t)
2726 etype2 := s.concreteEtype(u)
2727 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2728 if !ok {
2729 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2730 }
2731 return x
2732 }
2733
2734 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2735 if s.config.PtrSize == 4 {
2736 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2737 }
2738 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2739 }
2740
// conv converts the value v, which has type ft, to a value of type tt,
// emitting the SSA ops (or soft-float / out-of-line helper calls) the
// conversion requires. n is the source expression, passed through to
// the helper-call generators for position information.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// Bool -> uint8 conversion (compiler-internal).
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		// Integer <-> integer: copy, truncate, or extend by size.
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// truncation; switch on (source size, dest size) in bytes
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// sign extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// zero extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex <-> complex: convert real and imaginary parts
		// individually, then reassemble.
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() { // ft is not complex here
		// Real -> complex: convert to the element type; imaginary part is zero.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			// 32-bit hard-float targets (other than MIPS) have direct
			// uint32<->float conversions; prefer them.
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			// These targets (and softfloat mode) have direct
			// uint64<->float conversions.
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float on MIPS: handled out of line.
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32 on MIPS: likewise out of line.
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: op1 to the intermediate type, then op2,
			// eliding OpCopy steps.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}
		// Tricky 64-bit unsigned cases (ssa.OpInvalid in the table):
		// the hardware has no direct instruction, so emit the
		// multi-block helper sequences.
		if ft.IsInteger() {
			// uint64 -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}
		// float -> uint64
		if ft.Size() == 4 {
			return s.float32ToUint64(n, v, ft, tt)
		}
		if ft.Size() == 8 {
			return s.float64ToUint64(n, v, ft, tt)
		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
2916
2917
// expr converts the expression n to SSA and returns its value,
// with pointer-alignment checking (checkptr) permitted.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
2921
2922 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2923 if ir.HasUniquePos(n) {
2924
2925
2926 s.pushLine(n.Pos())
2927 defer s.popLine()
2928 }
2929
2930 s.stmtList(n.Init())
2931 switch n.Op() {
2932 case ir.OBYTES2STRTMP:
2933 n := n.(*ir.ConvExpr)
2934 slice := s.expr(n.X)
2935 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2936 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2937 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2938 case ir.OSTR2BYTESTMP:
2939 n := n.(*ir.ConvExpr)
2940 str := s.expr(n.X)
2941 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2942 if !n.NonNil() {
2943
2944
2945
2946 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2947 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2948 ptr = s.ternary(cond, ptr, zerobase)
2949 }
2950 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2951 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2952 case ir.OCFUNC:
2953 n := n.(*ir.UnaryExpr)
2954 aux := n.X.(*ir.Name).Linksym()
2955
2956
2957 if aux.ABI() != obj.ABIInternal {
2958 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2959 }
2960 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2961 case ir.ONAME:
2962 n := n.(*ir.Name)
2963 if n.Class == ir.PFUNC {
2964
2965 sym := staticdata.FuncLinksym(n)
2966 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2967 }
2968 if s.canSSA(n) {
2969 return s.variable(n, n.Type())
2970 }
2971 return s.load(n.Type(), s.addr(n))
2972 case ir.OLINKSYMOFFSET:
2973 n := n.(*ir.LinksymOffsetExpr)
2974 return s.load(n.Type(), s.addr(n))
2975 case ir.ONIL:
2976 n := n.(*ir.NilExpr)
2977 t := n.Type()
2978 switch {
2979 case t.IsSlice():
2980 return s.constSlice(t)
2981 case t.IsInterface():
2982 return s.constInterface(t)
2983 default:
2984 return s.constNil(t)
2985 }
2986 case ir.OLITERAL:
2987 switch u := n.Val(); u.Kind() {
2988 case constant.Int:
2989 i := ir.IntVal(n.Type(), u)
2990 switch n.Type().Size() {
2991 case 1:
2992 return s.constInt8(n.Type(), int8(i))
2993 case 2:
2994 return s.constInt16(n.Type(), int16(i))
2995 case 4:
2996 return s.constInt32(n.Type(), int32(i))
2997 case 8:
2998 return s.constInt64(n.Type(), i)
2999 default:
3000 s.Fatalf("bad integer size %d", n.Type().Size())
3001 return nil
3002 }
3003 case constant.String:
3004 i := constant.StringVal(u)
3005 if i == "" {
3006 return s.constEmptyString(n.Type())
3007 }
3008 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3009 case constant.Bool:
3010 return s.constBool(constant.BoolVal(u))
3011 case constant.Float:
3012 f, _ := constant.Float64Val(u)
3013 switch n.Type().Size() {
3014 case 4:
3015 return s.constFloat32(n.Type(), f)
3016 case 8:
3017 return s.constFloat64(n.Type(), f)
3018 default:
3019 s.Fatalf("bad float size %d", n.Type().Size())
3020 return nil
3021 }
3022 case constant.Complex:
3023 re, _ := constant.Float64Val(constant.Real(u))
3024 im, _ := constant.Float64Val(constant.Imag(u))
3025 switch n.Type().Size() {
3026 case 8:
3027 pt := types.Types[types.TFLOAT32]
3028 return s.newValue2(ssa.OpComplexMake, n.Type(),
3029 s.constFloat32(pt, re),
3030 s.constFloat32(pt, im))
3031 case 16:
3032 pt := types.Types[types.TFLOAT64]
3033 return s.newValue2(ssa.OpComplexMake, n.Type(),
3034 s.constFloat64(pt, re),
3035 s.constFloat64(pt, im))
3036 default:
3037 s.Fatalf("bad complex size %d", n.Type().Size())
3038 return nil
3039 }
3040 default:
3041 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3042 return nil
3043 }
3044 case ir.OCONVNOP:
3045 n := n.(*ir.ConvExpr)
3046 to := n.Type()
3047 from := n.X.Type()
3048
3049
3050
3051 x := s.expr(n.X)
3052 if to == from {
3053 return x
3054 }
3055
3056
3057
3058
3059
3060 if to.IsPtrShaped() != from.IsPtrShaped() {
3061 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3062 }
3063
3064 v := s.newValue1(ssa.OpCopy, to, x)
3065
3066
3067 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3068 return v
3069 }
3070
3071
3072 if from.Kind() == to.Kind() {
3073 return v
3074 }
3075
3076
3077 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3078 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3079 s.checkPtrAlignment(n, v, nil)
3080 }
3081 return v
3082 }
3083
3084
3085 var mt *types.Type
3086 if buildcfg.Experiment.SwissMap {
3087 mt = types.NewPtr(reflectdata.SwissMapType())
3088 } else {
3089 mt = types.NewPtr(reflectdata.OldMapType())
3090 }
3091 if to.Kind() == types.TMAP && from == mt {
3092 return v
3093 }
3094
3095 types.CalcSize(from)
3096 types.CalcSize(to)
3097 if from.Size() != to.Size() {
3098 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3099 return nil
3100 }
3101 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3102 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3103 return nil
3104 }
3105
3106 if base.Flag.Cfg.Instrumenting {
3107
3108
3109
3110 return v
3111 }
3112
3113 if etypesign(from.Kind()) == 0 {
3114 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3115 return nil
3116 }
3117
3118
3119 return v
3120
3121 case ir.OCONV:
3122 n := n.(*ir.ConvExpr)
3123 x := s.expr(n.X)
3124 return s.conv(n, x, n.X.Type(), n.Type())
3125
3126 case ir.ODOTTYPE:
3127 n := n.(*ir.TypeAssertExpr)
3128 res, _ := s.dottype(n, false)
3129 return res
3130
3131 case ir.ODYNAMICDOTTYPE:
3132 n := n.(*ir.DynamicTypeAssertExpr)
3133 res, _ := s.dynamicDottype(n, false)
3134 return res
3135
3136
3137 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3138 n := n.(*ir.BinaryExpr)
3139 a := s.expr(n.X)
3140 b := s.expr(n.Y)
3141 if n.X.Type().IsComplex() {
3142 pt := types.FloatForComplex(n.X.Type())
3143 op := s.ssaOp(ir.OEQ, pt)
3144 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3145 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3146 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3147 switch n.Op() {
3148 case ir.OEQ:
3149 return c
3150 case ir.ONE:
3151 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3152 default:
3153 s.Fatalf("ordered complex compare %v", n.Op())
3154 }
3155 }
3156
3157
3158 op := n.Op()
3159 switch op {
3160 case ir.OGE:
3161 op, a, b = ir.OLE, b, a
3162 case ir.OGT:
3163 op, a, b = ir.OLT, b, a
3164 }
3165 if n.X.Type().IsFloat() {
3166
3167 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3168 }
3169
3170 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3171 case ir.OMUL:
3172 n := n.(*ir.BinaryExpr)
3173 a := s.expr(n.X)
3174 b := s.expr(n.Y)
3175 if n.Type().IsComplex() {
3176 mulop := ssa.OpMul64F
3177 addop := ssa.OpAdd64F
3178 subop := ssa.OpSub64F
3179 pt := types.FloatForComplex(n.Type())
3180 wt := types.Types[types.TFLOAT64]
3181
3182 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3183 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3184 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3185 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3186
3187 if pt != wt {
3188 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3189 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3190 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3191 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3192 }
3193
3194 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3195 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3196
3197 if pt != wt {
3198 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3199 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3200 }
3201
3202 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3203 }
3204
3205 if n.Type().IsFloat() {
3206 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3207 }
3208
3209 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3210
3211 case ir.ODIV:
3212 n := n.(*ir.BinaryExpr)
3213 a := s.expr(n.X)
3214 b := s.expr(n.Y)
3215 if n.Type().IsComplex() {
3216
3217
3218
3219 mulop := ssa.OpMul64F
3220 addop := ssa.OpAdd64F
3221 subop := ssa.OpSub64F
3222 divop := ssa.OpDiv64F
3223 pt := types.FloatForComplex(n.Type())
3224 wt := types.Types[types.TFLOAT64]
3225
3226 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3227 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3228 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3229 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3230
3231 if pt != wt {
3232 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3233 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3234 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3235 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3236 }
3237
3238 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3239 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3240 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3241
3242
3243
3244
3245
3246 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3247 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3248
3249 if pt != wt {
3250 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3251 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3252 }
3253 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3254 }
3255 if n.Type().IsFloat() {
3256 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3257 }
3258 return s.intDivide(n, a, b)
3259 case ir.OMOD:
3260 n := n.(*ir.BinaryExpr)
3261 a := s.expr(n.X)
3262 b := s.expr(n.Y)
3263 return s.intDivide(n, a, b)
3264 case ir.OADD, ir.OSUB:
3265 n := n.(*ir.BinaryExpr)
3266 a := s.expr(n.X)
3267 b := s.expr(n.Y)
3268 if n.Type().IsComplex() {
3269 pt := types.FloatForComplex(n.Type())
3270 op := s.ssaOp(n.Op(), pt)
3271 return s.newValue2(ssa.OpComplexMake, n.Type(),
3272 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3273 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3274 }
3275 if n.Type().IsFloat() {
3276 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3277 }
3278 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3279 case ir.OAND, ir.OOR, ir.OXOR:
3280 n := n.(*ir.BinaryExpr)
3281 a := s.expr(n.X)
3282 b := s.expr(n.Y)
3283 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3284 case ir.OANDNOT:
3285 n := n.(*ir.BinaryExpr)
3286 a := s.expr(n.X)
3287 b := s.expr(n.Y)
3288 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3289 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3290 case ir.OLSH, ir.ORSH:
3291 n := n.(*ir.BinaryExpr)
3292 a := s.expr(n.X)
3293 b := s.expr(n.Y)
3294 bt := b.Type
3295 if bt.IsSigned() {
3296 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3297 s.check(cmp, ir.Syms.Panicshift)
3298 bt = bt.ToUnsigned()
3299 }
3300 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3301 case ir.OANDAND, ir.OOROR:
3302
3303
3304
3305
3306
3307
3308
3309
3310
3311
3312
3313
3314
3315 n := n.(*ir.LogicalExpr)
3316 el := s.expr(n.X)
3317 s.vars[n] = el
3318
3319 b := s.endBlock()
3320 b.Kind = ssa.BlockIf
3321 b.SetControl(el)
3322
3323
3324
3325
3326
3327 bRight := s.f.NewBlock(ssa.BlockPlain)
3328 bResult := s.f.NewBlock(ssa.BlockPlain)
3329 if n.Op() == ir.OANDAND {
3330 b.AddEdgeTo(bRight)
3331 b.AddEdgeTo(bResult)
3332 } else if n.Op() == ir.OOROR {
3333 b.AddEdgeTo(bResult)
3334 b.AddEdgeTo(bRight)
3335 }
3336
3337 s.startBlock(bRight)
3338 er := s.expr(n.Y)
3339 s.vars[n] = er
3340
3341 b = s.endBlock()
3342 b.AddEdgeTo(bResult)
3343
3344 s.startBlock(bResult)
3345 return s.variable(n, types.Types[types.TBOOL])
3346 case ir.OCOMPLEX:
3347 n := n.(*ir.BinaryExpr)
3348 r := s.expr(n.X)
3349 i := s.expr(n.Y)
3350 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3351
3352
3353 case ir.ONEG:
3354 n := n.(*ir.UnaryExpr)
3355 a := s.expr(n.X)
3356 if n.Type().IsComplex() {
3357 tp := types.FloatForComplex(n.Type())
3358 negop := s.ssaOp(n.Op(), tp)
3359 return s.newValue2(ssa.OpComplexMake, n.Type(),
3360 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3361 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3362 }
3363 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3364 case ir.ONOT, ir.OBITNOT:
3365 n := n.(*ir.UnaryExpr)
3366 a := s.expr(n.X)
3367 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3368 case ir.OIMAG, ir.OREAL:
3369 n := n.(*ir.UnaryExpr)
3370 a := s.expr(n.X)
3371 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3372 case ir.OPLUS:
3373 n := n.(*ir.UnaryExpr)
3374 return s.expr(n.X)
3375
3376 case ir.OADDR:
3377 n := n.(*ir.AddrExpr)
3378 return s.addr(n.X)
3379
3380 case ir.ORESULT:
3381 n := n.(*ir.ResultExpr)
3382 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3383 panic("Expected to see a previous call")
3384 }
3385 which := n.Index
3386 if which == -1 {
3387 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3388 }
3389 return s.resultOfCall(s.prevCall, which, n.Type())
3390
3391 case ir.ODEREF:
3392 n := n.(*ir.StarExpr)
3393 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3394 return s.load(n.Type(), p)
3395
3396 case ir.ODOT:
3397 n := n.(*ir.SelectorExpr)
3398 if n.X.Op() == ir.OSTRUCTLIT {
3399
3400
3401
3402 if !ir.IsZero(n.X) {
3403 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3404 }
3405 return s.zeroVal(n.Type())
3406 }
3407
3408
3409
3410
3411 if ir.IsAddressable(n) && !s.canSSA(n) {
3412 p := s.addr(n)
3413 return s.load(n.Type(), p)
3414 }
3415 v := s.expr(n.X)
3416 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3417
3418 case ir.ODOTPTR:
3419 n := n.(*ir.SelectorExpr)
3420 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3421 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3422 return s.load(n.Type(), p)
3423
3424 case ir.OINDEX:
3425 n := n.(*ir.IndexExpr)
3426 switch {
3427 case n.X.Type().IsString():
3428 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3429
3430
3431
3432 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3433 }
3434 a := s.expr(n.X)
3435 i := s.expr(n.Index)
3436 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3437 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3438 ptrtyp := s.f.Config.Types.BytePtr
3439 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3440 if ir.IsConst(n.Index, constant.Int) {
3441 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3442 } else {
3443 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3444 }
3445 return s.load(types.Types[types.TUINT8], ptr)
3446 case n.X.Type().IsSlice():
3447 p := s.addr(n)
3448 return s.load(n.X.Type().Elem(), p)
3449 case n.X.Type().IsArray():
3450 if ssa.CanSSA(n.X.Type()) {
3451
3452 bound := n.X.Type().NumElem()
3453 a := s.expr(n.X)
3454 i := s.expr(n.Index)
3455 if bound == 0 {
3456
3457
3458 z := s.constInt(types.Types[types.TINT], 0)
3459 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3460
3461
3462 return s.zeroVal(n.Type())
3463 }
3464 len := s.constInt(types.Types[types.TINT], bound)
3465 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3466 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3467 }
3468 p := s.addr(n)
3469 return s.load(n.X.Type().Elem(), p)
3470 default:
3471 s.Fatalf("bad type for index %v", n.X.Type())
3472 return nil
3473 }
3474
3475 case ir.OLEN, ir.OCAP:
3476 n := n.(*ir.UnaryExpr)
3477
3478
3479 a := s.expr(n.X)
3480 t := n.X.Type()
3481 switch {
3482 case t.IsSlice():
3483 op := ssa.OpSliceLen
3484 if n.Op() == ir.OCAP {
3485 op = ssa.OpSliceCap
3486 }
3487 return s.newValue1(op, types.Types[types.TINT], a)
3488 case t.IsString():
3489 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3490 case t.IsMap(), t.IsChan():
3491 return s.referenceTypeBuiltin(n, a)
3492 case t.IsArray():
3493 return s.constInt(types.Types[types.TINT], t.NumElem())
3494 case t.IsPtr() && t.Elem().IsArray():
3495 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3496 default:
3497 s.Fatalf("bad type in len/cap: %v", t)
3498 return nil
3499 }
3500
3501 case ir.OSPTR:
3502 n := n.(*ir.UnaryExpr)
3503 a := s.expr(n.X)
3504 if n.X.Type().IsSlice() {
3505 if n.Bounded() {
3506 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3507 }
3508 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3509 } else {
3510 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3511 }
3512
3513 case ir.OITAB:
3514 n := n.(*ir.UnaryExpr)
3515 a := s.expr(n.X)
3516 return s.newValue1(ssa.OpITab, n.Type(), a)
3517
3518 case ir.OIDATA:
3519 n := n.(*ir.UnaryExpr)
3520 a := s.expr(n.X)
3521 return s.newValue1(ssa.OpIData, n.Type(), a)
3522
3523 case ir.OMAKEFACE:
3524 n := n.(*ir.BinaryExpr)
3525 tab := s.expr(n.X)
3526 data := s.expr(n.Y)
3527 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3528
3529 case ir.OSLICEHEADER:
3530 n := n.(*ir.SliceHeaderExpr)
3531 p := s.expr(n.Ptr)
3532 l := s.expr(n.Len)
3533 c := s.expr(n.Cap)
3534 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3535
3536 case ir.OSTRINGHEADER:
3537 n := n.(*ir.StringHeaderExpr)
3538 p := s.expr(n.Ptr)
3539 l := s.expr(n.Len)
3540 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3541
3542 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3543 n := n.(*ir.SliceExpr)
3544 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3545 v := s.exprCheckPtr(n.X, !check)
3546 var i, j, k *ssa.Value
3547 if n.Low != nil {
3548 i = s.expr(n.Low)
3549 }
3550 if n.High != nil {
3551 j = s.expr(n.High)
3552 }
3553 if n.Max != nil {
3554 k = s.expr(n.Max)
3555 }
3556 p, l, c := s.slice(v, i, j, k, n.Bounded())
3557 if check {
3558
3559 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3560 }
3561 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3562
3563 case ir.OSLICESTR:
3564 n := n.(*ir.SliceExpr)
3565 v := s.expr(n.X)
3566 var i, j *ssa.Value
3567 if n.Low != nil {
3568 i = s.expr(n.Low)
3569 }
3570 if n.High != nil {
3571 j = s.expr(n.High)
3572 }
3573 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3574 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3575
3576 case ir.OSLICE2ARRPTR:
3577
3578
3579
3580
3581 n := n.(*ir.ConvExpr)
3582 v := s.expr(n.X)
3583 nelem := n.Type().Elem().NumElem()
3584 arrlen := s.constInt(types.Types[types.TINT], nelem)
3585 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3586 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3587 op := ssa.OpSlicePtr
3588 if nelem == 0 {
3589 op = ssa.OpSlicePtrUnchecked
3590 }
3591 return s.newValue1(op, n.Type(), v)
3592
3593 case ir.OCALLFUNC:
3594 n := n.(*ir.CallExpr)
3595 if ir.IsIntrinsicCall(n) {
3596 return s.intrinsicCall(n)
3597 }
3598 fallthrough
3599
3600 case ir.OCALLINTER:
3601 n := n.(*ir.CallExpr)
3602 return s.callResult(n, callNormal)
3603
3604 case ir.OGETG:
3605 n := n.(*ir.CallExpr)
3606 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3607
3608 case ir.OGETCALLERSP:
3609 n := n.(*ir.CallExpr)
3610 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3611
3612 case ir.OAPPEND:
3613 return s.append(n.(*ir.CallExpr), false)
3614
3615 case ir.OMIN, ir.OMAX:
3616 return s.minMax(n.(*ir.CallExpr))
3617
3618 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3619
3620
3621
3622 n := n.(*ir.CompLitExpr)
3623 if !ir.IsZero(n) {
3624 s.Fatalf("literal with nonzero value in SSA: %v", n)
3625 }
3626 return s.zeroVal(n.Type())
3627
3628 case ir.ONEW:
3629 n := n.(*ir.UnaryExpr)
3630 var rtype *ssa.Value
3631 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3632 rtype = s.expr(x.RType)
3633 }
3634 return s.newObject(n.Type().Elem(), rtype)
3635
3636 case ir.OUNSAFEADD:
3637 n := n.(*ir.BinaryExpr)
3638 ptr := s.expr(n.X)
3639 len := s.expr(n.Y)
3640
3641
3642
3643 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3644
3645 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3646
3647 default:
3648 s.Fatalf("unhandled expr %v", n.Op())
3649 return nil
3650 }
3651 }
3652
3653 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3654 aux := c.Aux.(*ssa.AuxCall)
3655 pa := aux.ParamAssignmentForResult(which)
3656
3657
3658 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3659 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3660 return s.rawLoad(t, addr)
3661 }
3662 return s.newValue1I(ssa.OpSelectN, t, which, c)
3663 }
3664
3665 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3666 aux := c.Aux.(*ssa.AuxCall)
3667 pa := aux.ParamAssignmentForResult(which)
3668 if len(pa.Registers) == 0 {
3669 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3670 }
3671 _, addr := s.temp(c.Pos, t)
3672 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3673 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3674 return addr
3675 }
3676
3677
3678
3679
3680
3681
3682
3683
3684
3685 func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
3686
3687
3688
3689
3690
3691
3692
3693
3694
3695
3696
3697
3698
3699
3700
3701
3702
3703
3704
3705
3706
3707
3708
3709
3710
3711
3712
3713
3714
3715
3716
3717
3718 et := n.Type().Elem()
3719 pt := types.NewPtr(et)
3720
3721
3722 sn := n.Args[0]
3723 var slice, addr *ssa.Value
3724 if inplace {
3725 addr = s.addr(sn)
3726 slice = s.load(n.Type(), addr)
3727 } else {
3728 slice = s.expr(sn)
3729 }
3730
3731
3732 grow := s.f.NewBlock(ssa.BlockPlain)
3733 assign := s.f.NewBlock(ssa.BlockPlain)
3734
3735
3736 p := s.newValue1(ssa.OpSlicePtr, pt, slice)
3737 l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3738 c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)
3739
3740
3741 nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
3742 oldLen := l
3743 l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
3744
3745
3746 cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)
3747
3748
3749 s.vars[ptrVar] = p
3750 s.vars[lenVar] = l
3751 if !inplace {
3752 s.vars[capVar] = c
3753 }
3754
3755 b := s.endBlock()
3756 b.Kind = ssa.BlockIf
3757 b.Likely = ssa.BranchUnlikely
3758 b.SetControl(cmp)
3759 b.AddEdgeTo(grow)
3760 b.AddEdgeTo(assign)
3761
3762
3763
3764
3765
3766
3767
3768 maxStackSize := int64(base.Debug.VariableMakeThreshold)
3769 if !inplace && n.Esc() == ir.EscNone && et.Size() > 0 && et.Size() <= maxStackSize && base.Flag.N == 0 && base.VariableMakeHash.MatchPos(n.Pos(), nil) && !s.appendTargets[sn] {
3770
3771
3772
3773
3774
3775
3776
3777
3778
3779
3780
3781
3782
3783
3784
3785
3786
3787
3788
3789
3790
3791
3792
3793 if s.appendTargets == nil {
3794 s.appendTargets = map[ir.Node]bool{}
3795 }
3796 s.appendTargets[sn] = true
3797
3798 K := maxStackSize / et.Size()
3799 KT := types.NewArray(et, K)
3800 KT.SetNoalg(true)
3801 types.CalcArraySize(KT)
3802
3803 align := types.NewArray(types.Types[types.TUINTPTR], 0)
3804 types.CalcArraySize(align)
3805 storeTyp := types.NewStruct([]*types.Field{
3806 {Sym: types.BlankSym, Type: align},
3807 {Sym: types.BlankSym, Type: KT},
3808 })
3809 storeTyp.SetNoalg(true)
3810 types.CalcStructSize(storeTyp)
3811
3812 usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
3813 oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
3814 bodyBlock := s.f.NewBlock(ssa.BlockPlain)
3815 growSlice := s.f.NewBlock(ssa.BlockPlain)
3816
3817
3818 tBool := types.Types[types.TBOOL]
3819 used := typecheck.TempAt(n.Pos(), s.curfn, tBool)
3820 s.defvars[s.f.Entry.ID][used] = s.constBool(false)
3821
3822
3823 tInt := types.Types[types.TINT]
3824 backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
3825 backingStore.SetAddrtaken(true)
3826
3827
3828 s.startBlock(grow)
3829 kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, K))
3830 b := s.endBlock()
3831 b.Kind = ssa.BlockIf
3832 b.SetControl(kTest)
3833 b.AddEdgeTo(usedTestBlock)
3834 b.AddEdgeTo(growSlice)
3835 b.Likely = ssa.BranchLikely
3836
3837
3838 s.startBlock(usedTestBlock)
3839 usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(used))
3840 b = s.endBlock()
3841 b.Kind = ssa.BlockIf
3842 b.SetControl(usedTest)
3843 b.AddEdgeTo(oldLenTestBlock)
3844 b.AddEdgeTo(growSlice)
3845 b.Likely = ssa.BranchLikely
3846
3847
3848 s.startBlock(oldLenTestBlock)
3849 oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
3850 b = s.endBlock()
3851 b.Kind = ssa.BlockIf
3852 b.SetControl(oldLenTest)
3853 b.AddEdgeTo(bodyBlock)
3854 b.AddEdgeTo(growSlice)
3855 b.Likely = ssa.BranchLikely
3856
3857
3858 s.startBlock(bodyBlock)
3859 if et.HasPointers() {
3860 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, backingStore, s.mem())
3861 }
3862 addr := s.addr(backingStore)
3863 s.zero(storeTyp, addr)
3864
3865
3866 s.vars[ptrVar] = addr
3867 s.vars[lenVar] = l
3868 s.vars[capVar] = s.constInt(tInt, K)
3869
3870
3871 s.assign(used, s.constBool(true), false, 0)
3872 b = s.endBlock()
3873 b.AddEdgeTo(assign)
3874
3875
3876 grow = growSlice
3877 }
3878
3879
3880 s.startBlock(grow)
3881 taddr := s.expr(n.Fun)
3882 r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)
3883
3884
3885 p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
3886 l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
3887 c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])
3888
3889 s.vars[ptrVar] = p
3890 s.vars[lenVar] = l
3891 s.vars[capVar] = c
3892 if inplace {
3893 if sn.Op() == ir.ONAME {
3894 sn := sn.(*ir.Name)
3895 if sn.Class != ir.PEXTERN {
3896
3897 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
3898 }
3899 }
3900 capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
3901 s.store(types.Types[types.TINT], capaddr, c)
3902 s.store(pt, addr, p)
3903 }
3904
3905 b = s.endBlock()
3906 b.AddEdgeTo(assign)
3907
3908
3909 s.startBlock(assign)
3910 p = s.variable(ptrVar, pt)
3911 l = s.variable(lenVar, types.Types[types.TINT])
3912 if !inplace {
3913 c = s.variable(capVar, types.Types[types.TINT])
3914 }
3915
3916 if inplace {
3917
3918
3919 lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
3920 s.store(types.Types[types.TINT], lenaddr, l)
3921 }
3922
3923
3924 type argRec struct {
3925
3926
3927 v *ssa.Value
3928 store bool
3929 }
3930 args := make([]argRec, 0, len(n.Args[1:]))
3931 for _, n := range n.Args[1:] {
3932 if ssa.CanSSA(n.Type()) {
3933 args = append(args, argRec{v: s.expr(n), store: true})
3934 } else {
3935 v := s.addr(n)
3936 args = append(args, argRec{v: v})
3937 }
3938 }
3939
3940
3941 oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
3942 p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
3943 for i, arg := range args {
3944 addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
3945 if arg.store {
3946 s.storeType(et, addr, arg.v, 0, true)
3947 } else {
3948 s.move(et, addr, arg.v)
3949 }
3950 }
3951
3952
3953
3954
3955
3956 delete(s.vars, ptrVar)
3957 delete(s.vars, lenVar)
3958 if !inplace {
3959 delete(s.vars, capVar)
3960 }
3961
3962
3963 if inplace {
3964 return nil
3965 }
3966 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3967 }
3968
3969
3970 func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
3971
3972
3973
3974 fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
3975 x := s.expr(n.Args[0])
3976 for _, arg := range n.Args[1:] {
3977 x = op(x, s.expr(arg))
3978 }
3979 return x
3980 }
3981
3982 typ := n.Type()
3983
3984 if typ.IsFloat() || typ.IsString() {
3985
3986
3987
3988
3989
3990
3991
3992
3993 if typ.IsFloat() {
3994 hasIntrinsic := false
3995 switch Arch.LinkArch.Family {
3996 case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64:
3997 hasIntrinsic = true
3998 case sys.PPC64:
3999 hasIntrinsic = buildcfg.GOPPC64 >= 9
4000 }
4001
4002 if hasIntrinsic {
4003 var op ssa.Op
4004 switch {
4005 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
4006 op = ssa.OpMin64F
4007 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
4008 op = ssa.OpMax64F
4009 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
4010 op = ssa.OpMin32F
4011 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
4012 op = ssa.OpMax32F
4013 }
4014 return fold(func(x, a *ssa.Value) *ssa.Value {
4015 return s.newValue2(op, typ, x, a)
4016 })
4017 }
4018 }
4019 var name string
4020 switch typ.Kind() {
4021 case types.TFLOAT32:
4022 switch n.Op() {
4023 case ir.OMIN:
4024 name = "fmin32"
4025 case ir.OMAX:
4026 name = "fmax32"
4027 }
4028 case types.TFLOAT64:
4029 switch n.Op() {
4030 case ir.OMIN:
4031 name = "fmin64"
4032 case ir.OMAX:
4033 name = "fmax64"
4034 }
4035 case types.TSTRING:
4036 switch n.Op() {
4037 case ir.OMIN:
4038 name = "strmin"
4039 case ir.OMAX:
4040 name = "strmax"
4041 }
4042 }
4043 fn := typecheck.LookupRuntimeFunc(name)
4044
4045 return fold(func(x, a *ssa.Value) *ssa.Value {
4046 return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
4047 })
4048 }
4049
4050 if typ.IsInteger() {
4051 if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
4052 var op ssa.Op
4053 switch {
4054 case typ.IsSigned() && n.Op() == ir.OMIN:
4055 op = ssa.OpMin64
4056 case typ.IsSigned() && n.Op() == ir.OMAX:
4057 op = ssa.OpMax64
4058 case typ.IsUnsigned() && n.Op() == ir.OMIN:
4059 op = ssa.OpMin64u
4060 case typ.IsUnsigned() && n.Op() == ir.OMAX:
4061 op = ssa.OpMax64u
4062 }
4063 return fold(func(x, a *ssa.Value) *ssa.Value {
4064 return s.newValue2(op, typ, x, a)
4065 })
4066 }
4067 }
4068
4069 lt := s.ssaOp(ir.OLT, typ)
4070
4071 return fold(func(x, a *ssa.Value) *ssa.Value {
4072 switch n.Op() {
4073 case ir.OMIN:
4074
4075 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
4076 case ir.OMAX:
4077
4078 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
4079 }
4080 panic("unreachable")
4081 })
4082 }
4083
4084
4085 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4086
4087
4088 ternaryVar := ssaMarker("ternary")
4089
4090 bThen := s.f.NewBlock(ssa.BlockPlain)
4091 bElse := s.f.NewBlock(ssa.BlockPlain)
4092 bEnd := s.f.NewBlock(ssa.BlockPlain)
4093
4094 b := s.endBlock()
4095 b.Kind = ssa.BlockIf
4096 b.SetControl(cond)
4097 b.AddEdgeTo(bThen)
4098 b.AddEdgeTo(bElse)
4099
4100 s.startBlock(bThen)
4101 s.vars[ternaryVar] = x
4102 s.endBlock().AddEdgeTo(bEnd)
4103
4104 s.startBlock(bElse)
4105 s.vars[ternaryVar] = y
4106 s.endBlock().AddEdgeTo(bEnd)
4107
4108 s.startBlock(bEnd)
4109 r := s.variable(ternaryVar, x.Type)
4110 delete(s.vars, ternaryVar)
4111 return r
4112 }
4113
4114
4115
4116
4117
4118 func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
4119 switch cond.Op() {
4120 case ir.OANDAND:
4121 cond := cond.(*ir.LogicalExpr)
4122 mid := s.f.NewBlock(ssa.BlockPlain)
4123 s.stmtList(cond.Init())
4124 s.condBranch(cond.X, mid, no, max(likely, 0))
4125 s.startBlock(mid)
4126 s.condBranch(cond.Y, yes, no, likely)
4127 return
4128
4129
4130
4131
4132
4133
4134 case ir.OOROR:
4135 cond := cond.(*ir.LogicalExpr)
4136 mid := s.f.NewBlock(ssa.BlockPlain)
4137 s.stmtList(cond.Init())
4138 s.condBranch(cond.X, yes, mid, min(likely, 0))
4139 s.startBlock(mid)
4140 s.condBranch(cond.Y, yes, no, likely)
4141 return
4142
4143
4144
4145 case ir.ONOT:
4146 cond := cond.(*ir.UnaryExpr)
4147 s.stmtList(cond.Init())
4148 s.condBranch(cond.X, no, yes, -likely)
4149 return
4150 case ir.OCONVNOP:
4151 cond := cond.(*ir.ConvExpr)
4152 s.stmtList(cond.Init())
4153 s.condBranch(cond.X, yes, no, likely)
4154 return
4155 }
4156 c := s.expr(cond)
4157 b := s.endBlock()
4158 b.Kind = ssa.BlockIf
4159 b.SetControl(c)
4160 b.Likely = ssa.BranchPrediction(likely)
4161 b.AddEdgeTo(yes)
4162 b.AddEdgeTo(no)
4163 }
4164
// skipMask is a bitmask recording which component words of a composite
// value (pointer, length, capacity) an assignment may skip storing.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // pointer word need not be stored
	skipLen                      // length word need not be stored
	skipCap                      // capacity word need not be stored
)
4172
4173
4174
4175
4176
4177
4178
4179 func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
4180 s.assignWhichMayOverlap(left, right, deref, skip, false)
4181 }
// assignWhichMayOverlap is assign with an explicit flag saying whether the
// memory copied in the deref case may overlap the destination.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	// Assignments to the blank identifier are discarded.
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an SSA-able struct: rebuild the
			// whole struct value with the one field replaced, then
			// assign the new struct to the base expression.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Current value of the whole struct.
			old := s.expr(left.X)

			// New struct value under construction.
			new := s.newValue0(ssa.OpStructMake, t)

			// Copy every field from old except the one being assigned.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			// SSA-able arrays have at most one element (see the OINDEX
			// handling in expr); longer arrays are not SSA-able.
			t := left.X.Type()
			n := t.NumElem()

			// Evaluate the index even when it can't be in range, for its
			// side effects and the bounds-check panic.
			i := s.expr(left.Index)
			if n == 0 {
				// Indexing a zero-length array always panics; emit an
				// unconditional bounds failure.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Bounds-check the index against length 1, then rebuild the
			// one-element array value and assign it to the base.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		// Plain SSA-able variable: record the new value.
		left := left.(*ir.Name)

		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// Not SSA-able; store through memory. If the destination is a stack
	// object whose entire value is being clobbered, emit a VarDef first so
	// liveness knows the old contents are dead.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Compute the destination address (may have side effects/panics).
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Stores to the Data field of a reflect.SliceHeader/StringHeader
		// are treated as unsafe.Pointer stores so the write barrier sees
		// a pointer store.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// right is a source address (or nil, meaning zero the target).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Ordinary store of a value.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4290
4291
4292 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4293 switch {
4294 case t.IsInteger():
4295 switch t.Size() {
4296 case 1:
4297 return s.constInt8(t, 0)
4298 case 2:
4299 return s.constInt16(t, 0)
4300 case 4:
4301 return s.constInt32(t, 0)
4302 case 8:
4303 return s.constInt64(t, 0)
4304 default:
4305 s.Fatalf("bad sized integer type %v", t)
4306 }
4307 case t.IsFloat():
4308 switch t.Size() {
4309 case 4:
4310 return s.constFloat32(t, 0)
4311 case 8:
4312 return s.constFloat64(t, 0)
4313 default:
4314 s.Fatalf("bad sized float type %v", t)
4315 }
4316 case t.IsComplex():
4317 switch t.Size() {
4318 case 8:
4319 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4320 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4321 case 16:
4322 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4323 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4324 default:
4325 s.Fatalf("bad sized complex type %v", t)
4326 }
4327
4328 case t.IsString():
4329 return s.constEmptyString(t)
4330 case t.IsPtrShaped():
4331 return s.constNil(t)
4332 case t.IsBoolean():
4333 return s.constBool(false)
4334 case t.IsInterface():
4335 return s.constInterface(t)
4336 case t.IsSlice():
4337 return s.constSlice(t)
4338 case t.IsStruct():
4339 n := t.NumFields()
4340 v := s.entryNewValue0(ssa.OpStructMake, t)
4341 for i := 0; i < n; i++ {
4342 v.AddArg(s.zeroVal(t.FieldType(i)))
4343 }
4344 return v
4345 case t.IsArray():
4346 switch t.NumElem() {
4347 case 0:
4348 return s.entryNewValue0(ssa.OpArrayMake0, t)
4349 case 1:
4350 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4351 }
4352 }
4353 s.Fatalf("zero for type %v not implemented", t)
4354 return nil
4355 }
4356
// callKind classifies how a function call site is lowered: a plain call,
// a defer (in two flavors), a go statement, or a tail call.
type callKind int8

const (
	callNormal callKind = iota // ordinary function call
	callDefer                  // deferred call
	callDeferStack             // deferred call with stack-allocated record — TODO confirm
	callGo                     // call started in a new goroutine (go statement)
	callTail                   // tail call
)
4366
// sfRtCallDef describes the runtime helper used to emulate a floating-point
// SSA op in softfloat mode: the function to call and the op's result kind.
type sfRtCallDef struct {
	rtfn *obj.LSym  // runtime helper implementing the operation
	rtype types.Kind // Go kind of the operation's result
}

// softFloatOps maps floating-point SSA ops to their softfloat runtime
// helpers. Populated by softfloatInit, which InitConfig calls only when
// Arch.SoftFloat is set; nil otherwise.
var softFloatOps map[ssa.Op]sfRtCallDef
4373
// softfloatInit populates softFloatOps, the table mapping floating-point
// SSA ops to softfloat runtime helpers. Several ops are implemented in
// terms of a different helper, with sfcall adjusting the arguments/result:
// Sub uses fadd (sfcall negates the second operand), Less/Leq use fgt/fge
// (sfcall swaps the operands), and Neq uses feq (sfcall negates the result).
func softfloatInit() {
	softFloatOps = map[ssa.Op]sfRtCallDef{
		// Arithmetic. Subtraction reuses the add helper; see sfcall.
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		// Comparisons. Neq reuses feq (result negated in sfcall);
		// Less/Leq reuse fgt/fge (operands swapped in sfcall).
		ssa.OpEq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F: {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F: {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		// Conversions between integer and float representations, and
		// between the two float widths.
		ssa.OpCvt32to32F: {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32: {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F: {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64: {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F: {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32: {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F: {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64: {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4411
4412
4413
4414 func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
4415 f2i := func(t *types.Type) *types.Type {
4416 switch t.Kind() {
4417 case types.TFLOAT32:
4418 return types.Types[types.TUINT32]
4419 case types.TFLOAT64:
4420 return types.Types[types.TUINT64]
4421 }
4422 return t
4423 }
4424
4425 if callDef, ok := softFloatOps[op]; ok {
4426 switch op {
4427 case ssa.OpLess32F,
4428 ssa.OpLess64F,
4429 ssa.OpLeq32F,
4430 ssa.OpLeq64F:
4431 args[0], args[1] = args[1], args[0]
4432 case ssa.OpSub32F,
4433 ssa.OpSub64F:
4434 args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
4435 }
4436
4437
4438
4439 for i, a := range args {
4440 if a.Type.IsFloat() {
4441 args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
4442 }
4443 }
4444
4445 rt := types.Types[callDef.rtype]
4446 result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
4447 if rt.IsFloat() {
4448 result = s.newValue1(ssa.OpCopy, rt, result)
4449 }
4450 if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
4451 result = s.newValue1(ssa.OpNot, result.Type, result)
4452 }
4453 return result, true
4454 }
4455 return nil, false
4456 }
4457
4458
4459 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4460 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4461 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4462 return p0, p1
4463 }
4464
4465
4466 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4467 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4468 if ssa.IntrinsicsDebug > 0 {
4469 x := v
4470 if x == nil {
4471 x = s.mem()
4472 }
4473 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4474 x = x.Args[0]
4475 }
4476 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4477 }
4478 return v
4479 }
4480
4481
4482 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4483 args := make([]*ssa.Value, len(n.Args))
4484 for i, n := range n.Args {
4485 args[i] = s.expr(n)
4486 }
4487 return args
4488 }
4489
4490
4491
4492
4493
4494
4495
4496 func (s *state) openDeferRecord(n *ir.CallExpr) {
4497 if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
4498 s.Fatalf("defer call with arguments or results: %v", n)
4499 }
4500
4501 opendefer := &openDeferInfo{
4502 n: n,
4503 }
4504 fn := n.Fun
4505
4506
4507
4508 closureVal := s.expr(fn)
4509 closure := s.openDeferSave(fn.Type(), closureVal)
4510 opendefer.closureNode = closure.Aux.(*ir.Name)
4511 if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
4512 opendefer.closure = closure
4513 }
4514 index := len(s.openDefers)
4515 s.openDefers = append(s.openDefers, opendefer)
4516
4517
4518
4519 bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
4520 newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
4521 s.vars[deferBitsVar] = newDeferBits
4522 s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
4523 }
4524
4525
4526
4527
4528
4529
4530 func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
4531 if !ssa.CanSSA(t) {
4532 s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
4533 }
4534 if !t.HasPointers() {
4535 s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
4536 }
4537 pos := val.Pos
4538 temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
4539 temp.SetOpenDeferSlot(true)
4540 temp.SetFrameOffset(int64(len(s.openDefers)))
4541 var addrTemp *ssa.Value
4542
4543
4544 if s.curBlock.ID != s.f.Entry.ID {
4545
4546
4547
4548 if t.HasPointers() {
4549 s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
4550 }
4551 s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
4552 addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
4553 } else {
4554
4555
4556
4557 if t.HasPointers() {
4558 s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
4559 }
4560 s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
4561 addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
4562 }
4563
4564
4565
4566
4567
4568 temp.SetNeedzero(true)
4569
4570
4571 s.store(t, addrTemp, val)
4572 return addrTemp
4573 }
4574
4575
4576
4577
4578
// openDeferExit generates SSA for the calls of the open-coded defers of the
// current function at an exit point. Defers run in reverse order of their
// recording. Each call is guarded by a test of its bit in deferBits; before
// making the call, the bit is cleared both in the SSA variable and in the
// stack copy, so that a panic during the deferred call will not re-run it.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Remember this exit so later returns with the same defer count can
	// branch here instead of regenerating the whole sequence.
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Run the defers last-recorded-first.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Skip the call if deferBits&(1<<i) == 0.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit in deferBits (both the stack copy, which
		// the runtime reads during panic, and the SSA variable).
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		s.vars[deferBitsVar] = maskedval

		// Generate the call. Deferred functions recorded here take no
		// arguments and return no results, so the call only threads memory.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Indirect call: reload the saved closure from its stack slot.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Direct static call to a known function.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the closure's stack slot alive through the call, so the
		// garbage collector can see it if the deferred call panics.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4648
4649 func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
4650 return s.call(n, k, false, nil)
4651 }
4652
4653 func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
4654 return s.call(n, k, true, nil)
4655 }
4656
4657
4658
// call lowers a function/method/interface call n of kind k (normal, tail,
// defer, defer-on-stack, or go) to SSA. If returnResultAddr is true, it
// returns the address of the (single) call result instead of its value.
// deferExtra, if non-nil, is an extra argument evaluated and passed to
// deferprocat.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target of call if known statically
	var closure *ssa.Value   // ptr to closure to run (if dynamic)
	var codeptr *ssa.Value   // ptr to target code (if dynamic)
	var dextra *ssa.Value    // defer extra arg
	var rcvr *ssa.Value      // receiver to set (interface calls)
	fn := n.Fun
	var ACArgs []*types.Type    // AuxCall args
	var ACResults []*types.Type // AuxCall results
	var callArgs []*ssa.Value   // For late-expansion, the args themselves (not stored, args to the call instead).

	callABI := s.f.ABIDefault

	// go/defer wraps its call in a zero-arg closure, so arguments here are a bug.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			// Direct call to a known function.
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be a direct call to a
				// non-ABIInternal function. fn.Func may be nil for some
				// compiler-generated functions, but those are all
				// ABIInternal.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// TODO(register args): remove after the register ABI is
				// always on; honor the //go:registerparams pragma.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			// runtime.deferrangefunc needs the same exit wiring as a defer.
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// Deferred nil function needs to panic when the function is
			// invoked, not at the point of the defer statement.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* fetchArgs */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a _defer struct on the stack and store the closure in it.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // deferprocStack takes one pointer arg
	} else {
		// Arguments are laid out in SP-offset order.
		argStart := base.Ctxt.Arch.FixedFrameSize
		// Defer/go args: the closure (and for deferprocat, the extra arg)
		// precede the ordinary arguments.
		if k != callNormal && k != callTail {
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra argument of interface type for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// NOTE(review): when open-coded defers are present, the entry block
		// is split before evaluating args — presumably so openDeferSave's
		// entry-block memory edits cannot invalidate the memory argument of
		// values created below; confirm against the issue this references.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Build the call value itself, depending on how the target is known.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a closure is
			// always safe, and it is critical that we not clobber any
			// arguments already stored onto the stack.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call through a code pointer loaded from the itab.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call reuses the caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // call ops carry the argsize of the callee
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
	// Insert VarLive opcodes for variables that must stay live across the call.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Finish the block for defers: a BlockDefer has a normal successor and
	// an edge to a shared deferreturn/exit block taken on recover.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn // share a single exit block among all defers
		if r == nil {
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // recover edge to exit code
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// call has no return value; continue with the next statement
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
4892
4893
4894
// maybeNilCheckClosure checks if a nil check of a closure is needed in some
// architecture/OS combinations, and if so, emits it.
// Precedence note: && binds tighter than ||, so the check fires on wasm
// unconditionally, and on AIX for every call kind except go statements
// (NOTE(review): on AIX the "go of nil func value" case is presumably
// handled by the runtime instead — confirm).
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		// On AIX, the closure needs to be verified as fn can be nil, except
		// if it's a call go.
		s.nilCheck(closure)
	}
}
4902
4903
4904
// getClosureAndRcvr returns values for the appropriate closure and receiver
// of an interface call expression fn: the "closure" is the address of the
// method's code-pointer entry in the itab's Fun array, and the receiver is
// the interface's data word.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	s.nilCheck(itab)
	// Offset of this method's entry within the itab.
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
4914
4915
4916
4917 func etypesign(e types.Kind) int8 {
4918 switch e {
4919 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
4920 return -1
4921 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
4922 return +1
4923 }
4924 return 0
4925 }
4926
4927
4928
// addr converts the address of the expression n to SSA, adds it to s and
// returns the SSA result. The value that the returned address points to is
// guaranteed to be non-nil (nil checks are emitted where needed below).
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	// Taking the address of an SSA-able value is a bug in the caller.
	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset builds lsym+offset as an entry-block value (global
	// addresses are constant, so they can be hoisted for CSE).
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// addr of an offset is the offset of the addr
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Variable was moved to the heap; its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot; address was computed at function entry
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			// ensure that we reuse symbols for out parameters so
			// that cse works on their addresses
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array: index its address directly, length is a constant
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		// The assertion's result was just loaded from memory; its address is
		// the load's address operand, valid only while that memory is live.
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5040
5041
5042
5043 func (s *state) canSSA(n ir.Node) bool {
5044 if base.Flag.N != 0 {
5045 return false
5046 }
5047 for {
5048 nn := n
5049 if nn.Op() == ir.ODOT {
5050 nn := nn.(*ir.SelectorExpr)
5051 n = nn.X
5052 continue
5053 }
5054 if nn.Op() == ir.OINDEX {
5055 nn := nn.(*ir.IndexExpr)
5056 if nn.X.Type().IsArray() {
5057 n = nn.X
5058 continue
5059 }
5060 }
5061 break
5062 }
5063 if n.Op() != ir.ONAME {
5064 return false
5065 }
5066 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5067 }
5068
5069 func (s *state) canSSAName(name *ir.Name) bool {
5070 if name.Addrtaken() || !name.OnStack() {
5071 return false
5072 }
5073 switch name.Class {
5074 case ir.PPARAMOUT:
5075 if s.hasdefer {
5076
5077
5078
5079
5080
5081 return false
5082 }
5083 if s.cgoUnsafeArgs {
5084
5085
5086 return false
5087 }
5088 }
5089 return true
5090
5091 }
5092
5093
5094 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5095 p := s.expr(n)
5096 if bounded || n.NonNil() {
5097 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5098 s.f.Warnl(lineno, "removed nil check")
5099 }
5100 return p
5101 }
5102 p = s.nilCheck(p)
5103 return p
5104 }
5105
5106
5107
5108
5109
5110
// nilCheck generates a nil check for the pointer value ptr and returns a
// (possibly new) value known to be non-nil. When nil checks are disabled,
// either globally (debug flag) or for this function (pragma), ptr is
// returned unchanged.
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
5117
5118
5119
5120
5121
5122
5123
// boundsCheck generates bounds checking code: it checks idx against len
// (with <= or <, and signed or unsigned comparison, chosen by kind and the
// index's type), branching to a panic block on failure, and returns the
// index extended to the platform int width. When Spectre index hardening is
// enabled, the returned index is additionally masked to be in-bounds even
// under misspeculation.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If the access is known bounded (the compiler generated the index
		// itself) or bounds checking is disabled by flag, no check is
		// necessary — just return the extended index. No Spectre masking is
		// applied in this case either; NOTE(review): this presumes bounded
		// indexes are never attacker-influenced — confirm that user-written
		// bounds tests do not set bounded here.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	// Unsigned indexes use the unsigned variants of the panic kinds so the
	// panic message reports the value correctly.
	if !idx.Type.IsSigned() {
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Indexing requires idx < len; slicing allows idx == len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm has no OpPanicBounds lowering; call the per-kind runtime
		// bounds-check function out of line instead.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply an appropriate mask to avoid speculative
	// out-of-bounds accesses.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5210
5211
// check generates a test of cmp; if cmp is false, control branches to a block
// that calls the panic function fn (which does not return). Panic blocks are
// deduplicated per (fn, position) via s.panics, so repeated checks at the
// same source line share one block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call takes a memory argument (through rtcall) so the
		// correct memory state is observed if the panic happens.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5234
5235 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5236 needcheck := true
5237 switch b.Op {
5238 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5239 if b.AuxInt != 0 {
5240 needcheck = false
5241 }
5242 }
5243 if needcheck {
5244
5245 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5246 s.check(cmp, ir.Syms.Panicdivide)
5247 }
5248 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5249 }
5250
5251
5252
5253
5254
// rtcall issues a call to the given runtime function fn with the listed args,
// using the default ABI, and returns the SSA values of the call's results.
// If returns is false, the call is treated as non-returning (a panic helper):
// the current block becomes an exit block and nil is returned.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Accumulate the stack frame size the call would need; with late call
	// expansion the args are passed as SSA values, but off still feeds the
	// call's AuxInt (arg size) below.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call as a static late-expanded call carrying memory.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block: the call never returns, so this is an exit block.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Select the results out of the call.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5306
5307
// storeType stores the value right of type t to address left. skip indicates
// which parts of a composite (len/cap/ptr) may be omitted; leftIsStmt marks
// the store as a statement boundary for debug info.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not need a write barrier. Store the whole type in one go.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, then pointer fields, so the write-barrier
	// pass can group the pointer stores together.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5327
5328
// storeTypeScalars stores the non-pointer parts of the value right, of type
// t, to address left, honoring skip. The pointer parts are stored separately
// by storeTypePtrs so write barriers apply only where needed.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap objects never need write barriers,
			// so they count as scalar here.
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store only the length word; the data pointer is handled by
		// storeTypePtrs.
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// The itab word is stored as a uintptr here (the data word is the
		// pointer part, handled by storeTypePtrs).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5376
5377
// storeTypePtrs stores the pointer parts of the value right, of type t, to
// address left. These stores are the ones subject to write barriers.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Not-in-heap pointers were already stored as scalars.
			break
		}
		s.store(t, left, right)
	case t.IsString():
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// Store the data word; the itab word was stored by storeTypeScalars.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5416
5417
5418 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5419 var a *ssa.Value
5420 if !ssa.CanSSA(t) {
5421 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5422 } else {
5423 a = s.expr(n)
5424 }
5425 return a
5426 }
5427
5428 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
5429 pt := types.NewPtr(t)
5430 var addr *ssa.Value
5431 if base == s.sp {
5432
5433 addr = s.constOffPtrSP(pt, off)
5434 } else {
5435 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
5436 }
5437
5438 if !ssa.CanSSA(t) {
5439 a := s.addr(n)
5440 s.move(t, addr, a)
5441 return
5442 }
5443
5444 a := s.expr(n)
5445 s.storeType(t, addr, a, 0, false)
5446 }
5447
5448
5449
5450
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values: v[:j:k] => 0, v[i::k] => len, v[i:j:] => cap.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Make sure x is nonnegative and not larger than the next index in the
	// chain (0 <= i <= j <= k <= cap); checks that trivially hold are elided.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations used below.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice:
	//   rlen = j - i
	//   rcap = k - i  (strings keep rcap == rlen)
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice:
	//
	//	rptr = ptr + (mask(rcap) & (i * stride))
	//
	// The mask (Slicemask: all ones if rcap != 0, all zeros if rcap == 0)
	// ensures that for an empty result we do not advance the pointer past
	// the end of the backing object (which could point into the next object
	// in memory and confuse the garbage collector), while still keeping the
	// pointer non-nil so the result is a valid empty slice/string.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta = i * element size
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// Mask the offset to zero when the resulting capacity is zero.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5563
// u642fcvtTab is the op table used by uint64Tofloat: leq/and/rsh/or operate
// on the 64-bit integer source, cvt2F converts to the float destination, and
// add doubles the halved result. one builds the constant 1 in the source type.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one                           func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 converts uint64 to float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 converts uint64 to float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5588
// uint64Tofloat64 converts x (a uint64 of type ft) to a float64 of type tt.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}
5592
// uint64Tofloat32 converts x (a uint64 of type ft) to a float32 of type tt.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5596
// uint64Tofloat converts the uint64 value x (of type ft) to a float (of type
// tt) using the op table cvttab. It generates:
//
//	if x >= 0 {            // fits in an int64 when viewed as signed
//		result = floatY(x)
//	} else {
//		y = x & 1          // preserve the sticky bit
//		z = (x >> 1) | y
//		result = floatY(z)
//		result = result + result
//	}
//
// Rationale: a large uint64 looks negative to the hardware's signed
// int-to-float conversion. Since the float mantissa has fewer than 64 bits
// we can halve the value, convert, and double — but the discarded low bit
// must be ORed (not added) back in first so rounding comes out the same as a
// direct conversion would.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: x fits in a signed int64 — convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: halve (keeping the sticky bit), convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5654
// u322fcvtTab is the op table used by uint32Tofloat: cvtI2F converts the
// int32 directly when it is nonnegative; cvtF2F narrows the float64 result
// of the negative-value path to the destination type.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 converts uint32 to float64 (no narrowing needed).
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 converts uint32 to float32.
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5668
// uint32Tofloat64 converts x (a uint32 of type ft) to a float64 of type tt.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}
5672
// uint32Tofloat32 converts x (a uint32 of type ft) to a float32 of type tt.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5676
// uint32Tofloat converts the uint32 value x (of type ft) to a float (of type
// tt) using the op table cvttab. It generates:
//
//	if x >= 0 {                            // nonnegative as a signed int32
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1<<32)) // undo the sign wraparound
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: value fits in a signed int32 — convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: the signed conversion produced value-2^32; add 2^32 back in
	// float64 (exact), then narrow to the destination type.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5714
5715
// referenceTypeBuiltin generates code for the len/cap builtin applied to a
// map or channel value x (the pointer to the runtime header). It inlines:
//
//	if x == nil {
//		result = 0
//	} else {
//		result = first (len) or second (cap) word of the header
//	}
//
// len(chan), cap(chan) receive path differences and cap(map) are rejected —
// those cases are lowered elsewhere.
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)") // must use runtime.chanlen now
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)") // must use runtime.chancap now
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)") // unimplemented
	}
	lenType := n.Type()
	// Branch on x == nil.
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if buildcfg.Experiment.SwissMap && n.X.Type().IsMap() {
			// Swiss maps store the count in the header's first field,
			// whose type may differ from int — load and convert.
			loadType := reflectdata.SwissMapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// length is stored in the first word of the header
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// capacity is stored in the second word of the header
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
5782
// f2uCvtTab parameterizes floatToUint over the source float width and the
// destination unsigned-integer width (see the f32_u64 etc. tables below).
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op // float less-than, float→int convert, float subtract, integer OR
	floatValue           func(*state, *types.Type, float64) *ssa.Value // builds a float constant of the source type
	intValue             func(*state, *types.Type, int64) *ssa.Value   // builds an integer constant of the result type
	cutoff               uint64 // 1 << (result width - 1): first value the signed convert can't handle
}
5789
// Conversion tables for the four float→unsigned conversions, one per
// (source float width, destination uint width) pair. Each supplies the
// opcodes and constant constructors floatToUint needs.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
5829
// float32ToUint64 converts x (a float32) to a uint64 value of type tt.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts x (a float64) to a uint64 value of type tt.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts x (a float32) to a uint32 value of type tt.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts x (a float64) to a uint32 value of type tt.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
5844
// floatToUint converts a float value x of type ft to an unsigned integer of
// type tt, using the opcodes and constants in cvttab. Generated code:
//
//	cutoff := 1 << (intY_Size - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)       // in signed range: plain convert
//	} else {
//		y = x - floatX(cutoff)  // shift down into signed range
//		z = uintY(y)
//		result = z | -(cutoff)  // OR the high bit back in
//	}
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // most values fit in the signed range

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// then: x < cutoff, a direct signed convert is correct.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// else: subtract cutoff, convert, then restore the high bit.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5885
5886
5887
5888
// dottype generates SSA for a type assertion node n (x.(T)).
// commaok indicates whether to panic on failure or return a bool.
// If commaok is false, resok will be nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
5898
// dynamicDottype generates SSA for a dynamic type assertion node, where the
// asserted-to type descriptor is computed at run time rather than being a
// compile-time constant (as in generic code).
// commaok indicates whether to panic on failure or return a bool.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// The target type is obtained by loading the Type field out of
		// the itab, rather than evaluating n.RType.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
5916
5917
5918
5919
5920
5921
5922
5923
5924
5925 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
5926 typs := s.f.Config.Types
5927 byteptr := typs.BytePtr
5928 if dst.IsInterface() {
5929 if dst.IsEmptyInterface() {
5930
5931
5932 if base.Debug.TypeAssert > 0 {
5933 base.WarnfAt(pos, "type assertion inlined")
5934 }
5935
5936
5937 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5938
5939 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5940
5941 if src.IsEmptyInterface() && commaok {
5942
5943 return iface, cond
5944 }
5945
5946
5947 b := s.endBlock()
5948 b.Kind = ssa.BlockIf
5949 b.SetControl(cond)
5950 b.Likely = ssa.BranchLikely
5951 bOk := s.f.NewBlock(ssa.BlockPlain)
5952 bFail := s.f.NewBlock(ssa.BlockPlain)
5953 b.AddEdgeTo(bOk)
5954 b.AddEdgeTo(bFail)
5955
5956 if !commaok {
5957
5958 s.startBlock(bFail)
5959 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5960
5961
5962 s.startBlock(bOk)
5963 if src.IsEmptyInterface() {
5964 res = iface
5965 return
5966 }
5967
5968 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5969 typ := s.load(byteptr, off)
5970 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5971 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
5972 return
5973 }
5974
5975 s.startBlock(bOk)
5976
5977
5978 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5979 s.vars[typVar] = s.load(byteptr, off)
5980 s.endBlock()
5981
5982
5983 s.startBlock(bFail)
5984 s.vars[typVar] = itab
5985 s.endBlock()
5986
5987
5988 bEnd := s.f.NewBlock(ssa.BlockPlain)
5989 bOk.AddEdgeTo(bEnd)
5990 bFail.AddEdgeTo(bEnd)
5991 s.startBlock(bEnd)
5992 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5993 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
5994 resok = cond
5995 delete(s.vars, typVar)
5996 return
5997 }
5998
5999 if base.Debug.TypeAssert > 0 {
6000 base.WarnfAt(pos, "type assertion not inlined")
6001 }
6002
6003 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6004 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6005
6006
6007 bNil := s.f.NewBlock(ssa.BlockPlain)
6008 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6009 bMerge := s.f.NewBlock(ssa.BlockPlain)
6010 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6011 b := s.endBlock()
6012 b.Kind = ssa.BlockIf
6013 b.SetControl(cond)
6014 b.Likely = ssa.BranchLikely
6015 b.AddEdgeTo(bNonNil)
6016 b.AddEdgeTo(bNil)
6017
6018 s.startBlock(bNil)
6019 if commaok {
6020 s.vars[typVar] = itab
6021 b := s.endBlock()
6022 b.AddEdgeTo(bMerge)
6023 } else {
6024
6025 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6026 }
6027
6028
6029 s.startBlock(bNonNil)
6030 typ := itab
6031 if !src.IsEmptyInterface() {
6032 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6033 }
6034
6035
6036 var d *ssa.Value
6037 if descriptor != nil {
6038 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6039 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
6040
6041
6042 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
6043 s.Fatalf("atomic load not available")
6044 }
6045
6046 var mul, and, add, zext ssa.Op
6047 if s.config.PtrSize == 4 {
6048 mul = ssa.OpMul32
6049 and = ssa.OpAnd32
6050 add = ssa.OpAdd32
6051 zext = ssa.OpCopy
6052 } else {
6053 mul = ssa.OpMul64
6054 and = ssa.OpAnd64
6055 add = ssa.OpAdd64
6056 zext = ssa.OpZeroExt32to64
6057 }
6058
6059 loopHead := s.f.NewBlock(ssa.BlockPlain)
6060 loopBody := s.f.NewBlock(ssa.BlockPlain)
6061 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6062 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6063
6064
6065
6066 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6067 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6068 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6069
6070
6071 var hash *ssa.Value
6072 if src.IsEmptyInterface() {
6073 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6074 } else {
6075 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6076 }
6077 hash = s.newValue1(zext, typs.Uintptr, hash)
6078 s.vars[hashVar] = hash
6079
6080 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6081
6082 b := s.endBlock()
6083 b.AddEdgeTo(loopHead)
6084
6085
6086
6087 s.startBlock(loopHead)
6088 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6089 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6090 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6091 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6092
6093 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6094
6095
6096
6097 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6098 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6099 b = s.endBlock()
6100 b.Kind = ssa.BlockIf
6101 b.SetControl(cmp1)
6102 b.AddEdgeTo(cacheHit)
6103 b.AddEdgeTo(loopBody)
6104
6105
6106
6107 s.startBlock(loopBody)
6108 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6109 b = s.endBlock()
6110 b.Kind = ssa.BlockIf
6111 b.SetControl(cmp2)
6112 b.AddEdgeTo(cacheMiss)
6113 b.AddEdgeTo(loopHead)
6114
6115
6116
6117 s.startBlock(cacheHit)
6118 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6119 s.vars[typVar] = eItab
6120 b = s.endBlock()
6121 b.AddEdgeTo(bMerge)
6122
6123
6124 s.startBlock(cacheMiss)
6125 }
6126 }
6127
6128
6129 if descriptor != nil {
6130 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6131 } else {
6132 var fn *obj.LSym
6133 if commaok {
6134 fn = ir.Syms.AssertE2I2
6135 } else {
6136 fn = ir.Syms.AssertE2I
6137 }
6138 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6139 }
6140 s.vars[typVar] = itab
6141 b = s.endBlock()
6142 b.AddEdgeTo(bMerge)
6143
6144
6145 s.startBlock(bMerge)
6146 itab = s.variable(typVar, byteptr)
6147 var ok *ssa.Value
6148 if commaok {
6149 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6150 }
6151 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6152 }
6153
6154 if base.Debug.TypeAssert > 0 {
6155 base.WarnfAt(pos, "type assertion inlined")
6156 }
6157
6158
6159 direct := types.IsDirectIface(dst)
6160 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6161 if base.Debug.TypeAssert > 0 {
6162 base.WarnfAt(pos, "type assertion inlined")
6163 }
6164 var wantedFirstWord *ssa.Value
6165 if src.IsEmptyInterface() {
6166
6167 wantedFirstWord = target
6168 } else {
6169
6170 wantedFirstWord = targetItab
6171 }
6172
6173 var tmp ir.Node
6174 var addr *ssa.Value
6175 if commaok && !ssa.CanSSA(dst) {
6176
6177
6178 tmp, addr = s.temp(pos, dst)
6179 }
6180
6181 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6182 b := s.endBlock()
6183 b.Kind = ssa.BlockIf
6184 b.SetControl(cond)
6185 b.Likely = ssa.BranchLikely
6186
6187 bOk := s.f.NewBlock(ssa.BlockPlain)
6188 bFail := s.f.NewBlock(ssa.BlockPlain)
6189 b.AddEdgeTo(bOk)
6190 b.AddEdgeTo(bFail)
6191
6192 if !commaok {
6193
6194 s.startBlock(bFail)
6195 taddr := source
6196 if taddr == nil {
6197 taddr = s.reflectType(src)
6198 }
6199 if src.IsEmptyInterface() {
6200 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6201 } else {
6202 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6203 }
6204
6205
6206 s.startBlock(bOk)
6207 if direct {
6208 return s.newValue1(ssa.OpIData, dst, iface), nil
6209 }
6210 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6211 return s.load(dst, p), nil
6212 }
6213
6214
6215
6216 bEnd := s.f.NewBlock(ssa.BlockPlain)
6217
6218
6219 valVar := ssaMarker("val")
6220
6221
6222 s.startBlock(bOk)
6223 if tmp == nil {
6224 if direct {
6225 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6226 } else {
6227 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6228 s.vars[valVar] = s.load(dst, p)
6229 }
6230 } else {
6231 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6232 s.move(dst, addr, p)
6233 }
6234 s.vars[okVar] = s.constBool(true)
6235 s.endBlock()
6236 bOk.AddEdgeTo(bEnd)
6237
6238
6239 s.startBlock(bFail)
6240 if tmp == nil {
6241 s.vars[valVar] = s.zeroVal(dst)
6242 } else {
6243 s.zero(dst, addr)
6244 }
6245 s.vars[okVar] = s.constBool(false)
6246 s.endBlock()
6247 bFail.AddEdgeTo(bEnd)
6248
6249
6250 s.startBlock(bEnd)
6251 if tmp == nil {
6252 res = s.variable(valVar, dst)
6253 delete(s.vars, valVar)
6254 } else {
6255 res = s.load(dst, addr)
6256 }
6257 resok = s.variable(okVar, types.Types[types.TBOOL])
6258 delete(s.vars, okVar)
6259 return res, resok
6260 }
6261
6262
6263 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6264 tmp := typecheck.TempAt(pos, s.curfn, t)
6265 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
6266 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6267 }
6268 addr := s.addr(tmp)
6269 return tmp, addr
6270 }
6271
6272
// variable returns the value of a variable n at the current location.
func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
	v := s.vars[n]
	if v != nil {
		return v
	}
	v = s.fwdVars[n]
	if v != nil {
		return v
	}

	if s.curBlock == s.f.Entry {
		// No variable should be live at entry.
		s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
	}
	// Make a FwdRef, which records a value that's live on block input.
	// It will be replaced with the actual definition when phis are inserted.
	v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
	s.fwdVars[n] = v
	if n.Op() == ir.ONAME {
		s.addNamedValue(n.(*ir.Name), v)
	}
	return v
}
6296
// mem returns the current memory state, as an SSA value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6300
// addNamedValue records v as a value of the named variable n, for use by
// debug-info generation (f.NamedValues). Several classes of names are
// deliberately not tracked.
func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
	if n.Class == ir.Pxxx {
		// Don't track our marker nodes (memVar etc.).
		return
	}
	if ir.IsAutoTmp(n) {
		// Don't track compiler-generated temporaries.
		return
	}
	if n.Class == ir.PPARAMOUT {
		// Don't track named output values — presumably they are
		// handled separately; confirm against result-value handling.
		return
	}
	loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
	values, ok := s.f.NamedValues[loc]
	if !ok {
		// First value for this slot: register it with the function.
		s.f.Names = append(s.f.Names, &loc)
		s.f.CanonicalLocalSlots[loc] = &loc
	}
	s.f.NamedValues[loc] = append(values, v)
}
6323
6324
// Branch is an unresolved branch: a branch instruction whose target block's
// PC is not yet known. Targets are patched in after all blocks are emitted.
type Branch struct {
	P *obj.Prog  // the branch instruction
	B *ssa.Block // the target block
}
6329
6330
// State contains state needed during Prog generation (lowering a compiled
// ssa.Func to machine instructions).
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump-table blocks we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	// maxarg — not referenced in this part of the file; presumably the
	// maximum argument space needed by calls. TODO confirm.
	maxarg int64

	// livenessMap maps values to liveness information
	// (queried via Get/GetUnsafe during value emission).
	livenessMap liveness.Map

	// partLiveArgs marks arguments that may be partially live
	// (computed by liveness.Compute).
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of
	// instructions sharing the same line number; used by Prog to float
	// statement marks up to the start of the run.
	lineRunStart *obj.Prog

	// OnWasmStackSkipped — wasm-specific counter; not used in this part
	// of the file. TODO confirm semantics against the wasm backend.
	OnWasmStackSkipped int
}
6364
// FuncInfo returns the FuncInfo for the function being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6368
6369
// Prog appends a new Prog with opcode as, and floats any statement mark up
// to the first instruction of a same-line run.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// If p starts a new line-number run, remember it; otherwise, if p
	// carries an explicit statement mark, move that mark to the run's
	// first instruction and clear it from p.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6385
6386
// Pc returns the Prog slot at which the next instruction will be emitted.
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6390
6391
// SetPos sets the current source position for subsequently emitted Progs.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6395
6396
6397
6398
// Br emits a branch instruction of opcode op toward target, recording it in
// s.Branches so the branch can be resolved once block PCs are known.
func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
	p := s.Prog(op)
	p.To.Type = obj.TYPE_BRANCH
	s.Branches = append(s.Branches, Branch{P: p, B: target})
	return p
}
6405
6406
6407
6408
6409
6410
// DebugFriendlySetPosFrom sets the emission position from v's position,
// adjusting statement marks to reduce line-number churn while debugging:
// register-allocator artifacts (phi/copy/spill/fill) are never statements,
// and positions that merely repeat the current statement's line are left
// alone or demoted to not-statement.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// The position is defined; update the emission position.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// The current position already carries a
					// statement mark for this same file and line;
					// keep it rather than replacing it with a
					// non-statement position for the same line.
					return
				}
				// Demote default/ambiguous marks to not-statement so
				// only explicit statement boundaries survive.
				p = p.WithNotStmt()
			}
			s.SetPos(p)
		} else {
			// No position on v: keep the current line, unmarked.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6449
6450
// emitArgInfo emits the argument-info symbol (FUNCDATA_ArgInfo) for the
// function being compiled, if it has a receiver or any parameters.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		// Nothing to describe.
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a FUNCDATA instruction pointing at the arg-info symbol.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6468
6469
// EmitArgInfo generates the argument-info metadata symbol for f, encoding
// (offset, size) byte pairs — plus the TraceArgs* control bytes from
// internal/abi — describing how to print f's arguments in tracebacks.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is printed component by component.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into x's data
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// write1 writes one non-aggregate component as (offset, size), or the
	// "offset too large" marker if the offset doesn't fit.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType writes the encoding of t at baseOffset, recursing into
	// aggregates up to TraceArgsMaxDepth and at most TraceArgsLimit
	// components. It returns whether the caller should keep visiting.
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words (ptr, len, cap).
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Two floats of half the complex size.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty array still counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty struct still counts as a component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the first (dictionary) parameter of instantiated generic
		// functions — presumably; confirm against the stenciling code.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6573
6574
// emitWrappedFuncInfo emits the FUNCDATA_WrapInfo symbol, which records the
// function wrapped by the function being compiled (e.curfn.WrappedFunc),
// if any.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Not emitted in linkshared mode — presumably the wrapped
		// symbol reference doesn't work there; confirm if changing.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	// Build (or reuse) a content-addressable symbol holding a reference
	// to the wrapped function's symbol.
	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a FUNCDATA instruction pointing at the wrap-info symbol.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6601
6602
6603 func genssa(f *ssa.Func, pp *objw.Progs) {
6604 var s State
6605 s.ABI = f.OwnAux.Fn.ABI()
6606
6607 e := f.Frontend().(*ssafn)
6608
6609 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6610
6611 var lv *liveness.Liveness
6612 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6613 emitArgInfo(e, f, pp)
6614 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6615
6616 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6617 if openDeferInfo != nil {
6618
6619
6620 p := pp.Prog(obj.AFUNCDATA)
6621 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6622 p.To.Type = obj.TYPE_MEM
6623 p.To.Name = obj.NAME_EXTERN
6624 p.To.Sym = openDeferInfo
6625 }
6626
6627 emitWrappedFuncInfo(e, pp)
6628
6629
6630 s.bstart = make([]*obj.Prog, f.NumBlocks())
6631 s.pp = pp
6632 var progToValue map[*obj.Prog]*ssa.Value
6633 var progToBlock map[*obj.Prog]*ssa.Block
6634 var valueToProgAfter []*obj.Prog
6635 if gatherPrintInfo {
6636 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6637 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6638 f.Logf("genssa %s\n", f.Name)
6639 progToBlock[s.pp.Next] = f.Blocks[0]
6640 }
6641
6642 if base.Ctxt.Flag_locationlists {
6643 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6644 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
6645 }
6646 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
6647 clear(valueToProgAfter)
6648 }
6649
6650
6651
6652 firstPos := src.NoXPos
6653 for _, v := range f.Entry.Values {
6654 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6655 firstPos = v.Pos
6656 v.Pos = firstPos.WithDefaultStmt()
6657 break
6658 }
6659 }
6660
6661
6662
6663
6664 var inlMarks map[*obj.Prog]int32
6665 var inlMarkList []*obj.Prog
6666
6667
6668
6669 var inlMarksByPos map[src.XPos][]*obj.Prog
6670
6671 var argLiveIdx int = -1
6672
6673
6674
6675
6676
6677 var hotAlign, hotRequire int64
6678
6679 if base.Debug.AlignHot > 0 {
6680 switch base.Ctxt.Arch.Name {
6681
6682
6683
6684
6685
6686 case "amd64", "386":
6687
6688
6689
6690 hotAlign = 64
6691 hotRequire = 31
6692 }
6693 }
6694
6695
6696 for i, b := range f.Blocks {
6697
6698 s.lineRunStart = nil
6699 s.SetPos(s.pp.Pos.WithNotStmt())
6700
6701 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
6702
6703
6704
6705
6706
6707 p := s.pp.Prog(obj.APCALIGNMAX)
6708 p.From.SetConst(hotAlign)
6709 p.To.SetConst(hotRequire)
6710 }
6711
6712 s.bstart[b.ID] = s.pp.Next
6713
6714 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
6715 argLiveIdx = idx
6716 p := s.pp.Prog(obj.APCDATA)
6717 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
6718 p.To.SetConst(int64(idx))
6719 }
6720
6721
6722 Arch.SSAMarkMoves(&s, b)
6723 for _, v := range b.Values {
6724 x := s.pp.Next
6725 s.DebugFriendlySetPosFrom(v)
6726
6727 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
6728 v.Fatalf("input[0] and output not in same register %s", v.LongString())
6729 }
6730
6731 switch v.Op {
6732 case ssa.OpInitMem:
6733
6734 case ssa.OpArg:
6735
6736 case ssa.OpSP, ssa.OpSB:
6737
6738 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
6739
6740 case ssa.OpGetG:
6741
6742
6743 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
6744
6745 case ssa.OpPhi:
6746 CheckLoweredPhi(v)
6747 case ssa.OpConvert:
6748
6749 if v.Args[0].Reg() != v.Reg() {
6750 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
6751 }
6752 case ssa.OpInlMark:
6753 p := Arch.Ginsnop(s.pp)
6754 if inlMarks == nil {
6755 inlMarks = map[*obj.Prog]int32{}
6756 inlMarksByPos = map[src.XPos][]*obj.Prog{}
6757 }
6758 inlMarks[p] = v.AuxInt32()
6759 inlMarkList = append(inlMarkList, p)
6760 pos := v.Pos.AtColumn1()
6761 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
6762 firstPos = src.NoXPos
6763
6764 default:
6765
6766 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6767 s.SetPos(firstPos)
6768 firstPos = src.NoXPos
6769 }
6770
6771
6772 s.pp.NextLive = s.livenessMap.Get(v)
6773 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
6774
6775
6776 Arch.SSAGenValue(&s, v)
6777 }
6778
6779 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
6780 argLiveIdx = idx
6781 p := s.pp.Prog(obj.APCDATA)
6782 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
6783 p.To.SetConst(int64(idx))
6784 }
6785
6786 if base.Ctxt.Flag_locationlists {
6787 valueToProgAfter[v.ID] = s.pp.Next
6788 }
6789
6790 if gatherPrintInfo {
6791 for ; x != s.pp.Next; x = x.Link {
6792 progToValue[x] = v
6793 }
6794 }
6795 }
6796
6797 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
6798 p := Arch.Ginsnop(s.pp)
6799 p.Pos = p.Pos.WithIsStmt()
6800 if b.Pos == src.NoXPos {
6801 b.Pos = p.Pos
6802 if b.Pos == src.NoXPos {
6803 b.Pos = s.pp.Text.Pos
6804 }
6805 }
6806 b.Pos = b.Pos.WithBogusLine()
6807 }
6808
6809
6810
6811
6812
6813 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
6814
6815
6816 var next *ssa.Block
6817 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
6818
6819
6820
6821
6822 next = f.Blocks[i+1]
6823 }
6824 x := s.pp.Next
6825 s.SetPos(b.Pos)
6826 Arch.SSAGenBlock(&s, b, next)
6827 if gatherPrintInfo {
6828 for ; x != s.pp.Next; x = x.Link {
6829 progToBlock[x] = b
6830 }
6831 }
6832 }
6833 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
6834
6835
6836
6837
6838 Arch.Ginsnop(s.pp)
6839 }
6840 if openDeferInfo != nil {
6841
6842
6843
6844
6845
6846
6847
6848
6849 s.pp.NextLive = s.livenessMap.DeferReturn
6850 p := s.pp.Prog(obj.ACALL)
6851 p.To.Type = obj.TYPE_MEM
6852 p.To.Name = obj.NAME_EXTERN
6853 p.To.Sym = ir.Syms.Deferreturn
6854
6855
6856
6857
6858
6859 for _, o := range f.OwnAux.ABIInfo().OutParams() {
6860 n := o.Name
6861 rts, offs := o.RegisterTypesAndOffsets()
6862 for i := range o.Registers {
6863 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
6864 }
6865 }
6866
6867 s.pp.Prog(obj.ARET)
6868 }
6869
6870 if inlMarks != nil {
6871 hasCall := false
6872
6873
6874
6875
6876 for p := s.pp.Text; p != nil; p = p.Link {
6877 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
6878 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
6879
6880
6881
6882
6883
6884 continue
6885 }
6886 if _, ok := inlMarks[p]; ok {
6887
6888
6889 continue
6890 }
6891 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
6892 hasCall = true
6893 }
6894 pos := p.Pos.AtColumn1()
6895 marks := inlMarksByPos[pos]
6896 if len(marks) == 0 {
6897 continue
6898 }
6899 for _, m := range marks {
6900
6901
6902
6903 p.Pos = p.Pos.WithIsStmt()
6904 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
6905
6906 m.As = obj.ANOP
6907 m.Pos = src.NoXPos
6908 m.From = obj.Addr{}
6909 m.To = obj.Addr{}
6910 }
6911 delete(inlMarksByPos, pos)
6912 }
6913
6914 for _, p := range inlMarkList {
6915 if p.As != obj.ANOP {
6916 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
6917 }
6918 }
6919
6920 if e.stksize == 0 && !hasCall {
6921
6922
6923
6924
6925
6926
6927 for p := s.pp.Text; p != nil; p = p.Link {
6928 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
6929 continue
6930 }
6931 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
6932
6933 nop := Arch.Ginsnop(s.pp)
6934 nop.Pos = e.curfn.Pos().WithIsStmt()
6935
6936
6937
6938
6939
6940 for x := s.pp.Text; x != nil; x = x.Link {
6941 if x.Link == nop {
6942 x.Link = nop.Link
6943 break
6944 }
6945 }
6946
6947 for x := s.pp.Text; x != nil; x = x.Link {
6948 if x.Link == p {
6949 nop.Link = p
6950 x.Link = nop
6951 break
6952 }
6953 }
6954 }
6955 break
6956 }
6957 }
6958 }
6959
6960 if base.Ctxt.Flag_locationlists {
6961 var debugInfo *ssa.FuncDebug
6962 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
6963 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
6964 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
6965 } else {
6966 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
6967 }
6968 bstart := s.bstart
6969 idToIdx := make([]int, f.NumBlocks())
6970 for i, b := range f.Blocks {
6971 idToIdx[b.ID] = i
6972 }
6973
6974
6975
6976 debugInfo.GetPC = func(b, v ssa.ID) int64 {
6977 switch v {
6978 case ssa.BlockStart.ID:
6979 if b == f.Entry.ID {
6980 return 0
6981
6982 }
6983 return bstart[b].Pc
6984 case ssa.BlockEnd.ID:
6985 blk := f.Blocks[idToIdx[b]]
6986 nv := len(blk.Values)
6987 return valueToProgAfter[blk.Values[nv-1].ID].Pc
6988 case ssa.FuncEnd.ID:
6989 return e.curfn.LSym.Size
6990 default:
6991 return valueToProgAfter[v].Pc
6992 }
6993 }
6994 }
6995
6996
6997 for _, br := range s.Branches {
6998 br.P.To.SetTarget(s.bstart[br.B.ID])
6999 if br.P.Pos.IsStmt() != src.PosIsStmt {
7000 br.P.Pos = br.P.Pos.WithNotStmt()
7001 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7002 br.P.Pos = br.P.Pos.WithNotStmt()
7003 }
7004
7005 }
7006
7007
7008 for _, jt := range s.JumpTables {
7009
7010 targets := make([]*obj.Prog, len(jt.Succs))
7011 for i, e := range jt.Succs {
7012 targets[i] = s.bstart[e.Block().ID]
7013 }
7014
7015
7016
7017 fi := s.pp.CurFunc.LSym.Func()
7018 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7019 }
7020
7021 if e.log {
7022 filename := ""
7023 for p := s.pp.Text; p != nil; p = p.Link {
7024 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7025 filename = p.InnermostFilename()
7026 f.Logf("# %s\n", filename)
7027 }
7028
7029 var s string
7030 if v, ok := progToValue[p]; ok {
7031 s = v.String()
7032 } else if b, ok := progToBlock[p]; ok {
7033 s = b.String()
7034 } else {
7035 s = " "
7036 }
7037 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7038 }
7039 }
7040 if f.HTMLWriter != nil {
7041 var buf strings.Builder
7042 buf.WriteString("<code>")
7043 buf.WriteString("<dl class=\"ssa-gen\">")
7044 filename := ""
7045
7046 liveness := lv.Format(nil)
7047 if liveness != "" {
7048 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7049 buf.WriteString(html.EscapeString("# " + liveness))
7050 buf.WriteString("</dd>")
7051 }
7052
7053 for p := s.pp.Text; p != nil; p = p.Link {
7054
7055
7056 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7057 filename = p.InnermostFilename()
7058 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7059 buf.WriteString(html.EscapeString("# " + filename))
7060 buf.WriteString("</dd>")
7061 }
7062
7063 buf.WriteString("<dt class=\"ssa-prog-src\">")
7064 if v, ok := progToValue[p]; ok {
7065
7066
7067 if p.As != obj.APCDATA {
7068 if liveness := lv.Format(v); liveness != "" {
7069
7070 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7071 buf.WriteString(html.EscapeString("# " + liveness))
7072 buf.WriteString("</dd>")
7073
7074 buf.WriteString("<dt class=\"ssa-prog-src\">")
7075 }
7076 }
7077
7078 buf.WriteString(v.HTML())
7079 } else if b, ok := progToBlock[p]; ok {
7080 buf.WriteString("<b>" + b.HTML() + "</b>")
7081 }
7082 buf.WriteString("</dt>")
7083 buf.WriteString("<dd class=\"ssa-prog\">")
7084 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7085 buf.WriteString("</dd>")
7086 }
7087 buf.WriteString("</dl>")
7088 buf.WriteString("</code>")
7089 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7090 }
7091 if ssa.GenssaDump[f.Name] {
7092 fi := f.DumpFileForPhase("genssa")
7093 if fi != nil {
7094
7095
7096 inliningDiffers := func(a, b []src.Pos) bool {
7097 if len(a) != len(b) {
7098 return true
7099 }
7100 for i := range a {
7101 if a[i].Filename() != b[i].Filename() {
7102 return true
7103 }
7104 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7105 return true
7106 }
7107 }
7108 return false
7109 }
7110
7111 var allPosOld []src.Pos
7112 var allPos []src.Pos
7113
7114 for p := s.pp.Text; p != nil; p = p.Link {
7115 if p.Pos.IsKnown() {
7116 allPos = allPos[:0]
7117 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7118 if inliningDiffers(allPos, allPosOld) {
7119 for _, pos := range allPos {
7120 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7121 }
7122 allPos, allPosOld = allPosOld, allPos
7123 }
7124 }
7125
7126 var s string
7127 if v, ok := progToValue[p]; ok {
7128 s = v.String()
7129 } else if b, ok := progToBlock[p]; ok {
7130 s = b.String()
7131 } else {
7132 s = " "
7133 }
7134 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7135 }
7136 fi.Close()
7137 }
7138 }
7139
7140 defframe(&s, e, f)
7141
7142 f.HTMLWriter.Close()
7143 f.HTMLWriter = nil
7144 }
7145
// defframe finalizes the function prologue bookkeeping: it records the
// final frame and argument sizes on the TEXT instruction, inserts spills
// for partially-live register arguments, and emits code to zero
// ambiguously-live (Needzero) stack slots.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Frame size is the outgoing-argument area (rounded up to the
	// architecture's stack alignment) plus the local-variable area.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size on the TEXT pseudo-instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live. Skipped under -N (base.Flag.N != 0) — presumably
	// because arguments are handled differently without optimization;
	// TODO(review): confirm.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, record which (name, offset) pieces the entry block
		// already spills before any call, so we don't spill them twice.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				// Stop at the first call; spills after it don't count.
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then spill the pointer-carrying register pieces of partially
		// live, multi-register arguments that were not already spilled.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so that the
	// garbage collector only sees initialized values when it looks
	// for pointers. lo/hi delimit the current run of frame offsets
	// to be zeroed (relative to the frame base).
	var lo, hi int64

	// Opaque state for the backend's ZeroRange to thread between calls
	// (e.g. which helper registers have already been set up).
	var state uint32

	// Iterate through declarations, coalescing nearby Needzero autos
	// into ranges. Note the merge condition below implies autos are
	// visited in decreasing frame-offset order.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with the range we already have: the gap is at most
			// two register-sized words, so one combined zeroing is cheaper.
			lo = n.FrameOffset()
			continue
		}

		// Zero out the previous accumulated range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new range at this variable.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero the final range (no-op when hi-lo == 0).
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7252
7253
// An IndexJump describes one conditional branch of a multi-instruction
// jump sequence: the assembler opcode to emit and the index of the
// block successor it targets.
type IndexJump struct {
	Jump  obj.As // branch instruction to emit
	Index int    // index into b.Succs of the branch target
}
7258
7259 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7260 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7261 p.Pos = b.Pos
7262 }
7263
7264
7265
7266 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7267 switch next {
7268 case b.Succs[0].Block():
7269 s.oneJump(b, &jumps[0][0])
7270 s.oneJump(b, &jumps[0][1])
7271 case b.Succs[1].Block():
7272 s.oneJump(b, &jumps[1][0])
7273 s.oneJump(b, &jumps[1][1])
7274 default:
7275 var q *obj.Prog
7276 if b.Likely != ssa.BranchUnlikely {
7277 s.oneJump(b, &jumps[1][0])
7278 s.oneJump(b, &jumps[1][1])
7279 q = s.Br(obj.AJMP, b.Succs[1].Block())
7280 } else {
7281 s.oneJump(b, &jumps[0][0])
7282 s.oneJump(b, &jumps[0][1])
7283 q = s.Br(obj.AJMP, b.Succs[0].Block())
7284 }
7285 q.Pos = b.Pos
7286 }
7287 }
7288
7289
// AddAux adds the offset and symbol information from v's Aux/AuxInt
// fields to the address a.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
// AddAux2 is like AddAux, but allows the caller to supply the integer
// offset explicitly instead of taking it from v.AuxInt.
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// Add the integer offset.
	a.Offset += offset

	// If there is no symbol component, we're done.
	if v.Aux == nil {
		return
	}
	// Add the symbol information, and the symbol's own frame offset
	// where applicable.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		// Stack-resident params (and results not returned in
		// registers) are NAME_PARAM; everything else is NAME_AUTO.
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
		} else {
			a.Name = obj.NAME_AUTO
		}
		a.Sym = n.Linksym()
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}
7324
7325
7326
// extendIndex extends idx to full pointer width so it can be used as an
// index. On 32-bit targets a 64-bit index is truncated after checking
// (unless bounded) that its high word is zero; len and kind describe
// the bounds-check panic to raise when it is not.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already exactly pointer-width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// Truncate a 64-bit index on a 32-bit pointer arch. Test the
		// high word and branch to an out-of-bounds panic if nonzero.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Caller proved the index in range, or bounds checks are
			// disabled (-B): skip the high-word check.
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Switch to the unsigned variant of the bounds-check kind
			// so the panic message matches the index's signedness.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// hi == 0 is the likely (in-bounds) path.
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// The index is narrower than a pointer: extend it, picking the
	// sign/zero-extension op from (source size, pointer size).
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7418
7419
7420
7421 func CheckLoweredPhi(v *ssa.Value) {
7422 if v.Op != ssa.OpPhi {
7423 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7424 }
7425 if v.Type.IsMemory() {
7426 return
7427 }
7428 f := v.Block.Func
7429 loc := f.RegAlloc[v.ID]
7430 for _, a := range v.Args {
7431 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7432 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7433 }
7434 }
7435 }
7436
7437
7438
7439
7440
7441 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7442 entry := v.Block.Func.Entry
7443 if entry != v.Block {
7444 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7445 }
7446 for _, w := range entry.Values {
7447 if w == v {
7448 break
7449 }
7450 switch w.Op {
7451 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7452
7453 default:
7454 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7455 }
7456 }
7457 }
7458
7459
7460 func CheckArgReg(v *ssa.Value) {
7461 entry := v.Block.Func.Entry
7462 if entry != v.Block {
7463 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7464 }
7465 }
7466
7467 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7468 n, off := ssa.AutoVar(v)
7469 a.Type = obj.TYPE_MEM
7470 a.Sym = n.Linksym()
7471 a.Reg = int16(Arch.REGSP)
7472 a.Offset = n.FrameOffset() + off
7473 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7474 a.Name = obj.NAME_PARAM
7475 } else {
7476 a.Name = obj.NAME_AUTO
7477 }
7478 }
7479
7480
7481
// Call returns a new CALL instruction for the SSA value v.
// It uses PrepareCall to do call-related bookkeeping first.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	// The statement-ness of the call comes from the last position
	// added before the call, not from the call itself, so capture it
	// before PrepareCall can emit anything.
	pPosIsStmt := s.pp.Pos.IsStmt()
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Direct call to a known symbol.
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call: the target address is in v.Args[0]'s register.
		// The addressing mode (REG vs MEM) is architecture-dependent.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
7510
7511
7512
7513 func (s *State) TailCall(v *ssa.Value) *obj.Prog {
7514 p := s.Call(v)
7515 p.As = obj.ARET
7516 return p
7517 }
7518
7519
7520
7521
// PrepareCall does call-related bookkeeping for v immediately before
// the CALL instruction is emitted: it validates the call's liveness
// (stack map) index, records the call edge for the nowritebarrierrec
// check, and grows the outgoing-argument area to fit the call.
func (s *State) PrepareCall(v *ssa.Value) {
	idx := s.livenessMap.Get(v)
	if !idx.StackMapValid() {
		// Only the write-barrier helpers wbzero/wbmove are allowed to
		// lack a stack map index; anything else is a compiler bug.
		if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
			base.Fatalf("missing stack map index for %v", v.LongString())
		}
	}

	call, ok := v.Aux.(*ssa.AuxCall)

	if ok {
		// Record the call edge so the nowritebarrierrec analysis can
		// later verify the no-write-barrier property transitively.
		if nowritebarrierrecCheck != nil {
			nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
		}
	}

	// v.AuxInt is the size of the call's outgoing argument area.
	if s.maxarg < v.AuxInt {
		s.maxarg = v.AuxInt
	}
}
7545
7546
7547
7548 func (s *State) UseArgs(n int64) {
7549 if s.maxarg < n {
7550 s.maxarg = n
7551 }
7552 }
7553
7554
7555 func fieldIdx(n *ir.SelectorExpr) int {
7556 t := n.X.Type()
7557 if !t.IsStruct() {
7558 panic("ODOT's LHS is not a struct")
7559 }
7560
7561 for i, f := range t.Fields() {
7562 if f.Sym == n.Sel {
7563 if f.Offset != n.Offset() {
7564 panic("field offset doesn't match")
7565 }
7566 return i
7567 }
7568 }
7569 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7570
7571
7572
7573 }
7574
7575
7576
// ssafn holds frontend information about a function that the SSA
// backend is processing. It also implements the compiler-services
// interface the ssa package calls back into (Logf, Fatalf, Syslook, ...).
type ssafn struct {
	curfn      *ir.Func
	strings    map[string]*obj.LSym // map from constant string to data symbol (see StringData)
	stksize    int64                // stack size for current frame
	stkptrsize int64                // prefix of stack containing pointers

	// stkalign is the alignment for the current frame.
	// NOTE(review): presumably, when stkalign > PtrSize this only
	// aligns object offsets within the frame, not the stack pointer
	// itself — confirm against the architecture backends.
	stkalign int64

	log bool // print ssa debug output to stdout
}
7591
7592
7593
7594 func (e *ssafn) StringData(s string) *obj.LSym {
7595 if aux, ok := e.strings[s]; ok {
7596 return aux
7597 }
7598 if e.strings == nil {
7599 e.strings = make(map[string]*obj.LSym)
7600 }
7601 data := staticdata.StringSym(e.curfn.Pos(), s)
7602 e.strings[s] = data
7603 return data
7604 }
7605
7606
// SplitSlot returns a slot representing the data of parent starting at
// offset, creating a fresh auto variable named after the parent plus
// suffix when the parent can truly be split.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	if node.Class != ir.PAUTO || node.Addrtaken() {
		// Addressed things and non-autos retain their parents (i.e.,
		// cannot truly be split): return a view into the parent slot.
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	// Create a new local named e.g. "x.hi" for the split piece.
	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever)
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}
7622
7623
7624 func (e *ssafn) Logf(msg string, args ...interface{}) {
7625 if e.log {
7626 fmt.Printf(msg, args...)
7627 }
7628 }
7629
// Log reports whether SSA debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7633
7634
// Fatalf reports a compiler error at pos, prefixing the message with
// the name of the function being compiled.
func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
	base.Pos = pos
	nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
	base.Fatalf("'%s': "+msg, nargs...)
}
7640
7641
7642
// Warnl reports a compiler diagnostic attached to the given source position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7646
// Debug_checknil reports whether nil-check debugging (-d nil) is enabled.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7650
// UseWriteBarrier reports whether write barriers are enabled (-wb flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7654
// Syslook returns the linker symbol for the named runtime helper.
// Unknown names are a fatal compiler error.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil // unreachable; Fatalf does not return
}
7673
// Func returns the function currently being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7677
7678 func clobberBase(n ir.Node) ir.Node {
7679 if n.Op() == ir.ODOT {
7680 n := n.(*ir.SelectorExpr)
7681 if n.X.Type().NumFields() == 1 {
7682 return clobberBase(n.X)
7683 }
7684 }
7685 if n.Op() == ir.OINDEX {
7686 n := n.(*ir.IndexExpr)
7687 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7688 return clobberBase(n.X)
7689 }
7690 }
7691 return n
7692 }
7693
7694
// callTargetLSym returns the correct LSym to call callee using its ABI.
func callTargetLSym(callee *ir.Name) *obj.LSym {
	if callee.Func == nil {
		// No *ir.Func available (e.g. a method referenced only through
		// an imported package's export data — TODO confirm), so the
		// ABI is unknown; fall back to the default linker symbol.
		return callee.Linksym()
	}

	return callee.LinksymABI(callee.Func.ABI)
}
7705
7706
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct verifies it.
const deferStructFnField = 4

// deferType caches the type built by deferstruct.
var deferType *types.Type
7710
7711
7712
// deferstruct returns a type mirroring the runtime._defer structure
// (the field list here must stay in sync with the runtime's — TODO
// confirm against runtime/runtime2.go). The result is built once and
// cached in deferType.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield creates an anonymous-package field so the names don't
	// collide with user symbols.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// fn, link, and head are declared as uintptr here even though
		// some hold pointers at runtime; presumably the runtime scans
		// defer records specially, so precise types are not needed —
		// TODO confirm.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Keep deferStructFnField in sync with the field list above.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Build the struct holding the above fields and compute its layout.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
7751
7752
7753
7754
7755
// SpillSlotAddr uses spill-slot information to build an obj.Addr
// referring to the slot relative to baseReg. baseReg and extraOffset
// are register-allocator- and/or architecture-specific.
func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
	return obj.Addr{
		Name:   obj.NAME_NONE,
		Type:   obj.TYPE_MEM,
		Reg:    baseReg,
		Offset: spill.Offset + extraOffset,
	}
}
7764
var (
	// BoundsCheckFunc maps each bounds-check kind to the symbol of the
	// runtime function that raises the corresponding panic.
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	// ExtendCheckFunc is the analogous table used on 32-bit targets
	// when a 64-bit index fails the extension check (see extendIndex).
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)
7769
View as plain text