1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "fmt"
11 "go/constant"
12 "html"
13 "internal/buildcfg"
14 "os"
15 "path/filepath"
16 "slices"
17 "strings"
18
19 "cmd/compile/internal/abi"
20 "cmd/compile/internal/base"
21 "cmd/compile/internal/ir"
22 "cmd/compile/internal/liveness"
23 "cmd/compile/internal/objw"
24 "cmd/compile/internal/reflectdata"
25 "cmd/compile/internal/rttype"
26 "cmd/compile/internal/ssa"
27 "cmd/compile/internal/staticdata"
28 "cmd/compile/internal/typecheck"
29 "cmd/compile/internal/types"
30 "cmd/internal/obj"
31 "cmd/internal/objabi"
32 "cmd/internal/src"
33 "cmd/internal/sys"
34
35 rtabi "internal/abi"
36 )
37
// Shared SSA backend state, initialized once in InitConfig.
var ssaConfig *ssa.Config

// Per-worker caches; sized by the -c compiler flag (see InitConfig).
var ssaCaches []ssa.Cache

// Dump controls, populated from the environment in InitEnv.
var ssaDump string       // early copy of $GOSSAFUNC; the function to dump output for
var ssaDir string        // optional destination for ssa dump file ($GOSSADIR)
var ssaDumpStdout bool   // whether to dump to stdout (GOSSAFUNC ends in "+")
var ssaDumpCFG string    // generate CFGs for these phases (":"-suffix of GOSSAFUNC)
const ssaDumpFile = "ssa.html"

// Functions that have been inlined into a function being dumped;
// their sources are shown alongside it (see dumpSourcesColumn).
var ssaDumpInlined []*ir.Func
49
50 func DumpInline(fn *ir.Func) {
51 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
52 ssaDumpInlined = append(ssaDumpInlined, fn)
53 }
54 }
55
56 func InitEnv() {
57 ssaDump = os.Getenv("GOSSAFUNC")
58 ssaDir = os.Getenv("GOSSADIR")
59 if ssaDump != "" {
60 if strings.HasSuffix(ssaDump, "+") {
61 ssaDump = ssaDump[:len(ssaDump)-1]
62 ssaDumpStdout = true
63 }
64 spl := strings.Split(ssaDump, ":")
65 if len(spl) > 1 {
66 ssaDump = spl[0]
67 ssaDumpCFG = spl[1]
68 }
69 }
70 }
71
// InitConfig builds the shared ssa.Config and resolves all runtime
// symbols the SSA backend needs. It must run before any buildssa call.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Pre-populate the pointer-type cache with types the backend needs,
	// since NewPtrCacheEnabled is switched off just below.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	// The map header type differs between the swissmap experiment and
	// the old implementation.
	if buildcfg.Experiment.SwissMap {
		_ = types.NewPtr(reflectdata.SwissMapType()) // *runtime.hmap (swiss variant)
	} else {
		_ = types.NewPtr(reflectdata.OldMapType()) // *runtime.hmap
	}
	_ = types.NewPtr(deferstruct()) // *runtime._defer
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC) // one cache per concurrent backend worker (-c)

	// Resolve the runtime functions and variables the backend emits
	// references to.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")       // bool
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")         // bool
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")             // bool
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")         // bool
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS") // bool
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")                 // asm func with special ABI
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier") // struct { bool; ... }
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")

	// Wasm traps on out-of-bounds, so it uses the "go" variants of the
	// bounds-check panics, which take Go-visible arguments; all other
	// architectures use the assembly-ABI variants.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	// On 32-bit systems, 64-bit index bounds checks go through separate
	// "extend" helpers.
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm-specific helpers (looked up unconditionally; only referenced
	// when targeting wasm).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
223
// InitTables initializes the intrinsic lookup tables used when lowering
// calls to compiler intrinsics.
func InitTables() {
	initIntrinsics(nil)
}
227
228
229
230
231
232
233
234
// AbiForBodylessFuncStackMap returns the ABI for a bodyless function's
// stack map. This is not necessarily the ABI used to call it; currently
// it is always ABI0 (a copy, so the caller may modify it freely).
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy() // No idea what races will result, be safe
}
238
239
240
241 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
242 if buildcfg.Experiment.RegabiArgs {
243
244 if fn == nil {
245 return abi1
246 }
247 switch fn.ABI {
248 case obj.ABI0:
249 return abi0
250 case obj.ABIInternal:
251
252
253 return abi1
254 }
255 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
256 panic("not reachable")
257 }
258
259 a := abi0
260 if fn != nil {
261 if fn.Pragma&ir.RegisterParams != 0 {
262 a = abi1
263 }
264 }
265 return a
266 }
267
268
269
270
271
272
273
274
275
276
277
278
// emitOpenDeferInfo emits FUNCDATA information about the function's
// open-coded defers: the frame offset of the defer-bits temporary and
// the frame offset of the first closure slot, both varint-encoded as
// negated offsets.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that cmpstackvarlt laid out the open-coded defer closure
	// slots contiguously: slot #i must sit exactly i pointers past the
	// first slot.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	// Content-addressable symbol holding the encoded defer info,
	// attached to the function's Func aux data.
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative; encode their magnitudes.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
299
300
301
// buildssa builds an SSA function for fn.
// worker indicates which of the backend workers is doing the processing
// (it selects the ssa.Cache to use).
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// Match the function against the GOSSAFUNC pattern. A "," in the
	// pattern (or a "<N>" suffix, rewritten below) selects a specific
	// ABI; the pattern may also be package-qualified or a path suffix.
	if strings.Contains(ssaDump, name) { // in all the cases the function name is entirely contained within the GOSSAFUNC string.
		nameOptABI := name
		if strings.Contains(ssaDump, ",") { // ABI specification
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") { // if they use the linker syntax instead....
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				// Canonicalize "name<N>" to "name,N" for future matches.
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "(*Reader).Reset"
			pkgDotName == ssaDump || // "compress/gzip.(*Reader).Reset"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "gzip.(*Reader).Reset"
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the AST dump so it can be written to the HTML output
		// (and stdout, if requested) below.
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		// Memory instrumentation is suppressed for norace-marked
		// packages only under the race detector.
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			// Best-effort; NewHTMLWriter will report a failure to create
			// the file itself.
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// TODO: generate and print a mapping from nodes to values and blocks
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether defers in this function can be open-coded.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Don't support open-coded defers for 386 in shared/dynlink
		// mode (the extra deferreturn-related relocations are
		// problematic there; see the switch arm's history for detail).
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Open-coded defers skip the normal deferreturn path, which
		// conflicts with the racefuncexit instrumentation on exit.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Open-coded defers require all results to live on the stack
		// (register results would not be visible to the runtime's
		// panic machinery).
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Every defer is re-emitted at every return; cap the code-size
		// blowup at returns*defers > 15.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR]) // TODO: use generic pointer type (unsafe.Pointer?) instead
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and stack slot. deferBits is a
		// bitmask recording which defers are active, stored both as an
		// SSA variable and (address-taken) on the stack so the runtime
		// can read it during panic processing.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// For this value, AuxInt is initialized to zero by default
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Mark the stack slot live from the start, so it is preserved
		// across any panic before the first defer statement executes.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Increment the counter (as set up above) ... NOTE(review): collect
	// the output parameters that are returned in registers so the DWARF
	// generator can materialize them at return points.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			// Be aware that blank and unnamed input parameters will not appear here, but do appear in the type
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // This helps with debugging information, not needed for compilation itself.
			} else { // address was taken AND/OR too large for SSA
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) { // SSA-able type, so address was taken -- receive value in OpArg, DO NOT bind to var, store immediately to memory.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else { // Too big for SSA.
						// Brute force, and early, do a bunch of stores from registers
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// With optimization disabled, spill the closure pointer to
			// an address-taken stack slot (CloSlot) so it stays
			// findable; used for range-over-func bodies
			// (fn.RangeParent != nil).
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep it from being dead-store eliminated.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// If n is a small variable captured by value, promote it to
			// a local (PAUTO) and load it from the closure; otherwise
			// record its address (dereferenced first for by-reference
			// captures) as the variable's heap address.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// fallthrough to exit
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to ssa package to compile function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information into s.f.RegArgs:
	// for every register-passed input, note the register, the frame
	// offset of its spill slot, and its type. NOTE(review): presumably
	// consumed by the prologue/morestack spill logic — confirm against
	// genssa.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
624
625 func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
626 typs, offs := paramAssignment.RegisterTypesAndOffsets()
627 for i, t := range typs {
628 if pointersOnly && !t.IsPtrShaped() {
629 continue
630 }
631 r := paramAssignment.Registers[i]
632 o := offs[i]
633 op, reg := ssa.ArgOpAndRegisterFor(r, abi)
634 aux := &ssa.AuxNameOffset{Name: n, Offset: o}
635 v := s.newValue0I(op, t, reg)
636 v.Aux = aux
637 p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
638 s.store(t, p, v)
639 }
640 }
641
642
643
644
645
646
647
648 func (s *state) zeroResults() {
649 for _, f := range s.curfn.Type().Results() {
650 n := f.Nname.(*ir.Name)
651 if !n.OnStack() {
652
653
654
655 continue
656 }
657
658 if typ := n.Type(); ssa.CanSSA(typ) {
659 s.assign(n, s.zeroVal(typ), false, 0)
660 } else {
661 if typ.HasPointers() || ssa.IsMergeCandidate(n) {
662 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
663 }
664 s.zero(n.Type(), s.decladdrs[n])
665 }
666 }
667 }
668
669
670
671 func (s *state) paramsToHeap() {
672 do := func(params []*types.Field) {
673 for _, f := range params {
674 if f.Nname == nil {
675 continue
676 }
677 n := f.Nname.(*ir.Name)
678 if ir.IsBlank(n) || n.OnStack() {
679 continue
680 }
681 s.newHeapaddr(n)
682 if n.Class == ir.PPARAM {
683 s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
684 }
685 }
686 }
687
688 typ := s.curfn.Type()
689 do(typ.Recvs())
690 do(typ.Params())
691 do(typ.Results())
692 }
693
694
// newHeapaddr allocates heap storage for n and sets n's Heapaddr
// accordingly (see setHeapaddr).
func (s *state) newHeapaddr(n *ir.Name) {
	s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
}
698
699
700
// setHeapaddr allocates a new PAUTO variable to store ptr (which must
// be non-nil and of type *T, where T is n's type) and then sets it as
// n's heap address.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the "&x" pseudo-variable that holds the heap address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		// Mark so the runtime can find the result value during panic
		// processing (output param held in a heap object).
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
719
720
// newObject returns an SSA value denoting new(typ). If rtype is nil,
// the *runtime._type for typ is materialized in the entry block.
func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
	if typ.Size() == 0 {
		// Zero-sized allocations all share runtime.zerobase;
		// no call to newobject is needed.
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	if rtype == nil {
		rtype = s.reflectType(typ)
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
730
731 func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
732 if !n.Type().IsPtr() {
733 s.Fatalf("expected pointer type: %v", n.Type())
734 }
735 elem, rtypeExpr := n.Type().Elem(), n.ElemRType
736 if count != nil {
737 if !elem.IsArray() {
738 s.Fatalf("expected array type: %v", elem)
739 }
740 elem, rtypeExpr = elem.Elem(), n.ElemElemRType
741 }
742 size := elem.Size()
743
744 if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
745 return
746 }
747 if count == nil {
748 count = s.constInt(types.Types[types.TUINTPTR], 1)
749 }
750 if count.Type.Size() != s.config.PtrSize {
751 s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
752 }
753 var rtype *ssa.Value
754 if rtypeExpr != nil {
755 rtype = s.expr(rtypeExpr)
756 } else {
757 rtype = s.reflectType(elem)
758 }
759 s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
760 }
761
762
763
// reflectType returns an SSA value representing a pointer to typ's
// reflection type descriptor (*runtime._type), materialized in the
// entry block.
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	// TODO(mdempsky): Make this Type instead of *byte? (It is *uint8
	// here only because that's what OpAddr historically used.)
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
770
771 func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
772
773 fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
774 targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
775 if err != nil {
776 writer.Logf("cannot read sources for function %v: %v", fn, err)
777 }
778
779
780 var inlFns []*ssa.FuncLines
781 for _, fi := range ssaDumpInlined {
782 elno := fi.Endlineno
783 fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
784 fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
785 if err != nil {
786 writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
787 continue
788 }
789 inlFns = append(inlFns, fnLines)
790 }
791
792 slices.SortFunc(inlFns, ssa.ByTopoCmp)
793 if targetFn != nil {
794 inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
795 }
796
797 writer.WriteSources("sources", inlFns)
798 }
799
800 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
801 f, err := os.Open(os.ExpandEnv(file))
802 if err != nil {
803 return nil, err
804 }
805 defer f.Close()
806 var lines []string
807 ln := uint(1)
808 scanner := bufio.NewScanner(f)
809 for scanner.Scan() && ln <= end {
810 if ln >= start {
811 lines = append(lines, scanner.Text())
812 }
813 ln++
814 }
815 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
816 }
817
818
819
820
// updateUnsetPredPos propagates the earliest visible position within
// block b to predecessors of b that still lack a position: each such
// predecessor receives b's position (or the position of b's first
// position-bearing value), and the fix is propagated recursively.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			// Predecessor already has a meaningful position; leave it.
			continue
		}
		// Compute bestPos lazily, only once per call.
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Assume values are still in roughly statement order;
					// the first position-bearing value is the best guess.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// Recurse: p's own predecessors may also lack positions.
		s.updateUnsetPredPos(p)
	}
}
849
850
// openDeferInfo records metadata about a single open-coded defer.
type openDeferInfo struct {
	// The node representing the call of the defer.
	n *ir.CallExpr

	// If defer call is closure call, the address of the argtmp where the
	// closure is stored.
	closure *ssa.Value

	// The node representing the argtmp where the closure is stored — used for
	// function and method calls (its frame offset feeds emitOpenDeferInfo).
	closureNode *ir.Name
}
862
// state holds all the working state while converting a single function's
// IR to SSA.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol
	// to ssa value); the ir.Node is the unique identifier (an ONAME Node)
	// for the variable.
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the
	// current block. This map exists just to coalesce multiple references
	// into a single FwdRef op; *ir.Node is the unique identifier.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by block ID.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables (populated in buildssa).
	decladdrs map[*ir.Name]*ssa.Value

	// starting values; memory, stack pointer, and static base pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// value representing address of where deferBits autotmp is stored,
	// and the autotmp itself (open-coded defers only)
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack; the current line number is top of stack
	line []src.XPos

	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number, so we can
	// deduplicate panic calls
	panics map[funcLine]*ssa.Block

	// per-function flags, set once in buildssa
	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order. Hence, at exit we should run these defers in reverse
	// order of this list.
	openDefers []*openDeferInfo

	// For open-coded defers: cached exit code / blocks so multiple returns
	// can share one defer-exit sequence (see the exit handling).
	lastDeferExit       *ssa.Block // Entry block of last defer exit code we generated
	lastDeferFinalBlock *ssa.Block // Final block of last defer exit code we generated
	lastDeferCount      int        // Number of defers encountered at that point

	// most recently emitted call value (e.g. for attaching results)
	prevCall *ssa.Value
}
938
// funcLine identifies a (function, source position) pair; used as the key
// for deduplicating panic-call blocks (see state.panics).
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}

// ssaLabel holds the SSA blocks associated with a Go label.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
950
951
952 func (s *state) label(sym *types.Sym) *ssaLabel {
953 lab := s.labels[sym.Name]
954 if lab == nil {
955 lab = new(ssaLabel)
956 s.labels[sym.Name] = lab
957 }
958 return lab
959 }
960
// Thin delegators to the underlying ssa.Func / frontend logging and
// diagnostics facilities.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }

// Fatalf reports a compiler error at the current source position.
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }
968
// ssaMarker returns a unique ir.Name used only as a key in s.vars/s.defvars
// to track pseudo-variables (memory, pointer, length, ...); it never
// appears in generated code.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
972
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables used during SSA construction
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
986
987
// startBlock sets the current block we're generating code in to b.
// The previous block must have been ended (curBlock == nil).
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	// Forward references carry over between blocks only until resolved;
	// reset them for the new block.
	clear(s.fwdVars)
}
996
997
998
999
// endBlock marks the end of generating code for the current block and
// returns it. Returns nil if there is no current block (i.e. if there
// was no code flowing to the current block).
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	// Record the block's final variable state, growing defvars as needed
	// (indexed by block ID).
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Empty plain blocks get the position of their successor,
		// or no position at all — filled in later by
		// updateUnsetPredPos.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}
1021
1022
// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// Unknown positions inherit the current position; warn under
		// the -K flag so the missing position can be tracked down.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}

// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1047
1048
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary
// (i.e., false means the position is marked NotStmt).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement boundary.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1147
// entryBlock returns the block to which entry values (see entryNewValue*)
// should be added. Normally this is the function's entry block, but when
// optimization is disabled (-N) and there is a current block, values are
// placed in the current block instead — presumably to keep values near
// their use for debugging; TODO(review) confirm the motivation.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		b = s.curBlock
	}
	return b
}
1159
1160
1161 func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
1162 return s.entryBlock().NewValue0(src.NoXPos, op, t)
1163 }
1164
1165
1166 func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
1167 return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
1168 }
1169
1170
1171 func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
1172 return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
1173 }
1174
1175
1176 func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
1177 return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
1178 }
1179
1180
1181 func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
1182 return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
1183 }
1184
1185
1186 func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
1187 return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
1188 }
1189
1190
1191 func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
1192 return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
1193 }
1194
1195
// constSlice returns the zero-value (nil) slice constant of type t.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
// constInterface returns the zero-value (nil) interface constant of type t.
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
// constNil returns the nil pointer constant of type t.
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
// constEmptyString returns the empty-string constant of type t.
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
// constBool returns the boolean constant c, always of type bool.
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
// constInt8 returns the 8-bit integer constant c of type t.
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
// constInt16 returns the 16-bit integer constant c of type t.
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
// constInt32 returns the 32-bit integer constant c of type t.
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
// constInt64 returns the 64-bit integer constant c of type t.
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
// constFloat32 returns the 32-bit float constant c of type t.
// c is carried as float64; the conversion happens in ConstFloat32.
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
// constFloat64 returns the 64-bit float constant c of type t.
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}
1227 func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
1228 if s.config.PtrSize == 8 {
1229 return s.constInt64(t, c)
1230 }
1231 if int64(int32(c)) != c {
1232 s.Fatalf("integer constant too big %d", c)
1233 }
1234 return s.constInt32(t, int32(c))
1235 }
// constOffPtrSP returns a pointer constant of type t at offset c from the
// stack pointer s.sp.
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}
1239
1240
1241
1242 func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
1243 if s.softFloat {
1244 if c, ok := s.sfcall(op, arg); ok {
1245 return c
1246 }
1247 }
1248 return s.newValue1(op, t, arg)
1249 }
1250 func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
1251 if s.softFloat {
1252 if c, ok := s.sfcall(op, arg0, arg1); ok {
1253 return c
1254 }
1255 }
1256 return s.newValue2(op, t, arg0, arg1)
1257 }
1258
// instrumentKind describes the kind of sanitizer instrumentation (race,
// msan, asan) to insert for a memory operation.
type instrumentKind uint8

const (
	// Declared with an explicit instrumentKind type so the constants cannot
	// be mixed up with plain ints; previously they were untyped.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1266
// instrument inserts sanitizer instrumentation (if enabled) for a single
// memory operation of kind on the w=t.Size() bytes at addr.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1270
1271
1272
1273
// instrumentFields instruments a read/write operation on addr.
// If it is instrumenting for MSAN or ASAN and t is a struct type, it instruments
// the individual (non-blank) fields instead of the whole struct, recursing into
// nested structs. For the race detector (or non-struct types) it falls back to
// instrumenting the value as a whole.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			// Blank fields carry no data worth checking.
			continue
		}
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}
1287
1288 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1289 if base.Flag.MSan {
1290 s.instrument2(t, dst, src, instrumentMove)
1291 } else {
1292 s.instrument(t, src, instrumentRead)
1293 s.instrument(t, dst, instrumentWrite)
1294 }
1295 }
1296
// instrument2 emits the sanitizer runtime call for an operation of kind on
// the t.Size() bytes at addr (and, for moves, addr2). It is a no-op when
// instrumentation is disabled for this function, when the type has zero
// size, or when addr is known to be sanitizer-safe.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	// fn is the runtime hook to call; needWidth records whether that hook
	// takes the byte count as an extra argument.
	var fn *obj.LSym
	needWidth := false

	// Only MSAN's move hook takes a second address.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// Multi-component types use the range variants of the race hooks,
		// which take an explicit width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component types use the plain race hooks (no width).
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1377
// load emits an instrumented (if enabled) load of type t from src.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}
1382
// rawLoad emits a load of type t from src without sanitizer instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}
1386
// store emits a store of val (of type t) to dst and threads the new memory
// state through s.vars[memVar].
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}
1390
// zero zeroes the t.Size() bytes at dst, instrumenting the write if enabled,
// and threads the new memory state through s.vars[memVar].
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t // OpZero carries the zeroed type in Aux
	s.vars[memVar] = store
}
1397
// move copies t.Size() bytes from src to dst; src and dst must not overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
// moveWhichMayOverlap copies t.Size() bytes from src to dst. If mayOverlap
// is true the source and destination ranges may overlap, in which case a
// multi-element array copy that cannot be proven an inlinable memmove is
// lowered to a runtime call (typedmemmove or memmove) that handles overlap;
// everything else becomes an OpMove.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Overlapping multi-element array copy: fall back to a runtime
		// memmove-style call rather than OpMove.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// typedmemmove performs write barriers internally; record a
			// write-barrier position for this function.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t // OpMove carries the moved type in Aux
	s.vars[memVar] = store
}
1444
1445
1446 func (s *state) stmtList(l ir.Nodes) {
1447 for _, n := range l {
1448 s.stmt(n)
1449 }
1450 }
1451
1452
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// With no current block this statement is unreachable; only labels are
	// still processed, since a label can make code reachable again.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	// No-op: fallthrough was already handled when rewriting switches.
	case ir.OFALL:

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			// Calls to known never-returning runtime functions terminate
			// the block with an exit edge instead of falling through.
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer strategy was chosen for this statement.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSA-able result: the dottype produced a load; strip it and
			// assign by dereference from its address instead.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing can branch to a blank label.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block from a forward goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to the label's target block; if the current block is live,
		// fall through into it.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the goto is a statement boundary
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment of a name: nothing to do.
			return
		}

		// Detect potentially overlapping *p = *q assignments (both sides are
		// dereferences); string-backed sources are read-only and cannot
		// overlap a write.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				mayOverlap = false
			}
		}

		// Evaluate the RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// Composite literals reaching SSA must be all-zero; treat
				// them as an assignment of the zero value (rhs = nil).
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// s = append(s, ...) can sometimes update the slice in place;
				// only when assigning back to the same slice and optimizing.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// SSA-able slices are handled by the normal assignment path.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate the RHS for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		// r is the value to assign; for non-SSA-able types it is the address
		// of the RHS (deref assignment) instead.
		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		// For self-slicing assignments (s = s[i:j:k]), some components of
		// the slice header are unchanged and their stores can be skipped.
		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] — same as leaving the low bound out.
				i = nil
			}
			// With a zero/absent low bound the pointer is unchanged; with
			// the high/cap bounds also absent, len/cap are unchanged too.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty then/else bodies branch straight to bEnd.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// Plain break/continue: innermost enclosing loop/switch.
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// Labeled break/continue.
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the branch is a statement boundary
		b.AddEdgeTo(to)

	case ir.OFOR:
		// for Ninit; Cond; Post { Body }
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The body's position is the loop's position.
		bBody.Pos = n.Pos()

		// First, jump to the condition block.
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// Generate code to test the condition (absent condition = always true).
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// Set up continue/break targets for the loop body, remembering the
		// previous ones so nested loops restore correctly.
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// Labeled loop: labeled break/continue target this loop.
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// Generate the loop body.
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// Restore outer continue/break targets.
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// The body falls through to the increment block (if still live).
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// Generate the post statement and loop back to the condition.
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// If the increment block has no position of its own, inherit the
			// condition block's position.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// Switches and selects arrive pre-compiled: the body is already a
		// sequence of statements; only break handling is needed here.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// Labeled switch/select: labeled break targets it.
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// Generate the pre-compiled body.
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// If the body still has a live block, it didn't end in a break;
		// terminate it with an exit (the body is expected to be exhaustive).
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// jt dispatches on the index value; bEnd is the fall-through for
		// out-of-range indices.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// Compute the index being switched on.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Widen the index to uintptr.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Cases are sorted, so min/max come from the first and last entries.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		// Range check: 0 <= idx-min <= max-min (unsigned compare).
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely

		// Build the jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Fill the table; gaps between cases go to bEnd.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the fall-through entry with the case target.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Fast path: consult the per-descriptor cache before falling back to
		// the runtime interfaceSwitch call (only when optimizing and the
		// target supports the cache).
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
			// The cache is read with an atomic pointer load.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pointer-size-dependent ops for the hash arithmetic below.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load the cache pointer atomically from the descriptor.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize the probe hash (zero-extended to uintptr width).
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load the mask from the first word of the cache.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Open-addressing probe loop: compute the entry address from the
			// masked hash (entries are 3 pointers each).
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance the hash for the next probe.
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Does the entry's type match t? If so, cache hit.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// A nil entry type ends the probe sequence: cache miss;
			// otherwise keep probing.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Cache hit: read the case index and itab from the entry
			// (at offsets 1 and 2 pointers) and assign the results.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Cache miss: fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		// Slow path: runtime call returning (case index, itab).
		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Rejoin the cache-hit fast path.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		_ = s.nilCheck(p)
		// TODO: check that throwing away the nilcheck result is ok.

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2147
2148
2149
// shareDeferExits, when true, lets open-coded-defer exits share a single
// defer-exit sequence (see exit); currently disabled.
const shareDeferExits = false
2151
2152
2153
2154
2155 func (s *state) exit() *ssa.Block {
2156 if s.hasdefer {
2157 if s.hasOpenDefers {
2158 if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
2159 if s.curBlock.Kind != ssa.BlockPlain {
2160 panic("Block for an exit should be BlockPlain")
2161 }
2162 s.curBlock.AddEdgeTo(s.lastDeferExit)
2163 s.endBlock()
2164 return s.lastDeferFinalBlock
2165 }
2166 s.openDeferExit()
2167 } else {
2168 s.rtcall(ir.Syms.Deferreturn, true, nil)
2169 }
2170 }
2171
2172
2173
2174 resultFields := s.curfn.Type().Results()
2175 results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
2176
2177 for i, f := range resultFields {
2178 n := f.Nname.(*ir.Name)
2179 if s.canSSA(n) {
2180 if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
2181
2182 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2183 }
2184 results[i] = s.variable(n, n.Type())
2185 } else if !n.OnStack() {
2186
2187 if n.Type().HasPointers() {
2188 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2189 }
2190 ha := s.expr(n.Heapaddr)
2191 s.instrumentFields(n.Type(), ha, instrumentRead)
2192 results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
2193 } else {
2194
2195
2196
2197 results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
2198 }
2199 }
2200
2201
2202
2203
2204 if s.instrumentEnterExit {
2205 s.rtcall(ir.Syms.Racefuncexit, true, nil)
2206 }
2207
2208 results[len(results)-1] = s.mem()
2209 m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
2210 m.AddArgs(results...)
2211
2212 b := s.endBlock()
2213 b.Kind = ssa.BlockRet
2214 b.SetControl(m)
2215 if s.hasdefer && s.hasOpenDefers {
2216 s.lastDeferFinalBlock = b
2217 }
2218 return b
2219 }
2220
// opAndType is the key type for opToSSA: an IR operator paired with the
// concrete type kind of its operands.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2225
// opToSSA maps an (IR operator, operand type kind) pair to the
// corresponding typed SSA op.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}:    ssa.OpAdd8,
	{ir.OADD, types.TUINT8}:   ssa.OpAdd8,
	{ir.OADD, types.TINT16}:   ssa.OpAdd16,
	{ir.OADD, types.TUINT16}:  ssa.OpAdd16,
	{ir.OADD, types.TINT32}:   ssa.OpAdd32,
	{ir.OADD, types.TUINT32}:  ssa.OpAdd32,
	{ir.OADD, types.TINT64}:   ssa.OpAdd64,
	{ir.OADD, types.TUINT64}:  ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}:    ssa.OpSub8,
	{ir.OSUB, types.TUINT8}:   ssa.OpSub8,
	{ir.OSUB, types.TINT16}:   ssa.OpSub16,
	{ir.OSUB, types.TUINT16}:  ssa.OpSub16,
	{ir.OSUB, types.TINT32}:   ssa.OpSub32,
	{ir.OSUB, types.TUINT32}:  ssa.OpSub32,
	{ir.OSUB, types.TINT64}:   ssa.OpSub64,
	{ir.OSUB, types.TUINT64}:  ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}:    ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}:   ssa.OpNeg8,
	{ir.ONEG, types.TINT16}:   ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}:  ssa.OpNeg16,
	{ir.ONEG, types.TINT32}:   ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}:  ssa.OpNeg32,
	{ir.ONEG, types.TINT64}:   ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}:  ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}:   ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}:  ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}:  ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}:  ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}:  ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}:  ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}:  ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}:    ssa.OpMul8,
	{ir.OMUL, types.TUINT8}:   ssa.OpMul8,
	{ir.OMUL, types.TINT16}:   ssa.OpMul16,
	{ir.OMUL, types.TUINT16}:  ssa.OpMul16,
	{ir.OMUL, types.TINT32}:   ssa.OpMul32,
	{ir.OMUL, types.TUINT32}:  ssa.OpMul32,
	{ir.OMUL, types.TINT64}:   ssa.OpMul64,
	{ir.OMUL, types.TUINT64}:  ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division distinguishes signed and unsigned ops.
	{ir.ODIV, types.TINT8}:   ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}:  ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}:  ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}:  ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}:  ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}:   ssa.OpMod8,
	{ir.OMOD, types.TUINT8}:  ssa.OpMod8u,
	{ir.OMOD, types.TINT16}:  ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}:  ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}:  ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}:   ssa.OpAnd8,
	{ir.OAND, types.TUINT8}:  ssa.OpAnd8,
	{ir.OAND, types.TINT16}:  ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}:  ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}:  ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}:   ssa.OpOr8,
	{ir.OOR, types.TUINT8}:  ssa.OpOr8,
	{ir.OOR, types.TINT16}:  ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}:  ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}:  ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}:   ssa.OpXor8,
	{ir.OXOR, types.TUINT8}:  ssa.OpXor8,
	{ir.OXOR, types.TINT16}:  ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}:  ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}:  ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	{ir.OEQ, types.TBOOL}:      ssa.OpEqB,
	{ir.OEQ, types.TINT8}:      ssa.OpEq8,
	{ir.OEQ, types.TUINT8}:     ssa.OpEq8,
	{ir.OEQ, types.TINT16}:     ssa.OpEq16,
	{ir.OEQ, types.TUINT16}:    ssa.OpEq16,
	{ir.OEQ, types.TINT32}:     ssa.OpEq32,
	{ir.OEQ, types.TUINT32}:    ssa.OpEq32,
	{ir.OEQ, types.TINT64}:     ssa.OpEq64,
	{ir.OEQ, types.TUINT64}:    ssa.OpEq64,
	{ir.OEQ, types.TINTER}:     ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}:     ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}:      ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}:       ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}:      ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}:       ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}:   ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}:   ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}:   ssa.OpEq32F,

	{ir.ONE, types.TBOOL}:      ssa.OpNeqB,
	{ir.ONE, types.TINT8}:      ssa.OpNeq8,
	{ir.ONE, types.TUINT8}:     ssa.OpNeq8,
	{ir.ONE, types.TINT16}:     ssa.OpNeq16,
	{ir.ONE, types.TUINT16}:    ssa.OpNeq16,
	{ir.ONE, types.TINT32}:     ssa.OpNeq32,
	{ir.ONE, types.TUINT32}:    ssa.OpNeq32,
	{ir.ONE, types.TINT64}:     ssa.OpNeq64,
	{ir.ONE, types.TUINT64}:    ssa.OpNeq64,
	{ir.ONE, types.TINTER}:     ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}:     ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}:      ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}:       ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}:      ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}:       ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}:   ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}:   ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}:   ssa.OpNeq32F,

	{ir.OLT, types.TINT8}:    ssa.OpLess8,
	{ir.OLT, types.TUINT8}:   ssa.OpLess8U,
	{ir.OLT, types.TINT16}:   ssa.OpLess16,
	{ir.OLT, types.TUINT16}:  ssa.OpLess16U,
	{ir.OLT, types.TINT32}:   ssa.OpLess32,
	{ir.OLT, types.TUINT32}:  ssa.OpLess32U,
	{ir.OLT, types.TINT64}:   ssa.OpLess64,
	{ir.OLT, types.TUINT64}:  ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}:    ssa.OpLeq8,
	{ir.OLE, types.TUINT8}:   ssa.OpLeq8U,
	{ir.OLE, types.TINT16}:   ssa.OpLeq16,
	{ir.OLE, types.TUINT16}:  ssa.OpLeq16U,
	{ir.OLE, types.TINT32}:   ssa.OpLeq32,
	{ir.OLE, types.TUINT32}:  ssa.OpLeq32U,
	{ir.OLE, types.TINT64}:   ssa.OpLeq64,
	{ir.OLE, types.TUINT64}:  ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2397
2398 func (s *state) concreteEtype(t *types.Type) types.Kind {
2399 e := t.Kind()
2400 switch e {
2401 default:
2402 return e
2403 case types.TINT:
2404 if s.config.PtrSize == 8 {
2405 return types.TINT64
2406 }
2407 return types.TINT32
2408 case types.TUINT:
2409 if s.config.PtrSize == 8 {
2410 return types.TUINT64
2411 }
2412 return types.TUINT32
2413 case types.TUINTPTR:
2414 if s.config.PtrSize == 8 {
2415 return types.TUINT64
2416 }
2417 return types.TUINT32
2418 }
2419 }
2420
2421 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2422 etype := s.concreteEtype(t)
2423 x, ok := opToSSA[opAndType{op, etype}]
2424 if !ok {
2425 s.Fatalf("unhandled binary op %v %s", op, etype)
2426 }
2427 return x
2428 }
2429
// opAndTwoTypes is a map key pairing an IR operation with the kinds of
// its two operands. It is used for shifts, where the shifted value
// (etype1) and the shift count (etype2) may have different types.
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2435
// twoTypes is a map key pairing a conversion's source kind (etype1)
// with its destination kind (etype2).
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2440
// twoOpsAndType describes a conversion performed as two SSA operations:
// op1 takes the source to intermediateType, then op2 takes that to the
// destination. Either op may be OpCopy (a no-op step) or OpInvalid
// (meaning the half must be handled specially by the caller).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2446
// fpConvOpToSSA maps (from kind, to kind) to the two-step SSA lowering
// of a conversion involving a floating-point type. Entries with
// ssa.OpInvalid mark the 64-bit-unsigned cases that have no direct
// two-op lowering here; conv handles them out of line (or replaces the
// entry from uint64fpConvOpToSSA on architectures with native support).
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed integer -> float32: widen to a native int size, then convert.
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	// signed integer -> float64.
	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed integer: convert, then truncate to the final size.
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	// float64 -> signed integer.
	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned integer -> float32: zero-extend into a wider signed
	// value so the signed conversion is exact; uint64 has no such
	// wider value, hence OpInvalid.
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// unsigned integer -> float64.
	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float32 -> unsigned integer: go through a wider signed value,
	// then truncate; float32 -> uint64 needs special handling (OpInvalid).
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float64 -> unsigned integer.
	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float: same-size conversions round (to normalize
	// precision); cross-size conversions use the Cvt ops.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2495
2496
2497
// fpConvOpToSSA32 overrides fpConvOpToSSA entries for 32-bit register
// targets (other than MIPS, and not under softfloat — see conv) that
// have dedicated 32-bit unsigned <-> float conversion ops, avoiding the
// 64-bit intermediate used in the generic table.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2504
2505
// uint64fpConvOpToSSA overrides the OpInvalid entries of fpConvOpToSSA
// on targets (ARM64, Wasm, S390X, or softfloat — see conv) that can
// convert uint64 <-> float directly in a single op.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2512
// shiftOpToSSA maps (shift op, shifted-operand kind, shift-count kind)
// to the SSA opcode implementing it. Left shifts ignore operand sign;
// right shifts are arithmetic (Rsh*x*) for signed operands and logical
// (Rsh*Ux*) for unsigned ones. Shift counts are always unsigned here
// (ssaShiftOp's caller converts signed counts first).
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2586
2587 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2588 etype1 := s.concreteEtype(t)
2589 etype2 := s.concreteEtype(u)
2590 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2591 if !ok {
2592 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2593 }
2594 return x
2595 }
2596
2597 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2598 if s.config.PtrSize == 4 {
2599 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2600 }
2601 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2602 }
2603
// conv converts the value v, which has type ft (from type), to a value
// of type tt (to type), emitting whatever SSA operations the
// conversion requires. n is used only for position information in the
// slow-path helper calls and error reports.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// bool -> uint8: a dedicated op normalizes the representation.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// truncation; the switch value encodes (from, to) byte sizes
			// as 10*from+to, e.g. 84 is 8 bytes -> 4 bytes.
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// sign extension (source is signed and narrower).
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// zero extension (source is unsigned and narrower).
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				// NOTE(review): the message says "sign extension" though this
				// is the zero-extension branch; kept as-is to preserve output.
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Convert a complex value by converting its real and imaginary
		// parts separately with the matching float op.
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Non-complex -> complex: convert to the element float type,
		// then pair with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		// Look up the generic two-step lowering, then let per-target
		// tables override it where better ops exist.
		conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// MIPS (hard-float) routes 32-bit unsigned conversions through
		// dedicated helper expansions instead of the tables.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 then op2, skipping OpCopy steps.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}

		// An OpInvalid step marks a uint64 <-> float conversion with no
		// direct lowering on this target; expand it out of line.
		if ft.IsInteger() {
			// uint64 -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}
		// float -> uint64
		if ft.Size() == 4 {
			return s.float32ToUint64(n, v, ft, tt)
		}
		if ft.Size() == 8 {
			return s.float64ToUint64(n, v, ft, tt)
		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
2779
2780
// expr converts the expression n to SSA, adds it to s, and returns the
// SSA result. Checkptr instrumentation (where enabled) is applied; use
// exprCheckPtr directly to opt out.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
2784
2785 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2786 if ir.HasUniquePos(n) {
2787
2788
2789 s.pushLine(n.Pos())
2790 defer s.popLine()
2791 }
2792
2793 s.stmtList(n.Init())
2794 switch n.Op() {
2795 case ir.OBYTES2STRTMP:
2796 n := n.(*ir.ConvExpr)
2797 slice := s.expr(n.X)
2798 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2799 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2800 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2801 case ir.OSTR2BYTESTMP:
2802 n := n.(*ir.ConvExpr)
2803 str := s.expr(n.X)
2804 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2805 if !n.NonNil() {
2806
2807
2808
2809 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2810 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2811 ptr = s.ternary(cond, ptr, zerobase)
2812 }
2813 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2814 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2815 case ir.OCFUNC:
2816 n := n.(*ir.UnaryExpr)
2817 aux := n.X.(*ir.Name).Linksym()
2818
2819
2820 if aux.ABI() != obj.ABIInternal {
2821 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2822 }
2823 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2824 case ir.ONAME:
2825 n := n.(*ir.Name)
2826 if n.Class == ir.PFUNC {
2827
2828 sym := staticdata.FuncLinksym(n)
2829 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2830 }
2831 if s.canSSA(n) {
2832 return s.variable(n, n.Type())
2833 }
2834 return s.load(n.Type(), s.addr(n))
2835 case ir.OLINKSYMOFFSET:
2836 n := n.(*ir.LinksymOffsetExpr)
2837 return s.load(n.Type(), s.addr(n))
2838 case ir.ONIL:
2839 n := n.(*ir.NilExpr)
2840 t := n.Type()
2841 switch {
2842 case t.IsSlice():
2843 return s.constSlice(t)
2844 case t.IsInterface():
2845 return s.constInterface(t)
2846 default:
2847 return s.constNil(t)
2848 }
2849 case ir.OLITERAL:
2850 switch u := n.Val(); u.Kind() {
2851 case constant.Int:
2852 i := ir.IntVal(n.Type(), u)
2853 switch n.Type().Size() {
2854 case 1:
2855 return s.constInt8(n.Type(), int8(i))
2856 case 2:
2857 return s.constInt16(n.Type(), int16(i))
2858 case 4:
2859 return s.constInt32(n.Type(), int32(i))
2860 case 8:
2861 return s.constInt64(n.Type(), i)
2862 default:
2863 s.Fatalf("bad integer size %d", n.Type().Size())
2864 return nil
2865 }
2866 case constant.String:
2867 i := constant.StringVal(u)
2868 if i == "" {
2869 return s.constEmptyString(n.Type())
2870 }
2871 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
2872 case constant.Bool:
2873 return s.constBool(constant.BoolVal(u))
2874 case constant.Float:
2875 f, _ := constant.Float64Val(u)
2876 switch n.Type().Size() {
2877 case 4:
2878 return s.constFloat32(n.Type(), f)
2879 case 8:
2880 return s.constFloat64(n.Type(), f)
2881 default:
2882 s.Fatalf("bad float size %d", n.Type().Size())
2883 return nil
2884 }
2885 case constant.Complex:
2886 re, _ := constant.Float64Val(constant.Real(u))
2887 im, _ := constant.Float64Val(constant.Imag(u))
2888 switch n.Type().Size() {
2889 case 8:
2890 pt := types.Types[types.TFLOAT32]
2891 return s.newValue2(ssa.OpComplexMake, n.Type(),
2892 s.constFloat32(pt, re),
2893 s.constFloat32(pt, im))
2894 case 16:
2895 pt := types.Types[types.TFLOAT64]
2896 return s.newValue2(ssa.OpComplexMake, n.Type(),
2897 s.constFloat64(pt, re),
2898 s.constFloat64(pt, im))
2899 default:
2900 s.Fatalf("bad complex size %d", n.Type().Size())
2901 return nil
2902 }
2903 default:
2904 s.Fatalf("unhandled OLITERAL %v", u.Kind())
2905 return nil
2906 }
2907 case ir.OCONVNOP:
2908 n := n.(*ir.ConvExpr)
2909 to := n.Type()
2910 from := n.X.Type()
2911
2912
2913
2914 x := s.expr(n.X)
2915 if to == from {
2916 return x
2917 }
2918
2919
2920
2921
2922
2923 if to.IsPtrShaped() != from.IsPtrShaped() {
2924 return s.newValue2(ssa.OpConvert, to, x, s.mem())
2925 }
2926
2927 v := s.newValue1(ssa.OpCopy, to, x)
2928
2929
2930 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
2931 return v
2932 }
2933
2934
2935 if from.Kind() == to.Kind() {
2936 return v
2937 }
2938
2939
2940 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
2941 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
2942 s.checkPtrAlignment(n, v, nil)
2943 }
2944 return v
2945 }
2946
2947
2948 var mt *types.Type
2949 if buildcfg.Experiment.SwissMap {
2950 mt = types.NewPtr(reflectdata.SwissMapType())
2951 } else {
2952 mt = types.NewPtr(reflectdata.OldMapType())
2953 }
2954 if to.Kind() == types.TMAP && from == mt {
2955 return v
2956 }
2957
2958 types.CalcSize(from)
2959 types.CalcSize(to)
2960 if from.Size() != to.Size() {
2961 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
2962 return nil
2963 }
2964 if etypesign(from.Kind()) != etypesign(to.Kind()) {
2965 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
2966 return nil
2967 }
2968
2969 if base.Flag.Cfg.Instrumenting {
2970
2971
2972
2973 return v
2974 }
2975
2976 if etypesign(from.Kind()) == 0 {
2977 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
2978 return nil
2979 }
2980
2981
2982 return v
2983
2984 case ir.OCONV:
2985 n := n.(*ir.ConvExpr)
2986 x := s.expr(n.X)
2987 return s.conv(n, x, n.X.Type(), n.Type())
2988
2989 case ir.ODOTTYPE:
2990 n := n.(*ir.TypeAssertExpr)
2991 res, _ := s.dottype(n, false)
2992 return res
2993
2994 case ir.ODYNAMICDOTTYPE:
2995 n := n.(*ir.DynamicTypeAssertExpr)
2996 res, _ := s.dynamicDottype(n, false)
2997 return res
2998
2999
3000 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3001 n := n.(*ir.BinaryExpr)
3002 a := s.expr(n.X)
3003 b := s.expr(n.Y)
3004 if n.X.Type().IsComplex() {
3005 pt := types.FloatForComplex(n.X.Type())
3006 op := s.ssaOp(ir.OEQ, pt)
3007 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3008 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3009 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3010 switch n.Op() {
3011 case ir.OEQ:
3012 return c
3013 case ir.ONE:
3014 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3015 default:
3016 s.Fatalf("ordered complex compare %v", n.Op())
3017 }
3018 }
3019
3020
3021 op := n.Op()
3022 switch op {
3023 case ir.OGE:
3024 op, a, b = ir.OLE, b, a
3025 case ir.OGT:
3026 op, a, b = ir.OLT, b, a
3027 }
3028 if n.X.Type().IsFloat() {
3029
3030 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3031 }
3032
3033 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3034 case ir.OMUL:
3035 n := n.(*ir.BinaryExpr)
3036 a := s.expr(n.X)
3037 b := s.expr(n.Y)
3038 if n.Type().IsComplex() {
3039 mulop := ssa.OpMul64F
3040 addop := ssa.OpAdd64F
3041 subop := ssa.OpSub64F
3042 pt := types.FloatForComplex(n.Type())
3043 wt := types.Types[types.TFLOAT64]
3044
3045 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3046 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3047 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3048 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3049
3050 if pt != wt {
3051 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3052 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3053 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3054 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3055 }
3056
3057 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3058 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3059
3060 if pt != wt {
3061 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3062 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3063 }
3064
3065 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3066 }
3067
3068 if n.Type().IsFloat() {
3069 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3070 }
3071
3072 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3073
3074 case ir.ODIV:
3075 n := n.(*ir.BinaryExpr)
3076 a := s.expr(n.X)
3077 b := s.expr(n.Y)
3078 if n.Type().IsComplex() {
3079
3080
3081
3082 mulop := ssa.OpMul64F
3083 addop := ssa.OpAdd64F
3084 subop := ssa.OpSub64F
3085 divop := ssa.OpDiv64F
3086 pt := types.FloatForComplex(n.Type())
3087 wt := types.Types[types.TFLOAT64]
3088
3089 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3090 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3091 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3092 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3093
3094 if pt != wt {
3095 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3096 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3097 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3098 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3099 }
3100
3101 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3102 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3103 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3104
3105
3106
3107
3108
3109 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3110 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3111
3112 if pt != wt {
3113 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3114 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3115 }
3116 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3117 }
3118 if n.Type().IsFloat() {
3119 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3120 }
3121 return s.intDivide(n, a, b)
3122 case ir.OMOD:
3123 n := n.(*ir.BinaryExpr)
3124 a := s.expr(n.X)
3125 b := s.expr(n.Y)
3126 return s.intDivide(n, a, b)
3127 case ir.OADD, ir.OSUB:
3128 n := n.(*ir.BinaryExpr)
3129 a := s.expr(n.X)
3130 b := s.expr(n.Y)
3131 if n.Type().IsComplex() {
3132 pt := types.FloatForComplex(n.Type())
3133 op := s.ssaOp(n.Op(), pt)
3134 return s.newValue2(ssa.OpComplexMake, n.Type(),
3135 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3136 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3137 }
3138 if n.Type().IsFloat() {
3139 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3140 }
3141 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3142 case ir.OAND, ir.OOR, ir.OXOR:
3143 n := n.(*ir.BinaryExpr)
3144 a := s.expr(n.X)
3145 b := s.expr(n.Y)
3146 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3147 case ir.OANDNOT:
3148 n := n.(*ir.BinaryExpr)
3149 a := s.expr(n.X)
3150 b := s.expr(n.Y)
3151 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3152 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3153 case ir.OLSH, ir.ORSH:
3154 n := n.(*ir.BinaryExpr)
3155 a := s.expr(n.X)
3156 b := s.expr(n.Y)
3157 bt := b.Type
3158 if bt.IsSigned() {
3159 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3160 s.check(cmp, ir.Syms.Panicshift)
3161 bt = bt.ToUnsigned()
3162 }
3163 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3164 case ir.OANDAND, ir.OOROR:
3165
3166
3167
3168
3169
3170
3171
3172
3173
3174
3175
3176
3177
3178 n := n.(*ir.LogicalExpr)
3179 el := s.expr(n.X)
3180 s.vars[n] = el
3181
3182 b := s.endBlock()
3183 b.Kind = ssa.BlockIf
3184 b.SetControl(el)
3185
3186
3187
3188
3189
3190 bRight := s.f.NewBlock(ssa.BlockPlain)
3191 bResult := s.f.NewBlock(ssa.BlockPlain)
3192 if n.Op() == ir.OANDAND {
3193 b.AddEdgeTo(bRight)
3194 b.AddEdgeTo(bResult)
3195 } else if n.Op() == ir.OOROR {
3196 b.AddEdgeTo(bResult)
3197 b.AddEdgeTo(bRight)
3198 }
3199
3200 s.startBlock(bRight)
3201 er := s.expr(n.Y)
3202 s.vars[n] = er
3203
3204 b = s.endBlock()
3205 b.AddEdgeTo(bResult)
3206
3207 s.startBlock(bResult)
3208 return s.variable(n, types.Types[types.TBOOL])
3209 case ir.OCOMPLEX:
3210 n := n.(*ir.BinaryExpr)
3211 r := s.expr(n.X)
3212 i := s.expr(n.Y)
3213 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3214
3215
3216 case ir.ONEG:
3217 n := n.(*ir.UnaryExpr)
3218 a := s.expr(n.X)
3219 if n.Type().IsComplex() {
3220 tp := types.FloatForComplex(n.Type())
3221 negop := s.ssaOp(n.Op(), tp)
3222 return s.newValue2(ssa.OpComplexMake, n.Type(),
3223 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3224 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3225 }
3226 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3227 case ir.ONOT, ir.OBITNOT:
3228 n := n.(*ir.UnaryExpr)
3229 a := s.expr(n.X)
3230 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3231 case ir.OIMAG, ir.OREAL:
3232 n := n.(*ir.UnaryExpr)
3233 a := s.expr(n.X)
3234 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3235 case ir.OPLUS:
3236 n := n.(*ir.UnaryExpr)
3237 return s.expr(n.X)
3238
3239 case ir.OADDR:
3240 n := n.(*ir.AddrExpr)
3241 return s.addr(n.X)
3242
3243 case ir.ORESULT:
3244 n := n.(*ir.ResultExpr)
3245 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3246 panic("Expected to see a previous call")
3247 }
3248 which := n.Index
3249 if which == -1 {
3250 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3251 }
3252 return s.resultOfCall(s.prevCall, which, n.Type())
3253
3254 case ir.ODEREF:
3255 n := n.(*ir.StarExpr)
3256 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3257 return s.load(n.Type(), p)
3258
3259 case ir.ODOT:
3260 n := n.(*ir.SelectorExpr)
3261 if n.X.Op() == ir.OSTRUCTLIT {
3262
3263
3264
3265 if !ir.IsZero(n.X) {
3266 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3267 }
3268 return s.zeroVal(n.Type())
3269 }
3270
3271
3272
3273
3274 if ir.IsAddressable(n) && !s.canSSA(n) {
3275 p := s.addr(n)
3276 return s.load(n.Type(), p)
3277 }
3278 v := s.expr(n.X)
3279 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3280
3281 case ir.ODOTPTR:
3282 n := n.(*ir.SelectorExpr)
3283 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3284 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3285 return s.load(n.Type(), p)
3286
3287 case ir.OINDEX:
3288 n := n.(*ir.IndexExpr)
3289 switch {
3290 case n.X.Type().IsString():
3291 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3292
3293
3294
3295 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3296 }
3297 a := s.expr(n.X)
3298 i := s.expr(n.Index)
3299 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3300 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3301 ptrtyp := s.f.Config.Types.BytePtr
3302 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3303 if ir.IsConst(n.Index, constant.Int) {
3304 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3305 } else {
3306 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3307 }
3308 return s.load(types.Types[types.TUINT8], ptr)
3309 case n.X.Type().IsSlice():
3310 p := s.addr(n)
3311 return s.load(n.X.Type().Elem(), p)
3312 case n.X.Type().IsArray():
3313 if ssa.CanSSA(n.X.Type()) {
3314
3315 bound := n.X.Type().NumElem()
3316 a := s.expr(n.X)
3317 i := s.expr(n.Index)
3318 if bound == 0 {
3319
3320
3321 z := s.constInt(types.Types[types.TINT], 0)
3322 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3323
3324
3325 return s.zeroVal(n.Type())
3326 }
3327 len := s.constInt(types.Types[types.TINT], bound)
3328 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3329 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3330 }
3331 p := s.addr(n)
3332 return s.load(n.X.Type().Elem(), p)
3333 default:
3334 s.Fatalf("bad type for index %v", n.X.Type())
3335 return nil
3336 }
3337
3338 case ir.OLEN, ir.OCAP:
3339 n := n.(*ir.UnaryExpr)
3340 switch {
3341 case n.X.Type().IsSlice():
3342 op := ssa.OpSliceLen
3343 if n.Op() == ir.OCAP {
3344 op = ssa.OpSliceCap
3345 }
3346 return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
3347 case n.X.Type().IsString():
3348 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
3349 case n.X.Type().IsMap(), n.X.Type().IsChan():
3350 return s.referenceTypeBuiltin(n, s.expr(n.X))
3351 default:
3352 return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
3353 }
3354
3355 case ir.OSPTR:
3356 n := n.(*ir.UnaryExpr)
3357 a := s.expr(n.X)
3358 if n.X.Type().IsSlice() {
3359 if n.Bounded() {
3360 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3361 }
3362 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3363 } else {
3364 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3365 }
3366
3367 case ir.OITAB:
3368 n := n.(*ir.UnaryExpr)
3369 a := s.expr(n.X)
3370 return s.newValue1(ssa.OpITab, n.Type(), a)
3371
3372 case ir.OIDATA:
3373 n := n.(*ir.UnaryExpr)
3374 a := s.expr(n.X)
3375 return s.newValue1(ssa.OpIData, n.Type(), a)
3376
3377 case ir.OMAKEFACE:
3378 n := n.(*ir.BinaryExpr)
3379 tab := s.expr(n.X)
3380 data := s.expr(n.Y)
3381 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3382
3383 case ir.OSLICEHEADER:
3384 n := n.(*ir.SliceHeaderExpr)
3385 p := s.expr(n.Ptr)
3386 l := s.expr(n.Len)
3387 c := s.expr(n.Cap)
3388 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3389
3390 case ir.OSTRINGHEADER:
3391 n := n.(*ir.StringHeaderExpr)
3392 p := s.expr(n.Ptr)
3393 l := s.expr(n.Len)
3394 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3395
3396 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3397 n := n.(*ir.SliceExpr)
3398 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3399 v := s.exprCheckPtr(n.X, !check)
3400 var i, j, k *ssa.Value
3401 if n.Low != nil {
3402 i = s.expr(n.Low)
3403 }
3404 if n.High != nil {
3405 j = s.expr(n.High)
3406 }
3407 if n.Max != nil {
3408 k = s.expr(n.Max)
3409 }
3410 p, l, c := s.slice(v, i, j, k, n.Bounded())
3411 if check {
3412
3413 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3414 }
3415 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3416
3417 case ir.OSLICESTR:
3418 n := n.(*ir.SliceExpr)
3419 v := s.expr(n.X)
3420 var i, j *ssa.Value
3421 if n.Low != nil {
3422 i = s.expr(n.Low)
3423 }
3424 if n.High != nil {
3425 j = s.expr(n.High)
3426 }
3427 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3428 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3429
3430 case ir.OSLICE2ARRPTR:
3431
3432
3433
3434
3435 n := n.(*ir.ConvExpr)
3436 v := s.expr(n.X)
3437 nelem := n.Type().Elem().NumElem()
3438 arrlen := s.constInt(types.Types[types.TINT], nelem)
3439 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3440 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3441 op := ssa.OpSlicePtr
3442 if nelem == 0 {
3443 op = ssa.OpSlicePtrUnchecked
3444 }
3445 return s.newValue1(op, n.Type(), v)
3446
3447 case ir.OCALLFUNC:
3448 n := n.(*ir.CallExpr)
3449 if ir.IsIntrinsicCall(n) {
3450 return s.intrinsicCall(n)
3451 }
3452 fallthrough
3453
3454 case ir.OCALLINTER:
3455 n := n.(*ir.CallExpr)
3456 return s.callResult(n, callNormal)
3457
3458 case ir.OGETG:
3459 n := n.(*ir.CallExpr)
3460 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3461
3462 case ir.OGETCALLERSP:
3463 n := n.(*ir.CallExpr)
3464 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3465
3466 case ir.OAPPEND:
3467 return s.append(n.(*ir.CallExpr), false)
3468
3469 case ir.OMIN, ir.OMAX:
3470 return s.minMax(n.(*ir.CallExpr))
3471
3472 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3473
3474
3475
3476 n := n.(*ir.CompLitExpr)
3477 if !ir.IsZero(n) {
3478 s.Fatalf("literal with nonzero value in SSA: %v", n)
3479 }
3480 return s.zeroVal(n.Type())
3481
3482 case ir.ONEW:
3483 n := n.(*ir.UnaryExpr)
3484 var rtype *ssa.Value
3485 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3486 rtype = s.expr(x.RType)
3487 }
3488 return s.newObject(n.Type().Elem(), rtype)
3489
3490 case ir.OUNSAFEADD:
3491 n := n.(*ir.BinaryExpr)
3492 ptr := s.expr(n.X)
3493 len := s.expr(n.Y)
3494
3495
3496
3497 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3498
3499 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3500
3501 default:
3502 s.Fatalf("unhandled expr %v", n.Op())
3503 return nil
3504 }
3505 }
3506
3507 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3508 aux := c.Aux.(*ssa.AuxCall)
3509 pa := aux.ParamAssignmentForResult(which)
3510
3511
3512 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3513 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3514 return s.rawLoad(t, addr)
3515 }
3516 return s.newValue1I(ssa.OpSelectN, t, which, c)
3517 }
3518
3519 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3520 aux := c.Aux.(*ssa.AuxCall)
3521 pa := aux.ParamAssignmentForResult(which)
3522 if len(pa.Registers) == 0 {
3523 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3524 }
3525 _, addr := s.temp(c.Pos, t)
3526 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3527 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3528 return addr
3529 }
3530
3531
3532
3533
3534
3535
3536
3537
3538
// append converts an OAPPEND node n into SSA.
// If inplace is false, it returns the appended-to slice as a new SSA value.
// If inplace is true, it writes the result back through the address of
// n.Args[0] (storing cap and ptr eagerly on the grow path, and len on the
// common path) and returns nil.
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	// et is the element type; pt is *et, the type of the data pointer.
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		// For s = append(s, ...): load the slice through its address so
		// the updated header can be stored back into place below.
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// Control flow: test whether the new length fits the current capacity;
	// branch to grow (which calls growslice) or straight to assign.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the incoming slice header.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// New length = old length + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow if cap < newlen (unsigned comparison).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Seed the phi-building variables with the no-grow values.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the rare path
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// grow: call growslice to obtain a slice with sufficient capacity.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

	// Decompose the returned (grown) slice header.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Mark the variable as being (re)defined for liveness.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Store the new cap and ptr immediately; len is stored once,
		// below, on the merged path.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// assign: merge point; pick up the phi values for ptr/len(/cap).
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Update the length stored in the slice variable.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate all appended arguments before writing any of them,
	// recording for each either an SSA value (store) or an address (move).
	type argRec struct {
		// v is an SSA value when store is true; otherwise it is the
		// address of the (non-SSA-able) argument.
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the new elements, starting at the old length.
	oldLen := s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// The phi-building variables are dead past this point; drop them.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// For in-place append the result was stored through addr above.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
3704
3705
// minMax converts an OMIN/OMAX builtin call n into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// fold reduces the variadic call to a chain of binary ops,
	// left to right.
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings are not handled by the generic
		// compare-and-select path at the bottom; use dedicated machine
		// ops when the target has them, otherwise call runtime helpers.
		// NOTE(review): for floats this is presumably due to NaN and
		// signed-zero semantics — confirm against the runtime helpers.
		if typ.IsFloat() {
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				// Pick the machine op matching the element width and
				// the min/max direction.
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// No intrinsic available: select the runtime helper by type/op.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// Newer RISCV64 profiles (GORISCV64 >= 22) provide 64-bit
		// integer min/max machine ops.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic integer path: compare and select via ternary.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
3819
3820
// ternary returns cond ? x : y.
// It builds an if/else diamond in the CFG and merges x and y at the join
// block via the variable machinery (s.vars / s.variable).
func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
	// A synthetic variable used only for this merge; the two assignments
	// below are resolved to a single value at bEnd by s.variable.
	ternaryVar := ssaMarker("ternary")

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bEnd := s.f.NewBlock(ssa.BlockPlain)

	// Branch on cond: first edge taken when true (bThen), second when
	// false (bElse).
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cond)
	b.AddEdgeTo(bThen)
	b.AddEdgeTo(bElse)

	s.startBlock(bThen)
	s.vars[ternaryVar] = x
	s.endBlock().AddEdgeTo(bEnd)

	s.startBlock(bElse)
	s.vars[ternaryVar] = y
	s.endBlock().AddEdgeTo(bEnd)

	s.startBlock(bEnd)
	r := s.variable(ternaryVar, x.Type)
	delete(s.vars, ternaryVar) // dead past the merge
	return r
}
3849
3850
3851
3852
3853
// condBranch evaluates the boolean expression cond and branches to yes if
// cond is true and to no if cond is false.
// It handles &&, ||, ! and no-op conversions structurally, so short-circuit
// evaluation produces branches instead of materialized booleans.
// likely is a branch prediction hint: positive means the true edge is
// likely, negative unlikely, zero unknown.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// X && Y: test X; if true, fall into mid and test Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// If the whole && is predicted false we don't know which operand
		// fails, so the first test gets max(likely, 0) — i.e. "likely"
		// propagates, "unlikely" is dropped to "unknown".
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.OOROR:
		// X || Y: test X; if false, fall into mid and test Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// Mirror image of the && case: if the whole || is predicted true
		// we don't know which operand succeeds, so the first test gets
		// min(likely, 0).
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.ONOT:
		// !X: swap the branch targets and invert the prediction.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// Value-preserving conversion: branch on the operand directly.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate cond to a value and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	// int8 likely converts directly to ssa.BranchPrediction; both use the
	// same -1/0/+1 encoding.
	b.Likely = ssa.BranchPrediction(likely)
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
3900
// skipMask is a bit set describing components of a compound value (its
// pointer, length, and capacity words) whose store may be omitted.
// It is threaded from assign through storeType.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota
	skipLen
	skipCap
)
3908
3909
3910
3911
3912
3913
3914
// assign does left = right, with skip marking component stores that can be
// omitted. It is assignWhichMayOverlap with mayOverlap=false, i.e. it
// assumes left and right do not partially overlap in memory.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap does left = right.
// right has already been evaluated to SSA; left has not.
// If deref is true, it does left = *right instead (right is an address);
// with deref and right == nil, left is zeroed.
// skip marks top-level component stores that can be omitted.
// mayOverlap reports whether left and right might partially overlap in
// memory (relevant only on the deref/move path).
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		// Assignment to _ discards the value.
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an SSA-able struct: rebuild the
			// whole struct with the new field value and the old values
			// of the other fields, then assign the struct as a unit.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Current value of the whole struct.
			old := s.expr(left.X)

			// New struct under construction.
			new := s.newValue0(ssa.OpStructMake, t)

			// Copy each field, substituting right at the target index.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct to the base of the dot.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			// Assigning to an element of an SSA-able array (a[i] = v).
			// Only arrays of length 0 and 1 are SSA-able.
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index) // index
			if n == 0 {
				// The bounds check must fail; ignore the actual index
				// and check 0 against length 0.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Rewrite a[i] = v as a = [1]T{v} (after checking i < 1).
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain SSA-able variable: record the new value.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire on-stack variable, emit
	// OpVarDef so later analyses see the variable as (re)defined here.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Left is not SSA-able; compute its address and store/move into it.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Assignments to the Data field of a reflect header are stored
		// as unsafe.Pointer rather than uintptr.
		// NOTE(review): presumably so the stored value is treated as a
		// live pointer by the write-barrier/liveness machinery — confirm.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Memory-to-memory: zero or move.
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Value-to-memory store.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4026
4027
// zeroVal returns the zero value for type t.
func (s *state) zeroVal(t *types.Type) *ssa.Value {
	switch {
	case t.IsInteger():
		switch t.Size() {
		case 1:
			return s.constInt8(t, 0)
		case 2:
			return s.constInt16(t, 0)
		case 4:
			return s.constInt32(t, 0)
		case 8:
			return s.constInt64(t, 0)
		default:
			s.Fatalf("bad sized integer type %v", t)
		}
	case t.IsFloat():
		switch t.Size() {
		case 4:
			return s.constFloat32(t, 0)
		case 8:
			return s.constFloat64(t, 0)
		default:
			s.Fatalf("bad sized float type %v", t)
		}
	case t.IsComplex():
		switch t.Size() {
		case 8:
			// complex64: pair of float32 zeros.
			z := s.constFloat32(types.Types[types.TFLOAT32], 0)
			return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
		case 16:
			// complex128: pair of float64 zeros.
			z := s.constFloat64(types.Types[types.TFLOAT64], 0)
			return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
		default:
			s.Fatalf("bad sized complex type %v", t)
		}

	case t.IsString():
		return s.constEmptyString(t)
	case t.IsPtrShaped():
		return s.constNil(t)
	case t.IsBoolean():
		return s.constBool(false)
	case t.IsInterface():
		return s.constInterface(t)
	case t.IsSlice():
		return s.constSlice(t)
	case t.IsStruct():
		// Struct zero: zero each field recursively.
		n := t.NumFields()
		v := s.entryNewValue0(ssa.OpStructMake, t)
		for i := 0; i < n; i++ {
			v.AddArg(s.zeroVal(t.FieldType(i)))
		}
		return v
	case t.IsArray():
		// Only 0- and 1-element arrays are handled as SSA values;
		// anything else falls through to the Fatalf below.
		switch t.NumElem() {
		case 0:
			return s.entryNewValue0(ssa.OpArrayMake0, t)
		case 1:
			return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
		}
	}
	s.Fatalf("zero for type %v not implemented", t)
	return nil
}
4092
// callKind distinguishes the ways a call can be lowered.
type callKind int8

const (
	callNormal     callKind = iota // ordinary function call
	callDefer                      // deferred via runtime deferproc
	callDeferStack                 // deferred via runtime deferprocStack (stack-allocated _defer)
	callGo                         // goroutine start via runtime newproc
	callTail                       // tail call (OpTailLECall)
)
4102
// sfRtCallDef describes the runtime routine implementing a soft-float
// operation: the function symbol and the Go kind of its result.
type sfRtCallDef struct {
	rtfn  *obj.LSym
	rtype types.Kind
}

// softFloatOps maps floating-point SSA ops to their soft-float runtime
// replacements. Populated by softfloatInit when Arch.SoftFloat is set
// (see InitConfig); consulted by sfcall.
var softFloatOps map[ssa.Op]sfRtCallDef
4109
// softfloatInit populates softFloatOps, the table mapping floating-point
// SSA ops to soft-float runtime routines.
//
// Several ops deliberately map to a "different" routine; sfcall
// compensates when emitting the call:
//   - Sub maps to fadd: sfcall negates the second operand,
//   - Neq maps to feq: sfcall negates the result,
//   - Less/Leq map to fgt/fge: sfcall swaps the operands.
func softfloatInit() {
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4147
4148
4149
// sfcall rewrites the floating-point op into a call to the soft-float
// runtime routine registered in softFloatOps. It reports false when op
// has no soft-float replacement.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the same-width unsigned integer type;
	// the runtime routines traffic in raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// x < y is computed as y > x (and x <= y as y >= x);
			// the table maps these ops to fgt/fge.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// x - y is computed as x + (-y); the table maps Sub to fadd.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Reinterpret float arguments as same-width integers before
		// the call.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Reinterpret the integer result back as a float.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// Neq is !Eq; the table maps Neq to feq.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4193
4194
4195 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4196 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4197 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4198 return p0, p1
4199 }
4200
4201
4202 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4203 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4204 if ssa.IntrinsicsDebug > 0 {
4205 x := v
4206 if x == nil {
4207 x = s.mem()
4208 }
4209 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4210 x = x.Args[0]
4211 }
4212 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4213 }
4214 return v
4215 }
4216
4217
4218 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4219 args := make([]*ssa.Value, len(n.Args))
4220 for i, n := range n.Args {
4221 args[i] = s.expr(n)
4222 }
4223 return args
4224 }
4225
4226
4227
4228
4229
4230
4231
// openDeferRecord adds code to evaluate and store the deferred function of
// an open-coded defer, records the defer's info for the exit code generated
// by openDeferExit, and sets the defer's bit in deferBits.
// Open-coded defers are restricted to direct calls with no arguments and
// no results (checked by the Fatalf below).
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun
	// Always evaluate the function value and save it in a dedicated stack
	// slot (see openDeferSave).
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		// Not a static function: keep the saved closure so openDeferExit
		// loads it and calls indirectly. For static functions closure is
		// left nil and the exit code emits a direct call.
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Set bit 'index' of deferBits — both the SSA variable and its memory
	// copy — only after the function value has been saved, so the defer
	// is considered active only once its state is in place.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4260
4261
4262
4263
4264
4265
// openDeferSave stores val (of type t) for an open-coded defer into a
// dedicated autotmp stack slot, so it can be reloaded at defer exit.
// t must be SSA-able and contain pointers (both enforced below).
// It returns the address of the slot.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// The frame offset field temporarily records the defer's index.
	// NOTE(review): inferred from the int64(len(s.openDefers)) value —
	// confirm where the slot's real offset gets assigned.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// Declare the slot (VarDef/VarLive) and take its address in the entry
	// block, so these dominate every later use regardless of where the
	// defer statement appears in the function.
	if s.curBlock.ID != s.f.Entry.ID {
		// Not currently in the entry block: splice the VarDef/VarLive and
		// LocalAddr onto the entry block's memory chain directly.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Already in the entry block: emit at the current position.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}
	// The slot must be zeroed at function entry: it may be inspected
	// before this store has executed (the defer statement may be reached
	// late or never).
	temp.SetNeedzero(true)
	// Finally store the function value into the slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
4310
4311
4312
4313
4314
// openDeferExit generates SSA for the defer exit path of a function with
// open-coded defers: for each recorded defer, in reverse (LIFO) order, it
// tests the defer's bit in deferBits and, if set, clears the bit and calls
// the deferred function.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Record this exit block and how many defers it covers.
	// NOTE(review): presumably so a later exit with the same defer count
	// can jump here instead of regenerating the code — confirm at the
	// users of lastDeferExit/lastDeferCount.
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Deferred calls run last-in first-out, hence the reverse iteration.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Skip the call (jump to bEnd) when bit i of deferBits is clear.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear bit i — in memory and in the SSA variable — before making
		// the call, presumably so the defer is not considered active again
		// if the deferred call itself panics.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		// Track the updated value for later iterations/blocks.
		s.vars[deferBitsVar] = maskedval

		// Generate the call. Open-coded defers take no arguments and have
		// no results (enforced in openDeferRecord), so only the memory
		// state is passed.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Indirect call through the saved closure slot.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Static function: direct call to its symbol.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the saved closure slot marked live across the call.
		// NOTE(review): presumably so it remains findable if the deferred
		// call panics — confirm against liveness handling.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4384
// callResult lowers call n of kind k and returns its result value.
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4388
// callAddr lowers call n of kind k and returns the address of its result.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4392
4393
4394
4395 func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
4396 s.prevCall = nil
4397 var calleeLSym *obj.LSym
4398 var closure *ssa.Value
4399 var codeptr *ssa.Value
4400 var dextra *ssa.Value
4401 var rcvr *ssa.Value
4402 fn := n.Fun
4403 var ACArgs []*types.Type
4404 var ACResults []*types.Type
4405 var callArgs []*ssa.Value
4406
4407 callABI := s.f.ABIDefault
4408
4409 if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
4410 s.Fatalf("go/defer call with arguments: %v", n)
4411 }
4412
4413 switch n.Op() {
4414 case ir.OCALLFUNC:
4415 if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
4416 fn := fn.(*ir.Name)
4417 calleeLSym = callTargetLSym(fn)
4418 if buildcfg.Experiment.RegabiArgs {
4419
4420
4421
4422
4423
4424 if fn.Func != nil {
4425 callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
4426 }
4427 } else {
4428
4429 inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
4430 inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
4431 if inRegistersImported || inRegistersSamePackage {
4432 callABI = s.f.ABI1
4433 }
4434 }
4435 break
4436 }
4437 closure = s.expr(fn)
4438 if k != callDefer && k != callDeferStack {
4439
4440
4441 s.maybeNilCheckClosure(closure, k)
4442 }
4443 case ir.OCALLINTER:
4444 if fn.Op() != ir.ODOTINTER {
4445 s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
4446 }
4447 fn := fn.(*ir.SelectorExpr)
4448 var iclosure *ssa.Value
4449 iclosure, rcvr = s.getClosureAndRcvr(fn)
4450 if k == callNormal {
4451 codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
4452 } else {
4453 closure = iclosure
4454 }
4455 }
4456 if deferExtra != nil {
4457 dextra = s.expr(deferExtra)
4458 }
4459
4460 params := callABI.ABIAnalyze(n.Fun.Type(), false )
4461 types.CalcSize(fn.Type())
4462 stksize := params.ArgWidth()
4463
4464 res := n.Fun.Type().Results()
4465 if k == callNormal || k == callTail {
4466 for _, p := range params.OutParams() {
4467 ACResults = append(ACResults, p.Type)
4468 }
4469 }
4470
4471 var call *ssa.Value
4472 if k == callDeferStack {
4473 if stksize != 0 {
4474 s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
4475 }
4476
4477 t := deferstruct()
4478 n, addr := s.temp(n.Pos(), t)
4479 n.SetNonMergeable(true)
4480 s.store(closure.Type,
4481 s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
4482 closure)
4483
4484
4485 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
4486 aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
4487 callArgs = append(callArgs, addr, s.mem())
4488 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4489 call.AddArgs(callArgs...)
4490 call.AuxInt = int64(types.PtrSize)
4491 } else {
4492
4493
4494 argStart := base.Ctxt.Arch.FixedFrameSize
4495
4496 if k != callNormal && k != callTail {
4497
4498 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
4499 callArgs = append(callArgs, closure)
4500 stksize += int64(types.PtrSize)
4501 argStart += int64(types.PtrSize)
4502 if dextra != nil {
4503
4504 ACArgs = append(ACArgs, types.Types[types.TINTER])
4505 callArgs = append(callArgs, dextra)
4506 stksize += 2 * int64(types.PtrSize)
4507 argStart += 2 * int64(types.PtrSize)
4508 }
4509 }
4510
4511
4512 if rcvr != nil {
4513 callArgs = append(callArgs, rcvr)
4514 }
4515
4516
4517 t := n.Fun.Type()
4518 args := n.Args
4519
4520 for _, p := range params.InParams() {
4521 ACArgs = append(ACArgs, p.Type)
4522 }
4523
4524
4525
4526
4527 if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
4528 b := s.endBlock()
4529 b.Kind = ssa.BlockPlain
4530 curb := s.f.NewBlock(ssa.BlockPlain)
4531 b.AddEdgeTo(curb)
4532 s.startBlock(curb)
4533 }
4534
4535 for i, n := range args {
4536 callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
4537 }
4538
4539 callArgs = append(callArgs, s.mem())
4540
4541
4542 switch {
4543 case k == callDefer:
4544 sym := ir.Syms.Deferproc
4545 if dextra != nil {
4546 sym = ir.Syms.Deferprocat
4547 }
4548 aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
4549 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4550 case k == callGo:
4551 aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
4552 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4553 case closure != nil:
4554
4555
4556
4557
4558
4559 codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
4560 aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
4561 call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
4562 case codeptr != nil:
4563
4564 aux := ssa.InterfaceAuxCall(params)
4565 call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
4566 case calleeLSym != nil:
4567 aux := ssa.StaticAuxCall(calleeLSym, params)
4568 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
4569 if k == callTail {
4570 call.Op = ssa.OpTailLECall
4571 stksize = 0
4572 }
4573 default:
4574 s.Fatalf("bad call type %v %v", n.Op(), n)
4575 }
4576 call.AddArgs(callArgs...)
4577 call.AuxInt = stksize
4578 }
4579 s.prevCall = call
4580 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
4581
4582 for _, v := range n.KeepAlive {
4583 if !v.Addrtaken() {
4584 s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
4585 }
4586 switch v.Class {
4587 case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
4588 default:
4589 s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
4590 }
4591 s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
4592 }
4593
4594
4595 if k == callDefer || k == callDeferStack {
4596 b := s.endBlock()
4597 b.Kind = ssa.BlockDefer
4598 b.SetControl(call)
4599 bNext := s.f.NewBlock(ssa.BlockPlain)
4600 b.AddEdgeTo(bNext)
4601
4602 r := s.f.NewBlock(ssa.BlockPlain)
4603 s.startBlock(r)
4604 s.exit()
4605 b.AddEdgeTo(r)
4606 b.Likely = ssa.BranchLikely
4607 s.startBlock(bNext)
4608 }
4609
4610 if len(res) == 0 || k != callNormal {
4611
4612 return nil
4613 }
4614 fp := res[0]
4615 if returnResultAddr {
4616 return s.resultAddrOfCall(call, 0, fp.Type)
4617 }
4618 return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
4619 }
4620
4621
4622
// maybeNilCheckClosure emits an explicit nil check of closure when the
// target platform needs one before an indirect call: always on wasm, and
// on AIX for every call kind except go statements (k == callGo).
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		// Note operator precedence: this reads as
		// Wasm || (aix && k != callGo).
		// On other platforms the check is presumably provided by the
		// fault at the call itself — TODO confirm.
		s.nilCheck(closure)
	}
}
4630
4631
4632
// getClosureAndRcvr returns values for the "closure" (a pointer to the
// method's entry in the itab's Fun array) and the receiver (the
// interface's data word) for the interface method call fn.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	// Evaluate the interface value itself.
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	// Calling a method on a nil interface must panic here.
	s.nilCheck(itab)
	// Offset of this method's code pointer within the itab.
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
4642
4643
4644
4645 func etypesign(e types.Kind) int8 {
4646 switch e {
4647 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
4648 return -1
4649 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
4650 return +1
4651 }
4652 return 0
4653 }
4654
4655
4656
// addr converts the address of the expression n to SSA, adds it to s
// and returns the SSA result. n must not be SSA-able (see canSSA).
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		// Track the source position for the generated values.
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset computes the address lsym+offset, materialized in the
	// entry block so it can be reused throughout the function.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// TODO: make OpAddr carry the offset in AuxInt as well as Aux.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot; addresses were precomputed at function entry
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			// Ensure that we reuse symbols for out parameters so
			// that cse works on their addresses.
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			// No-op conversion: same address.
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		// The assertion result must still be a live load so that its
		// address argument can be reused directly.
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
4768
4769
4770
// canSSA reports whether n is SSA-able: a field of an SSA-able struct
// or an element of an SSA-able array is a candidate, so selections and
// array indexing are stripped down to the base name first.
func (s *state) canSSA(n ir.Node) bool {
	if base.Flag.N != 0 {
		// Optimizations disabled (-N): keep values addressable in memory.
		return false
	}
	// Strip ODOT field selections and OINDEX of arrays to find the
	// underlying name.
	for {
		nn := n
		if nn.Op() == ir.ODOT {
			nn := nn.(*ir.SelectorExpr)
			n = nn.X
			continue
		}
		if nn.Op() == ir.OINDEX {
			nn := nn.(*ir.IndexExpr)
			if nn.X.Type().IsArray() {
				n = nn.X
				continue
			}
		}
		break
	}
	if n.Op() != ir.ONAME {
		return false
	}
	// Both the variable and its type must be SSA-able.
	return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
}
4796
4797 func (s *state) canSSAName(name *ir.Name) bool {
4798 if name.Addrtaken() || !name.OnStack() {
4799 return false
4800 }
4801 switch name.Class {
4802 case ir.PPARAMOUT:
4803 if s.hasdefer {
4804
4805
4806
4807
4808
4809 return false
4810 }
4811 if s.cgoUnsafeArgs {
4812
4813
4814 return false
4815 }
4816 }
4817 return true
4818
4819 }
4820
4821
// exprPtr evaluates n to an SSA pointer value and nil-checks it,
// skipping the check when bounded is set or n is known non-nil.
func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
	p := s.expr(n)
	if bounded || n.NonNil() {
		// Check elided; report it under the checknil debug flag.
		// NOTE(review): the line > 1 guard presumably filters
		// autogenerated positions — confirm.
		if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
			s.f.Warnl(lineno, "removed nil check")
		}
		return p
	}
	p = s.nilCheck(p)
	return p
}
4833
4834
4835
4836
4837
4838
// nilCheck generates a nil check of ptr if necessary and returns the
// (possibly wrapped) pointer; callers should use the returned value so
// later uses are ordered after the check.
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		// Nil checks suppressed by debug flag or per-function setting.
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
4845
4846
4847
4848
4849
4850
4851
// boundsCheck generates bounds checking code: it checks
// 0 <= idx <[=] len and branches to a panic exit block if not.
// It starts a new (in-bounds) block on return and returns idx extended
// to full int width. If bounded is true the caller guarantees the index
// is in bounds and only the width extension is performed.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If bounded or bounds checking is disabled (-B), no check is
		// necessary — just return the extended index.
		//
		// Here, bounded == true means the compiler generated the index
		// itself, so it is not attacker-controlled and Spectre masking
		// can be omitted as well.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Switch to the unsigned variant of the failure kind so the
		// runtime reports the out-of-range value correctly.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Index checks are strict (<), slice bound checks are inclusive (<=).
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// On wasm the panic is emitted as an ordinary runtime call
		// rather than an OpPanicBounds value.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply the appropriate mask to avoid
	// speculative out-of-bounds accesses.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
4938
4939
// check generates code that branches to a panic block calling fn (with
// no arguments) when cmp is false. Panic blocks are deduplicated per
// (function, position) via s.panics so repeated checks at the same
// source line share one call site.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // success path is expected
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// rtcall with returns=false ends the panic block as an exit.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
4962
// intDivide emits n's integer division or modulus a op b, inserting a
// runtime divide-by-zero check unless b is a nonzero constant.
func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
	needcheck := true
	switch b.Op {
	case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
		if b.AuxInt != 0 {
			needcheck = false
		}
	}
	if needcheck {
		// Panic if b == 0; the comparison uses n's type so the check
		// is done at the operand's width.
		cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
		s.check(cmp, ir.Syms.Panicdivide)
	}
	return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
}
4978
4979
4980
4981
4982
// rtcall issues a call to the given runtime function fn with the listed
// args, returning a slice of results of the given result types.
// The call is added to the end of the current block.
// If returns is false, the block is marked as an exit block (the runtime
// function must not return) and nil is returned.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Accumulate argument values/types and the stack space they need.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call (late-expansion form: memory is the last argument).
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block: the callee does not return.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5034
5035
// storeType does *left = right for type t, splitting the store so that
// pointer parts go through the write barrier machinery. skip selects
// parts that need not be stored; leftIsStmt marks the store as a
// statement boundary for debug info.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not need a write barrier. Store the whole thing.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so write barrier stores for
	// pointer fields can be grouped together, and scalar values
	// don't need to be live across the write barrier call.
	// TODO: if the writebarrier pass knows how to reorder stores,
	// we can do a single store here as long as skip==0.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5055
5056
// storeTypeScalars does *left = right for all scalar (non-pointer)
// parts of t; the pointer parts are handled by storeTypePtrs.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap memory are treated as scalars
			// (no write barrier needed).
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store the length word (second word of the string header).
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// Store len (word 2) and cap (word 3) of the slice header.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// The itab word is stored here as a scalar (no write barrier);
		// the data word is handled by storeTypePtrs.
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		// Recurse field by field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5104
5105
// storeTypePtrs does *left = right for all pointer parts of t
// (the complement of storeTypeScalars).
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			break // not-in-heap pointers were stored by storeTypeScalars
		}
		s.store(t, left, right)
	case t.IsString():
		// Store the data pointer (first word of the string header).
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		// Store the data pointer (first word of the slice header).
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// The itab word was stored by storeTypeScalars; only the data
		// word (second word) is stored here.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		// Recurse only into fields that actually contain pointers.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing to store
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5144
5145
5146 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5147 var a *ssa.Value
5148 if !ssa.CanSSA(t) {
5149 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5150 } else {
5151 a = s.expr(n)
5152 }
5153 return a
5154 }
5155
// storeArgWithBase stores argument n of type t at offset off from base.
func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
	pt := types.NewPtr(t)
	var addr *ssa.Value
	if base == s.sp {
		// Constant offsets from SP use the dedicated helper.
		addr = s.constOffPtrSP(pt, off)
	} else {
		addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
	}

	if !ssa.CanSSA(t) {
		// Not representable as an SSA value: copy via its address.
		a := s.addr(n)
		s.move(t, addr, a)
		return
	}

	a := s.expr(n)
	s.storeType(t, addr, a, 0, false)
}
5175
5176
5177
5178
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of the result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values for omitted indices.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// The checks run from the largest index down so each comparison is
	// against a bound already known to be valid.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice.
	// For strings the capacity of the result is unimportant, so rcap is
	// simply left equal to rlen.
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary when slicing from index 0.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice.
	//
	// Generate the following, assuming the indexes are in bounds.
	// The masking is to make sure that we don't generate a slice
	// that points to the next object in memory; we cannot just set
	// the pointer to nil because that would create a nil slice or
	// string.
	//
	//     rptr = ptr + (mask(rcap) & (i * stride))
	//
	// where mask(x) is 0 if x==0 and -1 if x>0, and stride is the
	// width of the element type.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta is the number of bytes to offset ptr by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// If we're slicing to the point where the capacity is zero,
	// zero out the delta.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5291
// u642fcvtTab holds the opcodes used by uint64Tofloat to convert a
// uint64 to a float of some width. one constructs an integer constant
// in the source type (used for the value 1).
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 converts uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq: ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and: ssa.OpAnd64,
	rsh: ssa.OpRsh64Ux64,
	or: ssa.OpOr64,
	add: ssa.OpAdd64F,
	one: (*state).constInt64,
}

// u64_f32 converts uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq: ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and: ssa.OpAnd64,
	rsh: ssa.OpRsh64Ux64,
	or: ssa.OpOr64,
	add: ssa.OpAdd32F,
	one: (*state).constInt64,
}
5316
// uint64Tofloat64 converts the uint64 x to float64.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

// uint64Tofloat32 converts the uint64 x to float32.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5324
// uint64Tofloat converts the uint64 x (of type ft) to the float type tt
// using the opcodes in cvttab. It generates:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		y = x & 1
//		z = x >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result
//	}
//
// A uint64 with the high bit set looks negative to the hardware's
// signed int-to-float conversion, so it is halved with an unsigned
// shift, converted, and doubled. The low bit is OR'd back in before
// the shift so the final rounding is unaffected by dropping it.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// then: high bit clear, direct signed conversion is correct.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// else: halve (preserving the sticky low bit), convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5382
// u322fcvtTab holds the opcodes used by uint32Tofloat: the initial
// int-to-float conversion and the final float64-to-target conversion.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 converts uint32 -> float64 (no narrowing needed).
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 converts uint32 -> float32 (narrowed from float64).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5396
// uint32Tofloat64 converts the uint32 x to float64.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

// uint32Tofloat32 converts the uint32 x to float32.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5404
// uint32Tofloat converts the uint32 x (of type ft) to the float type tt
// using the opcodes in cvttab. It generates:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1 << 32))
//	}
//
// (x reinterpreted as a signed int32 is negative exactly when the high
// bit is set; adding 2^32 after a signed conversion to float64 recovers
// the unsigned value exactly.)
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// then: high bit clear, direct signed conversion is correct.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// else: convert via float64 and add 2^32, then convert to target width.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5442
5443
5444 func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
5445 if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
5446 s.Fatalf("node must be a map or a channel")
5447 }
5448 if n.X.Type().IsChan() && n.Op() == ir.OLEN {
5449 s.Fatalf("cannot inline len(chan)")
5450 }
5451 if n.X.Type().IsChan() && n.Op() == ir.OCAP {
5452 s.Fatalf("cannot inline cap(chan)")
5453 }
5454
5455
5456
5457
5458
5459
5460
5461
5462 lenType := n.Type()
5463 nilValue := s.constNil(types.Types[types.TUINTPTR])
5464 cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
5465 b := s.endBlock()
5466 b.Kind = ssa.BlockIf
5467 b.SetControl(cmp)
5468 b.Likely = ssa.BranchUnlikely
5469
5470 bThen := s.f.NewBlock(ssa.BlockPlain)
5471 bElse := s.f.NewBlock(ssa.BlockPlain)
5472 bAfter := s.f.NewBlock(ssa.BlockPlain)
5473
5474
5475 b.AddEdgeTo(bThen)
5476 s.startBlock(bThen)
5477 s.vars[n] = s.zeroVal(lenType)
5478 s.endBlock()
5479 bThen.AddEdgeTo(bAfter)
5480
5481 b.AddEdgeTo(bElse)
5482 s.startBlock(bElse)
5483 switch n.Op() {
5484 case ir.OLEN:
5485 if buildcfg.Experiment.SwissMap && n.X.Type().IsMap() {
5486
5487 s.vars[n] = s.load(lenType, x)
5488 } else {
5489
5490 s.vars[n] = s.load(lenType, x)
5491 }
5492 case ir.OCAP:
5493
5494 sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
5495 s.vars[n] = s.load(lenType, sw)
5496 default:
5497 s.Fatalf("op must be OLEN or OCAP")
5498 }
5499 s.endBlock()
5500 bElse.AddEdgeTo(bAfter)
5501
5502 s.startBlock(bAfter)
5503 return s.variable(n, lenType)
5504 }
5505
// f2uCvtTab holds the opcodes and constants used by floatToUint to
// convert a float32/float64 to a uint32/uint64. cutoff is 2^(width-1),
// the first value that does not fit in the signed conversion.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue func(*state, *types.Type, float64) *ssa.Value
	intValue func(*state, *types.Type, int64) *ssa.Value
	cutoff uint64
}

// f32_u64 converts float32 -> uint64.
var f32_u64 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto64,
	subf: ssa.OpSub32F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}

// f64_u64 converts float64 -> uint64.
var f64_u64 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto64,
	subf: ssa.OpSub64F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}

// f32_u32 converts float32 -> uint32.
var f32_u32 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto32,
	subf: ssa.OpSub32F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}

// f64_u32 converts float64 -> uint32.
var f64_u32 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto32,
	subf: ssa.OpSub64F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}
5552
// float32ToUint64 converts the float32 x to uint64.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts the float64 x to uint64.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts the float32 x to uint32.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts the float64 x to uint32.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
5567
// floatToUint converts the float x (of type ft) to an unsigned integer
// of type tt using the opcodes in cvttab. It generates:
//
//	cutoff := 1 << (intY_Size - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)
//	} else {
//		y = x - floatX(cutoff)
//		z = uintY(y)
//		result = z | -(cutoff)
//	}
//
// Values at or above the cutoff don't fit the signed conversion, so the
// cutoff is subtracted first and the high bit is OR'd back afterwards.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// then: fits the signed conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// else: subtract the cutoff, convert, then restore the high bit.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5608
5609
5610
5611
// dottype generates SSA for a type assertion node n.
// commaok indicates whether to panic (false) or return an extra bool
// result (true) when the assertion fails.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)                // input interface
	target := s.reflectType(n.Type())   // type descriptor for the asserted type
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
5621
// dynamicDottype generates SSA for a dynamic type assertion node, where
// the asserted type's descriptor is computed at run time rather than
// known statically.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		// Nonempty source interface asserted to a concrete type:
		// use the itab and load the target type descriptor from its
		// Type field.
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
5639
5640
5641
5642
5643
5644
5645
5646
5647
5648 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
5649 typs := s.f.Config.Types
5650 byteptr := typs.BytePtr
5651 if dst.IsInterface() {
5652 if dst.IsEmptyInterface() {
5653
5654
5655 if base.Debug.TypeAssert > 0 {
5656 base.WarnfAt(pos, "type assertion inlined")
5657 }
5658
5659
5660 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5661
5662 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5663
5664 if src.IsEmptyInterface() && commaok {
5665
5666 return iface, cond
5667 }
5668
5669
5670 b := s.endBlock()
5671 b.Kind = ssa.BlockIf
5672 b.SetControl(cond)
5673 b.Likely = ssa.BranchLikely
5674 bOk := s.f.NewBlock(ssa.BlockPlain)
5675 bFail := s.f.NewBlock(ssa.BlockPlain)
5676 b.AddEdgeTo(bOk)
5677 b.AddEdgeTo(bFail)
5678
5679 if !commaok {
5680
5681 s.startBlock(bFail)
5682 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5683
5684
5685 s.startBlock(bOk)
5686 if src.IsEmptyInterface() {
5687 res = iface
5688 return
5689 }
5690
5691 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5692 typ := s.load(byteptr, off)
5693 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5694 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
5695 return
5696 }
5697
5698 s.startBlock(bOk)
5699
5700
5701 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5702 s.vars[typVar] = s.load(byteptr, off)
5703 s.endBlock()
5704
5705
5706 s.startBlock(bFail)
5707 s.vars[typVar] = itab
5708 s.endBlock()
5709
5710
5711 bEnd := s.f.NewBlock(ssa.BlockPlain)
5712 bOk.AddEdgeTo(bEnd)
5713 bFail.AddEdgeTo(bEnd)
5714 s.startBlock(bEnd)
5715 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5716 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
5717 resok = cond
5718 delete(s.vars, typVar)
5719 return
5720 }
5721
5722 if base.Debug.TypeAssert > 0 {
5723 base.WarnfAt(pos, "type assertion not inlined")
5724 }
5725
5726 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5727 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
5728
5729
5730 bNil := s.f.NewBlock(ssa.BlockPlain)
5731 bNonNil := s.f.NewBlock(ssa.BlockPlain)
5732 bMerge := s.f.NewBlock(ssa.BlockPlain)
5733 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5734 b := s.endBlock()
5735 b.Kind = ssa.BlockIf
5736 b.SetControl(cond)
5737 b.Likely = ssa.BranchLikely
5738 b.AddEdgeTo(bNonNil)
5739 b.AddEdgeTo(bNil)
5740
5741 s.startBlock(bNil)
5742 if commaok {
5743 s.vars[typVar] = itab
5744 b := s.endBlock()
5745 b.AddEdgeTo(bMerge)
5746 } else {
5747
5748 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5749 }
5750
5751
5752 s.startBlock(bNonNil)
5753 typ := itab
5754 if !src.IsEmptyInterface() {
5755 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
5756 }
5757
5758
5759 var d *ssa.Value
5760 if descriptor != nil {
5761 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
5762 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
5763
5764
5765 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
5766 s.Fatalf("atomic load not available")
5767 }
5768
5769 var mul, and, add, zext ssa.Op
5770 if s.config.PtrSize == 4 {
5771 mul = ssa.OpMul32
5772 and = ssa.OpAnd32
5773 add = ssa.OpAdd32
5774 zext = ssa.OpCopy
5775 } else {
5776 mul = ssa.OpMul64
5777 and = ssa.OpAnd64
5778 add = ssa.OpAdd64
5779 zext = ssa.OpZeroExt32to64
5780 }
5781
5782 loopHead := s.f.NewBlock(ssa.BlockPlain)
5783 loopBody := s.f.NewBlock(ssa.BlockPlain)
5784 cacheHit := s.f.NewBlock(ssa.BlockPlain)
5785 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
5786
5787
5788
5789 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
5790 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
5791 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
5792
5793
5794 var hash *ssa.Value
5795 if src.IsEmptyInterface() {
5796 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
5797 } else {
5798 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
5799 }
5800 hash = s.newValue1(zext, typs.Uintptr, hash)
5801 s.vars[hashVar] = hash
5802
5803 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
5804
5805 b := s.endBlock()
5806 b.AddEdgeTo(loopHead)
5807
5808
5809
5810 s.startBlock(loopHead)
5811 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
5812 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
5813 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
5814 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
5815
5816 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
5817
5818
5819
5820 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
5821 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
5822 b = s.endBlock()
5823 b.Kind = ssa.BlockIf
5824 b.SetControl(cmp1)
5825 b.AddEdgeTo(cacheHit)
5826 b.AddEdgeTo(loopBody)
5827
5828
5829
5830 s.startBlock(loopBody)
5831 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
5832 b = s.endBlock()
5833 b.Kind = ssa.BlockIf
5834 b.SetControl(cmp2)
5835 b.AddEdgeTo(cacheMiss)
5836 b.AddEdgeTo(loopHead)
5837
5838
5839
5840 s.startBlock(cacheHit)
5841 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
5842 s.vars[typVar] = eItab
5843 b = s.endBlock()
5844 b.AddEdgeTo(bMerge)
5845
5846
5847 s.startBlock(cacheMiss)
5848 }
5849 }
5850
5851
5852 if descriptor != nil {
5853 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
5854 } else {
5855 var fn *obj.LSym
5856 if commaok {
5857 fn = ir.Syms.AssertE2I2
5858 } else {
5859 fn = ir.Syms.AssertE2I
5860 }
5861 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
5862 }
5863 s.vars[typVar] = itab
5864 b = s.endBlock()
5865 b.AddEdgeTo(bMerge)
5866
5867
5868 s.startBlock(bMerge)
5869 itab = s.variable(typVar, byteptr)
5870 var ok *ssa.Value
5871 if commaok {
5872 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5873 }
5874 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
5875 }
5876
5877 if base.Debug.TypeAssert > 0 {
5878 base.WarnfAt(pos, "type assertion inlined")
5879 }
5880
5881
5882 direct := types.IsDirectIface(dst)
5883 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5884 if base.Debug.TypeAssert > 0 {
5885 base.WarnfAt(pos, "type assertion inlined")
5886 }
5887 var wantedFirstWord *ssa.Value
5888 if src.IsEmptyInterface() {
5889
5890 wantedFirstWord = target
5891 } else {
5892
5893 wantedFirstWord = targetItab
5894 }
5895
5896 var tmp ir.Node
5897 var addr *ssa.Value
5898 if commaok && !ssa.CanSSA(dst) {
5899
5900
5901 tmp, addr = s.temp(pos, dst)
5902 }
5903
5904 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
5905 b := s.endBlock()
5906 b.Kind = ssa.BlockIf
5907 b.SetControl(cond)
5908 b.Likely = ssa.BranchLikely
5909
5910 bOk := s.f.NewBlock(ssa.BlockPlain)
5911 bFail := s.f.NewBlock(ssa.BlockPlain)
5912 b.AddEdgeTo(bOk)
5913 b.AddEdgeTo(bFail)
5914
5915 if !commaok {
5916
5917 s.startBlock(bFail)
5918 taddr := source
5919 if taddr == nil {
5920 taddr = s.reflectType(src)
5921 }
5922 if src.IsEmptyInterface() {
5923 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
5924 } else {
5925 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
5926 }
5927
5928
5929 s.startBlock(bOk)
5930 if direct {
5931 return s.newValue1(ssa.OpIData, dst, iface), nil
5932 }
5933 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
5934 return s.load(dst, p), nil
5935 }
5936
5937
5938
5939 bEnd := s.f.NewBlock(ssa.BlockPlain)
5940
5941
5942 valVar := ssaMarker("val")
5943
5944
5945 s.startBlock(bOk)
5946 if tmp == nil {
5947 if direct {
5948 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
5949 } else {
5950 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
5951 s.vars[valVar] = s.load(dst, p)
5952 }
5953 } else {
5954 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
5955 s.move(dst, addr, p)
5956 }
5957 s.vars[okVar] = s.constBool(true)
5958 s.endBlock()
5959 bOk.AddEdgeTo(bEnd)
5960
5961
5962 s.startBlock(bFail)
5963 if tmp == nil {
5964 s.vars[valVar] = s.zeroVal(dst)
5965 } else {
5966 s.zero(dst, addr)
5967 }
5968 s.vars[okVar] = s.constBool(false)
5969 s.endBlock()
5970 bFail.AddEdgeTo(bEnd)
5971
5972
5973 s.startBlock(bEnd)
5974 if tmp == nil {
5975 res = s.variable(valVar, dst)
5976 delete(s.vars, valVar)
5977 } else {
5978 res = s.load(dst, addr)
5979 }
5980 resok = s.variable(okVar, types.Types[types.TBOOL])
5981 delete(s.vars, okVar)
5982 return res, resok
5983 }
5984
5985
5986 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
5987 tmp := typecheck.TempAt(pos, s.curfn, t)
5988 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
5989 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
5990 }
5991 addr := s.addr(tmp)
5992 return tmp, addr
5993 }
5994
5995
5996 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
5997 v := s.vars[n]
5998 if v != nil {
5999 return v
6000 }
6001 v = s.fwdVars[n]
6002 if v != nil {
6003 return v
6004 }
6005
6006 if s.curBlock == s.f.Entry {
6007
6008 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6009 }
6010
6011
6012 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6013 s.fwdVars[n] = v
6014 if n.Op() == ir.ONAME {
6015 s.addNamedValue(n.(*ir.Name), v)
6016 }
6017 return v
6018 }
6019
// mem returns the current memory pseudo-value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6023
6024 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6025 if n.Class == ir.Pxxx {
6026
6027 return
6028 }
6029 if ir.IsAutoTmp(n) {
6030
6031 return
6032 }
6033 if n.Class == ir.PPARAMOUT {
6034
6035
6036 return
6037 }
6038 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6039 values, ok := s.f.NamedValues[loc]
6040 if !ok {
6041 s.f.Names = append(s.f.Names, &loc)
6042 s.f.CanonicalLocalSlots[loc] = &loc
6043 }
6044 s.f.NamedValues[loc] = append(values, v)
6045 }
6046
6047
// Branch is an unresolved branch: a branch instruction P whose target
// block is B. The Prog's target is filled in once all blocks have been
// laid out and their starting Progs are known.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6052
6053
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump table blocks we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// livenessMap maps safe points to liveness information,
	// produced by liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live,
	// for which register-spill instructions must be generated.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of
	// instructions sharing the same source line within a block;
	// used to float statement marks to the start of such runs.
	lineRunStart *obj.Prog

	// OnWasmStackSkipped counts values kept on the WebAssembly stack
	// (wasm backend only).
	OnWasmStackSkipped int
}
6087
// FuncInfo returns the obj.FuncInfo for the function being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6091
6092
6093 func (s *State) Prog(as obj.As) *obj.Prog {
6094 p := s.pp.Prog(as)
6095 if objw.LosesStmtMark(as) {
6096 return p
6097 }
6098
6099
6100 if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
6101 s.lineRunStart = p
6102 } else if p.Pos.IsStmt() == src.PosIsStmt {
6103 s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
6104 p.Pos = p.Pos.WithNotStmt()
6105 }
6106 return p
6107 }
6108
6109
// Pc returns the current Prog (the next instruction slot to be filled).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6113
6114
// SetPos sets the current source position.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6118
6119
6120
6121
6122 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
6123 p := s.Prog(op)
6124 p.To.Type = obj.TYPE_BRANCH
6125 s.Branches = append(s.Branches, Branch{P: p, B: target})
6126 return p
6127 }
6128
6129
6130
6131
6132
6133
// DebugFriendlySetPosFrom sets the current source position from v's
// position, adjusting the IsStmt flag with heuristics that reduce
// "jumpy" line-number behavior when debugging: register-allocator
// artifacts and values without positions should not become statement
// boundaries.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// The value carries a real position: adopt it, but only
			// explicit statement boundaries should survive into the
			// generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// The current position already carries a statement
					// mark for the same file and line. Leaving it in
					// place keeps the existing mark; replacing it with
					// p's non-statement variant would lose it. So keep
					// the current position untouched.
					return
				}
				p = p.WithNotStmt()
				// Fall through and set the (non-statement) position.
			}
			s.SetPos(p)
		} else {
			// No position on the value: keep the current file/line but
			// make sure it is not a statement boundary.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6172
6173
6174 func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
6175 ft := e.curfn.Type()
6176 if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
6177 return
6178 }
6179
6180 x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
6181 x.Set(obj.AttrContentAddressable, true)
6182 e.curfn.LSym.Func().ArgInfo = x
6183
6184
6185 p := pp.Prog(obj.AFUNCDATA)
6186 p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
6187 p.To.Type = obj.TYPE_MEM
6188 p.To.Name = obj.NAME_EXTERN
6189 p.To.Sym = x
6190 }
6191
6192
// EmitArgInfo emits the funcdata symbol that describes the offsets and
// sizes of f's arguments, used by the runtime to print argument values
// in tracebacks. The encoding is a byte stream of (offset, size) pairs
// and aggregate start/end markers (see the rtabi.TraceArgs* constants).
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
	// Note: the ContentAddressable attribute is not set here; the caller
	// (emitArgInfo) sets it. TODO confirm why it is deferred — presumably
	// because this symbol may also be looked up for declarations.

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is a composite type that is encoded
	// as a bracketed group of components.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into the symbol's data
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// write1 writes one non-aggregate component of size sz at offset.
	// Offsets too large for the one-byte encoding get a sentinel byte.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType recursively encodes t starting at baseOffset, bounding
	// both the total component count and the nesting depth. It returns
	// whether the caller should continue encoding further components.
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words (ptr, len, cap).
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Two floats of half the complex size.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty array still counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty struct still counts as a component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Instantiated generic function: skip the implicit dictionary
		// argument, which the user should not see.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6296
6297
6298 func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
6299 if base.Ctxt.Flag_linkshared {
6300
6301
6302 return
6303 }
6304
6305 wfn := e.curfn.WrappedFunc
6306 if wfn == nil {
6307 return
6308 }
6309
6310 wsym := wfn.Linksym()
6311 x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
6312 objw.SymPtrOff(x, 0, wsym)
6313 x.Set(obj.AttrContentAddressable, true)
6314 })
6315 e.curfn.LSym.Func().WrapInfo = x
6316
6317
6318 p := pp.Prog(obj.AFUNCDATA)
6319 p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
6320 p.To.Type = obj.TYPE_MEM
6321 p.To.Name = obj.NAME_EXTERN
6322 p.To.Sym = x
6323 }
6324
6325
// genssa appends entries to pp for each instruction in f. It walks the
// function's blocks and values in layout order, asking the architecture
// backend to generate machine instructions, then resolves branches and
// jump tables, emits optional debug dumps, and finishes the frame.
func genssa(f *ssa.Func, pp *objw.Progs) {
	var s State
	s.ABI = f.OwnAux.Fn.ABI()

	e := f.Frontend().(*ssafn)

	// Compute liveness maps and emit argument metadata.
	s.livenessMap, s.partLiveArgs = liveness.Compute(e.curfn, f, e.stkptrsize, pp)
	emitArgInfo(e, f, pp)
	argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)

	openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
	if openDeferInfo != nil {
		// This function uses open-coded defers: emit the funcdata that
		// points at the defer info.
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = openDeferInfo
	}

	emitWrappedFuncInfo(e, pp)

	// Remember where each block starts.
	s.bstart = make([]*obj.Prog, f.NumBlocks())
	s.pp = pp
	var progToValue map[*obj.Prog]*ssa.Value
	var progToBlock map[*obj.Prog]*ssa.Block
	var valueToProgAfter []*obj.Prog // first Prog following the computation of value v
	gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
	if gatherPrintInfo {
		progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
		progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
		f.Logf("genssa %s\n", f.Name)
		progToBlock[s.pp.Next] = f.Blocks[0]
	}

	if base.Ctxt.Flag_locationlists {
		// Reuse (growing if needed) the cached per-value Prog table used
		// to build DWARF location lists.
		if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
			f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
		}
		valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
		for i := range valueToProgAfter {
			valueToProgAfter[i] = nil
		}
	}

	// Remember the first real statement position in the entry block so it
	// can be floated to the function's first emitted instruction below
	// (register-spill/fill ops and args are skipped when searching).
	firstPos := src.NoXPos
	for _, v := range f.Entry.Values {
		if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
			firstPos = v.Pos
			v.Pos = firstPos.WithDefaultStmt()
			break
		}
	}

	// inlMarks maps inline-mark Progs to the inlining-tree index they
	// represent; inlMarkList keeps them in emission order.
	var inlMarks map[*obj.Prog]int32
	var inlMarkList []*obj.Prog

	// inlMarksByPos groups inline-mark Progs by their (column-1) source
	// position, so real instructions at the same position can stand in
	// for the marks.
	var inlMarksByPos map[src.XPos][]*obj.Prog

	var argLiveIdx int = -1 // current argument liveness index

	// Cache-line alignment parameters for PGO-hot blocks; zero means no
	// alignment is requested.
	var hotAlign, hotRequire int64

	if base.Debug.AlignHot > 0 {
		switch base.Ctxt.Arch.Name {
		// Enabled per-architecture based on benchmarking; currently only
		// x86 variants request hot-block alignment here.
		case "amd64", "386":
			// Align to a 64-byte cache line when fewer than 32 bytes of
			// the current line remain.
			hotAlign = 64
			hotRequire = 31
		}
	}

	// Emit basic blocks.
	for i, b := range f.Blocks {
		// Reset the same-line run tracker and give the block a non-empty,
		// non-statement starting position.
		s.lineRunStart = nil
		s.SetPos(s.pp.Pos.WithNotStmt())

		if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
			// Request alignment for this PGO-hot block entry via a
			// conditional-alignment pseudo-instruction: align to hotAlign
			// only if at least hotRequire bytes of the line remain.
			p := s.pp.Prog(obj.APCALIGNMAX)
			p.From.SetConst(hotAlign)
			p.To.SetConst(hotRequire)
		}

		s.bstart[b.ID] = s.pp.Next

		if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
			// Argument liveness changes at this block: emit PCDATA.
			argLiveIdx = idx
			p := s.pp.Prog(obj.APCDATA)
			p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
			p.To.SetConst(int64(idx))
		}

		// Emit values in block.
		Arch.SSAMarkMoves(&s, b)
		for _, v := range b.Values {
			x := s.pp.Next
			s.DebugFriendlySetPosFrom(v)

			if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
				v.Fatalf("input[0] and output not in same register %s", v.LongString())
			}

			switch v.Op {
			case ssa.OpInitMem:
				// memory arg needs no code
			case ssa.OpArg:
				// input args need no code
			case ssa.OpSP, ssa.OpSB:
				// nothing to do
			case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
				// nothing to do
			case ssa.OpGetG:
				// nothing to do
			case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
				// liveness-only pseudo-ops; no machine code
			case ssa.OpPhi:
				CheckLoweredPhi(v)
			case ssa.OpConvert:
				// no code needed, but verify the register allocator made
				// it a true no-op
				if v.Args[0].Reg() != v.Reg() {
					v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
				}
			case ssa.OpInlMark:
				// Emit a placeholder nop that marks an inlined call site;
				// matched to real instructions (or kept as a nop) below.
				p := Arch.Ginsnop(s.pp)
				if inlMarks == nil {
					inlMarks = map[*obj.Prog]int32{}
					inlMarksByPos = map[src.XPos][]*obj.Prog{}
				}
				inlMarks[p] = v.AuxInt32()
				inlMarkList = append(inlMarkList, p)
				pos := v.Pos.AtColumn1()
				inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
				firstPos = src.NoXPos

			default:
				// Float the function's first statement position to the
				// first real instruction (not a register fill/spill).
				if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
					s.SetPos(firstPos)
					firstPos = src.NoXPos
				}

				// Attach this value's liveness info to the next instruction.
				s.pp.NextLive = s.livenessMap.Get(v)
				s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)

				// Let the architecture backend generate the code.
				Arch.SSAGenValue(&s, v)
			}

			if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
				argLiveIdx = idx
				p := s.pp.Prog(obj.APCDATA)
				p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
				p.To.SetConst(int64(idx))
			}

			if base.Ctxt.Flag_locationlists {
				valueToProgAfter[v.ID] = s.pp.Next
			}

			if gatherPrintInfo {
				for ; x != s.pp.Next; x = x.Link {
					progToValue[x] = v
				}
			}
		}

		// If this is an empty self-loop, insert a hardware nop with a
		// bogus line so debuggers behave better around the infinite loop.
		if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
			p := Arch.Ginsnop(s.pp)
			p.Pos = p.Pos.WithIsStmt()
			if b.Pos == src.NoXPos {
				b.Pos = p.Pos // give the block a position with a file
				if b.Pos == src.NoXPos {
					b.Pos = s.pp.Text.Pos // fall back to the function's position
				}
			}
			b.Pos = b.Pos.WithBogusLine()
		}

		// Set the unsafe mark for any end-of-block generated instructions
		// (normally, conditional or unconditional branches). This matters
		// especially for empty blocks, which have no values to inherit
		// the mark from.
		s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)

		// Emit control flow instructions for the block. next is the block
		// laid out immediately after, so fallthroughs can be elided; with
		// -N, next stays nil so blocks end in explicit jumps.
		var next *ssa.Block
		if i < len(f.Blocks)-1 && base.Flag.N == 0 {
			next = f.Blocks[i+1]
		}
		x := s.pp.Next
		s.SetPos(b.Pos)
		Arch.SSAGenBlock(&s, b, next)
		if gatherPrintInfo {
			for ; x != s.pp.Next; x = x.Link {
				progToBlock[x] = b
			}
		}
	}
	if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
		// Insert a nop after a trailing exit block so the return address
		// of a panicking call still lies within this function's text.
		Arch.Ginsnop(s.pp)
	}
	if openDeferInfo != nil {
		// For open-coded defers, generate a disconnected call to
		// deferreturn plus a return; used during panic recovery to unwind
		// and return to the runtime.
		s.pp.NextLive = s.livenessMap.DeferReturn
		p := s.pp.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Deferreturn

		// Load results into registers, so that when a deferred function
		// recovers a panic, the caller sees the right result values.
		// The results are in memory because functions with defers don't
		// SSA their results.
		for _, o := range f.OwnAux.ABIInfo().OutParams() {
			n := o.Name
			rts, offs := o.RegisterTypesAndOffsets()
			for i := range o.Registers {
				Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
			}
		}

		s.pp.Prog(obj.ARET)
	}

	if inlMarks != nil {
		hasCall := false

		// Try to find real instructions that can stand in for the inline
		// mark nops, so the nops don't have to stay in the output.
		for p := s.pp.Text; p != nil; p = p.Link {
			if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
				p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
				// Skip instructions that may be zero-sized: inline marks
				// must be identifiable by pc offset, so anything that even
				// might assemble to zero bytes cannot stand in for one.
				continue
			}
			if _, ok := inlMarks[p]; ok {
				// Skip the inline marks themselves; whether they will be
				// zero-sized is not known yet.
				continue
			}
			if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
				hasCall = true
			}
			pos := p.Pos.AtColumn1()
			marks := inlMarksByPos[pos]
			if len(marks) == 0 {
				continue
			}
			for _, m := range marks {
				// This real instruction shares a source position with one
				// or more inline marks: use it instead and neutralize the
				// mark nops completely.
				p.Pos = p.Pos.WithIsStmt() // promote to a statement
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])

				m.As = obj.ANOP
				m.Pos = src.NoXPos
				m.From = obj.Addr{}
				m.To = obj.Addr{}
			}
			delete(inlMarksByPos, pos)
		}
		// Any unmatched inline marks are added to the inlining tree as-is
		// (and will emit a real nop).
		for _, p := range inlMarkList {
			if p.As != obj.ANOP {
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
			}
		}

		if e.stksize == 0 && !hasCall {
			// Frameless leaf function: it has no prologue, so make sure
			// its first instruction isn't attributed to an inlined callee.
			// If it is, splice a nop carrying the outermost function's
			// position in front of it.
			for p := s.pp.Text; p != nil; p = p.Link {
				if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
					continue
				}
				if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
					// Create a real (non-zero-sized) nop with the outer
					// function's position.
					nop := Arch.Ginsnop(s.pp)
					nop.Pos = e.curfn.Pos().WithIsStmt()

					// Ginsnop appended the nop at the end of the list;
					// unlink it from there...
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == nop {
							x.Link = nop.Link
							break
						}
					}
					// ...and re-link it just before p.
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == p {
							nop.Link = p
							x.Link = nop
							break
						}
					}
				}
				break
			}
		}
	}

	if base.Ctxt.Flag_locationlists {
		var debugInfo *ssa.FuncDebug
		debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
		if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
			ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
		} else {
			ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
		}
		bstart := s.bstart
		idToIdx := make([]int, f.NumBlocks())
		for i, b := range f.Blocks {
			idToIdx[b.ID] = i
		}

		// Register a callback used later to translate SSA block/value IDs
		// into PCs for location lists (Prog.Pc values are only final
		// after assembly).
		debugInfo.GetPC = func(b, v ssa.ID) int64 {
			switch v {
			case ssa.BlockStart.ID:
				if b == f.Entry.ID {
					return 0 // the entry block starts at PC 0

				}
				return bstart[b].Pc
			case ssa.BlockEnd.ID:
				blk := f.Blocks[idToIdx[b]]
				nv := len(blk.Values)
				return valueToProgAfter[blk.Values[nv-1].ID].Pc
			case ssa.FuncEnd.ID:
				return e.curfn.LSym.Size
			default:
				return valueToProgAfter[v].Pc
			}
		}
	}

	// Resolve branch targets now that every block's start Prog is known,
	// and clear statement marks on branches that would duplicate the
	// target's own statement mark.
	for _, br := range s.Branches {
		br.P.To.SetTarget(s.bstart[br.B.ID])
		if br.P.Pos.IsStmt() != src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		} else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		}

	}

	// Resolve jump-table targets from blocks to Progs.
	for _, jt := range s.JumpTables {
		targets := make([]*obj.Prog, len(jt.Succs))
		for i, e := range jt.Succs {
			targets[i] = s.bstart[e.Block().ID]
		}
		// Hand the table to the assembler, which converts Prog targets to
		// absolute addresses once instruction offsets are known.
		fi := s.pp.CurFunc.LSym.Func()
		fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
	}

	if e.log { // dump generated assembly to the log
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			// Print the filename only when it changes.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				f.Logf("# %s\n", filename)
			}

			var s string
			if v, ok := progToValue[p]; ok {
				s = v.String()
			} else if b, ok := progToBlock[p]; ok {
				s = b.String()
			} else {
				s = "   " // filler for instructions without an SSA origin
			}
			f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
		}
	}
	if f.HTMLWriter != nil { // emit the genssa column of ssa.html
		var buf strings.Builder
		buf.WriteString("<code>")
		buf.WriteString("<dl class=\"ssa-gen\">")
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			// Print the filename only when it changes, to avoid spamming
			// every line with a (possibly huge) path.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
				buf.WriteString(html.EscapeString("# " + filename))
				buf.WriteString("</dd>")
			}

			buf.WriteString("<dt class=\"ssa-prog-src\">")
			if v, ok := progToValue[p]; ok {
				buf.WriteString(v.HTML())
			} else if b, ok := progToBlock[p]; ok {
				buf.WriteString("<b>" + b.HTML() + "</b>")
			}
			buf.WriteString("</dt>")
			buf.WriteString("<dd class=\"ssa-prog\">")
			fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
			buf.WriteString("</dd>")
		}
		buf.WriteString("</dl>")
		buf.WriteString("</code>")
		f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
	}
	if ssa.GenssaDump[f.Name] {
		fi := f.DumpFileForPhase("genssa")
		if fi != nil {

			// inliningDiffers reports whether the inlining stacks a and b
			// differ in any filename, or in any line number except the
			// innermost (last) frame's.
			inliningDiffers := func(a, b []src.Pos) bool {
				if len(a) != len(b) {
					return true
				}
				for i := range a {
					if a[i].Filename() != b[i].Filename() {
						return true
					}
					if i != len(a)-1 && a[i].Line() != b[i].Line() {
						return true
					}
				}
				return false
			}

			var allPosOld []src.Pos
			var allPos []src.Pos

			for p := s.pp.Text; p != nil; p = p.Link {
				if p.Pos.IsKnown() {
					allPos = allPos[:0]
					p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
					if inliningDiffers(allPos, allPosOld) {
						for _, pos := range allPos {
							fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
						}
						// Swap (not copy) so the slices keep separate
						// backing stores.
						allPos, allPosOld = allPosOld, allPos
					}
				}

				var s string
				if v, ok := progToValue[p]; ok {
					s = v.String()
				} else if b, ok := progToBlock[p]; ok {
					s = b.String()
				} else {
					s = "   " // filler for instructions without an SSA origin
				}
				fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
			}
			fi.Close()
		}
	}

	defframe(&s, e, f)

	f.HTMLWriter.Close()
	f.HTMLWriter = nil
}
6842
// defframe finalizes the function's frame: it fills in the TEXT
// instruction's frame/argument sizes, spills partially-live register
// arguments, and inserts code to zero ambiguously-live stack variables.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Compute the total frame size: outgoing args (aligned) + locals.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size on the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live: liveness considers the whole slot live, so every
	// pointer-carrying part must actually be in memory. Skipped with -N
	// (everything is spilled anyway) and for functions that take no
	// register arguments.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, record which (name, offset) parts are already spilled in
		// the entry block before the first call.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then, insert spill code for pointer-carrying register parts of
		// partially-live arguments that weren't spilled already.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so the garbage
	// collector only sees initialized values when it scans for pointers.
	// [lo, hi) is the current frame-offset range being accumulated.
	var lo, hi int64

	// Opaque state for the backend's ZeroRange implementation (e.g. which
	// helper registers have been zeroed so far).
	var state uint32

	// Walk the declarations, merging nearby needzero ranges so each
	// ZeroRange call covers a maximal contiguous-ish region.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with the range we already have (a small gap of up to
			// two register-sized words is cheaper to zero than to skip).
			lo = n.FrameOffset()
			continue
		}

		// Zero out the accumulated old range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new range at this variable.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero the final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
6949
6950
// IndexJump describes one conditional jump used by CombJump: the jump
// opcode and the index of the successor block it targets.
type IndexJump struct {
	Jump obj.As
	Index int
}
6955
6956 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
6957 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
6958 p.Pos = b.Pos
6959 }
6960
6961
6962
// CombJump emits a two-instruction conditional jump sequence for block
// b, selected from jumps according to which block (next) is laid out
// immediately after b.
func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
	switch next {
	case b.Succs[0].Block():
		// Fallthrough reaches Succs[0]: emit the jump pair for that layout.
		s.oneJump(b, &jumps[0][0])
		s.oneJump(b, &jumps[0][1])
	case b.Succs[1].Block():
		// Fallthrough reaches Succs[1].
		s.oneJump(b, &jumps[1][0])
		s.oneJump(b, &jumps[1][1])
	default:
		// Neither successor follows directly: emit a conditional pair
		// plus an unconditional jump, arranged per the likelihood hint.
		var q *obj.Prog
		if b.Likely != ssa.BranchUnlikely {
			s.oneJump(b, &jumps[1][0])
			s.oneJump(b, &jumps[1][1])
			q = s.Br(obj.AJMP, b.Succs[1].Block())
		} else {
			s.oneJump(b, &jumps[0][0])
			s.oneJump(b, &jumps[0][1])
			q = s.Br(obj.AJMP, b.Succs[0].Block())
		}
		q.Pos = b.Pos
	}
}
6985
6986
// AddAux adds the offset in v.AuxInt and the symbol information from
// v.Aux (if any) to the address a.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
// AddAux2 adds offset and the symbol denoted by v.Aux (if any) to the
// address a, which must be a memory or address operand.
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// Add the integer offset.
	a.Offset += offset

	// If there is no symbol component, we're done.
	if v.Aux == nil {
		return
	}
	// Add symbol and name-class information, and for stack names, the
	// slot's frame offset.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		// Parameters (and outputs kept in memory) are addressed relative
		// to the argument area; everything else is a local auto.
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
		} else {
			a.Name = obj.NAME_AUTO
		}
		a.Sym = n.Linksym()
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}
7021
7022
7023
// extendIndex extends idx to a full pointer-width integer, to be used as
// an index into an array or slice of length len. On 32-bit platforms a
// 64-bit index is truncated to its low word; unless bounded (or bounds
// checks are disabled), code is generated to panic with the given kind
// when the high word is nonzero.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer-width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit pointer arch: take the low word, and
		// check that the high word is zero.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Caller guarantees the index fits, or bounds checks are off.
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Use the unsigned variant of the bounds-failure kind so the
			// panic message formats the index as unsigned.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// Branch: high word zero -> continue, nonzero -> panic.
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is smaller than pointer width: sign- or zero-extend it.
	// The switch key encodes (source size, pointer size) as size*10+ptr.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7115
7116
7117
7118 func CheckLoweredPhi(v *ssa.Value) {
7119 if v.Op != ssa.OpPhi {
7120 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7121 }
7122 if v.Type.IsMemory() {
7123 return
7124 }
7125 f := v.Block.Func
7126 loc := f.RegAlloc[v.ID]
7127 for _, a := range v.Args {
7128 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7129 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7130 }
7131 }
7132 }
7133
7134
7135
7136
7137
// CheckLoweredGetClosurePtr verifies that v (a LoweredGetClosurePtr value)
// lives in the function's entry block and is preceded there only by
// register-argument ops. Anything else could clobber the fixed register
// the closure pointer arrives in before it is read.
func CheckLoweredGetClosurePtr(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
	}
	for _, w := range entry.Values {
		if w == v {
			break
		}
		switch w.Op {
		case ssa.OpArgIntReg, ssa.OpArgFloatReg:
			// Register args are the only values allowed before v.
		default:
			base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
		}
	}
}
7155
7156
// CheckArgReg verifies that v (an ArgIntReg/ArgFloatReg value) is placed
// in the function's entry block, where the incoming argument registers
// are still live.
func CheckArgReg(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
	}
}
7163
7164 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7165 n, off := ssa.AutoVar(v)
7166 a.Type = obj.TYPE_MEM
7167 a.Sym = n.Linksym()
7168 a.Reg = int16(Arch.REGSP)
7169 a.Offset = n.FrameOffset() + off
7170 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7171 a.Name = obj.NAME_PARAM
7172 } else {
7173 a.Name = obj.NAME_AUTO
7174 }
7175 }
7176
7177
7178
// Call emits and returns a CALL instruction for the SSA value v,
// after running PrepareCall. Direct calls target the AuxCall symbol;
// indirect calls take the target from v.Args[0]'s register.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	// Capture statement-ness before PrepareCall; NOTE(review): PrepareCall
	// presumably may advance s.pp.Pos — confirm why the pre-call value is
	// the one that matters.
	pPosIsStmt := s.pp.Pos.IsStmt()
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Direct call to a known symbol.
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call: the operand's addressing mode is arch-specific.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
7207
7208
7209
// TailCall emits and returns a tail-call instruction for the SSA value v.
// It reuses Call and rewrites the opcode to ARET, so the call replaces
// the current frame instead of pushing a new one.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7215
7216
7217
7218
7219 func (s *State) PrepareCall(v *ssa.Value) {
7220 idx := s.livenessMap.Get(v)
7221 if !idx.StackMapValid() {
7222
7223 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7224 base.Fatalf("missing stack map index for %v", v.LongString())
7225 }
7226 }
7227
7228 call, ok := v.Aux.(*ssa.AuxCall)
7229
7230 if ok {
7231
7232
7233 if nowritebarrierrecCheck != nil {
7234 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7235 }
7236 }
7237
7238 if s.maxarg < v.AuxInt {
7239 s.maxarg = v.AuxInt
7240 }
7241 }
7242
7243
7244
7245 func (s *State) UseArgs(n int64) {
7246 if s.maxarg < n {
7247 s.maxarg = n
7248 }
7249 }
7250
7251
7252 func fieldIdx(n *ir.SelectorExpr) int {
7253 t := n.X.Type()
7254 if !t.IsStruct() {
7255 panic("ODOT's LHS is not a struct")
7256 }
7257
7258 for i, f := range t.Fields() {
7259 if f.Sym == n.Sel {
7260 if f.Offset != n.Offset() {
7261 panic("field offset doesn't match")
7262 }
7263 return i
7264 }
7265 }
7266 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7267
7268
7269
7270 }
7271
7272
7273
// ssafn holds per-function state the SSA backend needs while compiling
// curfn. It is the concrete type behind the frontend interface the ssa
// package calls back into (Logf, Fatalf, Syslook, SplitSlot, ...).
type ssafn struct {
	curfn   *ir.Func
	strings map[string]*obj.LSym // constant string -> data symbol; built lazily by StringData
	// Frame bookkeeping. NOTE(review): set outside this chunk; stksize
	// presumably is the frame size and stkptrsize the pointer-containing
	// prefix — confirm where they are assigned.
	stksize    int64
	stkptrsize int64

	// stkalign: stack frame alignment. NOTE(review): the original
	// explanatory comment was stripped; confirm semantics at the
	// assignment site.
	stkalign int64

	log bool // enable debug logging via Logf/Log
}
7288
7289
7290
7291 func (e *ssafn) StringData(s string) *obj.LSym {
7292 if aux, ok := e.strings[s]; ok {
7293 return aux
7294 }
7295 if e.strings == nil {
7296 e.strings = make(map[string]*obj.LSym)
7297 }
7298 data := staticdata.StringSym(e.curfn.Pos(), s)
7299 e.strings[s] = data
7300 return data
7301 }
7302
7303
// SplitSlot returns a slot representing the data of parent starting at
// offset, with the given name suffix and type. For splittable autos it
// creates a fresh, smaller local; otherwise it returns a view into the
// parent slot.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	if node.Class != ir.PAUTO || node.Addrtaken() {
		// Address-taken variables and non-autos cannot be split;
		// address the piece through the parent instead.
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	// Create a new auto named e.g. "x.ptr" for the split component.
	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever)
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}
7319
7320
7321 func (e *ssafn) Logf(msg string, args ...interface{}) {
7322 if e.log {
7323 fmt.Printf(msg, args...)
7324 }
7325 }
7326
// Log reports whether debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7330
7331
// Fatalf reports a compiler error at pos, prefixed with the name of the
// function being compiled, and aborts compilation via base.Fatalf.
func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
	base.Pos = pos
	nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
	base.Fatalf("'%s': "+msg, nargs...)
}
7337
7338
7339
// Warnl reports a compiler diagnostic at pos by forwarding to
// base.WarnfAt. (The parameter is named fmt_ to avoid shadowing the
// fmt package.)
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7343
// Debug_checknil reports whether the nil-check debug flag is set.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7347
// UseWriteBarrier reports whether write barriers are enabled
// (the -wb compiler flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7351
// Syslook returns the linker symbol for the named runtime helper.
// Only the helpers the SSA backend actually requests are supported;
// any other name is a fatal internal error.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil // unreachable: Fatalf does not return
}
7370
// Func returns the function currently being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7374
7375 func clobberBase(n ir.Node) ir.Node {
7376 if n.Op() == ir.ODOT {
7377 n := n.(*ir.SelectorExpr)
7378 if n.X.Type().NumFields() == 1 {
7379 return clobberBase(n.X)
7380 }
7381 }
7382 if n.Op() == ir.OINDEX {
7383 n := n.(*ir.IndexExpr)
7384 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7385 return clobberBase(n.X)
7386 }
7387 }
7388 return n
7389 }
7390
7391
// callTargetLSym returns the linker symbol to use when calling callee
// directly, selecting the symbol for the ABI callee was compiled with.
func callTargetLSym(callee *ir.Name) *obj.LSym {
	if callee.Func == nil {
		// No ir.Func attached — presumably the body is not visible in
		// this compilation (e.g. declared via linkname); fall back to the
		// default symbol. TODO(review): confirm which cases reach here.
		return callee.Linksym()
	}

	return callee.LinksymABI(callee.Func.ABI)
}
7402
7403
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct, which verifies the index against the field list.
const deferStructFnField = 4

// deferType caches the type synthesized by deferstruct.
var deferType *types.Type
7407
7408
7409
// deferstruct builds (once, then caches in deferType) a named type
// runtime._defer describing a defer record.
// NOTE(review): the field list must mirror the runtime's _defer struct
// (runtime/runtime2.go) — confirm on any runtime change.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	makefield := func(name string, t *types.Type) *types.Field {
		// Field syms are looked up in the nil package; presumably so they
		// belong to no real package — confirm.
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// "fn" and the fields below are declared uintptr here even though
		// the runtime may store pointers in them; presumably only sizes
		// and offsets matter for this synthesized layout — confirm.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Keep deferStructFnField in sync with the list above.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Wrap the struct in a named type "_defer" in the runtime package.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
7448
7449
7450
7451
7452
// SpillSlotAddr returns an obj.Addr referencing the given spill slot:
// a memory operand at spill.Offset+extraOffset relative to baseReg.
func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
	return obj.Addr{
		Name: obj.NAME_NONE,
		Type: obj.TYPE_MEM,
		Reg: baseReg,
		Offset: spill.Offset + extraOffset,
	}
}
7461
var (
	// BoundsCheckFunc and ExtendCheckFunc hold the runtime symbols called
	// on bounds-check / index-extension failures, indexed by
	// ssa.BoundsKind. NOTE(review): populated during backend setup, not
	// visible in this chunk.
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)
7466