package ssagen

import (
	"bufio"
	"bytes"
	"fmt"
	"go/constant"
	"html"
	"internal/buildcfg"
	"os"
	"path/filepath"
	"sort"
	"strings"

	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/liveness"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/rttype"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/staticdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/objabi"
	"cmd/internal/src"
	"cmd/internal/sys"

	rtabi "internal/abi"
)

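// Shared SSA backend state: ssaConfig is the immutable configuration
// built once by InitConfig, and ssaCaches holds one reusable value
// cache per concurrent compilation worker.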
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

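// Dump controls read from the environment by InitEnv: the function to
// dump (GOSSAFUNC), the directory for the HTML output (GOSSADIR),
// whether to also dump to stdout, and which phases get CFG pictures.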
var ssaDump string
var ssaDir string
var ssaDumpStdout bool
var ssaDumpCFG string

const ssaDumpFile = "ssa.html"

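// ssaDumpInlined collects functions that were inlined into the function
// being dumped, so their sources can be shown alongside it.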
var ssaDumpInlined []*ir.Func

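// DumpInline records fn as an inlined function for the GOSSAFUNC dump.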
func DumpInline(fn *ir.Func) {
	if ssaDump != "" && ssaDump == ir.FuncName(fn) {
		ssaDumpInlined = append(ssaDumpInlined, fn)
	}
}

func InitEnv() {
	ssaDump = os.Getenv("GOSSAFUNC")
	ssaDir = os.Getenv("GOSSADIR")
	if ssaDump != "" {
		if strings.HasSuffix(ssaDump, "+") {
			ssaDump = ssaDump[:len(ssaDump)-1]
			ssaDumpStdout = true
		}
		spl := strings.Split(ssaDump, ":")
		if len(spl) > 1 {
			ssaDump = spl[0]
			ssaDumpCFG = spl[1]
		}
	}
}

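// InitConfig builds the package-level ssa.Config and looks up the
// runtime functions and variables that SSA generation references
// directly.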
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that the frontend rarely creates but
	// the backend needs, so they land in the shared type cache before it
	// is frozen below.
	_ = types.NewPtr(types.Types[types.TINTER])
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))
	_ = types.NewPtr(types.NewPtr(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8])))
	_ = types.NewPtr(types.Types[types.TINT16])
	_ = types.NewPtr(types.Types[types.TINT64])
	_ = types.NewPtr(types.ErrorType)
	_ = types.NewPtr(reflectdata.MapType())
	_ = types.NewPtr(deferstruct())
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up the runtime functions and variables the backend calls directly.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.AssertI2I = typecheck.LookupRuntimeFunc("assertI2I")
	ir.Syms.AssertI2I2 = typecheck.LookupRuntimeFunc("assertI2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")

	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	// On 32-bit architectures, index values may be 64 bits wide, so those
	// need the extended panic entry points as well.
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm helpers (assembly functions with special ABIs).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}

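// AbiForBodylessFuncStackMap returns the ABI used to compute a bodyless
// function's stack map. This is not necessarily the ABI used to call it:
// such functions' stack maps are computed as if they were ABI0.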
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}

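// abiForFunc implements the ABI policy for a function: which calling
// convention (ABI0 or ABIInternal) its body is compiled against.
// Passing fn == nil returns the default ABI.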
func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
	if buildcfg.Experiment.RegabiArgs {
		// Select the ABI based on the function's declared ABI.
		if fn == nil {
			return abi1
		}
		switch fn.ABI {
		case obj.ABI0:
			return abi0
		case obj.ABIInternal:
			return abi1
		}
		base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
		panic("not reachable")
	}

	a := abi0
	if fn != nil {
		if fn.Pragma&ir.RegisterParams != 0 {
			// Without the register-args experiment, only functions
			// marked with the registerparams pragma use the register ABI.
			a = abi1
		}
	}
	return a
}

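// emitOpenDeferInfo emits FUNCDATA information about the defers in a
// function that is using open-coded defers. It records the frame offset
// of the deferBits variable and of the first defer closure slot; the
// remaining closure slots are required to be laid out consecutively
// after the first.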
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the defer closure slots were laid out consecutively.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}

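// buildssa builds an SSA function for fn.
// worker indicates which of the backend workers is doing the processing.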
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// GOSSAFUNC can match the bare function name, the pkgpath-qualified
	// name, or a path suffix of the qualified name, optionally with an
	// ABI specification.
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if strings.Contains(ssaDump, ",") { // ABI specification
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") { // linker syntax, e.g. "name<1>"
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "(*Reader).Reset"
			pkgDotName == ssaDump || // "compress/gzip.(*Reader).Reset"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "gzip.(*Reader).Reset"
	}

	var astBuf *bytes.Buffer
	if printssa {
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Don't support open-coded defers for 386 when using shared
		// libraries, because there is extra code (added by
		// rewriteToUseGot()) preceding the deferreturn/ret code that
		// we don't track correctly.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Skip doing open defers if we need to instrument function
		// enter/exit for the race detector, since we will not generate
		// that code in the case of the extra deferreturn/ret segment.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Similarly, skip open-coded defers if there are any
		// heap-allocated result parameters that need to be copied back
		// to their stack slots.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Since we are generating defer calls at every exit for
		// open-coded defers, skip doing them if there are too many
		// returns (especially if there are multiple defers). Open-coded
		// defers are most important for improving performance for
		// smaller functions (which don't have many returns).
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and stack slot. deferBits is a
		// bitmask showing which of the open-coded defers in this function
		// have been activated.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// For this value, AuxInt is initialized to zero by default.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Make sure that the deferBits stack slot is kept alive (for use
		// by panics) and that stores to deferBits are not eliminated,
		// even if all checking code on deferBits in the function exits
		// can be eliminated because the defer statements were all
		// unconditional.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// The backend's stackframe pass prunes away entries from the fn's
	// Dcl list, including PPARAMOUT nodes that correspond to output
	// params passed in registers. Walk the Dcl list and capture these
	// nodes to a side list, so that we'll have them available during
	// DWARF-gen later on. See issue 48573 for more details.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			// Note: blank and unnamed input parameters will not appear
			// here, but do appear in the function's type.
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // helps with debugging information, not needed for compilation itself
			} else { // address was taken AND/OR too large for SSA
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// SSA-able type, so address was taken: receive the
						// value in an OpArg and store it immediately to memory.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Too big for SSA: brute force, and early, do a
						// bunch of stores from registers.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil {
			// For a range-over-func body closure, spill its closure
			// pointer to a named stack slot, so the debugger can look
			// for it and find the parent frame.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep it from being dead-store eliminated.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// If n is a small variable captured by value, promote
			// it to PAUTO so it can be converted to SSA.
			//
			// Note: While we never capture a variable by value if
			// the user took its address, we may have generated
			// runtime calls that did (#43701). Since we don't
			// convert Addrtaken variables to SSA anyway, no point
			// in promoting them either.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// fallthrough to exit
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to ssa package to compile function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information for morestack calls
	// emitted in the assembler. This is done here using all the
	// parameters (used, partially used, and unused) because it mimics
	// the behavior of the former ABI (everything stored) and because
	// it's not 100% clear that naming conventions are respected in
	// autogenerated code.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}

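// storeParameterRegsToStack stores each register-resident piece of
// parameter n to its home stack location at addr. If pointersOnly is
// set, only the pointer-shaped pieces are stored.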
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}

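// zeroResults zeros the return values at the start of the function.
// We need to do this very early in the function: defer might stop a
// panic and show the return values as they exist at the time of panic,
// and for precise stacks the garbage collector assumes results are
// always live, so they must be zeroed before the function body runs.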
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// The local which points to the return value is the thing
			// that needs zeroing. This is already handled elsewhere;
			// the memory it points to is zeroed by newobject.
			continue
		}
		// Zero the stack location containing f.
		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}

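// paramsToHeap produces code to allocate memory for heap-escaped
// parameters and to copy non-result parameters' values from the stack.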
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}

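// newHeapaddr allocates heap memory for n and sets its heap address.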
func (s *state) newHeapaddr(n *ir.Name) {
	s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
}

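// setHeapaddr allocates a new PAUTO variable to store ptr (which must be
// non-nil) and then sets it as the .Heapaddr field of the given name.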
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare a variable to hold the address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}

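// newObject returns an SSA value denoting new(typ).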
func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
	if typ.Size() == 0 {
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	if rtype == nil {
		rtype = s.reflectType(typ)
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}

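// checkPtrAlignment inserts a call to runtime.checkptrAlignment for the
// unsafe.Pointer conversion n, checking that v is suitably aligned (and,
// for a conversion to a pointer-to-array, that the total size does not
// overflow).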
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// Casting to a type with an alignment of 1 never needs a check,
	// unless the total size could overflow.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}

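// reflectType returns an SSA value representing a pointer to typ's
// reflection type descriptor.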
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}

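// dumpSourcesColumn dumps the source lines of fn and of the functions
// inlined into it to the "sources" column of the HTML writer.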
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read sources of target function fn.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read sources of inlined functions.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	sort.Sort(ssa.ByTopo(inlFns))
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}

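// readFuncLines returns lines start through end (inclusive) of the named
// source file, for inclusion in the HTML dump.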
func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
	f, err := os.Open(os.ExpandEnv(file))
	if err != nil {
		return nil, err
	}
	defer f.Close()
	var lines []string
	ln := uint(1)
	scanner := bufio.NewScanner(f)
	for scanner.Scan() && ln <= end {
		if ln >= start {
			lines = append(lines, scanner.Text())
		}
		ln++
	}
	return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
}

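// updateUnsetPredPos propagates the earliest-value position information
// for b towards all of b's predecessors that need a position, and recurs
// on that predecessor if its position is updated. b should have a
// non-empty position.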
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Assume values are still in roughly textual order;
					// the earliest one with a position is a good guess.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		s.updateUnsetPredPos(p) // We do not expect long chains of these, thus recursion is okay.
	}
}

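// openDeferInfo describes one open-coded defer in the current function.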
type openDeferInfo struct {
	// The node representing the call of the defer.
	n *ir.CallExpr
	// If the defer call is a closure call, the address of the argtmp
	// where the closure is stored.
	closure *ssa.Value
	// The node representing the argtmp where the closure is stored,
	// used for the sanity check that the closure slots are laid out
	// consecutively.
	closureNode *ir.Name
}

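// state holds the per-function state used while converting a function's
// body from the AST-based IR to the SSA-based IR.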
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol to ssa value)
	// *Node is the unique identifier (an ONAME Node) for the variable.
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the current block.
	// This map exists just to coalesce multiple references into a single FwdRef op.
	// *Node is the unique identifier (an ONAME Node) for the variable.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables on the stack.
	decladdrs map[*ir.Name]*ssa.Value

	// starting values. Memory, stack pointer, and globals pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value
	// value representing the address where the deferBits autotmp is stored
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack. The current line number is top of stack
	line []src.XPos
	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number.
	// Used to deduplicate panic calls.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order. Hence, at exit we should run these defers in reverse
	// order of this list.
	openDefers []*openDeferInfo

	// For open-coded defers, the entry and final blocks of the last defer
	// exit code we generated, used to share code between exits when the
	// shareDeferExits option (disabled by default) is on.
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int // number of defers encountered at that point

	prevCall *ssa.Value // the last call emitted
}

type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}

type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}

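// label returns the label associated with sym, creating it if necessary.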
func (s *state) label(sym *types.Sym) *ssaLabel {
	lab := s.labels[sym.Name]
	if lab == nil {
		lab = new(ssaLabel)
		s.labels[sym.Name] = lab
	}
	return lab
}

func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }

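// ssaMarker returns a new name node to be used as a marker. These nodes
// are never declared; they exist only as map keys in s.vars and
// s.fwdVars for compiler-generated pseudo-variables such as memory.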
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}

var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)

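// startBlock sets the current block we're generating code in to b.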
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	for n := range s.fwdVars {
		delete(s.fwdVars, n)
	}
}

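// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if there is no code flowing to the current block.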
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Empty plain blocks get the line of their successor (handled
		// after all blocks are created), except for increment blocks in
		// for statements and for blocks ending in GOTO/BREAK/CONTINUE.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}

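// pushLine pushes a line number on the line number stack.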
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// the frontend may emit a node with a missing line number;
		// use the parent line number in this case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}

// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}

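// The newValue* helpers construct SSA values in the current block at the
// current source position. The numeric suffix is the argument count; an
// A suffix means the op takes an aux value, an I suffix an auxint value.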
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos is like newValue1A, but isStmt determines whether the
// created value may be a statement boundary (false means never, true
// means maybe).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos is like newValue2A, with the same isStmt semantics as
// newValue1Apos.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos is like newValue3A, with the same isStmt semantics as
// newValue1Apos.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}

func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// If optimizations are off, allocate in the current block instead.
		// Since with -N we're not doing the CSE or tighten passes, putting
		// lots of stuff in the entry block leads to O(n^2) behavior.
		// See issue 45897.
		b = s.curBlock
	}
	return b
}

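// The entryNewValue* helpers construct SSA values in the entry block,
// with no associated source position.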
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}

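// The const* helpers are thin wrappers around the ssa.Func constant
// factories, which cache constant values per function.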
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}

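// newValueOrSfCall* are wrappers around newValue*, which may instead
// create a call to a soft-float runtime function (when emitting
// soft-float code).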
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}

type instrumentKind uint8

const (
	instrumentRead = iota
	instrumentWrite
	instrumentMove
)

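// instrument inserts a race/msan/asan instrumentation call for a read or
// write of the value at addr; instrument2 handles the two-address move
// case as well.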
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}

// instrumentFields instruments a read/write operation on addr.
// If it is instrumenting for MSAN or ASAN and t is a struct type, it
// instruments the operation for each field, instead of for the whole
// struct.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			continue
		}
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}

func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
	if base.Flag.MSan {
		s.instrument2(t, dst, src, instrumentMove)
	} else {
		s.instrument(t, src, instrumentRead)
		s.instrument(t, dst, instrumentWrite)
	}
}

func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// for composite objects we have to write every address
		// because a write might happen to any subobject.
		// composites with only one element don't have subobjects, though.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// for non-composite objects we can write just the start
		// address, as any write must write the first byte.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}

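// load and store emit instrumented memory operations; rawLoad skips
// instrumentation, for accesses the sanitizers must not see.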
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}

func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}

func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Normally, when moving Go values of type T from one location to
		// another, we don't need to worry about partial overlaps. The two
		// Ts must either be in disjoint (nonoverlapping) memory or in
		// exactly the same location. There are 2 cases where this isn't
		// true:
		//  1) Using unsafe you can arrange partial overlaps.
		//  2) Since Go 1.17, you can use a cast from a slice to a
		//     ptr-to-array to construct partial overlaps of array types:
		//       var a [3]int
		//       p := (*[2]int)(a[:])
		//       q := (*[2]int)(a[1:])
		//       *p = *q
		// We don't care about solving 1; for 2, we need to ensure that
		// when there might be partial overlap we do a correct, but slow,
		// move. memmove is correct even when src and dst partially
		// overlap.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// We would have otherwise implemented this move with
			// straightline code, including a write barrier. Pretend we
			// issue a write barrier here, so that the write barrier
			// machinery still sees one.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}

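// stmtList converts the statement list l to SSA and adds it to s.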
func (s *state) stmtList(l ir.Nodes) {
	for _, n := range l {
		s.stmt(n)
	}
}

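// stmt converts the statement n to SSA and adds it to s.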
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil, and n isn't a label (which might have an
	// associated goto somewhere), then this code is dead. Stop here.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op

	// Expression statements
	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				// These runtime functions never return, so end the block here.
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing to do because the label isn't targetable. See issue 52278.
			break
		}
		lab := s.label(sym)

		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to that label.
		// (We pretend "label:" is preceded by "goto label", unless the
		// predecessor is unreachable.)
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x=x assignment. No point in doing anything
			// here. In addition, skipping this assignment
			// prevents generating:
			//   VARDEF x
			//   COPY x -> x
			// which is bad because x is incorrectly considered
			// dead before the vardef. See issue #14904.
			return
		}

		// mayOverlap keeps track of whether the LHS and RHS might
		// refer to partially overlapping memory. Partial overlapping can
		// only happen for arrays, see the comment in moveWhichMayOverlap.
		//
		// If both sides of the assignment are not dereferences, then
		// partial overlap can't happen.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// Reading from a string: strings are immutable, so a
				// store destination can never overlap the source.
				mayOverlap = false
			}
		}

		// Evaluate RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk. Any that remain are just T{}
				// or equivalents. Use the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an append
				// back to the same slice. If so, we handle it specially
				// to avoid write barriers on the fast (non-growth) path.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd, it'll be on the stack,
				// so there will be no write barriers,
				// so there's no need to attempt to prevent them.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 { // replicating old diagnostic message
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs
			// Just evaluate rhs for side-effects.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil // Signal assign to use OpZero.
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source.
			// Don't write back fields we aren't changing. See issue #14855.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// TODO: detect defaults for len/cap also.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the target
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Ninit; Cond; Post { Body }
		// cond (Cond); body (Body); incr (Post)
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have correct position; issue #30167
		bBody.Pos = n.Pos()

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// It can happen that bIncr ends in a block containing only VARKILL,
			// and that muddles the debugging experience.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by the compiler into control
		// flow already. Just make sure we have a break target to jump to.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// walk adds explicit OBREAK nodes to the end of all reachable
		// code paths. If we still have a current block here, then mark
		// it unreachable.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// The ending condition for the current block decides whether
		// we'll use the jump table at all: we check that
		// min <= idx <= max and jump around the jump table if not.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}
		// Compare idx-min with max-min, to see if we can use the jump table.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)   // in range - use jump table
		b.AddEdgeTo(bEnd) // out of range - no case in the jump table will trigger
		b.Likely = ssa.BranchLikely

		// Build jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Figure out where we should go for each index in the table.
		table := make([]*ssa.Block, max-min+1)
		// By default, jump to the end block.
		for i := range table {
			table[i] = bEnd
		}
		// Explicitly set a target for each case.
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Check the cache first.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
			// Note: we can only use the cache if we have the right atomic
			// load instruction. Double-check that here.
			if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp"}]; !ok {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick right-sized ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load cache pointer out of descriptor, with an atomic load so
			// we ensure that we see a fully written cache.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize hash variable.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load mask from cache.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
			// Jump to loop head.
			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// At loop head, get pointer to the cache entry:
			//   e := &cache.Entries[hash&mask]
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// hash++
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Look for a cache hit:
			//   if e.Typ == t { goto hit }
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// Look for an empty entry, the tombstone for this hash table:
			//   if e.Typ == nil { goto miss }
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// On a hit, load the data fields of the cache entry:
			//   Case = e.Case
			//   Itab = e.Itab
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// On a miss, call into the runtime to get the answer.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Cache hits merge in here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

2127 case ir.OCHECKNIL:
2128 n := n.(*ir.UnaryExpr)
2129 p := s.expr(n.X)
2130 _ = s.nilCheck(p)
2131
2132
2133 case ir.OINLMARK:
2134 n := n.(*ir.InlineMarkStmt)
2135 s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())
2136
2137 default:
2138 s.Fatalf("unhandled stmt %v", n.Op())
2139 }
2140 }
2141
// If true, share as many open-coded defer exits as possible (with the
// downside of less precise line numbers).
2144 const shareDeferExits = false
2145
// exit processes any code that needs to be generated just before returning.
// It returns a BlockRet block that ends the control flow. Its control value
// will be set to the final memory state.
2149 func (s *state) exit() *ssa.Block {
2150 if s.hasdefer {
2151 if s.hasOpenDefers {
2152 if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
2153 if s.curBlock.Kind != ssa.BlockPlain {
2154 panic("Block for an exit should be BlockPlain")
2155 }
2156 s.curBlock.AddEdgeTo(s.lastDeferExit)
2157 s.endBlock()
2158 return s.lastDeferFinalBlock
2159 }
2160 s.openDeferExit()
2161 } else {
2162 s.rtcall(ir.Syms.Deferreturn, true, nil)
2163 }
2164 }

// Do actual return.
// These currently turn into self-copies (of course maybe get into registers or etc).
resultFields := s.curfn.Type().Results()
results := make([]*ssa.Value, len(resultFields)+1)

// Store SSAable and heap-escaped PPARAMOUT variables back to stack locations.
2171 for i, f := range resultFields {
2172 n := f.Nname.(*ir.Name)
2173 if s.canSSA(n) {
2174 if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
// We are about to store to the result slot.
2176 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2177 }
2178 results[i] = s.variable(n, n.Type())
2179 } else if !n.OnStack() {
// The result is heap-allocated; copy it back to the result slot.
2181 if n.Type().HasPointers() {
2182 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2183 }
2184 ha := s.expr(n.Heapaddr)
2185 s.instrumentFields(n.Type(), ha, instrumentRead)
2186 results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
2187 } else {
// Result is not SSA-able; it did not escape, so it is not on the heap,
// but it is too large for SSA. No VarDef is needed: the result slot is
// already holding a live value.
2191 results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
2192 }
2193 }

// In -race mode, we need to call racefuncexit.
// Note: this has to happen after we load any heap-allocated results,
// otherwise races will be attributed to the caller instead.
2198 if s.instrumentEnterExit {
2199 s.rtcall(ir.Syms.Racefuncexit, true, nil)
2200 }
2201
2202 results[len(results)-1] = s.mem()
2203 m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
2204 m.AddArgs(results...)
2205
2206 b := s.endBlock()
2207 b.Kind = ssa.BlockRet
2208 b.SetControl(m)
2209 if s.hasdefer && s.hasOpenDefers {
2210 s.lastDeferFinalBlock = b
2211 }
2212 return b
2213 }
2214
2215 type opAndType struct {
2216 op ir.Op
2217 etype types.Kind
2218 }
2219
2220 var opToSSA = map[opAndType]ssa.Op{
2221 {ir.OADD, types.TINT8}: ssa.OpAdd8,
2222 {ir.OADD, types.TUINT8}: ssa.OpAdd8,
2223 {ir.OADD, types.TINT16}: ssa.OpAdd16,
2224 {ir.OADD, types.TUINT16}: ssa.OpAdd16,
2225 {ir.OADD, types.TINT32}: ssa.OpAdd32,
2226 {ir.OADD, types.TUINT32}: ssa.OpAdd32,
2227 {ir.OADD, types.TINT64}: ssa.OpAdd64,
2228 {ir.OADD, types.TUINT64}: ssa.OpAdd64,
2229 {ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
2230 {ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,
2231
2232 {ir.OSUB, types.TINT8}: ssa.OpSub8,
2233 {ir.OSUB, types.TUINT8}: ssa.OpSub8,
2234 {ir.OSUB, types.TINT16}: ssa.OpSub16,
2235 {ir.OSUB, types.TUINT16}: ssa.OpSub16,
2236 {ir.OSUB, types.TINT32}: ssa.OpSub32,
2237 {ir.OSUB, types.TUINT32}: ssa.OpSub32,
2238 {ir.OSUB, types.TINT64}: ssa.OpSub64,
2239 {ir.OSUB, types.TUINT64}: ssa.OpSub64,
2240 {ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
2241 {ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,
2242
2243 {ir.ONOT, types.TBOOL}: ssa.OpNot,
2244
2245 {ir.ONEG, types.TINT8}: ssa.OpNeg8,
2246 {ir.ONEG, types.TUINT8}: ssa.OpNeg8,
2247 {ir.ONEG, types.TINT16}: ssa.OpNeg16,
2248 {ir.ONEG, types.TUINT16}: ssa.OpNeg16,
2249 {ir.ONEG, types.TINT32}: ssa.OpNeg32,
2250 {ir.ONEG, types.TUINT32}: ssa.OpNeg32,
2251 {ir.ONEG, types.TINT64}: ssa.OpNeg64,
2252 {ir.ONEG, types.TUINT64}: ssa.OpNeg64,
2253 {ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
2254 {ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,
2255
2256 {ir.OBITNOT, types.TINT8}: ssa.OpCom8,
2257 {ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
2258 {ir.OBITNOT, types.TINT16}: ssa.OpCom16,
2259 {ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
2260 {ir.OBITNOT, types.TINT32}: ssa.OpCom32,
2261 {ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
2262 {ir.OBITNOT, types.TINT64}: ssa.OpCom64,
2263 {ir.OBITNOT, types.TUINT64}: ssa.OpCom64,
2264
2265 {ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
2266 {ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
2267 {ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
2268 {ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,
2269
2270 {ir.OMUL, types.TINT8}: ssa.OpMul8,
2271 {ir.OMUL, types.TUINT8}: ssa.OpMul8,
2272 {ir.OMUL, types.TINT16}: ssa.OpMul16,
2273 {ir.OMUL, types.TUINT16}: ssa.OpMul16,
2274 {ir.OMUL, types.TINT32}: ssa.OpMul32,
2275 {ir.OMUL, types.TUINT32}: ssa.OpMul32,
2276 {ir.OMUL, types.TINT64}: ssa.OpMul64,
2277 {ir.OMUL, types.TUINT64}: ssa.OpMul64,
2278 {ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
2279 {ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,
2280
2281 {ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
2282 {ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,
2283
2284 {ir.ODIV, types.TINT8}: ssa.OpDiv8,
2285 {ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
2286 {ir.ODIV, types.TINT16}: ssa.OpDiv16,
2287 {ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
2288 {ir.ODIV, types.TINT32}: ssa.OpDiv32,
2289 {ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
2290 {ir.ODIV, types.TINT64}: ssa.OpDiv64,
2291 {ir.ODIV, types.TUINT64}: ssa.OpDiv64u,
2292
2293 {ir.OMOD, types.TINT8}: ssa.OpMod8,
2294 {ir.OMOD, types.TUINT8}: ssa.OpMod8u,
2295 {ir.OMOD, types.TINT16}: ssa.OpMod16,
2296 {ir.OMOD, types.TUINT16}: ssa.OpMod16u,
2297 {ir.OMOD, types.TINT32}: ssa.OpMod32,
2298 {ir.OMOD, types.TUINT32}: ssa.OpMod32u,
2299 {ir.OMOD, types.TINT64}: ssa.OpMod64,
2300 {ir.OMOD, types.TUINT64}: ssa.OpMod64u,
2301
2302 {ir.OAND, types.TINT8}: ssa.OpAnd8,
2303 {ir.OAND, types.TUINT8}: ssa.OpAnd8,
2304 {ir.OAND, types.TINT16}: ssa.OpAnd16,
2305 {ir.OAND, types.TUINT16}: ssa.OpAnd16,
2306 {ir.OAND, types.TINT32}: ssa.OpAnd32,
2307 {ir.OAND, types.TUINT32}: ssa.OpAnd32,
2308 {ir.OAND, types.TINT64}: ssa.OpAnd64,
2309 {ir.OAND, types.TUINT64}: ssa.OpAnd64,
2310
2311 {ir.OOR, types.TINT8}: ssa.OpOr8,
2312 {ir.OOR, types.TUINT8}: ssa.OpOr8,
2313 {ir.OOR, types.TINT16}: ssa.OpOr16,
2314 {ir.OOR, types.TUINT16}: ssa.OpOr16,
2315 {ir.OOR, types.TINT32}: ssa.OpOr32,
2316 {ir.OOR, types.TUINT32}: ssa.OpOr32,
2317 {ir.OOR, types.TINT64}: ssa.OpOr64,
2318 {ir.OOR, types.TUINT64}: ssa.OpOr64,
2319
2320 {ir.OXOR, types.TINT8}: ssa.OpXor8,
2321 {ir.OXOR, types.TUINT8}: ssa.OpXor8,
2322 {ir.OXOR, types.TINT16}: ssa.OpXor16,
2323 {ir.OXOR, types.TUINT16}: ssa.OpXor16,
2324 {ir.OXOR, types.TINT32}: ssa.OpXor32,
2325 {ir.OXOR, types.TUINT32}: ssa.OpXor32,
2326 {ir.OXOR, types.TINT64}: ssa.OpXor64,
2327 {ir.OXOR, types.TUINT64}: ssa.OpXor64,
2328
2329 {ir.OEQ, types.TBOOL}: ssa.OpEqB,
2330 {ir.OEQ, types.TINT8}: ssa.OpEq8,
2331 {ir.OEQ, types.TUINT8}: ssa.OpEq8,
2332 {ir.OEQ, types.TINT16}: ssa.OpEq16,
2333 {ir.OEQ, types.TUINT16}: ssa.OpEq16,
2334 {ir.OEQ, types.TINT32}: ssa.OpEq32,
2335 {ir.OEQ, types.TUINT32}: ssa.OpEq32,
2336 {ir.OEQ, types.TINT64}: ssa.OpEq64,
2337 {ir.OEQ, types.TUINT64}: ssa.OpEq64,
2338 {ir.OEQ, types.TINTER}: ssa.OpEqInter,
2339 {ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
2340 {ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
2341 {ir.OEQ, types.TMAP}: ssa.OpEqPtr,
2342 {ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
2343 {ir.OEQ, types.TPTR}: ssa.OpEqPtr,
2344 {ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
2345 {ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
2346 {ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
2347 {ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,
2348
2349 {ir.ONE, types.TBOOL}: ssa.OpNeqB,
2350 {ir.ONE, types.TINT8}: ssa.OpNeq8,
2351 {ir.ONE, types.TUINT8}: ssa.OpNeq8,
2352 {ir.ONE, types.TINT16}: ssa.OpNeq16,
2353 {ir.ONE, types.TUINT16}: ssa.OpNeq16,
2354 {ir.ONE, types.TINT32}: ssa.OpNeq32,
2355 {ir.ONE, types.TUINT32}: ssa.OpNeq32,
2356 {ir.ONE, types.TINT64}: ssa.OpNeq64,
2357 {ir.ONE, types.TUINT64}: ssa.OpNeq64,
2358 {ir.ONE, types.TINTER}: ssa.OpNeqInter,
2359 {ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
2360 {ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
2361 {ir.ONE, types.TMAP}: ssa.OpNeqPtr,
2362 {ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
2363 {ir.ONE, types.TPTR}: ssa.OpNeqPtr,
2364 {ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
2365 {ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
2366 {ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
2367 {ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,
2368
2369 {ir.OLT, types.TINT8}: ssa.OpLess8,
2370 {ir.OLT, types.TUINT8}: ssa.OpLess8U,
2371 {ir.OLT, types.TINT16}: ssa.OpLess16,
2372 {ir.OLT, types.TUINT16}: ssa.OpLess16U,
2373 {ir.OLT, types.TINT32}: ssa.OpLess32,
2374 {ir.OLT, types.TUINT32}: ssa.OpLess32U,
2375 {ir.OLT, types.TINT64}: ssa.OpLess64,
2376 {ir.OLT, types.TUINT64}: ssa.OpLess64U,
2377 {ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
2378 {ir.OLT, types.TFLOAT32}: ssa.OpLess32F,
2379
2380 {ir.OLE, types.TINT8}: ssa.OpLeq8,
2381 {ir.OLE, types.TUINT8}: ssa.OpLeq8U,
2382 {ir.OLE, types.TINT16}: ssa.OpLeq16,
2383 {ir.OLE, types.TUINT16}: ssa.OpLeq16U,
2384 {ir.OLE, types.TINT32}: ssa.OpLeq32,
2385 {ir.OLE, types.TUINT32}: ssa.OpLeq32U,
2386 {ir.OLE, types.TINT64}: ssa.OpLeq64,
2387 {ir.OLE, types.TUINT64}: ssa.OpLeq64U,
2388 {ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
2389 {ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
2390 }

// concreteEtype returns the kind of t, resolving the platform-dependent
// kinds int, uint, and uintptr to their fixed-size equivalents.
2392 func (s *state) concreteEtype(t *types.Type) types.Kind {
2393 e := t.Kind()
2394 switch e {
2395 default:
2396 return e
2397 case types.TINT:
2398 if s.config.PtrSize == 8 {
2399 return types.TINT64
2400 }
2401 return types.TINT32
2402 case types.TUINT:
2403 if s.config.PtrSize == 8 {
2404 return types.TUINT64
2405 }
2406 return types.TUINT32
2407 case types.TUINTPTR:
2408 if s.config.PtrSize == 8 {
2409 return types.TUINT64
2410 }
2411 return types.TUINT32
2412 }
2413 }

// ssaOp returns the SSA opcode that implements the ir.Op op for values of type t.
2415 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2416 etype := s.concreteEtype(t)
2417 x, ok := opToSSA[opAndType{op, etype}]
2418 if !ok {
2419 s.Fatalf("unhandled binary op %v %s", op, etype)
2420 }
2421 return x
2422 }
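// For example, an int32 addition looks up {ir.OADD, types.TINT32} and yields
// ssa.OpAdd32. For plain int/uint/uintptr operands, concreteEtype first
// resolves the kind to its fixed-size equivalent based on the pointer size.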
2423
2424 type opAndTwoTypes struct {
2425 op ir.Op
2426 etype1 types.Kind
2427 etype2 types.Kind
2428 }
2429
2430 type twoTypes struct {
2431 etype1 types.Kind
2432 etype2 types.Kind
2433 }
2434
2435 type twoOpsAndType struct {
2436 op1 ssa.Op
2437 op2 ssa.Op
2438 intermediateType types.Kind
2439 }
2440
2441 var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
2442
2443 {types.TINT8, types.TFLOAT32}: {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
2444 {types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
2445 {types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
2446 {types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},
2447
2448 {types.TINT8, types.TFLOAT64}: {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
2449 {types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
2450 {types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
2451 {types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},
2452
2453 {types.TFLOAT32, types.TINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
2454 {types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
2455 {types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
2456 {types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},
2457
2458 {types.TFLOAT64, types.TINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
2459 {types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
2460 {types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
2461 {types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},
2462
2463 {types.TUINT8, types.TFLOAT32}: {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
2464 {types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
2465 {types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
2466 {types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
2467
2468 {types.TUINT8, types.TFLOAT64}: {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
2469 {types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
2470 {types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
2471 {types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
2472
2473 {types.TFLOAT32, types.TUINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
2474 {types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
2475 {types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
2476 {types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},
2477
2478 {types.TFLOAT64, types.TUINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
2479 {types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
2480 {types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
2481 {types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

// float <-> float
2484 {types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
2485 {types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
2486 {types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
2487 {types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
2488 }
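// The ssa.OpInvalid entries above mark uint64<->float conversions that have
// no direct two-op lowering on most targets; conv detects them and emits
// multi-step sequences instead (see the tricky 64-bit unsigned cases in
// conv below).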
2489
// this map is used only for 32-bit archs, and only includes the difference:
// on 32-bit archs, don't use int64<->float conversions for uint32
2492 var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
2493 {types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
2494 {types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
2495 {types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
2496 {types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
2497 }
2498
// uint64<->float conversions, only on machines that have the instructions for it
2500 var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
2501 {types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
2502 {types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
2503 {types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
2504 {types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
2505 }
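// Each table entry is applied as op2(op1(v)), passing through intermediateType.
// For example, int16 -> float64 uses {SignExt16to32, Cvt32to64F, TINT32}:
// sign-extend to int32 first, then convert the int32 to float64.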
2506
2507 var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
2508 {ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
2509 {ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
2510 {ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
2511 {ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
2512 {ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
2513 {ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
2514 {ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
2515 {ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,
2516
2517 {ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
2518 {ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
2519 {ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
2520 {ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
2521 {ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
2522 {ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
2523 {ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
2524 {ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,
2525
2526 {ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
2527 {ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
2528 {ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
2529 {ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
2530 {ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
2531 {ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
2532 {ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
2533 {ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,
2534
2535 {ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
2536 {ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
2537 {ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
2538 {ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
2539 {ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
2540 {ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
2541 {ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
2542 {ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,
2543
2544 {ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
2545 {ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
2546 {ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
2547 {ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
2548 {ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
2549 {ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
2550 {ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
2551 {ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,
2552
2553 {ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
2554 {ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
2555 {ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
2556 {ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
2557 {ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
2558 {ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
2559 {ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
2560 {ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,
2561
2562 {ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
2563 {ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
2564 {ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
2565 {ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
2566 {ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
2567 {ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
2568 {ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
2569 {ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,
2570
2571 {ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
2572 {ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
2573 {ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
2574 {ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
2575 {ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
2576 {ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
2577 {ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
2578 {ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
2579 }
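// For example, an int32 value shifted left by a uint8 amount looks up
// {ir.OLSH, types.TINT32, types.TUINT8} and yields ssa.OpLsh32x8.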

// ssaShiftOp returns the SSA opcode for shifting a value of type t by an
// amount of type u.
2581 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2582 etype1 := s.concreteEtype(t)
2583 etype2 := s.concreteEtype(u)
2584 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2585 if !ok {
2586 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2587 }
2588 return x
2589 }

// uintptrConstant returns v as a uintptr-typed constant, sized to the
// target's pointer size.
2591 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2592 if s.config.PtrSize == 4 {
2593 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2594 }
2595 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2596 }

// conv converts the value v, which has type ft, to a value of type tt.
2598 func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
2599 if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
// Bool -> uint8 conversions are generated internally by the compiler;
// lower them directly.
2601 return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
2602 }
2603 if ft.IsInteger() && tt.IsInteger() {
2604 var op ssa.Op
2605 if tt.Size() == ft.Size() {
2606 op = ssa.OpCopy
2607 } else if tt.Size() < ft.Size() {
// truncation
2609 switch 10*ft.Size() + tt.Size() {
2610 case 21:
2611 op = ssa.OpTrunc16to8
2612 case 41:
2613 op = ssa.OpTrunc32to8
2614 case 42:
2615 op = ssa.OpTrunc32to16
2616 case 81:
2617 op = ssa.OpTrunc64to8
2618 case 82:
2619 op = ssa.OpTrunc64to16
2620 case 84:
2621 op = ssa.OpTrunc64to32
2622 default:
2623 s.Fatalf("weird integer truncation %v -> %v", ft, tt)
2624 }
2625 } else if ft.IsSigned() {
// sign extension
2627 switch 10*ft.Size() + tt.Size() {
2628 case 12:
2629 op = ssa.OpSignExt8to16
2630 case 14:
2631 op = ssa.OpSignExt8to32
2632 case 18:
2633 op = ssa.OpSignExt8to64
2634 case 24:
2635 op = ssa.OpSignExt16to32
2636 case 28:
2637 op = ssa.OpSignExt16to64
2638 case 48:
2639 op = ssa.OpSignExt32to64
2640 default:
2641 s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
2642 }
2643 } else {
// zero extension
2645 switch 10*ft.Size() + tt.Size() {
2646 case 12:
2647 op = ssa.OpZeroExt8to16
2648 case 14:
2649 op = ssa.OpZeroExt8to32
2650 case 18:
2651 op = ssa.OpZeroExt8to64
2652 case 24:
2653 op = ssa.OpZeroExt16to32
2654 case 28:
2655 op = ssa.OpZeroExt16to64
2656 case 48:
2657 op = ssa.OpZeroExt32to64
2658 default:
s.Fatalf("weird integer zero extension %v -> %v", ft, tt)
2660 }
2661 }
2662 return s.newValue1(op, tt, v)
2663 }
2664
2665 if ft.IsComplex() && tt.IsComplex() {
2666 var op ssa.Op
2667 if ft.Size() == tt.Size() {
2668 switch ft.Size() {
2669 case 8:
2670 op = ssa.OpRound32F
2671 case 16:
2672 op = ssa.OpRound64F
2673 default:
2674 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2675 }
2676 } else if ft.Size() == 8 && tt.Size() == 16 {
2677 op = ssa.OpCvt32Fto64F
2678 } else if ft.Size() == 16 && tt.Size() == 8 {
2679 op = ssa.OpCvt64Fto32F
2680 } else {
2681 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2682 }
2683 ftp := types.FloatForComplex(ft)
2684 ttp := types.FloatForComplex(tt)
2685 return s.newValue2(ssa.OpComplexMake, tt,
2686 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
2687 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
2688 }
2689
if tt.IsComplex() { // and ft is not complex
// Build the complex value from the float element type;
// the imaginary part is zero.
2692 et := types.FloatForComplex(tt)
2693 v = s.conv(n, v, ft, et)
2694 return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
2695 }
2696
2697 if ft.IsFloat() || tt.IsFloat() {
2698 conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
2699 if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
2700 if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2701 conv = conv1
2702 }
2703 }
2704 if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
2705 if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2706 conv = conv1
2707 }
2708 }
2709
2710 if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
2711 if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
// tt is float32 or float64, and ft is an unsigned integer
2713 if tt.Size() == 4 {
2714 return s.uint32Tofloat32(n, v, ft, tt)
2715 }
2716 if tt.Size() == 8 {
2717 return s.uint32Tofloat64(n, v, ft, tt)
2718 }
2719 } else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
// ft is float32 or float64, and tt is an unsigned integer
2721 if ft.Size() == 4 {
2722 return s.float32ToUint32(n, v, ft, tt)
2723 }
2724 if ft.Size() == 8 {
2725 return s.float64ToUint32(n, v, ft, tt)
2726 }
2727 }
2728 }
2729
2730 if !ok {
2731 s.Fatalf("weird float conversion %v -> %v", ft, tt)
2732 }
2733 op1, op2, it := conv.op1, conv.op2, conv.intermediateType
2734
2735 if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
// normal case, not tripping over unsigned 64
2737 if op1 == ssa.OpCopy {
2738 if op2 == ssa.OpCopy {
2739 return v
2740 }
2741 return s.newValueOrSfCall1(op2, tt, v)
2742 }
2743 if op2 == ssa.OpCopy {
2744 return s.newValueOrSfCall1(op1, tt, v)
2745 }
2746 return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
2747 }

// Tricky 64-bit unsigned cases.
if ft.IsInteger() {
// tt is float32 or float64, and ft is an unsigned integer
2751 if tt.Size() == 4 {
2752 return s.uint64Tofloat32(n, v, ft, tt)
2753 }
2754 if tt.Size() == 8 {
2755 return s.uint64Tofloat64(n, v, ft, tt)
2756 }
2757 s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
}

// ft is float32 or float64, and tt is an unsigned integer
2760 if ft.Size() == 4 {
2761 return s.float32ToUint64(n, v, ft, tt)
2762 }
2763 if ft.Size() == 8 {
2764 return s.float64ToUint64(n, v, ft, tt)
2765 }
2766 s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
2767 return nil
2768 }
2769
2770 s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
2771 return nil
2772 }
2773
// expr converts the expression n to ssa, adds it to s and returns the ssa result.
2775 func (s *state) expr(n ir.Node) *ssa.Value {
2776 return s.exprCheckPtr(n, true)
2777 }
// exprCheckPtr is like expr, but if checkPtrOK is false, checkptr
// instrumentation is not emitted for a top-level unsafe.Pointer conversion.
2779 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2780 if ir.HasUniquePos(n) {
// ONAMEs and named OLITERALs have the line number of their declaration,
// not their use; only push positions that are unique to this node.
2783 s.pushLine(n.Pos())
2784 defer s.popLine()
2785 }
2786
2787 s.stmtList(n.Init())
2788 switch n.Op() {
2789 case ir.OBYTES2STRTMP:
2790 n := n.(*ir.ConvExpr)
2791 slice := s.expr(n.X)
2792 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2793 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2794 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2795 case ir.OSTR2BYTESTMP:
2796 n := n.(*ir.ConvExpr)
2797 str := s.expr(n.X)
2798 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2799 if !n.NonNil() {
// We need to ensure []byte("") evaluates to []byte{}, not []byte(nil).
2803 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2804 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2805 ptr = s.ternary(cond, ptr, zerobase)
2806 }
2807 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2808 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2809 case ir.OCFUNC:
2810 n := n.(*ir.UnaryExpr)
2811 aux := n.X.(*ir.Name).Linksym()
// OCFUNC is used to build function values, which must
// always reference ABIInternal entry points.
2814 if aux.ABI() != obj.ABIInternal {
2815 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2816 }
2817 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2818 case ir.ONAME:
2819 n := n.(*ir.Name)
2820 if n.Class == ir.PFUNC {
// "value" of a function is the address of the function's closure
2822 sym := staticdata.FuncLinksym(n)
2823 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2824 }
2825 if s.canSSA(n) {
2826 return s.variable(n, n.Type())
2827 }
2828 return s.load(n.Type(), s.addr(n))
2829 case ir.OLINKSYMOFFSET:
2830 n := n.(*ir.LinksymOffsetExpr)
2831 return s.load(n.Type(), s.addr(n))
2832 case ir.ONIL:
2833 n := n.(*ir.NilExpr)
2834 t := n.Type()
2835 switch {
2836 case t.IsSlice():
2837 return s.constSlice(t)
2838 case t.IsInterface():
2839 return s.constInterface(t)
2840 default:
2841 return s.constNil(t)
2842 }
2843 case ir.OLITERAL:
2844 switch u := n.Val(); u.Kind() {
2845 case constant.Int:
2846 i := ir.IntVal(n.Type(), u)
2847 switch n.Type().Size() {
2848 case 1:
2849 return s.constInt8(n.Type(), int8(i))
2850 case 2:
2851 return s.constInt16(n.Type(), int16(i))
2852 case 4:
2853 return s.constInt32(n.Type(), int32(i))
2854 case 8:
2855 return s.constInt64(n.Type(), i)
2856 default:
2857 s.Fatalf("bad integer size %d", n.Type().Size())
2858 return nil
2859 }
2860 case constant.String:
2861 i := constant.StringVal(u)
2862 if i == "" {
2863 return s.constEmptyString(n.Type())
2864 }
2865 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
2866 case constant.Bool:
2867 return s.constBool(constant.BoolVal(u))
2868 case constant.Float:
2869 f, _ := constant.Float64Val(u)
2870 switch n.Type().Size() {
2871 case 4:
2872 return s.constFloat32(n.Type(), f)
2873 case 8:
2874 return s.constFloat64(n.Type(), f)
2875 default:
2876 s.Fatalf("bad float size %d", n.Type().Size())
2877 return nil
2878 }
2879 case constant.Complex:
2880 re, _ := constant.Float64Val(constant.Real(u))
2881 im, _ := constant.Float64Val(constant.Imag(u))
2882 switch n.Type().Size() {
2883 case 8:
2884 pt := types.Types[types.TFLOAT32]
2885 return s.newValue2(ssa.OpComplexMake, n.Type(),
2886 s.constFloat32(pt, re),
2887 s.constFloat32(pt, im))
2888 case 16:
2889 pt := types.Types[types.TFLOAT64]
2890 return s.newValue2(ssa.OpComplexMake, n.Type(),
2891 s.constFloat64(pt, re),
2892 s.constFloat64(pt, im))
2893 default:
2894 s.Fatalf("bad complex size %d", n.Type().Size())
2895 return nil
2896 }
2897 default:
2898 s.Fatalf("unhandled OLITERAL %v", u.Kind())
2899 return nil
2900 }
2901 case ir.OCONVNOP:
2902 n := n.(*ir.ConvExpr)
2903 to := n.Type()
2904 from := n.X.Type()

// Assume everything will work out, so set up our return value.
// Anything interesting that happens from here is a fatal.
2908 x := s.expr(n.X)
2909 if to == from {
2910 return x
2911 }

// Special case for not confusing GC and liveness.
// We don't want pointers accidentally classified
// as not-pointers or vice-versa because of copy
// elision.
2917 if to.IsPtrShaped() != from.IsPtrShaped() {
2918 return s.newValue2(ssa.OpConvert, to, x, s.mem())
2919 }
2920
v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type

// CONVNOP closure
2924 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
2925 return v
2926 }

// named <--> unnamed type or typed <--> untyped type
2929 if from.Kind() == to.Kind() {
2930 return v
2931 }

// unsafe.Pointer <--> *T
2934 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
2935 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
2936 s.checkPtrAlignment(n, v, nil)
2937 }
2938 return v
2939 }

// map <--> *hmap
2942 if to.Kind() == types.TMAP && from == types.NewPtr(reflectdata.MapType()) {
2943 return v
2944 }
2945
2946 types.CalcSize(from)
2947 types.CalcSize(to)
2948 if from.Size() != to.Size() {
2949 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
2950 return nil
2951 }
2952 if etypesign(from.Kind()) != etypesign(to.Kind()) {
2953 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
2954 return nil
2955 }
2956
2957 if base.Flag.Cfg.Instrumenting {
// These appear to be fine, but they fail the
// integer constraint below, so okay them here.
// Sample non-integer conversion: map[string]string -> *uint8
2961 return v
2962 }
2963
2964 if etypesign(from.Kind()) == 0 {
2965 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
2966 return nil
2967 }

// integer, same width, same sign
2970 return v
2971
2972 case ir.OCONV:
2973 n := n.(*ir.ConvExpr)
2974 x := s.expr(n.X)
2975 return s.conv(n, x, n.X.Type(), n.Type())
2976
2977 case ir.ODOTTYPE:
2978 n := n.(*ir.TypeAssertExpr)
2979 res, _ := s.dottype(n, false)
2980 return res
2981
2982 case ir.ODYNAMICDOTTYPE:
2983 n := n.(*ir.DynamicTypeAssertExpr)
2984 res, _ := s.dynamicDottype(n, false)
2985 return res
2986
2987
2988 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
2989 n := n.(*ir.BinaryExpr)
2990 a := s.expr(n.X)
2991 b := s.expr(n.Y)
2992 if n.X.Type().IsComplex() {
2993 pt := types.FloatForComplex(n.X.Type())
2994 op := s.ssaOp(ir.OEQ, pt)
2995 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
2996 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
2997 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
2998 switch n.Op() {
2999 case ir.OEQ:
3000 return c
3001 case ir.ONE:
3002 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3003 default:
3004 s.Fatalf("ordered complex compare %v", n.Op())
3005 }
3006 }

// Convert OGE and OGT into OLE and OLT.
3009 op := n.Op()
3010 switch op {
3011 case ir.OGE:
3012 op, a, b = ir.OLE, b, a
3013 case ir.OGT:
3014 op, a, b = ir.OLT, b, a
3015 }
3016 if n.X.Type().IsFloat() {
// float comparison
3018 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3019 }
// integer comparison
3021 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3022 case ir.OMUL:
3023 n := n.(*ir.BinaryExpr)
3024 a := s.expr(n.X)
3025 b := s.expr(n.Y)
3026 if n.Type().IsComplex() {
3027 mulop := ssa.OpMul64F
3028 addop := ssa.OpAdd64F
3029 subop := ssa.OpSub64F
pt := types.FloatForComplex(n.Type()) // Could be Float32 or Float64
wt := types.Types[types.TFLOAT64]     // Compute in Float64 to minimize cancellation error
3032
3033 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3034 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3035 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3036 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3037
3038 if pt != wt {
3039 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3040 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3041 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3042 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3043 }
3044
3045 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3046 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3047
3048 if pt != wt {
3049 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3050 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3051 }
3052
3053 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3054 }
3055
3056 if n.Type().IsFloat() {
3057 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3058 }
3059
3060 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3061
3062 case ir.ODIV:
3063 n := n.(*ir.BinaryExpr)
3064 a := s.expr(n.X)
3065 b := s.expr(n.Y)
3066 if n.Type().IsComplex() {
// TODO this is not executed because the front-end substitutes a runtime call.
// That probably ought to change; with modest optimization the widen/narrow
// conversions could all be elided in larger expression trees.
3070 mulop := ssa.OpMul64F
3071 addop := ssa.OpAdd64F
3072 subop := ssa.OpSub64F
3073 divop := ssa.OpDiv64F
pt := types.FloatForComplex(n.Type()) // Could be Float32 or Float64
wt := types.Types[types.TFLOAT64]     // Compute in Float64 to minimize cancellation error
3076
3077 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3078 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3079 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3080 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3081
3082 if pt != wt {
3083 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3084 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3085 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3086 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3087 }
3088
3089 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3090 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3091 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3092
// TODO not sure if this is best done in wide precision or narrow;
// double-rounding might be an issue. Note that the pre-SSA
// implementation does the entire calculation in wide precision,
// so it should be OK.
3097 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3098 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3099
3100 if pt != wt {
3101 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3102 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3103 }
3104 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3105 }
3106 if n.Type().IsFloat() {
3107 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3108 }
3109 return s.intDivide(n, a, b)
3110 case ir.OMOD:
3111 n := n.(*ir.BinaryExpr)
3112 a := s.expr(n.X)
3113 b := s.expr(n.Y)
3114 return s.intDivide(n, a, b)
3115 case ir.OADD, ir.OSUB:
3116 n := n.(*ir.BinaryExpr)
3117 a := s.expr(n.X)
3118 b := s.expr(n.Y)
3119 if n.Type().IsComplex() {
3120 pt := types.FloatForComplex(n.Type())
3121 op := s.ssaOp(n.Op(), pt)
3122 return s.newValue2(ssa.OpComplexMake, n.Type(),
3123 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3124 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3125 }
3126 if n.Type().IsFloat() {
3127 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3128 }
3129 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3130 case ir.OAND, ir.OOR, ir.OXOR:
3131 n := n.(*ir.BinaryExpr)
3132 a := s.expr(n.X)
3133 b := s.expr(n.Y)
3134 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3135 case ir.OANDNOT:
3136 n := n.(*ir.BinaryExpr)
3137 a := s.expr(n.X)
3138 b := s.expr(n.Y)
// There is no dedicated SSA opcode for &^; lower it as a & ^b.
b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3140 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3141 case ir.OLSH, ir.ORSH:
3142 n := n.(*ir.BinaryExpr)
3143 a := s.expr(n.X)
3144 b := s.expr(n.Y)
3145 bt := b.Type
3146 if bt.IsSigned() {
// Check for a negative shift amount: 0 <= b must hold, else panicshift.
cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3148 s.check(cmp, ir.Syms.Panicshift)
3149 bt = bt.ToUnsigned()
3150 }
3151 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3152 case ir.OANDAND, ir.OOROR:
// To implement OANDAND (and OOROR), we introduce a
// new temporary variable to hold the result. The
// variable is associated with the OANDAND node in the
// s.vars table (normally variables are only
// associated with ONAME nodes). We convert
//     A && B
// to
//     var = A
//     if var {
//         var = B
//     }
// Using var in the subsequent block introduces the
// necessary phi variable.
3166 n := n.(*ir.LogicalExpr)
3167 el := s.expr(n.X)
3168 s.vars[n] = el
3169
3170 b := s.endBlock()
3171 b.Kind = ssa.BlockIf
3172 b.SetControl(el)

// In theory, we should set b.Likely here based on context.
// However, gc only gives us likeliness hints in a few cases,
// and it isn't clear that this is one of them.
3178 bRight := s.f.NewBlock(ssa.BlockPlain)
3179 bResult := s.f.NewBlock(ssa.BlockPlain)
3180 if n.Op() == ir.OANDAND {
3181 b.AddEdgeTo(bRight)
3182 b.AddEdgeTo(bResult)
3183 } else if n.Op() == ir.OOROR {
3184 b.AddEdgeTo(bResult)
3185 b.AddEdgeTo(bRight)
3186 }
3187
3188 s.startBlock(bRight)
3189 er := s.expr(n.Y)
3190 s.vars[n] = er
3191
3192 b = s.endBlock()
3193 b.AddEdgeTo(bResult)
3194
3195 s.startBlock(bResult)
3196 return s.variable(n, types.Types[types.TBOOL])
3197 case ir.OCOMPLEX:
3198 n := n.(*ir.BinaryExpr)
3199 r := s.expr(n.X)
3200 i := s.expr(n.Y)
3201 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)

// unary ops
3204 case ir.ONEG:
3205 n := n.(*ir.UnaryExpr)
3206 a := s.expr(n.X)
3207 if n.Type().IsComplex() {
3208 tp := types.FloatForComplex(n.Type())
3209 negop := s.ssaOp(n.Op(), tp)
3210 return s.newValue2(ssa.OpComplexMake, n.Type(),
3211 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3212 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3213 }
3214 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3215 case ir.ONOT, ir.OBITNOT:
3216 n := n.(*ir.UnaryExpr)
3217 a := s.expr(n.X)
3218 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3219 case ir.OIMAG, ir.OREAL:
3220 n := n.(*ir.UnaryExpr)
3221 a := s.expr(n.X)
3222 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3223 case ir.OPLUS:
3224 n := n.(*ir.UnaryExpr)
3225 return s.expr(n.X)
3226
3227 case ir.OADDR:
3228 n := n.(*ir.AddrExpr)
3229 return s.addr(n.X)
3230
3231 case ir.ORESULT:
3232 n := n.(*ir.ResultExpr)
3233 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3234 panic("Expected to see a previous call")
3235 }
3236 which := n.Index
3237 if which == -1 {
3238 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3239 }
3240 return s.resultOfCall(s.prevCall, which, n.Type())
3241
3242 case ir.ODEREF:
3243 n := n.(*ir.StarExpr)
3244 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3245 return s.load(n.Type(), p)
3246
3247 case ir.ODOT:
3248 n := n.(*ir.SelectorExpr)
3249 if n.X.Op() == ir.OSTRUCTLIT {
// All literals with nonzero fields have already been
// rewritten during walk. Any that remain are just T{}
// or equivalents. Use the zero value.
3253 if !ir.IsZero(n.X) {
3254 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3255 }
3256 return s.zeroVal(n.Type())
3257 }

// If n is addressable and can't be represented in
// SSA, then load just the selected field. This
// prevents false memory dependencies in race/msan/asan
// instrumentation.
3262 if ir.IsAddressable(n) && !s.canSSA(n) {
3263 p := s.addr(n)
3264 return s.load(n.Type(), p)
3265 }
3266 v := s.expr(n.X)
3267 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3268
3269 case ir.ODOTPTR:
3270 n := n.(*ir.SelectorExpr)
3271 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3272 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3273 return s.load(n.Type(), p)
3274
3275 case ir.OINDEX:
3276 n := n.(*ir.IndexExpr)
3277 switch {
3278 case n.X.Type().IsString():
3279 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
// Replace "abc"[1] with 'b'.
// Delayed until now because "abc"[1] is not an ideal constant.
// See test/fixedbugs/issue11370.go.
3283 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3284 }
3285 a := s.expr(n.X)
3286 i := s.expr(n.Index)
3287 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3288 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3289 ptrtyp := s.f.Config.Types.BytePtr
3290 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3291 if ir.IsConst(n.Index, constant.Int) {
3292 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3293 } else {
3294 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3295 }
3296 return s.load(types.Types[types.TUINT8], ptr)
3297 case n.X.Type().IsSlice():
3298 p := s.addr(n)
3299 return s.load(n.X.Type().Elem(), p)
3300 case n.X.Type().IsArray():
3301 if ssa.CanSSA(n.X.Type()) {
// SSA can handle arrays of length at most 1.
3303 bound := n.X.Type().NumElem()
3304 a := s.expr(n.X)
3305 i := s.expr(n.Index)
3306 if bound == 0 {
// Bounds check will never succeed. Might as well
// use constants for the bounds check.
3309 z := s.constInt(types.Types[types.TINT], 0)
3310 s.boundsCheck(z, z, ssa.BoundsIndex, false)
// The return value won't be live; return junk.
// But not quite junk, in case bounds checks are turned off. See issue 48092.
3313 return s.zeroVal(n.Type())
3314 }
3315 len := s.constInt(types.Types[types.TINT], bound)
s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded()) // checks i == 0
3317 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3318 }
3319 p := s.addr(n)
3320 return s.load(n.X.Type().Elem(), p)
3321 default:
3322 s.Fatalf("bad type for index %v", n.X.Type())
3323 return nil
3324 }
3325
3326 case ir.OLEN, ir.OCAP:
3327 n := n.(*ir.UnaryExpr)
3328 switch {
3329 case n.X.Type().IsSlice():
3330 op := ssa.OpSliceLen
3331 if n.Op() == ir.OCAP {
3332 op = ssa.OpSliceCap
3333 }
3334 return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
3335 case n.X.Type().IsString():
3336 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
3337 case n.X.Type().IsMap(), n.X.Type().IsChan():
3338 return s.referenceTypeBuiltin(n, s.expr(n.X))
default: // array
3340 return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
3341 }
3342
3343 case ir.OSPTR:
3344 n := n.(*ir.UnaryExpr)
3345 a := s.expr(n.X)
3346 if n.X.Type().IsSlice() {
3347 if n.Bounded() {
3348 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3349 }
3350 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3351 } else {
3352 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3353 }
3354
3355 case ir.OITAB:
3356 n := n.(*ir.UnaryExpr)
3357 a := s.expr(n.X)
3358 return s.newValue1(ssa.OpITab, n.Type(), a)
3359
3360 case ir.OIDATA:
3361 n := n.(*ir.UnaryExpr)
3362 a := s.expr(n.X)
3363 return s.newValue1(ssa.OpIData, n.Type(), a)
3364
3365 case ir.OMAKEFACE:
3366 n := n.(*ir.BinaryExpr)
3367 tab := s.expr(n.X)
3368 data := s.expr(n.Y)
3369 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3370
3371 case ir.OSLICEHEADER:
3372 n := n.(*ir.SliceHeaderExpr)
3373 p := s.expr(n.Ptr)
3374 l := s.expr(n.Len)
3375 c := s.expr(n.Cap)
3376 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3377
3378 case ir.OSTRINGHEADER:
3379 n := n.(*ir.StringHeaderExpr)
3380 p := s.expr(n.Ptr)
3381 l := s.expr(n.Len)
3382 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3383
3384 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3385 n := n.(*ir.SliceExpr)
3386 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3387 v := s.exprCheckPtr(n.X, !check)
3388 var i, j, k *ssa.Value
3389 if n.Low != nil {
3390 i = s.expr(n.Low)
3391 }
3392 if n.High != nil {
3393 j = s.expr(n.High)
3394 }
3395 if n.Max != nil {
3396 k = s.expr(n.Max)
3397 }
3398 p, l, c := s.slice(v, i, j, k, n.Bounded())
3399 if check {
// Emit checkptr instrumentation after the bounds check to prevent a
// false positive, see issue 46938.
3401 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3402 }
3403 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3404
3405 case ir.OSLICESTR:
3406 n := n.(*ir.SliceExpr)
3407 v := s.expr(n.X)
3408 var i, j *ssa.Value
3409 if n.Low != nil {
3410 i = s.expr(n.Low)
3411 }
3412 if n.High != nil {
3413 j = s.expr(n.High)
3414 }
3415 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3416 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3417
3418 case ir.OSLICE2ARRPTR:
// Lowers to:
//   if arrlen > slice.len {
//     panic(...)
//   }
//   slice.ptr
3423 n := n.(*ir.ConvExpr)
3424 v := s.expr(n.X)
3425 nelem := n.Type().Elem().NumElem()
3426 arrlen := s.constInt(types.Types[types.TINT], nelem)
3427 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3428 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3429 op := ssa.OpSlicePtr
3430 if nelem == 0 {
3431 op = ssa.OpSlicePtrUnchecked
3432 }
3433 return s.newValue1(op, n.Type(), v)
3434
3435 case ir.OCALLFUNC:
3436 n := n.(*ir.CallExpr)
3437 if ir.IsIntrinsicCall(n) {
3438 return s.intrinsicCall(n)
3439 }
3440 fallthrough
3441
3442 case ir.OCALLINTER:
3443 n := n.(*ir.CallExpr)
3444 return s.callResult(n, callNormal)
3445
3446 case ir.OGETG:
3447 n := n.(*ir.CallExpr)
3448 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3449
3450 case ir.OGETCALLERPC:
3451 n := n.(*ir.CallExpr)
3452 return s.newValue0(ssa.OpGetCallerPC, n.Type())
3453
3454 case ir.OGETCALLERSP:
3455 n := n.(*ir.CallExpr)
3456 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3457
3458 case ir.OAPPEND:
3459 return s.append(n.(*ir.CallExpr), false)
3460
3461 case ir.OMIN, ir.OMAX:
3462 return s.minMax(n.(*ir.CallExpr))
3463
3464 case ir.OSTRUCTLIT, ir.OARRAYLIT:
// All literals with nonzero fields have already been
// rewritten during walk. Any that remain are just T{}
// or equivalents. Use the zero value.
3468 n := n.(*ir.CompLitExpr)
3469 if !ir.IsZero(n) {
3470 s.Fatalf("literal with nonzero value in SSA: %v", n)
3471 }
3472 return s.zeroVal(n.Type())
3473
3474 case ir.ONEW:
3475 n := n.(*ir.UnaryExpr)
3476 var rtype *ssa.Value
3477 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3478 rtype = s.expr(x.RType)
3479 }
3480 return s.newObject(n.Type().Elem(), rtype)
3481
3482 case ir.OUNSAFEADD:
3483 n := n.(*ir.BinaryExpr)
3484 ptr := s.expr(n.X)
3485 len := s.expr(n.Y)
3486
// Force len to uintptr to prevent misuse of garbage bits in the
// upper part of the register (#48536).
3489 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3490
3491 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3492
3493 default:
3494 s.Fatalf("unhandled expr %v", n.Op())
3495 return nil
3496 }
3497 }

// resultOfCall returns the which'th result of the call c, of type t.
3499 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3500 aux := c.Aux.(*ssa.AuxCall)
3501 pa := aux.ParamAssignmentForResult(which)
// TODO(register args) determine if in-memory TypeOK is better loaded early
// from SelectNAddr or later when SelectN is expanded.
3504 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3505 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3506 return s.rawLoad(t, addr)
3507 }
3508 return s.newValue1I(ssa.OpSelectN, t, which, c)
3509 }

// resultAddrOfCall returns the address of the which'th result of the call c.
3511 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3512 aux := c.Aux.(*ssa.AuxCall)
3513 pa := aux.ParamAssignmentForResult(which)
3514 if len(pa.Registers) == 0 {
3515 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3516 }
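// The result is returned in registers, so it has no home address;
// spill it to a fresh stack temp and return that temp's address.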
3517 _, addr := s.temp(c.Pos, t)
3518 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3519 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3520 return addr
3521 }
3522

// append converts an OAPPEND node to SSA.
// If inplace is false, it converts the OAPPEND expression n to an ssa.Value,
// adds it to s, and returns the Value.
// If inplace is true, it writes the result of the OAPPEND expression n
// back to the slice being appended to, and returns nil.
// inplace MUST be set to false if the slice can be SSA'd.
// Note: this code only handles fixed-count appends. Dotdotdot appends
// were already rewritten at this point (by walk).
3531 func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
// If inplace is false, process as expression "append(s, e1, e2, e3)":
//
//	ptr, len, cap := s
//	len += 3
//	if uint(len) > uint(cap) {
//		ptr, len, cap = growslice(ptr, len, cap, 3, typ)
//		Note that len is unmodified by growslice.
//	}
//	// with write barriers, if needed:
//	*(ptr+(len-3)) = e1
//	*(ptr+(len-2)) = e2
//	*(ptr+(len-1)) = e3
//	return makeslice(ptr, len, cap)
//
// If inplace is true, process as statement "s = append(s, e1, e2, e3)":
//
//	a := &s
//	ptr, len, cap := s
//	len += 3
//	if uint(len) > uint(cap) {
//		ptr, len, cap = growslice(ptr, len, cap, 3, typ)
//		vardef(a)    // if necessary, advise liveness we are writing a new a
//		*a.cap = cap // write before ptr to avoid a spill
//		*a.ptr = ptr // with write barrier
//	}
//	*a.len = len
//	// with write barriers, if needed:
//	*(ptr+(len-3)) = e1
//	*(ptr+(len-2)) = e2
//	*(ptr+(len-1)) = e3
3564 et := n.Type().Elem()
3565 pt := types.NewPtr(et)

// Evaluate slice
sn := n.Args[0] // the slice node is the first in the list
3569 var slice, addr *ssa.Value
3570 if inplace {
3571 addr = s.addr(sn)
3572 slice = s.load(n.Type(), addr)
3573 } else {
3574 slice = s.expr(sn)
3575 }

// Allocate new blocks for the grow and assign paths.
3578 grow := s.f.NewBlock(ssa.BlockPlain)
3579 assign := s.f.NewBlock(ssa.BlockPlain)

// Decompose input slice.
3582 p := s.newValue1(ssa.OpSlicePtr, pt, slice)
3583 l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3584 c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

// Add number of new elements to length.
3587 nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
3588 l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

// Decide if we need to grow: uint(newlen) > uint(cap).
3591 cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

// Record values of ptr/len/cap before branch.
3594 s.vars[ptrVar] = p
3595 s.vars[lenVar] = l
3596 if !inplace {
3597 s.vars[capVar] = c
3598 }
3599
3600 b := s.endBlock()
3601 b.Kind = ssa.BlockIf
3602 b.Likely = ssa.BranchUnlikely
3603 b.SetControl(cmp)
3604 b.AddEdgeTo(grow)
3605 b.AddEdgeTo(assign)

// Call growslice.
3608 s.startBlock(grow)
3609 taddr := s.expr(n.Fun)
3610 r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

// Decompose output slice.
3613 p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
3614 l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
3615 c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])
3616
3617 s.vars[ptrVar] = p
3618 s.vars[lenVar] = l
3619 s.vars[capVar] = c
3620 if inplace {
3621 if sn.Op() == ir.ONAME {
3622 sn := sn.(*ir.Name)
3623 if sn.Class != ir.PEXTERN {
// Tell liveness we're about to build a new slice.
3625 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
3626 }
3627 }
3628 capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
3629 s.store(types.Types[types.TINT], capaddr, c)
3630 s.store(pt, addr, p)
3631 }
3632
3633 b = s.endBlock()
3634 b.AddEdgeTo(assign)

// Both the grow path and the no-grow path land here.
3637 s.startBlock(assign)
3638 p = s.variable(ptrVar, pt)
3639 l = s.variable(lenVar, types.Types[types.TINT])
3640 if !inplace {
3641 c = s.variable(capVar, types.Types[types.TINT])
3642 }
3643
3644 if inplace {
// Update the len field of the slice header in place.
3647 lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
3648 s.store(types.Types[types.TINT], lenaddr, l)
3649 }

// Evaluate args.
3652 type argRec struct {
// if store is true, we're going to store v directly (memory = v).
// Otherwise, v is an address and we copy from it (memory = *v).
3655 v *ssa.Value
3656 store bool
3657 }
3658 args := make([]argRec, 0, len(n.Args[1:]))
3659 for _, n := range n.Args[1:] {
3660 if ssa.CanSSA(n.Type()) {
3661 args = append(args, argRec{v: s.expr(n), store: true})
3662 } else {
3663 v := s.addr(n)
3664 args = append(args, argRec{v: v})
3665 }
3666 }

// Write args into the slice, starting at the old length.
3669 oldLen := s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
3670 p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
3671 for i, arg := range args {
3672 addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
3673 if arg.store {
3674 s.storeType(et, addr, arg.v, 0, true)
3675 } else {
3676 s.move(et, addr, arg.v)
3677 }
3678 }
3679

// The ptr/len/cap temporaries are not valid past this point;
// remove them from the variable table.
3684 delete(s.vars, ptrVar)
3685 delete(s.vars, lenVar)
3686 if !inplace {
3687 delete(s.vars, capVar)
3688 }
3689

// Make result.
3691 if inplace {
3692 return nil
3693 }
3694 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3695 }
3696

// minMax converts an OMIN/OMAX builtin call into SSA.
3698 func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
// fold applies op pairwise across the arguments, left to right.
3702 fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
3703 x := s.expr(n.Args[0])
3704 for _, arg := range n.Args[1:] {
3705 x = op(x, s.expr(arg))
3706 }
3707 return x
3708 }
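// For example, min(a, b, c) evaluates left to right as op(op(a, b), c).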
3709
3710 typ := n.Type()
3711
3712 if typ.IsFloat() || typ.IsString() {
// min/max semantics for floats are tricky because of NaNs and
// signed zeros. Some architectures have instructions which
// we can use to generate the right result. For others we must
// call into the runtime instead.
//
// Strings are conceptually simpler, but we still call into
// runtime helpers for them.
3721 if typ.IsFloat() {
3722 hasIntrinsic := false
3723 switch Arch.LinkArch.Family {
3724 case sys.AMD64, sys.ARM64, sys.RISCV64:
3725 hasIntrinsic = true
3726 case sys.PPC64:
3727 hasIntrinsic = buildcfg.GOPPC64 >= 9
3728 }
3729
3730 if hasIntrinsic {
3731 var op ssa.Op
3732 switch {
3733 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
3734 op = ssa.OpMin64F
3735 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
3736 op = ssa.OpMax64F
3737 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
3738 op = ssa.OpMin32F
3739 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
3740 op = ssa.OpMax32F
3741 }
3742 return fold(func(x, a *ssa.Value) *ssa.Value {
3743 return s.newValue2(op, typ, x, a)
3744 })
3745 }
3746 }
3747 var name string
3748 switch typ.Kind() {
3749 case types.TFLOAT32:
3750 switch n.Op() {
3751 case ir.OMIN:
3752 name = "fmin32"
3753 case ir.OMAX:
3754 name = "fmax32"
3755 }
3756 case types.TFLOAT64:
3757 switch n.Op() {
3758 case ir.OMIN:
3759 name = "fmin64"
3760 case ir.OMAX:
3761 name = "fmax64"
3762 }
3763 case types.TSTRING:
3764 switch n.Op() {
3765 case ir.OMIN:
3766 name = "strmin"
3767 case ir.OMAX:
3768 name = "strmax"
3769 }
3770 }
3771 fn := typecheck.LookupRuntimeFunc(name)
3772
3773 return fold(func(x, a *ssa.Value) *ssa.Value {
3774 return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
3775 })
3776 }
3777
3778 lt := s.ssaOp(ir.OLT, typ)
3779
3780 return fold(func(x, a *ssa.Value) *ssa.Value {
3781 switch n.Op() {
3782 case ir.OMIN:
// a < x ? a : x
3784 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
3785 case ir.OMAX:
// x < a ? a : x
3787 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
3788 }
3789 panic("unreachable")
3790 })
3791 }
3792

// ternary emits code to evaluate cond ? x : y.
3794 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
// Note: we need a fresh ternaryVar each time because the result
// might have a different type each time.
3797 ternaryVar := ssaMarker("ternary")
3798
3799 bThen := s.f.NewBlock(ssa.BlockPlain)
3800 bElse := s.f.NewBlock(ssa.BlockPlain)
3801 bEnd := s.f.NewBlock(ssa.BlockPlain)
3802
3803 b := s.endBlock()
3804 b.Kind = ssa.BlockIf
3805 b.SetControl(cond)
3806 b.AddEdgeTo(bThen)
3807 b.AddEdgeTo(bElse)
3808
3809 s.startBlock(bThen)
3810 s.vars[ternaryVar] = x
3811 s.endBlock().AddEdgeTo(bEnd)
3812
3813 s.startBlock(bElse)
3814 s.vars[ternaryVar] = y
3815 s.endBlock().AddEdgeTo(bEnd)
3816
3817 s.startBlock(bEnd)
3818 r := s.variable(ternaryVar, x.Type)
3819 delete(s.vars, ternaryVar)
3820 return r
3821 }
3822

// condBranch evaluates the boolean expression cond and branches to yes
// if cond is true and no if cond is false.
// This function is intended to handle && and || better than just calling
// s.expr(cond) and branching on the result.
3827 func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
3828 switch cond.Op() {
3829 case ir.OANDAND:
3830 cond := cond.(*ir.LogicalExpr)
3831 mid := s.f.NewBlock(ssa.BlockPlain)
3832 s.stmtList(cond.Init())
3833 s.condBranch(cond.X, mid, no, max8(likely, 0))
3834 s.startBlock(mid)
3835 s.condBranch(cond.Y, yes, no, likely)
3836 return

// Note: if likely==1, then both recursive calls pass 1.
// If likely==-1, then we don't have enough information to decide
// whether the first branch is likely or not, so we pass 0 for
// the likeliness of the first branch.

3843 case ir.OOROR:
3844 cond := cond.(*ir.LogicalExpr)
3845 mid := s.f.NewBlock(ssa.BlockPlain)
3846 s.stmtList(cond.Init())
3847 s.condBranch(cond.X, yes, mid, min8(likely, 0))
3848 s.startBlock(mid)
3849 s.condBranch(cond.Y, yes, no, likely)
3850 return

// Note: if likely==-1, then both recursive calls pass -1.
// If likely==1, then we don't have enough info to decide
// the likelihood of the first branch.

3854 case ir.ONOT:
3855 cond := cond.(*ir.UnaryExpr)
3856 s.stmtList(cond.Init())
3857 s.condBranch(cond.X, no, yes, -likely)
3858 return
3859 case ir.OCONVNOP:
3860 cond := cond.(*ir.ConvExpr)
3861 s.stmtList(cond.Init())
3862 s.condBranch(cond.X, yes, no, likely)
3863 return
3864 }
3865 c := s.expr(cond)
3866 b := s.endBlock()
3867 b.Kind = ssa.BlockIf
3868 b.SetControl(c)
3869 b.Likely = ssa.BranchPrediction(likely)
3870 b.AddEdgeTo(yes)
3871 b.AddEdgeTo(no)
3872 }
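// For example, "if a && b { ... } else { ... }" lowers through condBranch
// roughly as:
//
//	if a { goto mid } else { goto no }
//	mid:
//	if b { goto yes } else { goto no }
//
// so the right operand is only evaluated when the left one is true.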
3873
3874 type skipMask uint8
3875
3876 const (
3877 skipPtr skipMask = 1 << iota
3878 skipLen
3879 skipCap
3880 )
3881

// assign does left = right.
// right has already been evaluated to ssa, left has not.
// If deref is true, then we do left = *right instead (and right has already been nil-checked).
// If deref is true and right == nil, just do left = 0.
// skip indicates assignments (at the top level) that can be avoided.
// mayOverlap indicates whether left&right might partially overlap in memory. Default is false.
3888 func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
3889 s.assignWhichMayOverlap(left, right, deref, skip, false)
3890 }
3891 func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
3892 if left.Op() == ir.ONAME && ir.IsBlank(left) {
3893 return
3894 }
3895 t := left.Type()
3896 types.CalcSize(t)
3897 if s.canSSA(left) {
3898 if deref {
3899 s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
3900 }
3901 if left.Op() == ir.ODOT {
// We're assigning to a field of an ssa-able value.
// We need to build a new structure with the new value for the
// field we're assigning and the old values for the other fields.
// For instance:
//   type T struct {a, b, c int}
//   var x T
//   x.b = 5
// For "x.b = 5" we want to generate x = T{x.a, 5, x.c}.

// Grab information about the structure type.
3912 left := left.(*ir.SelectorExpr)
3913 t := left.X.Type()
3914 nf := t.NumFields()
3915 idx := fieldIdx(left)

// Grab old value of structure.
3918 old := s.expr(left.X)

// Make new structure.
3921 new := s.newValue0(ssa.StructMakeOp(t.NumFields()), t)

// Add fields as args.
3924 for i := 0; i < nf; i++ {
3925 if i == idx {
3926 new.AddArg(right)
3927 } else {
3928 new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
3929 }
3930 }

// Recursively assign the new value we've made to the base of the dot op.
3933 s.assign(left.X, new, false, 0)
3934
3935 return
3936 }
3937 if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
3938 left := left.(*ir.IndexExpr)
3939 s.pushLine(left.Pos())
3940 defer s.popLine()

// We're assigning to an element of an ssa-able array.
// a[i] = v
3943 t := left.X.Type()
3944 n := t.NumElem()
3945
3946 i := s.expr(left.Index)
3947 if n == 0 {
// The bounds check must fail. Might as well
// ignore the actual index and just use zeros.
3950 z := s.constInt(types.Types[types.TINT], 0)
3951 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3952 return
3953 }
3954 if n != 1 {
3955 s.Fatalf("assigning to non-1-length array")
3956 }

// Rewrite to a = [1]{v}.
3958 len := s.constInt(types.Types[types.TINT], 1)
s.boundsCheck(i, len, ssa.BoundsIndex, false) // checks i == 0
3960 v := s.newValue1(ssa.OpArrayMake1, t, right)
3961 s.assign(left.X, v, false, 0)
3962 return
3963 }
3964 left := left.(*ir.Name)
// Update variable assignment.
3966 s.vars[left] = right
3967 s.addNamedValue(left, right)
3968 return
3969 }

// If this assignment clobbers an entire local variable, then emit
// OpVarDef so liveness analysis knows the variable is redefined.
3973 if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
3974 s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
3975 }

// Left is not ssa-able. Compute its address.
3978 addr := s.addr(left)
3979 if ir.IsReflectHeaderDataField(left) {
// Package unsafe's documentation says storing pointers into
// reflect.SliceHeader and reflect.StringHeader's Data fields
// is valid, even though they have type uintptr (#19168).
// Mark it pointer type to signal the writebarrier pass to
// insert a write barrier.
3985 t = types.Types[types.TUNSAFEPTR]
3986 }
3987 if deref {
// Treat as a mem->mem move.
3989 if right == nil {
3990 s.zero(t, addr)
3991 } else {
3992 s.moveWhichMayOverlap(t, addr, right, mayOverlap)
3993 }
3994 return
3995 }

// Treat as a store.
3997 s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
3998 }
3999

// zeroVal returns the zero value for type t.
4001 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4002 switch {
4003 case t.IsInteger():
4004 switch t.Size() {
4005 case 1:
4006 return s.constInt8(t, 0)
4007 case 2:
4008 return s.constInt16(t, 0)
4009 case 4:
4010 return s.constInt32(t, 0)
4011 case 8:
4012 return s.constInt64(t, 0)
4013 default:
4014 s.Fatalf("bad sized integer type %v", t)
4015 }
4016 case t.IsFloat():
4017 switch t.Size() {
4018 case 4:
4019 return s.constFloat32(t, 0)
4020 case 8:
4021 return s.constFloat64(t, 0)
4022 default:
4023 s.Fatalf("bad sized float type %v", t)
4024 }
4025 case t.IsComplex():
4026 switch t.Size() {
4027 case 8:
4028 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4029 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4030 case 16:
4031 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4032 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4033 default:
4034 s.Fatalf("bad sized complex type %v", t)
4035 }
4036
4037 case t.IsString():
4038 return s.constEmptyString(t)
4039 case t.IsPtrShaped():
4040 return s.constNil(t)
4041 case t.IsBoolean():
4042 return s.constBool(false)
4043 case t.IsInterface():
4044 return s.constInterface(t)
4045 case t.IsSlice():
4046 return s.constSlice(t)
4047 case t.IsStruct():
4048 n := t.NumFields()
4049 v := s.entryNewValue0(ssa.StructMakeOp(t.NumFields()), t)
4050 for i := 0; i < n; i++ {
4051 v.AddArg(s.zeroVal(t.FieldType(i)))
4052 }
4053 return v
4054 case t.IsArray():
4055 switch t.NumElem() {
4056 case 0:
4057 return s.entryNewValue0(ssa.OpArrayMake0, t)
4058 case 1:
4059 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4060 }
4061 }
4062 s.Fatalf("zero for type %v not implemented", t)
4063 return nil
4064 }

// callKind distinguishes the flavors of call (normal, deferred, deferred
// on stack, go, tail) that the backend must emit.
type callKind int8
4067
4068 const (
4069 callNormal callKind = iota
4070 callDefer
4071 callDeferStack
4072 callGo
4073 callTail
4074 )
4075
4076 type sfRtCallDef struct {
4077 rtfn *obj.LSym
4078 rtype types.Kind
4079 }
4080
4081 var softFloatOps map[ssa.Op]sfRtCallDef
4082
func softfloatInit() {
	// Some of these operations get transformed by sfcall.
	softFloatOps = map[ssa.Op]sfRtCallDef{
4086 ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
4087 ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		// There is no fsub helper; sfcall negates the second operand
		// and reuses fadd (see sfcall below).
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
4090 ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
4091 ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
4092 ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
4093 ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},
4094
		// Neq is computed as !Eq, and Less/Leq reuse fgt/fge with
		// swapped operands; both rewrites happen in sfcall.
		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},
4103
4104 ssa.OpCvt32to32F: {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
4105 ssa.OpCvt32Fto32: {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
4106 ssa.OpCvt64to32F: {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
4107 ssa.OpCvt32Fto64: {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
4108 ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
4109 ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
4110 ssa.OpCvt32to64F: {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
4111 ssa.OpCvt64Fto32: {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
4112 ssa.OpCvt64to64F: {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
4113 ssa.OpCvt64Fto64: {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
4114 ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
4115 ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
4116 ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
4117 ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
4118 }
4119 }
4120

// sfcall rewrites a floating-point op into a call to the matching
// soft-float runtime helper. It reports whether op was handled.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
4124 f2i := func(t *types.Type) *types.Type {
4125 switch t.Kind() {
4126 case types.TFLOAT32:
4127 return types.Types[types.TUINT32]
4128 case types.TFLOAT64:
4129 return types.Types[types.TUINT64]
4130 }
4131 return t
4132 }
4133
4134 if callDef, ok := softFloatOps[op]; ok {
4135 switch op {
4136 case ssa.OpLess32F,
4137 ssa.OpLess64F,
4138 ssa.OpLeq32F,
4139 ssa.OpLeq64F:
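			// No fless/flesseq helpers exist; swap the operands and
			// use the fgt/fge helpers instead (x < y iff y > x).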
4140 args[0], args[1] = args[1], args[0]
4141 case ssa.OpSub32F,
4142 ssa.OpSub64F:
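			// No fsub helper exists; compute x - y as x + (-y) via fadd.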
4143 args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
4144 }

		// The runtime helpers take and return float bits in integer
		// registers; reinterpret float args as same-size unsigned ints.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}
4153
4154 rt := types.Types[callDef.rtype]
4155 result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
4156 if rt.IsFloat() {
4157 result = s.newValue1(ssa.OpCopy, rt, result)
4158 }
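		// No fneq helper exists; Neq was computed with feq, so negate.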
4159 if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
4160 result = s.newValue1(ssa.OpNot, result.Type, result)
4161 }
4162 return result, true
4163 }
4164 return nil, false
4165 }
4166
4167 var intrinsics map[intrinsicKey]intrinsicBuilder
4168

// An intrinsicBuilder converts a call node n into an ssa value that
// implements that call as an intrinsic. args is a list of arguments to the func.
type intrinsicBuilder func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value
4172
4173 type intrinsicKey struct {
4174 arch *sys.Arch
4175 pkg string
4176 fn string
4177 }
4178
4179 func InitTables() {
4180 intrinsics = map[intrinsicKey]intrinsicBuilder{}
4181
4182 var all []*sys.Arch
4183 var p4 []*sys.Arch
4184 var p8 []*sys.Arch
4185 var lwatomics []*sys.Arch
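	// Partition the architectures: p4/p8 by pointer size, and lwatomics
	// for targets whose plain atomics already provide acquire/release
	// ordering (everything except PPC64, which has dedicated ops).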
4186 for _, a := range &sys.Archs {
4187 all = append(all, a)
4188 if a.PtrSize == 4 {
4189 p4 = append(p4, a)
4190 } else {
4191 p8 = append(p8, a)
4192 }
4193 if a.Family != sys.PPC64 {
4194 lwatomics = append(lwatomics, a)
4195 }
4196 }
4197

	// add adds the intrinsic b for pkg.fn for the given list of architectures.
	add := func(pkg, fn string, b intrinsicBuilder, archs ...*sys.Arch) {
4200 for _, a := range archs {
4201 intrinsics[intrinsicKey{a, pkg, fn}] = b
4202 }
4203 }

	// addF does the same as add but operates on architecture families.
	addF := func(pkg, fn string, b intrinsicBuilder, archFamilies ...sys.ArchFamily) {
4206 m := 0
4207 for _, f := range archFamilies {
4208 if f >= 32 {
4209 panic("too many architecture families")
4210 }
4211 m |= 1 << uint(f)
4212 }
4213 for _, a := range all {
4214 if m>>uint(a.Family)&1 != 0 {
4215 intrinsics[intrinsicKey{a, pkg, fn}] = b
4216 }
4217 }
4218 }

	// alias defines pkg.fn = pkg2.fn2 for all architectures in archs
	// for which pkg2.fn2 exists.
	alias := func(pkg, fn, pkg2, fn2 string, archs ...*sys.Arch) {
4221 aliased := false
4222 for _, a := range archs {
4223 if b, ok := intrinsics[intrinsicKey{a, pkg2, fn2}]; ok {
4224 intrinsics[intrinsicKey{a, pkg, fn}] = b
4225 aliased = true
4226 }
4227 }
4228 if !aliased {
4229 panic(fmt.Sprintf("attempted to alias undefined intrinsic: %s.%s", pkg, fn))
4230 }
4231 }

	/******** runtime ********/
	if !base.Flag.Cfg.Instrumenting {
		add("runtime", "slicebytetostringtmp",
			func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
				// Compiler frontend optimizations emit OBYTES2STRTMP nodes
				// for the backend instead of slicebytetostringtmp calls
				// when not instrumenting.
				return s.newValue2(ssa.OpStringMake, n.Type(), args[0], args[1])
			},
			all...)
	}
4244 addF("runtime/internal/math", "MulUintptr",
4245 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4246 if s.config.PtrSize == 4 {
4247 return s.newValue2(ssa.OpMul32uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
4248 }
4249 return s.newValue2(ssa.OpMul64uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
4250 },
4251 sys.AMD64, sys.I386, sys.Loong64, sys.MIPS64, sys.RISCV64, sys.ARM64)
4252 add("runtime", "KeepAlive",
4253 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4254 data := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, args[0])
4255 s.vars[memVar] = s.newValue2(ssa.OpKeepAlive, types.TypeMem, data, s.mem())
4256 return nil
4257 },
4258 all...)
4259 add("runtime", "getclosureptr",
4260 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4261 return s.newValue0(ssa.OpGetClosurePtr, s.f.Config.Types.Uintptr)
4262 },
4263 all...)
4264
4265 add("runtime", "getcallerpc",
4266 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4267 return s.newValue0(ssa.OpGetCallerPC, s.f.Config.Types.Uintptr)
4268 },
4269 all...)
4270
4271 add("runtime", "getcallersp",
4272 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4273 return s.newValue1(ssa.OpGetCallerSP, s.f.Config.Types.Uintptr, s.mem())
4274 },
4275 all...)
4276
4277 addF("runtime", "publicationBarrier",
4278 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4279 s.vars[memVar] = s.newValue1(ssa.OpPubBarrier, types.TypeMem, s.mem())
4280 return nil
4281 },
4282 sys.ARM64, sys.PPC64, sys.RISCV64)
4283
4284 brev_arch := []sys.ArchFamily{sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X}
	if buildcfg.GOPPC64 >= 10 {
		// Use only on Power10 as the new byte reverse instructions that
		// Power10 provide make it worthwhile as an intrinsic.
		brev_arch = append(brev_arch, sys.PPC64)
	}

	/******** runtime/internal/sys ********/
	addF("runtime/internal/sys", "Bswap32",
4292 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4293 return s.newValue1(ssa.OpBswap32, types.Types[types.TUINT32], args[0])
4294 },
4295 brev_arch...)
4296 addF("runtime/internal/sys", "Bswap64",
4297 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4298 return s.newValue1(ssa.OpBswap64, types.Types[types.TUINT64], args[0])
4299 },
4300 brev_arch...)
4301
4302
4303 makePrefetchFunc := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4304 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4305 s.vars[memVar] = s.newValue2(op, types.TypeMem, args[0], s.mem())
4306 return nil
4307 }
4308 }

	// Make Prefetch intrinsics for supported platforms.
	// On the unsupported platforms the stub function will be eliminated.
	addF("runtime/internal/sys", "Prefetch", makePrefetchFunc(ssa.OpPrefetchCache),
		sys.AMD64, sys.ARM64, sys.PPC64)
4314 addF("runtime/internal/sys", "PrefetchStreamed", makePrefetchFunc(ssa.OpPrefetchCacheStreamed),
4315 sys.AMD64, sys.ARM64, sys.PPC64)

	/******** internal/runtime/atomic ********/
	addF("internal/runtime/atomic", "Load",
4319 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4320 v := s.newValue2(ssa.OpAtomicLoad32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
4321 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4322 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4323 },
4324 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4325 addF("internal/runtime/atomic", "Load8",
4326 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4327 v := s.newValue2(ssa.OpAtomicLoad8, types.NewTuple(types.Types[types.TUINT8], types.TypeMem), args[0], s.mem())
4328 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4329 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT8], v)
4330 },
4331 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4332 addF("internal/runtime/atomic", "Load64",
4333 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4334 v := s.newValue2(ssa.OpAtomicLoad64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
4335 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4336 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4337 },
4338 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4339 addF("internal/runtime/atomic", "LoadAcq",
4340 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4341 v := s.newValue2(ssa.OpAtomicLoadAcq32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
4342 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4343 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4344 },
4345 sys.PPC64, sys.S390X)
4346 addF("internal/runtime/atomic", "LoadAcq64",
4347 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4348 v := s.newValue2(ssa.OpAtomicLoadAcq64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
4349 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4350 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4351 },
4352 sys.PPC64)
4353 addF("internal/runtime/atomic", "Loadp",
4354 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4355 v := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(s.f.Config.Types.BytePtr, types.TypeMem), args[0], s.mem())
4356 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4357 return s.newValue1(ssa.OpSelect0, s.f.Config.Types.BytePtr, v)
4358 },
4359 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4360
4361 addF("internal/runtime/atomic", "Store",
4362 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4363 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore32, types.TypeMem, args[0], args[1], s.mem())
4364 return nil
4365 },
4366 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4367 addF("internal/runtime/atomic", "Store8",
4368 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4369 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore8, types.TypeMem, args[0], args[1], s.mem())
4370 return nil
4371 },
4372 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4373 addF("internal/runtime/atomic", "Store64",
4374 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4375 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore64, types.TypeMem, args[0], args[1], s.mem())
4376 return nil
4377 },
4378 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4379 addF("internal/runtime/atomic", "StorepNoWB",
4380 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4381 s.vars[memVar] = s.newValue3(ssa.OpAtomicStorePtrNoWB, types.TypeMem, args[0], args[1], s.mem())
4382 return nil
4383 },
4384 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.RISCV64, sys.S390X)
4385 addF("internal/runtime/atomic", "StoreRel",
4386 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4387 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel32, types.TypeMem, args[0], args[1], s.mem())
4388 return nil
4389 },
4390 sys.PPC64, sys.S390X)
4391 addF("internal/runtime/atomic", "StoreRel64",
4392 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4393 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel64, types.TypeMem, args[0], args[1], s.mem())
4394 return nil
4395 },
4396 sys.PPC64)
4397
4398 addF("internal/runtime/atomic", "Xchg",
4399 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4400 v := s.newValue3(ssa.OpAtomicExchange32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
4401 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4402 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4403 },
4404 sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4405 addF("internal/runtime/atomic", "Xchg64",
4406 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4407 v := s.newValue3(ssa.OpAtomicExchange64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
4408 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4409 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4410 },
4411 sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4412
4413 type atomicOpEmitter func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind)
4414
4415 makeAtomicGuardedIntrinsicARM64 := func(op0, op1 ssa.Op, typ types.Kind, emit atomicOpEmitter) intrinsicBuilder {
4416
4417 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4418 if buildcfg.GOARM64.LSE {
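				// The target guarantees LSE atomics, so emit the
				// one-instruction variant unconditionally.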
4419 emit(s, n, args, op1, typ)
4420 } else {
				// Target atomic feature is identified by dynamic detection.
				addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARM64HasATOMICS, s.sb)
4423 v := s.load(types.Types[types.TBOOL], addr)
4424 b := s.endBlock()
4425 b.Kind = ssa.BlockIf
4426 b.SetControl(v)
4427 bTrue := s.f.NewBlock(ssa.BlockPlain)
4428 bFalse := s.f.NewBlock(ssa.BlockPlain)
4429 bEnd := s.f.NewBlock(ssa.BlockPlain)
4430 b.AddEdgeTo(bTrue)
4431 b.AddEdgeTo(bFalse)
4432 b.Likely = ssa.BranchLikely

				// We have the atomic instructions - use them directly.
				s.startBlock(bTrue)
				emit(s, n, args, op1, typ)
				s.endBlock().AddEdgeTo(bEnd)

				// Use the original (non-LSE) instruction sequence.
				s.startBlock(bFalse)
				emit(s, n, args, op0, typ)
				s.endBlock().AddEdgeTo(bEnd)

				// Merge results.
				s.startBlock(bEnd)
4446 }
4447 if typ == types.TNIL {
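			// Emitters that produce no result value use TNIL as a sentinel.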
4448 return nil
4449 } else {
4450 return s.variable(n, types.Types[typ])
4451 }
4452 }
4453 }
4454
4455 atomicEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4456 v := s.newValue3(op, types.NewTuple(types.Types[typ], types.TypeMem), args[0], args[1], s.mem())
4457 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4458 s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
4459 }
4460 addF("internal/runtime/atomic", "Xchg",
4461 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange32, ssa.OpAtomicExchange32Variant, types.TUINT32, atomicEmitterARM64),
4462 sys.ARM64)
4463 addF("internal/runtime/atomic", "Xchg64",
4464 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange64, ssa.OpAtomicExchange64Variant, types.TUINT64, atomicEmitterARM64),
4465 sys.ARM64)
4466
4467 addF("internal/runtime/atomic", "Xadd",
4468 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4469 v := s.newValue3(ssa.OpAtomicAdd32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
4470 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4471 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4472 },
4473 sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4474 addF("internal/runtime/atomic", "Xadd64",
4475 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4476 v := s.newValue3(ssa.OpAtomicAdd64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
4477 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4478 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4479 },
4480 sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4481
4482 addF("internal/runtime/atomic", "Xadd",
4483 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd32, ssa.OpAtomicAdd32Variant, types.TUINT32, atomicEmitterARM64),
4484 sys.ARM64)
4485 addF("internal/runtime/atomic", "Xadd64",
4486 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd64, ssa.OpAtomicAdd64Variant, types.TUINT64, atomicEmitterARM64),
4487 sys.ARM64)
4488
4489 addF("internal/runtime/atomic", "Cas",
4490 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4491 v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4492 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4493 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4494 },
4495 sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4496 addF("internal/runtime/atomic", "Cas64",
4497 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4498 v := s.newValue4(ssa.OpAtomicCompareAndSwap64, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4499 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4500 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4501 },
4502 sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4503 addF("internal/runtime/atomic", "CasRel",
4504 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4505 v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4506 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4507 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4508 },
4509 sys.PPC64)
4510
4511 atomicCasEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4512 v := s.newValue4(op, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4513 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4514 s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
4515 }
4516
4517 addF("internal/runtime/atomic", "Cas",
4518 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap32, ssa.OpAtomicCompareAndSwap32Variant, types.TBOOL, atomicCasEmitterARM64),
4519 sys.ARM64)
4520 addF("internal/runtime/atomic", "Cas64",
4521 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap64, ssa.OpAtomicCompareAndSwap64Variant, types.TBOOL, atomicCasEmitterARM64),
4522 sys.ARM64)
4523
4524 addF("internal/runtime/atomic", "And8",
4525 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4526 s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd8, types.TypeMem, args[0], args[1], s.mem())
4527 return nil
4528 },
4529 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4530 addF("internal/runtime/atomic", "And",
4531 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4532 s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd32, types.TypeMem, args[0], args[1], s.mem())
4533 return nil
4534 },
4535 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4536 addF("internal/runtime/atomic", "Or8",
4537 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4538 s.vars[memVar] = s.newValue3(ssa.OpAtomicOr8, types.TypeMem, args[0], args[1], s.mem())
4539 return nil
4540 },
4541 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4542 addF("internal/runtime/atomic", "Or",
4543 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4544 s.vars[memVar] = s.newValue3(ssa.OpAtomicOr32, types.TypeMem, args[0], args[1], s.mem())
4545 return nil
4546 },
4547 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4548
4549 addF("internal/runtime/atomic", "And8",
4550 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd8, ssa.OpAtomicAnd8Variant, types.TUINT8, atomicEmitterARM64),
4551 sys.ARM64)
4552 addF("internal/runtime/atomic", "Or8",
4553 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr8, ssa.OpAtomicOr8Variant, types.TUINT8, atomicEmitterARM64),
4554 sys.ARM64)
4555 addF("internal/runtime/atomic", "And64",
4556 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd64, ssa.OpAtomicAnd64Variant, types.TUINT64, atomicEmitterARM64),
4557 sys.ARM64)
4558 addF("internal/runtime/atomic", "And32",
4559 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd32, ssa.OpAtomicAnd32Variant, types.TUINT32, atomicEmitterARM64),
4560 sys.ARM64)
4561 addF("internal/runtime/atomic", "And",
4562 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd32, ssa.OpAtomicAnd32Variant, types.TUINT32, atomicEmitterARM64),
4563 sys.ARM64)
4564 addF("internal/runtime/atomic", "Or64",
4565 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr64, ssa.OpAtomicOr64Variant, types.TUINT64, atomicEmitterARM64),
4566 sys.ARM64)
4567 addF("internal/runtime/atomic", "Or32",
4568 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr32, ssa.OpAtomicOr32Variant, types.TUINT32, atomicEmitterARM64),
4569 sys.ARM64)
4570 addF("internal/runtime/atomic", "Or",
4571 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr32, ssa.OpAtomicOr32Variant, types.TUINT32, atomicEmitterARM64),
4572 sys.ARM64)
4573

	// Aliases for atomic load operations.
	alias("internal/runtime/atomic", "Loadint32", "internal/runtime/atomic", "Load", all...)
4576 alias("internal/runtime/atomic", "Loadint64", "internal/runtime/atomic", "Load64", all...)
4577 alias("internal/runtime/atomic", "Loaduintptr", "internal/runtime/atomic", "Load", p4...)
4578 alias("internal/runtime/atomic", "Loaduintptr", "internal/runtime/atomic", "Load64", p8...)
4579 alias("internal/runtime/atomic", "Loaduint", "internal/runtime/atomic", "Load", p4...)
4580 alias("internal/runtime/atomic", "Loaduint", "internal/runtime/atomic", "Load64", p8...)
4581 alias("internal/runtime/atomic", "LoadAcq", "internal/runtime/atomic", "Load", lwatomics...)
4582 alias("internal/runtime/atomic", "LoadAcq64", "internal/runtime/atomic", "Load64", lwatomics...)
4583 alias("internal/runtime/atomic", "LoadAcquintptr", "internal/runtime/atomic", "LoadAcq", p4...)
4584 alias("sync", "runtime_LoadAcquintptr", "internal/runtime/atomic", "LoadAcq", p4...)
4585 alias("internal/runtime/atomic", "LoadAcquintptr", "internal/runtime/atomic", "LoadAcq64", p8...)
4586 alias("sync", "runtime_LoadAcquintptr", "internal/runtime/atomic", "LoadAcq64", p8...)

	// Aliases for atomic store operations.
	alias("internal/runtime/atomic", "Storeint32", "internal/runtime/atomic", "Store", all...)
4590 alias("internal/runtime/atomic", "Storeint64", "internal/runtime/atomic", "Store64", all...)
4591 alias("internal/runtime/atomic", "Storeuintptr", "internal/runtime/atomic", "Store", p4...)
4592 alias("internal/runtime/atomic", "Storeuintptr", "internal/runtime/atomic", "Store64", p8...)
4593 alias("internal/runtime/atomic", "StoreRel", "internal/runtime/atomic", "Store", lwatomics...)
4594 alias("internal/runtime/atomic", "StoreRel64", "internal/runtime/atomic", "Store64", lwatomics...)
4595 alias("internal/runtime/atomic", "StoreReluintptr", "internal/runtime/atomic", "StoreRel", p4...)
4596 alias("sync", "runtime_StoreReluintptr", "internal/runtime/atomic", "StoreRel", p4...)
4597 alias("internal/runtime/atomic", "StoreReluintptr", "internal/runtime/atomic", "StoreRel64", p8...)
4598 alias("sync", "runtime_StoreReluintptr", "internal/runtime/atomic", "StoreRel64", p8...)

	// Aliases for atomic swap operations.
	alias("internal/runtime/atomic", "Xchgint32", "internal/runtime/atomic", "Xchg", all...)
4602 alias("internal/runtime/atomic", "Xchgint64", "internal/runtime/atomic", "Xchg64", all...)
4603 alias("internal/runtime/atomic", "Xchguintptr", "internal/runtime/atomic", "Xchg", p4...)
4604 alias("internal/runtime/atomic", "Xchguintptr", "internal/runtime/atomic", "Xchg64", p8...)

	// Aliases for atomic add operations.
	alias("internal/runtime/atomic", "Xaddint32", "internal/runtime/atomic", "Xadd", all...)
4608 alias("internal/runtime/atomic", "Xaddint64", "internal/runtime/atomic", "Xadd64", all...)
4609 alias("internal/runtime/atomic", "Xadduintptr", "internal/runtime/atomic", "Xadd", p4...)
4610 alias("internal/runtime/atomic", "Xadduintptr", "internal/runtime/atomic", "Xadd64", p8...)

	// Aliases for atomic CAS operations.
	alias("internal/runtime/atomic", "Casint32", "internal/runtime/atomic", "Cas", all...)
4614 alias("internal/runtime/atomic", "Casint64", "internal/runtime/atomic", "Cas64", all...)
4615 alias("internal/runtime/atomic", "Casuintptr", "internal/runtime/atomic", "Cas", p4...)
4616 alias("internal/runtime/atomic", "Casuintptr", "internal/runtime/atomic", "Cas64", p8...)
4617 alias("internal/runtime/atomic", "Casp1", "internal/runtime/atomic", "Cas", p4...)
4618 alias("internal/runtime/atomic", "Casp1", "internal/runtime/atomic", "Cas64", p8...)
4619 alias("internal/runtime/atomic", "CasRel", "internal/runtime/atomic", "Cas", lwatomics...)

	// Aliases for atomic And/Or operations.
	alias("internal/runtime/atomic", "Anduintptr", "internal/runtime/atomic", "And64", sys.ArchARM64)
4623 alias("internal/runtime/atomic", "Oruintptr", "internal/runtime/atomic", "Or64", sys.ArchARM64)

	/******** math ********/
	addF("math", "sqrt",
4627 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4628 return s.newValue1(ssa.OpSqrt, types.Types[types.TFLOAT64], args[0])
4629 },
4630 sys.I386, sys.AMD64, sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
4631 addF("math", "Trunc",
4632 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4633 return s.newValue1(ssa.OpTrunc, types.Types[types.TFLOAT64], args[0])
4634 },
4635 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4636 addF("math", "Ceil",
4637 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4638 return s.newValue1(ssa.OpCeil, types.Types[types.TFLOAT64], args[0])
4639 },
4640 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4641 addF("math", "Floor",
4642 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4643 return s.newValue1(ssa.OpFloor, types.Types[types.TFLOAT64], args[0])
4644 },
4645 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4646 addF("math", "Round",
4647 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4648 return s.newValue1(ssa.OpRound, types.Types[types.TFLOAT64], args[0])
4649 },
4650 sys.ARM64, sys.PPC64, sys.S390X)
4651 addF("math", "RoundToEven",
4652 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4653 return s.newValue1(ssa.OpRoundToEven, types.Types[types.TFLOAT64], args[0])
4654 },
4655 sys.ARM64, sys.S390X, sys.Wasm)
4656 addF("math", "Abs",
4657 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4658 return s.newValue1(ssa.OpAbs, types.Types[types.TFLOAT64], args[0])
4659 },
4660 sys.ARM64, sys.ARM, sys.PPC64, sys.RISCV64, sys.Wasm, sys.MIPS, sys.MIPS64)
4661 addF("math", "Copysign",
4662 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4663 return s.newValue2(ssa.OpCopysign, types.Types[types.TFLOAT64], args[0], args[1])
4664 },
4665 sys.PPC64, sys.RISCV64, sys.Wasm)
4666 addF("math", "FMA",
4667 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4668 return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4669 },
4670 sys.ARM64, sys.PPC64, sys.RISCV64, sys.S390X)
4671 addF("math", "FMA",
4672 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4673 if !s.config.UseFMA {
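			// FMA is disabled for this compilation; emit a call to the
			// pure Go math.FMA fallback instead of the instruction.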
4674 s.vars[n] = s.callResult(n, callNormal)
4675 return s.variable(n, types.Types[types.TFLOAT64])
4676 }
4677
4678 if buildcfg.GOAMD64 >= 3 {
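			// GOAMD64=v3 and above guarantees hardware FMA, so no
			// runtime feature check is needed.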
4679 return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4680 }
4681
4682 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasFMA)
4683 b := s.endBlock()
4684 b.Kind = ssa.BlockIf
4685 b.SetControl(v)
4686 bTrue := s.f.NewBlock(ssa.BlockPlain)
4687 bFalse := s.f.NewBlock(ssa.BlockPlain)
4688 bEnd := s.f.NewBlock(ssa.BlockPlain)
4689 b.AddEdgeTo(bTrue)
4690 b.AddEdgeTo(bFalse)
4691 b.Likely = ssa.BranchLikely
4692
4693
4694 s.startBlock(bTrue)
4695 s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4696 s.endBlock().AddEdgeTo(bEnd)
4697
4698
4699 s.startBlock(bFalse)
4700 s.vars[n] = s.callResult(n, callNormal)
4701 s.endBlock().AddEdgeTo(bEnd)
4702
4703
4704 s.startBlock(bEnd)
4705 return s.variable(n, types.Types[types.TFLOAT64])
4706 },
4707 sys.AMD64)
4708 addF("math", "FMA",
4709 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4710 if !s.config.UseFMA {
4711 s.vars[n] = s.callResult(n, callNormal)
4712 return s.variable(n, types.Types[types.TFLOAT64])
4713 }
4714 addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARMHasVFPv4, s.sb)
4715 v := s.load(types.Types[types.TBOOL], addr)
4716 b := s.endBlock()
4717 b.Kind = ssa.BlockIf
4718 b.SetControl(v)
4719 bTrue := s.f.NewBlock(ssa.BlockPlain)
4720 bFalse := s.f.NewBlock(ssa.BlockPlain)
4721 bEnd := s.f.NewBlock(ssa.BlockPlain)
4722 b.AddEdgeTo(bTrue)
4723 b.AddEdgeTo(bFalse)
4724 b.Likely = ssa.BranchLikely
4725
4726
4727 s.startBlock(bTrue)
4728 s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4729 s.endBlock().AddEdgeTo(bEnd)
4730
4731
4732 s.startBlock(bFalse)
4733 s.vars[n] = s.callResult(n, callNormal)
4734 s.endBlock().AddEdgeTo(bEnd)
4735
4736
4737 s.startBlock(bEnd)
4738 return s.variable(n, types.Types[types.TFLOAT64])
4739 },
4740 sys.ARM)
4741
4742 makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4743 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4744 if buildcfg.GOAMD64 >= 2 {
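			// GOAMD64=v2 and above guarantees the SSE4.1 rounding
			// instructions, so no runtime feature check is needed.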
4745 return s.newValue1(op, types.Types[types.TFLOAT64], args[0])
4746 }
4747
4748 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasSSE41)
4749 b := s.endBlock()
4750 b.Kind = ssa.BlockIf
4751 b.SetControl(v)
4752 bTrue := s.f.NewBlock(ssa.BlockPlain)
4753 bFalse := s.f.NewBlock(ssa.BlockPlain)
4754 bEnd := s.f.NewBlock(ssa.BlockPlain)
4755 b.AddEdgeTo(bTrue)
4756 b.AddEdgeTo(bFalse)
4757 b.Likely = ssa.BranchLikely
4758
4759
4760 s.startBlock(bTrue)
4761 s.vars[n] = s.newValue1(op, types.Types[types.TFLOAT64], args[0])
4762 s.endBlock().AddEdgeTo(bEnd)
4763
4764
4765 s.startBlock(bFalse)
4766 s.vars[n] = s.callResult(n, callNormal)
4767 s.endBlock().AddEdgeTo(bEnd)
4768
4769
4770 s.startBlock(bEnd)
4771 return s.variable(n, types.Types[types.TFLOAT64])
4772 }
4773 }
4774 addF("math", "RoundToEven",
4775 makeRoundAMD64(ssa.OpRoundToEven),
4776 sys.AMD64)
4777 addF("math", "Floor",
4778 makeRoundAMD64(ssa.OpFloor),
4779 sys.AMD64)
4780 addF("math", "Ceil",
4781 makeRoundAMD64(ssa.OpCeil),
4782 sys.AMD64)
4783 addF("math", "Trunc",
4784 makeRoundAMD64(ssa.OpTrunc),
4785 sys.AMD64)

	/******** math/bits ********/
	addF("math/bits", "TrailingZeros64",
4789 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4790 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], args[0])
4791 },
4792 sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4793 addF("math/bits", "TrailingZeros32",
4794 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4795 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], args[0])
4796 },
4797 sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4798 addF("math/bits", "TrailingZeros16",
4799 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4800 x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
4801 c := s.constInt32(types.Types[types.TUINT32], 1<<16)
4802 y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
4803 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
4804 },
4805 sys.MIPS)
4806 addF("math/bits", "TrailingZeros16",
4807 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4808 return s.newValue1(ssa.OpCtz16, types.Types[types.TINT], args[0])
4809 },
4810 sys.AMD64, sys.I386, sys.ARM, sys.ARM64, sys.Wasm)
4811 addF("math/bits", "TrailingZeros16",
4812 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4813 x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
4814 c := s.constInt64(types.Types[types.TUINT64], 1<<16)
4815 y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
4816 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
4817 },
4818 sys.S390X, sys.PPC64)
4819 addF("math/bits", "TrailingZeros8",
4820 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4821 x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
4822 c := s.constInt32(types.Types[types.TUINT32], 1<<8)
4823 y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
4824 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
4825 },
4826 sys.MIPS)
4827 addF("math/bits", "TrailingZeros8",
4828 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4829 return s.newValue1(ssa.OpCtz8, types.Types[types.TINT], args[0])
4830 },
4831 sys.AMD64, sys.I386, sys.ARM, sys.ARM64, sys.Wasm)
4832 addF("math/bits", "TrailingZeros8",
4833 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4834 x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
4835 c := s.constInt64(types.Types[types.TUINT64], 1<<8)
4836 y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
4837 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
4838 },
4839 sys.S390X)
4840 alias("math/bits", "ReverseBytes64", "runtime/internal/sys", "Bswap64", all...)
4841 alias("math/bits", "ReverseBytes32", "runtime/internal/sys", "Bswap32", all...)

	// ReverseBytes inlines correctly, no need to intrinsify it.
	// Nothing special is needed for targets where ReverseBytes16 lowers to a rotate.
	// On Power10, a 16-bit rotate is not available, so use the BRH instruction.
	if buildcfg.GOPPC64 >= 10 {
4846 addF("math/bits", "ReverseBytes16",
4847 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4848 return s.newValue1(ssa.OpBswap16, types.Types[types.TUINT], args[0])
4849 },
4850 sys.PPC64)
4851 }
4852
4853 addF("math/bits", "Len64",
4854 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4855 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
4856 },
4857 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4858 addF("math/bits", "Len32",
4859 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4860 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4861 },
4862 sys.AMD64, sys.ARM64, sys.PPC64)
4863 addF("math/bits", "Len32",
4864 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4865 if s.config.PtrSize == 4 {
4866 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4867 }
4868 x := s.newValue1(ssa.OpZeroExt32to64, types.Types[types.TUINT64], args[0])
4869 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4870 },
4871 sys.ARM, sys.S390X, sys.MIPS, sys.Wasm)
4872 addF("math/bits", "Len16",
4873 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4874 if s.config.PtrSize == 4 {
4875 x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
4876 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
4877 }
4878 x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
4879 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4880 },
4881 sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4882 addF("math/bits", "Len16",
4883 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4884 return s.newValue1(ssa.OpBitLen16, types.Types[types.TINT], args[0])
4885 },
4886 sys.AMD64)
4887 addF("math/bits", "Len8",
4888 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4889 if s.config.PtrSize == 4 {
4890 x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
4891 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
4892 }
4893 x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
4894 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4895 },
4896 sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4897 addF("math/bits", "Len8",
4898 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4899 return s.newValue1(ssa.OpBitLen8, types.Types[types.TINT], args[0])
4900 },
4901 sys.AMD64)
4902 addF("math/bits", "Len",
4903 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4904 if s.config.PtrSize == 4 {
4905 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4906 }
4907 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
4908 },
4909 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4910
4911 addF("math/bits", "Reverse64",
4912 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4913 return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
4914 },
4915 sys.ARM64)
4916 addF("math/bits", "Reverse32",
4917 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4918 return s.newValue1(ssa.OpBitRev32, types.Types[types.TINT], args[0])
4919 },
4920 sys.ARM64)
4921 addF("math/bits", "Reverse16",
4922 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4923 return s.newValue1(ssa.OpBitRev16, types.Types[types.TINT], args[0])
4924 },
4925 sys.ARM64)
4926 addF("math/bits", "Reverse8",
4927 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4928 return s.newValue1(ssa.OpBitRev8, types.Types[types.TINT], args[0])
4929 },
4930 sys.ARM64)
4931 addF("math/bits", "Reverse",
4932 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4933 return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
4934 },
4935 sys.ARM64)
4936 addF("math/bits", "RotateLeft8",
4937 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4938 return s.newValue2(ssa.OpRotateLeft8, types.Types[types.TUINT8], args[0], args[1])
4939 },
4940 sys.AMD64, sys.RISCV64)
4941 addF("math/bits", "RotateLeft16",
4942 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4943 return s.newValue2(ssa.OpRotateLeft16, types.Types[types.TUINT16], args[0], args[1])
4944 },
4945 sys.AMD64, sys.RISCV64)
4946 addF("math/bits", "RotateLeft32",
4947 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4948 return s.newValue2(ssa.OpRotateLeft32, types.Types[types.TUINT32], args[0], args[1])
4949 },
4950 sys.AMD64, sys.ARM, sys.ARM64, sys.Loong64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
4951 addF("math/bits", "RotateLeft64",
4952 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4953 return s.newValue2(ssa.OpRotateLeft64, types.Types[types.TUINT64], args[0], args[1])
4954 },
4955 sys.AMD64, sys.ARM64, sys.Loong64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
4956 alias("math/bits", "RotateLeft", "math/bits", "RotateLeft64", p8...)
4957
4958 makeOnesCountAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4959 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4960 if buildcfg.GOAMD64 >= 2 {
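			// GOAMD64=v2 and above guarantees POPCNT, so no runtime
			// feature check is needed.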
4961 return s.newValue1(op, types.Types[types.TINT], args[0])
4962 }
4963
4964 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasPOPCNT)
4965 b := s.endBlock()
4966 b.Kind = ssa.BlockIf
4967 b.SetControl(v)
4968 bTrue := s.f.NewBlock(ssa.BlockPlain)
4969 bFalse := s.f.NewBlock(ssa.BlockPlain)
4970 bEnd := s.f.NewBlock(ssa.BlockPlain)
4971 b.AddEdgeTo(bTrue)
4972 b.AddEdgeTo(bFalse)
4973 b.Likely = ssa.BranchLikely
4974
4975
4976 s.startBlock(bTrue)
4977 s.vars[n] = s.newValue1(op, types.Types[types.TINT], args[0])
4978 s.endBlock().AddEdgeTo(bEnd)
4979
4980
4981 s.startBlock(bFalse)
4982 s.vars[n] = s.callResult(n, callNormal)
4983 s.endBlock().AddEdgeTo(bEnd)
4984
4985
4986 s.startBlock(bEnd)
4987 return s.variable(n, types.Types[types.TINT])
4988 }
4989 }
4990 addF("math/bits", "OnesCount64",
4991 makeOnesCountAMD64(ssa.OpPopCount64),
4992 sys.AMD64)
4993 addF("math/bits", "OnesCount64",
4994 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4995 return s.newValue1(ssa.OpPopCount64, types.Types[types.TINT], args[0])
4996 },
4997 sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
4998 addF("math/bits", "OnesCount32",
4999 makeOnesCountAMD64(ssa.OpPopCount32),
5000 sys.AMD64)
5001 addF("math/bits", "OnesCount32",
5002 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
5003 return s.newValue1(ssa.OpPopCount32, types.Types[types.TINT], args[0])
5004 },
5005 sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
5006 addF("math/bits", "OnesCount16",
5007 makeOnesCountAMD64(ssa.OpPopCount16),
5008 sys.AMD64)
5009 addF("math/bits", "OnesCount16",
5010 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
5011 return s.newValue1(ssa.OpPopCount16, types.Types[types.TINT], args[0])
5012 },
5013 sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm)
5014 addF("math/bits", "OnesCount8",
5015 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
5016 return s.newValue1(ssa.OpPopCount8, types.Types[types.TINT], args[0])
5017 },
5018 sys.S390X, sys.PPC64, sys.Wasm)
5019 addF("math/bits", "OnesCount",
5020 makeOnesCountAMD64(ssa.OpPopCount64),
5021 sys.AMD64)
5022 addF("math/bits", "Mul64",
5023 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
5024 return s.newValue2(ssa.OpMul64uhilo, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1])
5025 },
5026 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.MIPS64, sys.RISCV64, sys.Loong64)
5027 alias("math/bits", "Mul", "math/bits", "Mul64", p8...)
5028 alias("runtime/internal/math", "Mul64", "math/bits", "Mul64", p8...)
5029 addF("math/bits", "Add64",
5030 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
5031 return s.newValue3(ssa.OpAdd64carry, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
5032 },
5033 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.RISCV64, sys.Loong64, sys.MIPS64)
5034 alias("math/bits", "Add", "math/bits", "Add64", p8...)
5035 alias("runtime/internal/math", "Add64", "math/bits", "Add64", all...)
5036 addF("math/bits", "Sub64",
5037 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
5038 return s.newValue3(ssa.OpSub64borrow, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
5039 },
5040 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.RISCV64, sys.Loong64, sys.MIPS64)
5041 alias("math/bits", "Sub", "math/bits", "Sub64", p8...)
5042 addF("math/bits", "Div64",
5043 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Check for divide-by-zero/overflow and panic with the
			// appropriate message.
			cmpZero := s.newValue2(s.ssaOp(ir.ONE, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[2], s.zeroVal(types.Types[types.TUINT64]))
			s.check(cmpZero, ir.Syms.Panicdivide)
			cmpOverflow := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[0], args[2])
			s.check(cmpOverflow, ir.Syms.Panicoverflow)
5049 return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
5050 },
5051 sys.AMD64)
5052 alias("math/bits", "Div", "math/bits", "Div64", sys.ArchAMD64)
5053
5054 alias("runtime/internal/sys", "TrailingZeros8", "math/bits", "TrailingZeros8", all...)
5055 alias("runtime/internal/sys", "TrailingZeros32", "math/bits", "TrailingZeros32", all...)
5056 alias("runtime/internal/sys", "TrailingZeros64", "math/bits", "TrailingZeros64", all...)
5057 alias("runtime/internal/sys", "Len8", "math/bits", "Len8", all...)
5058 alias("runtime/internal/sys", "Len64", "math/bits", "Len64", all...)
5059 alias("runtime/internal/sys", "OnesCount64", "math/bits", "OnesCount64", all...)

	/******** sync/atomic ********/

	// Note: these are disabled by flag_race in findIntrinsic below.
	alias("sync/atomic", "LoadInt32", "internal/runtime/atomic", "Load", all...)
5065 alias("sync/atomic", "LoadInt64", "internal/runtime/atomic", "Load64", all...)
5066 alias("sync/atomic", "LoadPointer", "internal/runtime/atomic", "Loadp", all...)
5067 alias("sync/atomic", "LoadUint32", "internal/runtime/atomic", "Load", all...)
5068 alias("sync/atomic", "LoadUint64", "internal/runtime/atomic", "Load64", all...)
5069 alias("sync/atomic", "LoadUintptr", "internal/runtime/atomic", "Load", p4...)
5070 alias("sync/atomic", "LoadUintptr", "internal/runtime/atomic", "Load64", p8...)
5071
5072 alias("sync/atomic", "StoreInt32", "internal/runtime/atomic", "Store", all...)
5073 alias("sync/atomic", "StoreInt64", "internal/runtime/atomic", "Store64", all...)

	// Note: not StorePointer, that needs a write barrier. Same below for {CompareAnd}Swap.
	alias("sync/atomic", "StoreUint32", "internal/runtime/atomic", "Store", all...)
5076 alias("sync/atomic", "StoreUint64", "internal/runtime/atomic", "Store64", all...)
5077 alias("sync/atomic", "StoreUintptr", "internal/runtime/atomic", "Store", p4...)
5078 alias("sync/atomic", "StoreUintptr", "internal/runtime/atomic", "Store64", p8...)
5079
5080 alias("sync/atomic", "SwapInt32", "internal/runtime/atomic", "Xchg", all...)
5081 alias("sync/atomic", "SwapInt64", "internal/runtime/atomic", "Xchg64", all...)
5082 alias("sync/atomic", "SwapUint32", "internal/runtime/atomic", "Xchg", all...)
5083 alias("sync/atomic", "SwapUint64", "internal/runtime/atomic", "Xchg64", all...)
5084 alias("sync/atomic", "SwapUintptr", "internal/runtime/atomic", "Xchg", p4...)
5085 alias("sync/atomic", "SwapUintptr", "internal/runtime/atomic", "Xchg64", p8...)
5086
5087 alias("sync/atomic", "CompareAndSwapInt32", "internal/runtime/atomic", "Cas", all...)
5088 alias("sync/atomic", "CompareAndSwapInt64", "internal/runtime/atomic", "Cas64", all...)
5089 alias("sync/atomic", "CompareAndSwapUint32", "internal/runtime/atomic", "Cas", all...)
5090 alias("sync/atomic", "CompareAndSwapUint64", "internal/runtime/atomic", "Cas64", all...)
5091 alias("sync/atomic", "CompareAndSwapUintptr", "internal/runtime/atomic", "Cas", p4...)
5092 alias("sync/atomic", "CompareAndSwapUintptr", "internal/runtime/atomic", "Cas64", p8...)
5093
5094 alias("sync/atomic", "AddInt32", "internal/runtime/atomic", "Xadd", all...)
5095 alias("sync/atomic", "AddInt64", "internal/runtime/atomic", "Xadd64", all...)
5096 alias("sync/atomic", "AddUint32", "internal/runtime/atomic", "Xadd", all...)
5097 alias("sync/atomic", "AddUint64", "internal/runtime/atomic", "Xadd64", all...)
5098 alias("sync/atomic", "AddUintptr", "internal/runtime/atomic", "Xadd", p4...)
5099 alias("sync/atomic", "AddUintptr", "internal/runtime/atomic", "Xadd64", p8...)
5100
5101 alias("sync/atomic", "AndInt32", "internal/runtime/atomic", "And32", sys.ArchARM64)
5102 alias("sync/atomic", "AndUint32", "internal/runtime/atomic", "And32", sys.ArchARM64)
5103 alias("sync/atomic", "AndInt64", "internal/runtime/atomic", "And64", sys.ArchARM64)
5104 alias("sync/atomic", "AndUint64", "internal/runtime/atomic", "And64", sys.ArchARM64)
5105 alias("sync/atomic", "AndUintptr", "internal/runtime/atomic", "And64", sys.ArchARM64)
5106 alias("sync/atomic", "OrInt32", "internal/runtime/atomic", "Or32", sys.ArchARM64)
5107 alias("sync/atomic", "OrUint32", "internal/runtime/atomic", "Or32", sys.ArchARM64)
5108 alias("sync/atomic", "OrInt64", "internal/runtime/atomic", "Or64", sys.ArchARM64)
5109 alias("sync/atomic", "OrUint64", "internal/runtime/atomic", "Or64", sys.ArchARM64)
5110 alias("sync/atomic", "OrUintptr", "internal/runtime/atomic", "Or64", sys.ArchARM64)

	/******** math/big ********/
	alias("math/big", "mulWW", "math/bits", "Mul64", p8...)
5114 }

// findIntrinsic returns a function which builds the SSA equivalent of the
// intrinsic call; it returns nil if sym is not an intrinsic that can be
// handled via SSA.
func findIntrinsic(sym *types.Sym) intrinsicBuilder {
5119 if sym == nil || sym.Pkg == nil {
5120 return nil
5121 }
5122 pkg := sym.Pkg.Path
5123 if sym.Pkg == ir.Pkgs.Runtime {
5124 pkg = "runtime"
5125 }
	if base.Flag.Race && pkg == "sync/atomic" {
		// The race detector needs to be able to intercept these calls.
		// We can't intrinsify them.
		return nil
	}
	// Skip intrinsifying math functions (which may contain hard-float
	// instructions) when soft-float.
	if Arch.SoftFloat && pkg == "math" {
		return nil
	}
5136
5137 fn := sym.Name
5138 if ssa.IntrinsicsDisable {
		if pkg == "runtime" && (fn == "getcallerpc" || fn == "getcallersp" || fn == "getclosureptr") {
			// These runtime functions don't have definitions, so they
			// must be intrinsics.
5141 } else {
5142 return nil
5143 }
5144 }
5145 return intrinsics[intrinsicKey{Arch.LinkArch.Arch, pkg, fn}]
5146 }
5147
5148 func IsIntrinsicCall(n *ir.CallExpr) bool {
5149 if n == nil {
5150 return false
5151 }
5152 name, ok := n.Fun.(*ir.Name)
5153 if !ok {
5154 return false
5155 }
5156 return findIntrinsic(name.Sym()) != nil
5157 }

// intrinsicCall converts a call to a recognized intrinsic function into the
// intrinsic SSA operation.
func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
5161 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
5162 if ssa.IntrinsicsDebug > 0 {
5163 x := v
5164 if x == nil {
5165 x = s.mem()
5166 }
5167 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
5168 x = x.Args[0]
5169 }
5170 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
5171 }
5172 return v
5173 }

// intrinsicArgs extracts args from n, evaluates them to SSA values, and
// returns them.
func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
5177 args := make([]*ssa.Value, len(n.Args))
5178 for i, n := range n.Args {
5179 args[i] = s.expr(n)
5180 }
5181 return args
5182 }

// openDeferRecord adds code to evaluate and store the function for an
// open-coded defer call, and records info about the defer, so we can
// generate proper code on the exit paths. The defer call is guaranteed
// (and checked below) to take no arguments and produce no results.
func (s *state) openDeferRecord(n *ir.CallExpr) {
5191 if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
5192 s.Fatalf("defer call with arguments or results: %v", n)
5193 }
5194
5195 opendefer := &openDeferInfo{
5196 n: n,
5197 }
	fn := n.Fun
	// We must always store the function value in a stack slot for the
	// runtime panic code to use. But in the defer exit code, we will
	// call the function directly if it is a static function.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
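	// Only record the closure value for indirect calls; a direct call to
	// a static function is made by symbol on the exit path instead.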
5205 if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
5206 opendefer.closure = closure
5207 }
5208 index := len(s.openDefers)
5209 s.openDefers = append(s.openDefers, opendefer)

	// Update deferBits only after evaluation and storage to stack of
	// the function is successful.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
5214 newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
5215 s.vars[deferBitsVar] = newDeferBits
5216 s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
5217 }

// openDeferSave generates SSA nodes to store a value (with type t) for an
// open-coded defer at an explicit autotmp location on the stack, so it can
// be reloaded and used on the exit paths. It returns the address of the
// stack slot.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
5225 if !ssa.CanSSA(t) {
5226 s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
5227 }
5228 if !t.HasPointers() {
5229 s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
5230 }
5231 pos := val.Pos
5232 temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
5233 temp.SetOpenDeferSlot(true)
5234 temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// Use OpVarLive to make sure the stack slot for the closure is not
	// removed by dead-store elimination.
	if s.curBlock.ID != s.f.Entry.ID {
		// Force the temp storing this defer function to be declared in the
		// entry block, so that it will be live for the defer exit code (which
		// will actually access it only if the associated defer call has been
		// activated).
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
5247 } else {
		// Special case if we're still in the entry block. We can't use
		// the above code, since s.defvars[s.f.Entry.ID] isn't defined
		// until we end the entry block with s.endBlock().
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
5256 }

	// Since we may use this temp during exit depending on the
	// deferBits, we must define it unconditionally on entry.
	// Therefore, we must make sure it is zeroed out in the entry
	// block if it contains pointers, else GC may wrongly follow an
	// uninitialized pointer value.
	temp.SetNeedzero(true)

	// We are storing to the stack, hence we can avoid the full checks in
	// storeType() (no write barrier) for this store of val to *addrTemp.
	s.store(t, addrTemp, val)
	return addrTemp
5267 }

// openDeferExit generates SSA for the defer exit code path.
// The code path has a sequence of conditional invocations of the
// recorded open-coded defers, in reverse order.
func (s *state) openDeferExit() {
5274 deferExit := s.f.NewBlock(ssa.BlockPlain)
5275 s.endBlock().AddEdgeTo(deferExit)
5276 s.startBlock(deferExit)
5277 s.lastDeferExit = deferExit
5278 s.lastDeferCount = len(s.openDefers)
5279 zeroval := s.constInt8(types.Types[types.TUINT8], 0)
5280
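	// Run the recorded open-coded defers in reverse (LIFO) order.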
5281 for i := len(s.openDefers) - 1; i >= 0; i-- {
5282 r := s.openDefers[i]
5283 bCond := s.f.NewBlock(ssa.BlockPlain)
5284 bEnd := s.f.NewBlock(ssa.BlockPlain)
5285
		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])

		// Generate code to check if the bit associated with the current
		// defer is set.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
5292 b := s.endBlock()
5293 b.Kind = ssa.BlockIf
5294 b.SetControl(eqVal)
5295 b.AddEdgeTo(bEnd)
5296 b.AddEdgeTo(bCond)
5297 bCond.AddEdgeTo(bEnd)
5298 s.startBlock(bCond)

		// Clear this bit in deferBits and force the store back to the stack,
		// so we will not try to re-run this defer call if it panics.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		// Also, replace it in the immediate function's vars, so it stays in SSA form.
		s.vars[deferBitsVar] = maskedval

		// Generate code to invoke the deferred call, using the closure
		// that was stored at the point of the defer statement.
		fn := r.n.Fun
5313 stksize := fn.Type().ArgWidth()
5314 var callArgs []*ssa.Value
5315 var call *ssa.Value
5316 if r.closure != nil {
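			// The deferred function couldn't be resolved statically; reload
			// the saved closure, nil-check it, and call through its code pointer.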
5317 v := s.load(r.closure.Type.Elem(), r.closure)
5318 s.maybeNilCheckClosure(v, callDefer)
5319 codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
5320 aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
5321 call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
5322 } else {
5323 aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
5324 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5325 }
5326 callArgs = append(callArgs, s.mem())
5327 call.AddArgs(callArgs...)
5328 call.AuxInt = stksize
5329 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Make sure that the stack slots with pointers are kept live
		// through the call (which is a pre-emption point). Also, we will
		// use the first call of the last defer exit to compute liveness
		// for the deferreturn, so we want all stack slots to be live.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}
5337
5338 s.endBlock()
5339 s.startBlock(bEnd)
5340 }
5341 }
5342
5343 func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
5344 return s.call(n, k, false, nil)
5345 }
5346
5347 func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
5348 return s.call(n, k, true, nil)
5349 }

// call generates SSA for the function call n using the specified call type k.
// It returns the address of the return value (or nil if none).
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
5354 s.prevCall = nil
5355 var calleeLSym *obj.LSym
5356 var closure *ssa.Value
5357 var codeptr *ssa.Value
5358 var dextra *ssa.Value
5359 var rcvr *ssa.Value
5360 fn := n.Fun
5361 var ACArgs []*types.Type
5362 var ACResults []*types.Type
5363 var callArgs []*ssa.Value
5364
5365 callABI := s.f.ABIDefault
5366
5367 if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
5368 s.Fatalf("go/defer call with arguments: %v", n)
5369 }
5370
5371 switch n.Op() {
5372 case ir.OCALLFUNC:
5373 if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
5374 fn := fn.(*ir.Name)
5375 calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be
				// a direct call to a non-ABIInternal
				// function. fn.Func may be nil for
				// some compiler-generated functions,
				// but those are all ABIInternal.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// TODO(register args): remove after the register ABI is working.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
5388 inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
5389 if inRegistersImported || inRegistersSamePackage {
5390 callABI = s.f.ABI1
5391 }
5392 }
5393 break
5394 }
5395 closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// A deferred nil function needs to panic when the function
			// is invoked, not at the point of the defer statement.
			s.maybeNilCheckClosure(closure, k)
		}
5401 case ir.OCALLINTER:
5402 if fn.Op() != ir.ODOTINTER {
5403 s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
5404 }
5405 fn := fn.(*ir.SelectorExpr)
5406 var iclosure *ssa.Value
5407 iclosure, rcvr = s.getClosureAndRcvr(fn)
5408 if k == callNormal {
5409 codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
5410 } else {
5411 closure = iclosure
5412 }
5413 }
5414 if deferExtra != nil {
5415 dextra = s.expr(deferExtra)
5416 }
5417
5418 params := callABI.ABIAnalyze(n.Fun.Type(), false) // setNname=false: don't attach register names from the caller's side
5419 types.CalcSize(fn.Type())
5420 stksize := params.ArgWidth()
5421
5422 res := n.Fun.Type().Results()
5423 if k == callNormal || k == callTail {
5424 for _, p := range params.OutParams() {
5425 ACResults = append(ACResults, p.Type)
5426 }
5427 }
5428
5429 var call *ssa.Value
5430 if k == callDeferStack {
5431 if stksize != 0 {
5432 s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
5433 }
5434 // Make a defer struct on the stack.
5435 t := deferstruct()
5436 n, addr := s.temp(n.Pos(), t)
5437 n.SetNonMergeable(true)
5438 s.store(closure.Type,
5439 s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
5440 closure)
5441
5442 // Call runtime.deferprocStack with a pointer to the _defer record.
5443 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
5444 aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
5445 callArgs = append(callArgs, addr, s.mem())
5446 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5447 call.AddArgs(callArgs...)
5448 call.AuxInt = int64(types.PtrSize)
5449 } else {
5450 // Store arguments to stack, including defer/go arguments and receiver
5451 // for method calls. These are written in SP-offset order.
5452 argStart := base.Ctxt.Arch.FixedFrameSize
5453
5454 if k != callNormal && k != callTail {
5455 // Write the closure (the argument to newproc/deferproc).
5456 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
5457 callArgs = append(callArgs, closure)
5458 stksize += int64(types.PtrSize)
5459 argStart += int64(types.PtrSize)
5460 if dextra != nil {
5461 // Extra argument of type any, passed on to deferprocat.
5462 ACArgs = append(ACArgs, types.Types[types.TINTER])
5463 callArgs = append(callArgs, dextra)
5464 stksize += 2 * int64(types.PtrSize)
5465 argStart += 2 * int64(types.PtrSize)
5466 }
5467 }
5468
5469 // Set the receiver (for interface calls).
5470 if rcvr != nil {
5471 callArgs = append(callArgs, rcvr)
5472 }
5473
5474 // Write the call's arguments.
5475 t := n.Fun.Type()
5476 args := n.Args
5477
5478 for _, p := range params.InParams() {
5479 ACArgs = append(ACArgs, p.Type)
5480 }
5481
5482 // Split the entry block if there are open defers, because later calls to
5483 // openDeferSave may cause a mismatch between the mem for an OpDereference
5484 // and the call site which uses it. See issue 49282.
5485 if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
5486 b := s.endBlock()
5487 b.Kind = ssa.BlockPlain
5488 curb := s.f.NewBlock(ssa.BlockPlain)
5489 b.AddEdgeTo(curb)
5490 s.startBlock(curb)
5491 }
5492
5493 for i, n := range args {
5494 callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
5495 }
5496
5497 callArgs = append(callArgs, s.mem())
5498
5499 // Construct the call, depending on the target kind.
5500 switch {
5501 case k == callDefer:
5502 sym := ir.Syms.Deferproc
5503 if dextra != nil {
5504 sym = ir.Syms.Deferprocat
5505 }
5506 aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
5507 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5508 case k == callGo:
5509 aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
5510 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5511 case closure != nil:
5512 // rawLoad because loading the code pointer from a
5513 // closure is always safe, but IsSanitizerSafeAddr
5514 // can't always figure that out currently, and it's
5515 // critical that we not clobber any arguments already
5516 // stored onto the stack.
5517 codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
5518 aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
5519 call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
5520 case codeptr != nil:
5521 // Interface call; codeptr was loaded from the itab above.
5522 aux := ssa.InterfaceAuxCall(params)
5523 call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
5524 case calleeLSym != nil:
5525 aux := ssa.StaticAuxCall(calleeLSym, params)
5526 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5527 if k == callTail {
5528 call.Op = ssa.OpTailLECall
5529 stksize = 0
5530 }
5531 default:
5532 s.Fatalf("bad call type %v %v", n.Op(), n)
5533 }
5534 call.AddArgs(callArgs...)
5535 call.AuxInt = stksize
5536 }
5537 s.prevCall = call
5538 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
5539 // Insert VarLive opcodes for the variables the call must keep alive.
5540 for _, v := range n.KeepAlive {
5541 if !v.Addrtaken() {
5542 s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
5543 }
5544 switch v.Class {
5545 case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
5546 default:
5547 s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
5548 }
5549 s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
5550 }
5551
5552 // Finish the block for defers.
5553 if k == callDefer || k == callDeferStack {
5554 b := s.endBlock()
5555 b.Kind = ssa.BlockDefer
5556 b.SetControl(call)
5557 bNext := s.f.NewBlock(ssa.BlockPlain)
5558 b.AddEdgeTo(bNext)
5559 // Add recover edge to exit code.
5560 r := s.f.NewBlock(ssa.BlockPlain)
5561 s.startBlock(r)
5562 s.exit()
5563 b.AddEdgeTo(r)
5564 b.Likely = ssa.BranchLikely
5565 s.startBlock(bNext)
5566 }
5567
5568 if len(res) == 0 || k != callNormal {
5569 // The call has no return value. Continue with the next statement.
5570 return nil
5571 }
5572 fp := res[0]
5573 if returnResultAddr {
5574 return s.resultAddrOfCall(call, 0, fp.Type)
5575 }
5576 return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
5577 }
5578
5579
5580
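// maybeNilCheckClosure inserts a nil check of the closure pointer in
// the rare cases (Wasm, and AIX except for go statements) where the
// call instruction itself will not reliably fault on nil.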
5581 func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
5582 if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
5583 // On these platforms a call through a nil closure does not reliably
5584 // fault, so an explicit nil check is required here.
5585 s.nilCheck(closure)
5586 }
5587 }
5588
5589
5590
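// getClosureAndRcvr returns values for an interface call X.M(...): the
// address of the method's code pointer within the itab (used as the
// "closure") and the receiver (the data word of the interface).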
5591 func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
5592 i := s.expr(fn.X)
5593 itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
5594 s.nilCheck(itab)
5595 itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
5596 closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
5597 rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
5598 return closure, rcvr
5599 }
5600
5601
5602
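// etypesign returns the signedness of integer kind e:
// -1 for signed kinds, +1 for unsigned and pointer kinds, 0 otherwise.
// For example, etypesign(types.TINT8) == -1 and etypesign(types.TUINTPTR) == +1.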
5603 func etypesign(e types.Kind) int8 {
5604 switch e {
5605 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5606 return -1
5607 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5608 return +1
5609 }
5610 return 0
5611 }
5612
5613
5614
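// addr converts the address of the expression n to SSA, adds any
// needed code to s, and returns the resulting SSA value.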
5615 func (s *state) addr(n ir.Node) *ssa.Value {
5616 if n.Op() != ir.ONAME {
5617 s.pushLine(n.Pos())
5618 defer s.popLine()
5619 }
5620
5621 if s.canSSA(n) {
5622 s.Fatalf("addr of canSSA expression: %+v", n)
5623 }
5624
5625 t := types.NewPtr(n.Type())
5626 linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
5627 v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
5628
5629 if offset != 0 {
5630 v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
5631 }
5632 return v
5633 }
5634 switch n.Op() {
5635 case ir.OLINKSYMOFFSET:
5636 no := n.(*ir.LinksymOffsetExpr)
5637 return linksymOffset(no.Linksym, no.Offset_)
5638 case ir.ONAME:
5639 n := n.(*ir.Name)
5640 if n.Heapaddr != nil {
5641 return s.expr(n.Heapaddr)
5642 }
5643 switch n.Class {
5644 case ir.PEXTERN:
5645 // global variable
5646 return linksymOffset(n.Linksym(), 0)
5647 case ir.PPARAM:
5648 // parameter slot
5649 v := s.decladdrs[n]
5650 if v != nil {
5651 return v
5652 }
5653 s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
5654 return nil
5655 case ir.PAUTO:
5656 return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))
5657
5658 case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
5659 // Ensure that we reuse symbols for out parameters so
5660 // that cse works on their addresses.
5661 return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
5662 default:
5663 s.Fatalf("variable address class %v not implemented", n.Class)
5664 return nil
5665 }
5666 case ir.ORESULT:
5667 // load return from callee
5668 n := n.(*ir.ResultExpr)
5669 return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
5670 case ir.OINDEX:
5671 n := n.(*ir.IndexExpr)
5672 if n.X.Type().IsSlice() {
5673 a := s.expr(n.X)
5674 i := s.expr(n.Index)
5675 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
5676 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
5677 p := s.newValue1(ssa.OpSlicePtr, t, a)
5678 return s.newValue2(ssa.OpPtrIndex, t, p, i)
5679 } else {
5680 a := s.addr(n.X)
5681 i := s.expr(n.Index)
5682 len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
5683 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
5684 return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
5685 }
5686 case ir.ODEREF:
5687 n := n.(*ir.StarExpr)
5688 return s.exprPtr(n.X, n.Bounded(), n.Pos())
5689 case ir.ODOT:
5690 n := n.(*ir.SelectorExpr)
5691 p := s.addr(n.X)
5692 return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
5693 case ir.ODOTPTR:
5694 n := n.(*ir.SelectorExpr)
5695 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
5696 return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
5697 case ir.OCONVNOP:
5698 n := n.(*ir.ConvExpr)
5699 if n.Type() == n.X.Type() {
5700 return s.addr(n.X)
5701 }
5702 addr := s.addr(n.X)
5703 return s.newValue1(ssa.OpCopy, t, addr)
5704 case ir.OCALLFUNC, ir.OCALLINTER:
5705 n := n.(*ir.CallExpr)
5706 return s.callAddr(n, callNormal)
5707 case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
5708 var v *ssa.Value
5709 if n.Op() == ir.ODOTTYPE {
5710 v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
5711 } else {
5712 v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
5713 }
5714 if v.Op != ssa.OpLoad {
5715 s.Fatalf("dottype of non-load")
5716 }
5717 if v.Args[1] != s.mem() {
5718 s.Fatalf("memory no longer live from dottype load")
5719 }
5720 return v.Args[0]
5721 default:
5722 s.Fatalf("unhandled addr %v", n.Op())
5723 return nil
5724 }
5725 }
5726
5727
5728
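// canSSA reports whether n is SSA-able.
// n must be an ONAME, possibly wrapped in field selections and
// array indexing.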
5729 func (s *state) canSSA(n ir.Node) bool {
5730 if base.Flag.N != 0 {
5731 return false
5732 }
5733 for {
5734 nn := n
5735 if nn.Op() == ir.ODOT {
5736 nn := nn.(*ir.SelectorExpr)
5737 n = nn.X
5738 continue
5739 }
5740 if nn.Op() == ir.OINDEX {
5741 nn := nn.(*ir.IndexExpr)
5742 if nn.X.Type().IsArray() {
5743 n = nn.X
5744 continue
5745 }
5746 }
5747 break
5748 }
5749 if n.Op() != ir.ONAME {
5750 return false
5751 }
5752 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5753 }
5754
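// canSSAName reports whether the variable name can be kept in SSA form.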
5755 func (s *state) canSSAName(name *ir.Name) bool {
5756 if name.Addrtaken() || !name.OnStack() {
5757 return false
5758 }
5759 switch name.Class {
5760 case ir.PPARAMOUT:
5761 if s.hasdefer {
5762 // Named result parameters must live in memory when the function
5763 // has defers: a deferred function that recovers may read and
5764 // write the results after the body has stopped executing.
5767 return false
5768 }
5769 if s.cgoUnsafeArgs {
5770 // Cgo effectively takes the address of all result args,
5771 // but the compiler can't see that.
5772 return false
5773 }
5774 }
5775 return true
5777 }
5778
5779
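// exprPtr evaluates n to a pointer and nil-checks it
// (unless bounded or known non-nil).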
5780 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5781 p := s.expr(n)
5782 if bounded || n.NonNil() {
5783 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5784 s.f.Warnl(lineno, "removed nil check")
5785 }
5786 return p
5787 }
5788 p = s.nilCheck(p)
5789 return p
5790 }
5791
5792
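// nilCheck generates nil pointer checking code for ptr.
// It is used only for automatically inserted nil checks, not for user
// code like 'x != nil'. It returns a "definitely non-nil" copy of ptr
// so that uses of the checked pointer are ordered after the check.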
5797 func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
5798 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
5799 return ptr
5800 }
5801 return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
5802 }
5803
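// boundsCheck generates bounds checking code: it checks that
// 0 <= idx < len (or <= for slicing kinds) and branches to a panic
// block if not. On input, len must already be converted to full int
// width. It returns idx extended to full int width. If bounded is
// true, the caller guarantees the index is in bounds and only the
// width extension is performed.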
5810 func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
5811 idx = s.extendIndex(idx, len, kind, bounded)
5812
5813 if bounded || base.Flag.B != 0 {
5814 // If bounded or bounds checking is flagged off, then the caller is
5815 // responsible for ensuring that the index is in bounds; just return
5816 // the extended index.
5817 //
5818 // Note that the Spectre index mask is not applied in this case:
5819 // such indexes are either compiler-generated or explicitly vouched
5820 // for by the caller, so they are not attacker-controlled.
5834 return idx
5835 }
5836
5837 bNext := s.f.NewBlock(ssa.BlockPlain)
5838 bPanic := s.f.NewBlock(ssa.BlockExit)
5839
5840 if !idx.Type.IsSigned() {
5841 switch kind {
5842 case ssa.BoundsIndex:
5843 kind = ssa.BoundsIndexU
5844 case ssa.BoundsSliceAlen:
5845 kind = ssa.BoundsSliceAlenU
5846 case ssa.BoundsSliceAcap:
5847 kind = ssa.BoundsSliceAcapU
5848 case ssa.BoundsSliceB:
5849 kind = ssa.BoundsSliceBU
5850 case ssa.BoundsSlice3Alen:
5851 kind = ssa.BoundsSlice3AlenU
5852 case ssa.BoundsSlice3Acap:
5853 kind = ssa.BoundsSlice3AcapU
5854 case ssa.BoundsSlice3B:
5855 kind = ssa.BoundsSlice3BU
5856 case ssa.BoundsSlice3C:
5857 kind = ssa.BoundsSlice3CU
5858 }
5859 }
5860
5861 var cmp *ssa.Value
5862 if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
5863 cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
5864 } else {
5865 cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
5866 }
5867 b := s.endBlock()
5868 b.Kind = ssa.BlockIf
5869 b.SetControl(cmp)
5870 b.Likely = ssa.BranchLikely
5871 b.AddEdgeTo(bNext)
5872 b.AddEdgeTo(bPanic)
5873
5874 s.startBlock(bPanic)
5875 if Arch.LinkArch.Family == sys.Wasm {
5876 // Wasm cannot use the PanicBounds block mechanism below; call the
5877 // runtime's bounds-check function directly instead.
5878 s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
5879 } else {
5880 mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
5881 s.endBlock().SetControl(mem)
5882 }
5883 s.startBlock(bNext)
5884
5885 // In Spectre index mode, apply the appropriate mask to avoid speculative out-of-bounds accesses.
5886 if base.Flag.Cfg.SpectreIndex {
5887 op := ssa.OpSpectreIndex
5888 if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
5889 op = ssa.OpSpectreSliceIndex
5890 }
5891 idx = s.newValue2(op, types.Types[types.TINT], idx, len)
5892 }
5893
5894 return idx
5895 }
5896
5897
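// check generates code that evaluates cmp and, if it is false, calls
// fn to panic. Panic blocks are shared per (fn, file, line).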
5898 func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
5899 b := s.endBlock()
5900 b.Kind = ssa.BlockIf
5901 b.SetControl(cmp)
5902 b.Likely = ssa.BranchLikely
5903 bNext := s.f.NewBlock(ssa.BlockPlain)
5904 line := s.peekPos()
5905 pos := base.Ctxt.PosTable.Pos(line)
5906 fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
5907 bPanic := s.panics[fl]
5908 if bPanic == nil {
5909 bPanic = s.f.NewBlock(ssa.BlockPlain)
5910 s.panics[fl] = bPanic
5911 s.startBlock(bPanic)
5912 // The panic call takes/returns memory to ensure that the right
5913 // memory state is observed if the panic happens.
5914 s.rtcall(fn, false, nil)
5915 }
5916 b.AddEdgeTo(bNext)
5917 b.AddEdgeTo(bPanic)
5918 s.startBlock(bNext)
5919 }
5920
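// intDivide generates SSA for the integer divide or modulus n = a op b,
// inserting a divide-by-zero check unless b is a non-zero constant.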
5921 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5922 needcheck := true
5923 switch b.Op {
5924 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5925 if b.AuxInt != 0 {
5926 needcheck = false
5927 }
5928 }
5929 if needcheck {
5930 // Do a size-appropriate check for zero.
5931 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5932 s.check(cmp, ir.Syms.Panicdivide)
5933 }
5934 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5935 }
5936
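// rtcall issues a call to the given runtime function fn with the listed
// args. It returns a slice of len(results) SSA values holding the
// results, in order. If returns is false, the call is known not to
// return and the current block is finished as an exit block.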
5941 func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
5942 s.prevCall = nil
5943 // Gather the arguments and compute the stack space they would occupy.
5944 off := base.Ctxt.Arch.FixedFrameSize
5945 var callArgs []*ssa.Value
5946 var callArgTypes []*types.Type
5947
5948 for _, arg := range args {
5949 t := arg.Type
5950 off = types.RoundUp(off, t.Alignment())
5951 size := t.Size()
5952 callArgs = append(callArgs, arg)
5953 callArgTypes = append(callArgTypes, t)
5954 off += size
5955 }
5956 off = types.RoundUp(off, int64(types.RegSize))
5957
5958 // Issue the call.
5959 var call *ssa.Value
5960 aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
5961 callArgs = append(callArgs, s.mem())
5962 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5963 call.AddArgs(callArgs...)
5964 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)
5965
5966 if !returns {
5967 // Finish the block for a call that does not return.
5968 b := s.endBlock()
5969 b.Kind = ssa.BlockExit
5970 b.SetControl(call)
5971 call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
5972 if len(results) > 0 {
5973 s.Fatalf("panic call can't have results")
5974 }
5975 return nil
5976 }
5977
5978 // Extract the results out of the call.
5979 res := make([]*ssa.Value, len(results))
5980 for i, t := range results {
5981 off = types.RoundUp(off, t.Alignment())
5982 res[i] = s.resultOfCall(call, int64(i), t)
5983 off += t.Size()
5984 }
5985 off = types.RoundUp(off, int64(types.PtrSize))
5986
5987 // Remember how much callee stack space we needed.
5988 call.AuxInt = off
5989
5990 return res
5991 }
5992
5993
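// storeType does *left = right for type t, emitting write barriers
// where needed; skip selects parts of t that may be omitted.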
5994 func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
5995 s.instrument(t, left, instrumentWrite)
5996
5997 if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
5998 // Known to not have write barrier. Store the whole type.
5999 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
6000 return
6001 }
6002
6003 // Store scalar fields first, so write barrier stores for
6004 // pointer fields can be grouped together, and scalar values
6005 // don't need to be live across the write barrier call.
6006 // TODO: if the writebarrier pass knows how to reorder stores,
6007 // we can do a single store here as long as skip==0.
6008 s.storeTypeScalars(t, left, right, skip)
6009 if skip&skipPtr == 0 && t.HasPointers() {
6010 s.storeTypePtrs(t, left, right)
6011 }
6012 }
6013
6014
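// storeTypeScalars does *left = right for all scalar (non-pointer) parts of t.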
6015 func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
6016 switch {
6017 case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
6018 s.store(t, left, right)
6019 case t.IsPtrShaped():
6020 if t.IsPtr() && t.Elem().NotInHeap() {
6021 s.store(t, left, right) // see issue 42032
6022 }
6023 // otherwise, no scalar fields.
6024 case t.IsString():
6025 if skip&skipLen != 0 {
6026 return
6027 }
6028 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
6029 lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
6030 s.store(types.Types[types.TINT], lenAddr, len)
6031 case t.IsSlice():
6032 if skip&skipLen == 0 {
6033 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
6034 lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
6035 s.store(types.Types[types.TINT], lenAddr, len)
6036 }
6037 if skip&skipCap == 0 {
6038 cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
6039 capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
6040 s.store(types.Types[types.TINT], capAddr, cap)
6041 }
6042 case t.IsInterface():
6043 // The itab field doesn't need a write barrier (even though it is a pointer).
6044 itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
6045 s.store(types.Types[types.TUINTPTR], left, itab)
6046 case t.IsStruct():
6047 n := t.NumFields()
6048 for i := 0; i < n; i++ {
6049 ft := t.FieldType(i)
6050 addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
6051 val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
6052 s.storeTypeScalars(ft, addr, val, 0)
6053 }
6054 case t.IsArray() && t.NumElem() == 0:
6055 // nothing to store
6056 case t.IsArray() && t.NumElem() == 1:
6057 s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
6058 default:
6059 s.Fatalf("bad write barrier type %v", t)
6060 }
6061 }
6062
6063
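// storeTypePtrs does *left = right for all pointer parts of t.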
6064 func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
6065 switch {
6066 case t.IsPtrShaped():
6067 if t.IsPtr() && t.Elem().NotInHeap() {
6068 break // see issue 42032
6069 }
6070 s.store(t, left, right)
6071 case t.IsString():
6072 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
6073 s.store(s.f.Config.Types.BytePtr, left, ptr)
6074 case t.IsSlice():
6075 elType := types.NewPtr(t.Elem())
6076 ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
6077 s.store(elType, left, ptr)
6078 case t.IsInterface():
6079 // The itab field is treated as a scalar.
6080 idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
6081 idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
6082 s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
6083 case t.IsStruct():
6084 n := t.NumFields()
6085 for i := 0; i < n; i++ {
6086 ft := t.FieldType(i)
6087 if !ft.HasPointers() {
6088 continue
6089 }
6090 addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
6091 val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
6092 s.storeTypePtrs(ft, addr, val)
6093 }
6094 case t.IsArray() && t.NumElem() == 0:
6095 // nothing to store
6096 case t.IsArray() && t.NumElem() == 1:
6097 s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
6098 default:
6099 s.Fatalf("bad write barrier type %v", t)
6100 }
6101 }
6102
6103
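// putArg evaluates n for the purpose of passing it as a call argument,
// returning either its SSA value or, for non-SSA-able types, an
// OpDereference of its address.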
6104 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
6105 var a *ssa.Value
6106 if !ssa.CanSSA(t) {
6107 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
6108 } else {
6109 a = s.expr(n)
6110 }
6111 return a
6112 }
6113
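// storeArgWithBase stores the argument n of type t at offset off from base.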
6114 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
6115 pt := types.NewPtr(t)
6116 var addr *ssa.Value
6117 if base == s.sp {
6118 // Use a special routine that avoids allocation on duplicate SP offsets.
6119 addr = s.constOffPtrSP(pt, off)
6120 } else {
6121 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
6122 }
6123
6124 if !ssa.CanSSA(t) {
6125 a := s.addr(n)
6126 s.move(t, addr, a)
6127 return
6128 }
6129
6130 a := s.expr(n)
6131 s.storeType(t, addr, a, 0, false)
6132 }
6133
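// slice computes the slice v[i:j:k] and returns ptr, len, and cap of
// the result. i, j, k may be nil, in which case they take their
// default values. v may be a slice, string, or pointer to an array.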
6137 func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
6138 t := v.Type
6139 var ptr, len, cap *ssa.Value
6140 switch {
6141 case t.IsSlice():
6142 ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
6143 len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
6144 cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
6145 case t.IsString():
6146 ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
6147 len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
6148 cap = len
6149 case t.IsPtr():
6150 if !t.Elem().IsArray() {
6151 s.Fatalf("bad ptr to array in slice %v\n", t)
6152 }
6153 nv := s.nilCheck(v)
6154 ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
6155 len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
6156 cap = len
6157 default:
6158 s.Fatalf("bad type in slice %v\n", t)
6159 }
6160
6161 // Set default values for any omitted indexes.
6162 if i == nil {
6163 i = s.constInt(types.Types[types.TINT], 0)
6164 }
6165 if j == nil {
6166 j = len
6167 }
6168 three := true
6169 if k == nil {
6170 three = false
6171 k = cap
6172 }
6173
6174 // Panic if slice indices are not in bounds.
6175 // Make sure we check these in reverse order so that we're always
6176 // comparing against a value known to be nonnegative. See issue 28797.
6177 if three {
6178 if k != cap {
6179 kind := ssa.BoundsSlice3Alen
6180 if t.IsSlice() {
6181 kind = ssa.BoundsSlice3Acap
6182 }
6183 k = s.boundsCheck(k, cap, kind, bounded)
6184 }
6185 if j != k {
6186 j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
6187 }
6188 i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
6189 } else {
6190 if j != k {
6191 kind := ssa.BoundsSliceAlen
6192 if t.IsSlice() {
6193 kind = ssa.BoundsSliceAcap
6194 }
6195 j = s.boundsCheck(j, k, kind, bounded)
6196 }
6197 i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
6198 }
6199
6200 // Word-sized integer operations.
6201 subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
6202 mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
6203 andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])
6204
6205 // Calculate the length (rlen) and capacity (rcap) of the new slice.
6206 // For strings the capacity of the result is unimportant. However,
6207 // we use rcap to test if we've generated a zero-length slice.
6208 // Use length of strings for that.
6209 rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
6210 rcap := rlen
6211 if j != k && !t.IsString() {
6212 rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
6213 }
6214
6215 if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
6216 // No pointer arithmetic necessary.
6217 return ptr, rlen, rcap
6218 }
6219
6220 // Calculate the base pointer (rptr) for the new slice.
6221 //
6222 // Generate the following code assuming that indexes are in bounds.
6223 // The masking is to make sure that we don't generate a slice
6224 // that points to the next object in memory. We cannot just set
6225 // the pointer to nil because then we would create a nil slice or
6226 // string.
6227 //
6228 //     rcap = k - i
6229 //     rlen = j - i
6230 //     rptr = ptr + (mask(rcap) & (i * stride))
6231 //
6232 // Where mask(x) is 0 if x==0 and -1 if x>0, and stride is the width
6233 // of the element type.
6234 stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())
6235
6236 // delta is the number of bytes to offset ptr by.
6237 delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)
6238
6239 // If we're slicing to the point where the capacity is zero,
6240 // zero out the delta.
6241 mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
6242 delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)
6243
6244 // Compute rptr = ptr + delta.
6245 rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)
6246
6247 return rptr, rlen, rcap
6248 }
6249
6250 type u642fcvtTab struct {
6251 leq, cvt2F, and, rsh, or, add ssa.Op
6252 one func(*state, *types.Type, int64) *ssa.Value
6253 }
6254
6255 var u64_f64 = u642fcvtTab{
6256 leq: ssa.OpLeq64,
6257 cvt2F: ssa.OpCvt64to64F,
6258 and: ssa.OpAnd64,
6259 rsh: ssa.OpRsh64Ux64,
6260 or: ssa.OpOr64,
6261 add: ssa.OpAdd64F,
6262 one: (*state).constInt64,
6263 }
6264
6265 var u64_f32 = u642fcvtTab{
6266 leq: ssa.OpLeq64,
6267 cvt2F: ssa.OpCvt64to32F,
6268 and: ssa.OpAnd64,
6269 rsh: ssa.OpRsh64Ux64,
6270 or: ssa.OpOr64,
6271 add: ssa.OpAdd32F,
6272 one: (*state).constInt64,
6273 }
6274
6275 func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6276 return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
6277 }
6278
6279 func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6280 return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
6281 }
6282
6283 func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6284 // The conversion is implemented as:
6285 //
6286 //   if x >= 0 {
6287 //       result = floatY(x)
6288 //   } else {
6289 //       y = x & 1
6290 //       z = x >> 1
6291 //       z = z | y
6292 //       result = floatY(z)
6293 //       result = result + result
6294 //   }
6295 //
6296 // A uint64 with the top bit set looks negative to the hardware's
6297 // signed integer-to-float conversion, so we halve the value with an
6298 // unsigned shift, convert, and double the result. Or-ing the low bit
6299 // back in before the conversion keeps the rounding correct for odd
6300 // values that the shift would otherwise truncate.
6308 cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
6309 b := s.endBlock()
6310 b.Kind = ssa.BlockIf
6311 b.SetControl(cmp)
6312 b.Likely = ssa.BranchLikely
6313
6314 bThen := s.f.NewBlock(ssa.BlockPlain)
6315 bElse := s.f.NewBlock(ssa.BlockPlain)
6316 bAfter := s.f.NewBlock(ssa.BlockPlain)
6317
6318 b.AddEdgeTo(bThen)
6319 s.startBlock(bThen)
6320 a0 := s.newValue1(cvttab.cvt2F, tt, x)
6321 s.vars[n] = a0
6322 s.endBlock()
6323 bThen.AddEdgeTo(bAfter)
6324
6325 b.AddEdgeTo(bElse)
6326 s.startBlock(bElse)
6327 one := cvttab.one(s, ft, 1)
6328 y := s.newValue2(cvttab.and, ft, x, one)
6329 z := s.newValue2(cvttab.rsh, ft, x, one)
6330 z = s.newValue2(cvttab.or, ft, z, y)
6331 a := s.newValue1(cvttab.cvt2F, tt, z)
6332 a1 := s.newValue2(cvttab.add, tt, a, a)
6333 s.vars[n] = a1
6334 s.endBlock()
6335 bElse.AddEdgeTo(bAfter)
6336
6337 s.startBlock(bAfter)
6338 return s.variable(n, n.Type())
6339 }
6340
6341 type u322fcvtTab struct {
6342 cvtI2F, cvtF2F ssa.Op
6343 }
6344
6345 var u32_f64 = u322fcvtTab{
6346 cvtI2F: ssa.OpCvt32to64F,
6347 cvtF2F: ssa.OpCopy,
6348 }
6349
6350 var u32_f32 = u322fcvtTab{
6351 cvtI2F: ssa.OpCvt32to32F,
6352 cvtF2F: ssa.OpCvt64Fto32F,
6353 }
6354
6355 func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6356 return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
6357 }
6358
6359 func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6360 return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
6361 }
6362
6363 func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6364 // if x >= 0 {
6365 //     result = floatY(x)
6366 // } else {
6367 //     result = floatY(float64(x) + (1 << 32))
6368 // }
6369 cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
6370 b := s.endBlock()
6371 b.Kind = ssa.BlockIf
6372 b.SetControl(cmp)
6373 b.Likely = ssa.BranchLikely
6374
6375 bThen := s.f.NewBlock(ssa.BlockPlain)
6376 bElse := s.f.NewBlock(ssa.BlockPlain)
6377 bAfter := s.f.NewBlock(ssa.BlockPlain)
6378
6379 b.AddEdgeTo(bThen)
6380 s.startBlock(bThen)
6381 a0 := s.newValue1(cvttab.cvtI2F, tt, x)
6382 s.vars[n] = a0
6383 s.endBlock()
6384 bThen.AddEdgeTo(bAfter)
6385
6386 b.AddEdgeTo(bElse)
6387 s.startBlock(bElse)
6388 a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
6389 twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
6390 a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
6391 a3 := s.newValue1(cvttab.cvtF2F, tt, a2)
6392
6393 s.vars[n] = a3
6394 s.endBlock()
6395 bElse.AddEdgeTo(bAfter)
6396
6397 s.startBlock(bAfter)
6398 return s.variable(n, n.Type())
6399 }
6400
6401
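// referenceTypeBuiltin generates SSA for the len and cap builtins on
// map and channel values, loading the count from the pointed-to
// header; a nil map/chan yields 0.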
6402 func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
6403 if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
6404 s.Fatalf("node must be a map or a channel")
6405 }
6406 if n.X.Type().IsChan() && n.Op() == ir.OLEN {
6407 s.Fatalf("cannot inline len(chan)")
6408 }
6409 if n.X.Type().IsChan() && n.Op() == ir.OCAP {
6410 s.Fatalf("cannot inline cap(chan)")
6411 }
6412 // Code we're generating:
6413 //   if n == nil {
6414 //       return 0
6415 //   } else {
6416 //       // len: return *((*int)n)
6417 //       // cap: return *(((*int)n)+1)
6418 //   }
6419
6420 lenType := n.Type()
6421 nilValue := s.constNil(types.Types[types.TUINTPTR])
6422 cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
6423 b := s.endBlock()
6424 b.Kind = ssa.BlockIf
6425 b.SetControl(cmp)
6426 b.Likely = ssa.BranchUnlikely
6427
6428 bThen := s.f.NewBlock(ssa.BlockPlain)
6429 bElse := s.f.NewBlock(ssa.BlockPlain)
6430 bAfter := s.f.NewBlock(ssa.BlockPlain)
6431
6432 // The length/capacity of a nil map or channel is zero.
6433 b.AddEdgeTo(bThen)
6434 s.startBlock(bThen)
6435 s.vars[n] = s.zeroVal(lenType)
6436 s.endBlock()
6437 bThen.AddEdgeTo(bAfter)
6438
6439 b.AddEdgeTo(bElse)
6440 s.startBlock(bElse)
6441 switch n.Op() {
6442 case ir.OLEN:
6443 // Length is stored in the first word of the map/chan header.
6444 s.vars[n] = s.load(lenType, x)
6445 case ir.OCAP:
6446 // Capacity is stored in the second word of the chan header.
6447 sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
6448 s.vars[n] = s.load(lenType, sw)
6449 default:
6450 s.Fatalf("op must be OLEN or OCAP")
6451 }
6452 s.endBlock()
6453 bElse.AddEdgeTo(bAfter)
6454
6455 s.startBlock(bAfter)
6456 return s.variable(n, lenType)
6457 }
6458
6459 type f2uCvtTab struct {
6460 ltf, cvt2U, subf, or ssa.Op
6461 floatValue func(*state, *types.Type, float64) *ssa.Value
6462 intValue func(*state, *types.Type, int64) *ssa.Value
6463 cutoff uint64
6464 }
6465
6466 var f32_u64 = f2uCvtTab{
6467 ltf: ssa.OpLess32F,
6468 cvt2U: ssa.OpCvt32Fto64,
6469 subf: ssa.OpSub32F,
6470 or: ssa.OpOr64,
6471 floatValue: (*state).constFloat32,
6472 intValue: (*state).constInt64,
6473 cutoff: 1 << 63,
6474 }
6475
6476 var f64_u64 = f2uCvtTab{
6477 ltf: ssa.OpLess64F,
6478 cvt2U: ssa.OpCvt64Fto64,
6479 subf: ssa.OpSub64F,
6480 or: ssa.OpOr64,
6481 floatValue: (*state).constFloat64,
6482 intValue: (*state).constInt64,
6483 cutoff: 1 << 63,
6484 }
6485
6486 var f32_u32 = f2uCvtTab{
6487 ltf: ssa.OpLess32F,
6488 cvt2U: ssa.OpCvt32Fto32,
6489 subf: ssa.OpSub32F,
6490 or: ssa.OpOr32,
6491 floatValue: (*state).constFloat32,
6492 intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
6493 cutoff: 1 << 31,
6494 }
6495
6496 var f64_u32 = f2uCvtTab{
6497 ltf: ssa.OpLess64F,
6498 cvt2U: ssa.OpCvt64Fto32,
6499 subf: ssa.OpSub64F,
6500 or: ssa.OpOr32,
6501 floatValue: (*state).constFloat64,
6502 intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
6503 cutoff: 1 << 31,
6504 }
6505
6506 func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6507 return s.floatToUint(&f32_u64, n, x, ft, tt)
6508 }
6509 func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6510 return s.floatToUint(&f64_u64, n, x, ft, tt)
6511 }
6512
6513 func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6514 return s.floatToUint(&f32_u32, n, x, ft, tt)
6515 }
6516
6517 func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6518 return s.floatToUint(&f64_u32, n, x, ft, tt)
6519 }
6520
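// floatToUint converts the float x to an unsigned integer of the
// target type, compensating for values at or above the signed cutoff.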
6521 func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6522 // cutoff := 1 << (sizeof(intY)*8 - 1)
6523 // if x < floatX(cutoff) {
6524 //     result = uintY(x)
6525 // } else {
6526 //     y = x - floatX(cutoff)
6527 //     z = uintY(y)
6528 //     result = z | -(cutoff)
6529 // }
6530 cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
6531 cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
6532 b := s.endBlock()
6533 b.Kind = ssa.BlockIf
6534 b.SetControl(cmp)
6535 b.Likely = ssa.BranchLikely
6536
6537 bThen := s.f.NewBlock(ssa.BlockPlain)
6538 bElse := s.f.NewBlock(ssa.BlockPlain)
6539 bAfter := s.f.NewBlock(ssa.BlockPlain)
6540
6541 b.AddEdgeTo(bThen)
6542 s.startBlock(bThen)
6543 a0 := s.newValue1(cvttab.cvt2U, tt, x)
6544 s.vars[n] = a0
6545 s.endBlock()
6546 bThen.AddEdgeTo(bAfter)
6547
6548 b.AddEdgeTo(bElse)
6549 s.startBlock(bElse)
6550 y := s.newValue2(cvttab.subf, ft, x, cutoff)
6551 y = s.newValue1(cvttab.cvt2U, tt, y)
6552 z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
6553 a1 := s.newValue2(cvttab.or, tt, y, z)
6554 s.vars[n] = a1
6555 s.endBlock()
6556 bElse.AddEdgeTo(bAfter)
6557
6558 s.startBlock(bAfter)
6559 return s.variable(n, n.Type())
6560 }
6561
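// dottype generates SSA for a type assertion node n = x.(T).
// commaok indicates whether to panic (false) or return an additional
// boolean (true) on failure; if commaok is false, resok is nil.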
6565 func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
6566 iface := s.expr(n.X)
6567 target := s.reflectType(n.Type())
6568 var targetItab *ssa.Value
6569 if n.ITab != nil {
6570 targetItab = s.expr(n.ITab)
6571 }
6572 return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
6573 }
6574
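// dynamicDottype is like dottype, but for dynamic type assertions
// where the asserted type is computed at run time (e.g. under generics).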
6575 func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
6576 iface := s.expr(n.X)
6577 var source, target, targetItab *ssa.Value
6578 if n.SrcRType != nil {
6579 source = s.expr(n.SrcRType)
6580 }
6581 if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
6582 byteptr := s.f.Config.Types.BytePtr
6583 targetItab = s.expr(n.ITab)
6584 // TODO(mdempsky): Investigate whether compiling n.RType could be
6585 // better than loading itab.typ.
6586 target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
6587 } else {
6588 target = s.expr(n.RType)
6589 }
6590 return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
6591 }
6592
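// dottype1 implements an x.(T) operation. iface is the argument (x),
// src is the type being asserted from, and dst is the type being
// asserted to (T). source and target are the runtime type descriptors
// of src and dst (source may be nil). targetItab, when non-nil, is the
// itab to compare against for assertions from a nonempty interface to
// a concrete type. commaok selects the boolean form; descriptor, when
// non-nil, describes the runtime cache for this assertion site.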
6601 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
6602 typs := s.f.Config.Types
6603 byteptr := typs.BytePtr
6604 if dst.IsInterface() {
6605 if dst.IsEmptyInterface() {
6606 // Converting to an empty interface.
6607 // Input could be an empty or nonempty interface.
6608 if base.Debug.TypeAssert > 0 {
6609 base.WarnfAt(pos, "type assertion inlined")
6610 }
6611
6612 // Get the itab/type field from the input.
6613 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6614 // Conversion succeeds iff that field is not nil.
6615 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6616
6617 if src.IsEmptyInterface() && commaok {
6618 // Converting empty interface to empty interface with ,ok is just a nil check.
6619 return iface, cond
6620 }
6621
6622 // Branch on nilness.
6623 b := s.endBlock()
6624 b.Kind = ssa.BlockIf
6625 b.SetControl(cond)
6626 b.Likely = ssa.BranchLikely
6627 bOk := s.f.NewBlock(ssa.BlockPlain)
6628 bFail := s.f.NewBlock(ssa.BlockPlain)
6629 b.AddEdgeTo(bOk)
6630 b.AddEdgeTo(bFail)
6631
6632 if !commaok {
6633 // On failure, panic by calling panicnildottype.
6634 s.startBlock(bFail)
6635 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6636
6637 // On success, return (perhaps modified) input interface.
6638 s.startBlock(bOk)
6639 if src.IsEmptyInterface() {
6640 res = iface
6641 return
6642
6643 // Load the type out of the itab and build an interface with the existing idata.
6644 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6645 typ := s.load(byteptr, off)
6646 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6647 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6648 return
6649 }
6650
6651 s.startBlock(bOk)
6652 // nonempty -> empty:
6653 // need to load the type out of the itab.
6654 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6655 s.vars[typVar] = s.load(byteptr, off)
6656 s.endBlock()
6657
6658 // On failure the itab is nil, which is exactly the type word we need.
6659 s.startBlock(bFail)
6660 s.vars[typVar] = itab
6661 s.endBlock()
6662
6663 // Merge point.
6664 bEnd := s.f.NewBlock(ssa.BlockPlain)
6665 bOk.AddEdgeTo(bEnd)
6666 bFail.AddEdgeTo(bEnd)
6667 s.startBlock(bEnd)
6668 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6669 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6670 resok = cond
6671 delete(s.vars, typVar)
6672 return
6673 }
6674
6675 if base.Debug.TypeAssert > 0 {
6676 base.WarnfAt(pos, "type assertion not inlined")
6677 }
6678
6679 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6680 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6681
6682 // First, check for nil.
6683 bNil := s.f.NewBlock(ssa.BlockPlain)
6684 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6685 bMerge := s.f.NewBlock(ssa.BlockPlain)
6686 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6687 b := s.endBlock()
6688 b.Kind = ssa.BlockIf
6689 b.SetControl(cond)
6690 b.Likely = ssa.BranchLikely
6691 b.AddEdgeTo(bNonNil)
6692 b.AddEdgeTo(bNil)
6693
6694 s.startBlock(bNil)
6695 if commaok {
6696 s.vars[typVar] = itab
6697 b := s.endBlock()
6698 b.AddEdgeTo(bMerge)
6699 } else {
6700 // Panic if the input is nil.
6701 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6702 }
6703
6704 // Get typ, possibly by loading it out of the itab.
6705 s.startBlock(bNonNil)
6706 typ := itab
6707 if !src.IsEmptyInterface() {
6708 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6709 }
6710
6711 // Check the assertion cache first.
6712 var d *ssa.Value
6713 if descriptor != nil {
6714 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6715 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
6716 // Note: we can only use the cache if we have the right atomic load instruction.
6717 // Double-check that here.
6718 if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp"}]; !ok {
6719 s.Fatalf("atomic load not available")
6720 }
6721
6722 var mul, and, add, zext ssa.Op
6723 if s.config.PtrSize == 4 {
6724 mul = ssa.OpMul32
6725 and = ssa.OpAnd32
6726 add = ssa.OpAdd32
6727 zext = ssa.OpCopy
6728 } else {
6729 mul = ssa.OpMul64
6730 and = ssa.OpAnd64
6731 add = ssa.OpAdd64
6732 zext = ssa.OpZeroExt32to64
6733 }
6734
6735 loopHead := s.f.NewBlock(ssa.BlockPlain)
6736 loopBody := s.f.NewBlock(ssa.BlockPlain)
6737 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6738 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6739
6740 // Load the cache pointer out of the descriptor, with an atomic load
6741 // so we ensure that we see a fully written cache.
6742 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6743 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6744 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6745
6746 // Load hash from type or itab.
6747 var hash *ssa.Value
6748 if src.IsEmptyInterface() {
6749 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6750 } else {
6751 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6752 }
6753 hash = s.newValue1(zext, typs.Uintptr, hash)
6754 s.vars[hashVar] = hash
6755
6756 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6757
6758 b := s.endBlock()
6759 b.AddEdgeTo(loopHead)
6760
6761 // At the loop head, compute a pointer to the cache entry:
6762 //   e := &cache.Entries[hash&mask]
6763 s.startBlock(loopHead)
6764 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6765 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6766 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6767 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6768
6769 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6770
6771 // Look for a cache hit.
6772 //   if e.Typ == typ { goto hit }
6773 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6774 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6775 b = s.endBlock()
6776 b.Kind = ssa.BlockIf
6777 b.SetControl(cmp1)
6778 b.AddEdgeTo(cacheHit)
6779 b.AddEdgeTo(loopBody)
6780
6781 // Look for an empty entry, the tombstone for this hash table.
6782 //   if e.Typ == nil { goto miss }
6783 s.startBlock(loopBody)
6784 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6785 b = s.endBlock()
6786 b.Kind = ssa.BlockIf
6787 b.SetControl(cmp2)
6788 b.AddEdgeTo(cacheMiss)
6789 b.AddEdgeTo(loopHead)
6790
6791 // On a hit, load the resulting itab from the cache entry
6792 // (the word following the type).
6793 s.startBlock(cacheHit)
6794 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6795 s.vars[typVar] = eItab
6796 b = s.endBlock()
6797 b.AddEdgeTo(bMerge)
6798
6799 // On a miss, call into the runtime to get the answer.
6800 s.startBlock(cacheMiss)
6801 }
6802 }
6803
6804 // Call into the runtime to resolve the assertion (and, with a descriptor, fill the cache).
6805 if descriptor != nil {
6806 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6807 } else {
6808 var fn *obj.LSym
6809 if commaok {
6810 fn = ir.Syms.AssertE2I2
6811 } else {
6812 fn = ir.Syms.AssertE2I
6813 }
6814 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6815 }
6816 s.vars[typVar] = itab
6817 b = s.endBlock()
6818 b.AddEdgeTo(bMerge)
6819
6820 // Merge point.
6821 s.startBlock(bMerge)
6822 itab = s.variable(typVar, byteptr)
6823 var ok *ssa.Value
6824 if commaok {
6825 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6826 }
6827 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6828 }
6829
6830 if base.Debug.TypeAssert > 0 {
6831 base.WarnfAt(pos, "type assertion inlined")
6832 }
6833
6834 // Converting to a concrete type.
6835 direct := types.IsDirectIface(dst)
6836 itab := s.newValue1(ssa.OpITab, byteptr, iface) // type word of interface
6840 var wantedFirstWord *ssa.Value
6841 if src.IsEmptyInterface() {
6842 // Looking for pointer to target type.
6843 wantedFirstWord = target
6844 } else {
6845 // Looking for pointer to itab for target type and source interface.
6846 wantedFirstWord = targetItab
6847 }
6848
6849 var tmp ir.Node
6850 var addr *ssa.Value
6851 if commaok && !ssa.CanSSA(dst) {
6852 // unSSAable type; use a temporary.
6853 // TODO: get rid of some of these temporaries.
6854 tmp, addr = s.temp(pos, dst)
6855 }
6856
6857 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6858 b := s.endBlock()
6859 b.Kind = ssa.BlockIf
6860 b.SetControl(cond)
6861 b.Likely = ssa.BranchLikely
6862
6863 bOk := s.f.NewBlock(ssa.BlockPlain)
6864 bFail := s.f.NewBlock(ssa.BlockPlain)
6865 b.AddEdgeTo(bOk)
6866 b.AddEdgeTo(bFail)
6867
6868 if !commaok {
6869 // On failure, panic by calling panicdottype.
6870 s.startBlock(bFail)
6871 taddr := source
6872 if taddr == nil {
6873 taddr = s.reflectType(src)
6874 }
6875 if src.IsEmptyInterface() {
6876 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6877 } else {
6878 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6879 }
6880
6881 // On success, return data from interface.
6882 s.startBlock(bOk)
6883 if direct {
6884 return s.newValue1(ssa.OpIData, dst, iface), nil
6885 }
6886 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6887 return s.load(dst, p), nil
6888 }
6889
6890
6891 // commaok is the more complicated case because we have a control-flow merge point.
6892 bEnd := s.f.NewBlock(ssa.BlockPlain)
6893 // Note that we need a new valVar each time (unlike okVar, which can
6894 // be reused) because the asserted type may differ at each use.
6895 valVar := ssaMarker("val")
6896
6897 // type assertion succeeded
6898 s.startBlock(bOk)
6899 if tmp == nil {
6900 if direct {
6901 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6902 } else {
6903 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6904 s.vars[valVar] = s.load(dst, p)
6905 }
6906 } else {
6907 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6908 s.move(dst, addr, p)
6909 }
6910 s.vars[okVar] = s.constBool(true)
6911 s.endBlock()
6912 bOk.AddEdgeTo(bEnd)
6913
6914 // type assertion failed
6915 s.startBlock(bFail)
6916 if tmp == nil {
6917 s.vars[valVar] = s.zeroVal(dst)
6918 } else {
6919 s.zero(dst, addr)
6920 }
6921 s.vars[okVar] = s.constBool(false)
6922 s.endBlock()
6923 bFail.AddEdgeTo(bEnd)
6924
6925 // merge point
6926 s.startBlock(bEnd)
6927 if tmp == nil {
6928 res = s.variable(valVar, dst)
6929 delete(s.vars, valVar)
6930 } else {
6931 res = s.load(dst, addr)
6932 }
6933 resok = s.variable(okVar, types.Types[types.TBOOL])
6934 delete(s.vars, okVar)
6935 return res, resok
6936 }
6937
6938
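// temp allocates a new temporary of type t at position pos and
// returns it along with the SSA value of its address.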
6939 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6940 tmp := typecheck.TempAt(pos, s.curfn, t)
6941 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
6942 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6943 }
6944 addr := s.addr(tmp)
6945 return tmp, addr
6946 }
6947
6948
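// variable returns the SSA value of the variable n of type t at the
// current point in the function, creating a forward reference to be
// resolved by phi insertion if the value is not yet known.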
6949 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6950 v := s.vars[n]
6951 if v != nil {
6952 return v
6953 }
6954 v = s.fwdVars[n]
6955 if v != nil {
6956 return v
6957 }
6958
6959 if s.curBlock == s.f.Entry {
6960 // No variable should be live at entry.
6961 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6962 }
6963 // Make a FwdRef, which records a value that's live on block input.
6964 // We'll find the matching definition as part of insertPhis.
6965 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6966 s.fwdVars[n] = v
6967 if n.Op() == ir.ONAME {
6968 s.addNamedValue(n.(*ir.Name), v)
6969 }
6970 return v
6971 }
6972
6973 func (s *state) mem() *ssa.Value {
6974 return s.variable(memVar, types.TypeMem)
6975 }
6976
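// addNamedValue records that SSA value v corresponds to the
// source-level variable n, for use by debug-info generation.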
6977 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6978 if n.Class == ir.Pxxx {
6979 // Don't track our marker nodes (memVar etc.).
6980 return
6981 }
6982 if ir.IsAutoTmp(n) {
6983 // Don't track temporary variables.
6984 return
6985 }
6986 if n.Class == ir.PPARAMOUT {
6987 // Don't track named output values. This prevents return values
6988 // from being assigned too early.
6989 return
6990 }
6991 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6992 values, ok := s.f.NamedValues[loc]
6993 if !ok {
6994 s.f.Names = append(s.f.Names, &loc)
6995 s.f.CanonicalLocalSlots[loc] = &loc
6996 }
6997 s.f.NamedValues[loc] = append(values, v)
6998 }
6999
7000
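// Branch is an unresolved branch: a Prog whose jump target is the
// given block, filled in once block addresses are known.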
7001 type Branch struct {
7002 P *obj.Prog
7003 B *ssa.Block
7004 }
7005
7006
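// State contains state needed during Prog generation.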
7007 type State struct {
7008 ABI obj.ABI
7009
7010 pp *objw.Progs
7011
7012 // Branches remembers all the branch instructions we've seen
7013 // and where they would like to go.
7014 Branches []Branch
7015
7016 // JumpTables remembers all the jump tables we've seen.
7017 JumpTables []*ssa.Block
7018
7019 // bstart remembers where each block starts (indexed by block ID).
7020 bstart []*obj.Prog
7021
7022 maxarg int64 // largest frame size for arguments to calls made by the function
7023
7024 // livenessMap maps GC safe points to their liveness index, as
7025 // produced by the liveness analysis.
7026 livenessMap liveness.Map
7027
7028 // partLiveArgs includes arguments that may be partially live, for which
7029 // we need to generate instructions that spill the argument registers.
7030 partLiveArgs map[*ir.Name]bool
7031
7032 // lineRunStart marks the first instruction of the current run of
7033 // instructions that share a source line; Prog uses it to float a
7034 // statement mark to the start of such a run.
7035 lineRunStart *obj.Prog
7036
7037 // wasm: the number of values on the WebAssembly stack; used only as a safeguard.
7038 OnWasmStackSkipped int
7039 }
7040
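// FuncInfo returns the FuncInfo for the function being compiled.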
7041 func (s *State) FuncInfo() *obj.FuncInfo {
7042 return s.pp.CurFunc.LSym.Func()
7043 }
7044
7045
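// Prog appends a new Prog with the given opcode, keeping at most one
// statement mark per run of instructions on the same source line.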
7046 func (s *State) Prog(as obj.As) *obj.Prog {
7047 p := s.pp.Prog(as)
7048 if objw.LosesStmtMark(as) {
7049 return p
7050 }
7051 // Float a statement start to the beginning of any same-line run;
7052 // lineRunStart is reset at block boundaries.
7053 if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
7054 s.lineRunStart = p
7055 } else if p.Pos.IsStmt() == src.PosIsStmt {
7056 s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
7057 p.Pos = p.Pos.WithNotStmt()
7058 }
7059 return p
7060 }
7061
7062
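// Pc returns the Prog that will be the next instruction emitted.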
7063 func (s *State) Pc() *obj.Prog {
7064 return s.pp.Next
7065 }
7066
7067
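// SetPos sets the current source position.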
7068 func (s *State) SetPos(pos src.XPos) {
7069 s.pp.Pos = pos
7070 }
7071
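// Br emits a single branch instruction toward the given target block
// and returns the instruction; the branch target is resolved later,
// once block addresses are known.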
7075 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
7076 p := s.Prog(op)
7077 p.To.Type = obj.TYPE_BRANCH
7078 s.Branches = append(s.Branches, Branch{P: p, B: target})
7079 return p
7080 }
7081
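// DebugFriendlySetPosFrom adjusts Pos.IsStmt subject to heuristics
// that reduce "jumpy" line-number churn when debugging: register
// spills, fills, copies, phis, and no-position instructions are not
// marked as statement boundaries.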
7087 func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
7088 switch v.Op {
7089 case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
7090 // These are not statements.
7091 s.SetPos(v.Pos.WithNotStmt())
7092 default:
7093 p := v.Pos
7094 if p != src.NoXPos {
7095 // If the position is defined, update the position.
7096 // Also convert default IsStmt to NotStmt; only
7097 // explicit statement boundaries should appear
7098 // in the generated code.
7099 if p.IsStmt() != src.PosIsStmt {
7100 if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
7101 // s.pp.Pos is already a statement mark on the same file and line
7102 // as p. Leave it alone: overwriting it with the NotStmt version
7103 // of the same position would gain nothing, and the earlier
7104 // instruction is at least as good a place for the statement
7105 // boundary as this one.
7114 return
7115 }
7116 p = p.WithNotStmt()
7117 // Calls use the pos attached to v, but copy the statement mark from State.
7118 }
7119 s.SetPos(p)
7120 } else {
7121 s.SetPos(s.pp.Pos.WithNotStmt())
7122 }
7123 }
7124 }
7125
7126
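// emitArgInfo emits the argument info (stack locations) of the
// function for use in tracebacks.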
7127 func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
7128 ft := e.curfn.Type()
7129 if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
7130 return
7131 }
7132
7133 x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
7134 x.Set(obj.AttrContentAddressable, true)
7135 e.curfn.LSym.Func().ArgInfo = x
7136
7137 // Emit a funcdata pointing at the arg info data.
7138 p := pp.Prog(obj.AFUNCDATA)
7139 p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
7140 p.To.Type = obj.TYPE_MEM
7141 p.To.Name = obj.NAME_EXTERN
7142 p.To.Sym = x
7143 }
7144
7145
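// EmitArgInfo generates the argument info symbol (stack offsets and
// sizes of arguments) of f for traceback printing.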
7146 func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
7147 x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
7148 // Note: the symbol is not marked content-addressable here; callers
7149 // that can do so safely (see emitArgInfo) set the attribute themselves.
7150
7151
7152 PtrSize := int64(types.PtrSize)
7153 uintptrTyp := types.Types[types.TUINTPTR]
7154
7155 isAggregate := func(t *types.Type) bool {
7156 return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
7157 }
7158
7159 wOff := 0
7160 n := 0
7161 writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }
7162
7163 // Write one non-aggregate arg/field/element.
7164 write1 := func(sz, offset int64) {
7165 if offset >= rtabi.TraceArgsSpecial {
7166 writebyte(rtabi.TraceArgsOffsetTooLarge)
7167 } else {
7168 writebyte(uint8(offset))
7169 writebyte(uint8(sz))
7170 }
7171 n++
7172 }
7173
7174 // visitType writes the info for t at baseOffset, recursing into
7175 // aggregates. It reports whether to continue visiting.
7176 var visitType func(baseOffset int64, t *types.Type, depth int) bool
7177 visitType = func(baseOffset int64, t *types.Type, depth int) bool {
7178 if n >= rtabi.TraceArgsLimit {
7179 writebyte(rtabi.TraceArgsDotdotdot)
7180 return false
7181 }
7182 if !isAggregate(t) {
7183 write1(t.Size(), baseOffset)
7184 return true
7185 }
7186 writebyte(rtabi.TraceArgsStartAgg)
7187 depth++
7188 if depth >= rtabi.TraceArgsMaxDepth {
7189 writebyte(rtabi.TraceArgsDotdotdot)
7190 writebyte(rtabi.TraceArgsEndAgg)
7191 n++
7192 return true
7193 }
7194 switch {
7195 case t.IsInterface(), t.IsString():
7196 _ = visitType(baseOffset, uintptrTyp, depth) &&
7197 visitType(baseOffset+PtrSize, uintptrTyp, depth)
7198 case t.IsSlice():
7199 _ = visitType(baseOffset, uintptrTyp, depth) &&
7200 visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
7201 visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
7202 case t.IsComplex():
7203 _ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
7204 visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
7205 case t.IsArray():
7206 if t.NumElem() == 0 {
7207 n++ // {} counts as a component
7208 break
7209 }
7210 for i := int64(0); i < t.NumElem(); i++ {
7211 if !visitType(baseOffset, t.Elem(), depth) {
7212 break
7213 }
7214 baseOffset += t.Elem().Size()
7215 }
7216 case t.IsStruct():
7217 if t.NumFields() == 0 {
7218 n++ // {} counts as a component
7219 break
7220 }
7221 for _, field := range t.Fields() {
7222 if !visitType(baseOffset+field.Offset, field.Type, depth) {
7223 break
7224 }
7225 }
7226 }
7227 writebyte(rtabi.TraceArgsEndAgg)
7228 return true
7229 }
7230
7231 start := 0
7232 if strings.Contains(f.LSym.Name, "[") {
7233 // Skip the dictionary argument - it is implicit and the user doesn't need to see it.
7234 start = 1
7235 }
7236
7237 for _, a := range abiInfo.InParams()[start:] {
7238 if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
7239 break
7240 }
7241 }
7242 writebyte(rtabi.TraceArgsEndSeq)
7243 if wOff > rtabi.TraceArgsMaxLen {
7244 base.Fatalf("ArgInfo too large")
7245 }
7246
7247 return x
7248 }
7249
7250
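// emitWrappedFuncInfo emits, for a wrapper, a funcdata symbol
// identifying the wrapped function.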
7251 func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
7252 if base.Ctxt.Flag_linkshared {
7253 // A relative reference (SymPtrOff) into another shared object doesn't
7254 // work, so skip this under -linkshared.
7255 return
7256 }
7257
7258 wfn := e.curfn.WrappedFunc
7259 if wfn == nil {
7260 return
7261 }
7262
7263 wsym := wfn.Linksym()
7264 x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
7265 objw.SymPtrOff(x, 0, wsym)
7266 x.Set(obj.AttrContentAddressable, true)
7267 })
7268 e.curfn.LSym.Func().WrapInfo = x
7269
7270 // Emit a funcdata pointing at the wrap info data.
7271 p := pp.Prog(obj.AFUNCDATA)
7272 p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
7273 p.To.Type = obj.TYPE_MEM
7274 p.To.Name = obj.NAME_EXTERN
7275 p.To.Sym = x
7276 }
7277
7278
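// genssa appends entries to pp for each instruction in f.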
7279 func genssa(f *ssa.Func, pp *objw.Progs) {
7280 var s State
7281 s.ABI = f.OwnAux.Fn.ABI()
7282
7283 e := f.Frontend().(*ssafn)
7284
7285 s.livenessMap, s.partLiveArgs = liveness.Compute(e.curfn, f, e.stkptrsize, pp)
7286 emitArgInfo(e, f, pp)
7287 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
7288
7289 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
7290 if openDeferInfo != nil {
7291 // This function uses open-coded defers: write out the funcdata
7292 // record that the runtime uses to unwind them.
7293 p := pp.Prog(obj.AFUNCDATA)
7294 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
7295 p.To.Type = obj.TYPE_MEM
7296 p.To.Name = obj.NAME_EXTERN
7297 p.To.Sym = openDeferInfo
7298 }
7299
7300 emitWrappedFuncInfo(e, pp)
7301
7302 // Remember where each block starts.
7303 s.bstart = make([]*obj.Prog, f.NumBlocks())
7304 s.pp = pp
7305 var progToValue map[*obj.Prog]*ssa.Value
7306 var progToBlock map[*obj.Prog]*ssa.Block
7307 var valueToProgAfter []*obj.Prog
7308 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
7309 if gatherPrintInfo {
7310 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
7311 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
7312 f.Logf("genssa %s\n", f.Name)
7313 progToBlock[s.pp.Next] = f.Blocks[0]
7314 }
7315
7316 if base.Ctxt.Flag_locationlists {
7317 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
7318 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
7319 }
7320 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
7321 for i := range valueToProgAfter {
7322 valueToProgAfter[i] = nil
7323 }
7324 }
7325
7326 // If the very first instruction is not tagged as a statement,
7327 // debuggers may attribute it to the previous function in the program.
7328 firstPos := src.NoXPos
7329 for _, v := range f.Entry.Values {
7330 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7331 firstPos = v.Pos
7332 v.Pos = firstPos.WithDefaultStmt()
7333 break
7334 }
7335 }
7336
7337 // inlMarks has an entry for each Prog that implements an inline mark.
7338 // It maps from that Prog to the global inlining id of the inlined body
7339 // which should unwind to this Prog's location.
7340 var inlMarks map[*obj.Prog]int32
7341 var inlMarkList []*obj.Prog
7342
7343 // inlMarksByPos maps from a (column 1) source position to the set of
7344 // Progs that are inline marks at that position.
7345 var inlMarksByPos map[src.XPos][]*obj.Prog
7346
7347 var argLiveIdx int = -1 // argument liveness info index
7348
7349
7350 // These control cache-line alignment of hot blocks: if the required
7351 // portion of a cache line is not available, pad to obtain cache-line
7352 // alignment. Not implemented, or not useful, on all architectures.
7353 var hotAlign, hotRequire int64
7354
7355 if base.Debug.AlignHot > 0 {
7356 switch base.Ctxt.Arch.Name {
7357 // Enable hot-block alignment on a case-by-case basis, with
7358 // benchmarking; so far it has been shown worthwhile only on
7359 // x86, so other architectures are left unaligned.
7362 case "amd64", "386":
7363 // Align hot blocks to 64-byte cache-line boundaries, inserting
7364 // at most 31 bytes of padding.
7366 hotAlign = 64
7367 hotRequire = 31
7368 }
7369 }
7370
7371 // Emit basic blocks.
7372 for i, b := range f.Blocks {
7373
7374 s.lineRunStart = nil
7375 s.SetPos(s.pp.Pos.WithNotStmt()) // It needs a non-empty Pos, but cannot be a statement boundary (yet).
7376
7377 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
7378 // This block starts a PGO-hot region; ask the assembler to align
7379 // it to a cache-line boundary (hotAlign), provided at most
7380 // hotRequire bytes of padding are needed.
7383 p := s.pp.Prog(obj.APCALIGNMAX)
7384 p.From.SetConst(hotAlign)
7385 p.To.SetConst(hotRequire)
7386 }
7387
7388 s.bstart[b.ID] = s.pp.Next
7389
7390 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7391 argLiveIdx = idx
7392 p := s.pp.Prog(obj.APCDATA)
7393 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7394 p.To.SetConst(int64(idx))
7395 }
7396
7397 // Emit values in block.
7398 Arch.SSAMarkMoves(&s, b)
7399 for _, v := range b.Values {
7400 x := s.pp.Next
7401 s.DebugFriendlySetPosFrom(v)
7402
7403 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7404 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7405 }
7406
7407 switch v.Op {
7408 case ssa.OpInitMem:
7409 // memory arg needs no code
7410 case ssa.OpArg:
7411 // input args need no code
7412 case ssa.OpSP, ssa.OpSB:
7413 // nothing to do
7414 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7415 // nothing to do
7416 case ssa.OpGetG:
7417 // nothing to do when there's a g register,
7418 // and checkLower complains if there's not
7419 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7420 // nothing to do; already used by liveness
7421 case ssa.OpPhi:
7422 CheckLoweredPhi(v)
7423 case ssa.OpConvert:
7424 // nothing to do; no-op conversion for liveness
7425 if v.Args[0].Reg() != v.Reg() {
7426 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7427 }
7428 case ssa.OpInlMark:
7429 p := Arch.Ginsnop(s.pp)
7430 if inlMarks == nil {
7431 inlMarks = map[*obj.Prog]int32{}
7432 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7433 }
7434 inlMarks[p] = v.AuxInt32()
7435 inlMarkList = append(inlMarkList, p)
7436 pos := v.Pos.AtColumn1()
7437 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7438 firstPos = src.NoXPos
7439
7440 default:
7441 // If the function's first statement position is still pending, attach it to this first real instruction.
7442 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7443 s.SetPos(firstPos)
7444 firstPos = src.NoXPos
7445 }
7446
7447 // Attach this safe point to the next instruction.
7448 s.pp.NextLive = s.livenessMap.Get(v)
7449 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7450
7451 // Let the backend handle it.
7452 Arch.SSAGenValue(&s, v)
7453 }
7454
7455 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7456 argLiveIdx = idx
7457 p := s.pp.Prog(obj.APCDATA)
7458 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7459 p.To.SetConst(int64(idx))
7460 }
7461
7462 if base.Ctxt.Flag_locationlists {
7463 valueToProgAfter[v.ID] = s.pp.Next
7464 }
7465
7466 if gatherPrintInfo {
7467 for ; x != s.pp.Next; x = x.Link {
7468 progToValue[x] = v
7469 }
7470 }
7471 }
7472 // An empty block that loops back to itself gets a NOP so it contains an instruction with a usable source position.
7473 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7474 p := Arch.Ginsnop(s.pp)
7475 p.Pos = p.Pos.WithIsStmt()
7476 if b.Pos == src.NoXPos {
7477 b.Pos = p.Pos // It needs a file, otherwise a no-file non-zero line causes confusion. See issue 35652.
7478 if b.Pos == src.NoXPos {
7479 b.Pos = s.pp.Text.Pos // Sometimes p.Pos is empty. See issue 35695.
7480 }
7481 }
7482 b.Pos = b.Pos.WithBogusLine() // Debuggers are not good about infinite loops; force a change in line number.
7483 }
7484
7485
7486 // Mark the unsafe-ness of the block-ending instructions using the
7487 // block's own liveness info, since they have no associated value.
7488
7489 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7490
7491 // Emit control flow instructions for block.
7492 var next *ssa.Block
7493 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
7494 // If -N, leave next==nil so every block with successors
7495 // ends in a JMP (except call blocks - plive doesn't like
7496 // select{send,recv} followed by a JMP call). Helps keep
7497 // line numbers for otherwise empty blocks.
7498 next = f.Blocks[i+1]
7499 }
7500 x := s.pp.Next
7501 s.SetPos(b.Pos)
7502 Arch.SSAGenBlock(&s, b, next)
7503 if gatherPrintInfo {
7504 for ; x != s.pp.Next; x = x.Link {
7505 progToBlock[x] = b
7506 }
7507 }
7508 }
7509 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
7510 // We need the return address of a panic call to
7511 // still be inside the function in question. So if
7512 // it ends in a call which doesn't return, add a
7513 // nop (which will never execute) after the call.
7514 Arch.Ginsnop(s.pp)
7515 }
7516 if openDeferInfo != nil {
7517 // When doing open-coded defers, generate a disconnected call to
7518 // deferreturn and a return. This will be used during panic
7519 // recovery to unwind the stack and return back to the runtime.
7520 s.pp.NextLive = s.livenessMap.DeferReturn
7521 p := s.pp.Prog(obj.ACALL)
7522 p.To.Type = obj.TYPE_MEM
7523 p.To.Name = obj.NAME_EXTERN
7524 p.To.Sym = ir.Syms.Deferreturn
7525
7526 // Load results into registers, so that when a deferred function
7527 // recovers a panic, it returns to the caller with the right results.
7528 // The results are already in memory, because they are not SSA'd
7529 // when the function has defers (see canSSAName).
7530 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7531 n := o.Name
7532 rts, offs := o.RegisterTypesAndOffsets()
7533 for i := range o.Registers {
7534 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7535 }
7536 }
7537
7538 s.pp.Prog(obj.ARET)
7539 }
7540
7541 if inlMarks != nil {
7542 hasCall := false
7543
7544 // We have some inline marks. Try to find other instructions we're
7545 // going to emit anyway, and use those instructions instead of the
7546 // inline marks.
7547 for p := s.pp.Text; p != nil; p = p.Link {
7548 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
7549 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
7550 // Don't use 0-sized instructions as inline marks, because we need
7551 // to identify inline mark instructions by pc offset.
7552 // (Some of these instructions are sometimes zero-sized, sometimes not.
7553 // We must not use anything that even might be zero-sized.)
7554 // TODO: are there others?
7555 continue
7556 }
7557 if _, ok := inlMarks[p]; ok {
7558 // Don't use inline marks themselves. We don't know
7559 // whether they will be zero-sized or not yet.
7560 continue
7561 }
7562 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7563 hasCall = true
7564 }
7565 pos := p.Pos.AtColumn1()
7566 marks := inlMarksByPos[pos]
7567 if len(marks) == 0 {
7568 continue
7569 }
7570 for _, m := range marks {
7571 // Use this real instruction as the inline mark: promote its
7572 // position to a statement and record it in the inlining tree
7573 // in place of the mark.
7574 p.Pos = p.Pos.WithIsStmt()
7575 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
7576 // Make the inline mark a real nop, so it doesn't generate any code.
7577 m.As = obj.ANOP
7578 m.Pos = src.NoXPos
7579 m.From = obj.Addr{}
7580 m.To = obj.Addr{}
7581 }
7582 delete(inlMarksByPos, pos)
7583
7584 // Any unmatched inline marks now need to be added to the inlining tree (and will generate a nop instruction).
7585 for _, p := range inlMarkList {
7586 if p.As != obj.ANOP {
7587 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7588 }
7589 }
7590
7591 if e.stksize == 0 && !hasCall {
7592 // Frameless leaf function. If its first real instruction carries an
7593 // inlined position, insert a NOP with the function's own position at
7594 // the entry, so the entry PC is attributed to the function itself
7595 // rather than to an inlined callee.
7596
7598 for p := s.pp.Text; p != nil; p = p.Link {
7599 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7600 continue
7601 }
7602 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7603
7604 nop := Arch.Ginsnop(s.pp)
7605 nop.Pos = e.curfn.Pos().WithIsStmt()
7606
7607 // Unfortunately, Ginsnop puts the instruction at the end of
7608 // the list. Move it up to just before p.
7609
7610 // Unlink nop from its current location.
7611 for x := s.pp.Text; x != nil; x = x.Link {
7612 if x.Link == nop {
7613 x.Link = nop.Link
7614 break
7615 }
7616 }
7617 // Link nop in just before p.
7618 for x := s.pp.Text; x != nil; x = x.Link {
7619 if x.Link == p {
7620 nop.Link = p
7621 x.Link = nop
7622 break
7623 }
7624 }
7625 }
7626 break
7627 }
7628 }
7629 }
7630
7631 if base.Ctxt.Flag_locationlists {
7632 debugInfo := e.curfn.DebugInfo.(*ssa.FuncDebug)
7634 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7635 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7636 } else {
7637 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7638 }
7639 bstart := s.bstart
7640 idToIdx := make([]int, f.NumBlocks())
7641 for i, b := range f.Blocks {
7642 idToIdx[b.ID] = i
7643 }
7644
7645 // GetPC maps a (block, value) ID pair to a PC; it is invoked after
7646 // assembly, once Prog.Pc holds real PC offsets.
7647 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7648 switch v {
7649 case ssa.BlockStart.ID:
7650 if b == f.Entry.ID {
7651 return 0 // Start at the very beginning, at the assembler-generated prologue.
7652 // This should only happen for function args (ssa.OpArg).
7653 }
7654 return bstart[b].Pc
7655 case ssa.BlockEnd.ID:
7656 blk := f.Blocks[idToIdx[b]]
7657 nv := len(blk.Values)
7658 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7659 case ssa.FuncEnd.ID:
7660 return e.curfn.LSym.Size
7661 default:
7662 return valueToProgAfter[v].Pc
7663 }
7664 }
7665 }
7666
7667 // Resolve branches, and relax DefaultStmt into NotStmt.
7668 for _, br := range s.Branches {
7669 br.P.To.SetTarget(s.bstart[br.B.ID])
7670 if br.P.Pos.IsStmt() != src.PosIsStmt {
7671 br.P.Pos = br.P.Pos.WithNotStmt()
7672 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7673 br.P.Pos = br.P.Pos.WithNotStmt()
7674 }
7675
7676 }
7677
7678
7679 for _, jt := range s.JumpTables {
7680
7681 targets := make([]*obj.Prog, len(jt.Succs))
7682 for i, e := range jt.Succs {
7683 targets[i] = s.bstart[e.Block().ID]
7684 }
7685
7686
7687
7688 fi := s.pp.CurFunc.LSym.Func()
7689 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7690 }

	if e.log { // spew to stdout
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				f.Logf("# %s\n", filename)
			}

			var s string
			if v, ok := progToValue[p]; ok {
				s = v.String()
			} else if b, ok := progToBlock[p]; ok {
				s = b.String()
			} else {
				s = "   " // most value and branch strings are 2-3 characters long
			}
			f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
		}
	}
	if f.HTMLWriter != nil { // spew to ssa.html
		var buf strings.Builder
		buf.WriteString("<code>")
		buf.WriteString("<dl class=\"ssa-gen\">")
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			// Don't spam every line with the file name, which is often huge.
			// Only print changes, and "unknown" is not a change.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
				buf.WriteString(html.EscapeString("# " + filename))
				buf.WriteString("</dd>")
			}

			buf.WriteString("<dt class=\"ssa-prog-src\">")
			if v, ok := progToValue[p]; ok {
				buf.WriteString(v.HTML())
			} else if b, ok := progToBlock[p]; ok {
				buf.WriteString("<b>" + b.HTML() + "</b>")
			}
			buf.WriteString("</dt>")
			buf.WriteString("<dd class=\"ssa-prog\">")
			fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
			buf.WriteString("</dd>")
		}
		buf.WriteString("</dl>")
		buf.WriteString("</code>")
		f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
	}
	if ssa.GenssaDump[f.Name] {
		fi := f.DumpFileForPhase("genssa")
		if fi != nil {

			// inliningDiffers reports whether any filename changes, or any
			// line number except the innermost (last index) changes.
			inliningDiffers := func(a, b []src.Pos) bool {
				if len(a) != len(b) {
					return true
				}
				for i := range a {
					if a[i].Filename() != b[i].Filename() {
						return true
					}
					if i != len(a)-1 && a[i].Line() != b[i].Line() {
						return true
					}
				}
				return false
			}

			var allPosOld []src.Pos
			var allPos []src.Pos

			for p := s.pp.Text; p != nil; p = p.Link {
				if p.Pos.IsKnown() {
					allPos = allPos[:0]
					p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
					if inliningDiffers(allPos, allPosOld) {
						for _, pos := range allPos {
							fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
						}
						allPos, allPosOld = allPosOld, allPos // swap, so the old slice's storage is reused next iteration
					}
				}

				var s string
				if v, ok := progToValue[p]; ok {
					s = v.String()
				} else if b, ok := progToBlock[p]; ok {
					s = b.String()
				} else {
					s = "   " // most value and branch strings are 2-3 characters long
				}
				fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
			}
			fi.Close()
		}
	}

	defframe(&s, e, f)

	f.HTMLWriter.Close()
	f.HTMLWriter = nil
}

// defframe fills in the frame and argument sizes of the function's TEXT
// instruction, spills register arguments whose stack slots are live but may
// be only partially initialized, and inserts code to zero ambiguously live
// variables at function entry.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live. That is, the backend's liveness analysis considers
	// the whole named slot live, but on entry only some of the registers
	// backing it have been stored into it. Spill the remaining
	// pointer-containing registers here, so that the garbage collector
	// never sees uninitialized pointer words in a slot it believes live.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, record which parts are already spilled before they may
		// be live. Look for spills in the entry block, up to the first
		// safepoint (call).
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then, insert code to spill registers if not already.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so that the
	// garbage collector only sees initialized values when it
	// looks for pointers.
	var lo, hi int64

	// Opaque state for backend to use. Current backends use it to
	// keep track of which helper registers have been zeroed.
	var state uint32

	// Iterate through declarations. Autos are sorted in decreasing
	// frame offset order.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with range we already have.
			lo = n.FrameOffset()
			continue
		}

		// Zero old range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Set new range.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
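
// A worked example of the merging above, with made-up offsets rather than any
// real frame layout: assume RegSize = 8 and two ambiguously live autos,
// visited in decreasing frame offset order, occupying [40,56) and [16,32).
// The first sets lo = 40, hi = 56. For the second, 16+16 = 32 >= 40-16 = 24,
// so the ranges merge (lo becomes 16) and the final call zeroes
// [frame+16, frame+56) with one ZeroRange, treating the 8-byte hole at
// [32,40) as cheaper to zero than a second call would be.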

// IndexJump describes one conditional jump used to model a multi-instruction
// branch: Jump is the machine opcode, and Index selects the successor block
// that the jump targets.
type IndexJump struct {
	Jump  obj.As
	Index int
}

// oneJump emits the conditional branch described by jump, from b to the
// successor selected by jump.Index.
func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
	p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
	p.Pos = b.Pos
}

// CombJump generates combinational instructions (2 instructions) for the two
// following blocks.
func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
	switch next {
	case b.Succs[0].Block():
		s.oneJump(b, &jumps[0][0])
		s.oneJump(b, &jumps[0][1])
	case b.Succs[1].Block():
		s.oneJump(b, &jumps[1][0])
		s.oneJump(b, &jumps[1][1])
	default:
		var q *obj.Prog
		if b.Likely != ssa.BranchUnlikely {
			s.oneJump(b, &jumps[1][0])
			s.oneJump(b, &jumps[1][1])
			q = s.Br(obj.AJMP, b.Succs[1].Block())
		} else {
			s.oneJump(b, &jumps[0][0])
			s.oneJump(b, &jumps[0][1])
			q = s.Br(obj.AJMP, b.Succs[0].Block())
		}
		q.Pos = b.Pos
	}
}
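
// A hypothetical use of CombJump (opcode names invented for illustration): a
// backend lowering a floating-point "less than or equal" block, which needs
// two machine branches, might write
//
//	s.CombJump(b, next, &[2][2]IndexJump{
//		// next == b.Succs[0]: branch away to Succs[1] on "greater"
//		// or "unordered", then fall through.
//		{{Jump: AJGT, Index: 1}, {Jump: AJUN, Index: 1}},
//		// next == b.Succs[1]: branch to Succs[0] on "less" or
//		// "equal", then fall through.
//		{{Jump: AJLT, Index: 0}, {Jump: AJEQ, Index: 0}},
//	})
//
// When neither successor is next, the pair that branches to the likely
// successor is emitted, followed by an unconditional jump to the other.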

// AddAux adds the offset in the aux fields (AuxInt and Aux) of v to a.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
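
// AddAux2 is like AddAux, but uses the given offset in place of v.AuxInt.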
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// add integer offset
	a.Offset += offset

	// If no additional symbol offset, we're done.
	if v.Aux == nil {
		return
	}
	// Add symbol's offset from its base register.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
		} else {
			a.Name = obj.NAME_AUTO
		}
		a.Sym = n.Linksym()
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}

// extendIndex extends idx to a full int width.
// It panics with the given kind if idx does not fit in an int (only possible
// on 32-bit archs).
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		return idx
	}
	if size > s.config.PtrSize {
		// truncate 64-bit indexes on 32-bit pointer archs. Test the
		// high word and branch to out-of-bounds failure if it is not 0.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Extend value to the required size.
	var op ssa.Op
	if idx.Type.IsSigned() {
		// 10*size+PtrSize encodes the (index size, pointer size) pair:
		// e.g. 24 means a 2-byte index on a 4-byte-pointer arch.
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize { // same encoding as above
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}

// CheckLoweredPhi checks that regalloc and stackalloc correctly handled phi values.
// Called during ssaGenValue.
func CheckLoweredPhi(v *ssa.Value) {
	if v.Op != ssa.OpPhi {
		v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
	}
	if v.Type.IsMemory() {
		return
	}
	f := v.Block.Func
	loc := f.RegAlloc[v.ID]
	for _, a := range v.Args {
		if aloc := f.RegAlloc[a.ID]; aloc != loc {
			v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
		}
	}
}

// CheckLoweredGetClosurePtr checks that v is the first instruction in the
// function's entry block, except for incoming in-register arguments.
// The output of LoweredGetClosurePtr is generally hardwired to the correct
// register, and that register contains the closure pointer on closure entry.
func CheckLoweredGetClosurePtr(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
	}
	for _, w := range entry.Values {
		if w == v {
			break
		}
		switch w.Op {
		case ssa.OpArgIntReg, ssa.OpArgFloatReg:
			// okay
		default:
			base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
		}
	}
}

// CheckArgReg ensures that v is in the function's entry block.
func CheckArgReg(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
	}
}

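// AddrAuto fills in a with the address of the stack slot holding v's auto
// variable (or parameter spill slot): SP-relative, with the symbol, offset,
// and name class derived from the variable.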
func AddrAuto(a *obj.Addr, v *ssa.Value) {
	n, off := ssa.AutoVar(v)
	a.Type = obj.TYPE_MEM
	a.Sym = n.Linksym()
	a.Reg = int16(Arch.REGSP)
	a.Offset = n.FrameOffset() + off
	if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
		a.Name = obj.NAME_PARAM
	} else {
		a.Name = obj.NAME_AUTO
	}
}

// Call returns a new CALL instruction for the SSA value v.
// It uses PrepareCall to prepare the call.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	pPosIsStmt := s.pp.Pos.IsStmt() // the statement-ness of the call comes from ssaGenState
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// TODO(mdempsky): Can these differences be eliminated?
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}

// TailCall returns a new tail call instruction for the SSA value v.
// It is like Call, but for a tail call.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
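
// In the assembler backends, a RET whose target symbol is set is not encoded
// as a plain return: the epilogue is followed by a jump to that symbol (the
// "retjmp" case in cmd/internal/obj), which is what makes this a tail call
// that runs on the caller's frame.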

// PrepareCall prepares to emit a CALL instruction for v and does call-related
// bookkeeping. It must be called immediately before emitting the actual CALL
// instruction, since it emits PCDATA for the stack map at the call (calls are
// safe points).
func (s *State) PrepareCall(v *ssa.Value) {
	idx := s.livenessMap.Get(v)
	if !idx.StackMapValid() {
		// See Liveness.hasStackMap.
		if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
			base.Fatalf("missing stack map index for %v", v.LongString())
		}
	}

	call, ok := v.Aux.(*ssa.AuxCall)

	if ok {
		// Record call graph information for nowritebarrierrec
		// analysis.
		if nowritebarrierrecCheck != nil {
			nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
		}
	}

	if s.maxarg < v.AuxInt {
		s.maxarg = v.AuxInt
	}
}

// UseArgs records the fact that an instruction needs a certain amount of
// callee args space for its use.
func (s *State) UseArgs(n int64) {
	if s.maxarg < n {
		s.maxarg = n
	}
}

// fieldIdx finds the index of the field referred to by the ODOT node n.
func fieldIdx(n *ir.SelectorExpr) int {
	t := n.X.Type()
	if !t.IsStruct() {
		panic("ODOT's LHS is not a struct")
	}

	for i, f := range t.Fields() {
		if f.Sym == n.Sel {
			if f.Offset != n.Offset() {
				panic("field offset doesn't match")
			}
			return i
		}
	}
	panic(fmt.Sprintf("can't find field in expr %v\n", n))

	// TODO: keep the result of this function somewhere in the ODOT Node
	// so we don't have to recompute it each time we need it.
}

// ssafn holds frontend information about a function that the backend is
// processing. It also exports a bunch of compiler services for the ssa
// backend.
type ssafn struct {
	curfn      *ir.Func
	strings    map[string]*obj.LSym // map from constant string to data symbols
	stksize    int64                // stack size for current frame
	stkptrsize int64                // prefix of stack containing pointers

	// alignment for current frame.
	// NOTE: when stkalign > PtrSize, currently this only ensures the offsets of
	// objects in the stack frame are aligned. The stack pointer itself is still
	// aligned only to PtrSize.
	stkalign int64

	log bool // print ssa debug to the stdout
}

// StringData returns a symbol which
// is the data component of a global string constant containing s.
func (e *ssafn) StringData(s string) *obj.LSym {
	if aux, ok := e.strings[s]; ok {
		return aux
	}
	if e.strings == nil {
		e.strings = make(map[string]*obj.LSym)
	}
	data := staticdata.StringSym(e.curfn.Pos(), s)
	e.strings[s] = data
	return data
}

// SplitSlot returns a slot representing the data of parent starting at offset.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	if node.Class != ir.PAUTO || node.Addrtaken() {
		// addressed things and non-autos retain their parents (i.e., cannot truly be split)
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever)
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}
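
// As an illustration (caller-chosen names, not fixed by this method): when
// the ssa backend decomposes a string-typed auto "s", it requests slots such
// as SplitSlot(&slot, ".ptr", 0, ptrType) and SplitSlot(&slot, ".len",
// PtrSize, intType), yielding pseudo-variables named "s.ptr" and "s.len".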

// Logf logs a message from the compiler.
func (e *ssafn) Logf(msg string, args ...interface{}) {
	if e.log {
		fmt.Printf(msg, args...)
	}
}

func (e *ssafn) Log() bool {
	return e.log
}

// Fatalf reports a compiler error and exits.
func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
	base.Pos = pos
	nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
	base.Fatalf("'%s': "+msg, nargs...)
}

// Warnl reports a "warning", which is usually flag-triggered
// debugging output.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}

func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}

func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}

// Syslook returns the symbol of the named runtime support routine.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil
}

func (e *ssafn) Func() *ir.Func {
	return e.curfn
}

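// clobberBase returns the innermost expression that an assignment would
// clobber in full: it unwraps ODOT on single-field structs and OINDEX on
// single-element arrays, since writing the only field or element overwrites
// the whole composite.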
func clobberBase(n ir.Node) ir.Node {
	if n.Op() == ir.ODOT {
		n := n.(*ir.SelectorExpr)
		if n.X.Type().NumFields() == 1 {
			return clobberBase(n.X)
		}
	}
	if n.Op() == ir.OINDEX {
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
			return clobberBase(n.X)
		}
	}
	return n
}

// callTargetLSym returns the correct LSym to call 'callee' using its ABI.
func callTargetLSym(callee *ir.Name) *obj.LSym {
	if callee.Func == nil {
		// Some names (e.g. references to functions whose bodies were
		// never seen by this compilation) have no Func information;
		// fall back to the name's default linker symbol.
		return callee.Linksym()
	}

	return callee.LinksymABI(callee.Func.ABI)
}

// min8 returns the smaller of a and b.
func min8(a, b int8) int8 {
	if a < b {
		return a
	}
	return b
}

// max8 returns the larger of a and b.
func max8(a, b int8) int8 {
	if a > b {
		return a
	}
	return b
}

// deferStructFnField is the field index of _defer.fn.
const deferStructFnField = 4

var deferType *types.Type

// deferstruct returns a type interchangeable with runtime._defer.
// Make sure this stays in sync with runtime/runtime2.go:_defer.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// Note: the types here don't really matter. Defer structures
		// are always scanned explicitly during stack copying and GC,
		// so we make them uintptr type even though they are real pointers.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// build struct holding the above fields
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}

// SpillSlotAddr uses LocalSlot information to initialize an obj.Addr.
// The resulting addr is used in a non-standard context: in the prologue
// of a function, before the frame has been constructed, so the standard
// addressing for the parameters would be wrong.
func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
	return obj.Addr{
		Name:   obj.NAME_NONE,
		Type:   obj.TYPE_MEM,
		Reg:    baseReg,
		Offset: spill.Offset + extraOffset,
	}
}

var (
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)