// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "fmt"
11 "go/constant"
12 "html"
13 "internal/buildcfg"
14 "os"
15 "path/filepath"
16 "sort"
17 "strings"
18
19 "cmd/compile/internal/abi"
20 "cmd/compile/internal/base"
21 "cmd/compile/internal/ir"
22 "cmd/compile/internal/liveness"
23 "cmd/compile/internal/objw"
24 "cmd/compile/internal/reflectdata"
25 "cmd/compile/internal/rttype"
26 "cmd/compile/internal/ssa"
27 "cmd/compile/internal/staticdata"
28 "cmd/compile/internal/typecheck"
29 "cmd/compile/internal/types"
30 "cmd/internal/obj"
31 "cmd/internal/objabi"
32 "cmd/internal/src"
33 "cmd/internal/sys"
34
35 rtabi "internal/abi"
36 )
37
var ssaConfig *ssa.Config // shared backend configuration, built once by InitConfig
var ssaCaches []ssa.Cache // per-worker scratch caches, one per concurrent backend (-c)

var ssaDump string     // function name to dump SSA for, from $GOSSAFUNC (see InitEnv)
var ssaDir string      // directory for SSA dump files, from $GOSSADIR
var ssaDumpStdout bool // whether to dump to stdout too (GOSSAFUNC ends in "+")
var ssaDumpCFG string  // CFG selector after ":" in GOSSAFUNC, passed to the HTML writer
const ssaDumpFile = "ssa.html"

// ssaDumpInlined records functions inlined into the function being dumped,
// so their sources can be shown in the HTML output (see dumpSourcesColumn).
var ssaDumpInlined []*ir.Func
49
50 func DumpInline(fn *ir.Func) {
51 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
52 ssaDumpInlined = append(ssaDumpInlined, fn)
53 }
54 }
55
56 func InitEnv() {
57 ssaDump = os.Getenv("GOSSAFUNC")
58 ssaDir = os.Getenv("GOSSADIR")
59 if ssaDump != "" {
60 if strings.HasSuffix(ssaDump, "+") {
61 ssaDump = ssaDump[:len(ssaDump)-1]
62 ssaDumpStdout = true
63 }
64 spl := strings.Split(ssaDump, ":")
65 if len(spl) > 1 {
66 ssaDump = spl[0]
67 ssaDumpCFG = spl[1]
68 }
69 }
70 }
71
// InitConfig builds the shared SSA backend state: commonly needed pointer
// types, the ssaConfig itself, the per-worker ssaCaches, and the runtime
// symbols (ir.Syms, bounds-check helpers) that generated code refers to.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that are uncommon in the frontend but
	// common in the backend. They are created while the NewPtr cache is
	// still enabled — it is turned off just below — so later backend
	// requests hit the cache instead of allocating.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // pointer to the runtime map type
	_ = types.NewPtr(deferstruct())                                         // pointer to the defer record type
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC) // one cache per concurrent compile worker (-c)

	// Set up the runtime functions and variables that generated code calls
	// or reads.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.AssertI2I = typecheck.LookupRuntimeFunc("assertI2I")
	ir.Syms.AssertI2I2 = typecheck.LookupRuntimeFunc("assertI2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")

	// Bounds-check panic helpers. Wasm uses the "goPanic*" Go-level
	// variants; every other architecture uses the "panic*" variants.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	// On 32-bit platforms also register the "Extend" variants
	// (presumably for 64-bit index values — confirm against the runtime).
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm-specific runtime helpers.
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
219
220
221
222
223
224
225
226
227 func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
228 return ssaConfig.ABI0.Copy()
229 }
230
231
232
233 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
234 if buildcfg.Experiment.RegabiArgs {
235
236 if fn == nil {
237 return abi1
238 }
239 switch fn.ABI {
240 case obj.ABI0:
241 return abi0
242 case obj.ABIInternal:
243
244
245 return abi1
246 }
247 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
248 panic("not reachable")
249 }
250
251 a := abi0
252 if fn != nil {
253 if fn.Pragma&ir.RegisterParams != 0 {
254 a = abi1
255 }
256 }
257 return a
258 }
259
260
261
262
263
264
265
266
267
268
269
270
// emitOpenDeferInfo emits a content-addressable symbol named
// <fn>.opendefer describing the function's open-coded defers: two
// uvarints holding the negated frame offset of the deferBits temp and
// the negated frame offset of the first defer's closure slot. It relies
// on the stack layout pass having placed the closure slots contiguously,
// one pointer apart — verified by the loop below.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the closure slots were laid out contiguously in
	// declaration order, PtrSize bytes apart.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative; negate so they encode as small uvarints.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
291
292
293
// buildssa builds an SSA function for fn and returns it. worker indexes
// the ssaCaches entry to reuse; isPgoHot marks the function hot per PGO.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// Match fn against GOSSAFUNC. The value may be a bare name, a
	// package-qualified name, or carry an ABI wrapper marker "<N>"
	// (rewritten below into the "," form).
	if strings.Contains(ssaDump, name) { // heuristic prefilter; exact matching below
		nameOptABI := name
		if strings.Contains(ssaDump, ",") { // ABI number already specified as a suffix
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") { // "name<N>" form: rewrite to "name,N"
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "foo"
			pkgDotName == ssaDump || // "pkg.foo"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "path/to/pkg.foo"
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the IR dump so it can be attached to the HTML output.
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		// Some runtime-adjacent packages opt out of race instrumentation
		// of memory accesses; enter/exit instrumentation still applies.
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			// Best effort; a failure will surface when the writer opens the file.
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether the function's defers can be open-coded (expanded
	// inline at each exit) rather than dispatched through the runtime.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// NOTE(review): open-coded defers are disabled for shared/dynlink
		// 386 builds — presumably an interaction with the PIC thunk
		// sequences on that target; confirm before changing.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Race enter/exit instrumentation brackets the function body;
		// open-coded defers are disabled in that case.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Open-coded defers require all results to live on the stack
		// (heap-allocated results would not be updated correctly at the
		// inlined exits).
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Each defer is expanded at each return, so code size grows with
		// the product; cap it to avoid blowup.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and its stack slot. It tracks
		// which open-coded defers are active; it must be addressable so
		// the runtime can read it during panic recovery.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// OpConst8 with default AuxInt zero: all defers start inactive.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Mark the slot live so it is not dead-code-eliminated; its value
		// matters to the runtime even though compiled code may not read it.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record output parameters that live in registers for debug info.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to avoid taking the address before
			// the declaration is reached
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // debugging information only
			} else {
				// Not SSA-able as a whole; register-passed parameters
				// must still be spilled to their stack slots.
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// The type is SSA-able: materialize the arg value
						// and store it whole.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Too big/complex for a single value: store each
						// register piece individually.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// A small by-value capture that is never address-taken can be
			// promoted to a plain local (PAUTO) and handled as SSA.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			if !n.Byval() {
				// Captured by reference: the closure slot holds a pointer
				// to the variable; load it to get the variable's address.
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to SSA.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fall through to the exit if control reaches the end of the body.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call into the ssa package to compile the function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	// Frame offsets are only known after AllocFrame, so the open-coded
	// defer metadata is emitted here.
	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record the register/stack spill locations of register-passed input
	// parameters for later use (e.g. by the prologue's spill code).
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
600
601 func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
602 typs, offs := paramAssignment.RegisterTypesAndOffsets()
603 for i, t := range typs {
604 if pointersOnly && !t.IsPtrShaped() {
605 continue
606 }
607 r := paramAssignment.Registers[i]
608 o := offs[i]
609 op, reg := ssa.ArgOpAndRegisterFor(r, abi)
610 aux := &ssa.AuxNameOffset{Name: n, Offset: o}
611 v := s.newValue0I(op, t, reg)
612 v.Aux = aux
613 p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
614 s.store(t, p, v)
615 }
616 }
617
618
619
620
621
622
623
// zeroResults zeroes the stack-resident return values at function entry.
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// Result escapes to the heap; its storage comes from
			// paramsToHeap/newObject instead. NOTE(review): assumes that
			// allocation yields zeroed memory — confirm.
			continue
		}
		// Zero the stack-resident result.
		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			// Non-SSA-able: zero the memory directly, announcing the
			// variable's definition first when the liveness machinery
			// cares about it (pointers, or merge candidates).
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
644
645
646
647 func (s *state) paramsToHeap() {
648 do := func(params []*types.Field) {
649 for _, f := range params {
650 if f.Nname == nil {
651 continue
652 }
653 n := f.Nname.(*ir.Name)
654 if ir.IsBlank(n) || n.OnStack() {
655 continue
656 }
657 s.newHeapaddr(n)
658 if n.Class == ir.PPARAM {
659 s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
660 }
661 }
662 }
663
664 typ := s.curfn.Type()
665 do(typ.Recvs())
666 do(typ.Params())
667 do(typ.Results())
668 }
669
670
// newHeapaddr allocates heap storage for n and records its address.
func (s *state) newHeapaddr(n *ir.Name) {
	s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
}
674
675
676
// setHeapaddr creates a new local variable named "&n" holding ptr (the
// heap address of n) and records it as n.Heapaddr. ptr must be of type
// *T where T is n's type.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the pointer variable that will hold n's heap address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
695
696
697 func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
698 if typ.Size() == 0 {
699 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
700 }
701 if rtype == nil {
702 rtype = s.reflectType(typ)
703 }
704 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
705 }
706
// checkPtrAlignment emits a call to runtime.checkptrAlignment for the
// unsafe.Pointer conversion n, checking pointer v. count, if non-nil,
// is the number of array elements being addressed (n's element type
// must then be an array); a nil count means a single element.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// Check that (1) the smallest possible address can hold the object
	// and (2) no overflow check is needed: alignment 1 with at most one
	// (or a zero/one-byte) element can never fail.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	// Obtain the element's runtime type, preferring the precomputed
	// expression when the frontend supplied one.
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
737
738
739
740 func (s *state) reflectType(typ *types.Type) *ssa.Value {
741
742
743 lsym := reflectdata.TypeLinksym(typ)
744 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
745 }
746
747 func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
748
749 fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
750 targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
751 if err != nil {
752 writer.Logf("cannot read sources for function %v: %v", fn, err)
753 }
754
755
756 var inlFns []*ssa.FuncLines
757 for _, fi := range ssaDumpInlined {
758 elno := fi.Endlineno
759 fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
760 fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
761 if err != nil {
762 writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
763 continue
764 }
765 inlFns = append(inlFns, fnLines)
766 }
767
768 sort.Sort(ssa.ByTopo(inlFns))
769 if targetFn != nil {
770 inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
771 }
772
773 writer.WriteSources("sources", inlFns)
774 }
775
776 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
777 f, err := os.Open(os.ExpandEnv(file))
778 if err != nil {
779 return nil, err
780 }
781 defer f.Close()
782 var lines []string
783 ln := uint(1)
784 scanner := bufio.NewScanner(f)
785 for scanner.Scan() && ln <= end {
786 if ln >= start {
787 lines = append(lines, scanner.Text())
788 }
789 ln++
790 }
791 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
792 }
793
794
795
796
// updateUnsetPredPos propagates position information from b to each of
// b's predecessors that lacks a position, recurring so the fix spreads
// backwards through chains of position-less blocks. b itself must
// already have a position.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	// bestPos is computed lazily, only if some predecessor needs it.
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Prefer the position of b's earliest value that
					// carries a meaningful position over b's own.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// p now has a position, so its own predecessors can be fixed.
		s.updateUnsetPredPos(p)
	}
}
825
826
// openDeferInfo describes one open-coded defer in the current function.
type openDeferInfo struct {
	// n is the defer's call expression.
	n *ir.CallExpr

	// closure is the SSA value of the deferred function/closure, if one
	// was materialized.
	closure *ssa.Value

	// closureNode is the stack slot holding the deferred closure; its
	// frame offset is encoded by emitOpenDeferInfo, which requires these
	// slots to be laid out contiguously.
	closureNode *ir.Name
}
838
// state holds the in-progress conversion of a single function from IR
// to SSA form.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	// NOTE(review): set/read by statement lowering outside this chunk.
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current location where we're interpreting the AST; nil between
	// startBlock and endBlock pairs
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable
	// symbol to ssa value); reset by startBlock, captured by endBlock
	vars map[ir.Node]*ssa.Value

	// fwdVars: values for variables referenced before they are defined
	// in the current block; cleared on startBlock. NOTE(review):
	// presumably resolved by insertPhis — confirm.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by block
	// ID; filled in by endBlock
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables (see buildssa)
	decladdrs map[*ir.Name]*ssa.Value

	// starting values: memory, stack pointer, and globals base
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// open-coded defer state: address and temp of the deferBits bitmask
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack (see pushLine/popLine/peekPos)
	line []src.XPos

	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number, keyed so
	// duplicate panic calls can share a block
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool // fn has the cgo_unsafe_args pragma
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool // soft-float mode (from the config)
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit (race)
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order. Hence, at exit we should run these defers in
	// reverse order of this list.
	openDefers []*openDeferInfo

	// Shared deferreturn/exit state for non-open-coded defers.
	// NOTE(review): maintained by exit-path lowering outside this chunk.
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	prevCall *ssa.Value // the most recently emitted call value
}
914
// funcLine identifies a call to a given function at a given source
// position; used as the key of state.panics to deduplicate panic calls.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
920
// ssaLabel holds the blocks associated with a Go label: its jump
// target and, for labeled loops/switches, the break and continue
// targets.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
926
927
928 func (s *state) label(sym *types.Sym) *ssaLabel {
929 lab := s.labels[sym.Name]
930 if lab == nil {
931 lab = new(ssaLabel)
932 s.labels[sym.Name] = lab
933 }
934 return lab
935 }
936
// Logging and error-reporting helpers: each delegates to the underlying
// ssa.Func / frontend, supplying the current position where needed.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...interface{}) {
	// Report at the position currently on top of the line stack.
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }
944
// ssaMarker returns a dummy *ir.Name used only as a key for pseudo
// variables (memory, pointer, length, ...) in the vars/fwdVars maps.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
948
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary pseudo-variables used during SSA
	// construction
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
962
963
964 func (s *state) startBlock(b *ssa.Block) {
965 if s.curBlock != nil {
966 s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
967 }
968 s.curBlock = b
969 s.vars = map[ir.Node]*ssa.Value{}
970 for n := range s.fwdVars {
971 delete(s.fwdVars, n)
972 }
973 }
974
975
976
977
// endBlock marks the end of generating code for the current block and
// returns that (former) block. It returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	// Record the block's final variable values, indexed by block ID.
	// NOTE(review): presumably consumed by insertPhis — confirm.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// A block with no meaningful position of its own is left
		// position-less here; updateUnsetPredPos fills it in later from
		// a successor.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}
999
1000
// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// The frontend may emit nodes with no position; reuse the
		// enclosing (parent) position in that case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}
1015
1016
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}
1020
1021
// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1025
1026
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}
1030
1031
// newValue0A adds a new value with no arguments and an aux value to the
// current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}
1035
1036
// newValue0I adds a new value with no arguments and an auxint value to
// the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}
1040
1041
// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}
1045
1046
// newValue1A adds a new value with one argument and an aux value to the
// current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}
1050
1051
1052
1053
1054 func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
1055 if isStmt {
1056 return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
1057 }
1058 return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
1059 }
1060
1061
// newValue1I adds a new value with one argument and an auxint value to
// the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}
1065
1066
// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}
1070
1071
// newValue2A adds a new value with two arguments and an aux value to
// the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}
1075
1076
1077
1078
1079 func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
1080 if isStmt {
1081 return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
1082 }
1083 return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
1084 }
1085
1086
// newValue2I adds a new value with two arguments and an auxint value to
// the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}
1090
1091
// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}
1095
1096
// newValue3I adds a new value with three arguments and an auxint value
// to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}
1100
1101
// newValue3A adds a new value with three arguments and an aux value to
// the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}
1105
1106
1107
1108
1109 func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
1110 if isStmt {
1111 return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
1112 }
1113 return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
1114 }
1115
1116
// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1125
// entryBlock returns the block to emit entry-time constants and
// addresses into. Normally that is the function's entry block, but
// with optimization disabled (-N) values are emitted into the current
// block instead — presumably so values stay close to their uses for
// debugging; confirm against upstream history.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		b = s.curBlock
	}
	return b
}
1137
1138
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1172
1173
// The const* helpers below delegate to the function-level constant
// cache so identical constants are shared across the function.

// constSlice returns the cached nil-slice constant of type t.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}

// constInterface returns the cached nil-interface constant of type t.
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}

// constNil returns the cached nil-pointer constant of type t.
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }

// constEmptyString returns the cached empty-string constant of type t.
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}

// constBool returns the cached boolean constant c.
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}

// constInt8 returns the cached 8-bit integer constant c of type t.
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}

// constInt16 returns the cached 16-bit integer constant c of type t.
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}

// constInt32 returns the cached 32-bit integer constant c of type t.
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}

// constInt64 returns the cached 64-bit integer constant c of type t.
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}

// constFloat32 returns the cached 32-bit float constant c of type t.
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}

// constFloat64 returns the cached 64-bit float constant c of type t.
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}
1205 func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
1206 if s.config.PtrSize == 8 {
1207 return s.constInt64(t, c)
1208 }
1209 if int64(int32(c)) != c {
1210 s.Fatalf("integer constant too big %d", c)
1211 }
1212 return s.constInt32(t, int32(c))
1213 }
// constOffPtrSP returns the cached pointer constant at offset c from
// the stack pointer, of type t.
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}
1217
1218
1219
1220 func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
1221 if s.softFloat {
1222 if c, ok := s.sfcall(op, arg); ok {
1223 return c
1224 }
1225 }
1226 return s.newValue1(op, t, arg)
1227 }
1228 func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
1229 if s.softFloat {
1230 if c, ok := s.sfcall(op, arg0, arg1); ok {
1231 return c
1232 }
1233 }
1234 return s.newValue2(op, t, arg0, arg1)
1235 }
1236
// instrumentKind describes the kind of sanitizer instrumentation to
// insert for a memory operation.
type instrumentKind uint8

const (
	// Giving the constants the explicit instrumentKind type (rather
	// than leaving them untyped ints) lets the compiler reject
	// accidental mixing with unrelated integer values.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1244
// instrument inserts sanitizer instrumentation (if enabled) for a
// single-address memory operation of the given kind on addr.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1248
1249
1250
1251
// instrumentFields instruments a read/write operation on addr.
// If it is instrumenting for MSAN or ASAN and t is a struct type, it
// instruments operations on the non-blank fields individually instead
// of instrumenting the whole struct at once.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			// Blank fields carry no program-visible data; skip them.
			continue
		}
		// Compute the field's address and recurse (nested structs are
		// themselves decomposed field by field).
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}
1265
1266 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1267 if base.Flag.MSan {
1268 s.instrument2(t, dst, src, instrumentMove)
1269 } else {
1270 s.instrument(t, src, instrumentRead)
1271 s.instrument(t, dst, instrumentWrite)
1272 }
1273 }
1274
// instrument2 emits the sanitizer runtime call (msan/race/asan) for a
// memory operation of the given kind on addr (and, for moves, addr2).
// It is a no-op when instrumentation is disabled for this function, for
// zero-sized types, and for addresses the sanitizers treat as safe.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // cannot race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	// fn is the runtime hook to call; needWidth records whether the
	// hook takes the operation width as a trailing argument.
	var fn *obj.LSym
	needWidth := false

	// Only moves carry a second address.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// Multi-component types use the ranged race hooks, which take
		// an explicit width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component types use the plain race hooks; no width
		// argument needed.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	// Assemble the argument list: address, optional second address,
	// optional width.
	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1355
// load emits an instrumented load of type t from src.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}
1360
// rawLoad emits a load of type t from src without sanitizer
// instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}
1364
// store emits a store of val (of type t) to dst, threading the new
// memory state through the memory variable.
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}
1368
1369 func (s *state) zero(t *types.Type, dst *ssa.Value) {
1370 s.instrument(t, dst, instrumentWrite)
1371 store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
1372 store.Aux = t
1373 s.vars[memVar] = store
1374 }
1375
// move emits a memory copy of type t from src to dst, assuming the two
// regions do not overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
// moveWhichMayOverlap emits a memory copy of type t from src to dst.
// If mayOverlap is set and the copy cannot be done with an inlinable
// (and therefore overlap-safe) memmove, it falls back to a runtime
// call that handles partially overlapping src/dst correctly.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// A multi-element array copy that might overlap and that the
		// backend would not lower to a single overlap-safe memmove:
		// call the runtime instead of emitting OpMove, whose
		// element-wise expansion is presumably not overlap-safe for
		// this case — confirm against upstream commentary.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// Typedmemmove performs write-barrier work; record the
			// position for write-barrier diagnostics.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1422
1423
1424 func (s *state) stmtList(l ir.Nodes) {
1425 for _, n := range l {
1426 s.stmt(n)
1427 }
1428 }
1429
1430
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil, then we're about to generate dead code.
	// We can't just short-circuit here, though, because labels might
	// still be the target of a goto; anything else is skipped.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op; fallthrough was handled when the switch was lowered

	// Expression statements
	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		// Calls to functions that are known to never return (throw,
		// gopanic, ...) terminate the current block with a BlockExit.
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" || fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer implementation was chosen.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSA-able result: the dottype produced a load; assign
			// through its address instead, after verifying the memory
			// state is still the one the load observed.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Blank labels can never be the target of a goto; nothing to do.
			break
		}
		lab := s.label(sym)

		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to the label's block; "label:" behaves as if preceded by
		// "goto label", unless the predecessor is unreachable.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the jump itself is a statement
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x = x assignment. No point in doing anything
			// here. It is a no-op.
			return
		}

		// mayOverlap tracks whether the LHS and RHS might refer to
		// partially overlapping memory. Only deref-to-deref
		// assignments can overlap.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// Pointers into strings point to immutable memory,
				// which cannot overlap with the memory being written.
				mayOverlap = false
			}
		}

		// Evaluate the RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk (see walkCompLit); any that
				// remain here are zero and reduce to a plain zeroing.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an append
				// back to the same slice; if not, handle it like a
				// regular assignment.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd, it'll be on the stack,
				// so there will be no write barriers, so there's no
				// advantage to the special append path.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate the RHS for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil // Signal assign to use OpZero.
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source.
			// Don't write back fields we aren't changing.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// Only skip fields when the low bound is absent: with
			// i == nil the base pointer is unchanged, and the len/cap
			// fields can additionally be skipped when their bounds are
			// absent too.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue: target the innermost loop/switch
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the target via the label
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the jump itself is a statement
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Ninit; Cond; Post { Body }
		// cond (Cond); body (Body); incr (Post)
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have correct position
		bBody.Pos = n.Pos()

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// It can happen that bIncr ends in a block containing only VARKILL,
			// and that muddles the debugging experience.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by the front end into their Nbody fields.
		// Our main task is to correctly hook up any break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// walk adds explicit OBREAK nodes to the end of all reachable code paths.
		// If we still have a current block here, then mark it unreachable.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Bounds check: cases are sorted, so min/max come from the
		// first and last entries. After subtracting min, a single
		// unsigned <= comparison against (max-min) covers both ends
		// of the range.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely

		// Build jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Fill in table entries; entries with no matching case point
		// to the end block.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target for this case's index.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Inline-check the interface switch cache before falling back
		// to the runtime call (only when optimizing, and only on
		// architectures that support the cache).
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
			// The cache lookup requires an atomic pointer load; make
			// sure the intrinsic is available on this architecture.
			if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp"}]; !ok {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-width arithmetic ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load cache pointer out of descriptor, with an atomic
			// load so we can avoid locks.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize hash variable (zero-extended to uintptr width).
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load mask from cache (first word of the cache structure).
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Open-coded probe loop over the cache's hash table
			// (linear probing: the hash variable advances by one each
			// iteration).
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// hash++ for the next probe
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Look for a cache hit: the entry's type word matches the
			// runtime type being switched on.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// A nil type word marks an empty slot: cache miss.
			// Otherwise keep probing.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// On a hit, load the case index and itab out of the entry
			// (at offsets PtrSize and 2*PtrSize) and assign the
			// statement's results.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Cache miss: fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Both the cache-hit path and the runtime-call path join here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		_ = s.nilCheck(p)
		// TODO: check that throwing away the nilcheck result is ok.

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2121
2122
2123
// shareDeferExits, when set, lets exit() share a single open-coded
// defer exit block between returns (see exit below). It is disabled.
const shareDeferExits = false
2125
2126
2127
2128
// exit processes any code that needs to be generated just before
// returning: it runs deferred calls (open-coded or via deferreturn),
// materializes the function's results, and ends the current block with
// a BlockRet. It returns the exit block.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse a previously generated open-defer exit
				// sequence (only when shareDeferExits is enabled).
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			s.rtcall(ir.Syms.Deferreturn, true, nil)
		}
	}

	// Collect the result values: one per result field, plus the final
	// memory state.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
	// Store SSAable and heap-escaped PPARAMOUT variables back to stack locations.
	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// The return spill slot is live from here on; mark it
				// defined for liveness.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Heap-escaped result: read it back through its heap address.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Result already lives in its stack slot; dereference it
			// into the result list.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// In -race mode, we need to call racefuncexit.
	// Note: This has to happen after we load any heap-allocated results,
	// otherwise races will be attributed to the caller instead.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2194
// opAndType is the key for opToSSA: an IR operator together with the
// concrete element kind it operates on.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2199
// opToSSA maps an (IR operator, concrete kind) pair to the SSA op that
// implements it. Kinds must be resolved via concreteEtype first (TINT,
// TUINT, and TUINTPTR are not keys here).
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}:    ssa.OpAdd8,
	{ir.OADD, types.TUINT8}:   ssa.OpAdd8,
	{ir.OADD, types.TINT16}:   ssa.OpAdd16,
	{ir.OADD, types.TUINT16}:  ssa.OpAdd16,
	{ir.OADD, types.TINT32}:   ssa.OpAdd32,
	{ir.OADD, types.TUINT32}:  ssa.OpAdd32,
	{ir.OADD, types.TINT64}:   ssa.OpAdd64,
	{ir.OADD, types.TUINT64}:  ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}:    ssa.OpSub8,
	{ir.OSUB, types.TUINT8}:   ssa.OpSub8,
	{ir.OSUB, types.TINT16}:   ssa.OpSub16,
	{ir.OSUB, types.TUINT16}:  ssa.OpSub16,
	{ir.OSUB, types.TINT32}:   ssa.OpSub32,
	{ir.OSUB, types.TUINT32}:  ssa.OpSub32,
	{ir.OSUB, types.TINT64}:   ssa.OpSub64,
	{ir.OSUB, types.TUINT64}:  ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}:    ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}:   ssa.OpNeg8,
	{ir.ONEG, types.TINT16}:   ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}:  ssa.OpNeg16,
	{ir.ONEG, types.TINT32}:   ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}:  ssa.OpNeg32,
	{ir.ONEG, types.TINT64}:   ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}:  ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}:   ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}:  ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}:  ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}:  ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}:  ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}:  ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}:  ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}:    ssa.OpMul8,
	{ir.OMUL, types.TUINT8}:   ssa.OpMul8,
	{ir.OMUL, types.TINT16}:   ssa.OpMul16,
	{ir.OMUL, types.TUINT16}:  ssa.OpMul16,
	{ir.OMUL, types.TINT32}:   ssa.OpMul32,
	{ir.OMUL, types.TUINT32}:  ssa.OpMul32,
	{ir.OMUL, types.TINT64}:   ssa.OpMul64,
	{ir.OMUL, types.TUINT64}:  ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	{ir.ODIV, types.TINT8}:   ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}:  ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}:  ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}:  ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}:  ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}:   ssa.OpMod8,
	{ir.OMOD, types.TUINT8}:  ssa.OpMod8u,
	{ir.OMOD, types.TINT16}:  ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}:  ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}:  ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}:   ssa.OpAnd8,
	{ir.OAND, types.TUINT8}:  ssa.OpAnd8,
	{ir.OAND, types.TINT16}:  ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}:  ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}:  ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}:   ssa.OpOr8,
	{ir.OOR, types.TUINT8}:  ssa.OpOr8,
	{ir.OOR, types.TINT16}:  ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}:  ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}:  ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}:   ssa.OpXor8,
	{ir.OXOR, types.TUINT8}:  ssa.OpXor8,
	{ir.OXOR, types.TINT16}:  ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}:  ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}:  ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	{ir.OEQ, types.TBOOL}:      ssa.OpEqB,
	{ir.OEQ, types.TINT8}:      ssa.OpEq8,
	{ir.OEQ, types.TUINT8}:     ssa.OpEq8,
	{ir.OEQ, types.TINT16}:     ssa.OpEq16,
	{ir.OEQ, types.TUINT16}:    ssa.OpEq16,
	{ir.OEQ, types.TINT32}:     ssa.OpEq32,
	{ir.OEQ, types.TUINT32}:    ssa.OpEq32,
	{ir.OEQ, types.TINT64}:     ssa.OpEq64,
	{ir.OEQ, types.TUINT64}:    ssa.OpEq64,
	{ir.OEQ, types.TINTER}:     ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}:     ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}:      ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}:       ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}:      ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}:       ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}:   ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}:   ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}:   ssa.OpEq32F,

	{ir.ONE, types.TBOOL}:      ssa.OpNeqB,
	{ir.ONE, types.TINT8}:      ssa.OpNeq8,
	{ir.ONE, types.TUINT8}:     ssa.OpNeq8,
	{ir.ONE, types.TINT16}:     ssa.OpNeq16,
	{ir.ONE, types.TUINT16}:    ssa.OpNeq16,
	{ir.ONE, types.TINT32}:     ssa.OpNeq32,
	{ir.ONE, types.TUINT32}:    ssa.OpNeq32,
	{ir.ONE, types.TINT64}:     ssa.OpNeq64,
	{ir.ONE, types.TUINT64}:    ssa.OpNeq64,
	{ir.ONE, types.TINTER}:     ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}:     ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}:      ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}:       ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}:      ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}:       ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}:   ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}:   ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}:   ssa.OpNeq32F,

	{ir.OLT, types.TINT8}:    ssa.OpLess8,
	{ir.OLT, types.TUINT8}:   ssa.OpLess8U,
	{ir.OLT, types.TINT16}:   ssa.OpLess16,
	{ir.OLT, types.TUINT16}:  ssa.OpLess16U,
	{ir.OLT, types.TINT32}:   ssa.OpLess32,
	{ir.OLT, types.TUINT32}:  ssa.OpLess32U,
	{ir.OLT, types.TINT64}:   ssa.OpLess64,
	{ir.OLT, types.TUINT64}:  ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}:    ssa.OpLeq8,
	{ir.OLE, types.TUINT8}:   ssa.OpLeq8U,
	{ir.OLE, types.TINT16}:   ssa.OpLeq16,
	{ir.OLE, types.TUINT16}:  ssa.OpLeq16U,
	{ir.OLE, types.TINT32}:   ssa.OpLeq32,
	{ir.OLE, types.TUINT32}:  ssa.OpLeq32U,
	{ir.OLE, types.TINT64}:   ssa.OpLeq64,
	{ir.OLE, types.TUINT64}:  ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2371
2372 func (s *state) concreteEtype(t *types.Type) types.Kind {
2373 e := t.Kind()
2374 switch e {
2375 default:
2376 return e
2377 case types.TINT:
2378 if s.config.PtrSize == 8 {
2379 return types.TINT64
2380 }
2381 return types.TINT32
2382 case types.TUINT:
2383 if s.config.PtrSize == 8 {
2384 return types.TUINT64
2385 }
2386 return types.TUINT32
2387 case types.TUINTPTR:
2388 if s.config.PtrSize == 8 {
2389 return types.TUINT64
2390 }
2391 return types.TUINT32
2392 }
2393 }
2394
2395 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2396 etype := s.concreteEtype(t)
2397 x, ok := opToSSA[opAndType{op, etype}]
2398 if !ok {
2399 s.Fatalf("unhandled binary op %v %s", op, etype)
2400 }
2401 return x
2402 }
2403
// opAndTwoTypes is the key type of shiftOpToSSA: an IR shift operation
// together with the kind of the shifted value (etype1) and the kind of
// the shift count (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2409
// twoTypes is a map key pairing a source type kind (etype1) with a
// destination type kind (etype2); it keys the float-conversion tables
// below.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2414
// twoOpsAndType describes a conversion performed in two SSA steps:
// op1 is applied first, producing a value of intermediateType, then
// op2 is applied to that result. Either op may be ssa.OpCopy (no-op).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2420
// fpConvOpToSSA maps a (from, to) pair of kinds involving a float to
// the two SSA ops implementing the conversion (see twoOpsAndType).
// ssa.OpCopy marks a step that does nothing; ssa.OpInvalid marks the
// uint64<->float cases that most architectures cannot do directly and
// which conv expands specially (or overrides via uint64fpConvOpToSSA).
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	// signed int -> float64
	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed int
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	// float64 -> signed int
	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float32 (uint64 needs special expansion)
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// unsigned int -> float64 (uint64 needs special expansion)
	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float32 -> unsigned int (uint64 needs special expansion)
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float64 -> unsigned int (uint64 needs special expansion)
	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float (same-size entries round, used e.g. after ops
	// that may carry excess precision)
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2469
2470
2471
// fpConvOpToSSA32 overrides fpConvOpToSSA for uint32<->float on 32-bit
// register targets (other than hardware-float MIPS; see conv), which
// have direct uint32 conversion ops instead of widening through int64.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2478
2479
// uint64fpConvOpToSSA overrides the ssa.OpInvalid uint64<->float
// entries of fpConvOpToSSA on targets with direct uint64 conversion
// support (ARM64, Wasm, S390X) and under softfloat; see conv.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2486
// shiftOpToSSA maps a shift operation (OLSH or ORSH) plus the kinds of
// the shifted value and the shift count to the corresponding SSA op.
// Right shifts of unsigned values use the unsigned (Ux) variants;
// shift counts are always unsigned here (signed counts are
// range-checked and converted before lookup; see ssaShiftOp callers).
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	// left shift, 8-bit value
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	// left shift, 16-bit value
	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	// left shift, 32-bit value
	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	// left shift, 64-bit value
	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	// right shift, 8-bit value (arithmetic for signed, logical for unsigned)
	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	// right shift, 16-bit value
	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	// right shift, 32-bit value
	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	// right shift, 64-bit value
	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2560
2561 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2562 etype1 := s.concreteEtype(t)
2563 etype2 := s.concreteEtype(u)
2564 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2565 if !ok {
2566 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2567 }
2568 return x
2569 }
2570
2571 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2572 if s.config.PtrSize == 4 {
2573 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2574 }
2575 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2576 }
2577
2578 func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
2579 if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
2580
2581 return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
2582 }
2583 if ft.IsInteger() && tt.IsInteger() {
2584 var op ssa.Op
2585 if tt.Size() == ft.Size() {
2586 op = ssa.OpCopy
2587 } else if tt.Size() < ft.Size() {
2588
2589 switch 10*ft.Size() + tt.Size() {
2590 case 21:
2591 op = ssa.OpTrunc16to8
2592 case 41:
2593 op = ssa.OpTrunc32to8
2594 case 42:
2595 op = ssa.OpTrunc32to16
2596 case 81:
2597 op = ssa.OpTrunc64to8
2598 case 82:
2599 op = ssa.OpTrunc64to16
2600 case 84:
2601 op = ssa.OpTrunc64to32
2602 default:
2603 s.Fatalf("weird integer truncation %v -> %v", ft, tt)
2604 }
2605 } else if ft.IsSigned() {
2606
2607 switch 10*ft.Size() + tt.Size() {
2608 case 12:
2609 op = ssa.OpSignExt8to16
2610 case 14:
2611 op = ssa.OpSignExt8to32
2612 case 18:
2613 op = ssa.OpSignExt8to64
2614 case 24:
2615 op = ssa.OpSignExt16to32
2616 case 28:
2617 op = ssa.OpSignExt16to64
2618 case 48:
2619 op = ssa.OpSignExt32to64
2620 default:
2621 s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
2622 }
2623 } else {
2624
2625 switch 10*ft.Size() + tt.Size() {
2626 case 12:
2627 op = ssa.OpZeroExt8to16
2628 case 14:
2629 op = ssa.OpZeroExt8to32
2630 case 18:
2631 op = ssa.OpZeroExt8to64
2632 case 24:
2633 op = ssa.OpZeroExt16to32
2634 case 28:
2635 op = ssa.OpZeroExt16to64
2636 case 48:
2637 op = ssa.OpZeroExt32to64
2638 default:
2639 s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
2640 }
2641 }
2642 return s.newValue1(op, tt, v)
2643 }
2644
2645 if ft.IsComplex() && tt.IsComplex() {
2646 var op ssa.Op
2647 if ft.Size() == tt.Size() {
2648 switch ft.Size() {
2649 case 8:
2650 op = ssa.OpRound32F
2651 case 16:
2652 op = ssa.OpRound64F
2653 default:
2654 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2655 }
2656 } else if ft.Size() == 8 && tt.Size() == 16 {
2657 op = ssa.OpCvt32Fto64F
2658 } else if ft.Size() == 16 && tt.Size() == 8 {
2659 op = ssa.OpCvt64Fto32F
2660 } else {
2661 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2662 }
2663 ftp := types.FloatForComplex(ft)
2664 ttp := types.FloatForComplex(tt)
2665 return s.newValue2(ssa.OpComplexMake, tt,
2666 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
2667 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
2668 }
2669
2670 if tt.IsComplex() {
2671
2672 et := types.FloatForComplex(tt)
2673 v = s.conv(n, v, ft, et)
2674 return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
2675 }
2676
2677 if ft.IsFloat() || tt.IsFloat() {
2678 conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
2679 if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
2680 if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2681 conv = conv1
2682 }
2683 }
2684 if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
2685 if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2686 conv = conv1
2687 }
2688 }
2689
2690 if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
2691 if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
2692
2693 if tt.Size() == 4 {
2694 return s.uint32Tofloat32(n, v, ft, tt)
2695 }
2696 if tt.Size() == 8 {
2697 return s.uint32Tofloat64(n, v, ft, tt)
2698 }
2699 } else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
2700
2701 if ft.Size() == 4 {
2702 return s.float32ToUint32(n, v, ft, tt)
2703 }
2704 if ft.Size() == 8 {
2705 return s.float64ToUint32(n, v, ft, tt)
2706 }
2707 }
2708 }
2709
2710 if !ok {
2711 s.Fatalf("weird float conversion %v -> %v", ft, tt)
2712 }
2713 op1, op2, it := conv.op1, conv.op2, conv.intermediateType
2714
2715 if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
2716
2717 if op1 == ssa.OpCopy {
2718 if op2 == ssa.OpCopy {
2719 return v
2720 }
2721 return s.newValueOrSfCall1(op2, tt, v)
2722 }
2723 if op2 == ssa.OpCopy {
2724 return s.newValueOrSfCall1(op1, tt, v)
2725 }
2726 return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
2727 }
2728
2729 if ft.IsInteger() {
2730
2731 if tt.Size() == 4 {
2732 return s.uint64Tofloat32(n, v, ft, tt)
2733 }
2734 if tt.Size() == 8 {
2735 return s.uint64Tofloat64(n, v, ft, tt)
2736 }
2737 s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
2738 }
2739
2740 if ft.Size() == 4 {
2741 return s.float32ToUint64(n, v, ft, tt)
2742 }
2743 if ft.Size() == 8 {
2744 return s.float64ToUint64(n, v, ft, tt)
2745 }
2746 s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
2747 return nil
2748 }
2749
2750 s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
2751 return nil
2752 }
2753
2754
// expr converts the expression n to SSA, adds it to s, and returns the
// SSA result, with unsafe-pointer checkptr checks permitted (see
// exprCheckPtr's checkPtrOK parameter).
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
2758
2759 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2760 if ir.HasUniquePos(n) {
2761
2762
2763 s.pushLine(n.Pos())
2764 defer s.popLine()
2765 }
2766
2767 s.stmtList(n.Init())
2768 switch n.Op() {
2769 case ir.OBYTES2STRTMP:
2770 n := n.(*ir.ConvExpr)
2771 slice := s.expr(n.X)
2772 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2773 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2774 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2775 case ir.OSTR2BYTESTMP:
2776 n := n.(*ir.ConvExpr)
2777 str := s.expr(n.X)
2778 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2779 if !n.NonNil() {
2780
2781
2782
2783 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2784 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2785 ptr = s.ternary(cond, ptr, zerobase)
2786 }
2787 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2788 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2789 case ir.OCFUNC:
2790 n := n.(*ir.UnaryExpr)
2791 aux := n.X.(*ir.Name).Linksym()
2792
2793
2794 if aux.ABI() != obj.ABIInternal {
2795 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2796 }
2797 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2798 case ir.ONAME:
2799 n := n.(*ir.Name)
2800 if n.Class == ir.PFUNC {
2801
2802 sym := staticdata.FuncLinksym(n)
2803 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2804 }
2805 if s.canSSA(n) {
2806 return s.variable(n, n.Type())
2807 }
2808 return s.load(n.Type(), s.addr(n))
2809 case ir.OLINKSYMOFFSET:
2810 n := n.(*ir.LinksymOffsetExpr)
2811 return s.load(n.Type(), s.addr(n))
2812 case ir.ONIL:
2813 n := n.(*ir.NilExpr)
2814 t := n.Type()
2815 switch {
2816 case t.IsSlice():
2817 return s.constSlice(t)
2818 case t.IsInterface():
2819 return s.constInterface(t)
2820 default:
2821 return s.constNil(t)
2822 }
2823 case ir.OLITERAL:
2824 switch u := n.Val(); u.Kind() {
2825 case constant.Int:
2826 i := ir.IntVal(n.Type(), u)
2827 switch n.Type().Size() {
2828 case 1:
2829 return s.constInt8(n.Type(), int8(i))
2830 case 2:
2831 return s.constInt16(n.Type(), int16(i))
2832 case 4:
2833 return s.constInt32(n.Type(), int32(i))
2834 case 8:
2835 return s.constInt64(n.Type(), i)
2836 default:
2837 s.Fatalf("bad integer size %d", n.Type().Size())
2838 return nil
2839 }
2840 case constant.String:
2841 i := constant.StringVal(u)
2842 if i == "" {
2843 return s.constEmptyString(n.Type())
2844 }
2845 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
2846 case constant.Bool:
2847 return s.constBool(constant.BoolVal(u))
2848 case constant.Float:
2849 f, _ := constant.Float64Val(u)
2850 switch n.Type().Size() {
2851 case 4:
2852 return s.constFloat32(n.Type(), f)
2853 case 8:
2854 return s.constFloat64(n.Type(), f)
2855 default:
2856 s.Fatalf("bad float size %d", n.Type().Size())
2857 return nil
2858 }
2859 case constant.Complex:
2860 re, _ := constant.Float64Val(constant.Real(u))
2861 im, _ := constant.Float64Val(constant.Imag(u))
2862 switch n.Type().Size() {
2863 case 8:
2864 pt := types.Types[types.TFLOAT32]
2865 return s.newValue2(ssa.OpComplexMake, n.Type(),
2866 s.constFloat32(pt, re),
2867 s.constFloat32(pt, im))
2868 case 16:
2869 pt := types.Types[types.TFLOAT64]
2870 return s.newValue2(ssa.OpComplexMake, n.Type(),
2871 s.constFloat64(pt, re),
2872 s.constFloat64(pt, im))
2873 default:
2874 s.Fatalf("bad complex size %d", n.Type().Size())
2875 return nil
2876 }
2877 default:
2878 s.Fatalf("unhandled OLITERAL %v", u.Kind())
2879 return nil
2880 }
2881 case ir.OCONVNOP:
2882 n := n.(*ir.ConvExpr)
2883 to := n.Type()
2884 from := n.X.Type()
2885
2886
2887
2888 x := s.expr(n.X)
2889 if to == from {
2890 return x
2891 }
2892
2893
2894
2895
2896
2897 if to.IsPtrShaped() != from.IsPtrShaped() {
2898 return s.newValue2(ssa.OpConvert, to, x, s.mem())
2899 }
2900
2901 v := s.newValue1(ssa.OpCopy, to, x)
2902
2903
2904 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
2905 return v
2906 }
2907
2908
2909 if from.Kind() == to.Kind() {
2910 return v
2911 }
2912
2913
2914 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
2915 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
2916 s.checkPtrAlignment(n, v, nil)
2917 }
2918 return v
2919 }
2920
2921
2922 if to.Kind() == types.TMAP && from == types.NewPtr(reflectdata.MapType()) {
2923 return v
2924 }
2925
2926 types.CalcSize(from)
2927 types.CalcSize(to)
2928 if from.Size() != to.Size() {
2929 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
2930 return nil
2931 }
2932 if etypesign(from.Kind()) != etypesign(to.Kind()) {
2933 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
2934 return nil
2935 }
2936
2937 if base.Flag.Cfg.Instrumenting {
2938
2939
2940
2941 return v
2942 }
2943
2944 if etypesign(from.Kind()) == 0 {
2945 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
2946 return nil
2947 }
2948
2949
2950 return v
2951
2952 case ir.OCONV:
2953 n := n.(*ir.ConvExpr)
2954 x := s.expr(n.X)
2955 return s.conv(n, x, n.X.Type(), n.Type())
2956
2957 case ir.ODOTTYPE:
2958 n := n.(*ir.TypeAssertExpr)
2959 res, _ := s.dottype(n, false)
2960 return res
2961
2962 case ir.ODYNAMICDOTTYPE:
2963 n := n.(*ir.DynamicTypeAssertExpr)
2964 res, _ := s.dynamicDottype(n, false)
2965 return res
2966
2967
2968 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
2969 n := n.(*ir.BinaryExpr)
2970 a := s.expr(n.X)
2971 b := s.expr(n.Y)
2972 if n.X.Type().IsComplex() {
2973 pt := types.FloatForComplex(n.X.Type())
2974 op := s.ssaOp(ir.OEQ, pt)
2975 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
2976 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
2977 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
2978 switch n.Op() {
2979 case ir.OEQ:
2980 return c
2981 case ir.ONE:
2982 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
2983 default:
2984 s.Fatalf("ordered complex compare %v", n.Op())
2985 }
2986 }
2987
2988
2989 op := n.Op()
2990 switch op {
2991 case ir.OGE:
2992 op, a, b = ir.OLE, b, a
2993 case ir.OGT:
2994 op, a, b = ir.OLT, b, a
2995 }
2996 if n.X.Type().IsFloat() {
2997
2998 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
2999 }
3000
3001 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3002 case ir.OMUL:
3003 n := n.(*ir.BinaryExpr)
3004 a := s.expr(n.X)
3005 b := s.expr(n.Y)
3006 if n.Type().IsComplex() {
3007 mulop := ssa.OpMul64F
3008 addop := ssa.OpAdd64F
3009 subop := ssa.OpSub64F
3010 pt := types.FloatForComplex(n.Type())
3011 wt := types.Types[types.TFLOAT64]
3012
3013 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3014 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3015 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3016 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3017
3018 if pt != wt {
3019 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3020 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3021 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3022 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3023 }
3024
3025 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3026 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3027
3028 if pt != wt {
3029 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3030 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3031 }
3032
3033 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3034 }
3035
3036 if n.Type().IsFloat() {
3037 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3038 }
3039
3040 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3041
3042 case ir.ODIV:
3043 n := n.(*ir.BinaryExpr)
3044 a := s.expr(n.X)
3045 b := s.expr(n.Y)
3046 if n.Type().IsComplex() {
3047
3048
3049
3050 mulop := ssa.OpMul64F
3051 addop := ssa.OpAdd64F
3052 subop := ssa.OpSub64F
3053 divop := ssa.OpDiv64F
3054 pt := types.FloatForComplex(n.Type())
3055 wt := types.Types[types.TFLOAT64]
3056
3057 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3058 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3059 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3060 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3061
3062 if pt != wt {
3063 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3064 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3065 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3066 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3067 }
3068
3069 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3070 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3071 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3072
3073
3074
3075
3076
3077 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3078 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3079
3080 if pt != wt {
3081 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3082 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3083 }
3084 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3085 }
3086 if n.Type().IsFloat() {
3087 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3088 }
3089 return s.intDivide(n, a, b)
3090 case ir.OMOD:
3091 n := n.(*ir.BinaryExpr)
3092 a := s.expr(n.X)
3093 b := s.expr(n.Y)
3094 return s.intDivide(n, a, b)
3095 case ir.OADD, ir.OSUB:
3096 n := n.(*ir.BinaryExpr)
3097 a := s.expr(n.X)
3098 b := s.expr(n.Y)
3099 if n.Type().IsComplex() {
3100 pt := types.FloatForComplex(n.Type())
3101 op := s.ssaOp(n.Op(), pt)
3102 return s.newValue2(ssa.OpComplexMake, n.Type(),
3103 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3104 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3105 }
3106 if n.Type().IsFloat() {
3107 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3108 }
3109 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3110 case ir.OAND, ir.OOR, ir.OXOR:
3111 n := n.(*ir.BinaryExpr)
3112 a := s.expr(n.X)
3113 b := s.expr(n.Y)
3114 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3115 case ir.OANDNOT:
3116 n := n.(*ir.BinaryExpr)
3117 a := s.expr(n.X)
3118 b := s.expr(n.Y)
3119 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3120 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3121 case ir.OLSH, ir.ORSH:
3122 n := n.(*ir.BinaryExpr)
3123 a := s.expr(n.X)
3124 b := s.expr(n.Y)
3125 bt := b.Type
3126 if bt.IsSigned() {
3127 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3128 s.check(cmp, ir.Syms.Panicshift)
3129 bt = bt.ToUnsigned()
3130 }
3131 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3132 case ir.OANDAND, ir.OOROR:
3133
3134
3135
3136
3137
3138
3139
3140
3141
3142
3143
3144
3145
3146 n := n.(*ir.LogicalExpr)
3147 el := s.expr(n.X)
3148 s.vars[n] = el
3149
3150 b := s.endBlock()
3151 b.Kind = ssa.BlockIf
3152 b.SetControl(el)
3153
3154
3155
3156
3157
3158 bRight := s.f.NewBlock(ssa.BlockPlain)
3159 bResult := s.f.NewBlock(ssa.BlockPlain)
3160 if n.Op() == ir.OANDAND {
3161 b.AddEdgeTo(bRight)
3162 b.AddEdgeTo(bResult)
3163 } else if n.Op() == ir.OOROR {
3164 b.AddEdgeTo(bResult)
3165 b.AddEdgeTo(bRight)
3166 }
3167
3168 s.startBlock(bRight)
3169 er := s.expr(n.Y)
3170 s.vars[n] = er
3171
3172 b = s.endBlock()
3173 b.AddEdgeTo(bResult)
3174
3175 s.startBlock(bResult)
3176 return s.variable(n, types.Types[types.TBOOL])
3177 case ir.OCOMPLEX:
3178 n := n.(*ir.BinaryExpr)
3179 r := s.expr(n.X)
3180 i := s.expr(n.Y)
3181 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3182
3183
3184 case ir.ONEG:
3185 n := n.(*ir.UnaryExpr)
3186 a := s.expr(n.X)
3187 if n.Type().IsComplex() {
3188 tp := types.FloatForComplex(n.Type())
3189 negop := s.ssaOp(n.Op(), tp)
3190 return s.newValue2(ssa.OpComplexMake, n.Type(),
3191 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3192 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3193 }
3194 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3195 case ir.ONOT, ir.OBITNOT:
3196 n := n.(*ir.UnaryExpr)
3197 a := s.expr(n.X)
3198 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3199 case ir.OIMAG, ir.OREAL:
3200 n := n.(*ir.UnaryExpr)
3201 a := s.expr(n.X)
3202 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3203 case ir.OPLUS:
3204 n := n.(*ir.UnaryExpr)
3205 return s.expr(n.X)
3206
3207 case ir.OADDR:
3208 n := n.(*ir.AddrExpr)
3209 return s.addr(n.X)
3210
3211 case ir.ORESULT:
3212 n := n.(*ir.ResultExpr)
3213 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3214 panic("Expected to see a previous call")
3215 }
3216 which := n.Index
3217 if which == -1 {
3218 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3219 }
3220 return s.resultOfCall(s.prevCall, which, n.Type())
3221
3222 case ir.ODEREF:
3223 n := n.(*ir.StarExpr)
3224 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3225 return s.load(n.Type(), p)
3226
3227 case ir.ODOT:
3228 n := n.(*ir.SelectorExpr)
3229 if n.X.Op() == ir.OSTRUCTLIT {
3230
3231
3232
3233 if !ir.IsZero(n.X) {
3234 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3235 }
3236 return s.zeroVal(n.Type())
3237 }
3238
3239
3240
3241
3242 if ir.IsAddressable(n) && !s.canSSA(n) {
3243 p := s.addr(n)
3244 return s.load(n.Type(), p)
3245 }
3246 v := s.expr(n.X)
3247 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3248
3249 case ir.ODOTPTR:
3250 n := n.(*ir.SelectorExpr)
3251 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3252 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3253 return s.load(n.Type(), p)
3254
3255 case ir.OINDEX:
3256 n := n.(*ir.IndexExpr)
3257 switch {
3258 case n.X.Type().IsString():
3259 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3260
3261
3262
3263 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3264 }
3265 a := s.expr(n.X)
3266 i := s.expr(n.Index)
3267 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3268 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3269 ptrtyp := s.f.Config.Types.BytePtr
3270 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3271 if ir.IsConst(n.Index, constant.Int) {
3272 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3273 } else {
3274 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3275 }
3276 return s.load(types.Types[types.TUINT8], ptr)
3277 case n.X.Type().IsSlice():
3278 p := s.addr(n)
3279 return s.load(n.X.Type().Elem(), p)
3280 case n.X.Type().IsArray():
3281 if ssa.CanSSA(n.X.Type()) {
3282
3283 bound := n.X.Type().NumElem()
3284 a := s.expr(n.X)
3285 i := s.expr(n.Index)
3286 if bound == 0 {
3287
3288
3289 z := s.constInt(types.Types[types.TINT], 0)
3290 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3291
3292
3293 return s.zeroVal(n.Type())
3294 }
3295 len := s.constInt(types.Types[types.TINT], bound)
3296 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3297 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3298 }
3299 p := s.addr(n)
3300 return s.load(n.X.Type().Elem(), p)
3301 default:
3302 s.Fatalf("bad type for index %v", n.X.Type())
3303 return nil
3304 }
3305
3306 case ir.OLEN, ir.OCAP:
3307 n := n.(*ir.UnaryExpr)
3308 switch {
3309 case n.X.Type().IsSlice():
3310 op := ssa.OpSliceLen
3311 if n.Op() == ir.OCAP {
3312 op = ssa.OpSliceCap
3313 }
3314 return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
3315 case n.X.Type().IsString():
3316 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
3317 case n.X.Type().IsMap(), n.X.Type().IsChan():
3318 return s.referenceTypeBuiltin(n, s.expr(n.X))
3319 default:
3320 return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
3321 }
3322
3323 case ir.OSPTR:
3324 n := n.(*ir.UnaryExpr)
3325 a := s.expr(n.X)
3326 if n.X.Type().IsSlice() {
3327 if n.Bounded() {
3328 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3329 }
3330 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3331 } else {
3332 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3333 }
3334
3335 case ir.OITAB:
3336 n := n.(*ir.UnaryExpr)
3337 a := s.expr(n.X)
3338 return s.newValue1(ssa.OpITab, n.Type(), a)
3339
3340 case ir.OIDATA:
3341 n := n.(*ir.UnaryExpr)
3342 a := s.expr(n.X)
3343 return s.newValue1(ssa.OpIData, n.Type(), a)
3344
3345 case ir.OMAKEFACE:
3346 n := n.(*ir.BinaryExpr)
3347 tab := s.expr(n.X)
3348 data := s.expr(n.Y)
3349 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3350
3351 case ir.OSLICEHEADER:
3352 n := n.(*ir.SliceHeaderExpr)
3353 p := s.expr(n.Ptr)
3354 l := s.expr(n.Len)
3355 c := s.expr(n.Cap)
3356 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3357
3358 case ir.OSTRINGHEADER:
3359 n := n.(*ir.StringHeaderExpr)
3360 p := s.expr(n.Ptr)
3361 l := s.expr(n.Len)
3362 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3363
3364 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3365 n := n.(*ir.SliceExpr)
3366 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3367 v := s.exprCheckPtr(n.X, !check)
3368 var i, j, k *ssa.Value
3369 if n.Low != nil {
3370 i = s.expr(n.Low)
3371 }
3372 if n.High != nil {
3373 j = s.expr(n.High)
3374 }
3375 if n.Max != nil {
3376 k = s.expr(n.Max)
3377 }
3378 p, l, c := s.slice(v, i, j, k, n.Bounded())
3379 if check {
3380
3381 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3382 }
3383 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3384
3385 case ir.OSLICESTR:
3386 n := n.(*ir.SliceExpr)
3387 v := s.expr(n.X)
3388 var i, j *ssa.Value
3389 if n.Low != nil {
3390 i = s.expr(n.Low)
3391 }
3392 if n.High != nil {
3393 j = s.expr(n.High)
3394 }
3395 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3396 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3397
3398 case ir.OSLICE2ARRPTR:
3399
3400
3401
3402
3403 n := n.(*ir.ConvExpr)
3404 v := s.expr(n.X)
3405 nelem := n.Type().Elem().NumElem()
3406 arrlen := s.constInt(types.Types[types.TINT], nelem)
3407 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3408 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3409 op := ssa.OpSlicePtr
3410 if nelem == 0 {
3411 op = ssa.OpSlicePtrUnchecked
3412 }
3413 return s.newValue1(op, n.Type(), v)
3414
3415 case ir.OCALLFUNC:
3416 n := n.(*ir.CallExpr)
3417 if ir.IsIntrinsicCall(n) {
3418 return s.intrinsicCall(n)
3419 }
3420 fallthrough
3421
3422 case ir.OCALLINTER:
3423 n := n.(*ir.CallExpr)
3424 return s.callResult(n, callNormal)
3425
3426 case ir.OGETG:
3427 n := n.(*ir.CallExpr)
3428 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3429
3430 case ir.OGETCALLERPC:
3431 n := n.(*ir.CallExpr)
3432 return s.newValue0(ssa.OpGetCallerPC, n.Type())
3433
3434 case ir.OGETCALLERSP:
3435 n := n.(*ir.CallExpr)
3436 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3437
3438 case ir.OAPPEND:
3439 return s.append(n.(*ir.CallExpr), false)
3440
3441 case ir.OMIN, ir.OMAX:
3442 return s.minMax(n.(*ir.CallExpr))
3443
3444 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3445
3446
3447
3448 n := n.(*ir.CompLitExpr)
3449 if !ir.IsZero(n) {
3450 s.Fatalf("literal with nonzero value in SSA: %v", n)
3451 }
3452 return s.zeroVal(n.Type())
3453
3454 case ir.ONEW:
3455 n := n.(*ir.UnaryExpr)
3456 var rtype *ssa.Value
3457 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3458 rtype = s.expr(x.RType)
3459 }
3460 return s.newObject(n.Type().Elem(), rtype)
3461
3462 case ir.OUNSAFEADD:
3463 n := n.(*ir.BinaryExpr)
3464 ptr := s.expr(n.X)
3465 len := s.expr(n.Y)
3466
3467
3468
3469 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3470
3471 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3472
3473 default:
3474 s.Fatalf("unhandled expr %v", n.Op())
3475 return nil
3476 }
3477 }
3478
3479 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3480 aux := c.Aux.(*ssa.AuxCall)
3481 pa := aux.ParamAssignmentForResult(which)
3482
3483
3484 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3485 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3486 return s.rawLoad(t, addr)
3487 }
3488 return s.newValue1I(ssa.OpSelectN, t, which, c)
3489 }
3490
3491 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3492 aux := c.Aux.(*ssa.AuxCall)
3493 pa := aux.ParamAssignmentForResult(which)
3494 if len(pa.Registers) == 0 {
3495 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3496 }
3497 _, addr := s.temp(c.Pos, t)
3498 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3499 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3500 return addr
3501 }
3502
3503
3504
3505
3506
3507
3508
3509
3510
// append converts an OAPPEND node n to SSA.
// If inplace is false, it converts the expression "append(s, e1, e2, e3)"
// to an ssa.Value and returns it. If inplace is true, it converts the
// statement "s = append(s, e1, e2, e3)" — writing the result back through
// the slice's address — and returns nil.
//
// The generated code is roughly:
//
//	ptr, len, cap := s
//	len += nargs
//	if uint(cap) < uint(len) {        // unsigned compare handles overflow
//	    ptr, len, cap = growslice(...)
//	    // inplace only: vardef(a); *a.cap = cap; *a.ptr = ptr
//	}
//	// inplace only: *a.len = len
//	*(ptr+(len-nargs)) = e1; ...      // store/move each new element
//	// !inplace only: return makeslice(ptr, len, cap)
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {

	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to. For inplace we keep its
	// address so we can write the updated header back.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// Blocks for the "needs to grow" path and the common join point.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the input slice into ptr/len/cap.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// New length = old length + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow if new length > capacity (unsigned compare, so an overflowed
	// length also triggers the grow path).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record ptr/len(/cap) as fake variables so the phi at the join
	// block is constructed for us.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the uncommon case
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Grow path: call runtime.growslice.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

	// Decompose the grown slice returned by growslice.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to build a new slice.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Write cap before ptr; the ptr store may need a write barrier.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Join point: merge values from the grow and no-grow paths.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Update the length word in place (on both paths).
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments being appended.
	type argRec struct {
		// If store is true, v is the argument's value to be stored.
		// If false, v is the address of the argument (non-SSA-able type).
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the new elements into slots [oldLen, oldLen+nargs).
	oldLen := s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// Drop the fake variables; they are dead past this point. (These
	// deletions are mostly documentation: startBlock above already
	// reset s.vars for the current block.)
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// Build the result slice (expression form only).
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
3676
3677
// minMax converts an OMIN/OMAX builtin call n into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// The underlying operations are only binary, so fold the n-ary
	// call left-to-right: min(a, b, c) == min(min(a, b), c).
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats can't use the simple compare-and-select lowering
		// below (NaN and signed-zero handling differ), and strings
		// need a runtime comparison. Use dedicated machine
		// instructions where the target has them, otherwise call
		// runtime helpers.

		if typ.IsFloat() {
			// Does this target have float min/max instructions?
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.RISCV64:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				// Pick the op matching the float width and min/max.
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// No intrinsic: select the runtime helper for this
		// type/op combination and fold over calls to it.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	// Integer case: lower to compare-and-select.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
3772
3773
3774 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
3775
3776
3777 ternaryVar := ssaMarker("ternary")
3778
3779 bThen := s.f.NewBlock(ssa.BlockPlain)
3780 bElse := s.f.NewBlock(ssa.BlockPlain)
3781 bEnd := s.f.NewBlock(ssa.BlockPlain)
3782
3783 b := s.endBlock()
3784 b.Kind = ssa.BlockIf
3785 b.SetControl(cond)
3786 b.AddEdgeTo(bThen)
3787 b.AddEdgeTo(bElse)
3788
3789 s.startBlock(bThen)
3790 s.vars[ternaryVar] = x
3791 s.endBlock().AddEdgeTo(bEnd)
3792
3793 s.startBlock(bElse)
3794 s.vars[ternaryVar] = y
3795 s.endBlock().AddEdgeTo(bEnd)
3796
3797 s.startBlock(bEnd)
3798 r := s.variable(ternaryVar, x.Type)
3799 delete(s.vars, ternaryVar)
3800 return r
3801 }
3802
3803
3804
3805
3806
// condBranch evaluates the boolean expression cond and decides where
// to branch based on its value: to yes if cond is true, to no if false.
// likely is a branch-prediction hint: positive means the yes branch is
// likely, negative means the no branch is likely, zero means no opinion.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// a && b: evaluate b only if a was true.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, mid, no, max8(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: if likely==1, both recursive calls pass 1.
		// If likely==-1, we don't have enough information to decide
		// whether the first branch is likely or not, so we pass 0 for
		// the likeliness of the first branch (via max8 above).
	case ir.OOROR:
		// a || b: evaluate b only if a was false.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, mid, min8(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: symmetric to OANDAND — if likely==-1 both calls pass
		// -1; if likely==1 the first branch gets 0 (via min8 above).
	case ir.ONOT:
		// !a: swap the branch targets and invert the prediction.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// A no-op conversion doesn't change the condition's value.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: compute cond and emit a two-way conditional block.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely)
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
3853
// skipMask is a bitmask selecting components of a compound value
// (the pointer, length, and capacity words of a slice header) whose
// assignment may be skipped.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // pointer word need not be stored
	skipLen                      // length word need not be stored
	skipCap                      // capacity word need not be stored
)
3861
3862
3863
3864
3865
3866
3867
// assign does left = right.
// right has already been evaluated to ssa, left has not.
// If deref is true, then we do left = *right instead (and right has
// already been nil-checked); if deref is true and right == nil, we
// just do left = 0.
// skip indicates assignments (at the top level) that can be avoided.
// It delegates to assignWhichMayOverlap with mayOverlap=false.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap is like assign, but takes an additional
// mayOverlap flag indicating whether left and right (when deref is
// true) might refer to partially overlapping memory.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		// Assignment to _ is a no-op.
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// We're assigning to a field of an ssa-able value.
			// We need to build a new structure with the new value
			// for the field we're assigning and the old values for
			// the other fields. For instance:
			//   type T struct {a, b, c int}
			//   var x T
			//   x.b = 5
			// For the x.b = 5 assignment we generate x = T{x.a, 5, x.c}.

			// Grab information about the structure type.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Grab the old value of the structure.
			old := s.expr(left.X)

			// Make the new structure.
			new := s.newValue0(ssa.StructMakeOp(t.NumFields()), t)

			// Add fields as args: the new value for the assigned
			// field, the old values for the others.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the new structure to the base of
			// the dot expression.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()
			// We're assigning to an element of an ssa-able array:
			//   a[i] = v
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index) // index
			if n == 0 {
				// The bounds check must fail. Might as well
				// ignore the actual index and just use zeros.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Rewrite to a = [1]{v}; the bounds check verifies i == 0.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain SSA-able name: just update the variable assignment.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire on-stack variable, emit an
	// OpVarDef so liveness analysis knows the variable is redefined.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Left is not ssa-able. Compute its address.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// The Data field of reflect.SliceHeader/StringHeader has
		// type uintptr but may legally hold a pointer; treat the
		// store as unsafe-pointer-typed so a write barrier is
		// inserted where needed.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Treat as a mem->mem move (or a zeroing when right is nil).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Treat as a store of right through addr.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
3979
3980
3981 func (s *state) zeroVal(t *types.Type) *ssa.Value {
3982 switch {
3983 case t.IsInteger():
3984 switch t.Size() {
3985 case 1:
3986 return s.constInt8(t, 0)
3987 case 2:
3988 return s.constInt16(t, 0)
3989 case 4:
3990 return s.constInt32(t, 0)
3991 case 8:
3992 return s.constInt64(t, 0)
3993 default:
3994 s.Fatalf("bad sized integer type %v", t)
3995 }
3996 case t.IsFloat():
3997 switch t.Size() {
3998 case 4:
3999 return s.constFloat32(t, 0)
4000 case 8:
4001 return s.constFloat64(t, 0)
4002 default:
4003 s.Fatalf("bad sized float type %v", t)
4004 }
4005 case t.IsComplex():
4006 switch t.Size() {
4007 case 8:
4008 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4009 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4010 case 16:
4011 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4012 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4013 default:
4014 s.Fatalf("bad sized complex type %v", t)
4015 }
4016
4017 case t.IsString():
4018 return s.constEmptyString(t)
4019 case t.IsPtrShaped():
4020 return s.constNil(t)
4021 case t.IsBoolean():
4022 return s.constBool(false)
4023 case t.IsInterface():
4024 return s.constInterface(t)
4025 case t.IsSlice():
4026 return s.constSlice(t)
4027 case t.IsStruct():
4028 n := t.NumFields()
4029 v := s.entryNewValue0(ssa.StructMakeOp(t.NumFields()), t)
4030 for i := 0; i < n; i++ {
4031 v.AddArg(s.zeroVal(t.FieldType(i)))
4032 }
4033 return v
4034 case t.IsArray():
4035 switch t.NumElem() {
4036 case 0:
4037 return s.entryNewValue0(ssa.OpArrayMake0, t)
4038 case 1:
4039 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4040 }
4041 }
4042 s.Fatalf("zero for type %v not implemented", t)
4043 return nil
4044 }
4045
// callKind distinguishes the ways a call can be compiled.
type callKind int8

const (
	callNormal     callKind = iota // ordinary function/method call
	callDefer                      // deferred call (defer f())
	callDeferStack                 // deferred call using a stack-allocated record
	callGo                         // goroutine start (go f())
	callTail                       // tail call
)
4055
// sfRtCallDef describes the runtime routine that emulates a
// floating-point SSA op on soft-float targets.
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime function implementing the op
	rtype types.Kind // kind of the operation's result
}
4060
// softFloatOps maps floating-point SSA ops to the runtime routines that
// emulate them. Populated by softfloatInit; consumed by sfcall.
var softFloatOps map[ssa.Op]sfRtCallDef

// softfloatInit builds the softFloatOps table. Several entries
// deliberately alias a different runtime routine; sfcall compensates:
// Sub maps to fadd (sfcall negates the second operand), Neq maps to feq
// (sfcall inverts the result), and Less/Leq map to fgt/fge (sfcall
// swaps the operands).
func softfloatInit() {
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4100
4101
4102
// sfcall converts a floating-point SSA op into a call to the soft-float
// runtime routine registered in softFloatOps. It returns the result and
// true if the op has a soft-float implementation, or nil and false.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the unsigned integer type used to carry
	// its bits through the runtime call; other types pass through.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// The table maps Less/Leq to fgt/fge; swap operands so
			// x < y becomes y > x (and x <= y becomes y >= x).
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// The table maps Sub to fadd; negate the second operand
			// so x - y becomes x + (-y).
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// The runtime routines take and return the float bits as
		// unsigned integers; convert the float args accordingly so we
		// use the right calling convention.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Convert the integer result bits back to a float.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// The table maps Neq to feq; invert the result.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4146
// intrinsics maps (architecture, package, function) triples to builders
// that expand calls to those functions inline rather than emitting real
// calls. Populated by InitTables.
var intrinsics map[intrinsicKey]intrinsicBuilder

// An intrinsicBuilder converts a call node n into an ssa value that
// implements that call as an intrinsic. args is a list of arguments to
// the func.
type intrinsicBuilder func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value

// intrinsicKey identifies an intrinsic for one target architecture.
type intrinsicKey struct {
	arch *sys.Arch // target architecture
	pkg  string    // import path of the package declaring the function
	fn   string    // function name
}
4158
4159 func InitTables() {
4160 intrinsics = map[intrinsicKey]intrinsicBuilder{}
4161
4162 var all []*sys.Arch
4163 var p4 []*sys.Arch
4164 var p8 []*sys.Arch
4165 var lwatomics []*sys.Arch
4166 for _, a := range &sys.Archs {
4167 all = append(all, a)
4168 if a.PtrSize == 4 {
4169 p4 = append(p4, a)
4170 } else {
4171 p8 = append(p8, a)
4172 }
4173 if a.Family != sys.PPC64 {
4174 lwatomics = append(lwatomics, a)
4175 }
4176 }
4177
4178
4179 add := func(pkg, fn string, b intrinsicBuilder, archs ...*sys.Arch) {
4180 for _, a := range archs {
4181 intrinsics[intrinsicKey{a, pkg, fn}] = b
4182 }
4183 }
4184
4185 addF := func(pkg, fn string, b intrinsicBuilder, archFamilies ...sys.ArchFamily) {
4186 m := 0
4187 for _, f := range archFamilies {
4188 if f >= 32 {
4189 panic("too many architecture families")
4190 }
4191 m |= 1 << uint(f)
4192 }
4193 for _, a := range all {
4194 if m>>uint(a.Family)&1 != 0 {
4195 intrinsics[intrinsicKey{a, pkg, fn}] = b
4196 }
4197 }
4198 }
4199
4200 alias := func(pkg, fn, pkg2, fn2 string, archs ...*sys.Arch) {
4201 aliased := false
4202 for _, a := range archs {
4203 if b, ok := intrinsics[intrinsicKey{a, pkg2, fn2}]; ok {
4204 intrinsics[intrinsicKey{a, pkg, fn}] = b
4205 aliased = true
4206 }
4207 }
4208 if !aliased {
4209 panic(fmt.Sprintf("attempted to alias undefined intrinsic: %s.%s", pkg, fn))
4210 }
4211 }
4212
4213
4214 if !base.Flag.Cfg.Instrumenting {
4215 add("runtime", "slicebytetostringtmp",
4216 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4217
4218
4219
4220 return s.newValue2(ssa.OpStringMake, n.Type(), args[0], args[1])
4221 },
4222 all...)
4223 }
4224 addF("runtime/internal/math", "MulUintptr",
4225 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4226 if s.config.PtrSize == 4 {
4227 return s.newValue2(ssa.OpMul32uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
4228 }
4229 return s.newValue2(ssa.OpMul64uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
4230 },
4231 sys.AMD64, sys.I386, sys.Loong64, sys.MIPS64, sys.RISCV64, sys.ARM64)
4232 add("runtime", "KeepAlive",
4233 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4234 data := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, args[0])
4235 s.vars[memVar] = s.newValue2(ssa.OpKeepAlive, types.TypeMem, data, s.mem())
4236 return nil
4237 },
4238 all...)
4239 add("runtime", "getclosureptr",
4240 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4241 return s.newValue0(ssa.OpGetClosurePtr, s.f.Config.Types.Uintptr)
4242 },
4243 all...)
4244
4245 add("runtime", "getcallerpc",
4246 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4247 return s.newValue0(ssa.OpGetCallerPC, s.f.Config.Types.Uintptr)
4248 },
4249 all...)
4250
4251 add("runtime", "getcallersp",
4252 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4253 return s.newValue1(ssa.OpGetCallerSP, s.f.Config.Types.Uintptr, s.mem())
4254 },
4255 all...)
4256
4257 addF("runtime", "publicationBarrier",
4258 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4259 s.vars[memVar] = s.newValue1(ssa.OpPubBarrier, types.TypeMem, s.mem())
4260 return nil
4261 },
4262 sys.ARM64, sys.PPC64, sys.RISCV64)
4263
4264 brev_arch := []sys.ArchFamily{sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X}
4265 if buildcfg.GOPPC64 >= 10 {
4266
4267
4268 brev_arch = append(brev_arch, sys.PPC64)
4269 }
4270
4271 addF("runtime/internal/sys", "Bswap32",
4272 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4273 return s.newValue1(ssa.OpBswap32, types.Types[types.TUINT32], args[0])
4274 },
4275 brev_arch...)
4276 addF("runtime/internal/sys", "Bswap64",
4277 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4278 return s.newValue1(ssa.OpBswap64, types.Types[types.TUINT64], args[0])
4279 },
4280 brev_arch...)
4281
4282
4283 makePrefetchFunc := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4284 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4285 s.vars[memVar] = s.newValue2(op, types.TypeMem, args[0], s.mem())
4286 return nil
4287 }
4288 }
4289
4290
4291
4292 addF("runtime/internal/sys", "Prefetch", makePrefetchFunc(ssa.OpPrefetchCache),
4293 sys.AMD64, sys.ARM64, sys.PPC64)
4294 addF("runtime/internal/sys", "PrefetchStreamed", makePrefetchFunc(ssa.OpPrefetchCacheStreamed),
4295 sys.AMD64, sys.ARM64, sys.PPC64)
4296
4297
4298 addF("internal/runtime/atomic", "Load",
4299 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4300 v := s.newValue2(ssa.OpAtomicLoad32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
4301 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4302 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4303 },
4304 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4305 addF("internal/runtime/atomic", "Load8",
4306 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4307 v := s.newValue2(ssa.OpAtomicLoad8, types.NewTuple(types.Types[types.TUINT8], types.TypeMem), args[0], s.mem())
4308 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4309 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT8], v)
4310 },
4311 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4312 addF("internal/runtime/atomic", "Load64",
4313 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4314 v := s.newValue2(ssa.OpAtomicLoad64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
4315 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4316 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4317 },
4318 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4319 addF("internal/runtime/atomic", "LoadAcq",
4320 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4321 v := s.newValue2(ssa.OpAtomicLoadAcq32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
4322 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4323 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4324 },
4325 sys.PPC64, sys.S390X)
4326 addF("internal/runtime/atomic", "LoadAcq64",
4327 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4328 v := s.newValue2(ssa.OpAtomicLoadAcq64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
4329 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4330 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4331 },
4332 sys.PPC64)
4333 addF("internal/runtime/atomic", "Loadp",
4334 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4335 v := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(s.f.Config.Types.BytePtr, types.TypeMem), args[0], s.mem())
4336 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4337 return s.newValue1(ssa.OpSelect0, s.f.Config.Types.BytePtr, v)
4338 },
4339 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4340
4341 addF("internal/runtime/atomic", "Store",
4342 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4343 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore32, types.TypeMem, args[0], args[1], s.mem())
4344 return nil
4345 },
4346 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4347 addF("internal/runtime/atomic", "Store8",
4348 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4349 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore8, types.TypeMem, args[0], args[1], s.mem())
4350 return nil
4351 },
4352 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4353 addF("internal/runtime/atomic", "Store64",
4354 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4355 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore64, types.TypeMem, args[0], args[1], s.mem())
4356 return nil
4357 },
4358 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4359 addF("internal/runtime/atomic", "StorepNoWB",
4360 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4361 s.vars[memVar] = s.newValue3(ssa.OpAtomicStorePtrNoWB, types.TypeMem, args[0], args[1], s.mem())
4362 return nil
4363 },
4364 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.RISCV64, sys.S390X)
4365 addF("internal/runtime/atomic", "StoreRel",
4366 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4367 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel32, types.TypeMem, args[0], args[1], s.mem())
4368 return nil
4369 },
4370 sys.PPC64, sys.S390X)
4371 addF("internal/runtime/atomic", "StoreRel64",
4372 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4373 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel64, types.TypeMem, args[0], args[1], s.mem())
4374 return nil
4375 },
4376 sys.PPC64)
4377
4378 addF("internal/runtime/atomic", "Xchg",
4379 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4380 v := s.newValue3(ssa.OpAtomicExchange32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
4381 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4382 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4383 },
4384 sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4385 addF("internal/runtime/atomic", "Xchg64",
4386 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4387 v := s.newValue3(ssa.OpAtomicExchange64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
4388 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4389 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4390 },
4391 sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4392
	// atomicOpEmitter emits the SSA for one variant of an atomic op. It is
	// shared by the ARM64 guard builder below so the same emitter runs for
	// both the LL/SC op and its LSE ("Variant") form.
	type atomicOpEmitter func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind)

	// makeAtomicGuardedIntrinsicARM64 builds an intrinsic that chooses at run
	// time between op0 (the LL/SC implementation) and op1 (the LSE variant),
	// unless GOARM64 already guarantees LSE at compile time. rtyp is the kind
	// of the intrinsic's result, or types.TNIL for memory-only operations.
	makeAtomicGuardedIntrinsicARM64 := func(op0, op1 ssa.Op, typ, rtyp types.Kind, emit atomicOpEmitter) intrinsicBuilder {

		return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if buildcfg.GOARM64.LSE {
				// LSE is guaranteed by the build configuration; emit the
				// variant op unconditionally.
				emit(s, n, args, op1, typ)
			} else {
				// Branch on the runtime-detected ARM64HasATOMICS flag.
				addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARM64HasATOMICS, s.sb)
				v := s.load(types.Types[types.TBOOL], addr)
				b := s.endBlock()
				b.Kind = ssa.BlockIf
				b.SetControl(v)
				bTrue := s.f.NewBlock(ssa.BlockPlain)
				bFalse := s.f.NewBlock(ssa.BlockPlain)
				bEnd := s.f.NewBlock(ssa.BlockPlain)
				b.AddEdgeTo(bTrue)
				b.AddEdgeTo(bFalse)
				b.Likely = ssa.BranchLikely // most ARM64 machines have LSE

				// LSE available: use the variant op.
				s.startBlock(bTrue)
				emit(s, n, args, op1, typ)
				s.endBlock().AddEdgeTo(bEnd)

				// Fall back to the LL/SC op.
				s.startBlock(bFalse)
				emit(s, n, args, op0, typ)
				s.endBlock().AddEdgeTo(bEnd)

				// Merge point for both arms.
				s.startBlock(bEnd)
			}
			if rtyp == types.TNIL {
				// Memory-only op: no value to return.
				return nil
			} else {
				return s.variable(n, types.Types[rtyp])
			}
		}
	}

	// atomicXchgXaddEmitterARM64 emits an exchange/add op yielding a
	// (value, memory) tuple and records both projections.
	atomicXchgXaddEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
		v := s.newValue3(op, types.NewTuple(types.Types[typ], types.TypeMem), args[0], args[1], s.mem())
		s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
		s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
	}
	// ARM64 exchange intrinsics, guarded on LSE support.
	addF("internal/runtime/atomic", "Xchg",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange32, ssa.OpAtomicExchange32Variant, types.TUINT32, types.TUINT32, atomicXchgXaddEmitterARM64),
		sys.ARM64)
	addF("internal/runtime/atomic", "Xchg64",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange64, ssa.OpAtomicExchange64Variant, types.TUINT64, types.TUINT64, atomicXchgXaddEmitterARM64),
		sys.ARM64)

	// Atomic add (fetch-and-add): returns the new value plus memory.
	addF("internal/runtime/atomic", "Xadd",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			v := s.newValue3(ssa.OpAtomicAdd32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
		},
		sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
	addF("internal/runtime/atomic", "Xadd64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			v := s.newValue3(ssa.OpAtomicAdd64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
		},
		sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)

	// ARM64 add intrinsics, guarded on LSE support.
	addF("internal/runtime/atomic", "Xadd",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd32, ssa.OpAtomicAdd32Variant, types.TUINT32, types.TUINT32, atomicXchgXaddEmitterARM64),
		sys.ARM64)
	addF("internal/runtime/atomic", "Xadd64",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd64, ssa.OpAtomicAdd64Variant, types.TUINT64, types.TUINT64, atomicXchgXaddEmitterARM64),
		sys.ARM64)
4468
	// Compare-and-swap: op takes (addr, old, new, mem) and produces a
	// (swapped bool, memory) tuple.
	addF("internal/runtime/atomic", "Cas",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
		},
		sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
	addF("internal/runtime/atomic", "Cas64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			v := s.newValue4(ssa.OpAtomicCompareAndSwap64, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
		},
		sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
	// CasRel: CAS with release ordering; PPC64 lowers it to the same
	// 32-bit CAS op (ordering handled by the backend).
	addF("internal/runtime/atomic", "CasRel",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
		},
		sys.PPC64)

	// atomicCasEmitterARM64 emits a CAS op and records its bool result
	// (as kind typ) and memory projection.
	atomicCasEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
		v := s.newValue4(op, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
		s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
		s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
	}

	// ARM64 CAS intrinsics, guarded on LSE support; result kind is TBOOL.
	addF("internal/runtime/atomic", "Cas",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap32, ssa.OpAtomicCompareAndSwap32Variant, types.TUINT32, types.TBOOL, atomicCasEmitterARM64),
		sys.ARM64)
	addF("internal/runtime/atomic", "Cas64",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap64, ssa.OpAtomicCompareAndSwap64Variant, types.TUINT64, types.TBOOL, atomicCasEmitterARM64),
		sys.ARM64)
4503
	// Atomic bitwise AND/OR: memory-only effect, no result value.
	addF("internal/runtime/atomic", "And8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd8, types.TypeMem, args[0], args[1], s.mem())
			return nil
		},
		sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
	addF("internal/runtime/atomic", "And",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd32, types.TypeMem, args[0], args[1], s.mem())
			return nil
		},
		sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
	addF("internal/runtime/atomic", "Or8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			s.vars[memVar] = s.newValue3(ssa.OpAtomicOr8, types.TypeMem, args[0], args[1], s.mem())
			return nil
		},
		sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
	addF("internal/runtime/atomic", "Or",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			s.vars[memVar] = s.newValue3(ssa.OpAtomicOr32, types.TypeMem, args[0], args[1], s.mem())
			return nil
		},
		sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)

	// atomicAndOrEmitterARM64 emits a memory-only AND/OR op.
	atomicAndOrEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
		s.vars[memVar] = s.newValue3(op, types.TypeMem, args[0], args[1], s.mem())
	}

	// ARM64 AND/OR intrinsics, guarded on LSE; TNIL marks them result-less.
	addF("internal/runtime/atomic", "And8",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd8, ssa.OpAtomicAnd8Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
		sys.ARM64)
	addF("internal/runtime/atomic", "And",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd32, ssa.OpAtomicAnd32Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
		sys.ARM64)
	addF("internal/runtime/atomic", "Or8",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr8, ssa.OpAtomicOr8Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
		sys.ARM64)
	addF("internal/runtime/atomic", "Or",
		makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr32, ssa.OpAtomicOr32Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
		sys.ARM64)
4545
4546
	// Aliases: map typed wrappers onto the base intrinsics. p4/p8 restrict
	// an alias to 4-/8-byte-pointer architectures so uintptr-sized wrappers
	// pick the matching width; lwatomics covers archs whose plain atomics
	// already have acquire/release semantics.
	alias("internal/runtime/atomic", "Loadint32", "internal/runtime/atomic", "Load", all...)
	alias("internal/runtime/atomic", "Loadint64", "internal/runtime/atomic", "Load64", all...)
	alias("internal/runtime/atomic", "Loaduintptr", "internal/runtime/atomic", "Load", p4...)
	alias("internal/runtime/atomic", "Loaduintptr", "internal/runtime/atomic", "Load64", p8...)
	alias("internal/runtime/atomic", "Loaduint", "internal/runtime/atomic", "Load", p4...)
	alias("internal/runtime/atomic", "Loaduint", "internal/runtime/atomic", "Load64", p8...)
	alias("internal/runtime/atomic", "LoadAcq", "internal/runtime/atomic", "Load", lwatomics...)
	alias("internal/runtime/atomic", "LoadAcq64", "internal/runtime/atomic", "Load64", lwatomics...)
	alias("internal/runtime/atomic", "LoadAcquintptr", "internal/runtime/atomic", "LoadAcq", p4...)
	alias("sync", "runtime_LoadAcquintptr", "internal/runtime/atomic", "LoadAcq", p4...)
	alias("internal/runtime/atomic", "LoadAcquintptr", "internal/runtime/atomic", "LoadAcq64", p8...)
	alias("sync", "runtime_LoadAcquintptr", "internal/runtime/atomic", "LoadAcq64", p8...)

	// Store wrappers.
	alias("internal/runtime/atomic", "Storeint32", "internal/runtime/atomic", "Store", all...)
	alias("internal/runtime/atomic", "Storeint64", "internal/runtime/atomic", "Store64", all...)
	alias("internal/runtime/atomic", "Storeuintptr", "internal/runtime/atomic", "Store", p4...)
	alias("internal/runtime/atomic", "Storeuintptr", "internal/runtime/atomic", "Store64", p8...)
	alias("internal/runtime/atomic", "StoreRel", "internal/runtime/atomic", "Store", lwatomics...)
	alias("internal/runtime/atomic", "StoreRel64", "internal/runtime/atomic", "Store64", lwatomics...)
	alias("internal/runtime/atomic", "StoreReluintptr", "internal/runtime/atomic", "StoreRel", p4...)
	alias("sync", "runtime_StoreReluintptr", "internal/runtime/atomic", "StoreRel", p4...)
	alias("internal/runtime/atomic", "StoreReluintptr", "internal/runtime/atomic", "StoreRel64", p8...)
	alias("sync", "runtime_StoreReluintptr", "internal/runtime/atomic", "StoreRel64", p8...)

	// Exchange wrappers.
	alias("internal/runtime/atomic", "Xchgint32", "internal/runtime/atomic", "Xchg", all...)
	alias("internal/runtime/atomic", "Xchgint64", "internal/runtime/atomic", "Xchg64", all...)
	alias("internal/runtime/atomic", "Xchguintptr", "internal/runtime/atomic", "Xchg", p4...)
	alias("internal/runtime/atomic", "Xchguintptr", "internal/runtime/atomic", "Xchg64", p8...)

	// Add wrappers.
	alias("internal/runtime/atomic", "Xaddint32", "internal/runtime/atomic", "Xadd", all...)
	alias("internal/runtime/atomic", "Xaddint64", "internal/runtime/atomic", "Xadd64", all...)
	alias("internal/runtime/atomic", "Xadduintptr", "internal/runtime/atomic", "Xadd", p4...)
	alias("internal/runtime/atomic", "Xadduintptr", "internal/runtime/atomic", "Xadd64", p8...)

	// CAS wrappers.
	alias("internal/runtime/atomic", "Casint32", "internal/runtime/atomic", "Cas", all...)
	alias("internal/runtime/atomic", "Casint64", "internal/runtime/atomic", "Cas64", all...)
	alias("internal/runtime/atomic", "Casuintptr", "internal/runtime/atomic", "Cas", p4...)
	alias("internal/runtime/atomic", "Casuintptr", "internal/runtime/atomic", "Cas64", p8...)
	alias("internal/runtime/atomic", "Casp1", "internal/runtime/atomic", "Cas", p4...)
	alias("internal/runtime/atomic", "Casp1", "internal/runtime/atomic", "Cas64", p8...)
	alias("internal/runtime/atomic", "CasRel", "internal/runtime/atomic", "Cas", lwatomics...)
4592
4593
	// math package intrinsics: single-instruction lowerings where the
	// architecture has a matching FP operation.
	addF("math", "sqrt",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpSqrt, types.Types[types.TFLOAT64], args[0])
		},
		sys.I386, sys.AMD64, sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
	addF("math", "Trunc",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpTrunc, types.Types[types.TFLOAT64], args[0])
		},
		sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
	addF("math", "Ceil",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpCeil, types.Types[types.TFLOAT64], args[0])
		},
		sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
	addF("math", "Floor",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpFloor, types.Types[types.TFLOAT64], args[0])
		},
		sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
	addF("math", "Round",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpRound, types.Types[types.TFLOAT64], args[0])
		},
		sys.ARM64, sys.PPC64, sys.S390X)
	addF("math", "RoundToEven",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpRoundToEven, types.Types[types.TFLOAT64], args[0])
		},
		sys.ARM64, sys.S390X, sys.Wasm)
	addF("math", "Abs",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpAbs, types.Types[types.TFLOAT64], args[0])
		},
		sys.ARM64, sys.ARM, sys.PPC64, sys.RISCV64, sys.Wasm, sys.MIPS, sys.MIPS64)
	addF("math", "Copysign",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue2(ssa.OpCopysign, types.Types[types.TFLOAT64], args[0], args[1])
		},
		sys.PPC64, sys.RISCV64, sys.Wasm)
	// FMA on architectures where fused multiply-add is always available.
	addF("math", "FMA",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
		},
		sys.ARM64, sys.PPC64, sys.RISCV64, sys.S390X)
	// AMD64 FMA: emit a runtime CPU-feature check unless GOAMD64>=v3
	// guarantees the FMA instruction; otherwise fall back to the call.
	addF("math", "FMA",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if !s.config.UseFMA {
				// FMA disabled (e.g. for reproducible floating point):
				// always use the library call.
				s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
				return s.variable(n, types.Types[types.TFLOAT64])
			}

			if buildcfg.GOAMD64 >= 3 {
				return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
			}

			// Branch on the x86HasFMA feature flag.
			v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasFMA)
			b := s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(v)
			bTrue := s.f.NewBlock(ssa.BlockPlain)
			bFalse := s.f.NewBlock(ssa.BlockPlain)
			bEnd := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bTrue)
			b.AddEdgeTo(bFalse)
			b.Likely = ssa.BranchLikely

			// We have the intrinsic - use it directly.
			s.startBlock(bTrue)
			s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
			s.endBlock().AddEdgeTo(bEnd)

			// Call the pure Go version.
			s.startBlock(bFalse)
			s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
			s.endBlock().AddEdgeTo(bEnd)

			// Merge results.
			s.startBlock(bEnd)
			return s.variable(n, types.Types[types.TFLOAT64])
		},
		sys.AMD64)
	// ARM FMA: same shape, guarded on the runtime VFPv4 detection flag.
	addF("math", "FMA",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if !s.config.UseFMA {
				s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
				return s.variable(n, types.Types[types.TFLOAT64])
			}
			addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARMHasVFPv4, s.sb)
			v := s.load(types.Types[types.TBOOL], addr)
			b := s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(v)
			bTrue := s.f.NewBlock(ssa.BlockPlain)
			bFalse := s.f.NewBlock(ssa.BlockPlain)
			bEnd := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bTrue)
			b.AddEdgeTo(bFalse)
			b.Likely = ssa.BranchLikely

			// We have the intrinsic - use it directly.
			s.startBlock(bTrue)
			s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
			s.endBlock().AddEdgeTo(bEnd)

			// Call the pure Go version.
			s.startBlock(bFalse)
			s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
			s.endBlock().AddEdgeTo(bEnd)

			// Merge results.
			s.startBlock(bEnd)
			return s.variable(n, types.Types[types.TFLOAT64])
		},
		sys.ARM)
4709
	// makeRoundAMD64 builds an AMD64 rounding intrinsic: emit op directly
	// when GOAMD64>=v2 guarantees SSE4.1 (ROUNDSD), otherwise branch on the
	// runtime SSE4.1 feature flag and fall back to the library call.
	makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
		return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if buildcfg.GOAMD64 >= 2 {
				return s.newValue1(op, types.Types[types.TFLOAT64], args[0])
			}

			v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasSSE41)
			b := s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(v)
			bTrue := s.f.NewBlock(ssa.BlockPlain)
			bFalse := s.f.NewBlock(ssa.BlockPlain)
			bEnd := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bTrue)
			b.AddEdgeTo(bFalse)
			b.Likely = ssa.BranchLikely // most machines have sse4.1 nowadays

			// We have the intrinsic - use it directly.
			s.startBlock(bTrue)
			s.vars[n] = s.newValue1(op, types.Types[types.TFLOAT64], args[0])
			s.endBlock().AddEdgeTo(bEnd)

			// Call the pure Go version.
			s.startBlock(bFalse)
			s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
			s.endBlock().AddEdgeTo(bEnd)

			// Merge results.
			s.startBlock(bEnd)
			return s.variable(n, types.Types[types.TFLOAT64])
		}
	}
	addF("math", "RoundToEven",
		makeRoundAMD64(ssa.OpRoundToEven),
		sys.AMD64)
	addF("math", "Floor",
		makeRoundAMD64(ssa.OpFloor),
		sys.AMD64)
	addF("math", "Ceil",
		makeRoundAMD64(ssa.OpCeil),
		sys.AMD64)
	addF("math", "Trunc",
		makeRoundAMD64(ssa.OpTrunc),
		sys.AMD64)
4754
4755
	// math/bits trailing-zero intrinsics. Narrow widths are either native
	// ops or are widened with a sentinel bit OR'd in so a zero input yields
	// the correct count (16 or 8) from the wider count-trailing-zeros op.
	addF("math/bits", "TrailingZeros64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], args[0])
		},
		sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
	addF("math/bits", "TrailingZeros32",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], args[0])
		},
		sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
	addF("math/bits", "TrailingZeros16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Widen to 32 bits and set bit 16 so ctz32(0x1xxxx) == 16 for x == 0.
			x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
			c := s.constInt32(types.Types[types.TUINT32], 1<<16)
			y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
			return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
		},
		sys.MIPS)
	addF("math/bits", "TrailingZeros16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpCtz16, types.Types[types.TINT], args[0])
		},
		sys.AMD64, sys.I386, sys.ARM, sys.ARM64, sys.Wasm)
	addF("math/bits", "TrailingZeros16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Widen to 64 bits with a sentinel at bit 16.
			x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
			c := s.constInt64(types.Types[types.TUINT64], 1<<16)
			y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
			return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
		},
		sys.S390X, sys.PPC64)
	addF("math/bits", "TrailingZeros8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Widen to 32 bits with a sentinel at bit 8.
			x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
			c := s.constInt32(types.Types[types.TUINT32], 1<<8)
			y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
			return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
		},
		sys.MIPS)
	addF("math/bits", "TrailingZeros8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpCtz8, types.Types[types.TINT], args[0])
		},
		sys.AMD64, sys.I386, sys.ARM, sys.ARM64, sys.Wasm)
	addF("math/bits", "TrailingZeros8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Widen to 64 bits with a sentinel at bit 8.
			x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
			c := s.constInt64(types.Types[types.TUINT64], 1<<8)
			y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
			return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
		},
		sys.S390X)
	alias("math/bits", "ReverseBytes64", "runtime/internal/sys", "Bswap64", all...)
	alias("math/bits", "ReverseBytes32", "runtime/internal/sys", "Bswap32", all...)

	// ReverseBytes16 is only intrinsified on POWER10, which has a 16-bit
	// byte-swap instruction.
	if buildcfg.GOPPC64 >= 10 {
		addF("math/bits", "ReverseBytes16",
			func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
				return s.newValue1(ssa.OpBswap16, types.Types[types.TUINT], args[0])
			},
			sys.PPC64)
	}
4820
	// math/bits.Len* intrinsics: bit length via native BitLen ops, with
	// zero-extension to the pointer-sized width where no narrow op exists.
	addF("math/bits", "Len64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
		},
		sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
	addF("math/bits", "Len32",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
		},
		sys.AMD64, sys.ARM64, sys.PPC64)
	addF("math/bits", "Len32",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if s.config.PtrSize == 4 {
				return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
			}
			// 64-bit target: widen and use the 64-bit op.
			x := s.newValue1(ssa.OpZeroExt32to64, types.Types[types.TUINT64], args[0])
			return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
		},
		sys.ARM, sys.S390X, sys.MIPS, sys.Wasm)
	addF("math/bits", "Len16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if s.config.PtrSize == 4 {
				x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
				return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
			}
			x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
			return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
		},
		sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
	addF("math/bits", "Len16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitLen16, types.Types[types.TINT], args[0])
		},
		sys.AMD64)
	addF("math/bits", "Len8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if s.config.PtrSize == 4 {
				x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
				return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
			}
			x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
			return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
		},
		sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
	addF("math/bits", "Len8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitLen8, types.Types[types.TINT], args[0])
		},
		sys.AMD64)
	addF("math/bits", "Len",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Len is the word-sized variant: pick the op by pointer size.
			if s.config.PtrSize == 4 {
				return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
			}
			return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
		},
		sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4878
	// Bit-reversal intrinsics (ARM64 has RBIT).
	addF("math/bits", "Reverse64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
		},
		sys.ARM64)
	addF("math/bits", "Reverse32",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitRev32, types.Types[types.TINT], args[0])
		},
		sys.ARM64)
	addF("math/bits", "Reverse16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitRev16, types.Types[types.TINT], args[0])
		},
		sys.ARM64)
	addF("math/bits", "Reverse8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBitRev8, types.Types[types.TINT], args[0])
		},
		sys.ARM64)
	addF("math/bits", "Reverse",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Registered only on ARM64 (64-bit), so the 64-bit op is correct.
			return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
		},
		sys.ARM64)
	// Left-rotate intrinsics.
	addF("math/bits", "RotateLeft8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue2(ssa.OpRotateLeft8, types.Types[types.TUINT8], args[0], args[1])
		},
		sys.AMD64, sys.RISCV64)
	addF("math/bits", "RotateLeft16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue2(ssa.OpRotateLeft16, types.Types[types.TUINT16], args[0], args[1])
		},
		sys.AMD64, sys.RISCV64)
	addF("math/bits", "RotateLeft32",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue2(ssa.OpRotateLeft32, types.Types[types.TUINT32], args[0], args[1])
		},
		sys.AMD64, sys.ARM, sys.ARM64, sys.Loong64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
	addF("math/bits", "RotateLeft64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue2(ssa.OpRotateLeft64, types.Types[types.TUINT64], args[0], args[1])
		},
		sys.AMD64, sys.ARM64, sys.Loong64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
	// RotateLeft (word size) only on 8-byte-pointer targets.
	alias("math/bits", "RotateLeft", "math/bits", "RotateLeft64", p8...)
4925
	// makeOnesCountAMD64 builds a popcount intrinsic: POPCNT is guaranteed
	// at GOAMD64>=v2; otherwise branch on the runtime POPCNT feature flag
	// and fall back to the library call.
	makeOnesCountAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
		return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			if buildcfg.GOAMD64 >= 2 {
				return s.newValue1(op, types.Types[types.TINT], args[0])
			}

			v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasPOPCNT)
			b := s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(v)
			bTrue := s.f.NewBlock(ssa.BlockPlain)
			bFalse := s.f.NewBlock(ssa.BlockPlain)
			bEnd := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bTrue)
			b.AddEdgeTo(bFalse)
			b.Likely = ssa.BranchLikely // most machines have popcnt nowadays

			// We have the intrinsic - use it directly.
			s.startBlock(bTrue)
			s.vars[n] = s.newValue1(op, types.Types[types.TINT], args[0])
			s.endBlock().AddEdgeTo(bEnd)

			// Call the pure Go version.
			s.startBlock(bFalse)
			s.vars[n] = s.callResult(n, callNormal) // types.Types[TINT]
			s.endBlock().AddEdgeTo(bEnd)

			// Merge results.
			s.startBlock(bEnd)
			return s.variable(n, types.Types[types.TINT])
		}
	}
	addF("math/bits", "OnesCount64",
		makeOnesCountAMD64(ssa.OpPopCount64),
		sys.AMD64)
	addF("math/bits", "OnesCount64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpPopCount64, types.Types[types.TINT], args[0])
		},
		sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
	addF("math/bits", "OnesCount32",
		makeOnesCountAMD64(ssa.OpPopCount32),
		sys.AMD64)
	addF("math/bits", "OnesCount32",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpPopCount32, types.Types[types.TINT], args[0])
		},
		sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
	addF("math/bits", "OnesCount16",
		makeOnesCountAMD64(ssa.OpPopCount16),
		sys.AMD64)
	addF("math/bits", "OnesCount16",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpPopCount16, types.Types[types.TINT], args[0])
		},
		sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm)
	addF("math/bits", "OnesCount8",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpPopCount8, types.Types[types.TINT], args[0])
		},
		sys.S390X, sys.PPC64, sys.Wasm)
	// OnesCount (word size): registered only on AMD64 (64-bit).
	addF("math/bits", "OnesCount",
		makeOnesCountAMD64(ssa.OpPopCount64),
		sys.AMD64)
	// Wide arithmetic: full 64x64->128 multiply and 64-bit add/sub with carry.
	addF("math/bits", "Mul64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue2(ssa.OpMul64uhilo, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1])
		},
		sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.MIPS64, sys.RISCV64, sys.Loong64)
	alias("math/bits", "Mul", "math/bits", "Mul64", p8...)
	alias("runtime/internal/math", "Mul64", "math/bits", "Mul64", p8...)
	addF("math/bits", "Add64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue3(ssa.OpAdd64carry, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
		},
		sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.RISCV64, sys.Loong64, sys.MIPS64)
	alias("math/bits", "Add", "math/bits", "Add64", p8...)
	alias("runtime/internal/math", "Add64", "math/bits", "Add64", all...)
	addF("math/bits", "Sub64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			return s.newValue3(ssa.OpSub64borrow, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
		},
		sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.RISCV64, sys.Loong64, sys.MIPS64)
	alias("math/bits", "Sub", "math/bits", "Sub64", p8...)
	addF("math/bits", "Div64",
		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
			// Check for divide-by-zero/overflow and panic with appropriate message.
			cmpZero := s.newValue2(s.ssaOp(ir.ONE, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[2], s.zeroVal(types.Types[types.TUINT64]))
			s.check(cmpZero, ir.Syms.Panicdivide)
			cmpOverflow := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[0], args[2])
			s.check(cmpOverflow, ir.Syms.Panicoverflow)
			return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
		},
		sys.AMD64)
	alias("math/bits", "Div", "math/bits", "Div64", sys.ArchAMD64)
5021
	// runtime/internal/sys wrappers for the math/bits intrinsics.
	alias("runtime/internal/sys", "TrailingZeros8", "math/bits", "TrailingZeros8", all...)
	alias("runtime/internal/sys", "TrailingZeros32", "math/bits", "TrailingZeros32", all...)
	alias("runtime/internal/sys", "TrailingZeros64", "math/bits", "TrailingZeros64", all...)
	alias("runtime/internal/sys", "Len8", "math/bits", "Len8", all...)
	alias("runtime/internal/sys", "Len64", "math/bits", "Len64", all...)
	alias("runtime/internal/sys", "OnesCount64", "math/bits", "OnesCount64", all...)

	// sync/atomic maps directly onto the internal/runtime/atomic
	// intrinsics; uintptr variants are split by pointer size (p4/p8).

	alias("sync/atomic", "LoadInt32", "internal/runtime/atomic", "Load", all...)
	alias("sync/atomic", "LoadInt64", "internal/runtime/atomic", "Load64", all...)
	alias("sync/atomic", "LoadPointer", "internal/runtime/atomic", "Loadp", all...)
	alias("sync/atomic", "LoadUint32", "internal/runtime/atomic", "Load", all...)
	alias("sync/atomic", "LoadUint64", "internal/runtime/atomic", "Load64", all...)
	alias("sync/atomic", "LoadUintptr", "internal/runtime/atomic", "Load", p4...)
	alias("sync/atomic", "LoadUintptr", "internal/runtime/atomic", "Load64", p8...)

	alias("sync/atomic", "StoreInt32", "internal/runtime/atomic", "Store", all...)
	alias("sync/atomic", "StoreInt64", "internal/runtime/atomic", "Store64", all...)
	// Note: not StorePointer, that needs a write barrier.  Same below for {CompareAndSwap,Swap}.
	alias("sync/atomic", "StoreUint32", "internal/runtime/atomic", "Store", all...)
	alias("sync/atomic", "StoreUint64", "internal/runtime/atomic", "Store64", all...)
	alias("sync/atomic", "StoreUintptr", "internal/runtime/atomic", "Store", p4...)
	alias("sync/atomic", "StoreUintptr", "internal/runtime/atomic", "Store64", p8...)

	alias("sync/atomic", "SwapInt32", "internal/runtime/atomic", "Xchg", all...)
	alias("sync/atomic", "SwapInt64", "internal/runtime/atomic", "Xchg64", all...)
	alias("sync/atomic", "SwapUint32", "internal/runtime/atomic", "Xchg", all...)
	alias("sync/atomic", "SwapUint64", "internal/runtime/atomic", "Xchg64", all...)
	alias("sync/atomic", "SwapUintptr", "internal/runtime/atomic", "Xchg", p4...)
	alias("sync/atomic", "SwapUintptr", "internal/runtime/atomic", "Xchg64", p8...)

	alias("sync/atomic", "CompareAndSwapInt32", "internal/runtime/atomic", "Cas", all...)
	alias("sync/atomic", "CompareAndSwapInt64", "internal/runtime/atomic", "Cas64", all...)
	alias("sync/atomic", "CompareAndSwapUint32", "internal/runtime/atomic", "Cas", all...)
	alias("sync/atomic", "CompareAndSwapUint64", "internal/runtime/atomic", "Cas64", all...)
	alias("sync/atomic", "CompareAndSwapUintptr", "internal/runtime/atomic", "Cas", p4...)
	alias("sync/atomic", "CompareAndSwapUintptr", "internal/runtime/atomic", "Cas64", p8...)

	alias("sync/atomic", "AddInt32", "internal/runtime/atomic", "Xadd", all...)
	alias("sync/atomic", "AddInt64", "internal/runtime/atomic", "Xadd64", all...)
	alias("sync/atomic", "AddUint32", "internal/runtime/atomic", "Xadd", all...)
	alias("sync/atomic", "AddUint64", "internal/runtime/atomic", "Xadd64", all...)
	alias("sync/atomic", "AddUintptr", "internal/runtime/atomic", "Xadd", p4...)
	alias("sync/atomic", "AddUintptr", "internal/runtime/atomic", "Xadd64", p8...)

	// math/big's assembly word multiply maps onto Mul64 on 64-bit targets.
	alias("math/big", "mulWW", "math/bits", "Mul64", p8...)
5071 }
5072
5073
5074
5075 func findIntrinsic(sym *types.Sym) intrinsicBuilder {
5076 if sym == nil || sym.Pkg == nil {
5077 return nil
5078 }
5079 pkg := sym.Pkg.Path
5080 if sym.Pkg == ir.Pkgs.Runtime {
5081 pkg = "runtime"
5082 }
5083 if base.Flag.Race && pkg == "sync/atomic" {
5084
5085
5086 return nil
5087 }
5088
5089
5090 if Arch.SoftFloat && pkg == "math" {
5091 return nil
5092 }
5093
5094 fn := sym.Name
5095 if ssa.IntrinsicsDisable {
5096 if pkg == "runtime" && (fn == "getcallerpc" || fn == "getcallersp" || fn == "getclosureptr") {
5097
5098 } else {
5099 return nil
5100 }
5101 }
5102 return intrinsics[intrinsicKey{Arch.LinkArch.Arch, pkg, fn}]
5103 }
5104
5105 func IsIntrinsicCall(n *ir.CallExpr) bool {
5106 if n == nil {
5107 return false
5108 }
5109 name, ok := n.Fun.(*ir.Name)
5110 if !ok {
5111 return false
5112 }
5113 return findIntrinsic(name.Sym()) != nil
5114 }
5115
5116
5117 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
5118 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
5119 if ssa.IntrinsicsDebug > 0 {
5120 x := v
5121 if x == nil {
5122 x = s.mem()
5123 }
5124 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
5125 x = x.Args[0]
5126 }
5127 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
5128 }
5129 return v
5130 }
5131
5132
5133 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
5134 args := make([]*ssa.Value, len(n.Args))
5135 for i, n := range n.Args {
5136 args[i] = s.expr(n)
5137 }
5138 return args
5139 }
5140
5141
5142
5143
5144
5145
5146
// openDeferRecord adds code to evaluate and store the function for an
// open-coded defer call, and records info about the defer, so we can generate
// the code for the deferred function at exit time.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	// Open-coded defers are limited to calls with no arguments and no results.
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun
	// We must always store the function value in a stack slot for the
	// runtime panic code to use. But in the defer exit code we can call
	// the function directly if it is a static function.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Update deferBits only after evaluation and storage to stack of
	// the function is successful.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
5175
5176
5177
5178
5179
5180
// openDeferSave generates SSA nodes to store a value (with type t) for an
// open-coded defer at an explicit autotmp location on the stack, so it can be
// reloaded and used for the appropriate call on exit. Callers only save
// pointer-containing, SSA-able values here (both checked below).
// Returns the address of the stack slot.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// Record which defer this slot belongs to (reused as an index here).
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	if s.curBlock.ID != s.f.Entry.ID {
		// Force the VarDef/VarLive and the address computation for this
		// temp into the entry block, so the slot is live for the defer
		// exit code regardless of which block we are currently in.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		// VarLive keeps the slot from being removed by dead-store elimination.
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Already in the entry block; emit at the current position
		// (false = not a statement boundary).
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot is live across the whole function (the deferred call may
	// happen on any exit path), so it must be zeroed at function entry to
	// keep the GC from seeing junk pointers.
	temp.SetNeedzero(true)

	// Finally, store the value into the slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
5225
5226
5227
5228
5229
// openDeferExit generates SSA for processing all the open-coded defers at
// function exit. The code loads deferBits and checks each bit to see whether
// the corresponding defer was activated; for each set bit, the associated
// deferred call is made (in reverse registration order).
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Remember this exit sequence so later returns can branch to it
	// instead of regenerating it (see lastDeferExit users).
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Run defers in reverse order of registration.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Check if bit i is set: skip the call (branch to bEnd) if
		// deferBits&(1<<i) == 0.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit (both in the stack slot and the SSA
		// variable) before making the call, so a panic during the call
		// does not re-run this defer.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		s.vars[deferBitsVar] = maskedval

		// Generate the deferred call: either through the saved closure
		// (dynamic) or directly to the static function.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Load the closure from its stack slot and call through it.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Make sure the closure stack slot is kept live through the call
		// (the runtime panic machinery may still need it).
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
5299
// callResult generates the call n with kind k and returns the call's result value.
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
5303
// callAddr generates the call n with kind k and returns the address of the
// call's result slot (instead of the result value itself).
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
5307
5308
5309
// call converts the call n into SSA form using call kind k. If returnResultAddr
// is true, it returns the address of the first result instead of its value.
// deferExtra, if non-nil, is an extra argument passed along for defer calls.
// Returns nil when the call produces no usable result (no results, or a
// go/defer/tail call).
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target function (if static)
	var closure *ssa.Value   // ptr to closure to run (if dynamic)
	var codeptr *ssa.Value   // ptr to target code (if dynamic)
	var dextra *ssa.Value    // defer extra arg
	var rcvr *ssa.Value      // receiver to set (interface calls)
	fn := n.Fun
	var ACArgs []*types.Type    // AuxCall argument types
	var ACResults []*types.Type // AuxCall result types
	var callArgs []*ssa.Value   // for late expansion, the args themselves (args to the call, not stored)

	callABI := s.f.ABIDefault

	// go/defer calls are restricted to argument-free, result-free function calls.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			// Direct call to a statically known function.
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be a direct call to a
				// non-ABIInternal function. fn.Func may be nil for some
				// compiler-generated functions, but those are all ABIInternal.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// The pragma is the pre-regabi opt-in mechanism for register args.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// Deferred nil function needs to panic when the function is
			// invoked, not at the point of the defer statement.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not compute pointer maps */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a defer struct on the stack and store the function into it.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // deferprocStack takes one pointer arg
	} else {
		// Collect arguments, including the go/defer function value and
		// receiver for interface calls, in SP-offset order.
		argStart := base.Ctxt.Arch.FixedFrameSize
		// go/defer args come first.
		if k != callNormal && k != callTail {
			// The function value is the argument to newproc/deferproc.
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface-typed argument for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// With open-coded defers, evaluating call args may emit stores for
		// defer slots in the entry block; split off a new block so those
		// entry-block stores stay ordered before the call's own stores.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Build the call op, depending on how the target is known.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a closure is
			// always safe, and it is critical that we not clobber any
			// arguments already stored onto the stack.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call through the previously loaded code pointer.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call does not use stack
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // call operations carry the argsize of the callee
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
	// Insert VarLive ops for variables that must stay live across the call.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Finish the block for defers: a BlockDefer has two successors, the
	// normal continuation and the recover path that runs the exit code.
	if k == callDefer || k == callDeferStack {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		// Add recover edge to exit code.
		r := s.f.NewBlock(ssa.BlockPlain)
		s.startBlock(r)
		s.exit()
		b.AddEdgeTo(r)
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// Call has no usable return value.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
5535
5536
5537
// maybeNilCheckClosure checks if a nil check of a closure is needed in some
// architecture-dependent situations and, if so, emits the nil check.
// Note the precedence: the condition is Wasm || (aix && k != callGo), so on
// Wasm the check is always emitted regardless of call kind.
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		// On AIX, the closure needs to be verified as fn can be nil, except
		// for go calls, which the runtime handles itself.
		s.nilCheck(closure)
	}
}
5545
5546
5547
// getClosureAndRcvr returns the address of the method slot in the itab and
// the receiver (the interface's data word) for the interface call fn.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	// A nil itab means a nil interface; calling through it must panic here.
	s.nilCheck(itab)
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
5557
5558
5559
5560 func etypesign(e types.Kind) int8 {
5561 switch e {
5562 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5563 return -1
5564 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5565 return +1
5566 }
5567 return 0
5568 }
5569
5570
5571
// addr converts the address of the expression n to SSA, adds it to s and
// returns the SSA result. The returned value is guaranteed to be non-nil.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	// Taking the address of an SSA-able value would defeat SSA form;
	// such values must have been rewritten earlier.
	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset computes the address of offset bytes into lsym.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// TODO: Make OpAddr use AuxInt as well as Aux.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			// ensure that we reuse symbols for out parameters so
			// that cse works on their addresses
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		// The assertion result must still be sitting in memory; its
		// load's address is the address of the expression.
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5683
5684
5685
5686 func (s *state) canSSA(n ir.Node) bool {
5687 if base.Flag.N != 0 {
5688 return false
5689 }
5690 for {
5691 nn := n
5692 if nn.Op() == ir.ODOT {
5693 nn := nn.(*ir.SelectorExpr)
5694 n = nn.X
5695 continue
5696 }
5697 if nn.Op() == ir.OINDEX {
5698 nn := nn.(*ir.IndexExpr)
5699 if nn.X.Type().IsArray() {
5700 n = nn.X
5701 continue
5702 }
5703 }
5704 break
5705 }
5706 if n.Op() != ir.ONAME {
5707 return false
5708 }
5709 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5710 }
5711
5712 func (s *state) canSSAName(name *ir.Name) bool {
5713 if name.Addrtaken() || !name.OnStack() {
5714 return false
5715 }
5716 switch name.Class {
5717 case ir.PPARAMOUT:
5718 if s.hasdefer {
5719
5720
5721
5722
5723
5724 return false
5725 }
5726 if s.cgoUnsafeArgs {
5727
5728
5729 return false
5730 }
5731 }
5732 return true
5733
5734 }
5735
5736
5737 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5738 p := s.expr(n)
5739 if bounded || n.NonNil() {
5740 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5741 s.f.Warnl(lineno, "removed nil check")
5742 }
5743 return p
5744 }
5745 p = s.nilCheck(p)
5746 return p
5747 }
5748
5749
5750
5751
5752
5753
// nilCheck generates nil pointer checking code.
// Used only for automatically inserted nil checks, not for user code like
// 'x != nil'. Returns a "definitely not nil" copy of ptr so later uses are
// ordered after the check.
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
5760
5761
5762
5763
5764
5765
5766
// boundsCheck generates bounds checking code. Checks if 0 <= idx <[=] len,
// branching to an exit (panic) block if not. Starts a new block on return.
// On input, len must be converted to full int width and be nonnegative.
// Returns idx converted to full int width.
// If bounded is true then the caller guarantees the index is not out of
// bounds (but boundsCheck will still extend the index to full int width).
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If bounded or bounds checking is flag-disabled, then no check
		// necessary, just return the extended index.
		//
		// Note that we do not yet elide Spectre index masking here
		// (see the SpectreIndex handling below); the caller's guarantee
		// covers in-bounds behavior only.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Unsigned indexes cannot be negative; use the unsigned variants
		// of the bounds kinds so the panic message is accurate.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// TODO(khr): figure out how to do "register" based calling
		// convention for bounds checks on Wasm; for now use a runtime call.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply an appropriate mask to avoid
	// speculative out-of-bounds accesses.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5853
5854
// check generates code that branches to a panic block calling fn if cmp is
// false. Panic blocks are deduplicated per (fn, file, line): repeated checks
// at the same source position share one panic block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the check is expected to pass
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call takes/returns memory to ensure that the right
		// memory state is observed if the panic happens.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5877
5878 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5879 needcheck := true
5880 switch b.Op {
5881 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5882 if b.AuxInt != 0 {
5883 needcheck = false
5884 }
5885 }
5886 if needcheck {
5887
5888 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5889 s.check(cmp, ir.Syms.Panicdivide)
5890 }
5891 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5892 }
5893
5894
5895
5896
5897
// rtcall issues a call to the given runtime function fn with the listed args.
// Returns a slice of results of the given result types.
// The call is added to the end of the current block.
// If returns is false, the block is marked as an exit block.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Accumulate the total argument size (for the call's AuxInt) while
	// collecting args and their types.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block: the call never returns, so mark it as an exit.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5949
5950
// storeType does *left = right, where right has type t.
// skip indicates parts of the assignment that can be omitted.
// leftIsStmt marks the store as an explicit statement (for debug info).
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not have write barrier. Store the whole type.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so write barrier stores for
	// pointer fields can be grouped together, and scalar values
	// don't need to be live across the write barrier call.
	// TODO: if the writebarrier pass knows how to reorder stores,
	// we can do a single store here as long as skip==0.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5970
5971
// storeTypeScalars does *left = right for all scalar (non-pointer) parts of t.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap types need no write barrier, so
			// the whole value is stored here as a "scalar".
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store the length word (the pointer is handled by storeTypePtrs).
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
6019
6020
// storeTypePtrs does *left = right for all pointer parts of t.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap types were already stored by
			// storeTypeScalars; no write barrier needed.
			break
		}
		s.store(t, left, right)
	case t.IsString():
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// itab field is treated as a scalar; only the data word needs a barrier.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
6059
6060
6061 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
6062 var a *ssa.Value
6063 if !ssa.CanSSA(t) {
6064 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
6065 } else {
6066 a = s.expr(n)
6067 }
6068 return a
6069 }
6070
6071 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
6072 pt := types.NewPtr(t)
6073 var addr *ssa.Value
6074 if base == s.sp {
6075
6076 addr = s.constOffPtrSP(pt, off)
6077 } else {
6078 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
6079 }
6080
6081 if !ssa.CanSSA(t) {
6082 a := s.addr(n)
6083 s.move(t, addr, a)
6084 return
6085 }
6086
6087 a := s.expr(n)
6088 s.storeType(t, addr, a, 0, false)
6089 }
6090
6091
6092
6093
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		// Slicing a *[N]T must nil-check the pointer first.
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values for omitted indices.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Check them in reverse order (k, then j, then i) so each check
	// compares against a value already known to be in range.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice.
	// For strings the capacity of the result is unimportant; rcap is
	// only used below to decide whether the result may be empty.
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice:
	//
	//     rptr = ptr + (mask(rcap) & (i * stride))
	//
	// where mask(x) is 0 if x==0 and -1 if x>0, and stride is the width
	// of the element type. The masking ensures that for an empty result
	// we do not form a pointer just past the end of the original object
	// (which could point into the next object in memory), while still
	// keeping the pointer non-nil so we don't create a nil slice/string.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// The delta is the number of bytes to offset ptr by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// If we're slicing to the point where the capacity is zero,
	// zero out the delta.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
6206
// u642fcvtTab holds the SSA ops needed to convert a uint64 to a float
// of a particular width (see uint64Tofloat for the algorithm).
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one                           func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 converts uint64 to float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 converts uint64 to float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
6231
// uint64Tofloat64 converts the uint64 value x (with from-type ft) to float64 (tt).
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}
6235
// uint64Tofloat32 converts the uint64 value x (with from-type ft) to float32 (tt).
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
6239
// uint64Tofloat converts the uint64 value x to a float using the ops in cvttab.
// It generates:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		y = x & 1
//		z = x >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result
//	}
//
// A large "unsigned" 64-bit value looks negative to the hardware's signed
// integer-to-float conversion, so for that case we halve the value (unsigned
// shift right by one), convert, then double. Before shifting we OR in the
// low bit so that the final rounding is still correct.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on the sign bit: x >= 0 is the common, direct-conversion case.
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Then: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Else: halve (preserving the low bit), convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge the two results.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6297
// u322fcvtTab holds the SSA opcodes needed by uint32Tofloat: cvtI2F for
// the non-negative (direct) case, and cvtF2F to round the float64
// fixup result down to the target float type.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}
6301
// u32_f64 is the opcode table for converting a uint32 to a float64.
// cvtF2F is a no-op copy since the fixup is already computed in float64.
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}
6306
// u32_f32 is the opcode table for converting a uint32 to a float32.
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
6311
// uint32Tofloat64 converts x (a uint32 value of type ft) to a float64 of type tt.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}
6315
// uint32Tofloat32 converts x (a uint32 value of type ft) to a float32 of type tt.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
6319
// uint32Tofloat converts a uint32 value x (of type ft) to a float of
// type tt, using the opcodes in cvttab. Generated code:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		// High bit set: compute in float64, where uint32 values are
//		// exactly representable, by adding back 2^32 to the (negative)
//		// signed interpretation, then round to the target type.
//		result = floatY(float64(x) + (1<<32))
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on the sign bit: 0 <= x (as a signed comparison).
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Non-negative: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Negative: signed-convert to float64, add 2^32, round to target.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge point: read the phi'd result.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6357
6358
// referenceTypeBuiltin generates inline code for len/cap of a map
// (len/cap of a channel must not reach here — both are rejected with
// Fatalf below). Equivalent to:
//
//	if n == nil {
//		return 0
//	} else {
//		// len
//		return *((*int)n)
//		// cap
//		return *(((*int)n)+1)
//	}
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)")
	}

	lenType := n.Type()
	// Nil check: len/cap of a nil map is zero.
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Nil case: result is zero.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		// Length is the first word of the header.
		s.vars[n] = s.load(lenType, x)
	case ir.OCAP:
		// Capacity is the second word of the header.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge point: read the phi'd result.
	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
6415
// f2uCvtTab holds the SSA opcodes and constants needed by floatToUint to
// convert a float to an unsigned integer. cutoff is 2^(N-1) for a uintN
// target: inputs >= cutoff need the subtract-then-or fixup.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue func(*state, *types.Type, float64) *ssa.Value
	intValue func(*state, *types.Type, int64) *ssa.Value
	cutoff uint64
}
6422
// f32_u64 is the opcode table for converting a float32 to a uint64.
var f32_u64 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto64,
	subf: ssa.OpSub32F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}
6432
// f64_u64 is the opcode table for converting a float64 to a uint64.
var f64_u64 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto64,
	subf: ssa.OpSub64F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}
6442
// f32_u32 is the opcode table for converting a float32 to a uint32.
var f32_u32 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto32,
	subf: ssa.OpSub32F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}
6452
// f64_u32 is the opcode table for converting a float64 to a uint32.
var f64_u32 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto32,
	subf: ssa.OpSub64F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}
6462
// float32ToUint64 converts x (a float32 value of type ft) to a uint64 of type tt.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}
// float64ToUint64 converts x (a float64 value of type ft) to a uint64 of type tt.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}
6469
// float32ToUint32 converts x (a float32 value of type ft) to a uint32 of type tt.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}
6473
// float64ToUint32 converts x (a float64 value of type ft) to a uint32 of type tt.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
6477
// floatToUint converts a float value x (of type ft) to an unsigned
// integer of type tt, using the opcodes/constants in cvttab.
// Generated code:
//
//	cutoff := 1 << (intY_Size - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)           // fits in a signed conversion
//	} else {
//		y = x - floatX(cutoff)      // bring into signed range
//		z = uintY(y)
//		result = z | -(cutoff)      // set the high bit back
//	}
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// Branch on whether x fits below the signed cutoff.
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// In range: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Out of range: subtract cutoff, convert, then or the high bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge point: read the phi'd result.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6518
6519
6520
6521
// dottype generates SSA for a type assertion node.
// commaok indicates whether to panic (false) or return an extra boolean
// "ok" result (true) when the assertion fails.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
6531
// dynamicDottype generates SSA for a dynamic type assertion node, where
// the target type descriptor is itself a computed value rather than a
// compile-time constant (e.g. inside a generic function).
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// Asserting from a non-empty interface to a concrete type:
		// recover the target *runtime._type from the itab's Type field.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
6549
6550
6551
6552
6553
6554
6555
6556
6557
// dottype1 implements an x.(T) operation. iface is the argument (x),
// dst is the type being asserted to (T), and src is the static type
// being asserted from. source is the *runtime._type of src (may be
// nil), target is the *runtime._type of dst, and targetItab is the itab
// for (dst, src) when both are known (may be nil). If commaok is set,
// the purpose is a type test (return res, resok) rather than a
// conversion that panics on failure. descriptor, when non-nil, is a
// compiler-allocated symbol whose address is passed to the runtime's
// TypeAssert helper (and which fronts a lookup cache) for assertions to
// non-empty interface types.
func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
	typs := s.f.Config.Types
	byteptr := typs.BytePtr
	if dst.IsInterface() {
		if dst.IsEmptyInterface() {
			// Converting to an empty interface.
			// The input may be an empty or a non-empty interface.
			if base.Debug.TypeAssert > 0 {
				base.WarnfAt(pos, "type assertion inlined")
			}

			// Get the type/itab word of the input interface.
			itab := s.newValue1(ssa.OpITab, byteptr, iface)
			// The assertion succeeds iff that word is non-nil.
			cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))

			if src.IsEmptyInterface() && commaok {
				// Empty->empty with ,ok is just a nil check.
				return iface, cond
			}

			// Branch on nilness.
			b := s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cond)
			b.Likely = ssa.BranchLikely
			bOk := s.f.NewBlock(ssa.BlockPlain)
			bFail := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(bOk)
			b.AddEdgeTo(bFail)

			if !commaok {
				// On failure, panic by calling panicnildottype.
				s.startBlock(bFail)
				s.rtcall(ir.Syms.Panicnildottype, false, nil, target)

				// On success, return the (possibly repacked) input.
				s.startBlock(bOk)
				if src.IsEmptyInterface() {
					res = iface
					return
				}
				// Non-empty source: load the type out of the itab
				// and rebuild with the existing data word.
				off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
				typ := s.load(byteptr, off)
				idata := s.newValue1(ssa.OpIData, byteptr, iface)
				res = s.newValue2(ssa.OpIMake, dst, typ, idata)
				return
			}

			s.startBlock(bOk)
			// Non-empty -> empty with ,ok:
			// load the type out of the itab.
			off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
			s.vars[typVar] = s.load(byteptr, off)
			s.endBlock()

			// itab is nil here, so use it directly as the nil type word.
			s.startBlock(bFail)
			s.vars[typVar] = itab
			s.endBlock()

			// Merge point.
			bEnd := s.f.NewBlock(ssa.BlockPlain)
			bOk.AddEdgeTo(bEnd)
			bFail.AddEdgeTo(bEnd)
			s.startBlock(bEnd)
			idata := s.newValue1(ssa.OpIData, byteptr, iface)
			res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
			resok = cond
			delete(s.vars, typVar) // typVar is no longer live
			return
		}
		// Converting to a non-empty interface requires a runtime lookup
		// (possibly short-circuited by the descriptor's cache below).
		if base.Debug.TypeAssert > 0 {
			base.WarnfAt(pos, "type assertion not inlined")
		}

		itab := s.newValue1(ssa.OpITab, byteptr, iface)
		data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)

		// First, check for nil.
		bNil := s.f.NewBlock(ssa.BlockPlain)
		bNonNil := s.f.NewBlock(ssa.BlockPlain)
		bMerge := s.f.NewBlock(ssa.BlockPlain)
		cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cond)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNonNil)
		b.AddEdgeTo(bNil)

		s.startBlock(bNil)
		if commaok {
			s.vars[typVar] = itab // which is nil here
			b := s.endBlock()
			b.AddEdgeTo(bMerge)
		} else {
			// Panic if input is nil.
			s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
		}

		// Get the source type, loading it out of the itab if needed.
		s.startBlock(bNonNil)
		typ := itab
		if !src.IsEmptyInterface() {
			typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
		}

		// Consult the descriptor's cache before calling the runtime.
		var d *ssa.Value
		if descriptor != nil {
			d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
			if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
				// The cache is only usable with an atomic pointer load
				// intrinsic; double-check that it is available.
				if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp"}]; !ok {
					s.Fatalf("atomic load not available")
				}
				// Pick integer ops for the target pointer size.
				var mul, and, add, zext ssa.Op
				if s.config.PtrSize == 4 {
					mul = ssa.OpMul32
					and = ssa.OpAnd32
					add = ssa.OpAdd32
					zext = ssa.OpCopy
				} else {
					mul = ssa.OpMul64
					and = ssa.OpAnd64
					add = ssa.OpAdd64
					zext = ssa.OpZeroExt32to64
				}

				loopHead := s.f.NewBlock(ssa.BlockPlain)
				loopBody := s.f.NewBlock(ssa.BlockPlain)
				cacheHit := s.f.NewBlock(ssa.BlockPlain)
				cacheMiss := s.f.NewBlock(ssa.BlockPlain)

				// Load the cache pointer out of the descriptor, atomically
				// so that a concurrently-written cache is seen fully formed.
				atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
				cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
				s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

				// Load the hash: from the type for an empty source
				// interface, otherwise from the itab.
				var hash *ssa.Value
				if src.IsEmptyInterface() {
					hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
				} else {
					hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
				}
				hash = s.newValue1(zext, typs.Uintptr, hash)
				s.vars[hashVar] = hash
				// The first word of the cache is the index mask.
				mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
				// Jump to the probe loop.
				b := s.endBlock()
				b.AddEdgeTo(loopHead)

				// Compute the address of the cache entry for this probe:
				// entries are 2 pointer-words each, after the 1-word mask.
				s.startBlock(loopHead)
				idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
				idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
				idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
				e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
				// Advance the hash for the next probe (linear probing).
				s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

				// Hit if the entry's type matches the source type.
				eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
				cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
				b = s.endBlock()
				b.Kind = ssa.BlockIf
				b.SetControl(cmp1)
				b.AddEdgeTo(cacheHit)
				b.AddEdgeTo(loopBody)

				// A nil entry terminates the probe sequence: miss.
				s.startBlock(loopBody)
				cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
				b = s.endBlock()
				b.Kind = ssa.BlockIf
				b.SetControl(cmp2)
				b.AddEdgeTo(cacheMiss)
				b.AddEdgeTo(loopHead)

				// Hit: the result itab is the entry's second word.
				s.startBlock(cacheHit)
				eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
				s.vars[typVar] = eItab
				b = s.endBlock()
				b.AddEdgeTo(bMerge)

				// Miss: fall through to the runtime call below.
				s.startBlock(cacheMiss)
			}
		}

		// Call the runtime to get the itab for the result.
		if descriptor != nil {
			itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
		} else {
			var fn *obj.LSym
			if commaok {
				fn = ir.Syms.AssertE2I2
			} else {
				fn = ir.Syms.AssertE2I
			}
			itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
		}
		s.vars[typVar] = itab
		b = s.endBlock()
		b.AddEdgeTo(bMerge)

		// Build the resulting interface.
		s.startBlock(bMerge)
		itab = s.variable(typVar, byteptr)
		var ok *ssa.Value
		if commaok {
			ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
		}
		return s.newValue2(ssa.OpIMake, dst, itab, data), ok
	}

	if base.Debug.TypeAssert > 0 {
		base.WarnfAt(pos, "type assertion inlined")
	}

	// Converting to a concrete type.
	direct := types.IsDirectIface(dst)
	itab := s.newValue1(ssa.OpITab, byteptr, iface) // type word of the interface
	if base.Debug.TypeAssert > 0 {
		// NOTE(review): this emits the same warning a second time for the
		// same assertion (see the identical check a few lines up) —
		// confirm whether the duplicate output is intentional.
		base.WarnfAt(pos, "type assertion inlined")
	}
	// The assertion succeeds iff the interface's first word matches
	// the expected value: the target type for an empty source
	// interface, or the (dst, src) itab otherwise.
	var wantedFirstWord *ssa.Value
	if src.IsEmptyInterface() {
		// Looking for a pointer to the target type.
		wantedFirstWord = target
	} else {
		// Looking for a pointer to the itab for the target type / source interface.
		wantedFirstWord = targetItab
	}

	var tmp ir.Node     // stack temporary, for unSSAable result types
	var addr *ssa.Value // address of tmp
	if commaok && !ssa.CanSSA(dst) {
		// unSSAable type; write the value through a temporary.
		tmp, addr = s.temp(pos, dst)
	}

	cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cond)
	b.Likely = ssa.BranchLikely

	bOk := s.f.NewBlock(ssa.BlockPlain)
	bFail := s.f.NewBlock(ssa.BlockPlain)
	b.AddEdgeTo(bOk)
	b.AddEdgeTo(bFail)

	if !commaok {
		// On failure, panic by calling panicdottype.
		s.startBlock(bFail)
		taddr := source
		if taddr == nil {
			taddr = s.reflectType(src)
		}
		if src.IsEmptyInterface() {
			s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
		} else {
			s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
		}

		// On success, extract the data from the interface.
		s.startBlock(bOk)
		if direct {
			return s.newValue1(ssa.OpIData, dst, iface), nil
		}
		p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
		return s.load(dst, p), nil
	}

	// The commaok form needs a control-flow merge of the success and
	// failure results.
	bEnd := s.f.NewBlock(ssa.BlockPlain)

	// A fresh marker per call site, since dst can differ between calls.
	valVar := ssaMarker("val")

	// Assertion succeeded: extract (or copy out) the value.
	s.startBlock(bOk)
	if tmp == nil {
		if direct {
			s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
		} else {
			p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
			s.vars[valVar] = s.load(dst, p)
		}
	} else {
		p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
		s.move(dst, addr, p)
	}
	s.vars[okVar] = s.constBool(true)
	s.endBlock()
	bOk.AddEdgeTo(bEnd)

	// Assertion failed: result is the zero value.
	s.startBlock(bFail)
	if tmp == nil {
		s.vars[valVar] = s.zeroVal(dst)
	} else {
		s.zero(dst, addr)
	}
	s.vars[okVar] = s.constBool(false)
	s.endBlock()
	bFail.AddEdgeTo(bEnd)

	// Merge point: read the phi'd (or stored) results.
	s.startBlock(bEnd)
	if tmp == nil {
		res = s.variable(valVar, dst)
		delete(s.vars, valVar)
	} else {
		res = s.load(dst, addr)
	}
	resok = s.variable(okVar, types.Types[types.TBOOL])
	delete(s.vars, okVar)
	return res, resok
}
6894
6895
6896 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6897 tmp := typecheck.TempAt(pos, s.curfn, t)
6898 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
6899 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6900 }
6901 addr := s.addr(tmp)
6902 return tmp, addr
6903 }
6904
6905
6906 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6907 v := s.vars[n]
6908 if v != nil {
6909 return v
6910 }
6911 v = s.fwdVars[n]
6912 if v != nil {
6913 return v
6914 }
6915
6916 if s.curBlock == s.f.Entry {
6917
6918 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6919 }
6920
6921
6922 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6923 s.fwdVars[n] = v
6924 if n.Op() == ir.ONAME {
6925 s.addNamedValue(n.(*ir.Name), v)
6926 }
6927 return v
6928 }
6929
// mem returns the current memory state as an SSA value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6933
6934 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6935 if n.Class == ir.Pxxx {
6936
6937 return
6938 }
6939 if ir.IsAutoTmp(n) {
6940
6941 return
6942 }
6943 if n.Class == ir.PPARAMOUT {
6944
6945
6946 return
6947 }
6948 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6949 values, ok := s.f.NamedValues[loc]
6950 if !ok {
6951 s.f.Names = append(s.f.Names, &loc)
6952 s.f.CanonicalLocalSlots[loc] = &loc
6953 }
6954 s.f.NamedValues[loc] = append(values, v)
6955 }
6956
6957
// A Branch holds an emitted branch instruction whose destination has
// not yet been resolved, together with the SSA block it targets.
// Destinations are patched in once all block start Progs are known
// (see the fixup loop in genssa).
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block for the branch
}
6962
6963
// State contains state needed during Prog generation for a single function.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump-table blocks we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	maxarg int64 // presumably the largest call argument area seen — set elsewhere, confirm with callers

	// livenessMap holds the liveness information computed by
	// liveness.Compute (see genssa).
	livenessMap liveness.Map

	// partLiveArgs records which arguments are partially live,
	// also produced by liveness.Compute.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of
	// instructions within a single block that share a line number;
	// used by Prog to float statement marks to the start of a run.
	lineRunStart *obj.Prog

	// OnWasmStackSkipped: name suggests a count of values left on the
	// WebAssembly operand stack — used by the wasm backend, not visible here.
	OnWasmStackSkipped int
}
6997
// FuncInfo returns the FuncInfo for the function currently being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
7001
7002
// Prog appends a new Prog with the given opcode, maintaining the
// invariant that within a run of same-line instructions in a block, the
// is-statement mark sits on the first instruction of the run.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float any statement mark to the beginning of the current same-line
	// run. lineRunStart is reset at block boundaries (see genssa), which
	// naturally partitions the runs.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
7018
7019
// Pc returns the Prog that will be emitted next, i.e. the current
// position in the instruction stream.
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
7023
7024
// SetPos sets the source position for subsequently emitted instructions.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
7028
7029
7030
7031
// Br emits a branch instruction with opcode op targeting the given
// block, recording it in s.Branches so its destination Prog can be
// patched once all block start addresses are known.
func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
	p := s.Prog(op)
	p.To.Type = obj.TYPE_BRANCH
	s.Branches = append(s.Branches, Branch{P: p, B: target})
	return p
}
7038
7039
7040
7041
7042
7043
// DebugFriendlySetPosFrom sets the emission position from v's position,
// adjusting the is-statement flag so that generated code produces
// sensible single-stepping in a debugger (only explicit statement
// boundaries survive; value-shuffling ops are never statements).
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These just move values around; never mark them as statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also downgrade a default (unmarked) position to NotStmt:
			// only explicit statement marks should reach the output.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// The current position is already a statement on the
					// same file and line; keep it unchanged so the mark
					// stays where it is rather than being cleared.
					return
				}
				p = p.WithNotStmt()
				// (The statement mark, if any, remains on s.pp.Pos.)
			}
			s.SetPos(p)
		} else {
			// No position on v: reuse the current one, minus any statement mark.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
7082
7083
// emitArgInfo generates the argument-info symbol for the current
// function (via EmitArgInfo) and emits the FUNCDATA instruction that
// points at it. Functions with no receiver and no parameters get none.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a FUNCDATA pointing at the arg-info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
7101
7102
// EmitArgInfo generates the argument-info symbol for f: a byte-coded
// description of the function's input parameters, written using the
// internal/abi TraceArgs* opcodes. The encoding must stay in sync with
// the runtime's traceback argument decoder, which consumes it to print
// argument values in stack traces.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is printed component-by-component.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // current write offset into x's data
	n := 0    // number of components emitted so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// write1 emits one scalar component as an (offset, size) byte pair,
	// or the offset-too-large escape when it cannot be encoded.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType recursively writes out type t starting at baseOffset.
	// It returns false once the component limit is hit, telling the
	// caller to stop visiting.
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words: ptr, len, cap.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Two float halves.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty aggregate still counts as one component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty aggregate still counts as one component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// "[" in the symbol name indicates an instantiated generic;
		// skip its first (dictionary) parameter — presumably not
		// useful to show in tracebacks.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
7206
7207
// emitWrappedFuncInfo generates the wrap-info symbol for the current
// function (a pointer to the function it wraps) and emits the FUNCDATA
// instruction pointing at it. Non-wrapper functions get none.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Not emitted in linkshared mode — presumably because the wrapped
		// symbol may live in another shared object; confirm before relying
		// on wrap info under -linkshared.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return // not a wrapper
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a FUNCDATA pointing at the wrap-info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
7234
7235
7236 func genssa(f *ssa.Func, pp *objw.Progs) {
7237 var s State
7238 s.ABI = f.OwnAux.Fn.ABI()
7239
7240 e := f.Frontend().(*ssafn)
7241
7242 s.livenessMap, s.partLiveArgs = liveness.Compute(e.curfn, f, e.stkptrsize, pp)
7243 emitArgInfo(e, f, pp)
7244 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
7245
7246 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
7247 if openDeferInfo != nil {
7248
7249
7250 p := pp.Prog(obj.AFUNCDATA)
7251 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
7252 p.To.Type = obj.TYPE_MEM
7253 p.To.Name = obj.NAME_EXTERN
7254 p.To.Sym = openDeferInfo
7255 }
7256
7257 emitWrappedFuncInfo(e, pp)
7258
7259
7260 s.bstart = make([]*obj.Prog, f.NumBlocks())
7261 s.pp = pp
7262 var progToValue map[*obj.Prog]*ssa.Value
7263 var progToBlock map[*obj.Prog]*ssa.Block
7264 var valueToProgAfter []*obj.Prog
7265 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
7266 if gatherPrintInfo {
7267 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
7268 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
7269 f.Logf("genssa %s\n", f.Name)
7270 progToBlock[s.pp.Next] = f.Blocks[0]
7271 }
7272
7273 if base.Ctxt.Flag_locationlists {
7274 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
7275 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
7276 }
7277 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
7278 for i := range valueToProgAfter {
7279 valueToProgAfter[i] = nil
7280 }
7281 }
7282
7283
7284
7285 firstPos := src.NoXPos
7286 for _, v := range f.Entry.Values {
7287 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7288 firstPos = v.Pos
7289 v.Pos = firstPos.WithDefaultStmt()
7290 break
7291 }
7292 }
7293
7294
7295
7296
7297 var inlMarks map[*obj.Prog]int32
7298 var inlMarkList []*obj.Prog
7299
7300
7301
7302 var inlMarksByPos map[src.XPos][]*obj.Prog
7303
7304 var argLiveIdx int = -1
7305
7306
7307
7308
7309
7310 var hotAlign, hotRequire int64
7311
7312 if base.Debug.AlignHot > 0 {
7313 switch base.Ctxt.Arch.Name {
7314
7315
7316
7317
7318
7319 case "amd64", "386":
7320
7321
7322
7323 hotAlign = 64
7324 hotRequire = 31
7325 }
7326 }
7327
7328
7329 for i, b := range f.Blocks {
7330
7331 s.lineRunStart = nil
7332 s.SetPos(s.pp.Pos.WithNotStmt())
7333
7334 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
7335
7336
7337
7338
7339
7340 p := s.pp.Prog(obj.APCALIGNMAX)
7341 p.From.SetConst(hotAlign)
7342 p.To.SetConst(hotRequire)
7343 }
7344
7345 s.bstart[b.ID] = s.pp.Next
7346
7347 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7348 argLiveIdx = idx
7349 p := s.pp.Prog(obj.APCDATA)
7350 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7351 p.To.SetConst(int64(idx))
7352 }
7353
7354
7355 Arch.SSAMarkMoves(&s, b)
7356 for _, v := range b.Values {
7357 x := s.pp.Next
7358 s.DebugFriendlySetPosFrom(v)
7359
7360 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7361 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7362 }
7363
7364 switch v.Op {
7365 case ssa.OpInitMem:
7366
7367 case ssa.OpArg:
7368
7369 case ssa.OpSP, ssa.OpSB:
7370
7371 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7372
7373 case ssa.OpGetG:
7374
7375
7376 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7377
7378 case ssa.OpPhi:
7379 CheckLoweredPhi(v)
7380 case ssa.OpConvert:
7381
7382 if v.Args[0].Reg() != v.Reg() {
7383 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7384 }
7385 case ssa.OpInlMark:
7386 p := Arch.Ginsnop(s.pp)
7387 if inlMarks == nil {
7388 inlMarks = map[*obj.Prog]int32{}
7389 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7390 }
7391 inlMarks[p] = v.AuxInt32()
7392 inlMarkList = append(inlMarkList, p)
7393 pos := v.Pos.AtColumn1()
7394 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7395 firstPos = src.NoXPos
7396
7397 default:
7398
7399 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7400 s.SetPos(firstPos)
7401 firstPos = src.NoXPos
7402 }
7403
7404
7405 s.pp.NextLive = s.livenessMap.Get(v)
7406 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7407
7408
7409 Arch.SSAGenValue(&s, v)
7410 }
7411
7412 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7413 argLiveIdx = idx
7414 p := s.pp.Prog(obj.APCDATA)
7415 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7416 p.To.SetConst(int64(idx))
7417 }
7418
7419 if base.Ctxt.Flag_locationlists {
7420 valueToProgAfter[v.ID] = s.pp.Next
7421 }
7422
7423 if gatherPrintInfo {
7424 for ; x != s.pp.Next; x = x.Link {
7425 progToValue[x] = v
7426 }
7427 }
7428 }
7429
7430 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7431 p := Arch.Ginsnop(s.pp)
7432 p.Pos = p.Pos.WithIsStmt()
7433 if b.Pos == src.NoXPos {
7434 b.Pos = p.Pos
7435 if b.Pos == src.NoXPos {
7436 b.Pos = s.pp.Text.Pos
7437 }
7438 }
7439 b.Pos = b.Pos.WithBogusLine()
7440 }
7441
7442
7443
7444
7445
7446 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7447
7448
7449 var next *ssa.Block
7450 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
7451
7452
7453
7454
7455 next = f.Blocks[i+1]
7456 }
7457 x := s.pp.Next
7458 s.SetPos(b.Pos)
7459 Arch.SSAGenBlock(&s, b, next)
7460 if gatherPrintInfo {
7461 for ; x != s.pp.Next; x = x.Link {
7462 progToBlock[x] = b
7463 }
7464 }
7465 }
7466 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
7467
7468
7469
7470
7471 Arch.Ginsnop(s.pp)
7472 }
7473 if openDeferInfo != nil {
7474
7475
7476
7477 s.pp.NextLive = s.livenessMap.DeferReturn
7478 p := s.pp.Prog(obj.ACALL)
7479 p.To.Type = obj.TYPE_MEM
7480 p.To.Name = obj.NAME_EXTERN
7481 p.To.Sym = ir.Syms.Deferreturn
7482
7483
7484
7485
7486
7487 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7488 n := o.Name
7489 rts, offs := o.RegisterTypesAndOffsets()
7490 for i := range o.Registers {
7491 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7492 }
7493 }
7494
7495 s.pp.Prog(obj.ARET)
7496 }
7497
7498 if inlMarks != nil {
7499 hasCall := false
7500
7501
7502
7503
7504 for p := s.pp.Text; p != nil; p = p.Link {
7505 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
7506 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
7507
7508
7509
7510
7511
7512 continue
7513 }
7514 if _, ok := inlMarks[p]; ok {
7515
7516
7517 continue
7518 }
7519 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7520 hasCall = true
7521 }
7522 pos := p.Pos.AtColumn1()
7523 marks := inlMarksByPos[pos]
7524 if len(marks) == 0 {
7525 continue
7526 }
7527 for _, m := range marks {
7528
7529
7530
7531 p.Pos = p.Pos.WithIsStmt()
7532 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
7533
7534 m.As = obj.ANOP
7535 m.Pos = src.NoXPos
7536 m.From = obj.Addr{}
7537 m.To = obj.Addr{}
7538 }
7539 delete(inlMarksByPos, pos)
7540 }
7541
7542 for _, p := range inlMarkList {
7543 if p.As != obj.ANOP {
7544 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7545 }
7546 }
7547
7548 if e.stksize == 0 && !hasCall {
7549
7550
7551
7552
7553
7554
7555 for p := s.pp.Text; p != nil; p = p.Link {
7556 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7557 continue
7558 }
7559 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7560
7561 nop := Arch.Ginsnop(s.pp)
7562 nop.Pos = e.curfn.Pos().WithIsStmt()
7563
7564
7565
7566
7567
7568 for x := s.pp.Text; x != nil; x = x.Link {
7569 if x.Link == nop {
7570 x.Link = nop.Link
7571 break
7572 }
7573 }
7574
7575 for x := s.pp.Text; x != nil; x = x.Link {
7576 if x.Link == p {
7577 nop.Link = p
7578 x.Link = nop
7579 break
7580 }
7581 }
7582 }
7583 break
7584 }
7585 }
7586 }
7587
7588 if base.Ctxt.Flag_locationlists {
7589 var debugInfo *ssa.FuncDebug
7590 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
7591 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7592 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7593 } else {
7594 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7595 }
7596 bstart := s.bstart
7597 idToIdx := make([]int, f.NumBlocks())
7598 for i, b := range f.Blocks {
7599 idToIdx[b.ID] = i
7600 }
7601
7602
7603
7604 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7605 switch v {
7606 case ssa.BlockStart.ID:
7607 if b == f.Entry.ID {
7608 return 0
7609
7610 }
7611 return bstart[b].Pc
7612 case ssa.BlockEnd.ID:
7613 blk := f.Blocks[idToIdx[b]]
7614 nv := len(blk.Values)
7615 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7616 case ssa.FuncEnd.ID:
7617 return e.curfn.LSym.Size
7618 default:
7619 return valueToProgAfter[v].Pc
7620 }
7621 }
7622 }
7623
7624
7625 for _, br := range s.Branches {
7626 br.P.To.SetTarget(s.bstart[br.B.ID])
7627 if br.P.Pos.IsStmt() != src.PosIsStmt {
7628 br.P.Pos = br.P.Pos.WithNotStmt()
7629 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7630 br.P.Pos = br.P.Pos.WithNotStmt()
7631 }
7632
7633 }
7634
7635
7636 for _, jt := range s.JumpTables {
7637
7638 targets := make([]*obj.Prog, len(jt.Succs))
7639 for i, e := range jt.Succs {
7640 targets[i] = s.bstart[e.Block().ID]
7641 }
7642
7643
7644
7645 fi := s.pp.CurFunc.LSym.Func()
7646 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7647 }
7648
7649 if e.log {
7650 filename := ""
7651 for p := s.pp.Text; p != nil; p = p.Link {
7652 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7653 filename = p.InnermostFilename()
7654 f.Logf("# %s\n", filename)
7655 }
7656
7657 var s string
7658 if v, ok := progToValue[p]; ok {
7659 s = v.String()
7660 } else if b, ok := progToBlock[p]; ok {
7661 s = b.String()
7662 } else {
7663 s = " "
7664 }
7665 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7666 }
7667 }
7668 if f.HTMLWriter != nil {
7669 var buf strings.Builder
7670 buf.WriteString("<code>")
7671 buf.WriteString("<dl class=\"ssa-gen\">")
7672 filename := ""
7673 for p := s.pp.Text; p != nil; p = p.Link {
7674
7675
7676 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7677 filename = p.InnermostFilename()
7678 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7679 buf.WriteString(html.EscapeString("# " + filename))
7680 buf.WriteString("</dd>")
7681 }
7682
7683 buf.WriteString("<dt class=\"ssa-prog-src\">")
7684 if v, ok := progToValue[p]; ok {
7685 buf.WriteString(v.HTML())
7686 } else if b, ok := progToBlock[p]; ok {
7687 buf.WriteString("<b>" + b.HTML() + "</b>")
7688 }
7689 buf.WriteString("</dt>")
7690 buf.WriteString("<dd class=\"ssa-prog\">")
7691 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7692 buf.WriteString("</dd>")
7693 }
7694 buf.WriteString("</dl>")
7695 buf.WriteString("</code>")
7696 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7697 }
7698 if ssa.GenssaDump[f.Name] {
7699 fi := f.DumpFileForPhase("genssa")
7700 if fi != nil {
7701
7702
7703 inliningDiffers := func(a, b []src.Pos) bool {
7704 if len(a) != len(b) {
7705 return true
7706 }
7707 for i := range a {
7708 if a[i].Filename() != b[i].Filename() {
7709 return true
7710 }
7711 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7712 return true
7713 }
7714 }
7715 return false
7716 }
7717
7718 var allPosOld []src.Pos
7719 var allPos []src.Pos
7720
7721 for p := s.pp.Text; p != nil; p = p.Link {
7722 if p.Pos.IsKnown() {
7723 allPos = allPos[:0]
7724 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7725 if inliningDiffers(allPos, allPosOld) {
7726 for _, pos := range allPos {
7727 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7728 }
7729 allPos, allPosOld = allPosOld, allPos
7730 }
7731 }
7732
7733 var s string
7734 if v, ok := progToValue[p]; ok {
7735 s = v.String()
7736 } else if b, ok := progToBlock[p]; ok {
7737 s = b.String()
7738 } else {
7739 s = " "
7740 }
7741 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7742 }
7743 fi.Close()
7744 }
7745 }
7746
7747 defframe(&s, e, f)
7748
7749 f.HTMLWriter.Close()
7750 f.HTMLWriter = nil
7751 }
7752
// defframe finalizes the function's frame once code generation is
// complete: it stamps the final argument and frame sizes on the TEXT
// pseudo-instruction, spills pointer-carrying pieces of partially
// live register arguments to their stack homes, and zeroes stack
// slots marked Needzero so the GC never scans uninitialized memory.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Frame size = spill/local area plus outgoing-argument area,
	// rounded to the frame alignment, plus any arch-specific padding.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size on the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	// p tracks the instruction after which prologue code is inserted.
	p := pp.Text

	// For register-ABI functions (and only when optimizing), spill the
	// pointer-typed register pieces of partially live arguments in the
	// prologue — unless the SSA entry block already stores that piece
	// before any call could observe the stack slot.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// nameOff identifies one register-sized piece of an argument.
		type nameOff struct {
			n *ir.Name
			off int64
		}
		// Collect pieces the entry block spills before its first call.
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Spill the remaining pointer-carrying register pieces of
		// multi-register, partially live, SSA-able arguments.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // entry block already stores this piece
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Zero the Needzero slots. Dcl is visited in frame-layout order,
	// so nearby ranges can be merged into one zeroing operation.
	// lo/hi delimit the pending (frame-relative) range to zero.
	var lo, hi int64

	// state is scratch shared across Arch.ZeroRange calls (e.g. which
	// helper registers have already been set up).
	var state uint32

	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		// Merge this slot into the pending range when the gap is at
		// most 2*RegSize — one combined zeroing is cheaper than two.
		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			lo = n.FrameOffset()
			continue
		}

		// Flush the pending range, then start a new one at this slot.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Flush the final pending range (a no-op when hi-lo == 0).
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7859
7860
// IndexJump describes one conditional branch to emit for a block:
// Jump is the machine branch opcode and Index selects which of the
// block's successors it targets.
type IndexJump struct {
	Jump obj.As
	Index int
}
7865
7866 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7867 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7868 p.Pos = b.Pos
7869 }
7870
7871
7872
7873 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7874 switch next {
7875 case b.Succs[0].Block():
7876 s.oneJump(b, &jumps[0][0])
7877 s.oneJump(b, &jumps[0][1])
7878 case b.Succs[1].Block():
7879 s.oneJump(b, &jumps[1][0])
7880 s.oneJump(b, &jumps[1][1])
7881 default:
7882 var q *obj.Prog
7883 if b.Likely != ssa.BranchUnlikely {
7884 s.oneJump(b, &jumps[1][0])
7885 s.oneJump(b, &jumps[1][1])
7886 q = s.Br(obj.AJMP, b.Succs[1].Block())
7887 } else {
7888 s.oneJump(b, &jumps[0][0])
7889 s.oneJump(b, &jumps[0][1])
7890 q = s.Br(obj.AJMP, b.Succs[0].Block())
7891 }
7892 q.Pos = b.Pos
7893 }
7894 }
7895
7896
// AddAux folds v's Aux symbol and AuxInt offset into the address a.
// It is shorthand for AddAux2 with v's own AuxInt as the offset.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7900 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7901 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7902 v.Fatalf("bad AddAux addr %v", a)
7903 }
7904
7905 a.Offset += offset
7906
7907
7908 if v.Aux == nil {
7909 return
7910 }
7911
7912 switch n := v.Aux.(type) {
7913 case *ssa.AuxCall:
7914 a.Name = obj.NAME_EXTERN
7915 a.Sym = n.Fn
7916 case *obj.LSym:
7917 a.Name = obj.NAME_EXTERN
7918 a.Sym = n
7919 case *ir.Name:
7920 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7921 a.Name = obj.NAME_PARAM
7922 } else {
7923 a.Name = obj.NAME_AUTO
7924 }
7925 a.Sym = n.Linksym()
7926 a.Offset += n.FrameOffset()
7927 default:
7928 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7929 }
7930 }
7931
7932
7933
// extendIndex converts idx to the platform's int width so it can be
// used as an index. len is the length being indexed, used only on the
// panic path; kind selects which bounds-failure variant to report.
// bounded reports that the index is already known in range, which
// suppresses the high-word check on 32-bit targets.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer-width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit machine: keep the low 32 bits and,
		// unless checks are suppressed, verify the high 32 bits are
		// zero (otherwise the index cannot possibly be in range).
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Switch to the unsigned flavor of the bounds kind so the
			// failure reflects the index's unsigned interpretation.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// if hi == 0 { goto bNext } else { goto bPanic }
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		// bPanic: call the extended-index panic and exit.
		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: sign- or zero-extend it.
	// The switch key encodes (index size, pointer size) as
	// 10*size + PtrSize, e.g. 48 = 4-byte index, 8-byte pointer.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
8025
8026
8027
8028 func CheckLoweredPhi(v *ssa.Value) {
8029 if v.Op != ssa.OpPhi {
8030 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
8031 }
8032 if v.Type.IsMemory() {
8033 return
8034 }
8035 f := v.Block.Func
8036 loc := f.RegAlloc[v.ID]
8037 for _, a := range v.Args {
8038 if aloc := f.RegAlloc[a.ID]; aloc != loc {
8039 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
8040 }
8041 }
8042 }
8043
8044
8045
8046
8047
8048 func CheckLoweredGetClosurePtr(v *ssa.Value) {
8049 entry := v.Block.Func.Entry
8050 if entry != v.Block {
8051 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
8052 }
8053 for _, w := range entry.Values {
8054 if w == v {
8055 break
8056 }
8057 switch w.Op {
8058 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
8059
8060 default:
8061 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
8062 }
8063 }
8064 }
8065
8066
8067 func CheckArgReg(v *ssa.Value) {
8068 entry := v.Block.Func.Entry
8069 if entry != v.Block {
8070 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
8071 }
8072 }
8073
8074 func AddrAuto(a *obj.Addr, v *ssa.Value) {
8075 n, off := ssa.AutoVar(v)
8076 a.Type = obj.TYPE_MEM
8077 a.Sym = n.Linksym()
8078 a.Reg = int16(Arch.REGSP)
8079 a.Offset = n.FrameOffset() + off
8080 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
8081 a.Name = obj.NAME_PARAM
8082 } else {
8083 a.Name = obj.NAME_AUTO
8084 }
8085 }
8086
8087
8088
8089 func (s *State) Call(v *ssa.Value) *obj.Prog {
8090 pPosIsStmt := s.pp.Pos.IsStmt()
8091 s.PrepareCall(v)
8092
8093 p := s.Prog(obj.ACALL)
8094 if pPosIsStmt == src.PosIsStmt {
8095 p.Pos = v.Pos.WithIsStmt()
8096 } else {
8097 p.Pos = v.Pos.WithNotStmt()
8098 }
8099 if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
8100 p.To.Type = obj.TYPE_MEM
8101 p.To.Name = obj.NAME_EXTERN
8102 p.To.Sym = sym.Fn
8103 } else {
8104
8105 switch Arch.LinkArch.Family {
8106 case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
8107 p.To.Type = obj.TYPE_REG
8108 case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
8109 p.To.Type = obj.TYPE_MEM
8110 default:
8111 base.Fatalf("unknown indirect call family")
8112 }
8113 p.To.Reg = v.Args[0].Reg()
8114 }
8115 return p
8116 }
8117
8118
8119
8120 func (s *State) TailCall(v *ssa.Value) *obj.Prog {
8121 p := s.Call(v)
8122 p.As = obj.ARET
8123 return p
8124 }
8125
8126
8127
8128
8129 func (s *State) PrepareCall(v *ssa.Value) {
8130 idx := s.livenessMap.Get(v)
8131 if !idx.StackMapValid() {
8132
8133 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
8134 base.Fatalf("missing stack map index for %v", v.LongString())
8135 }
8136 }
8137
8138 call, ok := v.Aux.(*ssa.AuxCall)
8139
8140 if ok {
8141
8142
8143 if nowritebarrierrecCheck != nil {
8144 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
8145 }
8146 }
8147
8148 if s.maxarg < v.AuxInt {
8149 s.maxarg = v.AuxInt
8150 }
8151 }
8152
8153
8154
8155 func (s *State) UseArgs(n int64) {
8156 if s.maxarg < n {
8157 s.maxarg = n
8158 }
8159 }
8160
8161
8162 func fieldIdx(n *ir.SelectorExpr) int {
8163 t := n.X.Type()
8164 if !t.IsStruct() {
8165 panic("ODOT's LHS is not a struct")
8166 }
8167
8168 for i, f := range t.Fields() {
8169 if f.Sym == n.Sel {
8170 if f.Offset != n.Offset() {
8171 panic("field offset doesn't match")
8172 }
8173 return i
8174 }
8175 }
8176 panic(fmt.Sprintf("can't find field in expr %v\n", n))
8177
8178
8179
8180 }
8181
8182
8183
// ssafn carries per-function frontend state through SSA compilation
// and code generation; it is the concrete frontend type the backend
// methods below hang off.
type ssafn struct {
	curfn *ir.Func
	strings map[string]*obj.LSym // cache: string constant -> data symbol (see StringData)
	stksize int64 // size of the local/spill area of the frame
	stkptrsize int64 // prefix of the frame that may contain pointers

	// stkalign is the required frame alignment; maxarg is rounded up
	// to it in defframe.
	stkalign int64

	log bool // print per-instruction debug output (see Logf/Log)
}
8198
8199
8200
8201 func (e *ssafn) StringData(s string) *obj.LSym {
8202 if aux, ok := e.strings[s]; ok {
8203 return aux
8204 }
8205 if e.strings == nil {
8206 e.strings = make(map[string]*obj.LSym)
8207 }
8208 data := staticdata.StringSym(e.curfn.Pos(), s)
8209 e.strings[s] = data
8210 return data
8211 }
8212
8213
8214 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
8215 node := parent.N
8216
8217 if node.Class != ir.PAUTO || node.Addrtaken() {
8218
8219 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
8220 }
8221
8222 sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
8223 n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
8224 n.SetUsed(true)
8225 n.SetEsc(ir.EscNever)
8226 types.CalcSize(t)
8227 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
8228 }
8229
8230
8231 func (e *ssafn) Logf(msg string, args ...interface{}) {
8232 if e.log {
8233 fmt.Printf(msg, args...)
8234 }
8235 }
8236
// Log reports whether per-function debug logging is enabled.
func (e *ssafn) Log() bool {
	return e.log
}
8240
8241
8242 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
8243 base.Pos = pos
8244 nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
8245 base.Fatalf("'%s': "+msg, nargs...)
8246 }
8247
8248
8249
// Warnl emits a formatted compiler warning at the given position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
8253
// Debug_checknil reports whether nil-check debugging is enabled
// (the -d nil compiler debug flag).
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
8257
// UseWriteBarrier reports whether write barriers are enabled
// (controlled by the -wb compiler flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
8261
// Syslook returns the link symbol for the named runtime-support
// function. Only the names the SSA backend asks for are handled;
// any other name is a fatal internal error.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil // unreachable after Fatalf
}
8280
// Func returns the function currently being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
8284
8285 func clobberBase(n ir.Node) ir.Node {
8286 if n.Op() == ir.ODOT {
8287 n := n.(*ir.SelectorExpr)
8288 if n.X.Type().NumFields() == 1 {
8289 return clobberBase(n.X)
8290 }
8291 }
8292 if n.Op() == ir.OINDEX {
8293 n := n.(*ir.IndexExpr)
8294 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
8295 return clobberBase(n.X)
8296 }
8297 }
8298 return n
8299 }
8300
8301
8302 func callTargetLSym(callee *ir.Name) *obj.LSym {
8303 if callee.Func == nil {
8304
8305
8306
8307 return callee.Linksym()
8308 }
8309
8310 return callee.LinksymABI(callee.Func.ABI)
8311 }
8312
// min8 returns the smaller of the two int8 values a and b.
func min8(a, b int8) int8 {
	if b < a {
		return b
	}
	return a
}
8319
// max8 returns the larger of the two int8 values a and b.
func max8(a, b int8) int8 {
	if b > a {
		return b
	}
	return a
}
8326
8327
// deferStructFnField is the index of the "fn" field in the struct
// built by deferstruct; deferstruct contains a consistency check that
// keeps this constant in sync with its field list.
const deferStructFnField = 4

// deferType caches the type synthesized by deferstruct.
var deferType *types.Type
8331
8332
8333
// deferstruct returns (building and caching it on first call) a named
// type "_defer" in the runtime package describing a deferred call
// record. NOTE(review): this presumably must stay field-for-field in
// sync with the runtime's _defer struct — confirm against
// runtime/runtime2.go when changing it.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield creates an unexported field with a package-less symbol.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// fn is declared as uintptr here rather than a function type;
		// deferStructFnField indexes this entry.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Consistency check: deferStructFnField must index the fn field.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Set the underlying struct, then compute its layout.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
8372
8373
8374
8375
8376
// SpillSlotAddr returns a memory operand addressing the given spill
// slot as an offset from baseReg, displaced by extraOffset. It only
// forms the address; the slot itself must already exist.
func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
	return obj.Addr{
		Name: obj.NAME_NONE,
		Type: obj.TYPE_MEM,
		Reg: baseReg,
		Offset: spill.Offset + extraOffset,
	}
}
8385
// BoundsCheckFunc and ExtendCheckFunc hold, indexed by bounds-check
// kind, the symbols of the runtime functions called when a bounds
// check (or a 32-bit index-extension check) fails.
var (
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)
8390