1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "internal/goexperiment"
16 "internal/runtime/gc"
17 "os"
18 "path/filepath"
19 "slices"
20 "strings"
21
22 "cmd/compile/internal/abi"
23 "cmd/compile/internal/base"
24 "cmd/compile/internal/ir"
25 "cmd/compile/internal/liveness"
26 "cmd/compile/internal/objw"
27 "cmd/compile/internal/reflectdata"
28 "cmd/compile/internal/rttype"
29 "cmd/compile/internal/ssa"
30 "cmd/compile/internal/staticdata"
31 "cmd/compile/internal/typecheck"
32 "cmd/compile/internal/types"
33 "cmd/internal/obj"
34 "cmd/internal/objabi"
35 "cmd/internal/src"
36 "cmd/internal/sys"
37
38 rtabi "internal/abi"
39 )
40
// Shared SSA backend state, set up once by InitConfig/InitEnv and then
// read by every buildssa call.
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache // per-worker compilation caches, indexed by worker

var ssaDump string        // function name to dump SSA for, from GOSSAFUNC
var ssaDir string         // optional output directory for dumps, from GOSSADIR
var ssaDumpStdout bool    // also print the dump to stdout (GOSSAFUNC has a "+" suffix)
var ssaDumpCFG string     // phase(s) to dump CFGs for (the ":phase" suffix of GOSSAFUNC)
const ssaDumpFile = "ssa.html"

// ssaDumpInlined records functions that were inlined into the function
// being dumped, so their sources can be shown alongside it.
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation bounds the total byte size of small
// pointer-free heap allocations that newHeapaddr will coalesce into a
// single combined allocation.
const maxAggregatedHeapAllocation = 16
57
58 func DumpInline(fn *ir.Func) {
59 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
60 ssaDumpInlined = append(ssaDumpInlined, fn)
61 }
62 }
63
64 func InitEnv() {
65 ssaDump = os.Getenv("GOSSAFUNC")
66 ssaDir = os.Getenv("GOSSADIR")
67 if ssaDump != "" {
68 if strings.HasSuffix(ssaDump, "+") {
69 ssaDump = ssaDump[:len(ssaDump)-1]
70 ssaDumpStdout = true
71 }
72 spl := strings.Split(ssaDump, ":")
73 if len(spl) > 1 {
74 ssaDump = spl[0]
75 ssaDumpCFG = spl[1]
76 }
77 }
78 }
79
// InitConfig initializes the package-level SSA state: the shared
// *ssa.Config, the per-worker caches, and the runtime function and
// variable symbols referenced during SSA construction.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Pre-create derived pointer/slice types needed by the backend,
	// before pointer-type caching is disabled below.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                      // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // pointer to the runtime map type
	_ = types.NewPtr(deferstruct())                                        // pointer to the runtime defer record
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Look up runtime functions and variables used by the backend.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.GrowsliceBuf = typecheck.LookupRuntimeFunc("growsliceBuf")
	ir.Syms.GrowsliceBufNoAlias = typecheck.LookupRuntimeFunc("growsliceBufNoAlias")
	ir.Syms.GrowsliceNoAlias = typecheck.LookupRuntimeFunc("growsliceNoAlias")
	ir.Syms.MoveSlice = typecheck.LookupRuntimeFunc("moveSlice")
	ir.Syms.MoveSliceNoScan = typecheck.LookupRuntimeFunc("moveSliceNoScan")
	ir.Syms.MoveSliceNoCap = typecheck.LookupRuntimeFunc("moveSliceNoCap")
	ir.Syms.MoveSliceNoCapNoScan = typecheck.LookupRuntimeFunc("moveSliceNoCapNoScan")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	// Size-class-specialized malloc entry points; index 0 is unused
	// (loops start at 1), matching the "SC%d" naming.
	for i := 1; i < len(ir.Syms.MallocGCSmallNoScan); i++ {
		ir.Syms.MallocGCSmallNoScan[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallNoScanSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCSmallScanNoHeader); i++ {
		ir.Syms.MallocGCSmallScanNoHeader[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallScanNoHeaderSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCTiny); i++ {
		ir.Syms.MallocGCTiny[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcTinySize%d", i))
	}
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Memequal = typecheck.LookupRuntimeFunc("memequal")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.PanicSimdImm = typecheck.LookupRuntimeFunc("panicSimdImm")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasAVX = typecheck.LookupRuntimeVar("x86HasAVX")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// Wasm uses the Go-coded bounds-check panic functions instead of
	// the assembly panic shims used on other architectures.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	}

	// Wasm helper symbols and sigpanic; looked up unconditionally.
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
216
// InitTables initializes the intrinsics tables. The nil argument
// presumably selects the default/full intrinsic set — see initIntrinsics.
func InitTables() {
	initIntrinsics(nil)
}
220
221
222
223
224
225
226
227
// AbiForBodylessFuncStackMap returns the ABI used to compute the stack
// map of a function without a body. It always returns a fresh copy of
// ABI0; fn is currently unused.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
231
232
233
234 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
235 if buildcfg.Experiment.RegabiArgs {
236
237 if fn == nil {
238 return abi1
239 }
240 switch fn.ABI {
241 case obj.ABI0:
242 return abi0
243 case obj.ABIInternal:
244
245
246 return abi1
247 }
248 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
249 panic("not reachable")
250 }
251
252 a := abi0
253 if fn != nil {
254 if fn.Pragma&ir.RegisterParams != 0 {
255 a = abi1
256 }
257 }
258 return a
259 }
260
261
262
263
264
265
266
267
268
269
270
271
// emitOpenDeferInfo emits the function's open-coded defer metadata as a
// content-addressable symbol attached to the function's FuncInfo. The
// encoding is two uvarints: the negated frame offset of the deferBits
// temporary and the negated frame offset of the first defer-closure slot.
// NOTE(review): the consumer of this encoding (runtime stack unwinding,
// presumably) is not visible here — confirm the format against it.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the defer-closure slots were laid out contiguously,
	// one pointer-sized slot per defer, starting at firstOffset.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative; negate so they fit in a uvarint.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
292
293
294
// buildssa builds an SSA function for fn and runs the SSA compiler on it.
// worker indexes the per-worker cache to use. The steps below are
// order-dependent: state setup, entry-block creation, open-coded defer
// eligibility, parameter/closure materialization, body conversion, phi
// insertion, ssa.Compile, frame allocation, and register-arg spill records.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// Decide whether this is the function named by GOSSAFUNC. The name
	// may be given bare, as pkgpath.name, or with an ABI selector
	// ("name<N>" is rewritten to "name,N" form below).
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			// Trailing ",<digit>" selects a specific ABI variant.
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				// "name<N>" form: normalize to "name,N".
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump ||
			pkgDotName == ssaDump ||
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump)
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the IR body dump so it can be written into the HTML.
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Decide on race/msan/asan instrumentation for this function.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
			if base.Flag.Race {
				s.instrumentEnterExit = true
			}
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate the entry block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// The sources column and the AST dump go in first.
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether defers in this function can be open-coded.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Disabled on shared/dynlink 386 — NOTE(review): the original
		// rationale comment was stripped; confirm against upstream.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Race enter/exit instrumentation and open-coded defers don't mix.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// All result parameters must live on the stack for open-coded
		// defers to be usable.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Too much code-size cost: each return emits the deferred calls.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits temporary: an addressable byte on the
		// stack tracking which defers have been armed.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// Initialize deferBits to zero (OpConst8 with zero AuxInt).
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Keep the slot live so the zero value is visible to the
		// runtime even if no defer statement is ever reached.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record output parameters assigned to registers for debug info.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			// Incoming parameter: address relative to SP at entry.
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// Locals get their address lazily, at first use.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments, spilling register args that are not SSAable.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v)
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// SSAable type but not an SSAable name:
						// store the whole arg to its stack home.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Spill each register piece individually.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables from the closure context pointer.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// Unoptimized range-function body: spill the closure
			// pointer to a named, addrtaken slot so it stays findable.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep the slot live past the store.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			// Pointer to this captured variable's field in the closure struct.
			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// A by-value, non-addrtaken, SSAable capture becomes a
			// plain local initialized from the closure field.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// By-reference captures store a pointer in the field;
			// load it to get the variable's address.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fall through to an implicit return after the last statement.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call into the ssa package to compile the function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record spill locations for register-passed input parameters; this
	// must happen after AllocFrame so FrameOffset is final.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		if len(offs) < len(typs) {
			s.Fatalf("len(offs)=%d < len(typs)=%d, params=\n%s", len(offs), len(typs), params)
		}
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
622
623 func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
624 typs, offs := paramAssignment.RegisterTypesAndOffsets()
625 for i, t := range typs {
626 if pointersOnly && !t.IsPtrShaped() {
627 continue
628 }
629 r := paramAssignment.Registers[i]
630 o := offs[i]
631 op, reg := ssa.ArgOpAndRegisterFor(r, abi)
632 aux := &ssa.AuxNameOffset{Name: n, Offset: o}
633 v := s.newValue0I(op, t, reg)
634 v.Aux = aux
635 p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
636 s.store(t, p, v)
637 }
638 }
639
640
641
642
643
644
645
646 func (s *state) zeroResults() {
647 for _, f := range s.curfn.Type().Results() {
648 n := f.Nname.(*ir.Name)
649 if !n.OnStack() {
650
651
652
653 continue
654 }
655
656 if typ := n.Type(); ssa.CanSSA(typ) {
657 s.assign(n, s.zeroVal(typ), false, 0)
658 } else {
659 if typ.HasPointers() || ssa.IsMergeCandidate(n) {
660 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
661 }
662 s.zero(n.Type(), s.decladdrs[n])
663 }
664 }
665 }
666
667
668
669 func (s *state) paramsToHeap() {
670 do := func(params []*types.Field) {
671 for _, f := range params {
672 if f.Nname == nil {
673 continue
674 }
675 n := f.Nname.(*ir.Name)
676 if ir.IsBlank(n) || n.OnStack() {
677 continue
678 }
679 s.newHeapaddr(n)
680 if n.Class == ir.PPARAM {
681 s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
682 }
683 }
684 }
685
686 typ := s.curfn.Type()
687 do(typ.Recvs())
688 do(typ.Params())
689 do(typ.Results())
690 }
691
692
693
694
695 func allocSizeAndAlign(t *types.Type) (int64, int64) {
696 size, align := t.Size(), t.Alignment()
697 if types.PtrSize == 4 && align == 4 && size >= 8 {
698
699 size = types.RoundUp(size, 8)
700 align = 8
701 }
702 return size, align
703 }
704 func allocSize(t *types.Type) int64 {
705 size, _ := allocSizeAndAlign(t)
706 return size
707 }
708 func allocAlign(t *types.Type) int64 {
709 _, align := allocSizeAndAlign(t)
710 return align
711 }
712
713
// newHeapaddr allocates heap storage for n and records its address.
// Small pointer-free allocations are batched: several variables share a
// single combined allocation, finalized by flushPendingHeapAllocations.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	// Types with pointers, large objects, and zero-sized objects are
	// not batched; they get their own (possibly specialized) allocation.
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type()))
		return
	}

	// Compute how much of the current batch is already consumed; if n
	// would overflow the budget, flush the batch and start a new one.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value
	if len(s.pendingHeapAllocations) == 0 {
		// First member of a new batch: emit a placeholder allocation
		// (non-specialized, since its final size isn't known yet; it is
		// rewritten by flushPendingHeapAllocations).
		allocCall = s.newObjectNonSpecialized(n.Type(), nil)
	} else {
		// Reuse the batch's existing allocation call.
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}

	// n's address is an offset into the shared allocation; the offset
	// (AuxInt) is fixed up when the batch is flushed.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	s.setHeapaddr(n.Pos(), n, v)
}
749
// flushPendingHeapAllocations finalizes the current batch of aggregated
// small heap allocations: it assigns each member its offset within the
// shared object and rewrites the placeholder allocation call to request
// the combined size (switching to mallocgc or a specialized variant).
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return
	}
	s.pendingHeapAllocations = nil
	ptr := pending[0].Args[0] // result pointer of the allocation call
	call := ptr.Args[0]       // the allocation call itself

	if len(pending) == 1 {
		// Batch of one: keep the original newobject call and turn the
		// OffPtr(0) into a plain copy of its result.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort by decreasing alignment so that laying members out back to
	// back keeps every member properly aligned.
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign each member its byte offset (AuxInt) within the object.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the call to a raw malloc of the combined size: nil type
	// pointer (the batch is pointer-free) and needzero=true.
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type),
		s.constBool(true),
		call.Args[1], // original memory argument
	}
	mallocSym := ir.Syms.MallocGC
	if specialMallocSym := s.specializedMallocSym(size, false); specialMallocSym != nil {
		mallocSym = specialMallocSym
	}
	call.Aux = ssa.StaticAuxCall(mallocSym, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// The call now returns an unsafe.Pointer rather than a typed pointer.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
804
// specializedMallocSym returns the size-specialized malloc entry point
// for an allocation of the given size and pointer-ness, or nil when no
// specialization applies (feature disabled, or object too large).
func (s *state) specializedMallocSym(size int64, hasPointers bool) *obj.LSym {
	if !s.sizeSpecializedMallocEnabled() {
		return nil
	}
	ptrSize := s.config.PtrSize
	ptrBits := ptrSize * 8
	// Threshold below which heap bits live in the span itself —
	// NOTE(review): mirrors the runtime's minSizeForMallocHeader; keep
	// in sync with the runtime's definition.
	minSizeForMallocHeader := ptrSize * ptrBits
	heapBitsInSpan := size <= minSizeForMallocHeader
	if !heapBitsInSpan {
		return nil
	}
	divRoundUp := func(n, a uintptr) uintptr { return (n + a - 1) / a }
	sizeClass := gc.SizeToSizeClass8[divRoundUp(uintptr(size), gc.SmallSizeDiv)]
	if hasPointers {
		return ir.Syms.MallocGCSmallScanNoHeader[sizeClass]
	}
	if size < gc.TinySize {
		// Tiny pointer-free objects are specialized by exact size,
		// not by size class.
		return ir.Syms.MallocGCTiny[size]
	}
	return ir.Syms.MallocGCSmallNoScan[sizeClass]
}
826
827 func (s *state) sizeSpecializedMallocEnabled() bool {
828 if base.Flag.CompilingRuntime {
829
830
831
832
833
834
835
836 return false
837 }
838
839 return buildcfg.Experiment.SizeSpecializedMalloc && !base.Flag.Cfg.Instrumenting
840 }
841
842
843
// setHeapaddr creates a new local "&n" holding ptr, the heap address of
// n, and records it as n.Heapaddr. ptr must be a *T where T is n's type.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the pointer-holding variable "&name".
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
862
863
864 func (s *state) newObject(typ *types.Type) *ssa.Value {
865 if typ.Size() == 0 {
866 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
867 }
868 rtype := s.reflectType(typ)
869 if specialMallocSym := s.specializedMallocSym(typ.Size(), typ.HasPointers()); specialMallocSym != nil {
870 return s.rtcall(specialMallocSym, true, []*types.Type{types.NewPtr(typ)},
871 s.constInt(types.Types[types.TUINTPTR], typ.Size()),
872 rtype,
873 s.constBool(true),
874 )[0]
875 }
876 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
877 }
878
879
880
881 func (s *state) newObjectNonSpecialized(typ *types.Type, rtype *ssa.Value) *ssa.Value {
882 if typ.Size() == 0 {
883 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
884 }
885 if rtype == nil {
886 rtype = s.reflectType(typ)
887 }
888 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
889 }
890
// checkPtrAlignment emits a runtime checkptrAlignment call for the
// unsafe-pointer conversion n with converted value v. count, if non-nil,
// is the element count for a slice-of-array conversion (in which case
// the check applies to the array's element type).
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// Fast exit: a 1-aligned element with no overflow-able multiply
	// (size 0/1 or no count) can never fail the runtime check.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	// Prefer an rtype expression from the front end; otherwise derive
	// the type pointer from the element type.
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
921
922
923
924 func (s *state) reflectType(typ *types.Type) *ssa.Value {
925
926
927 lsym := reflectdata.TypeLinksym(typ)
928 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
929 }
930
931 func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
932
933 fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
934 targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
935 if err != nil {
936 writer.Logf("cannot read sources for function %v: %v", fn, err)
937 }
938
939
940 var inlFns []*ssa.FuncLines
941 for _, fi := range ssaDumpInlined {
942 elno := fi.Endlineno
943 fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
944 fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
945 if err != nil {
946 writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
947 continue
948 }
949 inlFns = append(inlFns, fnLines)
950 }
951
952 slices.SortFunc(inlFns, ssa.ByTopoCmp)
953 if targetFn != nil {
954 inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
955 }
956
957 writer.WriteSources("sources", inlFns)
958 }
959
960 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
961 f, err := os.Open(os.ExpandEnv(file))
962 if err != nil {
963 return nil, err
964 }
965 defer f.Close()
966 var lines []string
967 ln := uint(1)
968 scanner := bufio.NewScanner(f)
969 for scanner.Scan() && ln <= end {
970 if ln >= start {
971 lines = append(lines, scanner.Text())
972 }
973 ln++
974 }
975 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
976 }
977
978
979
980
// updateUnsetPredPos propagates a position from b backwards to any
// predecessor that lacks one (LackingPos), recursing so that chains of
// position-less predecessors all pick up a sensible position.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue // predecessor already has a real position
		}
		// Compute bestPos lazily, once per call: prefer the position of
		// the first position-carrying value in b, falling back to b.Pos.
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// p now has a position, so its own predecessors can inherit it.
		s.updateUnsetPredPos(p)
	}
}
1009
1010
// openDeferInfo describes one open-coded defer in the current function.
type openDeferInfo struct {
	// The defer call expression itself.
	n *ir.CallExpr
	// SSA value of the function/closure being deferred, evaluated at
	// the point of the defer statement.
	closure *ssa.Value
	// Stack slot holding the deferred closure; its frame offset is
	// encoded by emitOpenDeferInfo.
	closureNode *ir.Name
}
1022
// state holds all the working state of buildssa while converting a
// single function's IR to SSA form.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable
	// symbol to ssa value); reset by startBlock, saved by endBlock.
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in
	// the current block (forward references, resolved by insertPhis);
	// cleared at each startBlock.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by
	// block ID; recorded by endBlock.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables on the stack.
	decladdrs map[*ir.Name]*ssa.Value

	// starting values: memory, stack pointer, and static base pointer.
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// open-coded defer bookkeeping: address and stack temp of deferBits.
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack, maintained by pushLine/popLine.
	line []src.XPos

	// the last line number processed; it may have been popped.
	lastPos src.XPos

	// list of panic calls by function name and line number, so we can
	// share panic calls for the same position.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit (race)
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order.
	openDefers []*openDeferInfo

	// blocks used to share a single exit sequence across returns when
	// defers are present — NOTE(review): semantics inferred from names;
	// confirm against the exit/return lowering code.
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	prevCall *ssa.Value // the most recently emitted call value

	// pendingHeapAllocations is the current batch of small pointer-free
	// heap allocations to be aggregated into one allocation; see
	// newHeapaddr and flushPendingHeapAllocations.
	pendingHeapAllocations []*ssa.Value

	// used by append lowering — NOTE(review): not exercised in this
	// chunk; confirm semantics at its use site.
	appendTargets map[ir.Node]bool

	// first reported position of each block, indexed by block ID;
	// maintained by startBlock/endBlock/pushLine.
	blockStarts []src.XPos

	// backing-store info for stack-allocated slice backing stores —
	// NOTE(review): populated/used outside this chunk.
	backingStores map[ir.Node]*backingStoreInfo
}
1113
// backingStoreInfo describes a reusable backing store for slice data.
// NOTE(review): its producers/consumers are outside this chunk; field
// meanings below are inferred from names — confirm at use sites.
type backingStoreInfo struct {
	// capacity-related constant for the store (inferred).
	K int64
	// the backing store variable itself.
	store *ir.Name
	// variable tracking whether the store is in use (inferred).
	used *ir.Name
	// whether the used-flag is statically known (inferred).
	usedStatic bool
}
1126
// funcLine keys the panics map: one shared panic block per
// (runtime function, file base, line) triple.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1132
// ssaLabel records the SSA blocks associated with a Go label: the jump
// target of a goto, and the targets of labeled break/continue.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1138
1139
1140 func (s *state) label(sym *types.Sym) *ssaLabel {
1141 lab := s.labels[sym.Name]
1142 if lab == nil {
1143 lab = new(ssaLabel)
1144 s.labels[sym.Name] = lab
1145 }
1146 return lab
1147 }
1148
// Logf logs a message via the underlying *ssa.Func.
func (s *state) Logf(msg string, args ...any) { s.f.Logf(msg, args...) }

// Log reports whether logging is enabled for the function being built.
func (s *state) Log() bool { return s.f.Log() }

// Fatalf reports a compiler error at the current position (top of the
// line stack).
func (s *state) Fatalf(msg string, args ...any) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}

// Warnl emits a warning at pos via the underlying *ssa.Func.
func (s *state) Warnl(pos src.XPos, msg string, args ...any) { s.f.Warnl(pos, msg, args...) }

// Debug_checknil reports whether nil-check debugging is enabled.
func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
1156
// ssaMarker returns a synthetic ir.Name with the given name. These are
// used only as unique keys for SSA variable bookkeeping (e.g. memVar in
// s.vars); they never appear in the final program.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1160
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables used during SSA construction
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1174
1175
1176 func (s *state) startBlock(b *ssa.Block) {
1177 if s.curBlock != nil {
1178 s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
1179 }
1180 s.curBlock = b
1181 s.vars = map[ir.Node]*ssa.Value{}
1182 clear(s.fwdVars)
1183 for len(s.blockStarts) <= int(b.ID) {
1184 s.blockStarts = append(s.blockStarts, src.NoXPos)
1185 }
1186 }
1187
1188
1189
1190
// endBlock marks the end of generating code for the current block and
// returns that block. It returns nil if there is no current block.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	// Flush any batched heap allocations while the block is still
	// current (this may append values to it).
	s.flushPendingHeapAllocations()

	// Record the variable definitions live at the end of this block.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Blocks with no position-bearing content get NoXPos so a
		// position can be assigned later from a successor —
		// presumably during later SSA processing; confirm in the
		// ssa package.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
		if s.blockStarts[b.ID] == src.NoXPos {
			s.blockStarts[b.ID] = s.lastPos
		}
	}
	return b
}
1218
1219
// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// The frontend may emit nodes with no position; fall back to
		// the enclosing (parent) position in that case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	// Record the first known position seen in the current block.
	if b := s.curBlock; b != nil && s.blockStarts[b.ID] == src.NoXPos {
		s.blockStarts[b.ID] = line
	}

	s.line = append(s.line, line)
}
1239
1240
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos returns the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1249
1250
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created value may be a statement boundary
// (false means never; true means maybe).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created value may be a statement boundary
// (false means never; true means maybe).
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created value may be a statement boundary
// (false means never; true means maybe).
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4A adds a new value with four arguments and an aux value to the current block.
func (s *state) newValue4A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4A(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1354
// entryBlock returns the block that entry values (e.g. SP, SB) should
// be placed in.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// With optimization disabled, use the current block instead of
		// the function entry — presumably to keep the new value close
		// to its use for better unoptimized code/debugging; confirm
		// against the original comment upstream.
		b = s.curBlock
	}
	return b
}
1366
1367
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1401
1402
// const* helpers route constant creation to the function, which caches them.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}

// constInt returns an integer constant of the target's pointer width:
// ConstInt64 on 64-bit targets, otherwise ConstInt32 after checking
// that c fits in 32 bits.
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}
1443
1444
1445
// newValueOrSfCall* are wrappers around newValue*, which may instead create
// a call to a soft-float runtime function when compiling in soft-float mode.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1462
// instrumentKind describes the kind of memory access being instrumented
// for the race/msan/asan sanitizers (see instrument2).
type instrumentKind uint8

const (
	// Explicitly typed so the constants are instrumentKind values
	// rather than untyped ints (they are only ever used as such).
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1470
// instrument emits sanitizer instrumentation for a single access of kind
// to t-typed memory at addr.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}

// instrumentFields instruments a read/write operation on addr.
// When instrumenting for msan or asan and t is a struct type, it
// instruments the operation for each (non-blank) field individually
// instead of for the whole struct at once.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !isStructNotSIMD(t) {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			continue
		}
		// Recurse on each field at its offset within the struct.
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}
1491
// instrumentMove instruments a move of t-typed data from src to dst.
// msan has a dedicated move hook; the other sanitizers model a move as
// a read of src followed by a write of dst.
func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
	if base.Flag.MSan {
		s.instrument2(t, dst, src, instrumentMove)
	} else {
		s.instrument(t, src, instrumentRead)
		s.instrument(t, dst, instrumentWrite)
	}
}
1500
// instrument2 emits a sanitizer runtime call (race/msan/asan) for an
// access of the given kind to t.Size() bytes at addr (and at addr2 for
// moves; addr2 must be nil otherwise).
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // nothing to instrument for zero-sized accesses
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		// Address known not to need instrumentation.
		return
	}

	var fn *obj.LSym
	needWidth := false // whether the runtime call takes an explicit byte count

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// Composite types (with more than one component) use the
		// range variants, which cover the whole byte range.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component accesses use the plain race hooks, which
		// take no width argument.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1581
// load loads t-typed data from src, with sanitizer instrumentation.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad is like load but without sanitizer instrumentation —
// presumably for compiler-generated accesses that should not be
// reported; confirm against callers.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store stores val (of type t) to dst and threads the new memory state.
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

// zero zeroes t.Size() bytes at dst (with write instrumentation).
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1601
// move copies t-typed data from src to dst. src and dst must not
// partially overlap (use moveWhichMayOverlap if they might).
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}

// moveWhichMayOverlap copies t-typed data from src to dst. If mayOverlap
// is true, the source and destination may partially overlap (which can
// only happen for array assignments through pointers), in which case a
// large non-inlinable copy must go through the runtime's memmove, whose
// semantics are overlap-safe; a plain OpMove presumably is not —
// NOTE(review): the original rationale comment was removed here, confirm
// against upstream.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		if t.HasPointers() {
			// Pointer-containing data must use typedmemmove so write
			// barriers are handled by the runtime.
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// Record a write-barrier position for this function.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1648
1649
1650 func (s *state) stmtList(l ir.Nodes) {
1651 for _, n := range l {
1652 s.stmt(n)
1653 }
1654 }
1655
1656 func peelConvNop(n ir.Node) ir.Node {
1657 if n == nil {
1658 return n
1659 }
1660 for n.Op() == ir.OCONVNOP {
1661 n = n.(*ir.ConvExpr).X
1662 }
1663 return n
1664 }
1665
1666
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If there is no current block and n isn't a label (which might be
	// the target of a goto and so start a new block), this statement
	// is unreachable: skip it.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op

	// Expression statements
	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			// Direct calls to runtime functions that never return
			// (throw, gopanic, and the panic helpers) end the block
			// with an Exit so no fallthrough edge is generated.
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer implementation strategy was chosen.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		// x, ok := y.(T)
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Result type isn't SSA-able; dottype must have produced
			// a load. Peel it back to an address + deref assignment,
			// checking that the loaded memory state is still current.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing to do because the label can't be targeted.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block due to a forward goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// End the current block (if any) with an edge to the label's
		// block, then continue generating code there.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x = x assignment. No point in doing anything here.
			// Skipping also avoids emitting a VARDEF followed by a
			// self-copy, which would make x look dead before the
			// vardef (see Go issue #14904).
			return
		}

		// mayOverlap tracks whether the LHS and RHS might refer to
		// partially overlapping memory. That can only happen when
		// both sides are dereferences (see moveWhichMayOverlap).
		ny := peelConvNop(n.Y)
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && ny.Op() == ir.ODEREF)
		if ny != nil && ny.Op() == ir.ODEREF {
			p := peelConvNop(ny.(*ir.StarExpr).X)
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// Pointers into string data point at immutable memory,
				// so they cannot overlap with the write destination.
				mayOverlap = false
			}
		}

		// Evaluate the RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// Composite literals with nonzero fields were
				// rewritten earlier; any that remain must be zero,
				// so treat them as the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether the result of append is being written
				// back to the same slice; if so it can be handled
				// specially (len-only update on the non-growth path).
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd it lives on the stack and
				// needs no write barriers, so nothing special to do.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate the RHS for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil // signals assign to emit a zeroing instead
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source
			// (x = x[i:j:k]); skip writing back fields that cannot
			// change (see Go issue #14855).
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// With a default low bound, the pointer can't change; with
			// default high/max bounds as well, len/cap can't change
			// either.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: only compile the taken branch
			// (plus the condition's init statements).
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty then/else bodies branch straight to bEnd.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue: innermost loop/switch target
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the label's target
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Init; Cond; Post { Body }
		// Blocks: cond check, body, post/increment, after-loop.
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // should have been rewritten earlier
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have a correct position
		bBody.Pos = n.Pos()

		// first, jump to the condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			// no condition: unconditional loop
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up continue/break targets for the body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate post/increment code
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// If the increment block ended up with no position,
			// borrow the condition block's position.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by the front end into their
		// compiled form. The main remaining task is to correctly hook
		// up any break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled switch/select
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// The compiled body ends every reachable path with an explicit
		// branch. If we still have a current block here, it is
		// unreachable: terminate it with an Exit.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Compute the case range. The front end sorts the cases, so
		// the first and last entries are the minimum and maximum.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}
		// Compare idx-min against max-min: a single unsigned compare
		// checks both bounds at once.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)   // in range - use jump table
		b.AddEdgeTo(bEnd) // out of range - no table entry will match
		b.Likely = ssa.BranchLikely

		// Build jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			// Mask the index so speculative execution can't read
			// outside the table.
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Figure out where we should go for each index in the table.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd // default target
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target for this case value.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Try the interface-switch cache first (when optimizing and the
		// target supports it), falling back to a runtime call on miss.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// The cache requires an atomic pointer load instruction;
			// double-check that it is available.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick arithmetic ops of the right width for uintptr math.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load the cache pointer out of the descriptor, using an
			// atomic load so we see a fully written cache.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize the hash probe variable.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load the mask from the start of the cache.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
			// Jump to the probe loop.
			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// At loop head, compute a pointer to the cache entry:
			//   e := &cache.Entries[hash&mask]
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// hash++ for the next probe (linear probing).
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Check for a cache hit:
			//   if e.Typ == t { goto hit }
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// Check for an empty entry (end of probe chain):
			//   if e.Typ == nil { goto miss }
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// On a hit, load the results out of the cache entry:
			//   Case = e.Case
			//   Itab = e.Itab
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// On a miss, fall through to the runtime call below.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Cache hits merge back in here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		// Only the nil-check side effect is wanted; the result value
		// is deliberately discarded.
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		// The InlMark takes the current memory so it stays ordered
		// with respect to memory operations.
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2359
2360
2361
2362 const shareDeferExits = false
2363
2364
2365
2366
// exit processes any code that needs to be generated just before
// returning: deferred-call handling and storing of results. It returns
// the BlockRet block that ends the control flow; its control value is
// the final memory state.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated defer-exit code.
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers: call deferreturn. Use the
			// function's end position so the call is attributed to
			// the closing brace rather than this return statement.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Collect the result values. The extra slot is for the final
	// memory state.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)

	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) { // result is in an SSA variable
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// We are about to store to the result slot.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() { // result escaped to the heap
			// Copy the heap-allocated result back to the result slot.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Result is on the stack but not SSA-able (e.g. too
			// large); dereference it in place.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Race instrumentation: the exit hook must come after loading any
	// heap-allocated results, so those reads are attributed to this
	// function rather than the caller.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2442
// opAndType is the key type of opToSSA: it pairs a front-end IR
// operator with an operand kind to select a machine-independent ssa.Op.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2447
// opToSSA maps an IR operator applied to a concrete operand kind to the
// SSA opcode implementing it. Lookups go through state.ssaOp, which
// calls Fatalf on a missing entry, so only combinations handled by this
// table may reach it; other kinds (e.g. string comparison, complex
// arithmetic) are lowered elsewhere.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}: ssa.OpAdd8,
	{ir.OADD, types.TUINT8}: ssa.OpAdd8,
	{ir.OADD, types.TINT16}: ssa.OpAdd16,
	{ir.OADD, types.TUINT16}: ssa.OpAdd16,
	{ir.OADD, types.TINT32}: ssa.OpAdd32,
	{ir.OADD, types.TUINT32}: ssa.OpAdd32,
	{ir.OADD, types.TINT64}: ssa.OpAdd64,
	{ir.OADD, types.TUINT64}: ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}: ssa.OpSub8,
	{ir.OSUB, types.TUINT8}: ssa.OpSub8,
	{ir.OSUB, types.TINT16}: ssa.OpSub16,
	{ir.OSUB, types.TUINT16}: ssa.OpSub16,
	{ir.OSUB, types.TINT32}: ssa.OpSub32,
	{ir.OSUB, types.TUINT32}: ssa.OpSub32,
	{ir.OSUB, types.TINT64}: ssa.OpSub64,
	{ir.OSUB, types.TUINT64}: ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}: ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}: ssa.OpMul8,
	{ir.OMUL, types.TUINT8}: ssa.OpMul8,
	{ir.OMUL, types.TINT16}: ssa.OpMul16,
	{ir.OMUL, types.TUINT16}: ssa.OpMul16,
	{ir.OMUL, types.TINT32}: ssa.OpMul32,
	{ir.OMUL, types.TUINT32}: ssa.OpMul32,
	{ir.OMUL, types.TINT64}: ssa.OpMul64,
	{ir.OMUL, types.TUINT64}: ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division distinguishes signed from unsigned operands.
	{ir.ODIV, types.TINT8}: ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}: ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}: ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}: ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}: ssa.OpMod8,
	{ir.OMOD, types.TUINT8}: ssa.OpMod8u,
	{ir.OMOD, types.TINT16}: ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}: ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}: ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}: ssa.OpAnd8,
	{ir.OAND, types.TUINT8}: ssa.OpAnd8,
	{ir.OAND, types.TINT16}: ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}: ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}: ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}: ssa.OpOr8,
	{ir.OOR, types.TUINT8}: ssa.OpOr8,
	{ir.OOR, types.TINT16}: ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}: ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}: ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}: ssa.OpXor8,
	{ir.OXOR, types.TUINT8}: ssa.OpXor8,
	{ir.OXOR, types.TINT16}: ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}: ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}: ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Pointer-shaped kinds (func, map, chan, pointers) all compare as
	// pointers via OpEqPtr/OpNeqPtr.
	{ir.OEQ, types.TBOOL}: ssa.OpEqB,
	{ir.OEQ, types.TINT8}: ssa.OpEq8,
	{ir.OEQ, types.TUINT8}: ssa.OpEq8,
	{ir.OEQ, types.TINT16}: ssa.OpEq16,
	{ir.OEQ, types.TUINT16}: ssa.OpEq16,
	{ir.OEQ, types.TINT32}: ssa.OpEq32,
	{ir.OEQ, types.TUINT32}: ssa.OpEq32,
	{ir.OEQ, types.TINT64}: ssa.OpEq64,
	{ir.OEQ, types.TUINT64}: ssa.OpEq64,
	{ir.OEQ, types.TINTER}: ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}: ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,

	{ir.ONE, types.TBOOL}: ssa.OpNeqB,
	{ir.ONE, types.TINT8}: ssa.OpNeq8,
	{ir.ONE, types.TUINT8}: ssa.OpNeq8,
	{ir.ONE, types.TINT16}: ssa.OpNeq16,
	{ir.ONE, types.TUINT16}: ssa.OpNeq16,
	{ir.ONE, types.TINT32}: ssa.OpNeq32,
	{ir.ONE, types.TUINT32}: ssa.OpNeq32,
	{ir.ONE, types.TINT64}: ssa.OpNeq64,
	{ir.ONE, types.TUINT64}: ssa.OpNeq64,
	{ir.ONE, types.TINTER}: ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}: ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,

	// Ordered comparisons distinguish signed (Less/Leq) from unsigned
	// (LessU/LeqU) operands. Only OLT/OLE appear here: OGT/OGE are
	// canonicalized by swapping operands before the table lookup.
	{ir.OLT, types.TINT8}: ssa.OpLess8,
	{ir.OLT, types.TUINT8}: ssa.OpLess8U,
	{ir.OLT, types.TINT16}: ssa.OpLess16,
	{ir.OLT, types.TUINT16}: ssa.OpLess16U,
	{ir.OLT, types.TINT32}: ssa.OpLess32,
	{ir.OLT, types.TUINT32}: ssa.OpLess32U,
	{ir.OLT, types.TINT64}: ssa.OpLess64,
	{ir.OLT, types.TUINT64}: ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}: ssa.OpLeq8,
	{ir.OLE, types.TUINT8}: ssa.OpLeq8U,
	{ir.OLE, types.TINT16}: ssa.OpLeq16,
	{ir.OLE, types.TUINT16}: ssa.OpLeq16U,
	{ir.OLE, types.TINT32}: ssa.OpLeq32,
	{ir.OLE, types.TUINT32}: ssa.OpLeq32U,
	{ir.OLE, types.TINT64}: ssa.OpLeq64,
	{ir.OLE, types.TUINT64}: ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2619
2620 func (s *state) concreteEtype(t *types.Type) types.Kind {
2621 e := t.Kind()
2622 switch e {
2623 default:
2624 return e
2625 case types.TINT:
2626 if s.config.PtrSize == 8 {
2627 return types.TINT64
2628 }
2629 return types.TINT32
2630 case types.TUINT:
2631 if s.config.PtrSize == 8 {
2632 return types.TUINT64
2633 }
2634 return types.TUINT32
2635 case types.TUINTPTR:
2636 if s.config.PtrSize == 8 {
2637 return types.TUINT64
2638 }
2639 return types.TUINT32
2640 }
2641 }
2642
2643 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2644 etype := s.concreteEtype(t)
2645 x, ok := opToSSA[opAndType{op, etype}]
2646 if !ok {
2647 s.Fatalf("unhandled binary op %v %s", op, etype)
2648 }
2649 return x
2650 }
2651
// opAndTwoTypes is the key type of shiftOpToSSA: an IR shift operator
// together with the concrete kind of the shifted value (etype1) and of
// the shift count (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2657
// twoTypes is the key type of the float-conversion tables
// (fpConvOpToSSA and friends): the concrete source kind (etype1) and
// destination kind (etype2) of a conversion.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2662
// twoOpsAndType describes a conversion performed in two SSA steps:
// op1 takes the source value to intermediateType, then op2 takes that
// to the final type. Either op may be OpCopy (a no-op step) or
// OpInvalid (the pair needs special runtime-call lowering; see conv).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2668
// fpConvOpToSSA maps a (from, to) kind pair involving a float to the
// two-step SSA lowering used on most targets. Entries containing
// OpInvalid mark conversions that cannot be expressed directly here
// (64-bit-unsigned cases); conv detects those and falls back to helper
// routines (uint64Tofloat*, float*ToUint*), or replaces the entry from
// the per-architecture tables below.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// Signed integer -> float32: widen to 32/64 bits, then convert.
	{types.TINT8, types.TFLOAT32}: {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	// Signed integer -> float64.
	{types.TINT8, types.TFLOAT64}: {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed integer: convert to 32/64 bits, then truncate.
	{types.TFLOAT32, types.TINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	// float64 -> signed integer.
	{types.TFLOAT64, types.TINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// Unsigned integer -> float32: zero-extend into a wider signed
	// value first; uint64 has no direct lowering here (OpInvalid).
	{types.TUINT8, types.TFLOAT32}: {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// Unsigned integer -> float64.
	{types.TUINT8, types.TFLOAT64}: {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float32 -> unsigned integer; uint32/uint64 targets need the
	// special-case lowering in conv (OpInvalid).
	{types.TFLOAT32, types.TUINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float64 -> unsigned integer.
	{types.TFLOAT64, types.TUINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float: same-size conversions round (presumably to
	// normalize an extended-precision value -- confirm against the
	// OpRound* semantics in the ssa package).
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2717
2718
2719
// fpConvOpToSSA32 overrides fpConvOpToSSA entries on 32-bit targets
// (RegSize == 4, excluding MIPS and softfloat; see conv), where
// dedicated uint32<->float SSA ops exist instead of going through a
// 64-bit intermediate.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2726
2727
// uint64fpConvOpToSSA overrides fpConvOpToSSA entries on targets with
// direct uint64<->float conversion instructions (ARM64, Wasm, S390X)
// and under softfloat; see conv. These replace the OpInvalid entries
// that would otherwise require helper-routine lowering.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2734
// shiftOpToSSA maps (shift operator, value kind, shift-count kind) to
// the SSA shift opcode. Naming: OpLsh{N}x{M} / OpRsh{N}x{M} shift an
// N-bit value by an M-bit count; right shifts of unsigned values use
// the logical-variant Rsh{N}Ux{M} opcodes, signed values the
// arithmetic Rsh{N}x{M} ones. Lookups go through state.ssaShiftOp,
// which expects the count kind to be unsigned (callers convert signed
// counts first).
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2808
2809 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2810 etype1 := s.concreteEtype(t)
2811 etype2 := s.concreteEtype(u)
2812 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2813 if !ok {
2814 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2815 }
2816 return x
2817 }
2818
2819 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2820 if s.config.PtrSize == 4 {
2821 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2822 }
2823 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2824 }
2825
2826 func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
2827 if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
2828
2829 return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
2830 }
2831 if ft.IsInteger() && tt.IsInteger() {
2832 var op ssa.Op
2833 if tt.Size() == ft.Size() {
2834 op = ssa.OpCopy
2835 } else if tt.Size() < ft.Size() {
2836
2837 switch 10*ft.Size() + tt.Size() {
2838 case 21:
2839 op = ssa.OpTrunc16to8
2840 case 41:
2841 op = ssa.OpTrunc32to8
2842 case 42:
2843 op = ssa.OpTrunc32to16
2844 case 81:
2845 op = ssa.OpTrunc64to8
2846 case 82:
2847 op = ssa.OpTrunc64to16
2848 case 84:
2849 op = ssa.OpTrunc64to32
2850 default:
2851 s.Fatalf("weird integer truncation %v -> %v", ft, tt)
2852 }
2853 } else if ft.IsSigned() {
2854
2855 switch 10*ft.Size() + tt.Size() {
2856 case 12:
2857 op = ssa.OpSignExt8to16
2858 case 14:
2859 op = ssa.OpSignExt8to32
2860 case 18:
2861 op = ssa.OpSignExt8to64
2862 case 24:
2863 op = ssa.OpSignExt16to32
2864 case 28:
2865 op = ssa.OpSignExt16to64
2866 case 48:
2867 op = ssa.OpSignExt32to64
2868 default:
2869 s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
2870 }
2871 } else {
2872
2873 switch 10*ft.Size() + tt.Size() {
2874 case 12:
2875 op = ssa.OpZeroExt8to16
2876 case 14:
2877 op = ssa.OpZeroExt8to32
2878 case 18:
2879 op = ssa.OpZeroExt8to64
2880 case 24:
2881 op = ssa.OpZeroExt16to32
2882 case 28:
2883 op = ssa.OpZeroExt16to64
2884 case 48:
2885 op = ssa.OpZeroExt32to64
2886 default:
2887 s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
2888 }
2889 }
2890 return s.newValue1(op, tt, v)
2891 }
2892
2893 if ft.IsComplex() && tt.IsComplex() {
2894 var op ssa.Op
2895 if ft.Size() == tt.Size() {
2896 switch ft.Size() {
2897 case 8:
2898 op = ssa.OpRound32F
2899 case 16:
2900 op = ssa.OpRound64F
2901 default:
2902 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2903 }
2904 } else if ft.Size() == 8 && tt.Size() == 16 {
2905 op = ssa.OpCvt32Fto64F
2906 } else if ft.Size() == 16 && tt.Size() == 8 {
2907 op = ssa.OpCvt64Fto32F
2908 } else {
2909 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2910 }
2911 ftp := types.FloatForComplex(ft)
2912 ttp := types.FloatForComplex(tt)
2913 return s.newValue2(ssa.OpComplexMake, tt,
2914 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
2915 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
2916 }
2917
2918 if tt.IsComplex() {
2919
2920 et := types.FloatForComplex(tt)
2921 v = s.conv(n, v, ft, et)
2922 return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
2923 }
2924
2925 if ft.IsFloat() || tt.IsFloat() {
2926 cft, ctt := s.concreteEtype(ft), s.concreteEtype(tt)
2927 conv, ok := fpConvOpToSSA[twoTypes{cft, ctt}]
2928
2929
2930 if ctt == types.TUINT32 && ft.IsFloat() && !base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil) {
2931
2932 conv.op1 = ssa.OpCvt64Fto64
2933 if cft == types.TFLOAT32 {
2934 conv.op1 = ssa.OpCvt32Fto64
2935 }
2936 conv.op2 = ssa.OpTrunc64to32
2937
2938 }
2939 if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
2940 if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2941 conv = conv1
2942 }
2943 }
2944 if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
2945 if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2946 conv = conv1
2947 }
2948 }
2949
2950 if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
2951 if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
2952
2953 if tt.Size() == 4 {
2954 return s.uint32Tofloat32(n, v, ft, tt)
2955 }
2956 if tt.Size() == 8 {
2957 return s.uint32Tofloat64(n, v, ft, tt)
2958 }
2959 } else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
2960
2961 if ft.Size() == 4 {
2962 return s.float32ToUint32(n, v, ft, tt)
2963 }
2964 if ft.Size() == 8 {
2965 return s.float64ToUint32(n, v, ft, tt)
2966 }
2967 }
2968 }
2969
2970 if !ok {
2971 s.Fatalf("weird float conversion %v -> %v", ft, tt)
2972 }
2973 op1, op2, it := conv.op1, conv.op2, conv.intermediateType
2974
2975 if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
2976
2977 if op1 == ssa.OpCopy {
2978 if op2 == ssa.OpCopy {
2979 return v
2980 }
2981 return s.newValueOrSfCall1(op2, tt, v)
2982 }
2983 if op2 == ssa.OpCopy {
2984 return s.newValueOrSfCall1(op1, tt, v)
2985 }
2986 return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
2987 }
2988
2989 if ft.IsInteger() {
2990
2991 if tt.Size() == 4 {
2992 return s.uint64Tofloat32(n, v, ft, tt)
2993 }
2994 if tt.Size() == 8 {
2995 return s.uint64Tofloat64(n, v, ft, tt)
2996 }
2997 s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
2998 }
2999
3000 if ft.Size() == 4 {
3001 switch tt.Size() {
3002 case 8:
3003 return s.float32ToUint64(n, v, ft, tt)
3004 case 4, 2, 1:
3005
3006 return s.float32ToUint32(n, v, ft, tt)
3007 }
3008 }
3009 if ft.Size() == 8 {
3010 switch tt.Size() {
3011 case 8:
3012 return s.float64ToUint64(n, v, ft, tt)
3013 case 4, 2, 1:
3014
3015 return s.float64ToUint32(n, v, ft, tt)
3016 }
3017
3018 }
3019 s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
3020 return nil
3021 }
3022
3023 s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
3024 return nil
3025 }
3026
3027
// expr converts the expression n to SSA and returns the value holding
// its result. It is a convenience wrapper around exprCheckPtr with
// unsafe-pointer checkptr instrumentation enabled.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
3031
3032 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
3033 if ir.HasUniquePos(n) {
3034
3035
3036 s.pushLine(n.Pos())
3037 defer s.popLine()
3038 }
3039
3040 s.stmtList(n.Init())
3041 switch n.Op() {
3042 case ir.OBYTES2STRTMP:
3043 n := n.(*ir.ConvExpr)
3044 slice := s.expr(n.X)
3045 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
3046 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3047 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
3048 case ir.OSTR2BYTESTMP:
3049 n := n.(*ir.ConvExpr)
3050 str := s.expr(n.X)
3051 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
3052 if !n.NonNil() {
3053
3054
3055
3056 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
3057 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
3058 ptr = s.ternary(cond, ptr, zerobase)
3059 }
3060 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
3061 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
3062 case ir.OCFUNC:
3063 n := n.(*ir.UnaryExpr)
3064 aux := n.X.(*ir.Name).Linksym()
3065
3066
3067 if aux.ABI() != obj.ABIInternal {
3068 s.Fatalf("expected ABIInternal: %v", aux.ABI())
3069 }
3070 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
3071 case ir.ONAME:
3072 n := n.(*ir.Name)
3073 if n.Class == ir.PFUNC {
3074
3075 sym := staticdata.FuncLinksym(n)
3076 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
3077 }
3078 if s.canSSA(n) {
3079 return s.variable(n, n.Type())
3080 }
3081 return s.load(n.Type(), s.addr(n))
3082 case ir.OLINKSYMOFFSET:
3083 n := n.(*ir.LinksymOffsetExpr)
3084 return s.load(n.Type(), s.addr(n))
3085 case ir.ONIL:
3086 n := n.(*ir.NilExpr)
3087 t := n.Type()
3088 switch {
3089 case t.IsSlice():
3090 return s.constSlice(t)
3091 case t.IsInterface():
3092 return s.constInterface(t)
3093 default:
3094 return s.constNil(t)
3095 }
3096 case ir.OLITERAL:
3097 switch u := n.Val(); u.Kind() {
3098 case constant.Int:
3099 i := ir.IntVal(n.Type(), u)
3100 switch n.Type().Size() {
3101 case 1:
3102 return s.constInt8(n.Type(), int8(i))
3103 case 2:
3104 return s.constInt16(n.Type(), int16(i))
3105 case 4:
3106 return s.constInt32(n.Type(), int32(i))
3107 case 8:
3108 return s.constInt64(n.Type(), i)
3109 default:
3110 s.Fatalf("bad integer size %d", n.Type().Size())
3111 return nil
3112 }
3113 case constant.String:
3114 i := constant.StringVal(u)
3115 if i == "" {
3116 return s.constEmptyString(n.Type())
3117 }
3118 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3119 case constant.Bool:
3120 return s.constBool(constant.BoolVal(u))
3121 case constant.Float:
3122 f, _ := constant.Float64Val(u)
3123 switch n.Type().Size() {
3124 case 4:
3125 return s.constFloat32(n.Type(), f)
3126 case 8:
3127 return s.constFloat64(n.Type(), f)
3128 default:
3129 s.Fatalf("bad float size %d", n.Type().Size())
3130 return nil
3131 }
3132 case constant.Complex:
3133 re, _ := constant.Float64Val(constant.Real(u))
3134 im, _ := constant.Float64Val(constant.Imag(u))
3135 switch n.Type().Size() {
3136 case 8:
3137 pt := types.Types[types.TFLOAT32]
3138 return s.newValue2(ssa.OpComplexMake, n.Type(),
3139 s.constFloat32(pt, re),
3140 s.constFloat32(pt, im))
3141 case 16:
3142 pt := types.Types[types.TFLOAT64]
3143 return s.newValue2(ssa.OpComplexMake, n.Type(),
3144 s.constFloat64(pt, re),
3145 s.constFloat64(pt, im))
3146 default:
3147 s.Fatalf("bad complex size %d", n.Type().Size())
3148 return nil
3149 }
3150 default:
3151 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3152 return nil
3153 }
3154 case ir.OCONVNOP:
3155 n := n.(*ir.ConvExpr)
3156 to := n.Type()
3157 from := n.X.Type()
3158
3159
3160
3161 x := s.expr(n.X)
3162 if to == from {
3163 return x
3164 }
3165
3166
3167
3168
3169
3170 if to.IsPtrShaped() != from.IsPtrShaped() {
3171 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3172 }
3173
3174 v := s.newValue1(ssa.OpCopy, to, x)
3175
3176
3177 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3178 return v
3179 }
3180
3181
3182 if from.Kind() == to.Kind() {
3183 return v
3184 }
3185
3186
3187 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3188 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3189 s.checkPtrAlignment(n, v, nil)
3190 }
3191 return v
3192 }
3193
3194
3195 mt := types.NewPtr(reflectdata.MapType())
3196 if to.Kind() == types.TMAP && from == mt {
3197 return v
3198 }
3199
3200 types.CalcSize(from)
3201 types.CalcSize(to)
3202 if from.Size() != to.Size() {
3203 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3204 return nil
3205 }
3206 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3207 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3208 return nil
3209 }
3210
3211 if base.Flag.Cfg.Instrumenting {
3212
3213
3214
3215 return v
3216 }
3217
3218 if etypesign(from.Kind()) == 0 {
3219 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3220 return nil
3221 }
3222
3223
3224 return v
3225
3226 case ir.OCONV:
3227 n := n.(*ir.ConvExpr)
3228 x := s.expr(n.X)
3229 return s.conv(n, x, n.X.Type(), n.Type())
3230
3231 case ir.ODOTTYPE:
3232 n := n.(*ir.TypeAssertExpr)
3233 res, _ := s.dottype(n, false)
3234 return res
3235
3236 case ir.ODYNAMICDOTTYPE:
3237 n := n.(*ir.DynamicTypeAssertExpr)
3238 res, _ := s.dynamicDottype(n, false)
3239 return res
3240
3241
3242 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3243 n := n.(*ir.BinaryExpr)
3244 a := s.expr(n.X)
3245 b := s.expr(n.Y)
3246 if n.X.Type().IsComplex() {
3247 pt := types.FloatForComplex(n.X.Type())
3248 op := s.ssaOp(ir.OEQ, pt)
3249 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3250 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3251 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3252 switch n.Op() {
3253 case ir.OEQ:
3254 return c
3255 case ir.ONE:
3256 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3257 default:
3258 s.Fatalf("ordered complex compare %v", n.Op())
3259 }
3260 }
3261
3262
3263 op := n.Op()
3264 switch op {
3265 case ir.OGE:
3266 op, a, b = ir.OLE, b, a
3267 case ir.OGT:
3268 op, a, b = ir.OLT, b, a
3269 }
3270 if n.X.Type().IsFloat() {
3271
3272 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3273 }
3274
3275 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3276 case ir.OMUL:
3277 n := n.(*ir.BinaryExpr)
3278 a := s.expr(n.X)
3279 b := s.expr(n.Y)
3280 if n.Type().IsComplex() {
3281 mulop := ssa.OpMul64F
3282 addop := ssa.OpAdd64F
3283 subop := ssa.OpSub64F
3284 pt := types.FloatForComplex(n.Type())
3285 wt := types.Types[types.TFLOAT64]
3286
3287 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3288 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3289 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3290 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3291
3292 if pt != wt {
3293 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3294 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3295 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3296 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3297 }
3298
3299 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3300 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3301
3302 if pt != wt {
3303 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3304 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3305 }
3306
3307 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3308 }
3309
3310 if n.Type().IsFloat() {
3311 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3312 }
3313
3314 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3315
3316 case ir.ODIV:
3317 n := n.(*ir.BinaryExpr)
3318 a := s.expr(n.X)
3319 b := s.expr(n.Y)
3320 if n.Type().IsComplex() {
3321
3322
3323
3324 mulop := ssa.OpMul64F
3325 addop := ssa.OpAdd64F
3326 subop := ssa.OpSub64F
3327 divop := ssa.OpDiv64F
3328 pt := types.FloatForComplex(n.Type())
3329 wt := types.Types[types.TFLOAT64]
3330
3331 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3332 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3333 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3334 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3335
3336 if pt != wt {
3337 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3338 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3339 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3340 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3341 }
3342
3343 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3344 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3345 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3346
3347
3348
3349
3350
3351 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3352 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3353
3354 if pt != wt {
3355 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3356 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3357 }
3358 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3359 }
3360 if n.Type().IsFloat() {
3361 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3362 }
3363 return s.intDivide(n, a, b)
3364 case ir.OMOD:
3365 n := n.(*ir.BinaryExpr)
3366 a := s.expr(n.X)
3367 b := s.expr(n.Y)
3368 return s.intDivide(n, a, b)
3369 case ir.OADD, ir.OSUB:
3370 n := n.(*ir.BinaryExpr)
3371 a := s.expr(n.X)
3372 b := s.expr(n.Y)
3373 if n.Type().IsComplex() {
3374 pt := types.FloatForComplex(n.Type())
3375 op := s.ssaOp(n.Op(), pt)
3376 return s.newValue2(ssa.OpComplexMake, n.Type(),
3377 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3378 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3379 }
3380 if n.Type().IsFloat() {
3381 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3382 }
3383 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3384 case ir.OAND, ir.OOR, ir.OXOR:
3385 n := n.(*ir.BinaryExpr)
3386 a := s.expr(n.X)
3387 b := s.expr(n.Y)
3388 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3389 case ir.OANDNOT:
3390 n := n.(*ir.BinaryExpr)
3391 a := s.expr(n.X)
3392 b := s.expr(n.Y)
3393 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3394 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3395 case ir.OLSH, ir.ORSH:
3396 n := n.(*ir.BinaryExpr)
3397 a := s.expr(n.X)
3398 b := s.expr(n.Y)
3399 bt := b.Type
3400 if bt.IsSigned() {
3401 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3402 s.check(cmp, ir.Syms.Panicshift)
3403 bt = bt.ToUnsigned()
3404 }
3405 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3406 case ir.OANDAND, ir.OOROR:
3407
3408
3409
3410
3411
3412
3413
3414
3415
3416
3417
3418
3419
3420 n := n.(*ir.LogicalExpr)
3421 el := s.expr(n.X)
3422 s.vars[n] = el
3423
3424 b := s.endBlock()
3425 b.Kind = ssa.BlockIf
3426 b.SetControl(el)
3427
3428
3429
3430
3431
3432 bRight := s.f.NewBlock(ssa.BlockPlain)
3433 bResult := s.f.NewBlock(ssa.BlockPlain)
3434 if n.Op() == ir.OANDAND {
3435 b.AddEdgeTo(bRight)
3436 b.AddEdgeTo(bResult)
3437 } else if n.Op() == ir.OOROR {
3438 b.AddEdgeTo(bResult)
3439 b.AddEdgeTo(bRight)
3440 }
3441
3442 s.startBlock(bRight)
3443 er := s.expr(n.Y)
3444 s.vars[n] = er
3445
3446 b = s.endBlock()
3447 b.AddEdgeTo(bResult)
3448
3449 s.startBlock(bResult)
3450 return s.variable(n, types.Types[types.TBOOL])
3451 case ir.OCOMPLEX:
3452 n := n.(*ir.BinaryExpr)
3453 r := s.expr(n.X)
3454 i := s.expr(n.Y)
3455 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3456
3457
3458 case ir.ONEG:
3459 n := n.(*ir.UnaryExpr)
3460 a := s.expr(n.X)
3461 if n.Type().IsComplex() {
3462 tp := types.FloatForComplex(n.Type())
3463 negop := s.ssaOp(n.Op(), tp)
3464 return s.newValue2(ssa.OpComplexMake, n.Type(),
3465 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3466 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3467 }
3468 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3469 case ir.ONOT, ir.OBITNOT:
3470 n := n.(*ir.UnaryExpr)
3471 a := s.expr(n.X)
3472 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3473 case ir.OIMAG, ir.OREAL:
3474 n := n.(*ir.UnaryExpr)
3475 a := s.expr(n.X)
3476 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3477 case ir.OPLUS:
3478 n := n.(*ir.UnaryExpr)
3479 return s.expr(n.X)
3480
3481 case ir.OADDR:
3482 n := n.(*ir.AddrExpr)
3483 return s.addr(n.X)
3484
3485 case ir.ORESULT:
3486 n := n.(*ir.ResultExpr)
3487 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3488 panic("Expected to see a previous call")
3489 }
3490 which := n.Index
3491 if which == -1 {
3492 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3493 }
3494 return s.resultOfCall(s.prevCall, which, n.Type())
3495
3496 case ir.ODEREF:
3497 n := n.(*ir.StarExpr)
3498 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3499 return s.load(n.Type(), p)
3500
3501 case ir.ODOT:
3502 n := n.(*ir.SelectorExpr)
3503 if n.X.Op() == ir.OSTRUCTLIT {
3504
3505
3506
3507 if !ir.IsZero(n.X) {
3508 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3509 }
3510 return s.zeroVal(n.Type())
3511 }
3512
3513
3514
3515
3516 if ir.IsAddressable(n) && !s.canSSA(n) {
3517 p := s.addr(n)
3518 return s.load(n.Type(), p)
3519 }
3520 v := s.expr(n.X)
3521 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3522
3523 case ir.ODOTPTR:
3524 n := n.(*ir.SelectorExpr)
3525 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3526 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3527 return s.load(n.Type(), p)
3528
3529 case ir.OINDEX:
3530 n := n.(*ir.IndexExpr)
3531 switch {
3532 case n.X.Type().IsString():
3533 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3534
3535
3536
3537 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3538 }
3539 a := s.expr(n.X)
3540 i := s.expr(n.Index)
3541 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3542 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3543 ptrtyp := s.f.Config.Types.BytePtr
3544 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3545 if ir.IsConst(n.Index, constant.Int) {
3546 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3547 } else {
3548 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3549 }
3550 return s.load(types.Types[types.TUINT8], ptr)
3551 case n.X.Type().IsSlice():
3552 p := s.addr(n)
3553 return s.load(n.X.Type().Elem(), p)
3554 case n.X.Type().IsArray():
3555 if ssa.CanSSA(n.X.Type()) {
3556
3557 bound := n.X.Type().NumElem()
3558 a := s.expr(n.X)
3559 i := s.expr(n.Index)
3560 if bound == 0 {
3561
3562
3563 z := s.constInt(types.Types[types.TINT], 0)
3564 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3565
3566
3567 return s.zeroVal(n.Type())
3568 }
3569 len := s.constInt(types.Types[types.TINT], bound)
3570 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3571 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3572 }
3573 p := s.addr(n)
3574 return s.load(n.X.Type().Elem(), p)
3575 default:
3576 s.Fatalf("bad type for index %v", n.X.Type())
3577 return nil
3578 }
3579
3580 case ir.OLEN, ir.OCAP:
3581 n := n.(*ir.UnaryExpr)
3582
3583
3584 a := s.expr(n.X)
3585 t := n.X.Type()
3586 switch {
3587 case t.IsSlice():
3588 op := ssa.OpSliceLen
3589 if n.Op() == ir.OCAP {
3590 op = ssa.OpSliceCap
3591 }
3592 return s.newValue1(op, types.Types[types.TINT], a)
3593 case t.IsString():
3594 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3595 case t.IsMap(), t.IsChan():
3596 return s.referenceTypeBuiltin(n, a)
3597 case t.IsArray():
3598 return s.constInt(types.Types[types.TINT], t.NumElem())
3599 case t.IsPtr() && t.Elem().IsArray():
3600 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3601 default:
3602 s.Fatalf("bad type in len/cap: %v", t)
3603 return nil
3604 }
3605
3606 case ir.OSPTR:
3607 n := n.(*ir.UnaryExpr)
3608 a := s.expr(n.X)
3609 if n.X.Type().IsSlice() {
3610 if n.Bounded() {
3611 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3612 }
3613 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3614 } else {
3615 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3616 }
3617
3618 case ir.OITAB:
3619 n := n.(*ir.UnaryExpr)
3620 a := s.expr(n.X)
3621 return s.newValue1(ssa.OpITab, n.Type(), a)
3622
3623 case ir.OIDATA:
3624 n := n.(*ir.UnaryExpr)
3625 a := s.expr(n.X)
3626 return s.newValue1(ssa.OpIData, n.Type(), a)
3627
3628 case ir.OMAKEFACE:
3629 n := n.(*ir.BinaryExpr)
3630 tab := s.expr(n.X)
3631 data := s.expr(n.Y)
3632 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3633
3634 case ir.OSLICEHEADER:
3635 n := n.(*ir.SliceHeaderExpr)
3636 p := s.expr(n.Ptr)
3637 l := s.expr(n.Len)
3638 c := s.expr(n.Cap)
3639 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3640
3641 case ir.OSTRINGHEADER:
3642 n := n.(*ir.StringHeaderExpr)
3643 p := s.expr(n.Ptr)
3644 l := s.expr(n.Len)
3645 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3646
3647 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3648 n := n.(*ir.SliceExpr)
3649 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3650 v := s.exprCheckPtr(n.X, !check)
3651 var i, j, k *ssa.Value
3652 if n.Low != nil {
3653 i = s.expr(n.Low)
3654 }
3655 if n.High != nil {
3656 j = s.expr(n.High)
3657 }
3658 if n.Max != nil {
3659 k = s.expr(n.Max)
3660 }
3661 p, l, c := s.slice(v, i, j, k, n.Bounded())
3662 if check {
3663
3664 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3665 }
3666 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3667
3668 case ir.OSLICESTR:
3669 n := n.(*ir.SliceExpr)
3670 v := s.expr(n.X)
3671 var i, j *ssa.Value
3672 if n.Low != nil {
3673 i = s.expr(n.Low)
3674 }
3675 if n.High != nil {
3676 j = s.expr(n.High)
3677 }
3678 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3679 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3680
3681 case ir.OSLICE2ARRPTR:
3682
3683
3684
3685
3686 n := n.(*ir.ConvExpr)
3687 v := s.expr(n.X)
3688 nelem := n.Type().Elem().NumElem()
3689 arrlen := s.constInt(types.Types[types.TINT], nelem)
3690 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3691 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3692 op := ssa.OpSlicePtr
3693 if nelem == 0 {
3694 op = ssa.OpSlicePtrUnchecked
3695 }
3696 return s.newValue1(op, n.Type(), v)
3697
3698 case ir.OCALLFUNC:
3699 n := n.(*ir.CallExpr)
3700 if ir.IsIntrinsicCall(n) {
3701 return s.intrinsicCall(n)
3702 }
3703 fallthrough
3704
3705 case ir.OCALLINTER:
3706 n := n.(*ir.CallExpr)
3707 return s.callResult(n, callNormal)
3708
3709 case ir.OGETG:
3710 n := n.(*ir.CallExpr)
3711 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3712
3713 case ir.OGETCALLERSP:
3714 n := n.(*ir.CallExpr)
3715 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3716
3717 case ir.OAPPEND:
3718 return s.append(n.(*ir.CallExpr), false)
3719
3720 case ir.OMOVE2HEAP:
3721 return s.move2heap(n.(*ir.MoveToHeapExpr))
3722
3723 case ir.OMIN, ir.OMAX:
3724 return s.minMax(n.(*ir.CallExpr))
3725
3726 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3727
3728
3729
3730 n := n.(*ir.CompLitExpr)
3731 if !ir.IsZero(n) {
3732 s.Fatalf("literal with nonzero value in SSA: %v", n)
3733 }
3734 return s.zeroVal(n.Type())
3735
3736 case ir.ONEW:
3737 n := n.(*ir.UnaryExpr)
3738 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3739 return s.newObjectNonSpecialized(n.Type().Elem(), s.expr(x.RType))
3740 }
3741 return s.newObject(n.Type().Elem())
3742
3743 case ir.OUNSAFEADD:
3744 n := n.(*ir.BinaryExpr)
3745 ptr := s.expr(n.X)
3746 len := s.expr(n.Y)
3747
3748
3749
3750 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3751
3752 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3753
3754 default:
3755 s.Fatalf("unhandled expr %v", n.Op())
3756 return nil
3757 }
3758 }
3759
3760 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3761 aux := c.Aux.(*ssa.AuxCall)
3762 pa := aux.ParamAssignmentForResult(which)
3763
3764
3765 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3766 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3767 return s.rawLoad(t, addr)
3768 }
3769 return s.newValue1I(ssa.OpSelectN, t, which, c)
3770 }
3771
3772 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3773 aux := c.Aux.(*ssa.AuxCall)
3774 pa := aux.ParamAssignmentForResult(which)
3775 if len(pa.Registers) == 0 {
3776 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3777 }
3778 _, addr := s.temp(c.Pos, t)
3779 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3780 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3781 return addr
3782 }
3783
3784
3785 func (s *state) getBackingStoreInfoForAppend(n *ir.CallExpr) *backingStoreInfo {
3786 if n.Esc() != ir.EscNone {
3787 return nil
3788 }
3789 return s.getBackingStoreInfo(n.Args[0])
3790 }
// getBackingStoreInfo returns (creating on first use, then caching in
// s.backingStores) the stack backing store usable for n's slice value,
// or nil if a stack backing store is not possible or not desired here.
func (s *state) getBackingStoreInfo(n ir.Node) *backingStoreInfo {
	t := n.Type()
	et := t.Elem()
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	if et.Size() == 0 || et.Size() > maxStackSize {
		// Zero-sized elements need no store; a single oversized element
		// would not fit in the stack budget.
		return nil
	}
	if base.Flag.N != 0 {
		// Optimizations are disabled (-N).
		return nil
	}
	if !base.VariableMakeHash.MatchPos(n.Pos(), nil) {
		// Debug hash excludes this position from the optimization.
		return nil
	}
	i := s.backingStores[n]
	if i != nil {
		// Reuse the backing store already created for this node.
		return i
	}

	// K = number of elements that fit in maxStackSize bytes.
	K := maxStackSize / et.Size()
	KT := types.NewArray(et, K)
	KT.SetNoalg(true)
	types.CalcArraySize(KT)
	// Prefix a zero-length uintptr array so the element array gets
	// uintptr alignment.
	align := types.NewArray(types.Types[types.TUINTPTR], 0)
	types.CalcArraySize(align)
	storeTyp := types.NewStruct([]*types.Field{
		{Sym: types.BlankSym, Type: align},
		{Sym: types.BlankSym, Type: KT},
	})
	storeTyp.SetNoalg(true)
	types.CalcStructSize(storeTyp)

	// The stack temporary that will serve as the backing store.
	backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
	backingStore.SetAddrtaken(true)

	// used tracks at run time whether the store has been handed out;
	// it starts out false at function entry.
	used := typecheck.TempAt(n.Pos(), s.curfn, types.Types[types.TBOOL])
	if s.curBlock == s.f.Entry {
		s.vars[used] = s.constBool(false)
	} else {
		// Not currently in the entry block: record the initialization
		// directly in the entry block's definitions.
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)
	}

	// Cache the info so later queries for the same node reuse it.
	if s.backingStores == nil {
		s.backingStores = map[ir.Node]*backingStoreInfo{}
	}
	i = &backingStoreInfo{K: K, store: backingStore, used: used, usedStatic: false}
	s.backingStores[n] = i
	return i
}
3845
3846
3847
3848
3849
3850
3851
3852
3853
// append converts an OAPPEND node n into SSA.
// If inplace is false, it returns the resulting slice as an ssa.Value.
// If inplace is true, it writes the result back through the address of
// n.Args[0] (updating cap and len in the slice header) and returns nil;
// the caller must not use the return value.
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to. For the in-place form we
	// also need its address so the header can be updated.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// grow handles the not-enough-capacity path; assign stores the new
	// elements once ptr/len/cap are settled.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the input slice.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newlen = len + number of appended elements.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow if cap < newlen. The compare is unsigned so an overflowed
	// (negative) new length also takes the grow path.
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record current values so the two paths can merge via phis.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// A non-escaping append may be able to use a stack backing store
	// instead of calling growslice.
	var info *backingStoreInfo
	if !inplace {
		info = s.getBackingStoreInfoForAppend(n)
	}

	if !inplace && info != nil && !n.UseBuf && !info.usedStatic {
		// Expand the grow path: before calling growslice, try to hand
		// out the stack backing store. That is possible only when the
		// new length fits (newlen <= K), the store has not been used
		// yet, and there are no old elements to copy (oldLen == 0).
		info.usedStatic = true

		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)
		tInt := types.Types[types.TINT]
		tBool := types.Types[types.TBOOL]

		// Does the new length fit in the backing store?
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, info.K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Has the backing store already been handed out?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(info.used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Are there existing elements that would need copying?
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Use the stack backing store: zero it and point the slice at it.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, info.store, s.mem())
		}
		addr := s.addr(info.store)
		s.zero(info.store.Type(), addr)

		// New slice: ptr = store, len = newlen, cap = K.
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, info.K)

		// Mark the backing store as handed out.
		s.assign(info.used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// The growslice call now lives in growSlice.
		grow = growSlice
	}

	// Slow path: call growslice (or a variant) in the grow block.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	var r []*ssa.Value
	if info != nil && n.UseBuf {
		if et.HasPointers() && !info.usedStatic {
			// Zero the backing store once in the entry block.
			// NOTE(review): presumably so its pointer slots are never
			// observed uninitialized — confirm against liveness/GC rules.
			mem := s.defvars[s.f.Entry.ID][memVar]
			mem = s.f.Entry.NewValue1A(n.Pos(), ssa.OpVarDef, types.TypeMem, info.store, mem)
			addr := s.f.Entry.NewValue2A(n.Pos(), ssa.OpLocalAddr, types.NewPtr(info.store.Type()), info.store, s.sp, mem)
			mem = s.f.Entry.NewValue2I(n.Pos(), ssa.OpZero, types.TypeMem, info.store.Type().Size(), addr, mem)
			mem.Aux = info.store.Type()
			s.defvars[s.f.Entry.ID][memVar] = mem
			info.usedStatic = true
		}
		fn := ir.Syms.GrowsliceBuf
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// NOTE(review): the NoAlias variant is chosen when the
			// frontend proved the old backing store is unaliased and
			// elements are pointer-free — confirm runtime semantics.
			fn = ir.Syms.GrowsliceBufNoAlias
		}
		// Pass the backing store and its capacity K so the runtime can
		// use it as the buffer.
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr, s.addr(info.store), s.constInt(types.Types[types.TINT], info.K))
	} else {
		fn := ir.Syms.Growslice
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// See NOTE above on the NoAlias variant.
			fn = ir.Syms.GrowsliceNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)
	}

	// Decompose the grown slice returned by the runtime.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// The whole slice variable is about to be overwritten;
				// tell liveness with a VarDef.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Write the new cap and ptr back into the slice header.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Merge point: ptr/len(/cap) arrive via phis from the fast path,
	// the stack-backing-store path, and the growslice path.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Write the new length into the slice header.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the appended arguments.
	type argRec struct {
		// If store is true, v is the value to store directly;
		// otherwise v is the address of the value to copy.
		v *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the arguments into slots starting at oldLen = newlen - nargs.
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// The temporary variables are no longer needed past this point.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// Build the result slice (nil for the in-place form, which already
	// wrote the result back through addr).
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
4167
// move2heap lowers an OMOVE2HEAP expression: if the slice's backing
// store lies within the current stack frame, call a runtime helper to
// copy it to the heap and return the updated slice; otherwise return
// the slice unchanged.
func (s *state) move2heap(n *ir.MoveToHeapExpr) *ssa.Value {
	slice := s.expr(n.Slice)
	et := slice.Type.Elem()
	pt := types.NewPtr(et)

	info := s.getBackingStoreInfo(n)
	if info == nil {
		// No stack backing store can have been used for this slice
		// (same conditions as when one would be created), so there is
		// nothing to move.
		return slice
	}

	// Decompose the input slice.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	moveBlock := s.f.NewBlock(ssa.BlockPlain)
	mergeBlock := s.f.NewBlock(ssa.BlockPlain)

	// Defaults for the no-move path; overwritten in moveBlock.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c

	// Test whether p points into the current frame:
	// sp <= p < callerSP, done as one unsigned compare
	// (p - sp) <u (callerSP - sp).
	sub := ssa.OpSub64
	less := ssa.OpLess64U
	if s.config.PtrSize == 4 {
		sub = ssa.OpSub32
		less = ssa.OpLess32U
	}
	callerSP := s.newValue1(ssa.OpGetCallerSP, types.Types[types.TUINTPTR], s.mem())
	frameSize := s.newValue2(sub, types.Types[types.TUINTPTR], callerSP, s.sp)
	pInt := s.newValue2(ssa.OpConvert, types.Types[types.TUINTPTR], p, s.mem())
	off := s.newValue2(sub, types.Types[types.TUINTPTR], pInt, s.sp)
	cond := s.newValue2(less, types.Types[types.TBOOL], off, frameSize)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cond)
	b.AddEdgeTo(moveBlock)
	b.AddEdgeTo(mergeBlock)

	// Move path: pick the runtime helper based on whether the elements
	// contain pointers and whether capacity must be preserved.
	s.startBlock(moveBlock)
	var newSlice *ssa.Value
	if et.HasPointers() {
		typ := s.expr(n.RType)
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSlice, true, []*types.Type{slice.Type}, typ, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCap, true, []*types.Type{slice.Type}, typ, p, l)[0]
		}
	} else {
		// Pointer-free elements only need the element size, not a type.
		elemSize := s.constInt(types.Types[types.TUINTPTR], et.Size())
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoScan, true, []*types.Type{slice.Type}, elemSize, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCapNoScan, true, []*types.Type{slice.Type}, elemSize, p, l)[0]
		}
	}

	s.vars[ptrVar] = s.newValue1(ssa.OpSlicePtr, pt, newSlice)
	s.vars[lenVar] = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], newSlice)
	s.vars[capVar] = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], newSlice)
	b = s.endBlock()
	b.AddEdgeTo(mergeBlock)

	// Merge the moved and unmoved paths and rebuild the slice.
	s.startBlock(mergeBlock)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	c = s.variable(capVar, types.Types[types.TINT])
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	delete(s.vars, capVar)
	return s.newValue3(ssa.OpSliceMake, slice.Type, p, l, c)
}
4256
4257
// minMax converts an OMIN/OMAX builtin call into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// Arguments are left-folded pairwise:
	// min(a, b, c) is computed as min(min(a, b), c).
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings bypass the generic compare-and-select
		// lowering below: floats need special NaN/signed-zero handling,
		// so they use a hardware min/max instruction where the target
		// has one and a runtime helper otherwise; strings always use a
		// runtime helper.
		if typ.IsFloat() {
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64, sys.S390X:
				hasIntrinsic = true
			case sys.PPC64:
				// Requires POWER9 or later.
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// Select the runtime helper for this type/op combination.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// riscv64 targets at rva22u64 or above (GORISCV64 >= 22) have
		// dedicated 64-bit integer min/max ops.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	lt := s.ssaOp(ir.OLT, typ)

	// Generic lowering: a branch-and-merge select on a compare.
	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
4371
4372
4373 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4374
4375
4376 ternaryVar := ssaMarker("ternary")
4377
4378 bThen := s.f.NewBlock(ssa.BlockPlain)
4379 bElse := s.f.NewBlock(ssa.BlockPlain)
4380 bEnd := s.f.NewBlock(ssa.BlockPlain)
4381
4382 b := s.endBlock()
4383 b.Kind = ssa.BlockIf
4384 b.SetControl(cond)
4385 b.AddEdgeTo(bThen)
4386 b.AddEdgeTo(bElse)
4387
4388 s.startBlock(bThen)
4389 s.vars[ternaryVar] = x
4390 s.endBlock().AddEdgeTo(bEnd)
4391
4392 s.startBlock(bElse)
4393 s.vars[ternaryVar] = y
4394 s.endBlock().AddEdgeTo(bEnd)
4395
4396 s.startBlock(bEnd)
4397 r := s.variable(ternaryVar, x.Type)
4398 delete(s.vars, ternaryVar)
4399 return r
4400 }
4401
4402
4403
4404
4405
4406 func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
4407 switch cond.Op() {
4408 case ir.OANDAND:
4409 cond := cond.(*ir.LogicalExpr)
4410 mid := s.f.NewBlock(ssa.BlockPlain)
4411 s.stmtList(cond.Init())
4412 s.condBranch(cond.X, mid, no, max(likely, 0))
4413 s.startBlock(mid)
4414 s.condBranch(cond.Y, yes, no, likely)
4415 return
4416
4417
4418
4419
4420
4421
4422 case ir.OOROR:
4423 cond := cond.(*ir.LogicalExpr)
4424 mid := s.f.NewBlock(ssa.BlockPlain)
4425 s.stmtList(cond.Init())
4426 s.condBranch(cond.X, yes, mid, min(likely, 0))
4427 s.startBlock(mid)
4428 s.condBranch(cond.Y, yes, no, likely)
4429 return
4430
4431
4432
4433 case ir.ONOT:
4434 cond := cond.(*ir.UnaryExpr)
4435 s.stmtList(cond.Init())
4436 s.condBranch(cond.X, no, yes, -likely)
4437 return
4438 case ir.OCONVNOP:
4439 cond := cond.(*ir.ConvExpr)
4440 s.stmtList(cond.Init())
4441 s.condBranch(cond.X, yes, no, likely)
4442 return
4443 }
4444 c := s.expr(cond)
4445 b := s.endBlock()
4446 b.Kind = ssa.BlockIf
4447 b.SetControl(c)
4448 b.Likely = ssa.BranchPrediction(likely)
4449 b.AddEdgeTo(yes)
4450 b.AddEdgeTo(no)
4451 }
4452
// A skipMask records which top-level parts of a slice value (pointer,
// length, capacity) do not need to be stored during an assignment.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota
	skipLen
	skipCap
)
4460
4461
4462
4463
4464
4465
4466
// assign does left = right.
// right has already been evaluated to SSA; left has not.
// If deref is true, right is an address and the assignment is
// left = *right (right == nil means left is zeroed instead).
// skip selects slice components (ptr/len/cap) whose top-level store
// may be omitted.
// Left and right are assumed not to overlap; use assignWhichMayOverlap
// directly when they might.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap is assign with an extra mayOverlap flag: when
// deref is true and mayOverlap is set, left and *right may partially
// overlap in memory, so the copy must be overlap-safe.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		// Assignment to _ discards the value.
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an SSA-able struct: rebuild the
			// whole struct value with that one field replaced, then
			// assign the new struct to the base expression.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Current value of the whole struct.
			old := s.expr(left.X)

			if left.Type().Size() == 0 {
				// Storing a zero-size field changes nothing.
				return
			}

			// New struct value under construction.
			new := s.newValue0(ssa.OpStructMake, t)

			// Copy every field, substituting right for field idx.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			// Assigning to an element of an SSA-able array.
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index)
			if n == 0 {
				// Indexing a zero-length array: the bounds check must
				// fail, so index with constant zeros.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				// NOTE(review): arrays longer than one element are not
				// handled here (the assignment is silently dropped);
				// presumably canSSA only admits arrays of length <= 1,
				// making this unreachable — confirm.
				return
			}
			if t.Size() == 0 {
				return
			}

			// Rewrite a[0] = v as a = [1]T{v}, after a bounds check on i.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)

		// Plain SSA-able variable: just update the variable map.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire on-stack variable, emit an
	// OpVarDef so liveness knows the variable is being redefined.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Left is not SSA-able: compute its address and store through it.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Storing to the Data field of reflect.SliceHeader or
		// reflect.StringHeader: store it as unsafe.Pointer so later
		// passes treat it as a pointer store.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Memory-to-memory move (or zeroing, when right is nil).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Ordinary store of an SSA value.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4595
4596
4597 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4598 if t.Size() == 0 {
4599 return s.entryNewValue0(ssa.OpEmpty, t)
4600 }
4601 switch {
4602 case t.IsInteger():
4603 switch t.Size() {
4604 case 1:
4605 return s.constInt8(t, 0)
4606 case 2:
4607 return s.constInt16(t, 0)
4608 case 4:
4609 return s.constInt32(t, 0)
4610 case 8:
4611 return s.constInt64(t, 0)
4612 default:
4613 s.Fatalf("bad sized integer type %v", t)
4614 }
4615 case t.IsFloat():
4616 switch t.Size() {
4617 case 4:
4618 return s.constFloat32(t, 0)
4619 case 8:
4620 return s.constFloat64(t, 0)
4621 default:
4622 s.Fatalf("bad sized float type %v", t)
4623 }
4624 case t.IsComplex():
4625 switch t.Size() {
4626 case 8:
4627 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4628 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4629 case 16:
4630 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4631 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4632 default:
4633 s.Fatalf("bad sized complex type %v", t)
4634 }
4635
4636 case t.IsString():
4637 return s.constEmptyString(t)
4638 case t.IsPtrShaped():
4639 return s.constNil(t)
4640 case t.IsBoolean():
4641 return s.constBool(false)
4642 case t.IsInterface():
4643 return s.constInterface(t)
4644 case t.IsSlice():
4645 return s.constSlice(t)
4646 case isStructNotSIMD(t):
4647 n := t.NumFields()
4648 v := s.entryNewValue0(ssa.OpStructMake, t)
4649 for i := 0; i < n; i++ {
4650 v.AddArg(s.zeroVal(t.FieldType(i)))
4651 }
4652 return v
4653 case t.IsArray() && t.NumElem() == 1:
4654 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4655 case t.IsSIMD():
4656 return s.newValue0(ssa.OpZeroSIMD, t)
4657 }
4658 s.Fatalf("zero for type %v not implemented", t)
4659 return nil
4660 }
4661
// A callKind distinguishes how a call expression is lowered: an
// ordinary call, the two defer variants, a go statement, or a tail call.
type callKind int8

const (
	callNormal callKind = iota // ordinary function call
	callDefer                  // defer statement
	callDeferStack             // defer statement, stack-allocated variant (inferred from name)
	callGo                     // go statement
	callTail                   // tail call
)
4671
// An sfRtCallDef describes the runtime call implementing one softfloat
// operation: the function's symbol and the kind of its result value.
type sfRtCallDef struct {
	rtfn *obj.LSym
	rtype types.Kind
}

// softFloatOps maps floating-point SSA ops to their softfloat runtime
// implementations. Populated by softfloatInit, which InitConfig calls
// when Arch.SoftFloat is set.
var softFloatOps map[ssa.Op]sfRtCallDef
4678
// softfloatInit registers the runtime helpers used to lower floating-point
// SSA ops on targets without hardware floating point.
//
// Several entries deliberately map to a "different" helper; sfcall
// compensates before/after the call:
//   - Sub maps to fadd: sfcall negates the second operand.
//   - Less/Leq map to fgt/fge: sfcall swaps the operands.
//   - Neq maps to feq: sfcall negates the boolean result.
func softfloatInit() {

	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		// Comparisons. Note the swapped/negated helpers described above.
		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		// Conversions between integer and floating-point representations.
		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4716
4717
4718
// sfcall lowers the floating-point op, if it has a registered softfloat
// helper, to a runtime call and reports true. Otherwise it does nothing and
// reports false. Float arguments and results cross the call boundary as
// same-size unsigned integers (bit patterns).
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the same-size unsigned integer type used to
	// carry its bits through the runtime call; other types pass unchanged.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// Less/Leq are implemented with the swapped-operand fgt/fge
			// helpers (see softfloatInit), so swap the arguments here.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// Sub is implemented as fadd of the negated second operand.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Convert float arguments to their integer bit-pattern types for
		// the call.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Convert the integer result back to the float type.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// Neq is computed as !Eq (see softfloatInit).
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4762
4763
4764 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4765 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4766 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4767 return p0, p1
4768 }
4769
4770
4771 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4772 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4773 if ssa.IntrinsicsDebug > 0 {
4774 x := v
4775 if x == nil {
4776 x = s.mem()
4777 }
4778 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4779 x = x.Args[0]
4780 }
4781 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4782 }
4783 return v
4784 }
4785
4786
4787 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4788 args := make([]*ssa.Value, len(n.Args))
4789 for i, n := range n.Args {
4790 args[i] = s.expr(n)
4791 }
4792 return args
4793 }
4794
4795
4796
4797
4798
4799
4800
// openDeferRecord records a defer that will be open-coded: the deferred
// function value is saved to a dedicated stack slot and the defer is
// appended to s.openDefers so openDeferExit can emit the call inline at
// function exit. Open-coded defers take no arguments and return no results.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun

	// Evaluate the function value now (at the defer statement) and store it
	// to a stack slot that stays live for the whole function; see
	// openDeferSave.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		// Only non-static functions need the saved closure at exit; a
		// directly-named function is called via its symbol instead.
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Mark this defer as active by setting its bit in deferBits. The bit is
	// updated both in the SSA variable (used by openDeferExit) and in the
	// in-memory copy (presumably consulted during panic unwinding — the
	// store to s.deferBitsAddr keeps the two in sync).
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4829
4830
4831
4832
4833
4834
// openDeferSave stores val (of SSA-able, pointer-containing type t) into a
// fresh autotmp stack slot dedicated to an open-coded defer, and returns the
// address of that slot. The VarDef/VarLive markers and the address
// computation are placed in the entry block, so the slot is considered live
// (and is zeroed) for the entire function regardless of where the defer
// statement appears.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// FrameOffset temporarily records the defer's index; TODO(review):
	// confirm this is reassigned during later frame layout.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// If we are not in the entry block, splice the VarDef/VarLive and the
	// address computation into the entry block's memory chain directly, so
	// the slot is live from function entry.
	if s.curBlock.ID != s.f.Entry.ID {
		// t.HasPointers() always holds here (checked above), so this guard
		// mirrors the else branch below.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Already in the entry block; emit the markers at the current
		// position through the normal value builders.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot must be zeroed at function entry so that, if a panic occurs
	// before the defer statement executes, stale pointers are not seen.
	temp.SetNeedzero(true)

	// Store the value at the current point (the defer statement itself).
	s.store(t, addrTemp, val)
	return addrTemp
}
4879
4880
4881
4882
4883
// openDeferExit generates the inline calls for all recorded open-coded
// defers at a function exit. Defers run in reverse order of registration;
// each call is guarded by its bit in deferBits. The generated block is
// remembered in s.lastDeferExit/s.lastDeferCount (presumably so later exits
// can reuse it — verify against the caller).
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Process defers last-registered first (LIFO, per the language spec).
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Skip the call if this defer's bit is not set:
		// if deferBits & (1<<i) == 0 goto bEnd.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit before making the call, so a panic during
		// the deferred call does not rerun it. Update both the in-memory
		// copy and the SSA variable.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		s.vars[deferBitsVar] = maskedval

		// Emit the call. Open-coded defers take no arguments, so the only
		// call input is memory (plus the closure pointer for closure calls).
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Dynamic function value: reload it from its stack slot, nil
			// check it, and call through the code pointer.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Statically-known function: call its symbol directly.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the closure slot alive across the call so liveness does not
		// reclaim it while the deferred function may still reference it.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4953
4954 func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
4955 return s.call(n, k, false, nil)
4956 }
4957
4958 func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
4959 return s.call(n, k, true, nil)
4960 }
4961
4962
4963
// call lowers function call n to SSA. k selects the call flavor (normal,
// go, defer, defer-with-stack-record, or tail). If returnResultAddr is
// true, the address of the first result is returned instead of its value.
// deferExtra, if non-nil, is an extra argument passed along to the
// deferprocat runtime entry point for deferred calls.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // symbol of the callee, for static calls
	var closure *ssa.Value   // function/closure value, for dynamic calls
	var codeptr *ssa.Value   // code pointer, for interface calls
	var dextra *ssa.Value    // evaluated deferExtra
	var rcvr *ssa.Value      // receiver, for interface calls
	fn := n.Fun
	var ACArgs []*types.Type  // argument types for the AuxCall
	var ACResults []*types.Type // result types for the AuxCall
	var callArgs []*ssa.Value // late-expansion call inputs

	callABI := s.f.ABIDefault

	// go/defer calls reaching here must have been rewritten to take no
	// arguments and return no results.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			// Direct call to a known function: resolve the target symbol
			// and determine its ABI.
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// With the register-args experiment, the callee's declared
				// ABI decides the calling convention. fn.Func can be nil
				// for some imported functions; in that case keep the
				// default ABI.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// Otherwise honor the //go:registerparams pragma, whether
				// the callee is imported or in the same package.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// Deferred nil function panics when the deferred call runs,
			// not at the defer statement, so skip the check here.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set (register) names */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a defer record on the stack and pass its address to
		// runtime.deferprocStack.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with pointer to _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize)
	} else {
		// Store arguments to stack, including defer/go arguments and
		// receiver for method calls.
		argStart := base.Ctxt.Arch.FixedFrameSize

		if k != callNormal && k != callTail {
			// deferproc/newproc take the function value as their first
			// argument; deferprocat additionally takes the extra arg.
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// The extra defer argument is an interface (two words).
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() {
			ACArgs = append(ACArgs, p.Type)
		}

		// For open-coded defers, the entry block must stay free of calls so
		// the defer slots' VarDef/VarLive markers (spliced into the entry
		// block by openDeferSave) come before any call; start a new block
		// if we are still in the entry block.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Build the call instruction.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// Closure call: load the code pointer from the first word of
			// the closure. rawLoad avoids a spurious nil check — if the
			// closure were nil we'd already have panicked (or, for defer,
			// will panic when the deferred call runs).
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call through the code pointer loaded from the itab.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call reuses the caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // callee's stack argument space
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
	// Keep address-taken variables listed in KeepAlive live across the call.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// For defer-like calls, end the block with a BlockDefer that also
	// branches to a shared deferreturn/exit block used during unwinding.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn // Share a single deferreturn exit block.
		if r == nil {
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // Add recover edge to exit code.
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// call has no return value. Continue with the next statement.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
5197
5198
5199
// maybeNilCheckClosure inserts a nil check of the closure value on
// architectures where hardware faults cannot be relied on to catch a nil
// function pointer.
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	// Note the precedence: && binds tighter than ||, so this checks on all
	// Wasm calls, and on AIX calls that are not `go` statements.
	// NOTE(review): on AIX the closure apparently must be verified because
	// fn can be nil, except for a `go` call — confirm against the AIX port.
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
		s.nilCheck(closure)
	}
}
5207
5208
5209
5210 func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
5211 i := s.expr(fn.X)
5212 itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
5213 s.nilCheck(itab)
5214 itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
5215 closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
5216 rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
5217 return closure, rcvr
5218 }
5219
5220
5221
5222 func etypesign(e types.Kind) int8 {
5223 switch e {
5224 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5225 return -1
5226 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5227 return +1
5228 }
5229 return 0
5230 }
5231
5232
5233
// addr converts the address of the expression n to SSA and returns it.
// The returned pointer is guaranteed non-nil (nil checks are inserted for
// dereferences along the way).
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		// Taking the address of an SSA-able value should not normally
		// happen; this returns the address of a harmless dummy symbol
		// (zerobase) rather than crashing. NOTE(review): presumably any
		// real use of this address has been rewritten away earlier —
		// confirm against the callers.
		return s.newValue1A(ssa.OpAddr, n.Type().PtrTo(), ir.Syms.Zerobase, s.sb)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset returns the address of offset bytes into lsym.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// Do an OffPtr only when needed, so the resulting type is correct.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Variable moved to the heap: its address is held in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO — cannot generate LEA early.
			// Taking the address of a result parameter makes it live.
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		// The assertion's result must come from a Load whose memory is
		// still current; return the address it loaded from.
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5353
5354
5355
5356 func (s *state) canSSA(n ir.Node) bool {
5357 if base.Flag.N != 0 {
5358 return false
5359 }
5360 for {
5361 nn := n
5362 if nn.Op() == ir.ODOT {
5363 nn := nn.(*ir.SelectorExpr)
5364 n = nn.X
5365 continue
5366 }
5367 if nn.Op() == ir.OINDEX {
5368 nn := nn.(*ir.IndexExpr)
5369 if nn.X.Type().IsArray() {
5370 n = nn.X
5371 continue
5372 }
5373 }
5374 break
5375 }
5376 if n.Op() != ir.ONAME {
5377 return false
5378 }
5379 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5380 }
5381
5382 func (s *state) canSSAName(name *ir.Name) bool {
5383 if name.Addrtaken() || !name.OnStack() {
5384 return false
5385 }
5386 switch name.Class {
5387 case ir.PPARAMOUT:
5388 if s.hasdefer {
5389
5390
5391
5392
5393
5394 return false
5395 }
5396 if s.cgoUnsafeArgs {
5397
5398
5399 return false
5400 }
5401 }
5402 return true
5403
5404 }
5405
5406
5407 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5408 p := s.expr(n)
5409 if bounded || n.NonNil() {
5410 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5411 s.f.Warnl(lineno, "removed nil check")
5412 }
5413 return p
5414 }
5415 p = s.nilCheck(p)
5416 return p
5417 }
5418
5419
5420
5421
5422
5423
5424 func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
5425 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
5426 return ptr
5427 }
5428 return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
5429 }
5430
5431
5432
5433
5434
5435
5436
// boundsCheck generates bounds checking code: it verifies 0 <= idx <[=] len
// and branches to a panic block otherwise. It returns the index, extended to
// the platform int width and, with -spectre=index, masked against
// speculative out-of-range execution.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// The caller (or -B) has established the index is in range, so no
		// check is emitted. Note the Spectre masking below is also skipped
		// in this case: a proven-in-range index needs no speculative mask.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Use the unsigned variant of the bounds kind so the panic message
		// reports the index correctly.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	var cmp *ssa.Value
	// Index checks require idx < len; slice bound checks allow idx == len.
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm has no PanicBounds op; call the runtime bounds-check
		// function directly (it does not return).
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply a hardening mask so that a
	// mis-speculated out-of-range index reads index 0 instead.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5523
5524
// check generates code that branches to a call of the (non-returning) panic
// function fn when cmp is false. Panic blocks are deduplicated per
// (function, position) via s.panics, so repeated checks at the same source
// line share one block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call itself terminates the block (rtcall with
		// returns=false ends it as a BlockExit).
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5547
5548 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5549 needcheck := true
5550 switch b.Op {
5551 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5552 if b.AuxInt != 0 {
5553 needcheck = false
5554 }
5555 }
5556 if needcheck {
5557
5558 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5559 s.check(cmp, ir.Syms.Panicdivide)
5560 }
5561 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5562 }
5563
5564
5565
5566
5567
// rtcall issues a call to the given runtime function fn with the listed
// args, using the default ABI. It returns one SSA value per requested result
// type. If returns is false the call does not return (e.g. a panic helper):
// the current block is terminated and nil is returned (results must be
// empty in that case).
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Write args to the stack.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	// off tracks the stack argument area size (kept for the call's AuxInt
	// even though late expansion passes the args as SSA values).
	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue call
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block; the callee never returns, so this is an exit block.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5619
5620
// storeType stores value right of type t to the address left, respecting
// write-barrier requirements. skip selects parts of a compound value to
// omit (e.g. a slice's len/cap).
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not have write barrier. Store the whole type.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// The store may need a write barrier: store all scalar fields first,
	// then all pointer fields, so that the pointer stores (which may become
	// write-barrier calls) are grouped and no pointer slot ever holds a
	// stale value while adjacent scalars are being written.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5640
5641
// storeTypeScalars stores the non-pointer parts of value right (of type t)
// to the address left; the pointer parts are handled by storeTypePtrs.
// Not-in-heap pointers count as scalars because they never need a write
// barrier.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex() || t.IsSIMD():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			s.store(t, left, right) // see issue 42032
		}
		// otherwise, no scalar fields.
	case t.IsString():
		// Only the length word is a scalar; the data pointer is stored by
		// storeTypePtrs.
		if skip&skipLen != 0 {
			return
		}
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// len and cap words; the data pointer is stored by storeTypePtrs.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case isStructNotSIMD(t):
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.Size() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5689
5690
// storeTypePtrs stores the pointer parts of value right (of type t) to the
// address left. These are the stores that may require write barriers; the
// scalar parts are handled by storeTypeScalars.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			break // see issue 42032
		}
		s.store(t, left, right)
	case t.IsString():
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// itab field is treated as a scalar (stored by storeTypeScalars);
		// only the data word needs a write barrier.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case isStructNotSIMD(t):
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.Size() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5729
5730
5731 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5732 var a *ssa.Value
5733 if !ssa.CanSSA(t) {
5734 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5735 } else {
5736 a = s.expr(n)
5737 }
5738 return a
5739 }
5740
5741
5742
5743
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of the
// result. v is a slice, string, or pointer to an array. Any of i, j, k may
// be nil, meaning their default value (0, len(v), cap(v) respectively).
// bounded suppresses the bounds checks.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Checks run outermost-first (k against cap, j against k, i against j)
	// so the panic reports the first out-of-range index. Checks against an
	// identical SSA value are skipped — they are trivially true.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Compute the new len and cap of the result:
	//   rlen = j - i
	//   rcap = k - i (but strings have no cap, and j == k means rcap == rlen)
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Compute the new base pointer:
	//   rptr = ptr + (i * elemsize) & Slicemask(rcap)
	// The Slicemask zeroes the offset when the result has capacity 0, so an
	// empty slice of an exhausted slice does not point one-past-the-end of
	// the backing array (which could keep an adjacent object alive or point
	// outside the original allocation).
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta = i * elemsize
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// Mask the offset to force it to zero when rcap == 0.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5856
// u642fcvtTab holds the opcodes needed to convert a uint64 to a float;
// there is one table per destination float width.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value // builds the integer constant 1
}
5861
// u64_f64 is the opcode table for uint64 -> float64 conversion.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}
5871
// u64_f32 is the opcode table for uint64 -> float32 conversion.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5881
// uint64Tofloat64 converts x (the uint64 value of expression n) to a float64.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}
5885
// uint64Tofloat32 converts x (the uint64 value of expression n) to a float32.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5889
// uint64Tofloat converts x, a uint64 (of type ft, seen by hardware as a
// signed value), to the float type tt using the opcodes in cvttab.
// Generated pseudocode:
//
//	if x >= 0 {                     // signed compare: high bit clear
//		result = floatY(x)      // direct conversion is exact
//	} else {
//		y = x & 1               // save low bit for round-to-odd
//		z = x >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result // scale back up by 2
//	}
//
// When the high bit is set, a direct signed conversion would treat x as
// negative, so we halve the value (folding the low bit back in so rounding
// is unaffected), convert, and double the result.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x >= 0 (as a signed value): direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// High bit set: halve, convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5949
// u322fcvtTab holds the opcodes needed to convert a uint32 to a float;
// there is one table per destination float width.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}
5953
// u32_f64 is the opcode table for uint32 -> float64 conversion.
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy, // result is already float64
}
5958
// u32_f32 is the opcode table for uint32 -> float32 conversion.
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F, // narrow the float64 intermediate
}
5963
// uint32Tofloat64 converts x (the uint32 value of expression n) to a float64.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}
5967
// uint32Tofloat32 converts x (the uint32 value of expression n) to a float32.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5971
// uint32Tofloat converts x, a uint32 (of type ft, seen by hardware as a
// signed value), to the float type tt. Generated pseudocode:
//
//	if x >= 0 {                 // signed compare: high bit clear
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1<<32)) // undo sign extension
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x >= 0 (as a signed value): direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// High bit set: the signed conversion produced x - 2^32,
	// so add 2^32 in float64 and narrow if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6009
6010
// referenceTypeBuiltin generates code for the len/cap builtins applied to
// map or channel operands. Given the guards below (chan len/cap and map
// cap are rejected with Fatalf), only len(map) can currently reach the
// code generation; the chan-style load paths are kept for the remaining
// shape of the function. Generated pseudocode:
//
//	if n == nil {
//		return 0
//	} else {
//		return *((*int)n)   // count stored in the header
//	}
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)") // must be lowered elsewhere
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)") // must be lowered elsewhere
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)") // cap is not defined on maps
	}

	// Branch on nilness of the map/chan header pointer.
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if n.X.Type().IsMap() {
			// The count is the first field of the map header.
			loadType := reflectdata.MapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// Length is stored in the first word.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// Capacity is stored in the second word.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
6077
// f2uCvtTab holds the opcodes and constants needed to convert a float to
// an unsigned integer; one table per (float width, int width) pair.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue func(*state, *types.Type, float64) *ssa.Value // builds a float constant
	intValue   func(*state, *types.Type, int64) *ssa.Value   // builds an int constant of the target width
	cutoff     uint64                                        // 1 << (target int width - 1)
}
6084
// f32_u64 is the opcode table for float32 -> uint64 conversion.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}
6094
// f64_u64 is the opcode table for float64 -> uint64 conversion.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}
6104
// f32_u32 is the opcode table for float32 -> uint32 conversion.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
6114
// f64_u32 is the opcode table for float64 -> uint32 conversion.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
6124
// float32ToUint64 converts x (the float32 value of expression n) to a uint64.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}
// float64ToUint64 converts x (the float64 value of expression n) to a uint64.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}
6131
// float32ToUint32 converts x (the float32 value of expression n) to a uint32.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}
6135
// float64ToUint32 converts x (the float64 value of expression n) to a uint32.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
6139
// floatToUint converts x, a float of type ft, to an unsigned integer of
// type tt using the opcodes and cutoff in cvttab. Generated pseudocode:
//
//	cutoff := 1 << (intY_Size - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)
//		if x < 0 { result = 0 } // only under the "U" convert-hash gate
//	} else {
//		y = x - floatX(cutoff)
//		z = uintY(y)
//		result = z | cutoff     // set the top bit back
//	}
//
// Values at or above cutoff don't fit a signed conversion, so we subtract
// cutoff first, convert, then OR the top bit back in.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	var bThen, bZero *ssa.Block
	// The convert debug hash gates the newer behavior of clamping
	// negative inputs to zero (see the bZero block below).
	newConversion := base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil)
	if newConversion {
		bZero = s.f.NewBlock(ssa.BlockPlain)
		bThen = s.f.NewBlock(ssa.BlockIf)
	} else {
		bThen = s.f.NewBlock(ssa.BlockPlain)
	}

	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x < cutoff: a plain signed conversion suffices.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValueOrSfCall1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0

	if newConversion {
		// Additionally test x < 0 and force the result to zero.
		cmpz := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cvttab.floatValue(s, ft, 0.0))
		s.endBlock()
		bThen.SetControl(cmpz)
		bThen.AddEdgeTo(bZero)
		bThen.Likely = ssa.BranchUnlikely
		bThen.AddEdgeTo(bAfter)

		s.startBlock(bZero)
		s.vars[n] = cvttab.intValue(s, tt, 0)
		s.endBlock()
		bZero.AddEdgeTo(bAfter)
	} else {
		s.endBlock()
		bThen.AddEdgeTo(bAfter)
	}

	// x >= cutoff: subtract cutoff, convert, then OR the top bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValueOrSfCall2(cvttab.subf, ft, x, cutoff)
	y = s.newValueOrSfCall1(cvttab.cvt2U, tt, y)
	// int64(-cvttab.cutoff) is the target-width bit pattern with only
	// the top bit set (e.g. 1<<63 for uint64, 1<<31 for uint32).
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6209
6210
6211
6212
// dottype generates SSA for a type assertion node n (x.(T)).
// commaok indicates whether to panic or return a bool.
// If commaok is false, resok will be nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}

	if n.UseNilPanic {
		// UseNilPanic requires a panicking (non-comma-ok) assertion
		// to a concrete type; anything else is a compiler bug.
		if commaok {
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && commaok == true")
		}
		if n.Type().IsInterface() {
			// Currently we do not expect the compiler to emit type asserts
			// with UseNilPanic for interface targets.
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && Type().IsInterface() == true")
		}
		typs := s.f.Config.Types
		// Rebuild iface with a nil-checked type word so a nil interface
		// fails here rather than in the assertion logic below.
		iface = s.newValue2(
			ssa.OpIMake,
			iface.Type,
			s.nilCheck(s.newValue1(ssa.OpITab, typs.BytePtr, iface)),
			s.newValue1(ssa.OpIData, typs.BytePtr, iface),
		)
	}

	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
6241
// dynamicDottype generates SSA for a dynamic type assertion node, where
// the asserted type is not known until run time.
// commaok indicates whether to panic or return a bool.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		// Nonempty interface asserted to a concrete type: the itab is the
		// dynamic input; derive the target type by loading itab.Type.
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
6259
6260
6261
6262
6263
6264
6265
6266
6267
6268 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
6269 typs := s.f.Config.Types
6270 byteptr := typs.BytePtr
6271 if dst.IsInterface() {
6272 if dst.IsEmptyInterface() {
6273
6274
6275 if base.Debug.TypeAssert > 0 {
6276 base.WarnfAt(pos, "type assertion inlined")
6277 }
6278
6279
6280 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6281
6282 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6283
6284 if src.IsEmptyInterface() && commaok {
6285
6286 return iface, cond
6287 }
6288
6289
6290 b := s.endBlock()
6291 b.Kind = ssa.BlockIf
6292 b.SetControl(cond)
6293 b.Likely = ssa.BranchLikely
6294 bOk := s.f.NewBlock(ssa.BlockPlain)
6295 bFail := s.f.NewBlock(ssa.BlockPlain)
6296 b.AddEdgeTo(bOk)
6297 b.AddEdgeTo(bFail)
6298
6299 if !commaok {
6300
6301 s.startBlock(bFail)
6302 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6303
6304
6305 s.startBlock(bOk)
6306 if src.IsEmptyInterface() {
6307 res = iface
6308 return
6309 }
6310
6311 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6312 typ := s.load(byteptr, off)
6313 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6314 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6315 return
6316 }
6317
6318 s.startBlock(bOk)
6319
6320
6321 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6322 s.vars[typVar] = s.load(byteptr, off)
6323 s.endBlock()
6324
6325
6326 s.startBlock(bFail)
6327 s.vars[typVar] = itab
6328 s.endBlock()
6329
6330
6331 bEnd := s.f.NewBlock(ssa.BlockPlain)
6332 bOk.AddEdgeTo(bEnd)
6333 bFail.AddEdgeTo(bEnd)
6334 s.startBlock(bEnd)
6335 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6336 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6337 resok = cond
6338 delete(s.vars, typVar)
6339 return
6340 }
6341
6342 if base.Debug.TypeAssert > 0 {
6343 base.WarnfAt(pos, "type assertion not inlined")
6344 }
6345
6346 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6347 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6348
6349
6350 bNil := s.f.NewBlock(ssa.BlockPlain)
6351 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6352 bMerge := s.f.NewBlock(ssa.BlockPlain)
6353 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6354 b := s.endBlock()
6355 b.Kind = ssa.BlockIf
6356 b.SetControl(cond)
6357 b.Likely = ssa.BranchLikely
6358 b.AddEdgeTo(bNonNil)
6359 b.AddEdgeTo(bNil)
6360
6361 s.startBlock(bNil)
6362 if commaok {
6363 s.vars[typVar] = itab
6364 b := s.endBlock()
6365 b.AddEdgeTo(bMerge)
6366 } else {
6367
6368 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6369 }
6370
6371
6372 s.startBlock(bNonNil)
6373 typ := itab
6374 if !src.IsEmptyInterface() {
6375 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6376 }
6377
6378
6379 var d *ssa.Value
6380 if descriptor != nil {
6381 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6382 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
6383
6384
6385 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
6386 s.Fatalf("atomic load not available")
6387 }
6388
6389 var mul, and, add, zext ssa.Op
6390 if s.config.PtrSize == 4 {
6391 mul = ssa.OpMul32
6392 and = ssa.OpAnd32
6393 add = ssa.OpAdd32
6394 zext = ssa.OpCopy
6395 } else {
6396 mul = ssa.OpMul64
6397 and = ssa.OpAnd64
6398 add = ssa.OpAdd64
6399 zext = ssa.OpZeroExt32to64
6400 }
6401
6402 loopHead := s.f.NewBlock(ssa.BlockPlain)
6403 loopBody := s.f.NewBlock(ssa.BlockPlain)
6404 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6405 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6406
6407
6408
6409 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6410 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6411 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6412
6413
6414 var hash *ssa.Value
6415 if src.IsEmptyInterface() {
6416 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6417 } else {
6418 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6419 }
6420 hash = s.newValue1(zext, typs.Uintptr, hash)
6421 s.vars[hashVar] = hash
6422
6423 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6424
6425 b := s.endBlock()
6426 b.AddEdgeTo(loopHead)
6427
6428
6429
6430 s.startBlock(loopHead)
6431 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6432 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6433 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6434 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6435
6436 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6437
6438
6439
6440 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6441 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6442 b = s.endBlock()
6443 b.Kind = ssa.BlockIf
6444 b.SetControl(cmp1)
6445 b.AddEdgeTo(cacheHit)
6446 b.AddEdgeTo(loopBody)
6447
6448
6449
6450 s.startBlock(loopBody)
6451 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6452 b = s.endBlock()
6453 b.Kind = ssa.BlockIf
6454 b.SetControl(cmp2)
6455 b.AddEdgeTo(cacheMiss)
6456 b.AddEdgeTo(loopHead)
6457
6458
6459
6460 s.startBlock(cacheHit)
6461 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6462 s.vars[typVar] = eItab
6463 b = s.endBlock()
6464 b.AddEdgeTo(bMerge)
6465
6466
6467 s.startBlock(cacheMiss)
6468 }
6469 }
6470
6471
6472 if descriptor != nil {
6473 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6474 } else {
6475 var fn *obj.LSym
6476 if commaok {
6477 fn = ir.Syms.AssertE2I2
6478 } else {
6479 fn = ir.Syms.AssertE2I
6480 }
6481 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6482 }
6483 s.vars[typVar] = itab
6484 b = s.endBlock()
6485 b.AddEdgeTo(bMerge)
6486
6487
6488 s.startBlock(bMerge)
6489 itab = s.variable(typVar, byteptr)
6490 var ok *ssa.Value
6491 if commaok {
6492 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6493 }
6494 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6495 }
6496
6497 if base.Debug.TypeAssert > 0 {
6498 base.WarnfAt(pos, "type assertion inlined")
6499 }
6500
6501
6502 direct := types.IsDirectIface(dst)
6503 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6504 if base.Debug.TypeAssert > 0 {
6505 base.WarnfAt(pos, "type assertion inlined")
6506 }
6507 var wantedFirstWord *ssa.Value
6508 if src.IsEmptyInterface() {
6509
6510 wantedFirstWord = target
6511 } else {
6512
6513 wantedFirstWord = targetItab
6514 }
6515
6516 var tmp ir.Node
6517 var addr *ssa.Value
6518 if commaok && !ssa.CanSSA(dst) {
6519
6520
6521 tmp, addr = s.temp(pos, dst)
6522 }
6523
6524 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6525 b := s.endBlock()
6526 b.Kind = ssa.BlockIf
6527 b.SetControl(cond)
6528 b.Likely = ssa.BranchLikely
6529
6530 bOk := s.f.NewBlock(ssa.BlockPlain)
6531 bFail := s.f.NewBlock(ssa.BlockPlain)
6532 b.AddEdgeTo(bOk)
6533 b.AddEdgeTo(bFail)
6534
6535 if !commaok {
6536
6537 s.startBlock(bFail)
6538 taddr := source
6539 if taddr == nil {
6540 taddr = s.reflectType(src)
6541 }
6542 if src.IsEmptyInterface() {
6543 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6544 } else {
6545 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6546 }
6547
6548
6549 s.startBlock(bOk)
6550 if direct {
6551 return s.newValue1(ssa.OpIData, dst, iface), nil
6552 }
6553 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6554 return s.load(dst, p), nil
6555 }
6556
6557
6558
6559 bEnd := s.f.NewBlock(ssa.BlockPlain)
6560
6561
6562 valVar := ssaMarker("val")
6563
6564
6565 s.startBlock(bOk)
6566 if tmp == nil {
6567 if direct {
6568 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6569 } else {
6570 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6571 s.vars[valVar] = s.load(dst, p)
6572 }
6573 } else {
6574 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6575 s.move(dst, addr, p)
6576 }
6577 s.vars[okVar] = s.constBool(true)
6578 s.endBlock()
6579 bOk.AddEdgeTo(bEnd)
6580
6581
6582 s.startBlock(bFail)
6583 if tmp == nil {
6584 s.vars[valVar] = s.zeroVal(dst)
6585 } else {
6586 s.zero(dst, addr)
6587 }
6588 s.vars[okVar] = s.constBool(false)
6589 s.endBlock()
6590 bFail.AddEdgeTo(bEnd)
6591
6592
6593 s.startBlock(bEnd)
6594 if tmp == nil {
6595 res = s.variable(valVar, dst)
6596 delete(s.vars, valVar)
6597 } else {
6598 res = s.load(dst, addr)
6599 }
6600 resok = s.variable(okVar, types.Types[types.TBOOL])
6601 delete(s.vars, okVar)
6602 return res, resok
6603 }
6604
6605
// temp allocates a temp of type t at position pos, returning the temp
// and the address of the temp.
func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
	tmp := typecheck.TempAt(pos, s.curfn, t)
	if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
		// Mark the start of the temp's lifetime for liveness/merging.
		s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
	}
	addr := s.addr(tmp)
	return tmp, addr
}
6614
6615
// variable returns the value of the variable n in the current block.
func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
	v := s.vars[n]
	if v != nil {
		return v
	}
	v = s.fwdVars[n]
	if v != nil {
		return v
	}

	if s.curBlock == s.f.Entry {
		// No variable should be live at entry.
		s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
	}
	// Make a FwdRef, which records a value that's live on block input.
	// It will be replaced with the real definition when phis are inserted.
	v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
	s.fwdVars[n] = v
	if n.Op() == ir.ONAME {
		s.addNamedValue(n.(*ir.Name), v)
	}
	return v
}
6639
// mem returns the current memory pseudo-value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6643
6644 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6645 if n.Class == ir.Pxxx {
6646
6647 return
6648 }
6649 if ir.IsAutoTmp(n) {
6650
6651 return
6652 }
6653 if n.Class == ir.PPARAMOUT {
6654
6655
6656 return
6657 }
6658 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6659 values, ok := s.f.NamedValues[loc]
6660 if !ok {
6661 s.f.Names = append(s.f.Names, &loc)
6662 s.f.CanonicalLocalSlots[loc] = &loc
6663 }
6664 s.f.NamedValues[loc] = append(values, v)
6665 }
6666
6667
// Branch is an unresolved branch: a branch instruction together with the
// SSA block it should eventually jump to.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6672
6673
// State contains state shared by the architecture back ends while
// converting a function's SSA form into machine instructions (Progs).
type State struct {
	// ABI is the ABI of the function being compiled.
	ABI obj.ABI

	pp *objw.Progs // instruction list under construction

	// Branches remembers all the branch instructions we've seen
	// and the blocks they would like to go to, for later patching
	// once block addresses are known.
	Branches []Branch

	// JumpTables remembers all the jump-table blocks we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	// maxarg tracks the largest frame size needed for call arguments.
	// NOTE(review): set/used outside this chunk — confirm at use sites.
	maxarg int64

	// livenessMap maps values to their stack-map index, as produced by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs records arguments that may be partially live,
	// as reported by liveness analysis.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart is the first instruction of the current run of
	// instructions sharing a source line; statement marks are floated
	// up to it (see State.Prog).
	lineRunStart *obj.Prog

	// OnWasmStackSkipped counts values kept on the WebAssembly stack
	// (wasm back end only).
	OnWasmStackSkipped int
}
6707
// FuncInfo returns the FuncInfo for the function being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6711
6712
// Prog appends a new Prog of kind as, maintaining the invariant that
// within a run of instructions on the same source line, only the first
// carries the statement mark.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float the statement mark up to the first instruction of a same-line
	// run, demoting later instructions on the line to not-statement.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6728
6729
// Pc returns the Prog that will be emitted next.
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6733
6734
// SetPos sets the current source position for subsequently emitted Progs.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6738
6739
6740
6741
6742 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
6743 p := s.Prog(op)
6744 p.To.Type = obj.TYPE_BRANCH
6745 s.Branches = append(s.Branches, Branch{P: p, B: target})
6746 return p
6747 }
6748
6749
6750
6751
6752
6753
// DebugFriendlySetPosFrom sets the position from v, applying heuristics
// that avoid attaching statement marks to instructions that do not
// correspond to user-visible statements (register shuffling, phis, etc.).
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These model data flow, not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// The current position is already a statement on the
					// same file and line; keep it so the statement mark
					// stays on the first instruction of the line run.
					return
				}
				// Demote default-stmt positions to not-statement.
				p = p.WithNotStmt()
			}
			s.SetPos(p)
		} else {
			// No position on v: reuse the current one, unmarked.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6792
6793
// emitArgInfo emits the FUNCDATA symbol describing f's argument layout
// for traceback, unless the function has no receiver and no parameters.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a FUNCDATA instruction pointing at the arg-info symbol.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6811
6812
// EmitArgInfo emits the argument-info symbol for f: a byte-encoded
// description of the frame offsets and sizes of f's input parameters,
// bounded by the rtabi.TraceArgs* limits. The encoding is consumed by the
// runtime's traceback printer, so it must stay in sync with that reader.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is encoded as a bracketed aggregate.
	isAggregate := func(t *types.Type) bool {
		return isStructNotSIMD(t) || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into x's data
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// write1 writes one non-aggregate component as (offset, size), or the
	// offset-too-large marker when offset doesn't fit a byte.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType writes the encoding of t at baseOffset, recursing into
	// aggregates up to TraceArgsMaxDepth. It returns false once the
	// component limit is hit, signaling callers to stop.
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words (ptr, len, cap).
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Two float halves.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty aggregate still counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case isStructNotSIMD(t):
			if t.NumFields() == 0 {
				n++ // an empty aggregate still counts as a component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Instantiated generic function: skip the implicit
		// dictionary argument, which is not user-visible.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6916
6917
// emitWrappedFuncInfo emits the FUNCDATA symbol recording which function
// e.curfn wraps, if any.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Not emitted in linkshared mode.
		// NOTE(review): presumably because the wrapped symbol may live in
		// a different shared object — confirm.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return // not a wrapper
	}

	// Build (or reuse) a content-addressable symbol pointing at the
	// wrapped function.
	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a FUNCDATA instruction pointing at the wrap-info symbol.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6944
6945
6946 func genssa(f *ssa.Func, pp *objw.Progs) {
6947 var s State
6948 s.ABI = f.OwnAux.Fn.ABI()
6949
6950 e := f.Frontend().(*ssafn)
6951
6952 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6953
6954 var lv *liveness.Liveness
6955 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6956 emitArgInfo(e, f, pp)
6957 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6958
6959 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6960 if openDeferInfo != nil {
6961
6962
6963 p := pp.Prog(obj.AFUNCDATA)
6964 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6965 p.To.Type = obj.TYPE_MEM
6966 p.To.Name = obj.NAME_EXTERN
6967 p.To.Sym = openDeferInfo
6968 }
6969
6970 emitWrappedFuncInfo(e, pp)
6971
6972
6973 s.bstart = make([]*obj.Prog, f.NumBlocks())
6974 s.pp = pp
6975 var progToValue map[*obj.Prog]*ssa.Value
6976 var progToBlock map[*obj.Prog]*ssa.Block
6977 var valueToProgAfter []*obj.Prog
6978 if gatherPrintInfo {
6979 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6980 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6981 f.Logf("genssa %s\n", f.Name)
6982 progToBlock[s.pp.Next] = f.Blocks[0]
6983 }
6984
6985 if base.Ctxt.Flag_locationlists {
6986 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6987 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
6988 }
6989 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
6990 clear(valueToProgAfter)
6991 }
6992
6993
6994
6995 firstPos := src.NoXPos
6996 for _, v := range f.Entry.Values {
6997 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6998 firstPos = v.Pos
6999 v.Pos = firstPos.WithDefaultStmt()
7000 break
7001 }
7002 }
7003
7004
7005
7006
7007 var inlMarks map[*obj.Prog]int32
7008 var inlMarkList []*obj.Prog
7009
7010
7011
7012 var inlMarksByPos map[src.XPos][]*obj.Prog
7013
7014 var argLiveIdx int = -1
7015
7016
7017
7018
7019
7020 var hotAlign, hotRequire int64
7021
7022 if base.Debug.AlignHot > 0 {
7023 switch base.Ctxt.Arch.Name {
7024
7025
7026
7027
7028
7029 case "amd64", "386":
7030
7031
7032
7033 hotAlign = 64
7034 hotRequire = 31
7035 }
7036 }
7037
7038
7039 for i, b := range f.Blocks {
7040
7041 s.lineRunStart = nil
7042 s.SetPos(s.pp.Pos.WithNotStmt())
7043
7044 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
7045
7046
7047
7048
7049
7050 p := s.pp.Prog(obj.APCALIGNMAX)
7051 p.From.SetConst(hotAlign)
7052 p.To.SetConst(hotRequire)
7053 }
7054
7055 s.bstart[b.ID] = s.pp.Next
7056
7057 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7058 argLiveIdx = idx
7059 p := s.pp.Prog(obj.APCDATA)
7060 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7061 p.To.SetConst(int64(idx))
7062 }
7063
7064
7065 Arch.SSAMarkMoves(&s, b)
7066 for _, v := range b.Values {
7067 x := s.pp.Next
7068 s.DebugFriendlySetPosFrom(v)
7069
7070 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7071 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7072 }
7073
7074 switch v.Op {
7075 case ssa.OpInitMem:
7076
7077 case ssa.OpArg:
7078
7079 case ssa.OpSP, ssa.OpSB:
7080
7081 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7082
7083 case ssa.OpGetG:
7084
7085
7086 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7087
7088 case ssa.OpPhi:
7089 CheckLoweredPhi(v)
7090 case ssa.OpConvert:
7091
7092 if v.Args[0].Reg() != v.Reg() {
7093 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7094 }
7095 case ssa.OpInlMark:
7096 p := Arch.Ginsnop(s.pp)
7097 if inlMarks == nil {
7098 inlMarks = map[*obj.Prog]int32{}
7099 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7100 }
7101 inlMarks[p] = v.AuxInt32()
7102 inlMarkList = append(inlMarkList, p)
7103 pos := v.Pos.AtColumn1()
7104 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7105 firstPos = src.NoXPos
7106
7107 default:
7108
7109 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7110 s.SetPos(firstPos)
7111 firstPos = src.NoXPos
7112 }
7113
7114
7115 s.pp.NextLive = s.livenessMap.Get(v)
7116 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7117
7118
7119 Arch.SSAGenValue(&s, v)
7120 }
7121
7122 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7123 argLiveIdx = idx
7124 p := s.pp.Prog(obj.APCDATA)
7125 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7126 p.To.SetConst(int64(idx))
7127 }
7128
7129 if base.Ctxt.Flag_locationlists {
7130 valueToProgAfter[v.ID] = s.pp.Next
7131 }
7132
7133 if gatherPrintInfo {
7134 for ; x != s.pp.Next; x = x.Link {
7135 progToValue[x] = v
7136 }
7137 }
7138 }
7139
7140 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7141 p := Arch.Ginsnop(s.pp)
7142 p.Pos = p.Pos.WithIsStmt()
7143 if b.Pos == src.NoXPos {
7144 b.Pos = p.Pos
7145 if b.Pos == src.NoXPos {
7146 b.Pos = s.pp.Text.Pos
7147 }
7148 }
7149 b.Pos = b.Pos.WithBogusLine()
7150 }
7151
7152
7153
7154
7155
7156 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7157
7158
7159 var next *ssa.Block
7160 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
7161
7162
7163
7164
7165 next = f.Blocks[i+1]
7166 }
7167 x := s.pp.Next
7168 s.SetPos(b.Pos)
7169 Arch.SSAGenBlock(&s, b, next)
7170 if gatherPrintInfo {
7171 for ; x != s.pp.Next; x = x.Link {
7172 progToBlock[x] = b
7173 }
7174 }
7175 }
7176 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
7177
7178
7179
7180
7181 Arch.Ginsnop(s.pp)
7182 }
7183 if openDeferInfo != nil {
7184
7185
7186
7187
7188
7189
7190
7191
7192 s.pp.NextLive = s.livenessMap.DeferReturn
7193 p := s.pp.Prog(obj.ACALL)
7194 p.To.Type = obj.TYPE_MEM
7195 p.To.Name = obj.NAME_EXTERN
7196 p.To.Sym = ir.Syms.Deferreturn
7197
7198
7199
7200
7201
7202 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7203 n := o.Name
7204 rts, offs := o.RegisterTypesAndOffsets()
7205 for i := range o.Registers {
7206 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7207 }
7208 }
7209
7210 s.pp.Prog(obj.ARET)
7211 }
7212
7213 if inlMarks != nil {
7214 hasCall := false
7215
7216
7217
7218
7219 for p := s.pp.Text; p != nil; p = p.Link {
7220 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
7221 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
7222
7223
7224
7225
7226
7227 continue
7228 }
7229 if _, ok := inlMarks[p]; ok {
7230
7231
7232 continue
7233 }
7234 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7235 hasCall = true
7236 }
7237 pos := p.Pos.AtColumn1()
7238 marks := inlMarksByPos[pos]
7239 if len(marks) == 0 {
7240 continue
7241 }
7242 for _, m := range marks {
7243
7244
7245
7246 p.Pos = p.Pos.WithIsStmt()
7247 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
7248
7249 m.As = obj.ANOP
7250 m.Pos = src.NoXPos
7251 m.From = obj.Addr{}
7252 m.To = obj.Addr{}
7253 }
7254 delete(inlMarksByPos, pos)
7255 }
7256
7257 for _, p := range inlMarkList {
7258 if p.As != obj.ANOP {
7259 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7260 }
7261 }
7262
7263 if e.stksize == 0 && !hasCall {
7264
7265
7266
7267
7268
7269
7270 for p := s.pp.Text; p != nil; p = p.Link {
7271 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7272 continue
7273 }
7274 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7275
7276 nop := Arch.Ginsnop(s.pp)
7277 nop.Pos = e.curfn.Pos().WithIsStmt()
7278
7279
7280
7281
7282
7283 for x := s.pp.Text; x != nil; x = x.Link {
7284 if x.Link == nop {
7285 x.Link = nop.Link
7286 break
7287 }
7288 }
7289
7290 for x := s.pp.Text; x != nil; x = x.Link {
7291 if x.Link == p {
7292 nop.Link = p
7293 x.Link = nop
7294 break
7295 }
7296 }
7297 }
7298 break
7299 }
7300 }
7301 }
7302
7303 if base.Ctxt.Flag_locationlists {
7304 var debugInfo *ssa.FuncDebug
7305 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
7306
7307
7308 debugInfo.EntryID = f.Entry.ID
7309 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7310 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7311 } else {
7312 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7313 }
7314 bstart := s.bstart
7315 idToIdx := make([]int, f.NumBlocks())
7316 for i, b := range f.Blocks {
7317 idToIdx[b.ID] = i
7318 }
7319
7320
7321
7322 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7323 switch v {
7324 case ssa.BlockStart.ID:
7325 if b == f.Entry.ID {
7326 return 0
7327
7328 }
7329 return bstart[b].Pc
7330 case ssa.BlockEnd.ID:
7331 blk := f.Blocks[idToIdx[b]]
7332 nv := len(blk.Values)
7333 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7334 case ssa.FuncEnd.ID:
7335 return e.curfn.LSym.Size
7336 default:
7337 return valueToProgAfter[v].Pc
7338 }
7339 }
7340 }
7341
7342
7343 for _, br := range s.Branches {
7344 br.P.To.SetTarget(s.bstart[br.B.ID])
7345 if br.P.Pos.IsStmt() != src.PosIsStmt {
7346 br.P.Pos = br.P.Pos.WithNotStmt()
7347 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7348 br.P.Pos = br.P.Pos.WithNotStmt()
7349 }
7350
7351 }
7352
7353
7354 for _, jt := range s.JumpTables {
7355
7356 targets := make([]*obj.Prog, len(jt.Succs))
7357 for i, e := range jt.Succs {
7358 targets[i] = s.bstart[e.Block().ID]
7359 }
7360
7361
7362
7363 fi := s.pp.CurFunc.LSym.Func()
7364 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7365 }
7366
7367 if e.log {
7368 filename := ""
7369 for p := s.pp.Text; p != nil; p = p.Link {
7370 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7371 filename = p.InnermostFilename()
7372 f.Logf("# %s\n", filename)
7373 }
7374
7375 var s string
7376 if v, ok := progToValue[p]; ok {
7377 s = v.String()
7378 } else if b, ok := progToBlock[p]; ok {
7379 s = b.String()
7380 } else {
7381 s = " "
7382 }
7383 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7384 }
7385 }
7386 if f.HTMLWriter != nil {
7387 var buf strings.Builder
7388 buf.WriteString("<code>")
7389 buf.WriteString("<dl class=\"ssa-gen\">")
7390 filename := ""
7391
7392 liveness := lv.Format(nil)
7393 if liveness != "" {
7394 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7395 buf.WriteString(html.EscapeString("# " + liveness))
7396 buf.WriteString("</dd>")
7397 }
7398
7399 for p := s.pp.Text; p != nil; p = p.Link {
7400
7401
7402 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7403 filename = p.InnermostFilename()
7404 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7405 buf.WriteString(html.EscapeString("# " + filename))
7406 buf.WriteString("</dd>")
7407 }
7408
7409 buf.WriteString("<dt class=\"ssa-prog-src\">")
7410 if v, ok := progToValue[p]; ok {
7411
7412
7413 if p.As != obj.APCDATA {
7414 if liveness := lv.Format(v); liveness != "" {
7415
7416 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7417 buf.WriteString(html.EscapeString("# " + liveness))
7418 buf.WriteString("</dd>")
7419
7420 buf.WriteString("<dt class=\"ssa-prog-src\">")
7421 }
7422 }
7423
7424 buf.WriteString(v.HTML())
7425 } else if b, ok := progToBlock[p]; ok {
7426 buf.WriteString("<b>" + b.HTML() + "</b>")
7427 }
7428 buf.WriteString("</dt>")
7429 buf.WriteString("<dd class=\"ssa-prog\">")
7430 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7431 buf.WriteString("</dd>")
7432 }
7433 buf.WriteString("</dl>")
7434 buf.WriteString("</code>")
7435 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7436 }
7437 if ssa.GenssaDump[f.Name] {
7438 fi := f.DumpFileForPhase("genssa")
7439 if fi != nil {
7440
7441
7442 inliningDiffers := func(a, b []src.Pos) bool {
7443 if len(a) != len(b) {
7444 return true
7445 }
7446 for i := range a {
7447 if a[i].Filename() != b[i].Filename() {
7448 return true
7449 }
7450 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7451 return true
7452 }
7453 }
7454 return false
7455 }
7456
7457 var allPosOld []src.Pos
7458 var allPos []src.Pos
7459
7460 for p := s.pp.Text; p != nil; p = p.Link {
7461 if p.Pos.IsKnown() {
7462 allPos = allPos[:0]
7463 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7464 if inliningDiffers(allPos, allPosOld) {
7465 for _, pos := range allPos {
7466 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7467 }
7468 allPos, allPosOld = allPosOld, allPos
7469 }
7470 }
7471
7472 var s string
7473 if v, ok := progToValue[p]; ok {
7474 s = v.String()
7475 } else if b, ok := progToBlock[p]; ok {
7476 s = b.String()
7477 } else {
7478 s = " "
7479 }
7480 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7481 }
7482 fi.Close()
7483 }
7484 }
7485
7486 defframe(&s, e, f)
7487
7488 f.HTMLWriter.Close()
7489 f.HTMLWriter = nil
7490 }
7491
// defframe finalizes the function's frame: it fills in the TEXT pseudo-op's
// argument and frame sizes, spills the pointer-carrying pieces of partially
// live register arguments that the entry block did not already spill, and
// zeroes ambiguously live stack slots that need it.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Total frame size: outgoing-argument area (rounded up to the stack
	// alignment) plus the local-variable area, with any architecture-
	// specific padding applied on top.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size on the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	// p tracks the instruction after which new prologue instructions
	// (spills, zeroing) are inserted.
	p := pp.Text

	// If the function receives arguments in registers, make sure the
	// pointer-containing parts of partially live arguments reach their
	// stack slots. Skipped with -N (base.Flag.N != 0).
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// Record which (variable, offset) pieces the entry block already
		// spills before any call.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				// Stop at the first call; later stores are not
				// guaranteed to happen before a stack scan.
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Spill the remaining pointer-typed register pieces of partially
		// live, multi-register input parameters.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					// Non-pointer pieces do not affect GC; skip.
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					// Entry block already stored this piece.
					continue
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Zero ambiguously live (Needzero) stack variables, merging nearby
	// ranges into single ZeroRange calls.
	// lo and hi delimit the pending frame-relative range to zero.
	var lo, hi int64

	// state is opaque scratch threaded through the backend's ZeroRange
	// implementation across calls.
	var state uint32

	// NOTE(review): the merge test below extends lo downward, which implies
	// Dcl is ordered by decreasing frame offset here — TODO confirm.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Close enough to the pending range (within 2*RegSize):
			// merge by extending the range downward.
			lo = n.FrameOffset()
			continue
		}

		// Zero out the old range before starting a new one.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new range at this variable.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero the final pending range (a no-op when hi-lo == 0).
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7598
7599
// An IndexJump pairs a conditional jump opcode with the index of the block
// successor it should target (used by CombJump).
type IndexJump struct {
	Jump  obj.As // jump instruction opcode
	Index int    // index into b.Succs of the jump's target block
}
7604
7605 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7606 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7607 p.Pos = b.Pos
7608 }
7609
7610
7611
7612 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7613 switch next {
7614 case b.Succs[0].Block():
7615 s.oneJump(b, &jumps[0][0])
7616 s.oneJump(b, &jumps[0][1])
7617 case b.Succs[1].Block():
7618 s.oneJump(b, &jumps[1][0])
7619 s.oneJump(b, &jumps[1][1])
7620 default:
7621 var q *obj.Prog
7622 if b.Likely != ssa.BranchUnlikely {
7623 s.oneJump(b, &jumps[1][0])
7624 s.oneJump(b, &jumps[1][1])
7625 q = s.Br(obj.AJMP, b.Succs[1].Block())
7626 } else {
7627 s.oneJump(b, &jumps[0][0])
7628 s.oneJump(b, &jumps[0][1])
7629 q = s.Br(obj.AJMP, b.Succs[0].Block())
7630 }
7631 q.Pos = b.Pos
7632 }
7633 }
7634
7635
// AddAux adds the offset and symbol information from v's AuxInt and Aux
// fields to the address a. It is shorthand for AddAux2 with v.AuxInt.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7639 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7640 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7641 v.Fatalf("bad AddAux addr %v", a)
7642 }
7643
7644 a.Offset += offset
7645
7646
7647 if v.Aux == nil {
7648 return
7649 }
7650
7651 switch n := v.Aux.(type) {
7652 case *ssa.AuxCall:
7653 a.Name = obj.NAME_EXTERN
7654 a.Sym = n.Fn
7655 case *obj.LSym:
7656 a.Name = obj.NAME_EXTERN
7657 a.Sym = n
7658 case *ir.Name:
7659 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7660 a.Name = obj.NAME_PARAM
7661 } else {
7662 a.Name = obj.NAME_AUTO
7663 }
7664 a.Sym = n.Linksym()
7665 a.Offset += n.FrameOffset()
7666 default:
7667 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7668 }
7669 }
7670
7671
7672
// extendIndex extends the index value idx to machine pointer width.
// On a 32-bit machine with a 64-bit index it emits a runtime check that the
// high 32 bits are zero (panicking with the bounds kind given by kind),
// unless bounded or -B suppresses the check. Narrower indexes are sign- or
// zero-extended depending on the index type's signedness.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer-width; nothing to do.
		return idx
	}
	if size > s.config.PtrSize {
		// Index is wider than a pointer (64-bit index, 32-bit machine).
		// Use the low 32 bits as the result, checking that the high 32
		// bits are zero when a check is required.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Caller guarantees the index is in range, or bounds
			// checks are globally disabled.
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Use the unsigned variant of the bounds-check kind for
			// unsigned indexes.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// Branch on hi == 0: the in-range path is likely, the panic
		// path exits via OpPanicExtend.
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: extend it.
	// The switch key encodes (index size, pointer size) as 10*size+PtrSize,
	// e.g. 14 = 1-byte index on a 4-byte-pointer machine.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7764
7765
7766
7767 func CheckLoweredPhi(v *ssa.Value) {
7768 if v.Op != ssa.OpPhi {
7769 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7770 }
7771 if v.Type.IsMemory() {
7772 return
7773 }
7774 f := v.Block.Func
7775 loc := f.RegAlloc[v.ID]
7776 for _, a := range v.Args {
7777 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7778 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7779 }
7780 }
7781 }
7782
7783
7784
7785
7786
7787 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7788 entry := v.Block.Func.Entry
7789 if entry != v.Block {
7790 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7791 }
7792 for _, w := range entry.Values {
7793 if w == v {
7794 break
7795 }
7796 switch w.Op {
7797 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7798
7799 default:
7800 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7801 }
7802 }
7803 }
7804
7805
7806 func CheckArgReg(v *ssa.Value) {
7807 entry := v.Block.Func.Entry
7808 if entry != v.Block {
7809 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7810 }
7811 }
7812
7813 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7814 n, off := ssa.AutoVar(v)
7815 a.Type = obj.TYPE_MEM
7816 a.Sym = n.Linksym()
7817 a.Reg = int16(Arch.REGSP)
7818 a.Offset = n.FrameOffset() + off
7819 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7820 a.Name = obj.NAME_PARAM
7821 } else {
7822 a.Name = obj.NAME_AUTO
7823 }
7824 }
7825
7826
7827
// Call emits a CALL instruction for the SSA value v and returns the Prog.
// PrepareCall is run first for the shared call bookkeeping.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	// Capture the is-statement flag before PrepareCall, which may advance
	// the pending position.
	pPosIsStmt := s.pp.Pos.IsStmt()
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	// Carry the statement marker over onto the call instruction.
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Direct call to a known symbol.
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call: the target address is in the register of
		// v.Args[0]. Whether that is encoded as a register operand or a
		// memory operand is architecture-dependent.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
7856
7857
7858
// TailCall emits a tail-call instruction for the SSA value v. It is Call
// with the opcode rewritten to RET (a RET with a call target encodes a
// tail call in the obj package).
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7864
7865
7866
7867
7868 func (s *State) PrepareCall(v *ssa.Value) {
7869 idx := s.livenessMap.Get(v)
7870 if !idx.StackMapValid() {
7871
7872 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7873 base.Fatalf("missing stack map index for %v", v.LongString())
7874 }
7875 }
7876
7877 call, ok := v.Aux.(*ssa.AuxCall)
7878
7879 if ok {
7880
7881
7882 if nowritebarrierrecCheck != nil {
7883 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7884 }
7885 }
7886
7887 if s.maxarg < v.AuxInt {
7888 s.maxarg = v.AuxInt
7889 }
7890 }
7891
7892
7893
7894 func (s *State) UseArgs(n int64) {
7895 if s.maxarg < n {
7896 s.maxarg = n
7897 }
7898 }
7899
7900
7901 func fieldIdx(n *ir.SelectorExpr) int {
7902 t := n.X.Type()
7903 if !isStructNotSIMD(t) {
7904 panic("ODOT's LHS is not a struct")
7905 }
7906
7907 for i, f := range t.Fields() {
7908 if f.Sym == n.Sel {
7909 if f.Offset != n.Offset() {
7910 panic("field offset doesn't match")
7911 }
7912 return i
7913 }
7914 }
7915 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7916
7917
7918
7919 }
7920
7921
7922
// ssafn holds per-function state used while generating code for curfn; its
// methods below (StringData, SplitSlot, Syslook, Fatalf, ...) service the
// SSA backend.
type ssafn struct {
	curfn   *ir.Func             // function being compiled
	strings map[string]*obj.LSym // lazily built cache of string-constant data symbols (see StringData)
	stksize int64                // size of the local-variable frame area (added into the frame in defframe)
	// stkptrsize is not referenced in this portion of the file; presumably
	// the pointer-containing prefix of the frame — TODO confirm.
	stkptrsize int64

	// stkalign is the stack alignment; the outgoing-argument area (maxarg)
	// is rounded up to it in defframe.
	stkalign int64

	log bool // enable SSA debug logging (see Log/Logf)
}
7937
7938
7939
7940 func (e *ssafn) StringData(s string) *obj.LSym {
7941 if aux, ok := e.strings[s]; ok {
7942 return aux
7943 }
7944 if e.strings == nil {
7945 e.strings = make(map[string]*obj.LSym)
7946 }
7947 data := staticdata.StringSym(e.curfn.Pos(), s)
7948 e.strings[s] = data
7949 return data
7950 }
7951
7952
7953 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
7954 node := parent.N
7955
7956 if node.Class != ir.PAUTO || node.Addrtaken() {
7957
7958 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
7959 }
7960
7961 sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
7962 n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
7963 n.SetUsed(true)
7964 n.SetEsc(ir.EscNever)
7965 types.CalcSize(t)
7966 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
7967 }
7968
7969
7970 func (e *ssafn) Logf(msg string, args ...any) {
7971 if e.log {
7972 fmt.Printf(msg, args...)
7973 }
7974 }
7975
// Log reports whether SSA debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7979
7980
7981 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...any) {
7982 base.Pos = pos
7983 nargs := append([]any{ir.FuncName(e.curfn)}, args...)
7984 base.Fatalf("'%s': "+msg, nargs...)
7985 }
7986
7987
7988
// Warnl reports a compiler diagnostic at the given position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...any) {
	base.WarnfAt(pos, fmt_, args...)
}
7992
// Debug_checknil reports whether nil-check debugging (the nil debug flag)
// is enabled.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7996
// UseWriteBarrier reports whether write barriers are enabled (the -wb flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
8000
// Syslook returns the link symbol for the named runtime support routine.
// Only the fixed set of names below is recognized; any other name is an
// internal compiler error.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	// Satisfies the signature; Fatalf does not return normally.
	return nil
}
8019
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
8023
8024 func clobberBase(n ir.Node) ir.Node {
8025 if n.Op() == ir.ODOT {
8026 n := n.(*ir.SelectorExpr)
8027 if n.X.Type().NumFields() == 1 {
8028 return clobberBase(n.X)
8029 }
8030 }
8031 if n.Op() == ir.OINDEX {
8032 n := n.(*ir.IndexExpr)
8033 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
8034 return clobberBase(n.X)
8035 }
8036 }
8037 return n
8038 }
8039
8040
8041 func callTargetLSym(callee *ir.Name) *obj.LSym {
8042 if callee.Func == nil {
8043
8044
8045
8046 return callee.Linksym()
8047 }
8048
8049 return callee.LinksymABI(callee.Func.ABI)
8050 }
8051
8052
// deferStructFnField is the index of the "fn" field in the struct built by
// deferstruct; deferstruct cross-checks it against the field list.
const deferStructFnField = 4

// deferType caches the synthesized runtime._defer type built by deferstruct.
var deferType *types.Type
8056
8057
8058
// deferstruct builds (once, caching in deferType) a synthesized named type
// for runtime._defer. The field list must stay consistent with the
// runtime's own _defer definition — TODO confirm against the runtime.
func deferstruct() *types.Type {
	if deferType != nil {
		// Already built.
		return deferType
	}

	// makefield creates a field with the given name and type, using a
	// nil-package symbol.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// fn is declared as uintptr rather than a func value here —
		// presumably intentional; verify against the runtime definition.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Guard against the field list and deferStructFnField drifting apart.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Attach the struct underlying type and compute its layout.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
8097
8098
8099
8100
8101
8102 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
8103 return obj.Addr{
8104 Name: obj.NAME_NONE,
8105 Type: obj.TYPE_MEM,
8106 Reg: baseReg,
8107 Offset: spill.Offset + extraOffset,
8108 }
8109 }
8110
8111 func isStructNotSIMD(t *types.Type) bool {
8112 return t.IsStruct() && !t.IsSIMD()
8113 }
8114
// BoundsCheckFunc maps each ssa.BoundsKind to a runtime helper symbol;
// populated elsewhere (presumably by architecture/init code — TODO confirm).
var BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
8116