// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
4
5package walk
6
7import (
8	"fmt"
9	"internal/abi"
10
11	"cmd/compile/internal/base"
12	"cmd/compile/internal/ir"
13	"cmd/compile/internal/reflectdata"
14	"cmd/compile/internal/rttype"
15	"cmd/compile/internal/ssagen"
16	"cmd/compile/internal/typecheck"
17	"cmd/compile/internal/types"
18	"cmd/internal/src"
19)
20
// tmpstringbufsize is the size of the stack-allocated scratch buffers
// used for temporary strings. The constant is known to runtime.
const tmpstringbufsize = 32
23
24func Walk(fn *ir.Func) {
25	ir.CurFunc = fn
26	errorsBefore := base.Errors()
27	order(fn)
28	if base.Errors() > errorsBefore {
29		return
30	}
31
32	if base.Flag.W != 0 {
33		s := fmt.Sprintf("\nbefore walk %v", ir.CurFunc.Sym())
34		ir.DumpList(s, ir.CurFunc.Body)
35	}
36
37	walkStmtList(ir.CurFunc.Body)
38	if base.Flag.W != 0 {
39		s := fmt.Sprintf("after walk %v", ir.CurFunc.Sym())
40		ir.DumpList(s, ir.CurFunc.Body)
41	}
42
43	// Eagerly compute sizes of all variables for SSA.
44	for _, n := range fn.Dcl {
45		types.CalcSize(n.Type())
46	}
47}
48
49// walkRecv walks an ORECV node.
50func walkRecv(n *ir.UnaryExpr) ir.Node {
51	if n.Typecheck() == 0 {
52		base.Fatalf("missing typecheck: %+v", n)
53	}
54	init := ir.TakeInit(n)
55
56	n.X = walkExpr(n.X, &init)
57	call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
58	return ir.InitExpr(init, call)
59}
60
61func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
62	if n.Op() != ir.OAS {
63		base.Fatalf("convas: not OAS %v", n.Op())
64	}
65	n.SetTypecheck(1)
66
67	if n.X == nil || n.Y == nil {
68		return n
69	}
70
71	lt := n.X.Type()
72	rt := n.Y.Type()
73	if lt == nil || rt == nil {
74		return n
75	}
76
77	if ir.IsBlank(n.X) {
78		n.Y = typecheck.DefaultLit(n.Y, nil)
79		return n
80	}
81
82	if !types.Identical(lt, rt) {
83		n.Y = typecheck.AssignConv(n.Y, lt, "assignment")
84		n.Y = walkExpr(n.Y, init)
85	}
86	types.CalcSize(n.Y.Type())
87
88	return n
89}
90
91func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallExpr {
92	if init == nil {
93		base.Fatalf("mkcall with nil init: %v", fn)
94	}
95	if fn.Type() == nil || fn.Type().Kind() != types.TFUNC {
96		base.Fatalf("mkcall %v %v", fn, fn.Type())
97	}
98
99	n := fn.Type().NumParams()
100	if n != len(va) {
101		base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
102	}
103
104	call := typecheck.Call(base.Pos, fn, va, false).(*ir.CallExpr)
105	call.SetType(t)
106	return walkExpr(call, init).(*ir.CallExpr)
107}
108
109func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
110	return vmkcall(typecheck.LookupRuntime(name), t, init, args)
111}
112
113func mkcallstmt(name string, args ...ir.Node) ir.Node {
114	return mkcallstmt1(typecheck.LookupRuntime(name), args...)
115}
116
// mkcall1 is like mkcall, but takes an already-resolved function node
// instead of a runtime function name.
func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(fn, t, init, args)
}
120
121func mkcallstmt1(fn ir.Node, args ...ir.Node) ir.Node {
122	var init ir.Nodes
123	n := vmkcall(fn, nil, &init, args)
124	if len(init) == 0 {
125		return n
126	}
127	init.Append(n)
128	return ir.NewBlockStmt(n.Pos(), init)
129}
130
131func chanfn(name string, n int, t *types.Type) ir.Node {
132	if !t.IsChan() {
133		base.Fatalf("chanfn %v", t)
134	}
135	switch n {
136	case 1:
137		return typecheck.LookupRuntime(name, t.Elem())
138	case 2:
139		return typecheck.LookupRuntime(name, t.Elem(), t.Elem())
140	}
141	base.Fatalf("chanfn %d", n)
142	return nil
143}
144
145func mapfn(name string, t *types.Type, isfat bool) ir.Node {
146	if !t.IsMap() {
147		base.Fatalf("mapfn %v", t)
148	}
149	if mapfast(t) == mapslow || isfat {
150		return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key(), t.Elem())
151	}
152	return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Elem())
153}
154
155func mapfndel(name string, t *types.Type) ir.Node {
156	if !t.IsMap() {
157		base.Fatalf("mapfn %v", t)
158	}
159	if mapfast(t) == mapslow {
160		return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key())
161	}
162	return typecheck.LookupRuntime(name, t.Key(), t.Elem())
163}
164
// Map fast-path variants. mapfast classifies a map type into one of
// these; the mapnames tables below carry the corresponding runtime
// function name for each variant.
const (
	mapslow = iota
	mapfast32
	mapfast32ptr
	mapfast64
	mapfast64ptr
	mapfaststr
	nmapfast
)

// mapnames holds one runtime function name per map fast-path variant.
type mapnames [nmapfast]string

// mkmapnames derives the per-variant runtime function names from a
// base name. ptr is the suffix used by the pointer-key 32/64-bit
// variants (either "" or "ptr").
func mkmapnames(prefix string, ptr string) mapnames {
	return mapnames{
		mapslow:      prefix,
		mapfast32:    prefix + "_fast32",
		mapfast32ptr: prefix + "_fast32" + ptr,
		mapfast64:    prefix + "_fast64",
		mapfast64ptr: prefix + "_fast64" + ptr,
		mapfaststr:   prefix + "_faststr",
	}
}
180
// Runtime map function names, indexed by map fast-path variant.
var mapaccess1 = mkmapnames("mapaccess1", "")
var mapaccess2 = mkmapnames("mapaccess2", "")
var mapassign = mkmapnames("mapassign", "ptr")
var mapdelete = mkmapnames("mapdelete", "")
185
186func mapfast(t *types.Type) int {
187	if t.Elem().Size() > abi.MapMaxElemBytes {
188		return mapslow
189	}
190	switch reflectdata.AlgType(t.Key()) {
191	case types.AMEM32:
192		if !t.Key().HasPointers() {
193			return mapfast32
194		}
195		if types.PtrSize == 4 {
196			return mapfast32ptr
197		}
198		base.Fatalf("small pointer %v", t.Key())
199	case types.AMEM64:
200		if !t.Key().HasPointers() {
201			return mapfast64
202		}
203		if types.PtrSize == 8 {
204			return mapfast64ptr
205		}
206		// Two-word object, at least one of which is a pointer.
207		// Use the slow path.
208	case types.ASTRING:
209		return mapfaststr
210	}
211	return mapslow
212}
213
214func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
215	walkExprListSafe(n.Args, init)
216
217	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
218	// and n are name or literal, but those may index the slice we're
219	// modifying here. Fix explicitly.
220	ls := n.Args
221	for i1, n1 := range ls {
222		ls[i1] = cheapExpr(n1, init)
223	}
224}
225
226// appendWalkStmt typechecks and walks stmt and then appends it to init.
227func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
228	op := stmt.Op()
229	n := typecheck.Stmt(stmt)
230	if op == ir.OAS || op == ir.OAS2 {
231		// If the assignment has side effects, walkExpr will append them
232		// directly to init for us, while walkStmt will wrap it in an OBLOCK.
233		// We need to append them directly.
234		// TODO(rsc): Clean this up.
235		n = walkExpr(n, init)
236	} else {
237		n = walkStmt(n)
238	}
239	init.Append(n)
240}
241
// maxOpenDefers is the maximum number of defers allowed in a function
// using open-coded defers. We enforce this limit because the deferBits
// bitmask is currently a single byte (to minimize code size).
const maxOpenDefers = 8
245
246// backingArrayPtrLen extracts the pointer and length from a slice or string.
247// This constructs two nodes referring to n, so n must be a cheapExpr.
248func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
249	var init ir.Nodes
250	c := cheapExpr(n, &init)
251	if c != n || len(init) != 0 {
252		base.Fatalf("backingArrayPtrLen not cheap: %v", n)
253	}
254	ptr = ir.NewUnaryExpr(base.Pos, ir.OSPTR, n)
255	if n.Type().IsString() {
256		ptr.SetType(types.Types[types.TUINT8].PtrTo())
257	} else {
258		ptr.SetType(n.Type().Elem().PtrTo())
259	}
260	ptr.SetTypecheck(1)
261	length = ir.NewUnaryExpr(base.Pos, ir.OLEN, n)
262	length.SetType(types.Types[types.TINT])
263	length.SetTypecheck(1)
264	return ptr, length
265}
266
// mayCall reports whether evaluating expression n may require
// function calls, which could clobber function call arguments/results
// currently on the stack.
func mayCall(n ir.Node) bool {
	// When instrumenting, any expression might require function calls.
	if base.Flag.Cfg.Instrumenting {
		return true
	}

	// On soft-float targets, float/complex operations may be lowered
	// to runtime calls.
	isSoftFloat := func(typ *types.Type) bool {
		return types.IsFloat[typ.Kind()] || types.IsComplex[typ.Kind()]
	}

	return ir.Any(n, func(n ir.Node) bool {
		// walk should have already moved any Init blocks off of
		// expressions.
		if len(n.Init()) != 0 {
			base.FatalfAt(n.Pos(), "mayCall %+v", n)
		}

		switch n.Op() {
		default:
			// Unlisted op: fail loudly rather than silently guessing.
			base.FatalfAt(n.Pos(), "mayCall %+v", n)

		case ir.OCALLFUNC, ir.OCALLINTER,
			ir.OUNSAFEADD, ir.OUNSAFESLICE:
			return true

		case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
			ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODYNAMICDOTTYPE, ir.ODIV, ir.OMOD,
			ir.OSLICE2ARR, ir.OSLICE2ARRPTR:
			// These ops might panic, make sure they are done
			// before we start marshaling args for a call. See issue 16760.
			return true

		case ir.OANDAND, ir.OOROR:
			n := n.(*ir.LogicalExpr)
			// The RHS expression may have init statements that
			// should only execute conditionally, and so cannot be
			// pulled out to the top-level init list. We could try
			// to be more precise here.
			return len(n.Y.Init()) != 0

		// When using soft-float, these ops might be rewritten to function calls
		// so we ensure they are evaluated first.
		case ir.OADD, ir.OSUB, ir.OMUL, ir.ONEG:
			return ssagen.Arch.SoftFloat && isSoftFloat(n.Type())
		case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
			n := n.(*ir.BinaryExpr)
			return ssagen.Arch.SoftFloat && isSoftFloat(n.X.Type())
		case ir.OCONV:
			n := n.(*ir.ConvExpr)
			return ssagen.Arch.SoftFloat && (isSoftFloat(n.Type()) || isSoftFloat(n.X.Type()))

		case ir.OMIN, ir.OMAX:
			// string or float requires runtime call, see (*ssagen.state).minmax method.
			return n.Type().IsString() || n.Type().IsFloat()

		case ir.OLITERAL, ir.ONIL, ir.ONAME, ir.OLINKSYMOFFSET, ir.OMETHEXPR,
			ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOMPLEX, ir.OMAKEFACE,
			ir.OADDR, ir.OBITNOT, ir.ONOT, ir.OPLUS,
			ir.OCAP, ir.OIMAG, ir.OLEN, ir.OREAL,
			ir.OCONVNOP, ir.ODOT,
			ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.OSPTR,
			ir.OBYTES2STRTMP, ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP, ir.OSLICEHEADER, ir.OSTRINGHEADER:
			// ok: operations that don't require function calls.
			// Expand as needed.
		}

		return false
	})
}
339
// itabType loads the _type field from a runtime.itab struct.
func itabType(itab ir.Node) ir.Node {
	if itabTypeField == nil {
		// Lazily build the field descriptor for internal/abi.ITab's
		// Type field: a *uint8 at the ITab Type offset.
		itabTypeField = runtimeField("Type", rttype.ITab.OffsetOf("Type"), types.NewPtr(types.Types[types.TUINT8]))
	}
	return boundedDotPtr(base.Pos, itab, itabTypeField)
}
348
// itabTypeField is the lazily constructed field descriptor used by itabType.
var itabTypeField *types.Field
350
351// boundedDotPtr returns a selector expression representing ptr.field
352// and omits nil-pointer checks for ptr.
353func boundedDotPtr(pos src.XPos, ptr ir.Node, field *types.Field) *ir.SelectorExpr {
354	sel := ir.NewSelectorExpr(pos, ir.ODOTPTR, ptr, field.Sym)
355	sel.Selection = field
356	sel.SetType(field.Type)
357	sel.SetTypecheck(1)
358	sel.SetBounded(true) // guaranteed not to fault
359	return sel
360}
361
362func runtimeField(name string, offset int64, typ *types.Type) *types.Field {
363	f := types.NewField(src.NoXPos, ir.Pkgs.Runtime.Lookup(name), typ)
364	f.Offset = offset
365	return f
366}
367
368// ifaceData loads the data field from an interface.
369// The concrete type must be known to have type t.
370// It follows the pointer if !IsDirectIface(t).
371func ifaceData(pos src.XPos, n ir.Node, t *types.Type) ir.Node {
372	if t.IsInterface() {
373		base.Fatalf("ifaceData interface: %v", t)
374	}
375	ptr := ir.NewUnaryExpr(pos, ir.OIDATA, n)
376	if types.IsDirectIface(t) {
377		ptr.SetType(t)
378		ptr.SetTypecheck(1)
379		return ptr
380	}
381	ptr.SetType(types.NewPtr(t))
382	ptr.SetTypecheck(1)
383	ind := ir.NewStarExpr(pos, ptr)
384	ind.SetType(t)
385	ind.SetTypecheck(1)
386	ind.SetBounded(true)
387	return ind
388}
389