// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package liveness

import (
	"fmt"
	"internal/abi"

	"cmd/compile/internal/base"
	"cmd/compile/internal/bitvec"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
)

// Argument liveness tracking.
//
// For arguments passed in registers, this file tracks if their spill slots
// are live for runtime traceback. An argument spill slot is live at a PC
// if we know that an actual value has been stored into it at or before this
// point.
//
// Stack args are always live and not tracked in this code. Stack args are
// laid out before register spill slots, so we emit the smallest offset that
// needs tracking. Slots before that offset are always live. That offset is
// usually the offset of the first spill slot. But if the first spill slot is
// always live (e.g. if it is address-taken), it will be the offset of a later
// one.
//
// The liveness information is emitted as a FUNCDATA and a PCDATA.
//
// FUNCDATA format:
// - start (smallest) offset that needs tracking (1 byte)
// - a list of bitmaps.
//   In a bitmap, bit i is set if the i-th spill slot is live.
//
// At a PC where the liveness info changes, a PCDATA indicates the
// byte offset of the liveness map in the FUNCDATA. PCDATA -1 is a
// special case indicating all slots are live (to save binary size).
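//
// For example, with three tracked spill slots at frame offsets 8, 16 and 24,
// the FUNCDATA could be laid out as
//
//	byte 0: 0x08   start offset (frame offset of the first tracked slot)
//	byte 1: 0b001  bitmap: only the slot at offset 8 is live
//	byte 2: 0b011  bitmap: the slots at offsets 8 and 16 are live
//
// A PCDATA value of 2 at some PC then selects the bitmap at byte offset 2,
// and PCDATA -1 means all three slots are live at that PC.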

const allLiveIdx = -1

// nameOff is a name and an offset within it, identifying a spill slot.
type nameOff struct {
	n   *ir.Name
	off int64
}

func (a nameOff) FrameOffset() int64 { return a.n.FrameOffset() + a.off }
func (a nameOff) String() string     { return fmt.Sprintf("%v+%d", a.n, a.off) }

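// blockArgEffects records which argument spill slots are live at a block's
// entry and exit.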
type blockArgEffects struct {
	livein  bitvec.BitVec // spill slots live at block entry
	liveout bitvec.BitVec // spill slots live at block exit
}

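// argLiveness holds the state for computing the liveness information of
// register argument spill slots for a single function.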
type argLiveness struct {
	fn   *ir.Func
	f    *ssa.Func
	args []nameOff         // name and offset of spill slots
	idx  map[nameOff]int32 // index in args

	be []blockArgEffects // indexed by block ID

	bvset bvecSet // Set of liveness bitmaps, used for uniquifying.

	// Liveness map indices at each Value (where liveness changes) and at each
	// Block entry. During the computation these are indices into bvset. At
	// the end they are rewritten to byte offsets into the output funcdata
	// (see (*argLiveness).emit).
	blockIdx map[ssa.ID]int
	valueIdx map[ssa.ID]int
}

// ArgLiveness computes the liveness information of register argument spill slots.
// An argument's spill slot is "live" if we know it contains a meaningful value,
// that is, we have stored the register value to it.
// It returns the liveness map indices at each Block entry and at each Value
// (where liveness changes).
func ArgLiveness(fn *ir.Func, f *ssa.Func, pp *objw.Progs) (blockIdx, valueIdx map[ssa.ID]int) {
	if f.OwnAux.ABIInfo().InRegistersUsed() == 0 || base.Flag.N != 0 {
		// No register args, so there is nothing to emit.
		// With -N everything is spilled upfront, so all slots are always live.
		return nil, nil
	}

	lv := &argLiveness{
		fn:       fn,
		f:        f,
		idx:      make(map[nameOff]int32),
		be:       make([]blockArgEffects, f.NumBlocks()),
		blockIdx: make(map[ssa.ID]int),
		valueIdx: make(map[ssa.ID]int),
	}
	// Gather all register arg spill slots.
	for _, a := range f.OwnAux.ABIInfo().InParams() {
		n := a.Name
		if n == nil || len(a.Registers) == 0 {
			continue
		}
		_, offs := a.RegisterTypesAndOffsets()
		for _, off := range offs {
			if n.FrameOffset()+off > 0xff {
				// We only print a limited number of args, with stack
				// offsets no larger than 255.
				continue
			}
			lv.args = append(lv.args, nameOff{n, off})
		}
	}
	if len(lv.args) > 10 {
		lv.args = lv.args[:10] // We print no more than 10 args.
	}

	// We spill address-taken or non-SSA-able values upfront, so they are always live.
	alwaysLive := func(n *ir.Name) bool { return n.Addrtaken() || !ssa.CanSSA(n.Type()) }

	// We'll emit the smallest offset for the slots that need liveness info.
	// No need to include a slot with a lower offset if it is always live.
	for len(lv.args) > 0 && alwaysLive(lv.args[0].n) {
		lv.args = lv.args[1:]
	}
	if len(lv.args) == 0 {
		return // everything is always live
	}

	for i, a := range lv.args {
		lv.idx[a] = int32(i)
	}

	nargs := int32(len(lv.args))
	bulk := bitvec.NewBulk(nargs, int32(len(f.Blocks)*2))
	for _, b := range f.Blocks {
		be := &lv.be[b.ID]
		be.livein = bulk.Next()
		be.liveout = bulk.Next()

		// initialize to all 1s, so we can AND them
		be.livein.Not()
		be.liveout.Not()
	}

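	// On entry to the function, only the always-live slots (those spilled
	// upfront) are live; the other spill slots have not been written yet.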
	entrybe := &lv.be[f.Entry.ID]
	entrybe.livein.Clear()
	for i, a := range lv.args {
		if alwaysLive(a.n) {
			entrybe.livein.Set(int32(i))
		}
	}

	// Visit blocks in reverse-postorder, compute block effects.
	po := f.Postorder()
	for i := len(po) - 1; i >= 0; i-- {
		b := po[i]
		be := &lv.be[b.ID]

		// A slot is live at block entry if it is live in all predecessors.
		for _, pred := range b.Preds {
			pb := pred.Block()
			be.livein.And(be.livein, lv.be[pb.ID].liveout)
		}

		be.liveout.Copy(be.livein)
		for _, v := range b.Values {
			lv.valueEffect(v, be.liveout)
		}
	}

	// Coalesce identical live vectors. Compute the liveness map index at each
	// PC where liveness changes.
	live := bitvec.New(nargs)
	addToSet := func(bv bitvec.BitVec) (int, bool) {
		if bv.Count() == int(nargs) { // special case for all live
			return allLiveIdx, false
		}
		return lv.bvset.add(bv)
	}
	for _, b := range lv.f.Blocks {
		be := &lv.be[b.ID]
		lv.blockIdx[b.ID], _ = addToSet(be.livein)

		live.Copy(be.livein)
		var lastv *ssa.Value
		for i, v := range b.Values {
			if lv.valueEffect(v, live) {
				// Record that liveness changed, but don't emit a map yet.
				// For a sequence of StoreRegs we only need to emit one
				// map, at the last one.
				lastv = v
			}
			if lastv != nil && (mayFault(v) || i == len(b.Values)-1) {
				// Emit the liveness map if the instruction may fault, or
				// at the end of the block. We may need a traceback if the
				// instruction can cause a panic.
				var added bool
				lv.valueIdx[lastv.ID], added = addToSet(live)
				if added {
					// live is added to bvset and we cannot modify it now.
					// Make a copy.
					t := live
					live = bitvec.New(nargs)
					live.Copy(t)
				}
				lastv = nil
			}
		}

		// Sanity check.
		if !live.Eq(be.liveout) {
			panic("wrong arg liveness map at block end")
		}
	}

	// Emit funcdata symbol, update indices to offsets in the symbol data.
	lsym := lv.emit()
	fn.LSym.Func().ArgLiveInfo = lsym

	//lv.print()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(abi.FUNCDATA_ArgLiveInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = lsym

	return lv.blockIdx, lv.valueIdx
}

// valueEffect applies the effect of v to live, reporting whether live changed.
func (lv *argLiveness) valueEffect(v *ssa.Value, live bitvec.BitVec) bool {
	if v.Op != ssa.OpStoreReg { // TODO: include other store instructions?
		return false
	}
	n, off := ssa.AutoVar(v)
	if n.Class != ir.PPARAM {
		return false
	}
	i, ok := lv.idx[nameOff{n, off}]
	if !ok || live.Get(i) {
		return false
	}
	live.Set(i)
	return true
}

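// mayFault reports whether the instruction for v may fault (and so may need
// a traceback, and therefore an up-to-date argument liveness map).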
func mayFault(v *ssa.Value) bool {
	switch v.Op {
	case ssa.OpLoadReg, ssa.OpStoreReg, ssa.OpCopy, ssa.OpPhi,
		ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive,
		ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult,
		ssa.OpConvert, ssa.OpInlMark, ssa.OpGetG:
		return false
	}
	if len(v.Args) == 0 {
		return false // assume constant op cannot fault
	}
	return true // conservatively assume all other ops could fault
}

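// print dumps the computed argument liveness information, for debugging.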
func (lv *argLiveness) print() {
	fmt.Println("argument liveness:", lv.f.Name)
	live := bitvec.New(int32(len(lv.args)))
	for _, b := range lv.f.Blocks {
		be := &lv.be[b.ID]

		fmt.Printf("%v: live in: ", b)
		lv.printLivenessVec(be.livein)
		if idx, ok := lv.blockIdx[b.ID]; ok {
			fmt.Printf("   #%d", idx)
		}
		fmt.Println()

		for _, v := range b.Values {
			if lv.valueEffect(v, live) {
				fmt.Printf("  %v: ", v)
				lv.printLivenessVec(live)
				if idx, ok := lv.valueIdx[v.ID]; ok {
					fmt.Printf("   #%d", idx)
				}
				fmt.Println()
			}
		}

		fmt.Printf("%v: live out: ", b)
		lv.printLivenessVec(be.liveout)
		fmt.Println()
	}
	fmt.Println("liveness maps data:", lv.fn.LSym.Func().ArgLiveInfo.P)
}

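// printLivenessVec prints the arguments that are live in bv.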
func (lv *argLiveness) printLivenessVec(bv bitvec.BitVec) {
	for i, a := range lv.args {
		if bv.Get(int32(i)) {
			fmt.Printf("%v ", a)
		}
	}
}

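// emit writes the argument liveness maps to a content-addressable funcdata
// symbol, and rewrites the entries of blockIdx and valueIdx from bvset
// indices to byte offsets within that symbol.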
func (lv *argLiveness) emit() *obj.LSym {
	livenessMaps := lv.bvset.extractUnique()

	// stack offsets of register arg spill slots
	argOffsets := make([]uint8, len(lv.args))
	for i, a := range lv.args {
		off := a.FrameOffset()
		if off > 0xff {
			panic("offset too large")
		}
		argOffsets[i] = uint8(off)
	}

	idx2off := make([]int, len(livenessMaps))

	lsym := base.Ctxt.Lookup(lv.fn.LSym.Name + ".argliveinfo")
	lsym.Set(obj.AttrContentAddressable, true)

	off := objw.Uint8(lsym, 0, argOffsets[0]) // smallest offset that needs liveness info.
	for idx, live := range livenessMaps {
		idx2off[idx] = off
		off = objw.BitVec(lsym, off, live)
	}

	// Update liveness indices to offsets.
	for i, x := range lv.blockIdx {
		if x != allLiveIdx {
			lv.blockIdx[i] = idx2off[x]
		}
	}
	for i, x := range lv.valueIdx {
		if x != allLiveIdx {
			lv.valueIdx[i] = idx2off[x]
		}
	}

	return lsym
}
