// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips64

import (
	"math"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg reports whether r is an FP register.
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO reports whether r is the HI or LO register.
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction for a value of type t loaded into register r.
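// For example, an int32 destined for an integer register loads with the
// sign-extending AMOVW, while a float32 destined for an FP register loads
// with AMOVF.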
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction for a value of type t stored from register r.
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot load directly into HI/LO; load into TMP, then move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot store directly from HI/LO; move to TMP, then store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result is left in HI, LO
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
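			// e.g. for an FP destination this becomes:
			//	MOVV	$const, RTMP
			//	MOVV	RTMP, Fd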
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// which the assembler expands as follows:
		// - base is SP: add constant offset to SP (R29)
		//               when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg && mips.REG_R0 <= a.Reg() && a.Reg() <= mips.REG_R31 {
			// LoadReg from a narrower type does an extension, except loading
			// to a floating point register. So only eliminate the extension
			// if it is loaded to an integer register.
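			// For example, a MOVBreg applied to the reload of an int8 spill
			// slot is redundant: the MOVB reload already sign-extended.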
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64MOVWfpgp,
		ssa.OpMIPS64MOVWgpfp,
		ssa.OpMIPS64MOVVfpgp,
		ssa.OpMIPS64MOVVgpfp,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD,
		ssa.OpMIPS64ABSD,
		ssa.OpMIPS64SQRTF,
		ssa.OpMIPS64SQRTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
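		// i.e. SUBVU	Rarg0, R0, Rout computes Rout = 0 - Rarg0; the
		// unsigned form avoids the overflow trap of the signed SUBV.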
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
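		// The emitted sequence is roughly:
		//	SUBVU	$8, Rarg0, R1
		//	DUFFZERO	$auxint	// pseudo-op; assembles to a call into runtime.duffzero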
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// SUBV	$8, R1
		// MOVV	R0, 8(R1)
		// ADDV	$8, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p2)
	case ssa.OpMIPS64DUFFCOPY:
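		// ADUFFCOPY is a pseudo-op that assembles to a call into
		// runtime.duffcopy; AuxInt selects the entry point. The lowering
		// rules place the source and destination addresses in the
		// registers duffcopy expects.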
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredMove:
		// SUBV	$8, R1
		// MOVV	8(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$8, R1
		// ADDV	$8, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64CALLtail:
		s.TailCall(v)
	case ssa.OpMIPS64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]
	case ssa.OpMIPS64LoweredPanicBoundsA, ssa.OpMIPS64LoweredPanicBoundsB, ssa.OpMIPS64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
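		// SYNC
		// MOV{B,W,V}	(Rarg0), Rout
		// SYNC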
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicLoad8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicLoad32:
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore8, ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
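		// SYNC
		// MOV{B,W,V}	Rarg1, (Rarg0)
		// SYNC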
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicStore8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicStore32:
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
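		// SYNC
		// MOV{W,V}	R0, (Rarg0)
		// SYNC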
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
		// SYNC
		// MOVV	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV Rarg1, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV $auxint, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV $auxint, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAnd32,
		ssa.OpMIPS64LoweredAtomicOr32:
		// SYNC
		// LL	(Rarg0), Rtmp
		// AND/OR	Rarg1, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
		// MOVV $0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p1)
		p6 := s.Prog(mips.ASYNC)
		p2.To.SetTarget(p6)
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		p2.To.SetTarget(p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

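// blockJump maps a conditional block kind to its branch instruction (asm),
// taken when the condition holds, and the inverted branch (invasm), used
// when it is cheaper to branch on the negated condition and fall through.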
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing,
		// 1 if we should jump to the deferreturn call.
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}