Source file src/cmd/compile/internal/mips64/ssa.go

     1  // Copyright 2016 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package mips64
     6  
     7  import (
     8  	"math"
     9  
    10  	"cmd/compile/internal/base"
    11  	"cmd/compile/internal/ir"
    12  	"cmd/compile/internal/logopt"
    13  	"cmd/compile/internal/ssa"
    14  	"cmd/compile/internal/ssagen"
    15  	"cmd/compile/internal/types"
    16  	"cmd/internal/obj"
    17  	"cmd/internal/obj/mips"
    18  	"internal/abi"
    19  )
    20  
    21  // isFPreg reports whether r is an FP register.
    22  func isFPreg(r int16) bool {
    23  	return mips.REG_F0 <= r && r <= mips.REG_F31
    24  }
    25  
    26  // isHILO reports whether r is HI or LO register.
    27  func isHILO(r int16) bool {
    28  	return r == mips.REG_HI || r == mips.REG_LO
    29  }
    30  
    31  // loadByType returns the load instruction of the given type.
    32  func loadByType(t *types.Type, r int16) obj.As {
    33  	if isFPreg(r) {
    34  		if t.Size() == 4 { // float32 or int32
    35  			return mips.AMOVF
    36  		} else { // float64 or int64
    37  			return mips.AMOVD
    38  		}
    39  	} else {
    40  		switch t.Size() {
    41  		case 1:
    42  			if t.IsSigned() {
    43  				return mips.AMOVB
    44  			} else {
    45  				return mips.AMOVBU
    46  			}
    47  		case 2:
    48  			if t.IsSigned() {
    49  				return mips.AMOVH
    50  			} else {
    51  				return mips.AMOVHU
    52  			}
    53  		case 4:
    54  			if t.IsSigned() {
    55  				return mips.AMOVW
    56  			} else {
    57  				return mips.AMOVWU
    58  			}
    59  		case 8:
    60  			return mips.AMOVV
    61  		}
    62  	}
    63  	panic("bad load type")
    64  }
    65  
    66  // storeByType returns the store instruction of the given type.
    67  func storeByType(t *types.Type, r int16) obj.As {
    68  	if isFPreg(r) {
    69  		if t.Size() == 4 { // float32 or int32
    70  			return mips.AMOVF
    71  		} else { // float64 or int64
    72  			return mips.AMOVD
    73  		}
    74  	} else {
    75  		switch t.Size() {
    76  		case 1:
    77  			return mips.AMOVB
    78  		case 2:
    79  			return mips.AMOVH
    80  		case 4:
    81  			return mips.AMOVW
    82  		case 8:
    83  			return mips.AMOVV
    84  		}
    85  	}
    86  	panic("bad store type")
    87  }
    88  
// ssaGenValue emits the machine instructions (obj.Progs) for a single SSA
// value v into the state s. Most cases translate one SSA op to one
// instruction; some lower to short fixed sequences (atomics, Zero/Move
// loops, bounds-check panics). HI/LO and FP registers cannot be targeted
// directly by some moves, so several cases bounce values through REGTMP.
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop, ssa.OpMIPS64ZERO:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	// Two-operand register ops: From = arg1, Reg = arg0, To = result.
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	// Set-on-greater-than: note the operand wiring is the reverse of the
	// binary ops above (From = arg0, Reg = arg1).
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	// Immediate forms: constant in From, register operand in Reg.
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	// FP comparisons set the FP condition flag; no destination register.
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//               when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		// Look through register copies to find the defining value.
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg && mips.REG_R0 <= a.Reg() && a.Reg() <= mips.REG_R31 {
			// LoadReg from a narrower type does an extension, except loading
			// to a floating point register. So only eliminate the extension
			// if it is loaded to an integer register.
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	// Unary register-to-register ops (conversions, negation, abs, sqrt),
	// plus the extension ops that fall through from above.
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64MOVWfpgp,
		ssa.OpMIPS64MOVWgpfp,
		ssa.OpMIPS64MOVVfpgp,
		ssa.OpMIPS64MOVVgpfp,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD,
		ssa.OpMIPS64ABSD,
		ssa.OpMIPS64SQRTF,
		ssa.OpMIPS64SQRTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// SUBV	$8, R1
		// MOVV	R0, 8(R1)
		// ADDV	$8, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		// Pick the widest store whose size divides the total (AuxInt
		// is the alignment here).
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p2)
	case ssa.OpMIPS64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredMove:
		// SUBV	$8, R1
		// MOVV	8(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$8, R1
		// ADDV	$8, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		// Pick the widest move whose size divides the total (AuxInt
		// is the alignment here).
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64CALLtail:
		s.TailCall(v)
	case ssa.OpMIPS64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpMIPS64LoweredPanicBoundsRR, ssa.OpMIPS64LoweredPanicBoundsRC, ssa.OpMIPS64LoweredPanicBoundsCR, ssa.OpMIPS64LoweredPanicBoundsCC:
		// Compute the constant we put in the PCData entry for this call.
		code, signed := ssa.BoundsKind(v.AuxInt).Code()
		xIsReg := false
		yIsReg := false
		xVal := 0
		yVal := 0
		switch v.Op {
		case ssa.OpMIPS64LoweredPanicBoundsRR:
			// Both operands already in registers; record their indices
			// relative to R1.
			xIsReg = true
			xVal = int(v.Args[0].Reg() - mips.REG_R1)
			yIsReg = true
			yVal = int(v.Args[1].Reg() - mips.REG_R1)
		case ssa.OpMIPS64LoweredPanicBoundsRC:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - mips.REG_R1)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move constant to a register
				yIsReg = true
				// Pick a register index different from the one x uses.
				if yVal == xVal {
					yVal = 1
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(yVal)
			}
		case ssa.OpMIPS64LoweredPanicBoundsCR:
			yIsReg = true
			yVal = int(v.Args[0].Reg() - mips.REG_R1)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move constant to a register
				xIsReg = true
				// Pick a register index different from the one y uses.
				if xVal == yVal {
					xVal = 1
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(xVal)
			}
		case ssa.OpMIPS64LoweredPanicBoundsCC:
			c := v.Aux.(ssa.PanicBoundsCC).Cx
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move constant to a register
				xIsReg = true
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(xVal)
			}
			c = v.Aux.(ssa.PanicBoundsCC).Cy
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move constant to a register
				yIsReg = true
				yVal = 1
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(yVal)
			}
		}
		c := abi.BoundsEncode(code, signed, xIsReg, yIsReg, xVal, yVal)

		p := s.Prog(obj.APCDATA)
		p.From.SetConst(abi.PCDATA_PanicBounds)
		p.To.SetConst(int64(c))
		p = s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.PanicBounds

	case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicLoad8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicLoad32:
			as = mips.AMOVW
		}
		// SYNC before and after the load for memory ordering.
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore8, ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicStore8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicStore32:
			as = mips.AMOVW
		}
		// SYNC before and after the store for memory ordering.
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		// Atomic store of zero: store from REGZERO, bracketed by SYNC.
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
		// SYNC
		// MOVV	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV Rarg1, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV $auxint, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV $auxint, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAnd32,
		ssa.OpMIPS64LoweredAtomicOr32:
		// SYNC
		// LL	(Rarg0), Rtmp
		// AND/OR	Rarg1, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
		// MOVV $0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p1)
		// The mismatch branch (p2) skips past the SC to the trailing SYNC.
		p6 := s.Prog(mips.ASYNC)
		p2.To.SetTarget(p6)
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		p2.To.SetTarget(p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredPubBarrier:
		// SYNC
		s.Prog(v.Op.Asm())
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}
   897  
// blockJump maps each conditional block kind to its branch instruction
// (asm, taken when the condition holds) and the inverted instruction
// (invasm, used when the code falls through to the true successor instead).
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}
   910  
   911  func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
   912  	switch b.Kind {
   913  	case ssa.BlockPlain, ssa.BlockDefer:
   914  		if b.Succs[0].Block() != next {
   915  			p := s.Prog(obj.AJMP)
   916  			p.To.Type = obj.TYPE_BRANCH
   917  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   918  		}
   919  	case ssa.BlockExit, ssa.BlockRetJmp:
   920  	case ssa.BlockRet:
   921  		s.Prog(obj.ARET)
   922  	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
   923  		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
   924  		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
   925  		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
   926  		jmp := blockJump[b.Kind]
   927  		var p *obj.Prog
   928  		switch next {
   929  		case b.Succs[0].Block():
   930  			p = s.Br(jmp.invasm, b.Succs[1].Block())
   931  		case b.Succs[1].Block():
   932  			p = s.Br(jmp.asm, b.Succs[0].Block())
   933  		default:
   934  			if b.Likely != ssa.BranchUnlikely {
   935  				p = s.Br(jmp.asm, b.Succs[0].Block())
   936  				s.Br(obj.AJMP, b.Succs[1].Block())
   937  			} else {
   938  				p = s.Br(jmp.invasm, b.Succs[1].Block())
   939  				s.Br(obj.AJMP, b.Succs[0].Block())
   940  			}
   941  		}
   942  		if !b.Controls[0].Type.IsFlags() {
   943  			p.From.Type = obj.TYPE_REG
   944  			p.From.Reg = b.Controls[0].Reg()
   945  		}
   946  	default:
   947  		b.Fatalf("branch not implemented: %s", b.LongString())
   948  	}
   949  }
   950  

View as plain text