Source file src/cmd/compile/internal/loong64/ssa.go

     1  // Copyright 2022 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package loong64
     6  
     7  import (
     8  	"math"
     9  
    10  	"cmd/compile/internal/base"
    11  	"cmd/compile/internal/ir"
    12  	"cmd/compile/internal/logopt"
    13  	"cmd/compile/internal/objw"
    14  	"cmd/compile/internal/ssa"
    15  	"cmd/compile/internal/ssagen"
    16  	"cmd/compile/internal/types"
    17  	"cmd/internal/obj"
    18  	"cmd/internal/obj/loong64"
    19  )
    20  
    21  // isFPreg reports whether r is an FP register.
    22  func isFPreg(r int16) bool {
    23  	return loong64.REG_F0 <= r && r <= loong64.REG_F31
    24  }
    25  
    26  // loadByType returns the load instruction of the given type.
    27  func loadByType(t *types.Type, r int16) obj.As {
    28  	if isFPreg(r) {
    29  		if t.Size() == 4 {
    30  			return loong64.AMOVF
    31  		} else {
    32  			return loong64.AMOVD
    33  		}
    34  	} else {
    35  		switch t.Size() {
    36  		case 1:
    37  			if t.IsSigned() {
    38  				return loong64.AMOVB
    39  			} else {
    40  				return loong64.AMOVBU
    41  			}
    42  		case 2:
    43  			if t.IsSigned() {
    44  				return loong64.AMOVH
    45  			} else {
    46  				return loong64.AMOVHU
    47  			}
    48  		case 4:
    49  			if t.IsSigned() {
    50  				return loong64.AMOVW
    51  			} else {
    52  				return loong64.AMOVWU
    53  			}
    54  		case 8:
    55  			return loong64.AMOVV
    56  		}
    57  	}
    58  	panic("bad load type")
    59  }
    60  
    61  // storeByType returns the store instruction of the given type.
    62  func storeByType(t *types.Type, r int16) obj.As {
    63  	if isFPreg(r) {
    64  		if t.Size() == 4 {
    65  			return loong64.AMOVF
    66  		} else {
    67  			return loong64.AMOVD
    68  		}
    69  	} else {
    70  		switch t.Size() {
    71  		case 1:
    72  			return loong64.AMOVB
    73  		case 2:
    74  			return loong64.AMOVH
    75  		case 4:
    76  			return loong64.AMOVW
    77  		case 8:
    78  			return loong64.AMOVV
    79  		}
    80  	}
    81  	panic("bad store type")
    82  }
    83  
    84  // largestMove returns the largest move instruction possible and its size,
    85  // given the alignment of the total size of the move.
    86  //
    87  // e.g., a 16-byte move may use MOVV, but an 11-byte move must use MOVB.
    88  //
    89  // Note that the moves may not be on naturally aligned addresses depending on
    90  // the source and destination.
    91  //
    92  // This matches the calculation in ssa.moveSize.
    93  func largestMove(alignment int64) (obj.As, int64) {
    94  	switch {
    95  	case alignment%8 == 0:
    96  		return loong64.AMOVV, 8
    97  	case alignment%4 == 0:
    98  		return loong64.AMOVW, 4
    99  	case alignment%2 == 0:
   100  		return loong64.AMOVH, 2
   101  	default:
   102  		return loong64.AMOVB, 1
   103  	}
   104  }
   105  
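         // ssaGenValue emits the machine instructions (obj.Progs) for a single SSA value.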
   106  func ssaGenValue(s *ssagen.State, v *ssa.Value) {
   107  	switch v.Op {
   108  	case ssa.OpCopy, ssa.OpLOONG64MOVVreg:
   109  		if v.Type.IsMemory() {
   110  			return
   111  		}
   112  		x := v.Args[0].Reg()
   113  		y := v.Reg()
   114  		if x == y {
   115  			return
   116  		}
   117  		as := loong64.AMOVV
   118  		if isFPreg(x) && isFPreg(y) {
   119  			as = loong64.AMOVD
   120  		}
   121  		p := s.Prog(as)
   122  		p.From.Type = obj.TYPE_REG
   123  		p.From.Reg = x
   124  		p.To.Type = obj.TYPE_REG
   125  		p.To.Reg = y
   126  	case ssa.OpLOONG64MOVVnop,
   127  		ssa.OpLOONG64LoweredRound32F,
   128  		ssa.OpLOONG64LoweredRound64F:
   129  		// nothing to do
   130  	case ssa.OpLoadReg:
   131  		if v.Type.IsFlags() {
   132  			v.Fatalf("load flags not implemented: %v", v.LongString())
   133  			return
   134  		}
   135  		r := v.Reg()
   136  		p := s.Prog(loadByType(v.Type, r))
   137  		ssagen.AddrAuto(&p.From, v.Args[0])
   138  		p.To.Type = obj.TYPE_REG
   139  		p.To.Reg = r
   140  	case ssa.OpStoreReg:
   141  		if v.Type.IsFlags() {
   142  			v.Fatalf("store flags not implemented: %v", v.LongString())
   143  			return
   144  		}
   145  		r := v.Args[0].Reg()
   146  		p := s.Prog(storeByType(v.Type, r))
   147  		p.From.Type = obj.TYPE_REG
   148  		p.From.Reg = r
   149  		ssagen.AddrAuto(&p.To, v)
   150  	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
   151  		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
   152  		// The loop only runs once.
   153  		for _, a := range v.Block.Func.RegArgs {
   154  			// Pass the spill/unspill information along to the assembler, offset by size of
   155  			// the saved LR slot.
   156  			addr := ssagen.SpillSlotAddr(a, loong64.REGSP, base.Ctxt.Arch.FixedFrameSize)
   157  			s.FuncInfo().AddSpill(
   158  				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type, a.Reg), Spill: storeByType(a.Type, a.Reg)})
   159  		}
   160  		v.Block.Func.RegArgs = nil
   161  		ssagen.CheckArgReg(v)
   162  	case ssa.OpLOONG64ADDV,
   163  		ssa.OpLOONG64SUBV,
   164  		ssa.OpLOONG64AND,
   165  		ssa.OpLOONG64OR,
   166  		ssa.OpLOONG64XOR,
   167  		ssa.OpLOONG64NOR,
   168  		ssa.OpLOONG64SLLV,
   169  		ssa.OpLOONG64SRLV,
   170  		ssa.OpLOONG64SRAV,
   171  		ssa.OpLOONG64ROTR,
   172  		ssa.OpLOONG64ROTRV,
   173  		ssa.OpLOONG64ADDF,
   174  		ssa.OpLOONG64ADDD,
   175  		ssa.OpLOONG64SUBF,
   176  		ssa.OpLOONG64SUBD,
   177  		ssa.OpLOONG64MULF,
   178  		ssa.OpLOONG64MULD,
   179  		ssa.OpLOONG64DIVF,
   180  		ssa.OpLOONG64DIVD,
   181  		ssa.OpLOONG64MULV, ssa.OpLOONG64MULHV, ssa.OpLOONG64MULHVU,
   182  		ssa.OpLOONG64DIVV, ssa.OpLOONG64REMV, ssa.OpLOONG64DIVVU, ssa.OpLOONG64REMVU,
   183  		ssa.OpLOONG64FCOPYSGD:
   184  		p := s.Prog(v.Op.Asm())
   185  		p.From.Type = obj.TYPE_REG
   186  		p.From.Reg = v.Args[1].Reg()
   187  		p.Reg = v.Args[0].Reg()
   188  		p.To.Type = obj.TYPE_REG
   189  		p.To.Reg = v.Reg()
   190  
   191  	case ssa.OpLOONG64BSTRPICKV,
   192  		ssa.OpLOONG64BSTRPICKW:
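         		// AuxInt packs the msb and lsb operands of the bit-field extraction:
         		// the msb in the high bits, the lsb in the low 5 (W) or 6 (V) bits.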
   193  		p := s.Prog(v.Op.Asm())
   194  		p.From.Type = obj.TYPE_CONST
   195  		if v.Op == ssa.OpLOONG64BSTRPICKW {
   196  			p.From.Offset = v.AuxInt >> 5
   197  			p.AddRestSourceConst(v.AuxInt & 0x1f)
   198  		} else {
   199  			p.From.Offset = v.AuxInt >> 6
   200  			p.AddRestSourceConst(v.AuxInt & 0x3f)
   201  		}
   202  		p.Reg = v.Args[0].Reg()
   203  		p.To.Type = obj.TYPE_REG
   204  		p.To.Reg = v.Reg()
   205  
   206  	case ssa.OpLOONG64FMINF,
   207  		ssa.OpLOONG64FMIND,
   208  		ssa.OpLOONG64FMAXF,
   209  		ssa.OpLOONG64FMAXD:
   210  		// ADDD Rarg0, Rarg1, Rout
   211  		// CMPEQD Rarg0, Rarg0, FCC0
   212  		// bceqz FCC0, end
   213  		// CMPEQD Rarg1, Rarg1, FCC0
   214  		// bceqz FCC0, end
   215  		// F(MIN|MAX)(F|D)
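         		//
         		// NaN handling: the ADD leaves NaN in Rout when either input is NaN,
         		// and the CMPEQ/branch pairs then skip the FMIN/FMAX so that NaN is
         		// the result, as Go's min/max semantics require.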
   216  
   217  		r0 := v.Args[0].Reg()
   218  		r1 := v.Args[1].Reg()
   219  		out := v.Reg()
   220  		add, fcmp := loong64.AADDD, loong64.ACMPEQD
   221  		if v.Op == ssa.OpLOONG64FMINF || v.Op == ssa.OpLOONG64FMAXF {
   222  			add = loong64.AADDF
   223  			fcmp = loong64.ACMPEQF
   224  		}
   225  		p1 := s.Prog(add)
   226  		p1.From.Type = obj.TYPE_REG
   227  		p1.From.Reg = r0
   228  		p1.Reg = r1
   229  		p1.To.Type = obj.TYPE_REG
   230  		p1.To.Reg = out
   231  
   232  		p2 := s.Prog(fcmp)
   233  		p2.From.Type = obj.TYPE_REG
   234  		p2.From.Reg = r0
   235  		p2.Reg = r0
   236  		p2.To.Type = obj.TYPE_REG
   237  		p2.To.Reg = loong64.REG_FCC0
   238  
   239  		p3 := s.Prog(loong64.ABFPF)
   240  		p3.To.Type = obj.TYPE_BRANCH
   241  
   242  		p4 := s.Prog(fcmp)
   243  		p4.From.Type = obj.TYPE_REG
   244  		p4.From.Reg = r1
   245  		p4.Reg = r1
   246  		p4.To.Type = obj.TYPE_REG
   247  		p4.To.Reg = loong64.REG_FCC0
   248  
   249  		p5 := s.Prog(loong64.ABFPF)
   250  		p5.To.Type = obj.TYPE_BRANCH
   251  
   252  		p6 := s.Prog(v.Op.Asm())
   253  		p6.From.Type = obj.TYPE_REG
   254  		p6.From.Reg = r1
   255  		p6.Reg = r0
   256  		p6.To.Type = obj.TYPE_REG
   257  		p6.To.Reg = out
   258  
   259  		nop := s.Prog(obj.ANOP)
   260  		p3.To.SetTarget(nop)
   261  		p5.To.SetTarget(nop)
   262  
   263  	case ssa.OpLOONG64SGT,
   264  		ssa.OpLOONG64SGTU:
   265  		p := s.Prog(v.Op.Asm())
   266  		p.From.Type = obj.TYPE_REG
   267  		p.From.Reg = v.Args[0].Reg()
   268  		p.Reg = v.Args[1].Reg()
   269  		p.To.Type = obj.TYPE_REG
   270  		p.To.Reg = v.Reg()
   271  	case ssa.OpLOONG64ADDVconst,
   272  		ssa.OpLOONG64SUBVconst,
   273  		ssa.OpLOONG64ANDconst,
   274  		ssa.OpLOONG64ORconst,
   275  		ssa.OpLOONG64XORconst,
   276  		ssa.OpLOONG64NORconst,
   277  		ssa.OpLOONG64SLLVconst,
   278  		ssa.OpLOONG64SRLVconst,
   279  		ssa.OpLOONG64SRAVconst,
   280  		ssa.OpLOONG64ROTRconst,
   281  		ssa.OpLOONG64ROTRVconst,
   282  		ssa.OpLOONG64SGTconst,
   283  		ssa.OpLOONG64SGTUconst:
   284  		p := s.Prog(v.Op.Asm())
   285  		p.From.Type = obj.TYPE_CONST
   286  		p.From.Offset = v.AuxInt
   287  		p.Reg = v.Args[0].Reg()
   288  		p.To.Type = obj.TYPE_REG
   289  		p.To.Reg = v.Reg()
   290  	case ssa.OpLOONG64MOVVconst:
   291  		r := v.Reg()
   292  		p := s.Prog(v.Op.Asm())
   293  		p.From.Type = obj.TYPE_CONST
   294  		p.From.Offset = v.AuxInt
   295  		p.To.Type = obj.TYPE_REG
   296  		p.To.Reg = r
   297  		if isFPreg(r) {
   298  			// cannot move into FP or special registers, use TMP as intermediate
   299  			p.To.Reg = loong64.REGTMP
   300  			p = s.Prog(loong64.AMOVV)
   301  			p.From.Type = obj.TYPE_REG
   302  			p.From.Reg = loong64.REGTMP
   303  			p.To.Type = obj.TYPE_REG
   304  			p.To.Reg = r
   305  		}
   306  	case ssa.OpLOONG64MOVFconst,
   307  		ssa.OpLOONG64MOVDconst:
   308  		p := s.Prog(v.Op.Asm())
   309  		p.From.Type = obj.TYPE_FCONST
   310  		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
   311  		p.To.Type = obj.TYPE_REG
   312  		p.To.Reg = v.Reg()
   313  	case ssa.OpLOONG64CMPEQF,
   314  		ssa.OpLOONG64CMPEQD,
   315  		ssa.OpLOONG64CMPGEF,
   316  		ssa.OpLOONG64CMPGED,
   317  		ssa.OpLOONG64CMPGTF,
   318  		ssa.OpLOONG64CMPGTD:
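		// FP comparisons write their result to the condition flag FCC0,
         		// which is consumed by FPT/FPF blocks and BFPT/BFPF branches.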
   319  		p := s.Prog(v.Op.Asm())
   320  		p.From.Type = obj.TYPE_REG
   321  		p.From.Reg = v.Args[0].Reg()
   322  		p.Reg = v.Args[1].Reg()
   323  		p.To.Type = obj.TYPE_REG
   324  		p.To.Reg = loong64.REG_FCC0
   325  
   326  	case ssa.OpLOONG64FMADDF,
   327  		ssa.OpLOONG64FMADDD,
   328  		ssa.OpLOONG64FMSUBF,
   329  		ssa.OpLOONG64FMSUBD,
   330  		ssa.OpLOONG64FNMADDF,
   331  		ssa.OpLOONG64FNMADDD,
   332  		ssa.OpLOONG64FNMSUBF,
   333  		ssa.OpLOONG64FNMSUBD:
   334  		p := s.Prog(v.Op.Asm())
   335  		// r=(FMA x y z) -> FMADDD z, y, x, r
   336  		// the SSA operand order is for taking advantage of
   337  		// commutativity (that only applies for the first two operands)
   338  		r := v.Reg()
   339  		x := v.Args[0].Reg()
   340  		y := v.Args[1].Reg()
   341  		z := v.Args[2].Reg()
   342  		p.From.Type = obj.TYPE_REG
   343  		p.From.Reg = z
   344  		p.Reg = y
   345  		p.AddRestSourceReg(x)
   346  		p.To.Type = obj.TYPE_REG
   347  		p.To.Reg = r
   348  
   349  	case ssa.OpLOONG64MOVVaddr:
   350  		p := s.Prog(loong64.AMOVV)
   351  		p.From.Type = obj.TYPE_ADDR
   352  		p.From.Reg = v.Args[0].Reg()
   353  		var wantreg string
   354  		// MOVV $sym+off(base), R
    355  		// the assembler expands it as follows:
   356  		// - base is SP: add constant offset to SP (R3)
   357  		// when constant is large, tmp register (R30) may be used
   358  		// - base is SB: load external address with relocation
   359  		switch v.Aux.(type) {
   360  		default:
   361  			v.Fatalf("aux is of unknown type %T", v.Aux)
   362  		case *obj.LSym:
   363  			wantreg = "SB"
   364  			ssagen.AddAux(&p.From, v)
   365  		case *ir.Name:
   366  			wantreg = "SP"
   367  			ssagen.AddAux(&p.From, v)
   368  		case nil:
   369  			// No sym, just MOVV $off(SP), R
   370  			wantreg = "SP"
   371  			p.From.Offset = v.AuxInt
   372  		}
   373  		if reg := v.Args[0].RegName(); reg != wantreg {
   374  			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
   375  		}
   376  		p.To.Type = obj.TYPE_REG
   377  		p.To.Reg = v.Reg()
   378  
   379  	case ssa.OpLOONG64MOVBloadidx,
   380  		ssa.OpLOONG64MOVBUloadidx,
   381  		ssa.OpLOONG64MOVHloadidx,
   382  		ssa.OpLOONG64MOVHUloadidx,
   383  		ssa.OpLOONG64MOVWloadidx,
   384  		ssa.OpLOONG64MOVWUloadidx,
   385  		ssa.OpLOONG64MOVVloadidx,
   386  		ssa.OpLOONG64MOVFloadidx,
   387  		ssa.OpLOONG64MOVDloadidx:
   388  		p := s.Prog(v.Op.Asm())
   389  		p.From.Type = obj.TYPE_MEM
   390  		p.From.Name = obj.NAME_NONE
   391  		p.From.Reg = v.Args[0].Reg()
   392  		p.From.Index = v.Args[1].Reg()
   393  		p.To.Type = obj.TYPE_REG
   394  		p.To.Reg = v.Reg()
   395  
   396  	case ssa.OpLOONG64MOVBstoreidx,
   397  		ssa.OpLOONG64MOVHstoreidx,
   398  		ssa.OpLOONG64MOVWstoreidx,
   399  		ssa.OpLOONG64MOVVstoreidx,
   400  		ssa.OpLOONG64MOVFstoreidx,
   401  		ssa.OpLOONG64MOVDstoreidx:
   402  		p := s.Prog(v.Op.Asm())
   403  		p.From.Type = obj.TYPE_REG
   404  		p.From.Reg = v.Args[2].Reg()
   405  		p.To.Type = obj.TYPE_MEM
   406  		p.To.Name = obj.NAME_NONE
   407  		p.To.Reg = v.Args[0].Reg()
   408  		p.To.Index = v.Args[1].Reg()
   409  
   410  	case ssa.OpLOONG64MOVBstorezeroidx,
   411  		ssa.OpLOONG64MOVHstorezeroidx,
   412  		ssa.OpLOONG64MOVWstorezeroidx,
   413  		ssa.OpLOONG64MOVVstorezeroidx:
   414  		p := s.Prog(v.Op.Asm())
   415  		p.From.Type = obj.TYPE_REG
   416  		p.From.Reg = loong64.REGZERO
   417  		p.To.Type = obj.TYPE_MEM
   418  		p.To.Name = obj.NAME_NONE
   419  		p.To.Reg = v.Args[0].Reg()
   420  		p.To.Index = v.Args[1].Reg()
   421  
   422  	case ssa.OpLOONG64MOVBload,
   423  		ssa.OpLOONG64MOVBUload,
   424  		ssa.OpLOONG64MOVHload,
   425  		ssa.OpLOONG64MOVHUload,
   426  		ssa.OpLOONG64MOVWload,
   427  		ssa.OpLOONG64MOVWUload,
   428  		ssa.OpLOONG64MOVVload,
   429  		ssa.OpLOONG64MOVFload,
   430  		ssa.OpLOONG64MOVDload:
   431  		p := s.Prog(v.Op.Asm())
   432  		p.From.Type = obj.TYPE_MEM
   433  		p.From.Reg = v.Args[0].Reg()
   434  		ssagen.AddAux(&p.From, v)
   435  		p.To.Type = obj.TYPE_REG
   436  		p.To.Reg = v.Reg()
   437  	case ssa.OpLOONG64MOVBstore,
   438  		ssa.OpLOONG64MOVHstore,
   439  		ssa.OpLOONG64MOVWstore,
   440  		ssa.OpLOONG64MOVVstore,
   441  		ssa.OpLOONG64MOVFstore,
   442  		ssa.OpLOONG64MOVDstore:
   443  		p := s.Prog(v.Op.Asm())
   444  		p.From.Type = obj.TYPE_REG
   445  		p.From.Reg = v.Args[1].Reg()
   446  		p.To.Type = obj.TYPE_MEM
   447  		p.To.Reg = v.Args[0].Reg()
   448  		ssagen.AddAux(&p.To, v)
   449  	case ssa.OpLOONG64MOVBstorezero,
   450  		ssa.OpLOONG64MOVHstorezero,
   451  		ssa.OpLOONG64MOVWstorezero,
   452  		ssa.OpLOONG64MOVVstorezero:
   453  		p := s.Prog(v.Op.Asm())
   454  		p.From.Type = obj.TYPE_REG
   455  		p.From.Reg = loong64.REGZERO
   456  		p.To.Type = obj.TYPE_MEM
   457  		p.To.Reg = v.Args[0].Reg()
   458  		ssagen.AddAux(&p.To, v)
   459  	case ssa.OpLOONG64MOVBreg,
   460  		ssa.OpLOONG64MOVBUreg,
   461  		ssa.OpLOONG64MOVHreg,
   462  		ssa.OpLOONG64MOVHUreg,
   463  		ssa.OpLOONG64MOVWreg,
   464  		ssa.OpLOONG64MOVWUreg:
   465  		a := v.Args[0]
   466  		for a.Op == ssa.OpCopy || a.Op == ssa.OpLOONG64MOVVreg {
   467  			a = a.Args[0]
   468  		}
   469  		if a.Op == ssa.OpLoadReg && loong64.REG_R0 <= a.Reg() && a.Reg() <= loong64.REG_R31 {
   470  			// LoadReg from a narrower type does an extension, except loading
   471  			// to a floating point register. So only eliminate the extension
   472  			// if it is loaded to an integer register.
   473  
   474  			t := a.Type
   475  			switch {
   476  			case v.Op == ssa.OpLOONG64MOVBreg && t.Size() == 1 && t.IsSigned(),
   477  				v.Op == ssa.OpLOONG64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
   478  				v.Op == ssa.OpLOONG64MOVHreg && t.Size() == 2 && t.IsSigned(),
   479  				v.Op == ssa.OpLOONG64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
   480  				v.Op == ssa.OpLOONG64MOVWreg && t.Size() == 4 && t.IsSigned(),
   481  				v.Op == ssa.OpLOONG64MOVWUreg && t.Size() == 4 && !t.IsSigned():
   482  				// arg is a proper-typed load, already zero/sign-extended, don't extend again
   483  				if v.Reg() == v.Args[0].Reg() {
   484  					return
   485  				}
   486  				p := s.Prog(loong64.AMOVV)
   487  				p.From.Type = obj.TYPE_REG
   488  				p.From.Reg = v.Args[0].Reg()
   489  				p.To.Type = obj.TYPE_REG
   490  				p.To.Reg = v.Reg()
   491  				return
   492  			default:
   493  			}
   494  		}
   495  		fallthrough
   496  
   497  	case ssa.OpLOONG64MOVWF,
   498  		ssa.OpLOONG64MOVWD,
   499  		ssa.OpLOONG64TRUNCFW,
   500  		ssa.OpLOONG64TRUNCDW,
   501  		ssa.OpLOONG64MOVVF,
   502  		ssa.OpLOONG64MOVVD,
   503  		ssa.OpLOONG64TRUNCFV,
   504  		ssa.OpLOONG64TRUNCDV,
   505  		ssa.OpLOONG64MOVFD,
   506  		ssa.OpLOONG64MOVDF,
   507  		ssa.OpLOONG64MOVWfpgp,
   508  		ssa.OpLOONG64MOVWgpfp,
   509  		ssa.OpLOONG64MOVVfpgp,
   510  		ssa.OpLOONG64MOVVgpfp,
   511  		ssa.OpLOONG64NEGF,
   512  		ssa.OpLOONG64NEGD,
   513  		ssa.OpLOONG64CLZW,
   514  		ssa.OpLOONG64CLZV,
   515  		ssa.OpLOONG64CTZW,
   516  		ssa.OpLOONG64CTZV,
   517  		ssa.OpLOONG64SQRTD,
   518  		ssa.OpLOONG64SQRTF,
   519  		ssa.OpLOONG64REVB2H,
   520  		ssa.OpLOONG64REVB2W,
   521  		ssa.OpLOONG64REVBV,
   522  		ssa.OpLOONG64BITREV4B,
   523  		ssa.OpLOONG64BITREVW,
   524  		ssa.OpLOONG64BITREVV,
   525  		ssa.OpLOONG64ABSD:
   526  		p := s.Prog(v.Op.Asm())
   527  		p.From.Type = obj.TYPE_REG
   528  		p.From.Reg = v.Args[0].Reg()
   529  		p.To.Type = obj.TYPE_REG
   530  		p.To.Reg = v.Reg()
   531  
   532  	case ssa.OpLOONG64VPCNT64,
   533  		ssa.OpLOONG64VPCNT32,
   534  		ssa.OpLOONG64VPCNT16:
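         		// The register allocator hands out F registers here; map them to the
         		// V registers with the same index, which the vector instruction expects.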
   535  		p := s.Prog(v.Op.Asm())
   536  		p.From.Type = obj.TYPE_REG
   537  		p.From.Reg = ((v.Args[0].Reg() - loong64.REG_F0) & 31) + loong64.REG_V0
   538  		p.To.Type = obj.TYPE_REG
   539  		p.To.Reg = ((v.Reg() - loong64.REG_F0) & 31) + loong64.REG_V0
   540  
   541  	case ssa.OpLOONG64NEGV:
   542  		// SUB from REGZERO
   543  		p := s.Prog(loong64.ASUBVU)
   544  		p.From.Type = obj.TYPE_REG
   545  		p.From.Reg = v.Args[0].Reg()
   546  		p.Reg = loong64.REGZERO
   547  		p.To.Type = obj.TYPE_REG
   548  		p.To.Reg = v.Reg()
   549  
   550  	case ssa.OpLOONG64DUFFZERO:
   551  		// runtime.duffzero expects start address in R20
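         		// v.AuxInt is the offset into duffzero's unrolled code, selecting how
         		// many bytes get zeroed.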
   552  		p := s.Prog(obj.ADUFFZERO)
   553  		p.To.Type = obj.TYPE_MEM
   554  		p.To.Name = obj.NAME_EXTERN
   555  		p.To.Sym = ir.Syms.Duffzero
   556  		p.To.Offset = v.AuxInt
   557  	case ssa.OpLOONG64LoweredZero:
   558  		// MOVx	R0, (Rarg0)
   559  		// ADDV	$sz, Rarg0
   560  		// BGEU	Rarg1, Rarg0, -2(PC)
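         		// Rarg1 marks the end of the region: the loop stores zero at (Rarg0),
         		// advances Rarg0 by sz, and branches back while Rarg1 >= Rarg0 (unsigned).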
   561  		mov, sz := largestMove(v.AuxInt)
   562  		p := s.Prog(mov)
   563  		p.From.Type = obj.TYPE_REG
   564  		p.From.Reg = loong64.REGZERO
   565  		p.To.Type = obj.TYPE_MEM
   566  		p.To.Reg = v.Args[0].Reg()
   567  
   568  		p2 := s.Prog(loong64.AADDVU)
   569  		p2.From.Type = obj.TYPE_CONST
   570  		p2.From.Offset = sz
   571  		p2.To.Type = obj.TYPE_REG
   572  		p2.To.Reg = v.Args[0].Reg()
   573  
   574  		p3 := s.Prog(loong64.ABGEU)
   575  		p3.From.Type = obj.TYPE_REG
   576  		p3.From.Reg = v.Args[1].Reg()
   577  		p3.Reg = v.Args[0].Reg()
   578  		p3.To.Type = obj.TYPE_BRANCH
   579  		p3.To.SetTarget(p)
   580  
   581  	case ssa.OpLOONG64DUFFCOPY:
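         		// Like DUFFZERO above: the address registers are fixed by
         		// runtime.duffcopy, and v.AuxInt selects the entry offset into it.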
   582  		p := s.Prog(obj.ADUFFCOPY)
   583  		p.To.Type = obj.TYPE_MEM
   584  		p.To.Name = obj.NAME_EXTERN
   585  		p.To.Sym = ir.Syms.Duffcopy
   586  		p.To.Offset = v.AuxInt
   587  	case ssa.OpLOONG64LoweredMove:
   588  		// MOVx	(Rarg1), Rtmp
   589  		// MOVx	Rtmp, (Rarg0)
   590  		// ADDV	$sz, Rarg1
   591  		// ADDV	$sz, Rarg0
   592  		// BGEU	Rarg2, Rarg0, -4(PC)
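         		// Same loop structure as LoweredZero above, with a load through REGTMP
         		// added before each store.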
   593  		mov, sz := largestMove(v.AuxInt)
   594  		p := s.Prog(mov)
   595  		p.From.Type = obj.TYPE_MEM
   596  		p.From.Reg = v.Args[1].Reg()
   597  		p.To.Type = obj.TYPE_REG
   598  		p.To.Reg = loong64.REGTMP
   599  
   600  		p2 := s.Prog(mov)
   601  		p2.From.Type = obj.TYPE_REG
   602  		p2.From.Reg = loong64.REGTMP
   603  		p2.To.Type = obj.TYPE_MEM
   604  		p2.To.Reg = v.Args[0].Reg()
   605  
   606  		p3 := s.Prog(loong64.AADDVU)
   607  		p3.From.Type = obj.TYPE_CONST
   608  		p3.From.Offset = sz
   609  		p3.To.Type = obj.TYPE_REG
   610  		p3.To.Reg = v.Args[1].Reg()
   611  
   612  		p4 := s.Prog(loong64.AADDVU)
   613  		p4.From.Type = obj.TYPE_CONST
   614  		p4.From.Offset = sz
   615  		p4.To.Type = obj.TYPE_REG
   616  		p4.To.Reg = v.Args[0].Reg()
   617  
   618  		p5 := s.Prog(loong64.ABGEU)
   619  		p5.From.Type = obj.TYPE_REG
   620  		p5.From.Reg = v.Args[2].Reg()
   621  		p5.Reg = v.Args[1].Reg()
   622  		p5.To.Type = obj.TYPE_BRANCH
   623  		p5.To.SetTarget(p)
   624  
   625  	case ssa.OpLOONG64CALLstatic, ssa.OpLOONG64CALLclosure, ssa.OpLOONG64CALLinter:
   626  		s.Call(v)
   627  	case ssa.OpLOONG64CALLtail:
   628  		s.TailCall(v)
   629  	case ssa.OpLOONG64LoweredWB:
   630  		p := s.Prog(obj.ACALL)
   631  		p.To.Type = obj.TYPE_MEM
   632  		p.To.Name = obj.NAME_EXTERN
   633  		// AuxInt encodes how many buffer entries we need.
   634  		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]
   635  
   636  	case ssa.OpLOONG64LoweredPubBarrier:
   637  		// DBAR 0x1A
   638  		p := s.Prog(v.Op.Asm())
   639  		p.From.Type = obj.TYPE_CONST
   640  		p.From.Offset = 0x1A
   641  
   642  	case ssa.OpLOONG64LoweredPanicBoundsA, ssa.OpLOONG64LoweredPanicBoundsB, ssa.OpLOONG64LoweredPanicBoundsC:
   643  		p := s.Prog(obj.ACALL)
   644  		p.To.Type = obj.TYPE_MEM
   645  		p.To.Name = obj.NAME_EXTERN
   646  		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
   647  		s.UseArgs(16) // space used in callee args area by assembly stubs
   648  	case ssa.OpLOONG64LoweredAtomicLoad8, ssa.OpLOONG64LoweredAtomicLoad32, ssa.OpLOONG64LoweredAtomicLoad64:
   649  		// MOVB	(Rarg0), Rout
   650  		// DBAR	0x14
   651  		as := loong64.AMOVV
   652  		switch v.Op {
   653  		case ssa.OpLOONG64LoweredAtomicLoad8:
   654  			as = loong64.AMOVB
   655  		case ssa.OpLOONG64LoweredAtomicLoad32:
   656  			as = loong64.AMOVW
   657  		}
   658  		p := s.Prog(as)
   659  		p.From.Type = obj.TYPE_MEM
   660  		p.From.Reg = v.Args[0].Reg()
   661  		p.To.Type = obj.TYPE_REG
   662  		p.To.Reg = v.Reg0()
   663  		p1 := s.Prog(loong64.ADBAR)
   664  		p1.From.Type = obj.TYPE_CONST
   665  		p1.From.Offset = 0x14
   666  
   667  	case ssa.OpLOONG64LoweredAtomicStore8,
   668  		ssa.OpLOONG64LoweredAtomicStore32,
   669  		ssa.OpLOONG64LoweredAtomicStore64:
   670  		// DBAR 0x12
    671  		// MOVx Rarg1, (Rarg0)
   672  		// DBAR 0x18
   673  		movx := loong64.AMOVV
   674  		switch v.Op {
   675  		case ssa.OpLOONG64LoweredAtomicStore8:
   676  			movx = loong64.AMOVB
   677  		case ssa.OpLOONG64LoweredAtomicStore32:
   678  			movx = loong64.AMOVW
   679  		}
   680  		p := s.Prog(loong64.ADBAR)
   681  		p.From.Type = obj.TYPE_CONST
   682  		p.From.Offset = 0x12
   683  
   684  		p1 := s.Prog(movx)
   685  		p1.From.Type = obj.TYPE_REG
   686  		p1.From.Reg = v.Args[1].Reg()
   687  		p1.To.Type = obj.TYPE_MEM
   688  		p1.To.Reg = v.Args[0].Reg()
   689  
   690  		p2 := s.Prog(loong64.ADBAR)
   691  		p2.From.Type = obj.TYPE_CONST
   692  		p2.From.Offset = 0x18
   693  
   694  	case ssa.OpLOONG64LoweredAtomicStore8Variant,
   695  		ssa.OpLOONG64LoweredAtomicStore32Variant,
   696  		ssa.OpLOONG64LoweredAtomicStore64Variant:
    697  		// AMSWAPx Rarg1, (Rarg0), RegZero
   698  		amswapx := loong64.AAMSWAPDBV
   699  		switch v.Op {
   700  		case ssa.OpLOONG64LoweredAtomicStore32Variant:
   701  			amswapx = loong64.AAMSWAPDBW
   702  		case ssa.OpLOONG64LoweredAtomicStore8Variant:
   703  			amswapx = loong64.AAMSWAPDBB
   704  		}
   705  		p := s.Prog(amswapx)
   706  		p.From.Type = obj.TYPE_REG
   707  		p.From.Reg = v.Args[1].Reg()
   708  		p.To.Type = obj.TYPE_MEM
   709  		p.To.Reg = v.Args[0].Reg()
   710  		p.RegTo2 = loong64.REGZERO
   711  
   712  	case ssa.OpLOONG64LoweredAtomicExchange32, ssa.OpLOONG64LoweredAtomicExchange64:
   713  		// AMSWAPx	Rarg1, (Rarg0), Rout
   714  		amswapx := loong64.AAMSWAPDBV
   715  		if v.Op == ssa.OpLOONG64LoweredAtomicExchange32 {
   716  			amswapx = loong64.AAMSWAPDBW
   717  		}
   718  		p := s.Prog(amswapx)
   719  		p.From.Type = obj.TYPE_REG
   720  		p.From.Reg = v.Args[1].Reg()
   721  		p.To.Type = obj.TYPE_MEM
   722  		p.To.Reg = v.Args[0].Reg()
   723  		p.RegTo2 = v.Reg0()
   724  
   725  	case ssa.OpLOONG64LoweredAtomicAdd32, ssa.OpLOONG64LoweredAtomicAdd64:
   726  		// AMADDx  Rarg1, (Rarg0), Rout
   727  		// ADDV    Rarg1, Rout, Rout
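         		// AMADDx leaves the old memory value in Rout; the extra ADDV recomputes
         		// old+Rarg1 so that Rout holds the new value, as the Go atomics require.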
   728  		amaddx := loong64.AAMADDDBV
   729  		addx := loong64.AADDV
   730  		if v.Op == ssa.OpLOONG64LoweredAtomicAdd32 {
   731  			amaddx = loong64.AAMADDDBW
   732  		}
   733  		p := s.Prog(amaddx)
   734  		p.From.Type = obj.TYPE_REG
   735  		p.From.Reg = v.Args[1].Reg()
   736  		p.To.Type = obj.TYPE_MEM
   737  		p.To.Reg = v.Args[0].Reg()
   738  		p.RegTo2 = v.Reg0()
   739  
   740  		p1 := s.Prog(addx)
   741  		p1.From.Type = obj.TYPE_REG
   742  		p1.From.Reg = v.Args[1].Reg()
   743  		p1.Reg = v.Reg0()
   744  		p1.To.Type = obj.TYPE_REG
   745  		p1.To.Reg = v.Reg0()
   746  
   747  	case ssa.OpLOONG64LoweredAtomicCas32, ssa.OpLOONG64LoweredAtomicCas64:
   748  		// MOVV $0, Rout
   749  		// DBAR
   750  		// LL	(Rarg0), Rtmp
   751  		// BNE	Rtmp, Rarg1, 4(PC)
   752  		// MOVV Rarg2, Rout
   753  		// SC	Rout, (Rarg0)
   754  		// BEQ	Rout, -4(PC)
   755  		// DBAR
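         		// Rout doubles as the result: it stays 0 when the comparison fails, and
         		// on success SC writes 1 into it, reporting that the swap was performed.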
   756  		ll := loong64.ALLV
   757  		sc := loong64.ASCV
   758  		if v.Op == ssa.OpLOONG64LoweredAtomicCas32 {
   759  			ll = loong64.ALL
   760  			sc = loong64.ASC
   761  		}
   762  		p := s.Prog(loong64.AMOVV)
   763  		p.From.Type = obj.TYPE_REG
   764  		p.From.Reg = loong64.REGZERO
   765  		p.To.Type = obj.TYPE_REG
   766  		p.To.Reg = v.Reg0()
   767  		s.Prog(loong64.ADBAR)
   768  		p1 := s.Prog(ll)
   769  		p1.From.Type = obj.TYPE_MEM
   770  		p1.From.Reg = v.Args[0].Reg()
   771  		p1.To.Type = obj.TYPE_REG
   772  		p1.To.Reg = loong64.REGTMP
   773  		p2 := s.Prog(loong64.ABNE)
   774  		p2.From.Type = obj.TYPE_REG
   775  		p2.From.Reg = v.Args[1].Reg()
   776  		p2.Reg = loong64.REGTMP
   777  		p2.To.Type = obj.TYPE_BRANCH
   778  		p3 := s.Prog(loong64.AMOVV)
   779  		p3.From.Type = obj.TYPE_REG
   780  		p3.From.Reg = v.Args[2].Reg()
   781  		p3.To.Type = obj.TYPE_REG
   782  		p3.To.Reg = v.Reg0()
   783  		p4 := s.Prog(sc)
   784  		p4.From.Type = obj.TYPE_REG
   785  		p4.From.Reg = v.Reg0()
   786  		p4.To.Type = obj.TYPE_MEM
   787  		p4.To.Reg = v.Args[0].Reg()
   788  		p5 := s.Prog(loong64.ABEQ)
   789  		p5.From.Type = obj.TYPE_REG
   790  		p5.From.Reg = v.Reg0()
   791  		p5.To.Type = obj.TYPE_BRANCH
   792  		p5.To.SetTarget(p1)
   793  		p6 := s.Prog(loong64.ADBAR)
   794  		p2.To.SetTarget(p6)
   795  
   796  	case ssa.OpLOONG64LoweredAtomicAnd32,
   797  		ssa.OpLOONG64LoweredAtomicOr32:
   798  		// AM{AND,OR}DBx  Rarg1, (Rarg0), RegZero
   799  		p := s.Prog(v.Op.Asm())
   800  		p.From.Type = obj.TYPE_REG
   801  		p.From.Reg = v.Args[1].Reg()
   802  		p.To.Type = obj.TYPE_MEM
   803  		p.To.Reg = v.Args[0].Reg()
   804  		p.RegTo2 = loong64.REGZERO
   805  
   806  	case ssa.OpLOONG64LoweredAtomicAnd32value,
   807  		ssa.OpLOONG64LoweredAtomicAnd64value,
   808  		ssa.OpLOONG64LoweredAtomicOr64value,
   809  		ssa.OpLOONG64LoweredAtomicOr32value:
   810  		// AM{AND,OR}DBx  Rarg1, (Rarg0), Rout
   811  		p := s.Prog(v.Op.Asm())
   812  		p.From.Type = obj.TYPE_REG
   813  		p.From.Reg = v.Args[1].Reg()
   814  		p.To.Type = obj.TYPE_MEM
   815  		p.To.Reg = v.Args[0].Reg()
   816  		p.RegTo2 = v.Reg0()
   817  
   818  	case ssa.OpLOONG64LoweredNilCheck:
   819  		// Issue a load which will fault if arg is nil.
   820  		p := s.Prog(loong64.AMOVB)
   821  		p.From.Type = obj.TYPE_MEM
   822  		p.From.Reg = v.Args[0].Reg()
   823  		ssagen.AddAux(&p.From, v)
   824  		p.To.Type = obj.TYPE_REG
   825  		p.To.Reg = loong64.REGTMP
   826  		if logopt.Enabled() {
   827  			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
   828  		}
   829  		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
   830  			base.WarnfAt(v.Pos, "generated nil check")
   831  		}
   832  	case ssa.OpLOONG64FPFlagTrue,
   833  		ssa.OpLOONG64FPFlagFalse:
   834  		// MOVV	$0, r
   835  		// BFPF	2(PC)
   836  		// MOVV	$1, r
   837  		branch := loong64.ABFPF
   838  		if v.Op == ssa.OpLOONG64FPFlagFalse {
   839  			branch = loong64.ABFPT
   840  		}
   841  		p := s.Prog(loong64.AMOVV)
   842  		p.From.Type = obj.TYPE_REG
   843  		p.From.Reg = loong64.REGZERO
   844  		p.To.Type = obj.TYPE_REG
   845  		p.To.Reg = v.Reg()
   846  		p2 := s.Prog(branch)
   847  		p2.To.Type = obj.TYPE_BRANCH
   848  		p3 := s.Prog(loong64.AMOVV)
   849  		p3.From.Type = obj.TYPE_CONST
   850  		p3.From.Offset = 1
   851  		p3.To.Type = obj.TYPE_REG
   852  		p3.To.Reg = v.Reg()
   853  		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
   854  		p2.To.SetTarget(p4)
   855  	case ssa.OpLOONG64LoweredGetClosurePtr:
   856  		// Closure pointer is R22 (loong64.REGCTXT).
   857  		ssagen.CheckLoweredGetClosurePtr(v)
   858  	case ssa.OpLOONG64LoweredGetCallerSP:
   859  		// caller's SP is FixedFrameSize below the address of the first arg
   860  		p := s.Prog(loong64.AMOVV)
   861  		p.From.Type = obj.TYPE_ADDR
   862  		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
   863  		p.From.Name = obj.NAME_PARAM
   864  		p.To.Type = obj.TYPE_REG
   865  		p.To.Reg = v.Reg()
   866  	case ssa.OpLOONG64LoweredGetCallerPC:
   867  		p := s.Prog(obj.AGETCALLERPC)
   868  		p.To.Type = obj.TYPE_REG
   869  		p.To.Reg = v.Reg()
   870  	case ssa.OpLOONG64MASKEQZ, ssa.OpLOONG64MASKNEZ:
   871  		p := s.Prog(v.Op.Asm())
   872  		p.From.Type = obj.TYPE_REG
   873  		p.From.Reg = v.Args[1].Reg()
   874  		p.Reg = v.Args[0].Reg()
   875  		p.To.Type = obj.TYPE_REG
   876  		p.To.Reg = v.Reg()
   877  	case ssa.OpClobber, ssa.OpClobberReg:
   878  		// TODO: implement for clobberdead experiment. Nop is ok for now.
   879  	default:
   880  		v.Fatalf("genValue not implemented: %s", v.LongString())
   881  	}
   882  }
   883  
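         // blockJump maps each conditional block kind to its branch instruction and
         // to the inverted branch used when the true-case successor is the
         // fallthrough block.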
   884  var blockJump = map[ssa.BlockKind]struct {
   885  	asm, invasm obj.As
   886  }{
   887  	ssa.BlockLOONG64EQ:   {loong64.ABEQ, loong64.ABNE},
   888  	ssa.BlockLOONG64NE:   {loong64.ABNE, loong64.ABEQ},
   889  	ssa.BlockLOONG64LTZ:  {loong64.ABLTZ, loong64.ABGEZ},
   890  	ssa.BlockLOONG64GEZ:  {loong64.ABGEZ, loong64.ABLTZ},
   891  	ssa.BlockLOONG64LEZ:  {loong64.ABLEZ, loong64.ABGTZ},
   892  	ssa.BlockLOONG64GTZ:  {loong64.ABGTZ, loong64.ABLEZ},
   893  	ssa.BlockLOONG64FPT:  {loong64.ABFPT, loong64.ABFPF},
   894  	ssa.BlockLOONG64FPF:  {loong64.ABFPF, loong64.ABFPT},
   895  	ssa.BlockLOONG64BEQ:  {loong64.ABEQ, loong64.ABNE},
   896  	ssa.BlockLOONG64BNE:  {loong64.ABNE, loong64.ABEQ},
   897  	ssa.BlockLOONG64BGE:  {loong64.ABGE, loong64.ABLT},
   898  	ssa.BlockLOONG64BLT:  {loong64.ABLT, loong64.ABGE},
   899  	ssa.BlockLOONG64BLTU: {loong64.ABLTU, loong64.ABGEU},
   900  	ssa.BlockLOONG64BGEU: {loong64.ABGEU, loong64.ABLTU},
   901  }
   902  
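         // ssaGenBlock emits the control-flow instructions that terminate block b,
         // using fallthrough when the next block in layout allows it.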
   903  func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
   904  	switch b.Kind {
   905  	case ssa.BlockPlain:
   906  		if b.Succs[0].Block() != next {
   907  			p := s.Prog(obj.AJMP)
   908  			p.To.Type = obj.TYPE_BRANCH
   909  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   910  		}
   911  	case ssa.BlockDefer:
   912  		// defer returns in R19:
   913  		// 0 if we should continue executing
   914  		// 1 if we should jump to deferreturn call
   915  		p := s.Prog(loong64.ABNE)
   916  		p.From.Type = obj.TYPE_REG
   917  		p.From.Reg = loong64.REGZERO
   918  		p.Reg = loong64.REG_R19
   919  		p.To.Type = obj.TYPE_BRANCH
   920  		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
   921  		if b.Succs[0].Block() != next {
   922  			p := s.Prog(obj.AJMP)
   923  			p.To.Type = obj.TYPE_BRANCH
   924  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   925  		}
   926  	case ssa.BlockExit, ssa.BlockRetJmp:
   927  	case ssa.BlockRet:
   928  		s.Prog(obj.ARET)
   929  	case ssa.BlockLOONG64EQ, ssa.BlockLOONG64NE,
   930  		ssa.BlockLOONG64LTZ, ssa.BlockLOONG64GEZ,
   931  		ssa.BlockLOONG64LEZ, ssa.BlockLOONG64GTZ,
   932  		ssa.BlockLOONG64BEQ, ssa.BlockLOONG64BNE,
   933  		ssa.BlockLOONG64BLT, ssa.BlockLOONG64BGE,
   934  		ssa.BlockLOONG64BLTU, ssa.BlockLOONG64BGEU,
   935  		ssa.BlockLOONG64FPT, ssa.BlockLOONG64FPF:
   936  		jmp := blockJump[b.Kind]
   937  		var p *obj.Prog
   938  		switch next {
   939  		case b.Succs[0].Block():
   940  			p = s.Br(jmp.invasm, b.Succs[1].Block())
   941  		case b.Succs[1].Block():
   942  			p = s.Br(jmp.asm, b.Succs[0].Block())
   943  		default:
   944  			if b.Likely != ssa.BranchUnlikely {
   945  				p = s.Br(jmp.asm, b.Succs[0].Block())
   946  				s.Br(obj.AJMP, b.Succs[1].Block())
   947  			} else {
   948  				p = s.Br(jmp.invasm, b.Succs[1].Block())
   949  				s.Br(obj.AJMP, b.Succs[0].Block())
   950  			}
   951  		}
   952  		switch b.Kind {
   953  		case ssa.BlockLOONG64BEQ, ssa.BlockLOONG64BNE,
   954  			ssa.BlockLOONG64BGE, ssa.BlockLOONG64BLT,
   955  			ssa.BlockLOONG64BGEU, ssa.BlockLOONG64BLTU:
   956  			p.From.Type = obj.TYPE_REG
   957  			p.From.Reg = b.Controls[0].Reg()
   958  			p.Reg = b.Controls[1].Reg()
   959  		case ssa.BlockLOONG64EQ, ssa.BlockLOONG64NE,
   960  			ssa.BlockLOONG64LTZ, ssa.BlockLOONG64GEZ,
   961  			ssa.BlockLOONG64LEZ, ssa.BlockLOONG64GTZ,
   962  			ssa.BlockLOONG64FPT, ssa.BlockLOONG64FPF:
   963  			if !b.Controls[0].Type.IsFlags() {
   964  				p.From.Type = obj.TYPE_REG
   965  				p.From.Reg = b.Controls[0].Reg()
   966  			}
   967  		}
   968  	default:
   969  		b.Fatalf("branch not implemented: %s", b.LongString())
   970  	}
   971  }
   972  
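         // loadRegResult loads a result of type t from n's stack slot (at offset off)
         // into register reg.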
   973  func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
   974  	p := s.Prog(loadByType(t, reg))
   975  	p.From.Type = obj.TYPE_MEM
   976  	p.From.Name = obj.NAME_AUTO
   977  	p.From.Sym = n.Linksym()
   978  	p.From.Offset = n.FrameOffset() + off
   979  	p.To.Type = obj.TYPE_REG
   980  	p.To.Reg = reg
   981  	return p
   982  }
   983  
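         // spillArgReg stores the argument register reg into the parameter stack slot
         // for n at offset off, appending the store after p.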
   984  func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
   985  	p = pp.Append(p, storeByType(t, reg), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
   986  	p.To.Name = obj.NAME_PARAM
   987  	p.To.Sym = n.Linksym()
   988  	p.Pos = p.Pos.WithNotStmt()
   989  	return p
   990  }
   991  
