Source file src/cmd/compile/internal/ssa/sccp.go

     1  // Copyright 2023 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package ssa
     6  
     7  // ----------------------------------------------------------------------------
     8  // Sparse Conditional Constant Propagation
     9  //
    10  // Described in
    11  // Mark N. Wegman, F. Kenneth Zadeck: Constant Propagation with Conditional Branches.
    12  // TOPLAS 1991.
    13  //
     14  // This algorithm uses a three-level lattice for SSA values
    15  //
    16  //      Top        undefined
    17  //     / | \
    18  // .. 1  2  3 ..   constant
    19  //     \ | /
    20  //     Bottom      not constant
    21  //
    22  // It starts with optimistically assuming that all SSA values are initially Top
    23  // and then propagates constant facts only along reachable control flow paths.
     24  // Since some basic blocks may not have been visited yet, the corresponding
     25  // inputs of a phi are Top; we use meet(phi) to compute its lattice.
    26  //
    27  // 	  Top ∩ any = any
    28  // 	  Bottom ∩ any = Bottom
    29  // 	  ConstantA ∩ ConstantA = ConstantA
    30  // 	  ConstantA ∩ ConstantB = Bottom
    31  //
     32  // Each lattice value is lowered at most twice (Top to Constant, Constant to Bottom)
    33  // due to lattice depth, resulting in a fast convergence speed of the algorithm.
    34  // In this way, sccp can discover optimization opportunities that cannot be found
    35  // by just combining constant folding and constant propagation and dead code
    36  // elimination separately.
    37  
// Three-level lattice tags holding compile-time knowledge about an SSA value.
// Values only ever move downward in the lattice (top -> constant -> bottom),
// which bounds the number of times any cell can change.
const (
	top      int8 = iota // undefined
	constant             // constant
	bottom               // not a constant
)
    44  
// lattice is the abstract state of one SSA value: its position in the
// three-level lattice plus, when tag == constant, the folded constant value.
type lattice struct {
	tag int8   // lattice type: top, constant or bottom
	val *Value // constant value; non-nil only when tag == constant
}
    49  
// worklist carries the whole state of one sccp run over a function: the
// pending work items (CFG edges and values to re-visit) plus the lattice
// cells and def-use information derived from the function.
type worklist struct {
	f            *Func               // the target function to be optimized out
	edges        []Edge              // propagate constant facts through edges
	uses         []*Value            // re-visiting set
	visited      map[Edge]bool       // visited edges
	latticeCells map[*Value]lattice  // constant lattices
	defUse       map[*Value][]*Value // def-use chains for some values
	defBlock     map[*Value][]*Block // use blocks of def
	visitedBlock []bool              // visited block
}
    60  
// sccp stands for sparse conditional constant propagation, it propagates constants
// through CFG conditionally and applies constant folding, constant replacement and
// dead code elimination all together.
func sccp(f *Func) {
	var t worklist
	t.f = f
	t.edges = make([]Edge, 0)
	t.visited = make(map[Edge]bool)
	// Seed the worklist with the entry edge; only blocks reachable from here
	// are ever visited, which is what makes the propagation "conditional".
	t.edges = append(t.edges, Edge{f.Entry, 0})
	t.defUse = make(map[*Value][]*Value)
	t.defBlock = make(map[*Value][]*Block)
	t.latticeCells = make(map[*Value]lattice)
	t.visitedBlock = f.Cache.allocBoolSlice(f.NumBlocks())
	defer f.Cache.freeBoolSlice(t.visitedBlock)

	// build it early since we rely heavily on the def-use chain later
	t.buildDefUses()

	// pick up either an edge or SSA value from worklist, process it.
	// Edges are drained before uses so newly reachable blocks are evaluated
	// before values are re-visited.
	for {
		if len(t.edges) > 0 {
			edge := t.edges[0]
			t.edges = t.edges[1:]
			if _, exist := t.visited[edge]; !exist {
				dest := edge.b
				destVisited := t.visitedBlock[dest.ID]

				// mark edge as visited
				t.visited[edge] = true
				t.visitedBlock[dest.ID] = true
				for _, val := range dest.Values {
					// If dest was already visited, only phis can be affected by
					// this newly discovered incoming edge; all other values were
					// evaluated on the first visit.
					if val.Op == OpPhi || !destVisited {
						t.visitValue(val)
					}
				}
				// propagates constants facts through CFG, taking condition test
				// into account
				if !destVisited {
					t.propagate(dest)
				}
			}
			continue
		}
		if len(t.uses) > 0 {
			use := t.uses[0]
			t.uses = t.uses[1:]
			t.visitValue(use)
			continue
		}
		break
	}

	// apply optimizations based on discovered constants
	constCnt, rewireCnt := t.replaceConst()
	if f.pass.debug > 0 {
		if constCnt > 0 || rewireCnt > 0 {
			f.Warnl(f.Entry.Pos, "Phase SCCP for %v : %v constants, %v dce", f.Name, constCnt, rewireCnt)
		}
	}
}
   121  
   122  func equals(a, b lattice) bool {
   123  	if a == b {
   124  		// fast path
   125  		return true
   126  	}
   127  	if a.tag != b.tag {
   128  		return false
   129  	}
   130  	if a.tag == constant {
   131  		// The same content of const value may be different, we should
   132  		// compare with auxInt instead
   133  		v1 := a.val
   134  		v2 := b.val
   135  		if v1.Op == v2.Op && v1.AuxInt == v2.AuxInt {
   136  			return true
   137  		} else {
   138  			return false
   139  		}
   140  	}
   141  	return true
   142  }
   143  
// possibleConst checks if Value can be folded to const. For those Values that can
// never become constants (e.g. StaticCall), we don't make futile efforts.
// NOTE(review): the opcode lists below appear to mirror the ones handled in
// visitValue — keep them in sync when adding new foldable ops.
func possibleConst(val *Value) bool {
	if isConst(val) {
		return true
	}
	switch val.Op {
	case OpCopy:
		return true
	case OpPhi:
		return true
	case
		// negate
		OpNeg8, OpNeg16, OpNeg32, OpNeg64, OpNeg32F, OpNeg64F,
		OpCom8, OpCom16, OpCom32, OpCom64,
		// math
		OpFloor, OpCeil, OpTrunc, OpRoundToEven, OpSqrt,
		// conversion
		OpTrunc16to8, OpTrunc32to8, OpTrunc32to16, OpTrunc64to8,
		OpTrunc64to16, OpTrunc64to32, OpCvt32to32F, OpCvt32to64F,
		OpCvt64to32F, OpCvt64to64F, OpCvt32Fto32, OpCvt32Fto64,
		OpCvt64Fto32, OpCvt64Fto64, OpCvt32Fto64F, OpCvt64Fto32F,
		OpCvtBoolToUint8,
		OpZeroExt8to16, OpZeroExt8to32, OpZeroExt8to64, OpZeroExt16to32,
		OpZeroExt16to64, OpZeroExt32to64, OpSignExt8to16, OpSignExt8to32,
		OpSignExt8to64, OpSignExt16to32, OpSignExt16to64, OpSignExt32to64,
		// bit
		OpCtz8, OpCtz16, OpCtz32, OpCtz64,
		// mask
		OpSlicemask,
		// safety check
		OpIsNonNil,
		// not
		OpNot:
		return true
	case
		// add
		OpAdd64, OpAdd32, OpAdd16, OpAdd8,
		OpAdd32F, OpAdd64F,
		// sub
		OpSub64, OpSub32, OpSub16, OpSub8,
		OpSub32F, OpSub64F,
		// mul
		OpMul64, OpMul32, OpMul16, OpMul8,
		OpMul32F, OpMul64F,
		// div
		OpDiv32F, OpDiv64F,
		OpDiv8, OpDiv16, OpDiv32, OpDiv64,
		OpDiv8u, OpDiv16u, OpDiv32u, OpDiv64u,
		OpMod8, OpMod16, OpMod32, OpMod64,
		OpMod8u, OpMod16u, OpMod32u, OpMod64u,
		// compare
		OpEq64, OpEq32, OpEq16, OpEq8,
		OpEq32F, OpEq64F,
		OpLess64, OpLess32, OpLess16, OpLess8,
		OpLess64U, OpLess32U, OpLess16U, OpLess8U,
		OpLess32F, OpLess64F,
		OpLeq64, OpLeq32, OpLeq16, OpLeq8,
		OpLeq64U, OpLeq32U, OpLeq16U, OpLeq8U,
		OpLeq32F, OpLeq64F,
		OpEqB, OpNeqB,
		// shift
		OpLsh64x64, OpRsh64x64, OpRsh64Ux64, OpLsh32x64,
		OpRsh32x64, OpRsh32Ux64, OpLsh16x64, OpRsh16x64,
		OpRsh16Ux64, OpLsh8x64, OpRsh8x64, OpRsh8Ux64,
		// safety check
		OpIsInBounds, OpIsSliceInBounds,
		// bit
		OpAnd8, OpAnd16, OpAnd32, OpAnd64,
		OpOr8, OpOr16, OpOr32, OpOr64,
		OpXor8, OpXor16, OpXor32, OpXor64:
		return true
	default:
		return false
	}
}
   220  
   221  func (t *worklist) getLatticeCell(val *Value) lattice {
   222  	if !possibleConst(val) {
   223  		// they are always worst
   224  		return lattice{bottom, nil}
   225  	}
   226  	lt, exist := t.latticeCells[val]
   227  	if !exist {
   228  		return lattice{top, nil} // optimistically for un-visited value
   229  	}
   230  	return lt
   231  }
   232  
   233  func isConst(val *Value) bool {
   234  	switch val.Op {
   235  	case OpConst64, OpConst32, OpConst16, OpConst8,
   236  		OpConstBool, OpConst32F, OpConst64F:
   237  		return true
   238  	default:
   239  		return false
   240  	}
   241  }
   242  
   243  // buildDefUses builds def-use chain for some values early, because once the
   244  // lattice of a value is changed, we need to update lattices of use. But we don't
   245  // need all uses of it, only uses that can become constants would be added into
   246  // re-visit worklist since no matter how many times they are revisited, uses which
   247  // can't become constants lattice remains unchanged, i.e. Bottom.
   248  func (t *worklist) buildDefUses() {
   249  	for _, block := range t.f.Blocks {
   250  		for _, val := range block.Values {
   251  			for _, arg := range val.Args {
   252  				// find its uses, only uses that can become constants take into account
   253  				if possibleConst(arg) && possibleConst(val) {
   254  					if _, exist := t.defUse[arg]; !exist {
   255  						t.defUse[arg] = make([]*Value, 0, arg.Uses)
   256  					}
   257  					t.defUse[arg] = append(t.defUse[arg], val)
   258  				}
   259  			}
   260  		}
   261  		for _, ctl := range block.ControlValues() {
   262  			// for control values that can become constants, find their use blocks
   263  			if possibleConst(ctl) {
   264  				t.defBlock[ctl] = append(t.defBlock[ctl], block)
   265  			}
   266  		}
   267  	}
   268  }
   269  
   270  // addUses finds all uses of value and appends them into work list for further process
   271  func (t *worklist) addUses(val *Value) {
   272  	for _, use := range t.defUse[val] {
   273  		if val == use {
   274  			// Phi may refer to itself as uses, ignore them to avoid re-visiting phi
   275  			// for performance reason
   276  			continue
   277  		}
   278  		t.uses = append(t.uses, use)
   279  	}
   280  	for _, block := range t.defBlock[val] {
   281  		if t.visitedBlock[block.ID] {
   282  			t.propagate(block)
   283  		}
   284  	}
   285  }
   286  
   287  // meet meets all of phi arguments and computes result lattice
   288  func (t *worklist) meet(val *Value) lattice {
   289  	optimisticLt := lattice{top, nil}
   290  	for i := 0; i < len(val.Args); i++ {
   291  		edge := Edge{val.Block, i}
   292  		// If incoming edge for phi is not visited, assume top optimistically.
   293  		// According to rules of meet:
   294  		// 		Top ∩ any = any
   295  		// Top participates in meet() but does not affect the result, so here
   296  		// we will ignore Top and only take other lattices into consideration.
   297  		if _, exist := t.visited[edge]; exist {
   298  			lt := t.getLatticeCell(val.Args[i])
   299  			if lt.tag == constant {
   300  				if optimisticLt.tag == top {
   301  					optimisticLt = lt
   302  				} else {
   303  					if !equals(optimisticLt, lt) {
   304  						// ConstantA ∩ ConstantB = Bottom
   305  						return lattice{bottom, nil}
   306  					}
   307  				}
   308  			} else if lt.tag == bottom {
   309  				// Bottom ∩ any = Bottom
   310  				return lattice{bottom, nil}
   311  			} else {
   312  				// Top ∩ any = any
   313  			}
   314  		} else {
   315  			// Top ∩ any = any
   316  		}
   317  	}
   318  
   319  	// ConstantA ∩ ConstantA = ConstantA or Top ∩ any = any
   320  	return optimisticLt
   321  }
   322  
// computeLattice folds val with the given constant arguments by reusing the
// generic rewrite rules, returning a constant lattice on success and Bottom
// otherwise. The temporary value it creates never survives on failure.
func computeLattice(f *Func, val *Value, args ...*Value) lattice {
	// In general, we need to perform constant evaluation based on constant args:
	//
	//  res := lattice{constant, nil}
	// 	switch op {
	// 	case OpAdd16:
	//		res.val = newConst(argLt1.val.AuxInt16() + argLt2.val.AuxInt16())
	// 	case OpAdd32:
	// 		res.val = newConst(argLt1.val.AuxInt32() + argLt2.val.AuxInt32())
	//	case OpDiv8:
	//		if !isDivideByZero(argLt2.val.AuxInt8()) {
	//			res.val = newConst(argLt1.val.AuxInt8() / argLt2.val.AuxInt8())
	//		}
	//  ...
	// 	}
	//
	// However, this would create a huge switch for all opcodes that can be
	// evaluated during compile time. Moreover, some operations can be evaluated
	// only if its arguments satisfy additional conditions(e.g. divide by zero).
	// It's fragile and error-prone. We did a trick by reusing the existing rules
	// in generic rules for compile-time evaluation. But generic rules rewrite
	// original value, this behavior is undesired, because the lattice of values
	// may change multiple times, once it was rewritten, we lose the opportunity
	// to change it permanently, which can lead to errors. For example, We cannot
	// change its value immediately after visiting Phi, because some of its input
	// edges may still not be visited at this moment.
	constValue := f.newValue(val.Op, val.Type, f.Entry, val.Pos)
	constValue.AddArgs(args...)
	matched := rewriteValuegeneric(constValue)
	if matched {
		if isConst(constValue) {
			return lattice{constant, constValue}
		}
	}
	// Either we can not match generic rules for given value or it does not
	// satisfy additional constraints(e.g. divide by zero), in these cases, clean
	// up temporary value immediately in case they are not dominated by their args.
	constValue.reset(OpInvalid)
	return lattice{bottom, nil}
}
   363  
// visitValue (re)computes the lattice of val from the lattices of its
// arguments and stores it in latticeCells. If the lattice changed, all
// recorded uses of val are scheduled for re-visiting (see the deferred check).
func (t *worklist) visitValue(val *Value) {
	if !possibleConst(val) {
		// fast fail for always worst Values, i.e. there is no lowering happen
		// on them, their lattices must be initially worse Bottom.
		return
	}

	oldLt := t.getLatticeCell(val)
	defer func() {
		// re-visit all uses of value if its lattice is changed
		newLt := t.getLatticeCell(val)
		if !equals(newLt, oldLt) {
			// lattice values may only move downward (top -> constant -> bottom);
			// anything else indicates a bug in the algorithm
			if oldLt.tag > newLt.tag {
				t.f.Fatalf("Must lower lattice\n")
			}
			t.addUses(val)
		}
	}()

	switch val.Op {
	// they are constant values, aren't they?
	case OpConst64, OpConst32, OpConst16, OpConst8,
		OpConstBool, OpConst32F, OpConst64F: //TODO: support ConstNil ConstString etc
		t.latticeCells[val] = lattice{constant, val}
	// lattice value of copy(x) actually means lattice value of (x)
	case OpCopy:
		t.latticeCells[val] = t.getLatticeCell(val.Args[0])
	// phi should be processed specially
	case OpPhi:
		t.latticeCells[val] = t.meet(val)
	// fold 1-input operations:
	case
		// negate
		OpNeg8, OpNeg16, OpNeg32, OpNeg64, OpNeg32F, OpNeg64F,
		OpCom8, OpCom16, OpCom32, OpCom64,
		// math
		OpFloor, OpCeil, OpTrunc, OpRoundToEven, OpSqrt,
		// conversion
		OpTrunc16to8, OpTrunc32to8, OpTrunc32to16, OpTrunc64to8,
		OpTrunc64to16, OpTrunc64to32, OpCvt32to32F, OpCvt32to64F,
		OpCvt64to32F, OpCvt64to64F, OpCvt32Fto32, OpCvt32Fto64,
		OpCvt64Fto32, OpCvt64Fto64, OpCvt32Fto64F, OpCvt64Fto32F,
		OpCvtBoolToUint8,
		OpZeroExt8to16, OpZeroExt8to32, OpZeroExt8to64, OpZeroExt16to32,
		OpZeroExt16to64, OpZeroExt32to64, OpSignExt8to16, OpSignExt8to32,
		OpSignExt8to64, OpSignExt16to32, OpSignExt16to64, OpSignExt32to64,
		// bit
		OpCtz8, OpCtz16, OpCtz32, OpCtz64,
		// mask
		OpSlicemask,
		// safety check
		OpIsNonNil,
		// not
		OpNot:
		lt1 := t.getLatticeCell(val.Args[0])

		if lt1.tag == constant {
			// here we take a shortcut by reusing generic rules to fold constants
			t.latticeCells[val] = computeLattice(t.f, val, lt1.val)
		} else {
			// propagate top/bottom of the argument unchanged
			t.latticeCells[val] = lattice{lt1.tag, nil}
		}
	// fold 2-input operations
	case
		// add
		OpAdd64, OpAdd32, OpAdd16, OpAdd8,
		OpAdd32F, OpAdd64F,
		// sub
		OpSub64, OpSub32, OpSub16, OpSub8,
		OpSub32F, OpSub64F,
		// mul
		OpMul64, OpMul32, OpMul16, OpMul8,
		OpMul32F, OpMul64F,
		// div
		OpDiv32F, OpDiv64F,
		OpDiv8, OpDiv16, OpDiv32, OpDiv64,
		OpDiv8u, OpDiv16u, OpDiv32u, OpDiv64u, //TODO: support div128u
		// mod
		OpMod8, OpMod16, OpMod32, OpMod64,
		OpMod8u, OpMod16u, OpMod32u, OpMod64u,
		// compare
		OpEq64, OpEq32, OpEq16, OpEq8,
		OpEq32F, OpEq64F,
		OpLess64, OpLess32, OpLess16, OpLess8,
		OpLess64U, OpLess32U, OpLess16U, OpLess8U,
		OpLess32F, OpLess64F,
		OpLeq64, OpLeq32, OpLeq16, OpLeq8,
		OpLeq64U, OpLeq32U, OpLeq16U, OpLeq8U,
		OpLeq32F, OpLeq64F,
		OpEqB, OpNeqB,
		// shift
		OpLsh64x64, OpRsh64x64, OpRsh64Ux64, OpLsh32x64,
		OpRsh32x64, OpRsh32Ux64, OpLsh16x64, OpRsh16x64,
		OpRsh16Ux64, OpLsh8x64, OpRsh8x64, OpRsh8Ux64,
		// safety check
		OpIsInBounds, OpIsSliceInBounds,
		// bit
		OpAnd8, OpAnd16, OpAnd32, OpAnd64,
		OpOr8, OpOr16, OpOr32, OpOr64,
		OpXor8, OpXor16, OpXor32, OpXor64:
		lt1 := t.getLatticeCell(val.Args[0])
		lt2 := t.getLatticeCell(val.Args[1])

		if lt1.tag == constant && lt2.tag == constant {
			// here we take a shortcut by reusing generic rules to fold constants
			t.latticeCells[val] = computeLattice(t.f, val, lt1.val, lt2.val)
		} else {
			// Bottom is absorbing; otherwise stay optimistically Top until
			// both arguments are known.
			if lt1.tag == bottom || lt2.tag == bottom {
				t.latticeCells[val] = lattice{bottom, nil}
			} else {
				t.latticeCells[val] = lattice{top, nil}
			}
		}
	default:
		// Any other type of value cannot be a constant, they are always worst(Bottom)
	}
}
   481  
// propagate propagates constants facts through CFG. If the block has single successor,
// add the successor anyway. If the block has multiple successors, only add the
// branch destination corresponding to lattice value of condition value.
func (t *worklist) propagate(block *Block) {
	switch block.Kind {
	case BlockExit, BlockRet, BlockRetJmp, BlockInvalid:
		// control flow ends, do nothing then
		break
	case BlockDefer:
		// we know nothing about control flow, add all branch destinations
		t.edges = append(t.edges, block.Succs...)
	case BlockFirst:
		fallthrough // always takes the first branch
	case BlockPlain:
		t.edges = append(t.edges, block.Succs[0])
	case BlockIf, BlockJumpTable:
		cond := block.ControlValues()[0]
		condLattice := t.getLatticeCell(cond)
		if condLattice.tag == bottom {
			// we know nothing about control flow, add all branch destinations
			t.edges = append(t.edges, block.Succs...)
		} else if condLattice.tag == constant {
			// add branchIdx destinations depends on its condition
			var branchIdx int64
			if block.Kind == BlockIf {
				// boolean condition: AuxInt 1 (true) selects Succs[0],
				// AuxInt 0 (false) selects Succs[1]
				branchIdx = 1 - condLattice.val.AuxInt
			} else {
				branchIdx = condLattice.val.AuxInt
				// a jump-table index outside the successor range can only
				// occur in unreachable code
				if branchIdx < 0 || branchIdx >= int64(len(block.Succs)) {
					// unreachable code, do nothing then
					break
				}
			}
			t.edges = append(t.edges, block.Succs[branchIdx])
		} else {
			// condition value is not visited yet, don't propagate it now
		}
	default:
		t.f.Fatalf("All kind of block should be processed above.")
	}
}
   523  
   524  // rewireSuccessor rewires corresponding successors according to constant value
   525  // discovered by previous analysis. As the result, some successors become unreachable
   526  // and thus can be removed in further deadcode phase
   527  func rewireSuccessor(block *Block, constVal *Value) bool {
   528  	switch block.Kind {
   529  	case BlockIf:
   530  		block.removeEdge(int(constVal.AuxInt))
   531  		block.Kind = BlockPlain
   532  		block.Likely = BranchUnknown
   533  		block.ResetControls()
   534  		return true
   535  	case BlockJumpTable:
   536  		// Remove everything but the known taken branch.
   537  		idx := int(constVal.AuxInt)
   538  		if idx < 0 || idx >= len(block.Succs) {
   539  			// This can only happen in unreachable code,
   540  			// as an invariant of jump tables is that their
   541  			// input index is in range.
   542  			// See issue 64826.
   543  			return false
   544  		}
   545  		block.swapSuccessorsByIdx(0, idx)
   546  		for len(block.Succs) > 1 {
   547  			block.removeEdge(1)
   548  		}
   549  		block.Kind = BlockPlain
   550  		block.Likely = BranchUnknown
   551  		block.ResetControls()
   552  		return true
   553  	default:
   554  		return false
   555  	}
   556  }
   557  
   558  // replaceConst will replace non-constant values that have been proven by sccp
   559  // to be constants.
   560  func (t *worklist) replaceConst() (int, int) {
   561  	constCnt, rewireCnt := 0, 0
   562  	for val, lt := range t.latticeCells {
   563  		if lt.tag == constant {
   564  			if !isConst(val) {
   565  				if t.f.pass.debug > 0 {
   566  					t.f.Warnl(val.Pos, "Replace %v with %v", val.LongString(), lt.val.LongString())
   567  				}
   568  				val.reset(lt.val.Op)
   569  				val.AuxInt = lt.val.AuxInt
   570  				constCnt++
   571  			}
   572  			// If const value controls this block, rewires successors according to its value
   573  			ctrlBlock := t.defBlock[val]
   574  			for _, block := range ctrlBlock {
   575  				if rewireSuccessor(block, lt.val) {
   576  					rewireCnt++
   577  					if t.f.pass.debug > 0 {
   578  						t.f.Warnl(block.Pos, "Rewire %v %v successors", block.Kind, block)
   579  					}
   580  				}
   581  			}
   582  		}
   583  	}
   584  	return constCnt, rewireCnt
   585  }
   586  

View as plain text