Source file src/cmd/compile/internal/inline/inl.go

     1  // Copyright 2011 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  //
     5  // The inlining facility makes 2 passes: first CanInline determines which
     6  // functions are suitable for inlining, and for those that are it
     7  // saves a copy of the body. Then InlineCalls walks each function body to
     8  // expand calls to inlinable functions.
     9  //
    10  // The Debug.l flag controls the aggressiveness. Note that main() swaps levels 0 and 1,
    11  // so 1 is the default and -l disables inlining. Additional levels (beyond -l) may be buggy
    12  // and are not supported.
    13  //      0: disabled
    14  //      1: 80-node leaf functions, oneliners, panic, lazy typechecking (default)
    15  //      2: (unassigned)
    16  //      3: (unassigned)
    17  //      4: allow non-leaf functions
    18  //
    19  // At some point this may get another default and become switch-offable with -N.
    20  //
    21  // The -d typecheckinl flag enables early typechecking of all imported bodies,
    22  // which is useful to flush out bugs.
    23  //
    24  // The Debug.m flag enables diagnostic output. A single -m is useful for verifying
    25  // which calls get inlined or not; higher values are for debugging and may go away at any point.
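//
// Illustrative example (not part of the compiler; the budget figures are the
// defaults documented above): a small leaf function such as
//
//	func add(a, b int) int { return a + b }
//
// easily fits the 80-node budget and is expanded at its call sites, while a
// function marked //go:noinline, or one whose body exceeds the budget, is
// left alone. Building with -gcflags=-m reports which functions can be
// inlined and which calls are expanded.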
    26  
    27  package inline
    28  
    29  import (
    30  	"fmt"
    31  	"go/constant"
    32  	"internal/buildcfg"
    33  	"strconv"
    34  
    35  	"cmd/compile/internal/base"
    36  	"cmd/compile/internal/inline/inlheur"
    37  	"cmd/compile/internal/ir"
    38  	"cmd/compile/internal/logopt"
    39  	"cmd/compile/internal/pgoir"
    40  	"cmd/compile/internal/typecheck"
    41  	"cmd/compile/internal/types"
    42  	"cmd/internal/obj"
    43  	"cmd/internal/pgo"
    44  )
    45  
    46  // Inlining budget parameters, gathered in one place
    47  const (
    48  	inlineMaxBudget       = 80
    49  	inlineExtraAppendCost = 0
    50  	// The default cost means that, in practice, inlining happens only if there's at most one call. -l=4 overrides this by using 1 instead.
    51  	inlineExtraCallCost  = 57              // 57 was benchmarked to provide the most benefit with no bad surprises; see https://github.com/golang/go/issues/19348#issuecomment-439370742
    52  	inlineExtraPanicCost = 1               // do not penalize inlining panics.
    53  	inlineExtraThrowCost = inlineMaxBudget // with current (2018-05/1.11) code, inlining runtime.throw does not help.
    54  
    55  	inlineBigFunctionNodes   = 5000 // Functions with this many nodes are considered "big".
    56  	inlineBigFunctionMaxCost = 20   // Max cost of inlinee when inlining into a "big" function.
    57  )
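
// Worked example of how these parameters combine (illustrative only): each IR
// node normally costs 1, so a body of 60 nodes fits the default budget of 80.
// Adding one call to a function that cannot itself be inlined charges an extra
// inlineExtraCallCost (57), pushing the total to 117 and disqualifying the
// candidate, whereas a call to panic only adds inlineExtraPanicCost (1). When
// the caller is "big" (more than inlineBigFunctionNodes nodes), callees are
// instead capped at inlineBigFunctionMaxCost.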
    58  
    59  var (
    60  	// Set of all hot callee nodes.
    61  	// TODO(prattmic): Make this non-global.
    62  	candHotCalleeMap = make(map[*pgoir.IRNode]struct{})
    63  
    64  	// Set of functions that contain hot call sites.
    65  	hasHotCall = make(map[*ir.Func]struct{})
    66  
    67  	// Set of all hot call sites. CallSiteInfo.Callee is always nil.
    68  	// TODO(prattmic): Make this non-global.
    69  	candHotEdgeMap = make(map[pgoir.CallSiteInfo]struct{})
    70  
    71  	// Threshold in percentage for hot callsite inlining.
    72  	inlineHotCallSiteThresholdPercent float64
    73  
    74  	// Threshold in CDF percentage for hot callsite inlining,
    75  	// that is, for a threshold of X the hottest callsites that
    76  	// make up the top X% of total edge weight will be
    77  	// considered hot for inlining candidates.
    78  	inlineCDFHotCallSiteThresholdPercent = float64(99)
    79  
    80  	// Budget increased due to hotness.
    81  	inlineHotMaxBudget int32 = 2000
    82  )
    83  
    84  func IsPgoHotFunc(fn *ir.Func, profile *pgoir.Profile) bool {
    85  	if profile == nil {
    86  		return false
    87  	}
    88  	if n, ok := profile.WeightedCG.IRNodes[ir.LinkFuncName(fn)]; ok {
    89  		_, ok := candHotCalleeMap[n]
    90  		return ok
    91  	}
    92  	return false
    93  }
    94  
    95  func HasPgoHotInline(fn *ir.Func) bool {
    96  	_, has := hasHotCall[fn]
    97  	return has
    98  }
    99  
   100  // PGOInlinePrologue records the hot call sites from the PGO-weighted IR graph.
   101  func PGOInlinePrologue(p *pgoir.Profile) {
   102  	if base.Debug.PGOInlineCDFThreshold != "" {
   103  		if s, err := strconv.ParseFloat(base.Debug.PGOInlineCDFThreshold, 64); err == nil && s >= 0 && s <= 100 {
   104  			inlineCDFHotCallSiteThresholdPercent = s
   105  		} else {
   106  			base.Fatalf("invalid PGOInlineCDFThreshold, must be between 0 and 100")
   107  		}
   108  	}
   109  	var hotCallsites []pgo.NamedCallEdge
   110  	inlineHotCallSiteThresholdPercent, hotCallsites = hotNodesFromCDF(p)
   111  	if base.Debug.PGODebug > 0 {
   112  		fmt.Printf("hot-callsite-thres-from-CDF=%v\n", inlineHotCallSiteThresholdPercent)
   113  	}
   114  
   115  	if x := base.Debug.PGOInlineBudget; x != 0 {
   116  		inlineHotMaxBudget = int32(x)
   117  	}
   118  
   119  	for _, n := range hotCallsites {
   120  		// mark inlineable callees from hot edges
   121  		if callee := p.WeightedCG.IRNodes[n.CalleeName]; callee != nil {
   122  			candHotCalleeMap[callee] = struct{}{}
   123  		}
   124  		// mark hot call sites
   125  		if caller := p.WeightedCG.IRNodes[n.CallerName]; caller != nil && caller.AST != nil {
   126  			csi := pgoir.CallSiteInfo{LineOffset: n.CallSiteOffset, Caller: caller.AST}
   127  			candHotEdgeMap[csi] = struct{}{}
   128  		}
   129  	}
   130  
   131  	if base.Debug.PGODebug >= 3 {
   132  		fmt.Printf("hot-cg before inline in dot format:\n")
   133  		p.PrintWeightedCallGraphDOT(inlineHotCallSiteThresholdPercent)
   134  	}
   135  }
   136  
   137  // hotNodesFromCDF computes an edge weight threshold and the list of hot
   138  // nodes that make up the given percentage of the CDF. The threshold, as
   139  // a percent, is the lower bound of weight for nodes to be considered hot
   140  // (currently only used in debug prints; in case of equal weights,
   141  // comparing with the threshold may not accurately reflect which nodes are
   142  // considered hot).
   143  func hotNodesFromCDF(p *pgoir.Profile) (float64, []pgo.NamedCallEdge) {
   144  	cum := int64(0)
   145  	for i, n := range p.NamedEdgeMap.ByWeight {
   146  		w := p.NamedEdgeMap.Weight[n]
   147  		cum += w
   148  		if pgo.WeightInPercentage(cum, p.TotalWeight) > inlineCDFHotCallSiteThresholdPercent {
   149  			// nodes[:i+1] to include the very last node that makes it to go over the threshold.
   150  			// (Say, if the CDF threshold is 50% and one hot node takes 60% of weight, we want to
   151  			// include that node instead of excluding it.)
   152  			return pgo.WeightInPercentage(w, p.TotalWeight), p.NamedEdgeMap.ByWeight[:i+1]
   153  		}
   154  	}
   155  	return 0, p.NamedEdgeMap.ByWeight
   156  }
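
// Worked example for hotNodesFromCDF (assumed weights, for illustration): with
// edges of weight 600, 300, 80 and 20 out of a total weight of 1000 and the
// default CDF threshold of 99%, the cumulative percentages are 60%, 90%, 98%
// and 100%. The fourth edge is the first to push the CDF past 99%, so all four
// edges are returned as hot and the reported threshold is that edge's own
// share of the total weight, 2%.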
   157  
   158  // CanInlineFuncs computes whether each function in a batch is inlinable.
   159  func CanInlineFuncs(funcs []*ir.Func, profile *pgoir.Profile) {
   160  	if profile != nil {
   161  		PGOInlinePrologue(profile)
   162  	}
   163  
   164  	if base.Flag.LowerL == 0 {
   165  		return
   166  	}
   167  
   168  	ir.VisitFuncsBottomUp(funcs, func(funcs []*ir.Func, recursive bool) {
   169  		numfns := numNonClosures(funcs)
   170  
   171  		for _, fn := range funcs {
   172  			if !recursive || numfns > 1 {
   173  				// We allow inlining if there is no
   174  				// recursion, or the recursion cycle is
   175  				// across more than one function.
   176  				CanInline(fn, profile)
   177  			} else {
   178  				if base.Flag.LowerM > 1 && fn.OClosure == nil {
   179  					fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(fn), fn.Nname)
   180  				}
   181  			}
   182  			if inlheur.Enabled() {
   183  				analyzeFuncProps(fn, profile)
   184  			}
   185  		}
   186  	})
   187  }
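
// Example (illustrative): a directly recursive function such as
//
//	func fact(n int) int {
//		if n <= 1 {
//			return 1
//		}
//		return n * fact(n-1)
//	}
//
// is skipped (and reported as "cannot inline ...: recursive" at higher -m
// levels), while functions that form a recursion cycle spanning more than one
// function are still analyzed by CanInline; inlining through such a cycle is
// later cut off by the repeated-cycle check in canInlineCallExpr.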
   188  
   189  // GarbageCollectUnreferencedHiddenClosures makes a pass over all the
   190  // top-level (non-hidden-closure) functions looking for nested closure
   191  // functions that are reachable, then sweeps through the Target.Funcs
   192  // list and marks any non-reachable hidden closure function as dead.
   193  // See issues #59404 and #59638 for more context.
   194  func GarbageCollectUnreferencedHiddenClosures() {
   195  
   196  	liveFuncs := make(map[*ir.Func]bool)
   197  
   198  	var markLiveFuncs func(fn *ir.Func)
   199  	markLiveFuncs = func(fn *ir.Func) {
   200  		if liveFuncs[fn] {
   201  			return
   202  		}
   203  		liveFuncs[fn] = true
   204  		ir.Visit(fn, func(n ir.Node) {
   205  			if clo, ok := n.(*ir.ClosureExpr); ok {
   206  				markLiveFuncs(clo.Func)
   207  			}
   208  		})
   209  	}
   210  
   211  	for i := 0; i < len(typecheck.Target.Funcs); i++ {
   212  		fn := typecheck.Target.Funcs[i]
   213  		if fn.IsHiddenClosure() {
   214  			continue
   215  		}
   216  		markLiveFuncs(fn)
   217  	}
   218  
   219  	for i := 0; i < len(typecheck.Target.Funcs); i++ {
   220  		fn := typecheck.Target.Funcs[i]
   221  		if !fn.IsHiddenClosure() {
   222  			continue
   223  		}
   224  		if fn.IsDeadcodeClosure() {
   225  			continue
   226  		}
   227  		if liveFuncs[fn] {
   228  			continue
   229  		}
   230  		fn.SetIsDeadcodeClosure(true)
   231  		if base.Flag.LowerM > 2 {
   232  			fmt.Printf("%v: unreferenced closure %v marked as dead\n", ir.Line(fn), fn)
   233  		}
   234  		if fn.Inl != nil && fn.LSym == nil {
   235  			ir.InitLSym(fn, true)
   236  		}
   237  	}
   238  }
   239  
   240  // inlineBudget determines the max budget for function 'fn' prior to
   241  // analyzing the hairiness of the body of 'fn'. We pass in the pgo
   242  // profile if available (which can change the budget), also a
   243  // 'relaxed' flag, which expands the budget slightly to allow for the
   244  // possibility that a call to the function might have its score
   245  // adjusted downwards. If 'verbose' is set, then print a remark where
   246  // we boost the budget due to PGO.
   247  func inlineBudget(fn *ir.Func, profile *pgoir.Profile, relaxed bool, verbose bool) int32 {
   248  	// Update the budget for profile-guided inlining.
   249  	budget := int32(inlineMaxBudget)
   250  	if IsPgoHotFunc(fn, profile) {
   251  		budget = inlineHotMaxBudget
   252  		if verbose {
   253  			fmt.Printf("hot-node enabled increased budget=%v for func=%v\n", budget, ir.PkgFuncName(fn))
   254  		}
   255  	}
   256  	if relaxed {
   257  		budget += inlheur.BudgetExpansion(inlineMaxBudget)
   258  	}
   259  	return budget
   260  }
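
// For example, using the values above: an ordinary function gets a budget of
// inlineMaxBudget (80); a PGO-hot function gets inlineHotMaxBudget (2000,
// unless overridden via base.Debug.PGOInlineBudget); and when the new inliner
// heuristics are enabled, the budget is further expanded by
// inlheur.BudgetExpansion(inlineMaxBudget) to allow for call sites whose
// scores may be adjusted downwards.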
   261  
   262  // CanInline determines whether fn is inlineable.
   263  // If so, CanInline saves copies of fn.Body and fn.Dcl in fn.Inl.
   264  // fn and fn.Body will already have been typechecked.
   265  func CanInline(fn *ir.Func, profile *pgoir.Profile) {
   266  	if fn.Nname == nil {
   267  		base.Fatalf("CanInline no nname %+v", fn)
   268  	}
   269  
   270  	var reason string // reason, if any, that the function was not inlined
   271  	if base.Flag.LowerM > 1 || logopt.Enabled() {
   272  		defer func() {
   273  			if reason != "" {
   274  				if base.Flag.LowerM > 1 {
   275  					fmt.Printf("%v: cannot inline %v: %s\n", ir.Line(fn), fn.Nname, reason)
   276  				}
   277  				if logopt.Enabled() {
   278  					logopt.LogOpt(fn.Pos(), "cannotInlineFunction", "inline", ir.FuncName(fn), reason)
   279  				}
   280  			}
   281  		}()
   282  	}
   283  
   284  	reason = InlineImpossible(fn)
   285  	if reason != "" {
   286  		return
   287  	}
   288  	if fn.Typecheck() == 0 {
   289  		base.Fatalf("CanInline on non-typechecked function %v", fn)
   290  	}
   291  
   292  	n := fn.Nname
   293  	if n.Func.InlinabilityChecked() {
   294  		return
   295  	}
   296  	defer n.Func.SetInlinabilityChecked(true)
   297  
   298  	cc := int32(inlineExtraCallCost)
   299  	if base.Flag.LowerL == 4 {
   300  		cc = 1 // this appears to yield better performance than 0.
   301  	}
   302  
   303  	// Use a "relaxed" inline budget if the new inliner is enabled.
   304  	relaxed := inlheur.Enabled()
   305  
   306  	// Compute the inline budget for this func.
   307  	budget := inlineBudget(fn, profile, relaxed, base.Debug.PGODebug > 0)
   308  
   309  	// At this point in the game the function we're looking at may
   310  	// have "stale" autos, vars that still appear in the Dcl list, but
   311  	// which no longer have any uses in the function body (due to
   312  	// elimination by deadcode). We'd like to exclude these dead vars
   313  	// when creating the "Inline.Dcl" field below; to accomplish this,
   314  	// the hairyVisitor below builds up a map of used/referenced
   315  	// locals, and we use this map to produce a pruned Inline.Dcl
   316  	// list. See issue 25459 for more context.
   317  
   318  	visitor := hairyVisitor{
   319  		curFunc:       fn,
   320  		isBigFunc:     IsBigFunc(fn),
   321  		budget:        budget,
   322  		maxBudget:     budget,
   323  		extraCallCost: cc,
   324  		profile:       profile,
   325  	}
   326  	if visitor.tooHairy(fn) {
   327  		reason = visitor.reason
   328  		return
   329  	}
   330  
   331  	n.Func.Inl = &ir.Inline{
   332  		Cost:            budget - visitor.budget,
   333  		Dcl:             pruneUnusedAutos(n.Func.Dcl, &visitor),
   334  		HaveDcl:         true,
   335  		CanDelayResults: canDelayResults(fn),
   336  	}
   337  	if base.Flag.LowerM != 0 || logopt.Enabled() {
   338  		noteInlinableFunc(n, fn, budget-visitor.budget)
   339  	}
   340  }
   341  
   342  // noteInlinableFunc issues a message to the user that the specified
   343  // function is inlinable.
   344  func noteInlinableFunc(n *ir.Name, fn *ir.Func, cost int32) {
   345  	if base.Flag.LowerM > 1 {
   346  		fmt.Printf("%v: can inline %v with cost %d as: %v { %v }\n", ir.Line(fn), n, cost, fn.Type(), ir.Nodes(fn.Body))
   347  	} else if base.Flag.LowerM != 0 {
   348  		fmt.Printf("%v: can inline %v\n", ir.Line(fn), n)
   349  	}
   350  	// JSON optimization log output.
   351  	if logopt.Enabled() {
   352  		logopt.LogOpt(fn.Pos(), "canInlineFunction", "inline", ir.FuncName(fn), fmt.Sprintf("cost: %d", cost))
   353  	}
   354  }
   355  
   356  // InlineImpossible returns a non-empty reason string if fn is impossible to
   357  // inline regardless of cost or contents.
   358  func InlineImpossible(fn *ir.Func) string {
   359  	var reason string // reason, if any, that the function can not be inlined.
   360  	if fn.Nname == nil {
   361  		reason = "no name"
   362  		return reason
   363  	}
   364  
   365  	// If marked "go:noinline", don't inline.
   366  	if fn.Pragma&ir.Noinline != 0 {
   367  		reason = "marked go:noinline"
   368  		return reason
   369  	}
   370  
   371  	// If marked "go:norace" and -race compilation, don't inline.
   372  	if base.Flag.Race && fn.Pragma&ir.Norace != 0 {
   373  		reason = "marked go:norace with -race compilation"
   374  		return reason
   375  	}
   376  
   377  	// If marked "go:nocheckptr" and -d checkptr compilation, don't inline.
   378  	if base.Debug.Checkptr != 0 && fn.Pragma&ir.NoCheckPtr != 0 {
   379  		reason = "marked go:nocheckptr"
   380  		return reason
   381  	}
   382  
   383  	// If marked "go:cgo_unsafe_args", don't inline, since the function
   384  	// makes assumptions about its argument frame layout.
   385  	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
   386  		reason = "marked go:cgo_unsafe_args"
   387  		return reason
   388  	}
   389  
   390  	// If marked as "go:uintptrkeepalive", don't inline, since the keep
   391  	// alive information is lost during inlining.
   392  	//
   393  	// TODO(prattmic): This is handled on calls during escape analysis,
   394  	// which is after inlining. Move prior to inlining so the keep-alive is
   395  	// maintained after inlining.
   396  	if fn.Pragma&ir.UintptrKeepAlive != 0 {
   397  		reason = "marked as having a keep-alive uintptr argument"
   398  		return reason
   399  	}
   400  
   401  	// If marked as "go:uintptrescapes", don't inline, since the escape
   402  	// information is lost during inlining.
   403  	if fn.Pragma&ir.UintptrEscapes != 0 {
   404  		reason = "marked as having an escaping uintptr argument"
   405  		return reason
   406  	}
   407  
   408  	// The nowritebarrierrec checker currently works at function
   409  	// granularity, so inlining yeswritebarrierrec functions can confuse it
   410  	// (#22342). As a workaround, disallow inlining them for now.
   411  	if fn.Pragma&ir.Yeswritebarrierrec != 0 {
   412  		reason = "marked go:yeswritebarrierrec"
   413  		return reason
   414  	}
   415  
   416  	// If a local function has no fn.Body (it is defined outside of Go), we cannot inline it.
   417  	// Imported functions don't have fn.Body but might have an inline body in fn.Inl.
   418  	if len(fn.Body) == 0 && !typecheck.HaveInlineBody(fn) {
   419  		reason = "no function body"
   420  		return reason
   421  	}
   422  
   423  	return ""
   424  }
   425  
   426  // canDelayResults reports whether inlined calls to fn can delay
   427  // declaring the result parameter until the "return" statement.
   428  func canDelayResults(fn *ir.Func) bool {
   429  	// We can delay declaring+initializing result parameters if:
   430  	// (1) there's exactly one "return" statement in the inlined function;
   431  	// (2) it's not an empty return statement (#44355); and
   432  	// (3) the result parameters aren't named.
   433  
   434  	nreturns := 0
   435  	ir.VisitList(fn.Body, func(n ir.Node) {
   436  		if n, ok := n.(*ir.ReturnStmt); ok {
   437  			nreturns++
   438  			if len(n.Results) == 0 {
   439  				nreturns++ // empty return statement (case 2)
   440  			}
   441  		}
   442  	})
   443  
   444  	if nreturns != 1 {
   445  		return false // not exactly one return statement (case 1)
   446  	}
   447  
   448  	// Check that no result parameter is named (case 3).
   449  	for _, param := range fn.Type().Results() {
   450  		if sym := param.Sym; sym != nil && !sym.IsBlank() {
   451  			return false // found a named result parameter (case 3)
   452  		}
   453  	}
   454  
   455  	return true
   456  }
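
// For example (illustrative):
//
//	func a() int { return 42 }              // one plain return, unnamed result: can delay
//	func b() (r int) { return 42 }          // named result: cannot delay
//	func c() (int, bool) { return 1, true } // still one plain return, unnamed: can delay
//
// Functions with several return statements, an empty return (#44355), or
// named results do not qualify and have their result parameters declared and
// initialized up front when inlined.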
   457  
   458  // hairyVisitor visits a function body to determine its inlining
   459  // hairiness and whether or not it can be inlined.
   460  type hairyVisitor struct {
   461  	// This is needed to access the current caller in the doNode function.
   462  	curFunc       *ir.Func
   463  	isBigFunc     bool
   464  	budget        int32
   465  	maxBudget     int32
   466  	reason        string
   467  	extraCallCost int32
   468  	usedLocals    ir.NameSet
   469  	do            func(ir.Node) bool
   470  	profile       *pgoir.Profile
   471  }
   472  
   473  func (v *hairyVisitor) tooHairy(fn *ir.Func) bool {
   474  	v.do = v.doNode // cache closure
   475  	if ir.DoChildren(fn, v.do) {
   476  		return true
   477  	}
   478  	if v.budget < 0 {
   479  		v.reason = fmt.Sprintf("function too complex: cost %d exceeds budget %d", v.maxBudget-v.budget, v.maxBudget)
   480  		return true
   481  	}
   482  	return false
   483  }
   484  
   485  // doNode visits n and its children, updates the state in v, and returns true if
   486  // n makes the current function too hairy for inlining.
   487  func (v *hairyVisitor) doNode(n ir.Node) bool {
   488  	if n == nil {
   489  		return false
   490  	}
   491  opSwitch:
   492  	switch n.Op() {
   493  	// Call is okay if inlinable and we have the budget for the body.
   494  	case ir.OCALLFUNC:
   495  		n := n.(*ir.CallExpr)
   496  		// Functions that call runtime.getcaller{pc,sp} can not be inlined
   497  		// because getcaller{pc,sp} expect a pointer to the caller's first argument.
   498  		//
   499  		// runtime.throw is a "cheap call" like panic in normal code.
   500  		var cheap bool
   501  		if n.Fun.Op() == ir.ONAME {
   502  			name := n.Fun.(*ir.Name)
   503  			if name.Class == ir.PFUNC {
   504  				switch fn := types.RuntimeSymName(name.Sym()); fn {
   505  				case "getcallerpc", "getcallersp":
   506  					v.reason = "call to " + fn
   507  					return true
   508  				case "throw":
   509  					v.budget -= inlineExtraThrowCost
   510  					break opSwitch
   511  				case "panicrangeexit":
   512  					cheap = true
   513  				}
   514  				// Special case for reflect.noescape. It does just type
   515  				// conversions to appease the escape analysis, and doesn't
   516  				// generate code.
   517  				if types.ReflectSymName(name.Sym()) == "noescape" {
   518  					cheap = true
   519  				}
   520  			}
   521  			// Special case for coverage counter updates; although
   522  			// these correspond to real operations, we treat them as
   523  			// zero cost for the moment. This is due to the existence
   524  			// of tests that are sensitive to inlining-- if the
   525  			// insertion of coverage instrumentation happens to tip a
   526  			// given function over the threshold and move it from
   527  			// "inlinable" to "not-inlinable", this can cause changes
   528  			// in allocation behavior, which can then result in test
   529  			// failures (a good example is the TestAllocations in
   530  			// crypto/ed25519).
   531  			if isAtomicCoverageCounterUpdate(n) {
   532  				return false
   533  			}
   534  		}
   535  		if n.Fun.Op() == ir.OMETHEXPR {
   536  			if meth := ir.MethodExprName(n.Fun); meth != nil {
   537  				if fn := meth.Func; fn != nil {
   538  					s := fn.Sym()
   539  					if types.RuntimeSymName(s) == "heapBits.nextArena" {
   540  						// Special case: explicitly allow mid-stack inlining of
   541  						// runtime.heapBits.next even though it calls slow-path
   542  						// runtime.heapBits.nextArena.
   543  						cheap = true
   544  					}
   545  					// Special case: on architectures that can do unaligned loads,
   546  					// explicitly mark encoding/binary methods as cheap,
   547  					// because in practice they are, even though our inlining
   548  					// budgeting system does not see that. See issue 42958.
   549  					if base.Ctxt.Arch.CanMergeLoads && s.Pkg.Path == "encoding/binary" {
   550  						switch s.Name {
   551  						case "littleEndian.Uint64", "littleEndian.Uint32", "littleEndian.Uint16",
   552  							"bigEndian.Uint64", "bigEndian.Uint32", "bigEndian.Uint16",
   553  							"littleEndian.PutUint64", "littleEndian.PutUint32", "littleEndian.PutUint16",
   554  							"bigEndian.PutUint64", "bigEndian.PutUint32", "bigEndian.PutUint16",
   555  							"littleEndian.AppendUint64", "littleEndian.AppendUint32", "littleEndian.AppendUint16",
   556  							"bigEndian.AppendUint64", "bigEndian.AppendUint32", "bigEndian.AppendUint16":
   557  							cheap = true
   558  						}
   559  					}
   560  				}
   561  			}
   562  		}
   563  
   564  		if n.Fun.Op() == ir.ONAME {
   565  			name := n.Fun.(*ir.Name)
   566  			if name.Class == ir.PFUNC {
   567  				// Special case: on architectures that can do unaligned loads,
   568  				// explicitly mark internal/byteorder methods as cheap,
   569  				// because in practice they are, even though our inlining
   570  				// budgeting system does not see that. See issue 42958.
   571  				if base.Ctxt.Arch.CanMergeLoads && name.Sym().Pkg.Path == "internal/byteorder" {
   572  					switch name.Sym().Name {
   573  					case "LeUint64", "LeUint32", "LeUint16",
   574  						"BeUint64", "BeUint32", "BeUint16",
   575  						"LePutUint64", "LePutUint32", "LePutUint16",
   576  						"BePutUint64", "BePutUint32", "BePutUint16",
   577  						"LeAppendUint64", "LeAppendUint32", "LeAppendUint16",
   578  						"BeAppendUint64", "BeAppendUint32", "BeAppendUint16":
   579  						cheap = true
   580  					}
   581  				}
   582  			}
   583  		}
   584  
   585  		if cheap {
   586  			break // treat like any other node, that is, cost of 1
   587  		}
   588  
   589  		if ir.IsIntrinsicCall(n) {
   590  			// Treat like any other node.
   591  			break
   592  		}
   593  
   594  		if callee := inlCallee(v.curFunc, n.Fun, v.profile); callee != nil && typecheck.HaveInlineBody(callee) {
   595  			// Check whether we'd actually inline this call. Set
   596  			// log == false since we aren't actually doing inlining
   597  			// yet.
   598  			if ok, _, _ := canInlineCallExpr(v.curFunc, n, callee, v.isBigFunc, false); ok {
   599  				// mkinlcall would inline this call [1], so use
   600  				// the cost of the inline body as the cost of
   601  				// the call, as that is what will actually
   602  				// appear in the code.
   603  				//
   604  				// [1] This is almost a perfect match to the
   605  				// mkinlcall logic, except that
   606  				// canInlineCallExpr considers inlining cycles
   607  				// by looking at what has already been inlined.
   608  				// Since we haven't done any inlining yet we
   609  				// will miss those.
   610  				v.budget -= callee.Inl.Cost
   611  				break
   612  			}
   613  		}
   614  
   615  		// Call cost for non-leaf inlining.
   616  		v.budget -= v.extraCallCost
   617  
   618  	case ir.OCALLMETH:
   619  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   620  
   621  	// Things that are too hairy, irrespective of the budget
   622  	case ir.OCALL, ir.OCALLINTER:
   623  		// Call cost for non-leaf inlining.
   624  		v.budget -= v.extraCallCost
   625  
   626  	case ir.OPANIC:
   627  		n := n.(*ir.UnaryExpr)
   628  		if n.X.Op() == ir.OCONVIFACE && n.X.(*ir.ConvExpr).Implicit() {
   629  			// Hack to keep reflect.flag.mustBe inlinable for TestIntendedInlining.
   630  			// Before CL 284412, these conversions were introduced later in the
   631  			// compiler, so they didn't count against inlining budget.
   632  			v.budget++
   633  		}
   634  		v.budget -= inlineExtraPanicCost
   635  
   636  	case ir.ORECOVER:
   637  		base.FatalfAt(n.Pos(), "ORECOVER missed typecheck")
   638  	case ir.ORECOVERFP:
   639  		// recover matches the argument frame pointer to find
   640  		// the right panic value, so it needs an argument frame.
   641  		v.reason = "call to recover"
   642  		return true
   643  
   644  	case ir.OCLOSURE:
   645  		if base.Debug.InlFuncsWithClosures == 0 {
   646  			v.reason = "not inlining functions with closures"
   647  			return true
   648  		}
   649  
   650  		// TODO(danscales): Maybe make budget proportional to number of closure
   651  		// variables, e.g.:
   652  		//v.budget -= int32(len(n.(*ir.ClosureExpr).Func.ClosureVars) * 3)
   653  		// TODO(austin): However, if we're able to inline this closure into
   654  		// v.curFunc, then we actually pay nothing for the closure captures. We
   655  		// should try to account for that if we're going to account for captures.
   656  		v.budget -= 15
   657  
   658  	case ir.OGO, ir.ODEFER, ir.OTAILCALL:
   659  		v.reason = "unhandled op " + n.Op().String()
   660  		return true
   661  
   662  	case ir.OAPPEND:
   663  		v.budget -= inlineExtraAppendCost
   664  
   665  	case ir.OADDR:
   666  		n := n.(*ir.AddrExpr)
   667  		// Make "&s.f" cost 0 when f's offset is zero.
   668  		if dot, ok := n.X.(*ir.SelectorExpr); ok && (dot.Op() == ir.ODOT || dot.Op() == ir.ODOTPTR) {
   669  			if _, ok := dot.X.(*ir.Name); ok && dot.Selection.Offset == 0 {
   670  				v.budget += 2 // undo ir.OADDR+ir.ODOT/ir.ODOTPTR
   671  			}
   672  		}
   673  
   674  	case ir.ODEREF:
   675  		// *(*X)(unsafe.Pointer(&x)) is low-cost
   676  		n := n.(*ir.StarExpr)
   677  
   678  		ptr := n.X
   679  		for ptr.Op() == ir.OCONVNOP {
   680  			ptr = ptr.(*ir.ConvExpr).X
   681  		}
   682  		if ptr.Op() == ir.OADDR {
   683  			v.budget += 1 // undo half of default cost of ir.ODEREF+ir.OADDR
   684  		}
   685  
   686  	case ir.OCONVNOP:
   687  		// This doesn't produce code, but the children might.
   688  		v.budget++ // undo default cost
   689  
   690  	case ir.OFALL, ir.OTYPE:
   691  		// These nodes don't produce code; omit from inlining budget.
   692  		return false
   693  
   694  	case ir.OIF:
   695  		n := n.(*ir.IfStmt)
   696  		if ir.IsConst(n.Cond, constant.Bool) {
   697  			// This if and the condition cost nothing.
   698  			if doList(n.Init(), v.do) {
   699  				return true
   700  			}
   701  			if ir.BoolVal(n.Cond) {
   702  				return doList(n.Body, v.do)
   703  			} else {
   704  				return doList(n.Else, v.do)
   705  			}
   706  		}
   707  
   708  	case ir.ONAME:
   709  		n := n.(*ir.Name)
   710  		if n.Class == ir.PAUTO {
   711  			v.usedLocals.Add(n)
   712  		}
   713  
   714  	case ir.OBLOCK:
   715  		// The only OBLOCK we should see at this point is an empty one.
   716  		// In any event, let the visitList(n.List()) below take care of the statements,
   717  		// and don't charge for the OBLOCK itself. The ++ undoes the -- below.
   718  		v.budget++
   719  
   720  	case ir.OMETHVALUE, ir.OSLICELIT:
   721  		v.budget-- // Hack for toolstash -cmp.
   722  
   723  	case ir.OMETHEXPR:
   724  		v.budget++ // Hack for toolstash -cmp.
   725  
   726  	case ir.OAS2:
   727  		n := n.(*ir.AssignListStmt)
   728  
   729  		// Unified IR unconditionally rewrites:
   730  		//
   731  		//	a, b = f()
   732  		//
   733  		// into:
   734  		//
   735  		//	DCL tmp1
   736  		//	DCL tmp2
   737  		//	tmp1, tmp2 = f()
   738  		//	a, b = tmp1, tmp2
   739  		//
   740  		// so that it can insert implicit conversions as necessary. To
   741  		// minimize impact to the existing inlining heuristics (in
   742  		// particular, to avoid breaking the existing inlinability regress
   743  		// tests), we need to compensate for this here.
   744  		//
   745  		// See also identical logic in IsBigFunc.
   746  		if len(n.Rhs) > 0 {
   747  			if init := n.Rhs[0].Init(); len(init) == 1 {
   748  				if _, ok := init[0].(*ir.AssignListStmt); ok {
   749  					// 4 for each value, because each temporary variable now
   750  					// appears 3 times (DCL, LHS, RHS), plus an extra DCL node.
   751  					//
   752  					// 1 for the extra "tmp1, tmp2 = f()" assignment statement.
   753  					v.budget += 4*int32(len(n.Lhs)) + 1
   754  				}
   755  			}
   756  		}
   757  
   758  	case ir.OAS:
   759  		// Special case for coverage counter updates and coverage
   760  		// function registrations. Although these correspond to real
   761  		// operations, we treat them as zero cost for the moment. This
   762  		// is primarily due to the existence of tests that are
   763  		// sensitive to inlining-- if the insertion of coverage
   764  		// instrumentation happens to tip a given function over the
   765  		// threshold and move it from "inlinable" to "not-inlinable",
   766  		// this can cause changes in allocation behavior, which can
   767  		// then result in test failures (a good example is the
   768  		// TestAllocations in crypto/ed25519).
   769  		n := n.(*ir.AssignStmt)
   770  		if n.X.Op() == ir.OINDEX && isIndexingCoverageCounter(n.X) {
   771  			return false
   772  		}
   773  	}
   774  
   775  	v.budget--
   776  
   777  	// When debugging, don't stop early, to get full cost of inlining this function
   778  	if v.budget < 0 && base.Flag.LowerM < 2 && !logopt.Enabled() {
   779  		v.reason = "too expensive"
   780  		return true
   781  	}
   782  
   783  	return ir.DoChildren(n, v.do)
   784  }
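
// Illustrative examples of the accounting above: a call to a callee that has
// an inline body is charged that callee's Inl.Cost rather than the generic
// extraCallCost; a branch guarded by a constant condition, e.g.
//
//	if false {
//		expensiveStuff()
//	}
//
// charges only the branch that is actually taken (here, nothing); and &x.f,
// where x is a local variable and f is at offset zero, ends up free because
// the OADDR/ODOT pair is refunded.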
   785  
   786  // IsBigFunc reports whether fn is a "big" function.
   787  //
   788  // Note: The criteria for "big" is heuristic and subject to change.
   789  func IsBigFunc(fn *ir.Func) bool {
   790  	budget := inlineBigFunctionNodes
   791  	return ir.Any(fn, func(n ir.Node) bool {
   792  		// See logic in hairyVisitor.doNode, explaining unified IR's
   793  		// handling of "a, b = f()" assignments.
   794  		if n, ok := n.(*ir.AssignListStmt); ok && n.Op() == ir.OAS2 && len(n.Rhs) > 0 {
   795  			if init := n.Rhs[0].Init(); len(init) == 1 {
   796  				if _, ok := init[0].(*ir.AssignListStmt); ok {
   797  					budget += 4*len(n.Lhs) + 1
   798  				}
   799  			}
   800  		}
   801  
   802  		budget--
   803  		return budget <= 0
   804  	})
   805  }
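
// For example, a function body containing more than inlineBigFunctionNodes
// (5000) IR nodes is "big"; inlineCostOK then only allows callees costing at
// most inlineBigFunctionMaxCost (20) to be inlined into it.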
   806  
   807  // TryInlineCall returns an inlined call expression for call, or nil
   808  // if inlining is not possible.
   809  func TryInlineCall(callerfn *ir.Func, call *ir.CallExpr, bigCaller bool, profile *pgoir.Profile) *ir.InlinedCallExpr {
   810  	if base.Flag.LowerL == 0 {
   811  		return nil
   812  	}
   813  	if call.Op() != ir.OCALLFUNC {
   814  		return nil
   815  	}
   816  	if call.GoDefer || call.NoInline {
   817  		return nil
   818  	}
   819  
   820  	// Prevent inlining some reflect.Value methods when using checkptr,
   821  	// even when package reflect was compiled without it (#35073).
   822  	if base.Debug.Checkptr != 0 && call.Fun.Op() == ir.OMETHEXPR {
   823  		if method := ir.MethodExprName(call.Fun); method != nil {
   824  			switch types.ReflectSymName(method.Sym()) {
   825  			case "Value.UnsafeAddr", "Value.Pointer":
   826  				return nil
   827  			}
   828  		}
   829  	}
   830  
   831  	if base.Flag.LowerM > 3 {
   832  		fmt.Printf("%v:call to func %+v\n", ir.Line(call), call.Fun)
   833  	}
   834  	if ir.IsIntrinsicCall(call) {
   835  		return nil
   836  	}
   837  	if fn := inlCallee(callerfn, call.Fun, profile); fn != nil && typecheck.HaveInlineBody(fn) {
   838  		return mkinlcall(callerfn, call, fn, bigCaller)
   839  	}
   840  	return nil
   841  }
   842  
   843  // inlCallee takes a function-typed expression and returns the underlying *ir.Func
   844  // that it refers to, if statically known. Otherwise, it returns nil.
   845  func inlCallee(caller *ir.Func, fn ir.Node, profile *pgoir.Profile) (res *ir.Func) {
   846  	fn = ir.StaticValue(fn)
   847  	switch fn.Op() {
   848  	case ir.OMETHEXPR:
   849  		fn := fn.(*ir.SelectorExpr)
   850  		n := ir.MethodExprName(fn)
   851  		// Check that receiver type matches fn.X.
   852  		// TODO(mdempsky): Handle implicit dereference
   853  		// of pointer receiver argument?
   854  		if n == nil || !types.Identical(n.Type().Recv().Type, fn.X.Type()) {
   855  			return nil
   856  		}
   857  		return n.Func
   858  	case ir.ONAME:
   859  		fn := fn.(*ir.Name)
   860  		if fn.Class == ir.PFUNC {
   861  			return fn.Func
   862  		}
   863  	case ir.OCLOSURE:
   864  		fn := fn.(*ir.ClosureExpr)
   865  		c := fn.Func
   866  		if len(c.ClosureVars) != 0 && c.ClosureVars[0].Outer.Curfn != caller {
   867  			return nil // inliner doesn't support inlining across closure frames
   868  		}
   869  		CanInline(c, profile)
   870  		return c
   871  	}
   872  	return nil
   873  }
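
// The shapes inlCallee resolves, for illustration:
//
//	T.M(x)             // OMETHEXPR: method expression whose receiver type matches
//	f(x)               // ONAME of class PFUNC: a statically known function
//	func() { ... }()   // OCLOSURE: an immediately called closure literal
//
// Anything else (for example a function value stored in a struct field)
// returns nil and is not considered for inlining here.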
   874  
   875  var inlgen int
   876  
   877  // SSADumpInline gives the SSA back end a chance to dump the function
   878  // when producing output for debugging the compiler itself.
   879  var SSADumpInline = func(*ir.Func) {}
   880  
   881  // InlineCall allows the inliner implementation to be overridden.
   882  // A nil result is treated as an internal error by mkinlcall.
   883  var InlineCall = func(callerfn *ir.Func, call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr {
   884  	base.Fatalf("inline.InlineCall not overridden")
   885  	panic("unreachable")
   886  }
   887  
   888  // inlineCostOK returns true if call n from caller to callee is cheap enough to
   889  // inline. bigCaller indicates that caller is a big function.
   890  //
   891  // In addition to the "cost OK" boolean, it also returns
   892  //   - the "max cost" limit used to make the decision (which may differ depending on func size)
   893  //   - the score assigned to this specific callsite
   894  //   - whether the inlined function is "hot" according to PGO.
   895  func inlineCostOK(n *ir.CallExpr, caller, callee *ir.Func, bigCaller bool) (bool, int32, int32, bool) {
   896  	maxCost := int32(inlineMaxBudget)
   897  	if bigCaller {
   898  		// We use this to restrict inlining into very big functions.
   899  		// See issue 26546 and 17566.
   900  		maxCost = inlineBigFunctionMaxCost
   901  	}
   902  
   903  	metric := callee.Inl.Cost
   904  	if inlheur.Enabled() {
   905  		score, ok := inlheur.GetCallSiteScore(caller, n)
   906  		if ok {
   907  			metric = int32(score)
   908  		}
   909  	}
   910  
   911  	lineOffset := pgoir.NodeLineOffset(n, caller)
   912  	csi := pgoir.CallSiteInfo{LineOffset: lineOffset, Caller: caller}
   913  	_, hot := candHotEdgeMap[csi]
   914  
   915  	if metric <= maxCost {
   916  		// Simple case. Function is already cheap enough.
   917  		return true, 0, metric, hot
   918  	}
   919  
   920  	// We'll also allow inlining of hot functions below inlineHotMaxBudget,
   921  	// but only when the caller is not a big function.
   922  
   923  	if !hot {
   924  		// Cold
   925  		return false, maxCost, metric, false
   926  	}
   927  
   928  	// Hot
   929  
   930  	if bigCaller {
   931  		if base.Debug.PGODebug > 0 {
   932  			fmt.Printf("hot-big check disallows inlining for call %s (cost %d) at %v in big function %s\n", ir.PkgFuncName(callee), callee.Inl.Cost, ir.Line(n), ir.PkgFuncName(caller))
   933  		}
   934  		return false, maxCost, metric, false
   935  	}
   936  
   937  	if metric > inlineHotMaxBudget {
   938  		return false, inlineHotMaxBudget, metric, false
   939  	}
   940  
   941  	if !base.PGOHash.MatchPosWithInfo(n.Pos(), "inline", nil) {
   942  		// De-selected by PGO Hash.
   943  		return false, maxCost, metric, false
   944  	}
   945  
   946  	if base.Debug.PGODebug > 0 {
   947  		fmt.Printf("hot-budget check allows inlining for call %s (cost %d) at %v in function %s\n", ir.PkgFuncName(callee), callee.Inl.Cost, ir.Line(n), ir.PkgFuncName(caller))
   948  	}
   949  
   950  	return true, 0, metric, hot
   951  }
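
// Worked example (default parameters, for illustration): a callee of cost 15
// fits everywhere; one of cost 70 fits an ordinary call site (limit 80) but
// not a big caller (limit 20); one of cost 300 is rejected at cold sites but
// still allowed at a PGO-hot call site, since 300 <= inlineHotMaxBudget,
// provided the caller is not big and the site is not de-selected by the PGO
// hash.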
   952  
   953  // canInlineCallExpr returns true if the call n from caller to callee
   954  // can be inlined, plus the score computed for the call expr in question,
   955  // and whether the callee is hot according to PGO.
   956  // bigCaller indicates that caller is a big function. log
   957  // indicates that the 'cannot inline' reason should be logged.
   958  //
   959  // Preconditions: CanInline(callee) has already been called.
   960  func canInlineCallExpr(callerfn *ir.Func, n *ir.CallExpr, callee *ir.Func, bigCaller bool, log bool) (bool, int32, bool) {
   961  	if callee.Inl == nil {
   962  		// callee is never inlinable.
   963  		if log && logopt.Enabled() {
   964  			logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(callerfn),
   965  				fmt.Sprintf("%s cannot be inlined", ir.PkgFuncName(callee)))
   966  		}
   967  		return false, 0, false
   968  	}
   969  
   970  	ok, maxCost, callSiteScore, hot := inlineCostOK(n, callerfn, callee, bigCaller)
   971  	if !ok {
   972  		// callee cost too high for this call site.
   973  		if log && logopt.Enabled() {
   974  			logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(callerfn),
   975  				fmt.Sprintf("cost %d of %s exceeds max caller cost %d", callee.Inl.Cost, ir.PkgFuncName(callee), maxCost))
   976  		}
   977  		return false, 0, false
   978  	}
   979  
   980  	if callee == callerfn {
   981  		// Can't recursively inline a function into itself.
   982  		if log && logopt.Enabled() {
   983  			logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(callerfn), fmt.Sprintf("recursive call to %s", ir.FuncName(callerfn)))
   984  		}
   985  		return false, 0, false
   986  	}
   987  
   988  	if base.Flag.Cfg.Instrumenting && types.IsNoInstrumentPkg(callee.Sym().Pkg) {
   989  		// Runtime package must not be instrumented.
   990  		// Instrument skips runtime package. However, some runtime code can be
   991  		// inlined into other packages and instrumented there. To avoid this,
   992  		// we disable inlining of runtime functions when instrumenting.
   993  		// The example that we observed is inlining of LockOSThread,
   994  		// which lead to false race reports on m contents.
   995  		if log && logopt.Enabled() {
   996  			logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(callerfn),
   997  				fmt.Sprintf("call to runtime function %s in instrumented build", ir.PkgFuncName(callee)))
   998  		}
   999  		return false, 0, false
  1000  	}
  1001  
  1002  	if base.Flag.Race && types.IsNoRacePkg(callee.Sym().Pkg) {
  1003  		if log && logopt.Enabled() {
  1004  			logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(callerfn),
  1005  				fmt.Sprintf(`call into "no-race" package function %s in race build`, ir.PkgFuncName(callee)))
  1006  		}
  1007  		return false, 0, false
  1008  	}
  1009  
  1010  	// Check if we've already inlined this function at this particular
  1011  	// call site, in order to stop inlining when we reach the beginning
  1012  	// of a recursion cycle again. We don't inline immediately recursive
  1013  	// functions, but allow inlining if there is a recursion cycle of
  1014  	// many functions. Most likely, the inlining will stop before we
  1015  	// even hit the beginning of the cycle again, but this catches the
  1016  	// unusual case.
  1017  	parent := base.Ctxt.PosTable.Pos(n.Pos()).Base().InliningIndex()
  1018  	sym := callee.Linksym()
  1019  	for inlIndex := parent; inlIndex >= 0; inlIndex = base.Ctxt.InlTree.Parent(inlIndex) {
  1020  		if base.Ctxt.InlTree.InlinedFunction(inlIndex) == sym {
  1021  			if log {
  1022  				if base.Flag.LowerM > 1 {
  1023  					fmt.Printf("%v: cannot inline %v into %v: repeated recursive cycle\n", ir.Line(n), callee, ir.FuncName(callerfn))
  1024  				}
  1025  				if logopt.Enabled() {
  1026  					logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(callerfn),
  1027  						fmt.Sprintf("repeated recursive cycle to %s", ir.PkgFuncName(callee)))
  1028  				}
  1029  			}
  1030  			return false, 0, false
  1031  		}
  1032  	}
  1033  
  1034  	return true, callSiteScore, hot
  1035  }
  1036  
  1037  // mkinlcall returns an OINLCALL node that can replace OCALLFUNC n, or
  1038  // nil if it cannot be inlined. callerfn is the function that contains
  1039  // n, and fn is the function being called.
  1040  //
  1041  // The result of mkinlcall MUST be used in place of n by the caller, e.g.
  1042  //
  1043  //	n = mkinlcall(callerfn, n, fn, bigCaller)
  1044  func mkinlcall(callerfn *ir.Func, n *ir.CallExpr, fn *ir.Func, bigCaller bool) *ir.InlinedCallExpr {
  1045  	ok, score, hot := canInlineCallExpr(callerfn, n, fn, bigCaller, true)
  1046  	if !ok {
  1047  		return nil
  1048  	}
  1049  	if hot {
  1050  		hasHotCall[callerfn] = struct{}{}
  1051  	}
  1052  	typecheck.AssertFixedCall(n)
  1053  
  1054  	parent := base.Ctxt.PosTable.Pos(n.Pos()).Base().InliningIndex()
  1055  	sym := fn.Linksym()
  1056  	inlIndex := base.Ctxt.InlTree.Add(parent, n.Pos(), sym, ir.FuncName(fn))
  1057  
  1058  	closureInitLSym := func(n *ir.CallExpr, fn *ir.Func) {
  1059  		// The linker needs FuncInfo metadata for all inlined
  1060  		// functions. This is typically handled by gc.enqueueFunc
  1061  		// calling ir.InitLSym for all function declarations in
  1062  		// typecheck.Target.Decls (ir.UseClosure adds all closures to
  1063  		// Decls).
  1064  		//
  1065  		// However, non-trivial closures in Decls are ignored, and are
  1066  		// instead enqueued when walk of the calling function
  1067  		// discovers them.
  1068  		//
  1069  		// This presents a problem for direct calls to closures.
  1070  		// Inlining will replace the entire closure definition with its
  1071  		// body, which hides the closure from walk and thus suppresses
  1072  		// symbol creation.
  1073  		//
  1074  		// Explicitly create a symbol early in this edge case to ensure
  1075  		// we keep this metadata.
  1076  		//
  1077  		// TODO: Refactor to keep a reference so this can all be done
  1078  		// by enqueueFunc.
  1079  
  1080  		if n.Op() != ir.OCALLFUNC {
  1081  			// Not a standard call.
  1082  			return
  1083  		}
  1084  		if n.Fun.Op() != ir.OCLOSURE {
  1085  			// Not a direct closure call.
  1086  			return
  1087  		}
  1088  
  1089  		clo := n.Fun.(*ir.ClosureExpr)
  1090  		if ir.IsTrivialClosure(clo) {
  1091  			// enqueueFunc will handle trivial closures anyways.
  1092  			return
  1093  		}
  1094  
  1095  		ir.InitLSym(fn, true)
  1096  	}
  1097  
  1098  	closureInitLSym(n, fn)
  1099  
  1100  	if base.Flag.GenDwarfInl > 0 {
  1101  		if !sym.WasInlined() {
  1102  			base.Ctxt.DwFixups.SetPrecursorFunc(sym, fn)
  1103  			sym.Set(obj.AttrWasInlined, true)
  1104  		}
  1105  	}
  1106  
  1107  	if base.Flag.LowerM != 0 {
  1108  		if buildcfg.Experiment.NewInliner {
  1109  			fmt.Printf("%v: inlining call to %v with score %d\n",
  1110  				ir.Line(n), fn, score)
  1111  		} else {
  1112  			fmt.Printf("%v: inlining call to %v\n", ir.Line(n), fn)
  1113  		}
  1114  	}
  1115  	if base.Flag.LowerM > 2 {
  1116  		fmt.Printf("%v: Before inlining: %+v\n", ir.Line(n), n)
  1117  	}
  1118  
  1119  	res := InlineCall(callerfn, n, fn, inlIndex)
  1120  
  1121  	if res == nil {
  1122  		base.FatalfAt(n.Pos(), "inlining call to %v failed", fn)
  1123  	}
  1124  
  1125  	if base.Flag.LowerM > 2 {
  1126  		fmt.Printf("%v: After inlining %+v\n\n", ir.Line(res), res)
  1127  	}
  1128  
  1129  	if inlheur.Enabled() {
  1130  		inlheur.UpdateCallsiteTable(callerfn, n, res)
  1131  	}
  1132  
  1133  	return res
  1134  }
  1135  
  1136  // CalleeEffects appends any side effects from evaluating callee to init.
  1137  func CalleeEffects(init *ir.Nodes, callee ir.Node) {
  1138  	for {
  1139  		init.Append(ir.TakeInit(callee)...)
  1140  
  1141  		switch callee.Op() {
  1142  		case ir.ONAME, ir.OCLOSURE, ir.OMETHEXPR:
  1143  			return // done
  1144  
  1145  		case ir.OCONVNOP:
  1146  			conv := callee.(*ir.ConvExpr)
  1147  			callee = conv.X
  1148  
  1149  		case ir.OINLCALL:
  1150  			ic := callee.(*ir.InlinedCallExpr)
  1151  			init.Append(ic.Body.Take()...)
  1152  			callee = ic.SingleResult()
  1153  
  1154  		default:
  1155  			base.FatalfAt(callee.Pos(), "unexpected callee expression: %v", callee)
  1156  		}
  1157  	}
  1158  }
  1159  
  1160  func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
  1161  	s := make([]*ir.Name, 0, len(ll))
  1162  	for _, n := range ll {
  1163  		if n.Class == ir.PAUTO {
  1164  			if !vis.usedLocals.Has(n) {
  1165  				// TODO(mdempsky): Simplify code after confident that this
  1166  				// never happens anymore.
  1167  				base.FatalfAt(n.Pos(), "unused auto: %v", n)
  1168  				continue
  1169  			}
  1170  		}
  1171  		s = append(s, n)
  1172  	}
  1173  	return s
  1174  }
  1175  
  1176  // numNonClosures returns the number of functions in list which are not closures.
  1177  func numNonClosures(list []*ir.Func) int {
  1178  	count := 0
  1179  	for _, fn := range list {
  1180  		if fn.OClosure == nil {
  1181  			count++
  1182  		}
  1183  	}
  1184  	return count
  1185  }
  1186  
  1187  func doList(list []ir.Node, do func(ir.Node) bool) bool {
  1188  	for _, x := range list {
  1189  		if x != nil {
  1190  			if do(x) {
  1191  				return true
  1192  			}
  1193  		}
  1194  	}
  1195  	return false
  1196  }
  1197  
  1198  // isIndexingCoverageCounter returns true if the specified node 'n' is indexing
  1199  // into a coverage counter array.
  1200  func isIndexingCoverageCounter(n ir.Node) bool {
  1201  	if n.Op() != ir.OINDEX {
  1202  		return false
  1203  	}
  1204  	ixn := n.(*ir.IndexExpr)
  1205  	if ixn.X.Op() != ir.ONAME || !ixn.X.Type().IsArray() {
  1206  		return false
  1207  	}
  1208  	nn := ixn.X.(*ir.Name)
  1209  	// CoverageAuxVar implies either a coverage counter or a package
  1210  	// ID; since the cover tool never emits code to index into ID vars
  1211  	// this is effectively testing whether nn is a coverage counter.
  1212  	return nn.CoverageAuxVar()
  1213  }
  1214  
  1215  // isAtomicCoverageCounterUpdate examines the specified node to
  1216  // determine whether it represents a call to sync/atomic.AddUint32 to
  1217  // increment a coverage counter.
  1218  func isAtomicCoverageCounterUpdate(cn *ir.CallExpr) bool {
  1219  	if cn.Fun.Op() != ir.ONAME {
  1220  		return false
  1221  	}
  1222  	name := cn.Fun.(*ir.Name)
  1223  	if name.Class != ir.PFUNC {
  1224  		return false
  1225  	}
  1226  	fn := name.Sym().Name
  1227  	if name.Sym().Pkg.Path != "sync/atomic" ||
  1228  		(fn != "AddUint32" && fn != "StoreUint32") {
  1229  		return false
  1230  	}
  1231  	if len(cn.Args) != 2 || cn.Args[0].Op() != ir.OADDR {
  1232  		return false
  1233  	}
  1234  	adn := cn.Args[0].(*ir.AddrExpr)
  1235  	v := isIndexingCoverageCounter(adn.X)
  1236  	return v
  1237  }
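
// The pattern recognized above is, for illustration, an atomic counter update
// emitted by coverage instrumentation of the form
//
//	atomic.AddUint32(&counters[i], 1)
//
// where counters stands for a compiler-generated coverage counter array
// (a CoverageAuxVar) and i is the counter index.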
  1238  
  1239  func PostProcessCallSites(profile *pgoir.Profile) {
  1240  	if base.Debug.DumpInlCallSiteScores != 0 {
  1241  		budgetCallback := func(fn *ir.Func, prof *pgoir.Profile) (int32, bool) {
  1242  			v := inlineBudget(fn, prof, false, false)
  1243  			return v, v == inlineHotMaxBudget
  1244  		}
  1245  		inlheur.DumpInlCallSiteScores(profile, budgetCallback)
  1246  	}
  1247  }
  1248  
  1249  func analyzeFuncProps(fn *ir.Func, p *pgoir.Profile) {
  1250  	canInline := func(fn *ir.Func) { CanInline(fn, p) }
  1251  	budgetForFunc := func(fn *ir.Func) int32 {
  1252  		return inlineBudget(fn, p, true, false)
  1253  	}
  1254  	inlheur.AnalyzeFunc(fn, canInline, budgetForFunc, inlineMaxBudget)
  1255  }
  1256  
