Skip to content

Commit

Permalink
cmd/compile/internal/escape: optimize indirect closure calls
Browse files Browse the repository at this point in the history
This CL extends escape analysis in two ways.

First, we already optimize directly called closures. For example,
given:

	var x int  // already stack allocated today
	p := func() *int { return &x }()

we don't need to move x to the heap, because we can statically track
where &x flows. This CL extends the same idea to work for indirectly
called closures too, as long as we know everywhere that they're
called. For example:

	var x int  // stack allocated after this CL
	f := func() *int { return &x }
	p := f()

This will allow a subsequent CL to move the generation of go/defer
wrappers earlier.

Second, this CL adds tracking to detect when pointer values flow to
the pointee operand of an indirect assignment statement (i.e., flows
to p in "*p = x") or to builtins that modify memory (append, copy,
clear). This isn't utilized in the current CL, but a subsequent CL
will make use of it to better optimize string->[]byte conversions.

Updates #2205.

Change-Id: I610f9c531e135129c947684833e288ce64406f35
Reviewed-on: https://go-review.googlesource.com/c/go/+/520259
Run-TryBot: Matthew Dempsky <[email protected]>
TryBot-Result: Gopher Robot <[email protected]>
Auto-Submit: Matthew Dempsky <[email protected]>
Reviewed-by: Cuong Manh Le <[email protected]>
Reviewed-by: Dmitri Shuralyov <[email protected]>
  • Loading branch information
mdempsky authored and gopherbot committed Aug 17, 2023
1 parent f278ae6 commit ff47dd1
Show file tree
Hide file tree
Showing 9 changed files with 268 additions and 97 deletions.
8 changes: 6 additions & 2 deletions src/cmd/compile/internal/escape/assign.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,10 @@ func (e *escape) addr(n ir.Node) hole {
if n.X.Type().IsArray() {
k = e.addr(n.X)
} else {
e.discard(n.X)
e.mutate(n.X)
}
case ir.ODEREF, ir.ODOTPTR:
e.discard(n)
e.mutate(n)
case ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
e.discard(n.X)
Expand All @@ -52,6 +52,10 @@ func (e *escape) addr(n ir.Node) hole {
return k
}

// mutate evaluates n and records that any memory addressed through
// pointers reachable from n may be mutated — e.g., n is the pointee
// operand p in "*p = x", or the destination of append/copy/clear.
// It does this by flowing n into the batch-wide mutator hole.
func (e *escape) mutate(n ir.Node) {
	e.expr(e.mutatorHole(), n)
}

func (e *escape) addrs(l ir.Nodes) []hole {
var ks []hole
for _, n := range l {
Expand Down
49 changes: 33 additions & 16 deletions src/cmd/compile/internal/escape/call.go
Original file line number Diff line number Diff line change
Expand Up @@ -68,17 +68,8 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir
var fn *ir.Name
switch call.Op() {
case ir.OCALLFUNC:
// If we have a direct call to a closure (not just one we were
// able to statically resolve with ir.StaticValue), mark it as
// such so batch.outlives can optimize the flow results.
if call.X.Op() == ir.OCLOSURE {
call.X.(*ir.ClosureExpr).Func.SetClosureCalled(true)
}

v := ir.StaticValue(call.X)
fn = ir.StaticCalleeName(v)
case ir.OCALLMETH:
base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
}

fntype := call.X.Type()
Expand All @@ -88,7 +79,7 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir

if ks != nil && fn != nil && e.inMutualBatch(fn) {
for i, result := range fn.Type().Results().FieldSlice() {
e.expr(ks[i], ir.AsNode(result.Nname))
e.expr(ks[i], result.Nname.(*ir.Name))
}
}

Expand All @@ -99,7 +90,20 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir
// Note: We use argument and not argumentFunc, because while
// call.X here may be an argument to runtime.{new,defer}proc,
// it's not an argument to fn itself.
argument(e.discardHole(), &call.X)
calleeK := e.discardHole()
if fn == nil { // unknown callee
for _, k := range ks {
if k.dst != &e.blankLoc {
// The results flow somewhere, but we don't statically
// know the callee function. If a closure flows here, we
// need to conservatively assume its results might flow to
// the heap.
calleeK = e.calleeHole()
break
}
}
}
argument(calleeK, &call.X)
} else {
recvp = &call.X.(*ir.SelectorExpr).X
}
Expand Down Expand Up @@ -139,7 +143,7 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir
// it has enough capacity. Alternatively, a new heap
// slice might be allocated, and all slice elements
// might flow to heap.
appendeeK := ks[0]
appendeeK := e.teeHole(ks[0], e.mutatorHole())
if args[0].Type().Elem().HasPointers() {
appendeeK = e.teeHole(appendeeK, e.heapHole().deref(call, "appendee slice"))
}
Expand All @@ -160,7 +164,7 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir

case ir.OCOPY:
call := call.(*ir.BinaryExpr)
argument(e.discardHole(), &call.X)
argument(e.mutatorHole(), &call.X)

copiedK := e.discardHole()
if call.Y.Type().IsSlice() && call.Y.Type().Elem().HasPointers() {
Expand All @@ -185,10 +189,14 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir
}
argumentRType(&call.RType)

case ir.OLEN, ir.OCAP, ir.OREAL, ir.OIMAG, ir.OCLOSE, ir.OCLEAR:
case ir.OLEN, ir.OCAP, ir.OREAL, ir.OIMAG, ir.OCLOSE:
call := call.(*ir.UnaryExpr)
argument(e.discardHole(), &call.X)

case ir.OCLEAR:
call := call.(*ir.UnaryExpr)
argument(e.mutatorHole(), &call.X)

case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
call := call.(*ir.UnaryExpr)
argument(ks[0], &call.X)
Expand Down Expand Up @@ -251,6 +259,7 @@ func (e *escape) goDeferStmt(n *ir.GoDeferStmt) {
fn := ir.NewClosureFunc(n.Pos(), true)
fn.SetWrapper(true)
fn.Nname.SetType(types.NewSignature(nil, nil, nil))
fn.SetEsc(escFuncTagged) // no params; effectively tagged already
fn.Body = []ir.Node{call}
if call, ok := call.(*ir.CallExpr); ok && call.Op() == ir.OCALLFUNC {
// If the callee is a named function, link to the original callee.
Expand Down Expand Up @@ -310,9 +319,11 @@ func (e *escape) rewriteArgument(argp *ir.Node, init *ir.Nodes, call ir.Node, fn
// Create and declare a new pointer-typed temp variable.
tmp := e.wrapExpr(arg.Pos(), &arg.X, init, call, wrapper)

k := e.mutatorHole()
if pragma&ir.UintptrEscapes != 0 {
e.flow(e.heapHole().note(arg, "//go:uintptrescapes"), e.oldLoc(tmp))
k = e.heapHole().note(arg, "//go:uintptrescapes")
}
e.flow(k, e.oldLoc(tmp))

if pragma&ir.UintptrKeepAlive != 0 {
call := call.(*ir.CallExpr)
Expand Down Expand Up @@ -454,11 +465,17 @@ func (e *escape) tagHole(ks []hole, fn *ir.Name, param *types.Field) hole {
// Call to previously tagged function.

var tagKs []hole

esc := parseLeaks(param.Note)

if x := esc.Heap(); x >= 0 {
tagKs = append(tagKs, e.heapHole().shift(x))
}
if x := esc.Mutator(); x >= 0 {
tagKs = append(tagKs, e.mutatorHole().shift(x))
}
if x := esc.Callee(); x >= 0 {
tagKs = append(tagKs, e.calleeHole().shift(x))
}

if ks != nil {
for i := 0; i < numEscResults; i++ {
Expand Down
53 changes: 45 additions & 8 deletions src/cmd/compile/internal/escape/escape.go
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,10 @@ type batch struct {
allLocs []*location
closures []closure

heapLoc location
blankLoc location
heapLoc location
mutatorLoc location
calleeLoc location
blankLoc location
}

// A closure holds a closure expression and its spill hole (i.e.,
Expand Down Expand Up @@ -129,7 +131,9 @@ func Batch(fns []*ir.Func, recursive bool) {
}

var b batch
b.heapLoc.attrs = attrEscapes | attrPersists
b.heapLoc.attrs = attrEscapes | attrPersists | attrMutates | attrCalls
b.mutatorLoc.attrs = attrMutates
b.calleeLoc.attrs = attrCalls

// Construct data-flow graph from syntax trees.
for _, fn := range fns {
Expand Down Expand Up @@ -288,6 +292,7 @@ func (b *batch) finish(fns []*ir.Func) {
if n == nil {
continue
}

if n.Op() == ir.ONAME {
n := n.(*ir.Name)
n.Opt = nil
Expand Down Expand Up @@ -337,6 +342,20 @@ func (b *batch) finish(fns []*ir.Func) {
}
}
}

// If the result of a string->[]byte conversion is never mutated,
// then it can simply reuse the string's memory directly.
//
// TODO(mdempsky): Enable in a subsequent CL. We need to ensure
// []byte("") evaluates to []byte{}, not []byte(nil).
if false {
if n, ok := n.(*ir.ConvExpr); ok && n.Op() == ir.OSTR2BYTES && !loc.hasAttr(attrMutates) {
if base.Flag.LowerM >= 1 {
base.WarnfAt(n.Pos(), "zero-copy string->[]byte conversion")
}
n.SetOp(ir.OSTR2BYTESTMP)
}
}
}
}

Expand All @@ -345,10 +364,10 @@ func (b *batch) finish(fns []*ir.Func) {
// fn has not yet been analyzed, so its parameters and results
// should be incorporated directly into the flow graph instead of
// relying on its escape analysis tagging.
func (e *escape) inMutualBatch(fn *ir.Name) bool {
func (b *batch) inMutualBatch(fn *ir.Name) bool {
if fn.Defn != nil && fn.Defn.Esc() < escFuncTagged {
if fn.Defn.Esc() == escFuncUnknown {
base.Fatalf("graph inconsistency: %v", fn)
base.FatalfAt(fn.Pos(), "graph inconsistency: %v", fn)
}
return true
}
Expand Down Expand Up @@ -411,6 +430,8 @@ func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
if diagnose && f.Sym != nil {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
esc.AddMutator(0)
esc.AddCallee(0)
} else {
if diagnose && f.Sym != nil {
base.WarnfAt(f.Pos, "leaking param: %v", name())
Expand Down Expand Up @@ -453,21 +474,37 @@ func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
esc.Optimize()

if diagnose && !loc.hasAttr(attrEscapes) {
if esc.Empty() {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
anyLeaks := false
if x := esc.Heap(); x >= 0 {
if x == 0 {
base.WarnfAt(f.Pos, "leaking param: %v", name())
} else {
// TODO(mdempsky): Mention level=x like below?
base.WarnfAt(f.Pos, "leaking param content: %v", name())
}
anyLeaks = true
}
for i := 0; i < numEscResults; i++ {
if x := esc.Result(i); x >= 0 {
res := fn.Type().Results().Field(i).Sym
base.WarnfAt(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x)
anyLeaks = true
}
}
if !anyLeaks {
base.WarnfAt(f.Pos, "%v does not escape", name())
}

if base.Flag.LowerM >= 2 {
if x := esc.Mutator(); x >= 0 {
base.WarnfAt(f.Pos, "mutates param: %v derefs=%v", name(), x)
} else {
base.WarnfAt(f.Pos, "does not mutate param: %v", name())
}
if x := esc.Callee(); x >= 0 {
base.WarnfAt(f.Pos, "calls param: %v derefs=%v", name(), x)
} else {
base.WarnfAt(f.Pos, "does not call param: %v", name())
}
}
}
Expand Down
52 changes: 47 additions & 5 deletions src/cmd/compile/internal/escape/graph.go
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,17 @@ const (
// address outlives the statement; that is, whether its storage
// cannot be immediately reused.
attrPersists

// attrMutates indicates whether pointers that are reachable from
// this location may have their addressed memory mutated. This is
// used to detect string->[]byte conversions that can be safely
// optimized away.
attrMutates

// attrCalls indicates whether closures that are reachable from this
// location may be called without tracking their results. This is
// used to better optimize indirect closure calls.
attrCalls
)

func (l *location) hasAttr(attr locAttr) bool { return l.attrs&attr != 0 }
Expand Down Expand Up @@ -121,6 +132,35 @@ func (l *location) leakTo(sink *location, derefs int) {
l.paramEsc.AddHeap(derefs)
}

// leakTo records that parameter l leaks to sink with the given number
// of dereferences, either as a flow to a specific result parameter or
// as a general heap leak.
func (b *batch) leakTo(l, sink *location, derefs int) {
	// Emit diagnostics (with -m=2) and LogOpt records for parameters
	// that leak but have not already been marked as escaping.
	if (logopt.Enabled() || base.Flag.LowerM >= 2) && !l.hasAttr(attrEscapes) {
		if base.Flag.LowerM >= 2 {
			fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", base.FmtPos(l.n.Pos()), l.n, b.explainLoc(sink), derefs)
		}
		explanation := b.explainPath(sink, l)
		if logopt.Enabled() {
			var e_curfn *ir.Func // TODO(mdempsky): Fix.
			logopt.LogOpt(l.n.Pos(), "leak", "escape", ir.FuncName(e_curfn),
				fmt.Sprintf("parameter %v leaks to %s with derefs=%d", l.n, b.explainLoc(sink), derefs), explanation)
		}
	}

	// If sink is a result parameter that doesn't escape (#44614)
	// and we can fit return bits into the escape analysis tag,
	// then record as a result leak.
	if !sink.hasAttr(attrEscapes) && sink.isName(ir.PPARAMOUT) && sink.curfn == l.curfn {
		// resultIndex is 1-based; 0 means "not a result", so subtract
		// one to get the actual result slot.
		if ri := sink.resultIndex - 1; ri < numEscResults {
			// Leak to result parameter.
			l.paramEsc.AddResult(ri, derefs)
			return
		}
	}

	// Otherwise, record as heap leak.
	l.paramEsc.AddHeap(derefs)
}

// isName reports whether l represents an ONAME node whose storage
// class is c (e.g., ir.PPARAMOUT for result parameters).
func (l *location) isName(c ir.Class) bool {
	return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class == c
}
Expand Down Expand Up @@ -203,7 +243,7 @@ func (b *batch) flow(k hole, src *location) {
}

}
src.attrs |= attrEscapes
src.attrs |= attrEscapes | attrPersists | attrMutates | attrCalls
return
}

Expand All @@ -212,11 +252,13 @@ func (b *batch) flow(k hole, src *location) {
}

// heapHole returns a hole representing flow to the heap.
func (b *batch) heapHole() hole { return b.heapLoc.asHole() }

// mutatorHole returns a hole representing pointers whose addressed
// memory may be mutated (see attrMutates).
func (b *batch) mutatorHole() hole { return b.mutatorLoc.asHole() }

// calleeHole returns a hole representing closures that may be called
// without their results being tracked (see attrCalls).
func (b *batch) calleeHole() hole { return b.calleeLoc.asHole() }

// discardHole returns a hole for values whose flow can be ignored.
func (b *batch) discardHole() hole { return b.blankLoc.asHole() }

// oldLoc returns the location previously recorded for n's canonical
// name, failing if escape analysis has not yet assigned one.
func (b *batch) oldLoc(n *ir.Name) *location {
	canon := n.Canonical()
	opt := canon.Opt
	if opt == nil {
		base.FatalfAt(n.Pos(), "%v has no location", n)
	}
	return opt.(*location)
}
Expand All @@ -231,7 +273,7 @@ func (e *escape) newLoc(n ir.Node, persists bool) *location {

if n != nil && n.Op() == ir.ONAME {
if canon := n.(*ir.Name).Canonical(); n != canon {
base.Fatalf("newLoc on non-canonical %v (canonical is %v)", n, canon)
base.FatalfAt(n.Pos(), "newLoc on non-canonical %v (canonical is %v)", n, canon)
}
}
loc := &location{
Expand All @@ -249,11 +291,11 @@ func (e *escape) newLoc(n ir.Node, persists bool) *location {
if n.Class == ir.PPARAM && n.Curfn == nil {
// ok; hidden parameter
} else if n.Curfn != e.curfn {
base.Fatalf("curfn mismatch: %v != %v for %v", n.Curfn, e.curfn, n)
base.FatalfAt(n.Pos(), "curfn mismatch: %v != %v for %v", n.Curfn, e.curfn, n)
}

if n.Opt != nil {
base.Fatalf("%v already has a location", n)
base.FatalfAt(n.Pos(), "%v already has a location", n)
}
n.Opt = loc
}
Expand Down
Loading

0 comments on commit ff47dd1

Please sign in to comment.